author    Karl Berry <karl@freefriends.org>  2013-04-08 00:43:40 +0000
committer Karl Berry <karl@freefriends.org>  2013-04-08 00:43:40 +0000
commit    824f7b0903de8ad7f6ee3d7656005e4c59155e06 (patch)
tree      abfc8673ef9916f3ab7074e811207384c301492b
parent    689aefb0727676ed3cddf331337b4be226495e72 (diff)

context import for TL13, from www.pragma-ade.com/context/beta/cont-tmf.zip

git-svn-id: svn://tug.org/texlive/trunk@29731 c570f23f-e606-0410-a88d-b1316a301751
-rw-r--r--Build/source/texk/texlive/linked_scripts/Makefile.am10
-rw-r--r--Build/source/texk/texlive/linked_scripts/Makefile.in9
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/context2
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/ctxtools2
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/luatools2
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/mtxrun17082
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/pstopdf2
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/texexec2
-rw-r--r--Build/source/texk/texlive/linked_scripts/context/stubs/unix/texmfstart2
-rw-r--r--Master/texmf-dist/bibtex/bst/context/cont-ab.bst1
-rw-r--r--Master/texmf-dist/bibtex/bst/context/cont-au.bst1
-rw-r--r--Master/texmf-dist/bibtex/bst/context/cont-no.bst1
-rw-r--r--Master/texmf-dist/bibtex/bst/context/cont-ti.bst1
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-context.lua4
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-interfaces.lua4
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metafun.lua4
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metapost.lua2
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-tex.lua4
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-cld.lua1
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua8
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua.lua123
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-mps.lua15
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua85
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-txt.lua40
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-web.lua155
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-comment.lua5
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-script.lua30
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml.lua49
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/scite-context-lexer.lua643
-rw-r--r--Master/texmf-dist/context/data/scite/lexers/themes/scite-context-theme.lua1
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-data-context.properties284
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-data-interfaces.properties1622
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-data-metafun.properties50
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-data-metapost.properties110
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-data-tex.properties384
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-external.properties23
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-internal.properties4
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-readme.pdfbin205340 -> 212111 bytes
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context-readme.tex22
-rw-r--r--Master/texmf-dist/context/data/scite/scite-context.properties42
-rw-r--r--Master/texmf-dist/context/data/scite/scite-ctx.lua67
-rw-r--r--Master/texmf-dist/context/data/scite/scite-ctx.properties3
-rw-r--r--Master/texmf-dist/context/data/scite/scite-pragma.properties4
-rw-r--r--Master/texmf-dist/doc/context/bib/bibmod-doc.pdfbin284752 -> 0 bytes
-rw-r--r--Master/texmf-dist/doc/context/bib/bibmod-doc.tex750
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/ctxtools.html58
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/ctxtools.man75
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/ctxtools.xml31
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.html49
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.man48
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.xml22
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/mptopdf.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/mptopdf.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pdftools.html49
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pdftools.man48
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pdftools.xml22
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pstopdf.html43
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pstopdf.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/pstopdf.xml16
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/rlxtools.html43
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/rlxtools.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/rlxtools.xml16
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texexec.html57
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texexec.man72
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texexec.xml30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texmfstart.html94
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texmfstart.man147
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texmfstart.xml78
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/textools.html57
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/textools.man72
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/textools.xml30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texutil.html48
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texutil.man45
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/texutil.xml21
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/tmftools.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/tmftools.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/tmftools.xml18
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/xmltools.html48
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/xmltools.man45
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkii/xmltools.xml21
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/context.html104
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/context.man167
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/context.xml187
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/luatools.html59
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/luatools.man78
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/luatools.xml31
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.html59
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.man78
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.xml32
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.html46
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.man36
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.xml20
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.html42
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.xml15
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.xml23
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.html104
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.man167
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.xml187
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.xml23
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.html56
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.man54
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.xml35
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.html45
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.xml24
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.html71
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.man63
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.xml59
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.html45
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.man36
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.xml21
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.html60
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.man69
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.xml37
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.html50
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.man39
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.xml32
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.html43
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.xml16
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.html42
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.xml15
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.html51
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.man39
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.xml30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.html43
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.xml16
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.html42
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.man27
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.xml15
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.html47
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.man42
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.xml20
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.html43
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.man30
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.xml16
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.html56
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.man63
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.xml31
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.html44
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.man33
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.xml17
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.html59
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.man78
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.xml32
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.html49
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.man48
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.xml22
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.html94
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.man147
-rw-r--r--Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.xml79
-rw-r--r--Master/texmf-dist/doc/context/scripts/perl/texshow.170
-rw-r--r--Master/texmf-dist/doc/context/scripts/perl/texshow.html137
-rw-r--r--Master/texmf-dist/doc/man/man1/context.1261
-rw-r--r--Master/texmf-dist/doc/man/man1/ctxtools.1148
-rw-r--r--Master/texmf-dist/doc/man/man1/luatools.178
-rw-r--r--Master/texmf-dist/doc/man/man1/luatools.man1.pdfbin0 -> 5105 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-babel.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-babel.man1.pdfbin0 -> 3672 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-base.178
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-base.man1.pdfbin0 -> 5124 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-cache.136
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-cache.man1.pdfbin0 -> 3707 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-chars.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-chars.man1.pdfbin0 -> 3566 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-check.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-check.man1.pdfbin0 -> 3580 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-colors.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-colors.man1.pdfbin0 -> 3442 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-context.1167
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-context.man1.pdfbin0 -> 7542 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-epub.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-epub.man1.pdfbin0 -> 3535 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-fcd.154
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-fcd.man1.pdfbin0 -> 3960 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-flac.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-flac.man1.pdfbin0 -> 3625 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-fonts.163
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-fonts.man1.pdfbin0 -> 4206 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-grep.136
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-grep.man1.pdfbin0 -> 3545 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-interface.169
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-interface.man1.pdfbin0 -> 4105 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-metapost.139
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-metapost.man1.pdf110
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-metatex.130
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-metatex.man1.pdfbin0 -> 3630 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-modules.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-modules.man1.pdfbin0 -> 3760 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-package.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-package.man1.pdfbin0 -> 3568 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-patterns.139
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-patterns.man1.pdfbin0 -> 4013 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-pdf.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-pdf.man1.pdfbin0 -> 3661 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-profile.130
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-profile.man1.pdf117
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-rsync.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-rsync.man1.pdfbin0 -> 3635 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-scite.127
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-scite.man1.pdfbin0 -> 3446 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-server.142
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-server.man1.pdf113
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-texworks.130
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-texworks.man1.pdfbin0 -> 3557 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-timing.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-timing.man1.pdfbin0 -> 3679 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-tools.163
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-tools.man1.pdfbin0 -> 3967 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-unzip.133
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-unzip.man1.pdf111
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-update.178
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-update.man1.pdfbin0 -> 5030 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-watch.148
-rw-r--r--Master/texmf-dist/doc/man/man1/mtx-watch.man1.pdfbin0 -> 3867 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/mtxrun.1147
-rw-r--r--Master/texmf-dist/doc/man/man1/mtxrun.man1.pdfbin0 -> 7032 bytes
-rw-r--r--Master/texmf-dist/doc/man/man1/pstopdf.187
-rw-r--r--Master/texmf-dist/doc/man/man1/texexec.1656
-rw-r--r--Master/texmf-dist/doc/man/man1/texmfstart.1443
-rw-r--r--Master/texmf-dist/fonts/map/pdftex/context/koeieletters.map8
-rw-r--r--Master/texmf-dist/fonts/opentype/context/tests/texmfhome.otfbin1884 -> 0 bytes
-rw-r--r--Master/texmf-dist/fonts/pfm/hoekwater/context/contnav.pfmbin0 -> 482 bytes
-rw-r--r--Master/texmf-dist/metapost/context/base/metafun.mpiv7
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-abck.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-apos.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-asnc.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-butt.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-char.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-chem.mpiv2074
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-core.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-crop.mpiv14
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-figs.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-form.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-func.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-grap.mpiv526
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-grid.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-grph.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-mlib.mpiv94
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-page.mpiv246
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-shap.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-step.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-text.mpiv2
-rw-r--r--Master/texmf-dist/metapost/context/base/mp-tool.mpiv197
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-babel.lua22
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-base.lua54
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-cache.lua27
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-chars.lua22
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-check.lua18
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-colors.lua30
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-context.lua1726
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-context.xml187
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-convert.lua34
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-epub.lua143
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-fcd.lua386
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-flac.lua70
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-fonts.lua148
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-grep.lua35
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-interface.lua60
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-metapost.lua53
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-metatex.lua20
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-mk-help.lua473
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-modules.lua22
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-package.lua40
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-patterns.lua57
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-pdf.lua162
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-profile.lua55
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-rsync.lua31
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-scite.lua48
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua30
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua217
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-server.lua82
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-texworks.lua20
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-timing.lua25
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-tools.lua53
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-unzip.lua22
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-update.lua87
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtx-watch.lua45
-rw-r--r--Master/texmf-dist/scripts/context/lua/mtxlibs.lua240
-rwxr-xr-xMaster/texmf-dist/scripts/context/lua/mtxrun.lua28110
-rw-r--r--Master/texmf-dist/scripts/context/ruby/base/logger.rb3
-rw-r--r--Master/texmf-dist/scripts/context/ruby/base/tex.rb3
-rw-r--r--Master/texmf-dist/scripts/context/ruby/fcd_start.rb472
-rw-r--r--Master/texmf-dist/scripts/context/ruby/mpstools.rb7
-rw-r--r--Master/texmf-dist/scripts/context/ruby/texexec.rb2
-rw-r--r--Master/texmf-dist/scripts/context/ruby/texsync.rb206
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/mswin/ctxtools.exe (renamed from Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.exe)bin6144 -> 6144 bytes
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/mswin/mtxrun.dllbin9216 -> 38400 bytes
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/mswin/mtxrun.exebin6144 -> 34816 bytes
-rw-r--r--Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua28110
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/mswin/pstopdf.exebin0 -> 6144 bytes
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/mswin/setuptex.bat34
-rw-r--r--Master/texmf-dist/scripts/context/stubs/unix/ctxtools2
-rwxr-xr-xMaster/texmf-dist/scripts/context/stubs/unix/mtxrun28110
-rw-r--r--Master/texmf-dist/scripts/context/stubs/unix/pstopdf2
-rw-r--r--Master/texmf-dist/tex/context/base/anch-bar.mkiv150
-rw-r--r--Master/texmf-dist/tex/context/base/anch-bck.mkvi14
-rw-r--r--Master/texmf-dist/tex/context/base/anch-pgr.lua375
-rw-r--r--Master/texmf-dist/tex/context/base/anch-pgr.mkiv120
-rw-r--r--Master/texmf-dist/tex/context/base/anch-pos.lua76
-rw-r--r--Master/texmf-dist/tex/context/base/anch-pos.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/anch-tab.mkiv188
-rw-r--r--Master/texmf-dist/tex/context/base/attr-col.lua88
-rw-r--r--Master/texmf-dist/tex/context/base/attr-eff.lua42
-rw-r--r--Master/texmf-dist/tex/context/base/attr-eff.mkiv43
-rw-r--r--Master/texmf-dist/tex/context/base/attr-ini.lua38
-rw-r--r--Master/texmf-dist/tex/context/base/attr-ini.mkiv17
-rw-r--r--Master/texmf-dist/tex/context/base/attr-lay.lua102
-rw-r--r--Master/texmf-dist/tex/context/base/attr-lay.mkiv82
-rw-r--r--Master/texmf-dist/tex/context/base/attr-mkr.lua26
-rw-r--r--Master/texmf-dist/tex/context/base/attr-mkr.mkiv25
-rw-r--r--Master/texmf-dist/tex/context/base/attr-neg.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/attr-neg.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/back-exp.lua391
-rw-r--r--Master/texmf-dist/tex/context/base/back-exp.mkiv26
-rw-r--r--Master/texmf-dist/tex/context/base/back-ini.lua14
-rw-r--r--Master/texmf-dist/tex/context/base/back-ini.mkiv6
-rw-r--r--Master/texmf-dist/tex/context/base/back-pdf.mkiv32
-rw-r--r--Master/texmf-dist/tex/context/base/back-swf.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/back-u3d.mkiv7
-rw-r--r--Master/texmf-dist/tex/context/base/bibl-bib.lua25
-rw-r--r--Master/texmf-dist/tex/context/base/bibl-bib.mkiv7
-rw-r--r--Master/texmf-dist/tex/context/base/bibl-tra.lua16
-rw-r--r--Master/texmf-dist/tex/context/base/bibl-tra.mkiv342
-rw-r--r--Master/texmf-dist/tex/context/base/blob-ini.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/buff-ini.lua193
-rw-r--r--Master/texmf-dist/tex/context/base/buff-ini.mkii12
-rw-r--r--Master/texmf-dist/tex/context/base/buff-ini.mkiv47
-rw-r--r--Master/texmf-dist/tex/context/base/buff-par.lua29
-rw-r--r--Master/texmf-dist/tex/context/base/buff-par.mkiv151
-rw-r--r--Master/texmf-dist/tex/context/base/buff-par.mkvi131
-rw-r--r--Master/texmf-dist/tex/context/base/buff-ver.lua43
-rw-r--r--Master/texmf-dist/tex/context/base/buff-ver.mkiv32
-rw-r--r--Master/texmf-dist/tex/context/base/catc-ctx.mkiv11
-rw-r--r--Master/texmf-dist/tex/context/base/catc-def.mkiv7
-rw-r--r--Master/texmf-dist/tex/context/base/catc-ini.lua21
-rw-r--r--Master/texmf-dist/tex/context/base/catc-ini.mkiv64
-rw-r--r--Master/texmf-dist/tex/context/base/char-cjk.lua23
-rw-r--r--Master/texmf-dist/tex/context/base/char-def.lua8860
-rw-r--r--Master/texmf-dist/tex/context/base/char-enc.lua41
-rw-r--r--Master/texmf-dist/tex/context/base/char-ent.lua8
-rw-r--r--Master/texmf-dist/tex/context/base/char-ini.lua1033
-rw-r--r--Master/texmf-dist/tex/context/base/char-ini.mkiv6
-rw-r--r--Master/texmf-dist/tex/context/base/char-map.lua3
-rw-r--r--Master/texmf-dist/tex/context/base/char-tex.lua13
-rw-r--r--Master/texmf-dist/tex/context/base/char-utf.lua260
-rw-r--r--Master/texmf-dist/tex/context/base/chem-ini.lua66
-rw-r--r--Master/texmf-dist/tex/context/base/chem-str.lua957
-rw-r--r--Master/texmf-dist/tex/context/base/chem-str.mkiv354
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-bas.lua56
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-bas.mkiv1
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-com.lua9
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-ini.lua240
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-ini.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-int.lua11
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-prs.lua54
-rw-r--r--Master/texmf-dist/tex/context/base/cldf-ver.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/colo-ext.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/colo-icc.lua18
-rw-r--r--Master/texmf-dist/tex/context/base/colo-ini.lua202
-rw-r--r--Master/texmf-dist/tex/context/base/colo-ini.mkiv83
-rw-r--r--Master/texmf-dist/tex/context/base/colo-run.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/colo-run.mkiv28
-rw-r--r--Master/texmf-dist/tex/context/base/cont-log.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/cont-new.mkii2
-rw-r--r--Master/texmf-dist/tex/context/base/cont-new.mkiv501
-rw-r--r--Master/texmf-dist/tex/context/base/cont-nop.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/cont-yes.mkiv91
-rw-r--r--Master/texmf-dist/tex/context/base/context-base.lmx69
-rw-r--r--Master/texmf-dist/tex/context/base/context-help.lmx6
-rw-r--r--Master/texmf-dist/tex/context/base/context-version.pdfbin4128 -> 4131 bytes
-rw-r--r--Master/texmf-dist/tex/context/base/context-version.pngbin105721 -> 40047 bytes
-rw-r--r--Master/texmf-dist/tex/context/base/context.mkii4
-rw-r--r--Master/texmf-dist/tex/context/base/context.mkiv89
-rw-r--r--Master/texmf-dist/tex/context/base/context.rme7
-rw-r--r--Master/texmf-dist/tex/context/base/context.todo6
-rw-r--r--Master/texmf-dist/tex/context/base/core-con.lua469
-rw-r--r--Master/texmf-dist/tex/context/base/core-con.mkiv188
-rw-r--r--Master/texmf-dist/tex/context/base/core-ctx.ctx23
-rw-r--r--Master/texmf-dist/tex/context/base/core-ctx.lua342
-rw-r--r--Master/texmf-dist/tex/context/base/core-ctx.mkiv17
-rw-r--r--Master/texmf-dist/tex/context/base/core-dat.lua85
-rw-r--r--Master/texmf-dist/tex/context/base/core-def.mkiv122
-rw-r--r--Master/texmf-dist/tex/context/base/core-env.lua32
-rw-r--r--Master/texmf-dist/tex/context/base/core-env.mkiv152
-rw-r--r--Master/texmf-dist/tex/context/base/core-fnt.mkiv158
-rw-r--r--Master/texmf-dist/tex/context/base/core-ini.mkiv228
-rw-r--r--Master/texmf-dist/tex/context/base/core-mis.mkiv738
-rw-r--r--Master/texmf-dist/tex/context/base/core-sys.lua90
-rw-r--r--Master/texmf-dist/tex/context/base/core-sys.mkiv86
-rw-r--r--Master/texmf-dist/tex/context/base/core-two.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/core-uti.lua99
-rw-r--r--Master/texmf-dist/tex/context/base/core-uti.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/core-var.mkiv239
-rw-r--r--Master/texmf-dist/tex/context/base/data-aux.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/data-con.lua22
-rw-r--r--Master/texmf-dist/tex/context/base/data-env.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/data-exp.lua20
-rw-r--r--Master/texmf-dist/tex/context/base/data-fil.lua18
-rw-r--r--Master/texmf-dist/tex/context/base/data-ini.lua36
-rw-r--r--Master/texmf-dist/tex/context/base/data-lst.lua13
-rw-r--r--Master/texmf-dist/tex/context/base/data-lua.lua241
-rw-r--r--Master/texmf-dist/tex/context/base/data-met.lua14
-rw-r--r--Master/texmf-dist/tex/context/base/data-pre.lua75
-rw-r--r--Master/texmf-dist/tex/context/base/data-res.lua216
-rw-r--r--Master/texmf-dist/tex/context/base/data-sch.lua80
-rw-r--r--Master/texmf-dist/tex/context/base/data-tex.lua32
-rw-r--r--Master/texmf-dist/tex/context/base/data-tmf.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/data-tmp.lua95
-rw-r--r--Master/texmf-dist/tex/context/base/data-tre.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/data-use.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/data-vir.lua22
-rw-r--r--Master/texmf-dist/tex/context/base/data-zip.lua62
-rw-r--r--Master/texmf-dist/tex/context/base/enco-ini.mkiv115
-rw-r--r--Master/texmf-dist/tex/context/base/file-ini.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/file-ini.mkvi20
-rw-r--r--Master/texmf-dist/tex/context/base/file-job.lua515
-rw-r--r--Master/texmf-dist/tex/context/base/file-job.mkvi26
-rw-r--r--Master/texmf-dist/tex/context/base/file-lib.lua11
-rw-r--r--Master/texmf-dist/tex/context/base/file-mod.lua45
-rw-r--r--Master/texmf-dist/tex/context/base/file-mod.mkvi9
-rw-r--r--Master/texmf-dist/tex/context/base/file-res.lua12
-rw-r--r--Master/texmf-dist/tex/context/base/file-syn.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/font-afk.lua200
-rw-r--r--Master/texmf-dist/tex/context/base/font-afm.lua412
-rw-r--r--Master/texmf-dist/tex/context/base/font-age.lua3
-rw-r--r--Master/texmf-dist/tex/context/base/font-agl.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/font-aux.lua145
-rw-r--r--Master/texmf-dist/tex/context/base/font-aux.mkvi26
-rw-r--r--Master/texmf-dist/tex/context/base/font-chk.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/font-chk.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/font-cid.lua25
-rw-r--r--Master/texmf-dist/tex/context/base/font-col.lua128
-rw-r--r--Master/texmf-dist/tex/context/base/font-con.lua130
-rw-r--r--Master/texmf-dist/tex/context/base/font-ctx.lua703
-rw-r--r--Master/texmf-dist/tex/context/base/font-def.lua88
-rw-r--r--Master/texmf-dist/tex/context/base/font-enh.lua52
-rw-r--r--Master/texmf-dist/tex/context/base/font-ext.lua76
-rw-r--r--Master/texmf-dist/tex/context/base/font-fbk.lua69
-rw-r--r--Master/texmf-dist/tex/context/base/font-fea.mkvi190
-rw-r--r--Master/texmf-dist/tex/context/base/font-gds.lua132
-rw-r--r--Master/texmf-dist/tex/context/base/font-gds.mkvi (renamed from Master/texmf-dist/tex/context/base/font-gds.mkiv)4
-rw-r--r--Master/texmf-dist/tex/context/base/font-hsh.lua182
-rw-r--r--Master/texmf-dist/tex/context/base/font-ini.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/font-ini.mkvi62
-rw-r--r--Master/texmf-dist/tex/context/base/font-ldr.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/font-lib.mkvi10
-rw-r--r--Master/texmf-dist/tex/context/base/font-log.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/font-lua.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/font-map.lua55
-rw-r--r--Master/texmf-dist/tex/context/base/font-mat.mkvi15
-rw-r--r--Master/texmf-dist/tex/context/base/font-mis.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/font-nod.lua434
-rw-r--r--Master/texmf-dist/tex/context/base/font-odk.lua904
-rw-r--r--Master/texmf-dist/tex/context/base/font-odv.lua2074
-rw-r--r--Master/texmf-dist/tex/context/base/font-ota.lua289
-rw-r--r--Master/texmf-dist/tex/context/base/font-otb.lua107
-rw-r--r--Master/texmf-dist/tex/context/base/font-otc.lua9
-rw-r--r--Master/texmf-dist/tex/context/base/font-otd.lua157
-rw-r--r--Master/texmf-dist/tex/context/base/font-otf.lua214
-rw-r--r--Master/texmf-dist/tex/context/base/font-oti.lua13
-rw-r--r--Master/texmf-dist/tex/context/base/font-otn.lua733
-rw-r--r--Master/texmf-dist/tex/context/base/font-otp.lua323
-rw-r--r--Master/texmf-dist/tex/context/base/font-ott.lua181
-rw-r--r--Master/texmf-dist/tex/context/base/font-otx.lua393
-rw-r--r--Master/texmf-dist/tex/context/base/font-pat.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/font-pre.mkiv80
-rw-r--r--Master/texmf-dist/tex/context/base/font-run.mkiv47
-rw-r--r--Master/texmf-dist/tex/context/base/font-set.mkvi8
-rw-r--r--Master/texmf-dist/tex/context/base/font-sol.lua884
-rw-r--r--Master/texmf-dist/tex/context/base/font-sol.mkvi (renamed from Master/texmf-dist/tex/context/base/node-spl.mkiv)63
-rw-r--r--Master/texmf-dist/tex/context/base/font-sty.mkvi50
-rw-r--r--Master/texmf-dist/tex/context/base/font-sym.mkvi4
-rw-r--r--Master/texmf-dist/tex/context/base/font-syn.lua592
-rw-r--r--Master/texmf-dist/tex/context/base/font-tfm.lua19
-rw-r--r--Master/texmf-dist/tex/context/base/font-tra.mkiv155
-rw-r--r--Master/texmf-dist/tex/context/base/font-vf.lua16
-rw-r--r--Master/texmf-dist/tex/context/base/grph-epd.lua1
-rw-r--r--Master/texmf-dist/tex/context/base/grph-epd.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/grph-fig.mkiv729
-rw-r--r--Master/texmf-dist/tex/context/base/grph-fil.lua17
-rw-r--r--Master/texmf-dist/tex/context/base/grph-inc.lua1052
-rw-r--r--Master/texmf-dist/tex/context/base/grph-inc.mkiv969
-rw-r--r--Master/texmf-dist/tex/context/base/grph-raw.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/grph-raw.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/grph-swf.lua35
-rw-r--r--Master/texmf-dist/tex/context/base/grph-trf.mkiv492
-rw-r--r--Master/texmf-dist/tex/context/base/grph-u3d.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/grph-wnd.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/java-imp-fld.mkiv140
-rw-r--r--Master/texmf-dist/tex/context/base/java-ini.lua70
-rw-r--r--Master/texmf-dist/tex/context/base/l-boolean.lua45
-rw-r--r--Master/texmf-dist/tex/context/base/l-dir.lua58
-rw-r--r--Master/texmf-dist/tex/context/base/l-file.lua674
-rw-r--r--Master/texmf-dist/tex/context/base/l-function.lua11
-rw-r--r--Master/texmf-dist/tex/context/base/l-io.lua125
-rw-r--r--Master/texmf-dist/tex/context/base/l-lpeg.lua534
-rw-r--r--Master/texmf-dist/tex/context/base/l-lua.lua393
-rw-r--r--Master/texmf-dist/tex/context/base/l-md5.lua90
-rw-r--r--Master/texmf-dist/tex/context/base/l-number.lua241
-rw-r--r--Master/texmf-dist/tex/context/base/l-os.lua93
-rw-r--r--Master/texmf-dist/tex/context/base/l-pdfview.lua34
-rw-r--r--Master/texmf-dist/tex/context/base/l-string.lua211
-rw-r--r--Master/texmf-dist/tex/context/base/l-table.lua744
-rw-r--r--Master/texmf-dist/tex/context/base/l-unicode.lua1001
-rw-r--r--Master/texmf-dist/tex/context/base/l-url.lua129
-rw-r--r--Master/texmf-dist/tex/context/base/lang-def.lua84
-rw-r--r--Master/texmf-dist/tex/context/base/lang-def.mkiv34
-rw-r--r--Master/texmf-dist/tex/context/base/lang-frd.mkiv143
-rw-r--r--Master/texmf-dist/tex/context/base/lang-frq-de.lua12
-rw-r--r--Master/texmf-dist/tex/context/base/lang-frq-en.lua26
-rw-r--r--Master/texmf-dist/tex/context/base/lang-frq-nl.lua12
-rw-r--r--Master/texmf-dist/tex/context/base/lang-frq.mkiv235
-rw-r--r--Master/texmf-dist/tex/context/base/lang-ini.lua179
-rw-r--r--Master/texmf-dist/tex/context/base/lang-ini.mkiv93
-rw-r--r--Master/texmf-dist/tex/context/base/lang-lab.lua166
-rw-r--r--Master/texmf-dist/tex/context/base/lang-lab.mkiv74
-rw-r--r--Master/texmf-dist/tex/context/base/lang-mis.mkiv31
-rw-r--r--Master/texmf-dist/tex/context/base/lang-txt.lua271
-rw-r--r--Master/texmf-dist/tex/context/base/lang-url.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/lang-wrd.lua115
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-ano.lua48
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-col.lua139
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-epa.lua160
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-epd.lua52
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-fld.lua72
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-fmt.lua88
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-grp.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-ini.lua56
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-nod.lua71
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-ren.lua120
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-swf.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-tag.lua16
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-u3d.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-wid.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/lpdf-xmp.lua17
-rw-r--r--Master/texmf-dist/tex/context/base/luat-bas.mkiv34
-rw-r--r--Master/texmf-dist/tex/context/base/luat-bwc.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/luat-cbk.lua37
-rw-r--r--Master/texmf-dist/tex/context/base/luat-cnf.lua91
-rw-r--r--Master/texmf-dist/tex/context/base/luat-cod.lua34
-rw-r--r--Master/texmf-dist/tex/context/base/luat-cod.mkiv24
-rw-r--r--Master/texmf-dist/tex/context/base/luat-env.lua242
-rw-r--r--Master/texmf-dist/tex/context/base/luat-exe.lua16
-rw-r--r--Master/texmf-dist/tex/context/base/luat-fio.lua65
-rw-r--r--Master/texmf-dist/tex/context/base/luat-fmt.lua65
-rw-r--r--Master/texmf-dist/tex/context/base/luat-ini.lua267
-rw-r--r--Master/texmf-dist/tex/context/base/luat-ini.mkiv112
-rw-r--r--Master/texmf-dist/tex/context/base/luat-iop.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/luat-lib.mkiv19
-rw-r--r--Master/texmf-dist/tex/context/base/luat-lua.lua34
-rw-r--r--Master/texmf-dist/tex/context/base/luat-mac.lua120
-rw-r--r--Master/texmf-dist/tex/context/base/luat-run.lua40
-rw-r--r--Master/texmf-dist/tex/context/base/luat-soc.lua16
-rw-r--r--Master/texmf-dist/tex/context/base/luat-sto.lua114
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-aux.lua45
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-css.lua48
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-css.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-ctx.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-ctx.mkiv3
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-dir.lua41
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-ent.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-inf.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-ini.mkiv12
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-lpt.lua189
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-tab.lua121
-rw-r--r--Master/texmf-dist/tex/context/base/lxml-tex.lua190
-rw-r--r--Master/texmf-dist/tex/context/base/m-barcodes.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/m-chart.lua179
-rw-r--r--Master/texmf-dist/tex/context/base/m-chart.mkii2
-rw-r--r--Master/texmf-dist/tex/context/base/m-chart.mkvi142
-rw-r--r--Master/texmf-dist/tex/context/base/m-database.lua35
-rw-r--r--Master/texmf-dist/tex/context/base/m-database.mkiv33
-rw-r--r--Master/texmf-dist/tex/context/base/m-graph.mkiv153
-rw-r--r--Master/texmf-dist/tex/context/base/m-ipsum.mkiv198
-rw-r--r--Master/texmf-dist/tex/context/base/m-json.mkiv30
-rw-r--r--Master/texmf-dist/tex/context/base/m-morse.mkvi25
-rw-r--r--Master/texmf-dist/tex/context/base/m-nodechart.mkvi257
-rw-r--r--Master/texmf-dist/tex/context/base/m-oldfun.mkiv (renamed from Master/texmf-dist/tex/context/base/supp-fun.mkiv)44
-rw-r--r--Master/texmf-dist/tex/context/base/m-oldnum.mkiv (renamed from Master/texmf-dist/tex/context/base/supp-num.mkiv)2
-rw-r--r--Master/texmf-dist/tex/context/base/m-pstricks.lua8
-rw-r--r--Master/texmf-dist/tex/context/base/m-spreadsheet.lua332
-rw-r--r--Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv295
-rw-r--r--Master/texmf-dist/tex/context/base/m-timing.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/m-translate.mkiv27
-rw-r--r--Master/texmf-dist/tex/context/base/m-visual.mkiv581
-rw-r--r--Master/texmf-dist/tex/context/base/math-act.lua219
-rw-r--r--Master/texmf-dist/tex/context/base/math-ali.mkiv187
-rw-r--r--Master/texmf-dist/tex/context/base/math-arr.mkiv401
-rw-r--r--Master/texmf-dist/tex/context/base/math-def.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/math-del.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/math-dim.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/math-ext.lua130
-rw-r--r--Master/texmf-dist/tex/context/base/math-fbk.lua312
-rw-r--r--Master/texmf-dist/tex/context/base/math-fen.mkiv108
-rw-r--r--Master/texmf-dist/tex/context/base/math-for.mkiv15
-rw-r--r--Master/texmf-dist/tex/context/base/math-frc.mkiv230
-rw-r--r--Master/texmf-dist/tex/context/base/math-ini.lua404
-rw-r--r--Master/texmf-dist/tex/context/base/math-ini.mkiv867
-rw-r--r--Master/texmf-dist/tex/context/base/math-int.mkiv145
-rw-r--r--Master/texmf-dist/tex/context/base/math-map.lua167
-rw-r--r--Master/texmf-dist/tex/context/base/math-mis.mkiv60
-rw-r--r--Master/texmf-dist/tex/context/base/math-noa.lua748
-rw-r--r--Master/texmf-dist/tex/context/base/math-stc.mkvi780
-rw-r--r--Master/texmf-dist/tex/context/base/math-tag.lua75
-rw-r--r--Master/texmf-dist/tex/context/base/math-ttv.lua801
-rw-r--r--Master/texmf-dist/tex/context/base/math-vfu.lua1082
-rw-r--r--Master/texmf-dist/tex/context/base/meta-fig.mkiv3
-rw-r--r--Master/texmf-dist/tex/context/base/meta-fun.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/meta-grd.mkiv116
-rw-r--r--Master/texmf-dist/tex/context/base/meta-imp-dum.mkiv124
-rw-r--r--Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv30
-rw-r--r--Master/texmf-dist/tex/context/base/meta-ini.lua127
-rw-r--r--Master/texmf-dist/tex/context/base/meta-ini.mkiv429
-rw-r--r--Master/texmf-dist/tex/context/base/meta-pag.mkiv12
-rw-r--r--Master/texmf-dist/tex/context/base/meta-pdf.lua15
-rw-r--r--Master/texmf-dist/tex/context/base/meta-pdh.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/meta-tex.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-ctx.lua99
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-pdf.lua138
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-pdf.mkiv45
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-pps.lua342
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-pps.mkiv24
-rw-r--r--Master/texmf-dist/tex/context/base/mlib-run.lua367
-rw-r--r--Master/texmf-dist/tex/context/base/mtx-context-copy.tex151
-rw-r--r--Master/texmf-dist/tex/context/base/mtx-context-select.tex3
-rw-r--r--Master/texmf-dist/tex/context/base/mtx-context-timing.tex8
-rw-r--r--Master/texmf-dist/tex/context/base/mult-aux.lua34
-rw-r--r--Master/texmf-dist/tex/context/base/mult-aux.mkiv231
-rw-r--r--Master/texmf-dist/tex/context/base/mult-chk.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/mult-chk.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/mult-de.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-def.lua76
-rw-r--r--Master/texmf-dist/tex/context/base/mult-def.mkiv86
-rw-r--r--Master/texmf-dist/tex/context/base/mult-dim.mkvi3
-rw-r--r--Master/texmf-dist/tex/context/base/mult-en.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-fr.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-fun.lua101
-rw-r--r--Master/texmf-dist/tex/context/base/mult-ini.lua134
-rw-r--r--Master/texmf-dist/tex/context/base/mult-ini.mkiv78
-rw-r--r--Master/texmf-dist/tex/context/base/mult-it.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-low.lua122
-rw-r--r--Master/texmf-dist/tex/context/base/mult-mes.lua1895
-rw-r--r--Master/texmf-dist/tex/context/base/mult-mps.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/mult-nl.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-pe.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-prm.lua14
-rw-r--r--Master/texmf-dist/tex/context/base/mult-prm.mkiv18
-rw-r--r--Master/texmf-dist/tex/context/base/mult-ro.mkii16
-rw-r--r--Master/texmf-dist/tex/context/base/mult-sys.mkiv574
-rw-r--r--Master/texmf-dist/tex/context/base/node-acc.lua14
-rw-r--r--Master/texmf-dist/tex/context/base/node-aux.lua110
-rw-r--r--Master/texmf-dist/tex/context/base/node-bck.lua189
-rw-r--r--Master/texmf-dist/tex/context/base/node-bck.mkiv26
-rw-r--r--Master/texmf-dist/tex/context/base/node-dir.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/node-ext.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/node-fin.lua1198
-rw-r--r--Master/texmf-dist/tex/context/base/node-fnt.lua29
-rw-r--r--Master/texmf-dist/tex/context/base/node-ini.lua123
-rw-r--r--Master/texmf-dist/tex/context/base/node-ini.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/node-inj.lua128
-rw-r--r--Master/texmf-dist/tex/context/base/node-mig.lua12
-rw-r--r--Master/texmf-dist/tex/context/base/node-par.lua118
-rw-r--r--Master/texmf-dist/tex/context/base/node-par.mkiv82
-rw-r--r--Master/texmf-dist/tex/context/base/node-pro.lua52
-rw-r--r--Master/texmf-dist/tex/context/base/node-ref.lua153
-rw-r--r--Master/texmf-dist/tex/context/base/node-res.lua35
-rw-r--r--Master/texmf-dist/tex/context/base/node-rul.lua158
-rw-r--r--Master/texmf-dist/tex/context/base/node-rul.mkiv40
-rw-r--r--Master/texmf-dist/tex/context/base/node-ser.lua38
-rw-r--r--Master/texmf-dist/tex/context/base/node-shp.lua106
-rw-r--r--Master/texmf-dist/tex/context/base/node-snp.lua66
-rw-r--r--Master/texmf-dist/tex/context/base/node-spl.lua619
-rw-r--r--Master/texmf-dist/tex/context/base/node-tra.lua716
-rw-r--r--Master/texmf-dist/tex/context/base/node-tsk.lua28
-rw-r--r--Master/texmf-dist/tex/context/base/node-typ.lua11
-rw-r--r--Master/texmf-dist/tex/context/base/norm-ctx.mkiv75
-rw-r--r--Master/texmf-dist/tex/context/base/pack-bar.mkiv97
-rw-r--r--Master/texmf-dist/tex/context/base/pack-bck.mkvi5
-rw-r--r--Master/texmf-dist/tex/context/base/pack-box.mkiv883
-rw-r--r--Master/texmf-dist/tex/context/base/pack-com.mkiv53
-rw-r--r--Master/texmf-dist/tex/context/base/pack-cut.mkiv163
-rw-r--r--Master/texmf-dist/tex/context/base/pack-lyr.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/pack-mrl.mkiv922
-rw-r--r--Master/texmf-dist/tex/context/base/pack-obj.lua25
-rw-r--r--Master/texmf-dist/tex/context/base/pack-obj.mkiv143
-rw-r--r--Master/texmf-dist/tex/context/base/pack-pos.mkiv190
-rw-r--r--Master/texmf-dist/tex/context/base/pack-rul.mkiv1081
-rw-r--r--Master/texmf-dist/tex/context/base/page-app.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/page-bck.mkiv396
-rw-r--r--Master/texmf-dist/tex/context/base/page-box.mkvi8
-rw-r--r--Master/texmf-dist/tex/context/base/page-brk.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/page-col.mkiv245
-rw-r--r--Master/texmf-dist/tex/context/base/page-com.mkiv176
-rw-r--r--Master/texmf-dist/tex/context/base/page-fac.mkiv48
-rw-r--r--Master/texmf-dist/tex/context/base/page-flt.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/page-flt.mkiv214
-rw-r--r--Master/texmf-dist/tex/context/base/page-flw.mkiv186
-rw-r--r--Master/texmf-dist/tex/context/base/page-grd.mkiv6
-rw-r--r--Master/texmf-dist/tex/context/base/page-imp.mkiv110
-rw-r--r--Master/texmf-dist/tex/context/base/page-inf.mkiv6
-rw-r--r--Master/texmf-dist/tex/context/base/page-ini.mkiv46
-rw-r--r--Master/texmf-dist/tex/context/base/page-inj.lua101
-rw-r--r--Master/texmf-dist/tex/context/base/page-inj.mkvi217
-rw-r--r--Master/texmf-dist/tex/context/base/page-ins.lua97
-rw-r--r--Master/texmf-dist/tex/context/base/page-ins.mkiv197
-rw-r--r--Master/texmf-dist/tex/context/base/page-lay.mkiv79
-rw-r--r--Master/texmf-dist/tex/context/base/page-lin.lua32
-rw-r--r--Master/texmf-dist/tex/context/base/page-lin.mkiv597
-rw-r--r--Master/texmf-dist/tex/context/base/page-mak.mkvi40
-rw-r--r--Master/texmf-dist/tex/context/base/page-mbk.mkvi4
-rw-r--r--Master/texmf-dist/tex/context/base/page-mix.lua660
-rw-r--r--Master/texmf-dist/tex/context/base/page-mix.mkiv801
-rw-r--r--Master/texmf-dist/tex/context/base/page-mul.mkiv2259
-rw-r--r--Master/texmf-dist/tex/context/base/page-not.mkiv20
-rw-r--r--Master/texmf-dist/tex/context/base/page-one.mkiv259
-rw-r--r--Master/texmf-dist/tex/context/base/page-otr.mkvi4
-rw-r--r--Master/texmf-dist/tex/context/base/page-par.mkiv88
-rw-r--r--Master/texmf-dist/tex/context/base/page-plg.mkiv110
-rw-r--r--Master/texmf-dist/tex/context/base/page-pst.lua (renamed from Master/texmf-dist/tex/context/base/page-mis.lua)7
-rw-r--r--Master/texmf-dist/tex/context/base/page-pst.mkiv (renamed from Master/texmf-dist/tex/context/base/page-mis.mkiv)31
-rw-r--r--Master/texmf-dist/tex/context/base/page-run.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/page-sel.mkiv347
-rw-r--r--Master/texmf-dist/tex/context/base/page-sel.mkvi369
-rw-r--r--Master/texmf-dist/tex/context/base/page-set.mkiv616
-rw-r--r--Master/texmf-dist/tex/context/base/page-sid.mkiv60
-rw-r--r--Master/texmf-dist/tex/context/base/page-spr.mkiv90
-rw-r--r--Master/texmf-dist/tex/context/base/page-str.lua18
-rw-r--r--Master/texmf-dist/tex/context/base/page-str.mkiv39
-rw-r--r--Master/texmf-dist/tex/context/base/page-txt.mkvi132
-rw-r--r--Master/texmf-dist/tex/context/base/phys-dim.lua92
-rw-r--r--Master/texmf-dist/tex/context/base/phys-dim.mkiv33
-rw-r--r--Master/texmf-dist/tex/context/base/ppchtex.mkii3
-rw-r--r--Master/texmf-dist/tex/context/base/ppchtex.mkiv18
-rw-r--r--Master/texmf-dist/tex/context/base/prop-ini.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/regi-ini.lua181
-rw-r--r--Master/texmf-dist/tex/context/base/s-abr-01.tex561
-rw-r--r--Master/texmf-dist/tex/context/base/s-abr-04.tex3
-rw-r--r--Master/texmf-dist/tex/context/base/s-art-01.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/s-def-01.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-10.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-20.mkiv29
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-21.mkiv20
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-28.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-29.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-31.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fnt-32.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-fonts-tables.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/s-inf-01.mkvi46
-rw-r--r--Master/texmf-dist/tex/context/base/s-inf-03.mkiv144
-rw-r--r--Master/texmf-dist/tex/context/base/s-lan-04.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/s-lan-06.mkiv53
-rw-r--r--Master/texmf-dist/tex/context/base/s-mat-10.mkiv29
-rw-r--r--Master/texmf-dist/tex/context/base/s-mat-11.mkiv22
-rw-r--r--Master/texmf-dist/tex/context/base/s-mat-20.mkiv161
-rw-r--r--Master/texmf-dist/tex/context/base/s-mod-00.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/s-mod-01.mkiv3
-rw-r--r--Master/texmf-dist/tex/context/base/s-mod-02.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/s-mod.ctx1
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-05.tex14
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-60.mkiv134
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-61.tex3
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-62.tex2
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-63.tex1
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-67.tex2
-rw-r--r--Master/texmf-dist/tex/context/base/s-pre-69.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-bar.mkvi6
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-but.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-but.mkvi36
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-fld.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-fld.mkvi12
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-hlp.lua3
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-ini.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-ini.mkvi33
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-pag.mkvi6
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-ref.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-wid.lua18
-rw-r--r--Master/texmf-dist/tex/context/base/scrn-wid.mkvi135
-rw-r--r--Master/texmf-dist/tex/context/base/scrp-cjk.lua28
-rw-r--r--Master/texmf-dist/tex/context/base/scrp-eth.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/scrp-ini.lua448
-rw-r--r--Master/texmf-dist/tex/context/base/scrp-ini.mkiv16
-rw-r--r--Master/texmf-dist/tex/context/base/sort-ini.lua222
-rw-r--r--Master/texmf-dist/tex/context/base/sort-lan.lua20
-rw-r--r--Master/texmf-dist/tex/context/base/spac-adj.lua17
-rw-r--r--Master/texmf-dist/tex/context/base/spac-ali.lua20
-rw-r--r--Master/texmf-dist/tex/context/base/spac-ali.mkiv203
-rw-r--r--Master/texmf-dist/tex/context/base/spac-cha.mkiv191
-rw-r--r--Master/texmf-dist/tex/context/base/spac-chr.lua30
-rw-r--r--Master/texmf-dist/tex/context/base/spac-chr.mkiv38
-rw-r--r--Master/texmf-dist/tex/context/base/spac-def.mkiv116
-rw-r--r--Master/texmf-dist/tex/context/base/spac-flr.mkiv112
-rw-r--r--Master/texmf-dist/tex/context/base/spac-grd.mkiv61
-rw-r--r--Master/texmf-dist/tex/context/base/spac-hor.mkiv182
-rw-r--r--Master/texmf-dist/tex/context/base/spac-par.mkiv7
-rw-r--r--Master/texmf-dist/tex/context/base/spac-ver.lua802
-rw-r--r--Master/texmf-dist/tex/context/base/spac-ver.mkiv414
-rw-r--r--Master/texmf-dist/tex/context/base/status-files.pdfbin24413 -> 24764 bytes
-rw-r--r--Master/texmf-dist/tex/context/base/status-lua.pdfbin180149 -> 211760 bytes
-rw-r--r--Master/texmf-dist/tex/context/base/status-mkiv.lua7488
-rw-r--r--Master/texmf-dist/tex/context/base/status-mkiv.tex317
-rw-r--r--Master/texmf-dist/tex/context/base/strc-bkm.lua4
-rw-r--r--Master/texmf-dist/tex/context/base/strc-bkm.mkiv150
-rw-r--r--Master/texmf-dist/tex/context/base/strc-blk.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/strc-blk.mkiv96
-rw-r--r--Master/texmf-dist/tex/context/base/strc-con.mkvi244
-rw-r--r--Master/texmf-dist/tex/context/base/strc-def.mkiv59
-rw-r--r--Master/texmf-dist/tex/context/base/strc-des.mkii2
-rw-r--r--Master/texmf-dist/tex/context/base/strc-des.mkvi52
-rw-r--r--Master/texmf-dist/tex/context/base/strc-doc.lua185
-rw-r--r--Master/texmf-dist/tex/context/base/strc-doc.mkiv234
-rw-r--r--Master/texmf-dist/tex/context/base/strc-enu.mkvi127
-rw-r--r--Master/texmf-dist/tex/context/base/strc-flt.mkvi1099
-rw-r--r--Master/texmf-dist/tex/context/base/strc-ind.mkiv12
-rw-r--r--Master/texmf-dist/tex/context/base/strc-ini.lua67
-rw-r--r--Master/texmf-dist/tex/context/base/strc-itm.mkvi128
-rw-r--r--Master/texmf-dist/tex/context/base/strc-lab.mkiv232
-rw-r--r--Master/texmf-dist/tex/context/base/strc-lev.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/strc-lnt.mkvi133
-rw-r--r--Master/texmf-dist/tex/context/base/strc-lst.lua156
-rw-r--r--Master/texmf-dist/tex/context/base/strc-lst.mkvi42
-rw-r--r--Master/texmf-dist/tex/context/base/strc-mar.lua92
-rw-r--r--Master/texmf-dist/tex/context/base/strc-mar.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/strc-mat.mkii10
-rw-r--r--Master/texmf-dist/tex/context/base/strc-mat.mkiv498
-rw-r--r--Master/texmf-dist/tex/context/base/strc-not.lua212
-rw-r--r--Master/texmf-dist/tex/context/base/strc-not.mkvi646
-rw-r--r--Master/texmf-dist/tex/context/base/strc-num.lua307
-rw-r--r--Master/texmf-dist/tex/context/base/strc-num.mkiv496
-rw-r--r--Master/texmf-dist/tex/context/base/strc-pag.lua18
-rw-r--r--Master/texmf-dist/tex/context/base/strc-pag.mkiv219
-rw-r--r--Master/texmf-dist/tex/context/base/strc-ref.lua224
-rw-r--r--Master/texmf-dist/tex/context/base/strc-ref.mkvi378
-rw-r--r--Master/texmf-dist/tex/context/base/strc-reg.lua20
-rw-r--r--Master/texmf-dist/tex/context/base/strc-reg.mkiv460
-rw-r--r--Master/texmf-dist/tex/context/base/strc-ren.mkiv868
-rw-r--r--Master/texmf-dist/tex/context/base/strc-sbe.mkiv102
-rw-r--r--Master/texmf-dist/tex/context/base/strc-sec.mkiv833
-rw-r--r--Master/texmf-dist/tex/context/base/strc-syn.lua2
-rw-r--r--Master/texmf-dist/tex/context/base/strc-syn.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/strc-tag.lua1
-rw-r--r--Master/texmf-dist/tex/context/base/strc-tag.mkiv84
-rw-r--r--Master/texmf-dist/tex/context/base/supp-ali.mkiv173
-rw-r--r--Master/texmf-dist/tex/context/base/supp-box.lua12
-rw-r--r--Master/texmf-dist/tex/context/base/supp-box.mkiv100
-rw-r--r--Master/texmf-dist/tex/context/base/supp-mat.mkiv143
-rw-r--r--Master/texmf-dist/tex/context/base/supp-ran.lua28
-rw-r--r--Master/texmf-dist/tex/context/base/supp-ran.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/supp-vis.mkiv185
-rw-r--r--Master/texmf-dist/tex/context/base/symb-imp-cc.mkiv52
-rw-r--r--Master/texmf-dist/tex/context/base/symb-ini.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/symb-run.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/syst-aux.lua34
-rw-r--r--Master/texmf-dist/tex/context/base/syst-aux.mkiv3670
-rw-r--r--Master/texmf-dist/tex/context/base/syst-con.lua23
-rw-r--r--Master/texmf-dist/tex/context/base/syst-con.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/syst-fnt.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/syst-gen.mkii5
-rw-r--r--Master/texmf-dist/tex/context/base/syst-ini.mkiv340
-rw-r--r--Master/texmf-dist/tex/context/base/syst-lua.lua52
-rw-r--r--Master/texmf-dist/tex/context/base/syst-lua.mkiv20
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-ltb.mkiv8
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-mis.mkiv288
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-ntb.mkiv1914
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-nte.mkiv67
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-pln.mkiv155
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-tab.mkiv240
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-tbl.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-tbl.mkiv385
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-tsp.mkiv470
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-xnt.mkvi2
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-xtb.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/tabl-xtb.mkvi108
-rw-r--r--Master/texmf-dist/tex/context/base/task-ini.lua35
-rw-r--r--Master/texmf-dist/tex/context/base/toks-ini.lua7
-rw-r--r--Master/texmf-dist/tex/context/base/trac-ctx.lua48
-rw-r--r--Master/texmf-dist/tex/context/base/trac-ctx.mkiv60
-rw-r--r--Master/texmf-dist/tex/context/base/trac-deb.lua44
-rw-r--r--Master/texmf-dist/tex/context/base/trac-deb.mkiv6
-rw-r--r--Master/texmf-dist/tex/context/base/trac-exp.lua229
-rw-r--r--Master/texmf-dist/tex/context/base/trac-fil.lua214
-rw-r--r--Master/texmf-dist/tex/context/base/trac-inf.lua86
-rw-r--r--Master/texmf-dist/tex/context/base/trac-jus.lua136
-rw-r--r--Master/texmf-dist/tex/context/base/trac-jus.mkiv25
-rw-r--r--Master/texmf-dist/tex/context/base/trac-lmx.lua725
-rw-r--r--Master/texmf-dist/tex/context/base/trac-log.lua340
-rw-r--r--Master/texmf-dist/tex/context/base/trac-pro.lua8
-rw-r--r--Master/texmf-dist/tex/context/base/trac-set.lua205
-rw-r--r--Master/texmf-dist/tex/context/base/trac-tex.lua3
-rw-r--r--Master/texmf-dist/tex/context/base/trac-tim.lua66
-rw-r--r--Master/texmf-dist/tex/context/base/trac-vis.lua913
-rw-r--r--Master/texmf-dist/tex/context/base/trac-vis.mkiv886
-rw-r--r--Master/texmf-dist/tex/context/base/trac-xml.lua183
-rw-r--r--Master/texmf-dist/tex/context/base/type-imp-husayni.mkiv290
-rw-r--r--Master/texmf-dist/tex/context/base/type-imp-latinmodern.mkiv21
-rw-r--r--Master/texmf-dist/tex/context/base/type-imp-lucida-opentype.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/type-imp-texgyre.mkiv64
-rw-r--r--Master/texmf-dist/tex/context/base/type-imp-xits.mkiv14
-rw-r--r--Master/texmf-dist/tex/context/base/type-ini.lua23
-rw-r--r--Master/texmf-dist/tex/context/base/type-ini.mkvi20
-rw-r--r--Master/texmf-dist/tex/context/base/typo-bld.lua185
-rw-r--r--Master/texmf-dist/tex/context/base/typo-bld.mkiv64
-rw-r--r--Master/texmf-dist/tex/context/base/typo-brk.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/typo-brk.mkiv18
-rw-r--r--Master/texmf-dist/tex/context/base/typo-cap.lua22
-rw-r--r--Master/texmf-dist/tex/context/base/typo-cap.mkiv44
-rw-r--r--Master/texmf-dist/tex/context/base/typo-cln.lua5
-rw-r--r--Master/texmf-dist/tex/context/base/typo-del.mkiv58
-rw-r--r--Master/texmf-dist/tex/context/base/typo-dig.lua67
-rw-r--r--Master/texmf-dist/tex/context/base/typo-dir.lua26
-rw-r--r--Master/texmf-dist/tex/context/base/typo-dir.mkiv10
-rw-r--r--Master/texmf-dist/tex/context/base/typo-itc.lua25
-rw-r--r--Master/texmf-dist/tex/context/base/typo-itc.mkvi2
-rw-r--r--Master/texmf-dist/tex/context/base/typo-itm.mkiv273
-rw-r--r--Master/texmf-dist/tex/context/base/typo-krn.lua67
-rw-r--r--Master/texmf-dist/tex/context/base/typo-krn.mkiv57
-rw-r--r--Master/texmf-dist/tex/context/base/typo-lan.lua72
-rw-r--r--Master/texmf-dist/tex/context/base/typo-lan.mkiv63
-rw-r--r--Master/texmf-dist/tex/context/base/typo-mar.lua143
-rw-r--r--Master/texmf-dist/tex/context/base/typo-mar.mkiv7
-rw-r--r--Master/texmf-dist/tex/context/base/typo-pag.lua24
-rw-r--r--Master/texmf-dist/tex/context/base/typo-par.lua11
-rw-r--r--Master/texmf-dist/tex/context/base/typo-prc.lua19
-rw-r--r--Master/texmf-dist/tex/context/base/typo-rep.lua9
-rw-r--r--Master/texmf-dist/tex/context/base/typo-scr.mkiv250
-rw-r--r--Master/texmf-dist/tex/context/base/typo-spa.lua42
-rw-r--r--Master/texmf-dist/tex/context/base/typo-spa.mkiv19
-rw-r--r--Master/texmf-dist/tex/context/base/typo-txt.mkvi174
-rw-r--r--Master/texmf-dist/tex/context/base/unic-ini.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/util-deb.lua30
-rw-r--r--Master/texmf-dist/tex/context/base/util-dim.lua20
-rw-r--r--Master/texmf-dist/tex/context/base/util-env.lua258
-rw-r--r--Master/texmf-dist/tex/context/base/util-fmt.lua22
-rw-r--r--Master/texmf-dist/tex/context/base/util-jsn.lua145
-rw-r--r--Master/texmf-dist/tex/context/base/util-lib.lua233
-rw-r--r--Master/texmf-dist/tex/context/base/util-lua.lua372
-rw-r--r--Master/texmf-dist/tex/context/base/util-mrg.lua137
-rw-r--r--Master/texmf-dist/tex/context/base/util-pck.lua32
-rw-r--r--Master/texmf-dist/tex/context/base/util-prs.lua387
-rw-r--r--Master/texmf-dist/tex/context/base/util-ran.lua107
-rw-r--r--Master/texmf-dist/tex/context/base/util-seq.lua15
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-imp-client.lua256
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-imp-library.lua289
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua490
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-loggers.lua277
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-sessions.lua349
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-tickets.lua772
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql-users.lua410
-rw-r--r--Master/texmf-dist/tex/context/base/util-sql.lua443
-rw-r--r--Master/texmf-dist/tex/context/base/util-sta.lua342
-rw-r--r--Master/texmf-dist/tex/context/base/util-sto.lua81
-rw-r--r--Master/texmf-dist/tex/context/base/util-str.lua708
-rw-r--r--Master/texmf-dist/tex/context/base/util-tab.lua383
-rw-r--r--Master/texmf-dist/tex/context/base/util-tpl.lua174
-rw-r--r--Master/texmf-dist/tex/context/base/x-asciimath.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/x-calcmath.lua85
-rw-r--r--Master/texmf-dist/tex/context/base/x-calcmath.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/x-chemml.lua6
-rw-r--r--Master/texmf-dist/tex/context/base/x-chemml.mkiv24
-rw-r--r--Master/texmf-dist/tex/context/base/x-dir-05.mkiv4
-rw-r--r--Master/texmf-dist/tex/context/base/x-ldx.ctx4
-rw-r--r--Master/texmf-dist/tex/context/base/x-ldx.lua10
-rw-r--r--Master/texmf-dist/tex/context/base/x-ldx.mkiv11
-rw-r--r--Master/texmf-dist/tex/context/base/x-mathml.lua54
-rw-r--r--Master/texmf-dist/tex/context/base/x-mathml.mkiv184
-rw-r--r--Master/texmf-dist/tex/context/base/x-res-01.mkiv71
-rw-r--r--Master/texmf-dist/tex/context/base/x-steps.mkiv102
-rw-r--r--Master/texmf-dist/tex/context/base/x-udhr.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/base/x-xfdf.mkiv72
-rw-r--r--Master/texmf-dist/tex/context/base/xtag-ini.mkii50
-rw-r--r--Master/texmf-dist/tex/context/base/xtag-pre.mkii13
-rw-r--r--Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.lua (renamed from Master/texmf-dist/tex/context/colors/icc/colorprofiles.lua)0
-rw-r--r--Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.xml (renamed from Master/texmf-dist/tex/context/colors/icc/colorprofiles.xml)0
-rw-r--r--Master/texmf-dist/tex/context/config/cont-de.ini2
-rw-r--r--Master/texmf-dist/tex/context/config/cont-en.ini2
-rw-r--r--Master/texmf-dist/tex/context/config/cont-fr.ini2
-rw-r--r--Master/texmf-dist/tex/context/config/cont-it.ini2
-rw-r--r--Master/texmf-dist/tex/context/config/cont-nl.ini2
-rw-r--r--Master/texmf-dist/tex/context/config/cont-ro.ini2
-rw-r--r--Master/texmf-dist/tex/context/fonts/cc-icons.lfg23
-rw-r--r--Master/texmf-dist/tex/context/fonts/demo.lfg29
-rw-r--r--Master/texmf-dist/tex/context/fonts/husayni.lfg6
-rw-r--r--Master/texmf-dist/tex/context/fonts/lm-math.lfg126
-rw-r--r--Master/texmf-dist/tex/context/fonts/lm.lfg35
-rw-r--r--Master/texmf-dist/tex/context/fonts/lucida-opentype-math.lfg7
-rw-r--r--Master/texmf-dist/tex/context/fonts/texgyre.lfg24
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-cs.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-de.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-en.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-fr.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-it.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-nl.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-pe.xml16
-rw-r--r--Master/texmf-dist/tex/context/interface/keys-ro.xml16
-rw-r--r--Master/texmf-dist/tex/context/sample/d-res-01.xml50
-rw-r--r--Master/texmf-dist/tex/context/sample/lorem.tex11
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x1a-2001.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x1a-2003.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x3-2002.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x3-2003.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x4.mkiv2
-rw-r--r--Master/texmf-dist/tex/context/test/pdf-x4p.mkiv4
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua84
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua9
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-ext.lua16
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua22453
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua133
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-languages.lua45
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-languages.tex17
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-mplib.tex11
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-plain.tex7
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.lua25
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.tex11
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.lua62
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.tex20
-rw-r--r--Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex18
-rw-r--r--Master/texmf-dist/tex/mptopdf/config/mptopdf.ini6
-rwxr-xr-xMaster/tlpkg/libexec/ctan2tds4
-rwxr-xr-xMaster/tlpkg/libexec/mptopdf-extract8
1044 files changed, 167519 insertions, 98985 deletions
diff --git a/Build/source/texk/texlive/linked_scripts/Makefile.am b/Build/source/texk/texlive/linked_scripts/Makefile.am
index 1eec8f89dda..935347388d2 100644
--- a/Build/source/texk/texlive/linked_scripts/Makefile.am
+++ b/Build/source/texk/texlive/linked_scripts/Makefile.am
@@ -159,8 +159,14 @@ texmf_dist_other_scripts = \
urlbst/urlbst \
vpe/vpe.pl
texmf_dist_context_scripts = \
- context/lua/mtxrun.lua \
- context/shell/context.sh
+ context/stubs/unix/context \
+ context/stubs/unix/ctxtools \
+ context/stubs/unix/luatools \
+ context/stubs/unix/mtxrun \
+ context/stubs/unix/pstopdf \
+ context/stubs/unix/texexec \
+ context/stubs/unix/texmfstart
+
nobase_dist_texmf_dist_scripts_SCRIPTS = \
$(texmf_dist_shell_scripts) \
$(texmf_dist_other_scripts) \
diff --git a/Build/source/texk/texlive/linked_scripts/Makefile.in b/Build/source/texk/texlive/linked_scripts/Makefile.in
index 7f7fcd0956a..07bd4df4c95 100644
--- a/Build/source/texk/texlive/linked_scripts/Makefile.in
+++ b/Build/source/texk/texlive/linked_scripts/Makefile.in
@@ -337,8 +337,13 @@ texmf_dist_other_scripts = \
vpe/vpe.pl
texmf_dist_context_scripts = \
- context/lua/mtxrun.lua \
- context/shell/context.sh
+ context/stubs/unix/context \
+ context/stubs/unix/ctxtools \
+ context/stubs/unix/luatools \
+ context/stubs/unix/mtxrun \
+ context/stubs/unix/pstopdf \
+ context/stubs/unix/texexec \
+ context/stubs/unix/texmfstart
nobase_dist_texmf_dist_scripts_SCRIPTS = \
$(texmf_dist_shell_scripts) \
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/context b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/context
new file mode 100644
index 00000000000..fa62ba8d109
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/context
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script context "$@"
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/ctxtools b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/ctxtools
new file mode 100644
index 00000000000..2e6bd4afaa0
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/ctxtools
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script ctxtools "$@"
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/luatools b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/luatools
new file mode 100644
index 00000000000..c17b483be53
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/luatools
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script base "$@"
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/mtxrun b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/mtxrun
new file mode 100644
index 00000000000..00f63a5791d
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/mtxrun
@@ -0,0 +1,17082 @@
+#!/usr/bin/env texlua
+
+-- for k, v in next, _G.string do
+-- local tv = type(v)
+-- if tv == "table" then
+-- for kk, vv in next, v do
+-- print(k,kk,vv)
+-- end
+-- else
+-- print(tv,k,v)
+-- end
+-- end
+
+if not modules then modules = { } end modules ['mtxrun'] = {
+ version = 1.001,
+ comment = "runner, lua replacement for texmfstart.rb",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- one can make a stub:
+--
+-- #!/bin/sh
+-- env LUATEXDIR=/....../texmf/scripts/context/lua luatex --luaonly mtxrun.lua "$@"
+
+-- filename : mtxrun.lua
+-- comment : companion to context.tex
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+-- This script is based on texmfstart.rb but does not use kpsewhich to
+-- locate files. Although kpse is a library, it never exposed its
+-- interface to other programs (especially scripting languages), so we
+-- do the lookups ourselves. The lua variant evolved out of an
+-- experimental ruby one. Interestingly, using a scripting language
+-- instead of c carries no speed penalty; the lua variant is actually
+-- more efficient, especially when multiple calls to kpsewhich would
+-- otherwise be involved, and the lua library also gives far more control.
+
+-- to be done / considered
+--
+-- support for --exec or make it default
+-- support for jar files (or maybe not, never used, too messy)
+-- support for $RUBYINPUTS cum suis (if still needed)
+-- remember for subruns: _CTX_K_V_#{original}_
+-- remember for subruns: _CTX_K_S_#{original}_
+-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
+
+-- begin library merge
+
+
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-lua"] = package.loaded["l-lua"] or true
+
+-- original size: 10048, stripped down to: 5684
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+local type=type
+local gsub,format=string.gsub,string.format
+local package=package
+local searchers=package.searchers or package.loaders
+local libpaths=nil
+local clibpaths=nil
+local libhash={}
+local clibhash={}
+local libextras={}
+local clibextras={}
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local helpers=package.helpers or {
+ libpaths=function() return {} end,
+ clibpaths=function() return {} end,
+ cleanpath=cleanpath,
+ trace=false,
+ report=function(...) print(format(...)) end,
+}
+package.helpers=helpers
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+package.libpaths=getlibpaths
+package.clibpaths=getclibpaths
+local function addpath(what,paths,extras,hash,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths+1]=path
+ extras[#extras+1]=path
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths,extras
+end
+function package.extralibpath(...)
+ libpaths,libextras=addpath("lua",getlibpaths(),libextras,libhash,...)
+end
+function package.extraclibpath(...)
+ clibpaths,clibextras=addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
+if not searchers[-2] then
+ searchers[-2]=searchers[2]
+end
+searchers[2]=function(name)
+ return helpers.loaded(name)
+end
+searchers[3]=nil
+local function loadedaslib(resolved,rawname)
+ local init="luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true,searchers[-2](name)
+end
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+helpers.loadedaslib=loadedaslib
+helpers.loadedbylua=loadedbylua
+helpers.loadedbypath=loadedbypath
+helpers.notloaded=notloaded
+function helpers.loaded(name)
+ local thename=gsub(name,"%.","/")
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix or "so")
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
+
+
+end -- of closure
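-- Editorial sketch (not part of the committed file): with the l-lua helpers
-- above in place, extra module search paths can be registered and are then
-- consulted by the replaced package searcher. The directory and module names
-- below are hypothetical placeholders.
package.extralibpath("./extra-lua")          -- additional directory for *.lua modules
package.extraclibpath("./extra-clib")        -- additional directory for shared libraries
local demo = optionalrequire("demo-module")  -- nil (not an error) when the module is absent
if demo then print("demo-module found") end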
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
+
+-- original size: 26252, stripped down to: 14371
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
+
+
+end -- of closure
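-- Editorial sketch (not part of the committed file): a few of the lpeg
-- helpers defined above in use; the sample strings are made-up placeholders.
local splitter = lpeg.splitat(",")                    -- cached splitter, returns captures
print(lpeg.match(splitter,"a,b,c"))                   -- a  b  c
print(table.concat(lpeg.split(";","x;y;z"),"|"))      -- x|y|z
local replacer = lpeg.replacer({ { "foo", "bar" } })  -- list of { from, to } pairs
print(lpeg.match(replacer,"foo and foo"))             -- bar and bar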
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-function"] = package.loaded["l-function"] or true
+
+-- original size: 361, stripped down to: 322
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-string"] = package.loaded["l-string"] or true
+
+-- original size: 5513, stripped down to: 2708
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+
+end -- of closure
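-- Editorial sketch (not part of the committed file): the string helpers
-- defined above at work; the sample strings are placeholders.
print(string.strip("  padded  "))               -- "padded"
print(string.limit("a rather long string",8))   -- "a rat..."
print(string.topattern("a.b"))                  -- "a%.b" (period escaped for find)
print(string.is_empty("   "))                   -- true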
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-table"] = package.loaded["l-table"] or true
+
+-- original size: 44643, stripped down to: 19717
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
+ else
+ category=3
+ end
+ end
+ end
+ if category==0 or category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+}
+local function simple_table(t)
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
+ else
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ new[i]=old[i]
+end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
+ end
+ end
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or not next(t)
+end
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+
+
+end -- of closure
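-- Editorial sketch (not part of the committed file): typical use of the table
-- helpers defined above; the data is made up.
local demo = { beta = 2, alpha = 1, list = { "x", "y" } }
for k, v in table.sortedhash(demo) do           -- iterate in sorted key order
 print(k, v)
end
print(table.serialize(demo,"demo"))             -- dump as loadable lua code
print(table.contains({ "x", "y" },"y"))         -- 2 (the index), false when absent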
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-io"] = package.loaded["l-io"] or true
+
+-- original size: 8799, stripped down to: 6325
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+    local f=io.open(f)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
+
+
+end -- of closure
+
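A short usage sketch of the io helpers above (editor's illustration; demo.txt is a hypothetical file name):

io.savedata("demo.txt", { "one", "two" }, "\n")    -- a table is concatenated with the joiner
print(io.exists("demo.txt"), io.size("demo.txt"))  -- true   7
print(io.loaddata("demo.txt"))                     -- "one\ntwo"
print(io.loadlines("demo.txt", 1))                 -- "one"
local f = io.open("demo.txt", "rb")
print(io.readnumber(f, 2))                         -- 28526: big-endian 16-bit value of "on"
f:close()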
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-number"] = package.loaded["l-number"] or true
+
+-- original size: 4939, stripped down to: 2830
+
+if not modules then modules={} end modules ['l-number']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tostring,tonumber=tostring,tonumber
+local format,floor,match,rep=string.format,math.floor,string.match,string.rep
+local concat,insert=table.concat,table.insert
+local lpegmatch=lpeg.match
+number=number or {}
+local number=number
+if bit32 then
+ local btest,bor=bit32.btest,bit32.bor
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ number.hasbit=btest
+ number.setbit=bor
+ function number.setbit(x,p)
+ return btest(x,p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return btest(x,p) and x-p or x
+ end
+else
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ function number.hasbit(x,p)
+ return x%(p+p)>=p
+ end
+ function number.setbit(x,p)
+ return (x%(p+p)>=p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return (x%(p+p)>=p) and x-p or x
+ end
+end
+if bit32 then
+ local bextract=bit32.extract
+ local t={
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ }
+ function number.tobitstring(b,m)
+ local n=32
+ for i=0,31 do
+ local v=bextract(b,i)
+ local k=32-i
+ if v==1 then
+ n=k
+ t[k]="1"
+ else
+ t[k]="0"
+ end
+ end
+ if m then
+ m=33-m*8
+ if m<1 then
+ m=1
+ end
+ return concat(t,"",m)
+ elseif n<8 then
+ return concat(t)
+ elseif n<16 then
+ return concat(t,"",9)
+ elseif n<24 then
+ return concat(t,"",17)
+ else
+ return concat(t,"",25)
+ end
+ end
+else
+ function number.tobitstring(n,m)
+ if n>0 then
+ local t={}
+ while n>0 do
+ insert(t,1,n%2>0 and 1 or 0)
+ n=floor(n/2)
+ end
+ local nn=8-#t%8
+ if nn>0 and nn<8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m=m*8-#t
+ if m>0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+    return rep("00000000",m)
+ else
+ return "00000000"
+ end
+ end
+end
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
+function number.toevenhex(n)
+ local s=format("%X",n)
+ if #s%2==0 then
+ return s
+ else
+ return "0"..s
+ end
+end
+local one=lpeg.C(1-lpeg.S('')/tonumber)^1
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+local function bits(n,i,...)
+ if n>0 then
+ local m=n%2
+ local n=floor(n/2)
+ if m>0 then
+ return bits(n,i+1,i,...)
+ else
+ return bits(n,i+1,...)
+ end
+ else
+ return...
+ end
+end
+function number.bits(n)
+ return { bits(n,1) }
+end
+
+
+end -- of closure
+
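A small sketch of the number helpers above (editor's illustration; the bit32 branch and the pure Lua fallback give the same results):

print(number.tobitstring(5))              -- "00000101"
print(number.toevenhex(4095))             -- "0FFF": padded to an even number of hex digits
print(number.hasbit(5, number.bit(1)))    -- true: number.bit(1) is the least significant bit
print(table.concat(number.bits(10), ",")) -- "4,2": 1-based positions of the set bits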
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-set"] = package.loaded["l-set"] or true
+
+-- original size: 1923, stripped down to: 1133
+
+if not modules then modules={} end modules ['l-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+set=set or {}
+local nums={}
+local tabs={}
+local concat=table.concat
+local next,type=next,type
+set.create=table.tohash
+function set.tonumber(t)
+ if next(t) then
+ local s=""
+ for k,v in next,t do
+ if v then
+ s=s.." "..k
+ end
+ end
+ local n=nums[s]
+ if not n then
+ n=#tabs+1
+ tabs[n]=t
+ nums[s]=n
+ end
+ return n
+ else
+ return 0
+ end
+end
+function set.totable(n)
+ if n==0 then
+ return {}
+ else
+ return tabs[n] or {}
+ end
+end
+function set.tolist(n)
+ if n==0 or not tabs[n] then
+ return ""
+ else
+ local t,n={},0
+ for k,v in next,tabs[n] do
+ if v then
+ n=n+1
+ t[n]=k
+ end
+ end
+ return concat(t," ")
+ end
+end
+function set.contains(n,s)
+ if type(n)=="table" then
+ return n[s]
+ elseif n==0 then
+ return false
+ else
+ local t=tabs[n]
+ return t and t[s]
+ end
+end
+
+
+end -- of closure
+
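A sketch of the set interning above (editor's illustration): a hash of true values is mapped to a small integer that can be stored and queried later.

local n = set.tonumber { alpha = true, beta = true }
print(set.contains(n, "alpha"))  -- true
print(set.contains(n, "gamma"))  -- nil
print(set.tolist(n))             -- "alpha beta" (iteration order is not guaranteed)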
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-os"] = package.loaded["l-os"] or true
+
+-- original size: 13692, stripped down to: 8406
+
+if not modules then modules={} end modules ['l-os']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local os=os
+local date,time=os.date,os.time
+local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
+local concat=table.concat
+local random,ceil,randomseed=math.random,math.ceil,math.randomseed
+local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
+math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+randomseed(math.initialseed)
+if not os.__getenv__ then
+ os.__getenv__=os.getenv
+ os.__setenv__=os.setenv
+ if os.env then
+ local osgetenv=os.getenv
+ local ossetenv=os.setenv
+ local osenv=os.env local _=osenv.PATH
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ if type(v)=="table" then
+ v=concat(v,";")
+ end
+ ossetenv(K,v)
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ else
+ local ossetenv=os.setenv
+ local osgetenv=os.getenv
+ local osenv={}
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+ os.env={}
+ setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
+ end
+end
+local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
+function os.execute(...) ioflush() return execute(...) end
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
+function os.resultof(command)
+ local handle=io.popen(command,"r")
+ return handle and handle:read("*all") or ""
+end
+if not io.fileseparator then
+ if find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
+ else
+ io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
+ end
+end
+os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
+os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
+if os.type=="windows" then
+ os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
+else
+ os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
+end
+local launchers={
+ windows="start %s",
+ macosx="open %s",
+ unix="$BROWSER %s &> /dev/null &",
+}
+function os.launch(str)
+ os.execute(format(launchers[os.name] or launchers.unix,str))
+end
+if not os.times then
+ function os.times()
+ return {
+ utime=os.gettimeofday(),
+ stime=0,
+ cutime=0,
+ cstime=0,
+ }
+ end
+end
+os.gettimeofday=os.gettimeofday or os.clock
+local startuptime=os.gettimeofday()
+function os.runtime()
+ return os.gettimeofday()-startuptime
+end
+os.resolvers=os.resolvers or {}
+local resolvers=os.resolvers
+setmetatable(os,{ __index=function(t,k)
+ local r=resolvers[k]
+ return r and r(t,k) or nil
+end })
+local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
+local function guess()
+ local architecture=os.resultof("uname -m") or ""
+ if architecture~="" then
+ return architecture
+ end
+ architecture=os.getenv("HOSTTYPE") or ""
+ if architecture~="" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+if platform~="" then
+ os.platform=platform
+elseif os.type=="windows" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform="mswin-64"
+ else
+ platform="mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="linux" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="linux-64"
+ elseif find(architecture,"ppc") then
+ platform="linux-ppc"
+ else
+ platform="linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="macosx" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
+ if architecture=="" then
+ platform="osx-intel"
+ elseif find(architecture,"i386") then
+ platform="osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform="osx-64"
+ else
+ platform="osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="sunos" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform="solaris-sparc"
+ else
+ platform="solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="freebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform="freebsd-amd64"
+ else
+ platform="freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="kfreebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="kfreebsd-amd64"
+ else
+ platform="kfreebsd-i386"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+else
+ function os.resolvers.platform(t,k)
+ local platform="linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+end
+local t={ 8,9,"a","b" }
+function os.uuid()
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
+end
+local d
+function os.timezone(delta)
+ d=d or tonumber(tonumber(date("%H")-date("!%H")))
+ if delta then
+ if d>0 then
+ return format("+%02i:00",d)
+ else
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+local timeformat=format("%%s%s",os.timezone(true))
+local dateformat="!%Y-%m-%d %H:%M:%S"
+function os.fulltime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+local dateformat="%Y-%m-%d %H:%M:%S"
+function os.localtime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return date(dateformat,t)
+end
+function os.converttime(t,default)
+ local t=tonumber(t)
+ if t and t>0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+local memory={}
+local function which(filename)
+ local fullname=memory[filename]
+ if fullname==nil then
+ local suffix=file.suffix(filename)
+ local suffixes=suffix=="" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ local df=file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs=file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname=dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname=false
+ end
+ memory[filename]=fullname
+ end
+ return fullname
+end
+os.which=which
+os.where=which
+function os.today()
+ return date("!*t")
+end
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S")
+end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
+end
+
+
+end -- of closure
+
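A sketch of the os extensions above (editor's illustration; the printed values depend on the machine, and luatex is just an example binary name):

print(os.type, os.name, os.platform) -- e.g. unix   linux   linux-64 (resolved lazily via os.resolvers)
print(os.uuid())                     -- a random, version 4 style uuid
print(os.now())                      -- the current UTC time as "YYYY-MM-DD HH:MM:SS"
print(os.which("luatex"))            -- full path of the binary found on PATH, or false
os.setenv("MTX_DEMO", "1")           -- keys are uppercased internally
print(os.getenv("mtx_demo"))         -- "1"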
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-file"] = package.loaded["l-file"] or true
+
+-- original size: 16648, stripped down to: 9051
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+
+
+end -- of closure
+
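A sketch of the file name helpers above (editor's illustration with made-up paths):

print(file.basename("/tmp/test.tex"))        -- "test.tex"
print(file.nameonly("/tmp/test.tex"))        -- "test"
print(file.suffix("/tmp/test.tex"))          -- "tex"
print(file.addsuffix("chapter", "tex"))      -- "chapter.tex": only added when no suffix is present
print(file.replacesuffix("test.tex", "pdf")) -- "test.pdf"
print(file.join("a", "b", "c.tex"))          -- "a/b/c.tex"
print(file.collapsepath("a/b/../c.tex"))     -- "a/c.tex"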
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-md5"] = package.loaded["l-md5"] or true
+
+-- original size: 3760, stripped down to: 2088
+
+if not modules then modules={} end modules ['l-md5']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not md5 then
+ md5=optionalrequire("md5")
+end
+if not md5 then
+ md5={
+ sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+local md5,file=md5,file
+local gsub,format,byte=string.gsub,string.format,string.byte
+local md5sum=md5.sum
+local function convert(str,fmt)
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+end
+if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
+if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
+if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
+function file.needsupdating(oldname,newname,threshold)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime=lfs.attributes(newname,"modification")
+ if not newtime then
+ return true
+ elseif newtime>=oldtime then
+ return false
+ elseif oldtime-newtime<(threshold or 1) then
+ return false
+ else
+ return true
+ end
+ else
+ return false
+ end
+end
+file.needs_updating=file.needsupdating
+function file.syncmtimes(oldname,newname)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
+end
+function file.checksum(name)
+ if md5 then
+ local data=io.loaddata(name)
+ if data then
+ return md5.HEX(data)
+ end
+ end
+ return nil
+end
+function file.loadchecksum(name)
+ if md5 then
+ local data=io.loaddata(name..".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
+end
+function file.savechecksum(name,checksum)
+ if not checksum then checksum=file.checksum(name) end
+ if checksum then
+ io.savedata(name..".md5",checksum)
+ return checksum
+ end
+ return nil
+end
+
+
+end -- of closure
+
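A sketch of the checksum helpers above (editor's illustration; somefile.tex and source.tex are hypothetical, and the md5 library is assumed to be present, as it is in luatex):

print(md5.hex("context"))                 -- 32 lowercase hex digits
local sum = file.checksum("somefile.tex") -- uppercase md5 of the file content, or nil
if sum and sum ~= file.loadchecksum("somefile.tex") then
  file.savechecksum("somefile.tex", sum)  -- stores the sum in somefile.tex.md5
end
print(file.needsupdating("source.tex", "result.pdf")) -- true when result.pdf is missing or older than source.tex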
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-url"] = package.loaded["l-url"] or true
+
+-- original size: 11806, stripped down to: 5417
+
+if not modules then modules={} end modules ['l-url']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local char,format,byte=string.char,string.format,string.byte
+local concat=table.concat
+local tonumber,type=tonumber,type
+local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
+local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
+url=url or {}
+local url=url
+local tochar=function(s) return char(tonumber(s,16)) end
+local colon=P(":")
+local qmark=P("?")
+local hash=P("#")
+local slash=P("/")
+local percent=P("%")
+local endofstring=P(-1)
+local hexdigit=R("09","AF","af")
+local plus=P("+")
+local nothing=Cc("")
+local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
+local escaped=(plus/" ")+escapedchar
+local noslash=P("/")/""
+local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr=Cs((escaped+(1- qmark-hash))^0)
+local querystr=Cs(((1- hash))^0)
+local fragmentstr=Cs((escaped+(1- endofstring))^0)
+local scheme=schemestr*colon+nothing
+local authority=slash*slash*authoritystr+nothing
+local path=slash*pathstr+nothing
+local query=qmark*querystr+nothing
+local fragment=hash*fragmentstr+nothing
+local validurl=scheme*authority*path*query*fragment
+local parser=Ct(validurl)
+lpegpatterns.url=validurl
+lpegpatterns.urlsplitter=parser
+local escapes={}
+setmetatable(escapes,{ __index=function(t,k)
+ local v=format("%%%02X",byte(k))
+ t[k]=v
+ return v
+end })
+local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
+local unescaper=Cs((escapedchar+1)^0)
+lpegpatterns.urlunescaped=escapedchar
+lpegpatterns.urlescaper=escaper
+lpegpatterns.urlunescaper=unescaper
+local function split(str)
+ return (type(str)=="string" and lpegmatch(parser,str)) or str
+end
+local isscheme=schemestr*colon*slash*slash
+local function hasscheme(str)
+ if str then
+ local scheme=lpegmatch(isscheme,str)
+ return scheme~="" and scheme or false
+ else
+ return false
+ end
+end
+local rootletter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
+local rootbased=P("/")+rootletter*P(":")
+local barswapper=replacer("|",":")
+local backslashswapper=replacer("\\","/")
+local equal=P("=")
+local amp=P("&")
+local key=Cs(((escapedchar+1)-equal )^0)
+local value=Cs(((escapedchar+1)-amp -endofstring)^0)
+local splitquery=Cf (Ct("")*P { "sequence",
+ sequence=V("pair")*(amp*V("pair"))^0,
+ pair=Cg(key*equal*value),
+},rawset)
+local function hashed(str)
+ if str=="" then
+ return {
+ scheme="invalid",
+ original=str,
+ }
+ end
+ local s=split(str)
+ local rawscheme=s[1]
+ local rawquery=s[4]
+ local somescheme=rawscheme~=""
+ local somequery=rawquery~=""
+ if not somescheme and not somequery then
+ s={
+ scheme="file",
+ authority="",
+ path=str,
+ query="",
+ fragment="",
+ original=str,
+ noscheme=true,
+ filename=str,
+ }
+ else
+ local authority,path,filename=s[2],s[3]
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
+ else
+ filename=authority.."/"..path
+ end
+ s={
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=s[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
+ end
+ return s
+end
+url.split=split
+url.hasscheme=hasscheme
+url.hashed=hashed
+function url.addscheme(str,scheme)
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///"..str
+ else
+ return scheme..":///"..str
+ end
+end
+function url.construct(hash)
+ local fullurl,f={},0
+ local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
+ if scheme and scheme~="" then
+ f=f+1;fullurl[f]=scheme.."://"
+ end
+ if authority and authority~="" then
+ f=f+1;fullurl[f]=authority
+ end
+ if path and path~="" then
+ f=f+1;fullurl[f]="/"..path
+ end
+ if query and query~="" then
+ f=f+1;fullurl[f]="?"..query
+ end
+ if fragment and fragment~="" then
+ f=f+1;fullurl[f]="#"..fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
+local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
+function url.filename(filename)
+ local spec=hashed(filename)
+ local path=spec.path
+ return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
+end
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+url.escape=escapestring
+function url.query(str)
+ if type(str)=="string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
+function url.toquery(data)
+ local td=type(data)
+ if td=="string" then
+    return #data>0 and escapestring(data) or nil
+ elseif td=="table" then
+ if next(data) then
+ local t={}
+ for k,v in next,data do
+ t[#t+1]=format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ end
+end
+local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
+function url.barepath(path)
+ if not path or path=="" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
+end
+
+
+end -- of closure
+
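A sketch of the url helpers above (editor's illustration; the url is a made-up example):

local u = url.hashed("http://www.example.org/path/file.pdf?a=1#top")
print(u.scheme, u.authority, u.path) -- http   www.example.org   path/file.pdf
print(u.queries.a, u.fragment)       -- 1   top
print(url.hasscheme("readme.txt"))   -- false
print(url.addscheme("readme.txt"))   -- "file:///readme.txt"
print(url.escape("hello world"))     -- "hello%20world"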
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-dir"] = package.loaded["l-dir"] or true
+
+-- original size: 13139, stripped down to: 8196
+
+if not modules then modules={} end modules ['l-dir']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,select=type,select
+local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local lpegmatch=lpeg.match
+local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
+dir=dir or {}
+local dir=dir
+local lfs=lfs
+local attributes=lfs.attributes
+local walkdir=lfs.dir
+local isdir=lfs.isdir
+local isfile=lfs.isfile
+local currentdir=lfs.currentdir
+local chdir=lfs.chdir
+if not isdir then
+ function isdir(name)
+ local a=attributes(name)
+ return a and a.mode=="directory"
+ end
+ lfs.isdir=isdir
+end
+if not isfile then
+ function isfile(name)
+ local a=attributes(name)
+ return a and a.mode=="file"
+ end
+ lfs.isfile=isfile
+end
+function dir.current()
+ return (gsub(currentdir(),"\\","/"))
+end
+local lfsisdir=isdir
+local function isdir(path)
+ path=gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
+end
+lfs.isdir=isdir
+local function globpattern(path,patt,recurse,action)
+ if path=="/" then
+ path=path.."."
+ elseif not find(path,"/$") then
+ path=path..'/'
+ end
+ if isdir(path) then
+ for name in walkdir(path) do
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+dir.globpattern=globpattern
+local function collectpattern(path,patt,recurse,result)
+ local ok,scanner
+ result=result or {}
+ if path=="/" then
+ ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
+ else
+ ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
+ end
+ if ok and type(scanner)=="function" then
+ if not find(path,"/$") then path=path..'/' end
+ for name in scanner,first do
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ attr.list=collectpattern(full,patt,recurse)
+ result[name]=attr
+ end
+ end
+ end
+ return result
+end
+dir.collectpattern=collectpattern
+local pattern=Ct {
+ [1]=(C(P(".")+P("/")^1)+C(R("az","AZ")*P(":")*P("/")^0)+Cc("./"))*V(2)*V(3),
+ [2]=C(((1-S("*?/"))^0*P("/"))^0),
+ [3]=C(P(1)^0)
+}
+local filter=Cs ((
+ P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
+)^0 )
+local function glob(str,t)
+ if type(t)=="function" then
+ if type(str)=="table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
+ else
+ local split=lpegmatch(pattern,str)
+ if split then
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str)=="table" then
+ local t=t or {}
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1]=str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split=lpegmatch(pattern,str)
+ if split then
+ local t=t or {}
+ local action=action or function(name) t[#t+1]=name end
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return {}
+ end
+ end
+ end
+end
+dir.glob=glob
+local function globfiles(path,recurse,func,files)
+ if type(func)=="string" then
+ local s=func
+ func=function(name) return find(name,s) end
+ end
+ files=files or {}
+ local noffiles=#files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ else
+ local mode=attributes(name,'mode')
+ if mode=="directory" then
+ if recurse then
+ globfiles(path.."/"..name,recurse,func,files)
+ end
+ elseif mode=="file" then
+ if not func or func(name) then
+ noffiles=noffiles+1
+ files[noffiles]=path.."/"..name
+ end
+ end
+ end
+ end
+ return files
+end
+dir.globfiles=globfiles
+function dir.ls(pattern)
+ return concat(glob(pattern),"\n")
+end
+local make_indeed=true
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
+if onwindows then
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
+ end
+ local first,middle,last
+ local drive=false
+ first,middle,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ else
+ first,last=match(str,"^(//)/*(.-)$")
+ if first then
+ middle,last=match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth="//"..middle
+ else
+ pth="//"..last
+ last=""
+ end
+ else
+ first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
+ if first then
+ pth,drive=first..middle,true
+ else
+ middle,last=match(str,"^(/*)(.-)$")
+ if not middle then
+ last=str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth=="" then
+ pth=s
+ elseif drive then
+ pth,drive=pth..s,false
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
+else
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
+ end
+ end
+ end
+ str=gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth="/"
+ for s in gmatch(str,"[^/]+") do
+ local first=(pth=="/")
+ if first then
+ pth=pth..s
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ else
+ pth="."
+ for s in gmatch(str,"[^/]+") do
+ pth=pth.."/"..s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
+end
+dir.makedirs=dir.mkdirs
+if onwindows then
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=dir.current().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first,last=dir.current(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
+ end
+ end
+else
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
+ end
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
+ end
+end
+file.expandname=dir.expandname
+local stack={}
+function dir.push(newdir)
+ insert(stack,currentdir())
+ if newdir and newdir~="" then
+ chdir(newdir)
+ end
+end
+function dir.pop()
+ local d=remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
+end
+
+
+end -- of closure
+
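A sketch of the directory helpers above (editor's illustration; tmp/demo is a made-up path and dir.mkdirs really creates it):

print(dir.current())                       -- the current directory, with forward slashes
local pth, ok = dir.mkdirs("tmp", "demo")  -- creates the nested path, returns it plus a success flag
for _, name in ipairs(dir.glob("tmp/*.tex")) do
  print(name)                              -- .tex files directly under tmp
end
dir.push("tmp"); print(dir.current()); dir.pop() -- temporary directory change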
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
+
+-- original size: 1781, stripped down to: 1503
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
+ end
+ end
+ return default
+end
+
+
+end -- of closure
+
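A sketch of the boolean coercion helpers above (editor's illustration):

print(toboolean("true"))               -- true
print(toboolean("yes"))                -- false: "yes" only counts in tolerant mode
print(toboolean("yes", true))          -- true
print(string.is_boolean("off"))        -- false
print(string.is_boolean("maybe", "?")) -- "?": the default is returned for unknown strings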
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
+
+-- original size: 26810, stripped down to: 11943
+
+if not modules then modules={} end modules ['l-unicode']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utf=utf or (unicode and unicode.utf8) or {}
+utf.characters=utf.characters or string.utfcharacters
+utf.values=utf.values or string.utfvalues
+local type=type
+local char,byte,format,sub=string.char,string.byte,string.format,string.sub
+local concat=table.concat
+local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local bytepairs=string.bytepairs
+local finder=lpeg.finder
+local replacer=lpeg.replacer
+local utfvalues=utf.values
+local utfgmatch=utf.gmatch
+local p_utftype=patterns.utftype
+local p_utfoffset=patterns.utfoffset
+local p_utf8char=patterns.utf8char
+local p_utf8byte=patterns.utf8byte
+local p_utfbom=patterns.utfbom
+local p_newline=patterns.newline
+local p_whitespace=patterns.whitespace
+if not unicode then
+ unicode={ utf=utf }
+end
+if not utf.char then
+ local floor,char=math.floor,string.char
+ function utf.char(n)
+ if n<0x80 then
+ return char(n)
+ elseif n<0x800 then
+ return char(
+ 0xC0+floor(n/0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x10000 then
+ return char(
+ 0xE0+floor(n/0x1000),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x200000 then
+ return char(
+ 0xF0+floor(n/0x40000),
+ 0x80+(floor(n/0x1000)%0x40),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ else
+ return ""
+ end
+ end
+end
+if not utf.byte then
+ local utf8byte=patterns.utf8byte
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+end
+local utfchar,utfbyte=utf.char,utf.byte
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+local toentities=Cs (
+ (
+ patterns.utf8one+(
+ patterns.utf8two+patterns.utf8three+patterns.utf8four
+ )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+patterns.toentities=toentities
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+local one=P(1)
+local two=C(1)*C(1)
+local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
+local pattern=P("\254\255")*Cs((
+ four/function(a,b,c,d)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(a,b)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )+P("\255\254")*Cs((
+ four/function(b,a,d,c)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(b,a)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s
+end
+local validatedutf=Cs (
+ (
+ patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
+ )^0
+)
+patterns.validatedutf=validatedutf
+function utf.is_valid(str)
+ return type(str)=="string" and lpegmatch(validatedutf,str) or false
+end
+if not utf.len then
+ local n,f=0,1
+ local utfcharcounter=patterns.utfbom^-1*Cmt (
+ Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
+ function(_,t,d)
+ n=n+(t-f)/d
+ f=t
+ return true
+ end
+ )^0
+ function utf.len(str)
+ n,f=0,1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+end
+utf.length=utf.len
+if not utf.sub then
+ local utflength=utf.length
+ local b,e,n,first,last=0,0,0,0,0
+ local function slide_zero(s,p)
+ n=n+1
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_one(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ end
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_two(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ else
+ return true
+ end
+ end
+ local pattern_zero=Cmt(p_utf8char,slide_zero)^0
+ local pattern_one=Cmt(p_utf8char,slide_one )^0
+ local pattern_two=Cmt(p_utf8char,slide_two )^0
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start==0 then
+ start=1
+ end
+ if not stop then
+ if start<0 then
+ local l=utflength(str)
+ start=l+start
+ else
+ start=start-1
+ end
+ b,n,first=0,0,start
+ lpegmatch(pattern_two,str)
+ if n>=first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start<0 or stop<0 then
+ local l=utf.length(str)
+ if start<0 then
+ start=l+start
+ if start<=0 then
+ start=1
+ else
+ start=start+1
+ end
+ end
+ if stop<0 then
+ stop=l+stop
+ if stop==0 then
+ stop=1
+ else
+ stop=stop+1
+ end
+ end
+ end
+ if start>stop then
+ return ""
+ elseif start>1 then
+ b,e,n,first,last=0,0,0,start-1,stop
+ lpegmatch(pattern_one,str)
+ if n>=first and e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ else
+ b,e,n,last=1,0,0,stop
+ lpegmatch(pattern_zero,str)
+ if e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ end
+ end
+end
+function utf.remapper(mapping)
+ local pattern=Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+end
+function utf.replacer(t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+function utf.subtituter(t)
+ local f=finder (t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ local i=lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i>#str then
+ return str
+ else
+ return lpegmatch(r,str)
+ end
+ end
+end
+local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
+local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
+patterns.utflinesplitter=utflinesplitter
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+function utf.split(str,ignorewhitespace)
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+function utf.totable(str)
+ return lpegmatch(utfcharsplitter_raw,str)
+end
+function utf.magic(f)
+ local str=f:read(4) or ""
+ local off=lpegmatch(p_utfoffset,str)
+ if off<4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
+end
+local function utf16_to_utf8_be(t)
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*left+right
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
+ end
+ end
+ end
+ t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+local function utf16_to_utf8_le(t)
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*right+left
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
+ end
+ end
+ end
+ t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+local function utf32_to_utf8_be(t)
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*256*256*a+256*256*b
+ else
+ r=r+1
+          result[r]=utfchar(more+256*a+b)
+ more=-1
+ end
+ else
+ break
+ end
+ end
+ t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+local function utf32_to_utf8_le(t)
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*b+a
+ else
+ r=r+1
+          result[r]=utfchar(more+256*256*256*b+256*256*a)
+ more=-1
+ end
+ else
+ break
+ end
+ end
+ t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+utf.utf32_to_utf8_be=utf32_to_utf8_be
+utf.utf32_to_utf8_le=utf32_to_utf8_le
+utf.utf16_to_utf8_be=utf16_to_utf8_be
+utf.utf16_to_utf8_le=utf16_to_utf8_le
+function utf.utf8_to_utf8(t)
+ return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
+end
+function utf.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+end
+function utf.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+end
+local function little(c)
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b%256,b/256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+local function big(c)
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b/256,b%256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+local _,l_remap=utf.remapper(little)
+local _,b_remap=utf.remapper(big)
+function utf.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254)..lpegmatch(l_remap,str)
+ else
+ return char(254,255)..lpegmatch(b_remap,str)
+ end
+end
+local pattern=Cs (
+ (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
+end
+function utf.ustring(s)
+ return format("U+%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.xstring(s)
+ return format("0x%05X",type(s)=="number" and s or utfbyte(s))
+end
+local p_nany=p_utf8char/""
+if utfgmatch then
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local n=0
+ for _ in utfgmatch(str,what) do
+ n=n+1
+ end
+ return n
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+else
+ local cache={}
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local p=cache[what]
+ if not p then
+ p=Cs((P(what)/" "+p_nany)^0)
+        cache[what]=p
+ end
+ return #lpegmatch(p,str)
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+end
+if not utf.characters then
+ function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+ string.utfcharacters=utf.characters
+end
+if not utf.values then
+ local find=string.find
+ local dummy=function()
+ end
+ function utf.values(str)
+ local n=#str
+ if n==0 then
+ return dummy
+ elseif n==1 then
+ return function() return utfbyte(str) end
+ else
+ local p=1
+ return function()
+ local b,e=find(str,".[\128-\191]*",p)
+ if b then
+ p=e+1
+ return utfbyte(sub(str,b,e))
+ end
+ end
+ end
+ end
+ string.utfvalues=utf.values
+end
+
+
+end -- of closure
+
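A sketch of the utf helpers above (editor's illustration; the literals are plain utf-8 strings):

print(utf.char(0xE9))           -- "é"
print(utf.byte("é"))            -- 233
print(utf.len("déjà vu"))       -- 7 characters (9 bytes)
print(utf.sub("déjà vu", 1, 4)) -- "déjà"
print(utf.tocodes("tex"))       -- "0x0074 0x0065 0x0078"
print(utf.ustring(0x1D4A9))     -- "U+1D4A9"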
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-math"] = package.loaded["l-math"] or true
+
+-- original size: 915, stripped down to: 836
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
+
+
+end -- of closure
+
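A sketch of the small math additions above (editor's illustration):

print(math.round(2.5))                -- 3
print(math.div(7, 2), math.mod(7, 2)) -- 3   1
print(math.sind(90))                  -- 1: degree based variants of sin, cos and tan
print(math.odd(3), math.even(3))      -- true   false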
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-str"] = package.loaded["util-str"] or true
+
+-- original size: 22834, stripped down to: 12570
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
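+-- A minimal usage sketch (illustration only): tabs are expanded up to the next
+-- multiple of the requested tab width, 7 when no width is passed:
+--
+--   strings.tabtospace("1\t2",4)  --> "1   2"    ("2" ends up in column 5)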
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
+end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-tab"] = package.loaded["util-tab"] or true
+
+-- original size: 14491, stripped down to: 8512
+
+if not modules then modules={} end modules ['util-tab']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.tables=utilities.tables or {}
+local tables=utilities.tables
+local format,gmatch,gsub=string.format,string.gmatch,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
+local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
+local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
+local serialize,sortedkeys,sortedpairs=table.serialize,table.sortedkeys,table.sortedpairs
+local formatters=string.formatters
+local splitter=lpeg.tsplitat(".")
+function tables.definetable(target,nofirst,nolast)
+ local composed,shortcut,t=nil,nil,{}
+ local snippets=lpegmatch(splitter,target)
+ for i=1,#snippets-(nolast and 1 or 0) do
+ local name=snippets[i]
+ if composed then
+ composed=shortcut.."."..name
+ shortcut=shortcut.."_"..name
+ t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ else
+ composed=name
+ shortcut=name
+ if not nofirst then
+ t[#t+1]=formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed=shortcut.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+end
+function tables.definedtable(...)
+ local t=_G
+ for i=1,select("#",...) do
+ local li=select(i,...)
+ local tl=t[li]
+ if not tl then
+ tl={}
+ t[li]=tl
+ end
+ t=tl
+ end
+ return t
+end
+function tables.accesstable(target,root)
+ local t=root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t=t[name]
+ if not t then
+ return
+ end
+ end
+ return t
+end
+function tables.migratetable(target,v,root)
+ local t=root or _G
+ local names=string.split(target,".")
+ for i=1,#names-1 do
+ local name=names[i]
+ t[name]=t[name] or {}
+ t=t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]]=v
+end
+function tables.removevalue(t,value)
+ if value then
+ for i=1,#t do
+ if t[i]==value then
+ remove(t,i)
+ end
+ end
+ end
+end
+function tables.insertbeforevalue(t,value,extra)
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
+end
+function tables.insertaftervalue(t,value,extra)
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
+local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
+function table.tocsv(t,specification)
+ if t and #t>0 then
+ local result={}
+ local r={}
+ specification=specification or {}
+ local fields=specification.fields
+ if type(fields)~="string" then
+ fields=sortedkeys(t[1])
+ end
+ local separator=specification.separator or ","
+ if specification.preamble==true then
+ for f=1,#fields do
+ r[f]=lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1]=concat(r,separator)
+ end
+ for i=1,#t do
+ local ti=t[i]
+ for f=1,#fields do
+ local field=ti[fields[f]]
+ if type(field)=="string" then
+ r[f]=lpegmatch(escape,field)
+ else
+ r[f]=tostring(field)
+ end
+ end
+ result[#result+1]=concat(r,separator)
+ end
+ return concat(result,"\n")
+ else
+ return ""
+ end
+end
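+-- A minimal usage sketch (illustration only): field names default to the
+-- sorted keys of the first record unless specification.fields is given:
+--
+--   table.tocsv({ { a = 1, b = "x" } },{ preamble = true })
+--     --> '"a","b"' .. "\n" .. '1,"x"'    (strings quoted, numbers plain)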
+local nspaces=utilities.strings.newrepeater(" ")
+local function toxml(t,d,result,step)
+ for k,v in sortedpairs(t) do
+ local s=nspaces[d]
+ local tk=type(k)
+ local tv=type(v)
+ if tv=="table" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</entry>"](s,k)
+ else
+ result[#result+1]=formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</%s>"](s,k)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+function table.toxml(t,specification)
+ specification=specification or {}
+ local name=specification.name
+ local noroot=name==false
+ local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent=specification.indent or 0
+ local spaces=specification.spaces or 1
+ if noroot then
+ toxml(t,indent,result,spaces)
+ else
+ toxml({ [name or "data"]=t },indent,result,spaces)
+ end
+ return concat(result,"\n")
+end
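+-- A minimal usage sketch (illustration only; string values pass through the
+-- %!xml! escape defined in util-str):
+--
+--   table.toxml({ title = "a<b" },{ name = "doc" })
+--     --> <?xml version='1.0' standalone='yes' ?>
+--         <doc>
+--          <title>a&lt;b</title>
+--         </doc>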
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule)~="table" then
+ protect=true
+ capsule={}
+ end
+ for key,value in next,core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key]=value
+ end
+ end
+ if protect then
+ for key,value in next,core do
+ core[key]=nil
+ end
+ setmetatable(core,{
+ __index=capsule,
+ __newindex=function(t,key,value)
+ if capsule[key] then
+     print(formatters["\ninvalid %s %a in %a"]("overload",key,core))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+local function fastserialize(t,r,outer)
+ r[#r+1]="{"
+ local n=#t
+ if n>0 then
+ for i=1,n do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["%q,"](v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["%s,"](v)
+ elseif tv=="table" then
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["%S,"](v)
+ end
+ end
+ else
+ for k,v in next,t do
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["[%q]=%q,"](k,v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["[%q]=%s,"](k,v)
+ elseif tv=="table" then
+ r[#r+1]=formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1]="}"
+ else
+ r[#r+1]="},"
+ end
+ return r
+end
+function table.fastserialize(t,prefix)
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+function table.deserialize(str)
+ if not str or str=="" then
+ return
+ end
+ local code=load(str)
+ if not code then
+ return
+ end
+ code=code()
+ if not code then
+ return
+ end
+ return code
+end
+function table.load(filename)
+ if filename then
+ local t=io.loaddata(filename)
+ if t and t~="" then
+ t=load(t)
+ if type(t)=="function" then
+ t=t()
+ if type(t)=="table" then
+ return t
+ end
+ end
+ end
+ end
+end
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n==nil and true or n,...))
+end
+local function slowdrop(t)
+ local r={}
+ local l={}
+ for i=1,#t do
+ local ti=t[i]
+ local j=0
+ for k,v in next,ti do
+ j=j+1
+ l[j]=formatters["%s=%q"](k,v)
+ end
+ r[i]=formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+local function fastdrop(t)
+ local r={ "return {\n" }
+ for i=1,#t do
+ local ti=t[i]
+ r[#r+1]=" {"
+ for k,v in next,ti do
+ r[#r+1]=formatters["%s=%q"](k,v)
+ end
+ r[#r+1]="},\n"
+ end
+ r[#r+1]="}"
+ return concat(r)
+end
+function table.drop(t,slow)
+ if #t==0 then
+ return "return { }"
+ elseif slow==true then
+ return slowdrop(t)
+ else
+ return fastdrop(t)
+ end
+end
+function table.autokey(t,k)
+ local v={}
+ t[k]=v
+ return v
+end
+local selfmapper={ __index=function(t,k) t[k]=k return k end }
+function table.twowaymapper(t)
+ if not t then
+ t={}
+ else
+ for i=0,#t do
+ local ti=t[i]
+ if ti then
+ local i=tostring(i)
+ t[i]=ti
+ t[ti]=i
+ end
+ end
+ t[""]=t[0] or ""
+ end
+ setmetatable(t,selfmapper)
+ return t
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-sto"] = package.loaded["util-sto"] or true
+
+-- original size: 4432, stripped down to: 3123
+
+if not modules then modules={} end modules ['util-sto']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local setmetatable,getmetatable,type=setmetatable,getmetatable,type
+utilities=utilities or {}
+utilities.storage=utilities.storage or {}
+local storage=utilities.storage
+function storage.mark(t)
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
+end
+function storage.allocate(t)
+ t=t or {}
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
+end
+function storage.marked(t)
+ local m=getmetatable(t)
+ return m and m.__storage__
+end
+function storage.checked(t)
+ if not t then
+  print("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
+end
+function storage.setinitializer(data,initialize)
+ local m=getmetatable(data) or {}
+ m.__index=function(data,k)
+ m.__index=nil
+ initialize()
+ return data[k]
+ end
+ setmetatable(data,m)
+end
+local keyisvalue={ __index=function(t,k)
+ t[k]=k
+ return k
+end }
+function storage.sparse(t)
+ t=t or {}
+ setmetatable(t,keyisvalue)
+ return t
+end
+local function f_empty () return "" end
+local function f_self (t,k) t[k]=k return k end
+local function f_table (t,k) local v={} t[k]=v return v end
+local function f_ignore() end
+local t_empty={ __index=f_empty }
+local t_self={ __index=f_self }
+local t_table={ __index=f_table }
+local t_ignore={ __newindex=f_ignore }
+function table.setmetatableindex(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="empty" then
+ m.__index=f_empty
+ elseif f=="key" then
+ m.__index=f_self
+ elseif f=="table" then
+ m.__index=f_table
+ else
+ m.__index=f
+ end
+ else
+ if f=="empty" then
+ setmetatable(t,t_empty)
+ elseif f=="key" then
+ setmetatable(t,t_self)
+ elseif f=="table" then
+ setmetatable(t,t_table)
+ else
+ setmetatable(t,{ __index=f })
+ end
+ end
+ return t
+end
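+-- A minimal usage sketch (illustration only): the second argument is either an
+-- index function or one of the shortcuts "empty", "key" and "table":
+--
+--   local t = table.setmetatableindex({},"table")
+--   t.foo.bar = 1       -- unknown keys automatically become subtables
+--   local s = table.setmetatableindex({},"key")
+--   s.whatever          --> "whatever"   (keys map onto themselves)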
+function table.setmetatablenewindex(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="ignore" then
+ m.__newindex=f_ignore
+ else
+ m.__newindex=f
+ end
+ else
+ if f=="ignore" then
+ setmetatable(t,t_ignore)
+ else
+ setmetatable(t,{ __newindex=f })
+ end
+ end
+ return t
+end
+function table.setmetatablecall(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__call=f
+ else
+ setmetatable(t,{ __call=f })
+ end
+ return t
+end
+function table.setmetatablekey(t,key,value)
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m[key]=value
+ return t
+end
+function table.getmetatablekey(t,key,value)
+ local m=getmetatable(t)
+ return m and m[key]
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-prs"] = package.loaded["util-prs"] or true
+
+-- original size: 16976, stripped down to: 12143
+
+if not modules then modules={} end modules ['util-prs']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lpeg,table,string=lpeg,table,string
+local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local concat,format,gmatch,find=table.concat,string.format,string.gmatch,string.find
+local tostring,type,next,rawset=tostring,type,next,rawset
+utilities=utilities or {}
+local parsers=utilities.parsers or {}
+utilities.parsers=parsers
+local patterns=parsers.patterns or {}
+parsers.patterns=patterns
+local setmetatableindex=table.setmetatableindex
+local sortedhash=table.sortedhash
+local digit=R("09")
+local space=P(' ')
+local equal=P("=")
+local comma=P(",")
+local lbrace=P("{")
+local rbrace=P("}")
+local lparent=P("(")
+local rparent=P(")")
+local period=S(".")
+local punctuation=S(".,:;")
+local spacer=lpegpatterns.spacer
+local whitespace=lpegpatterns.whitespace
+local newline=lpegpatterns.newline
+local anything=lpegpatterns.anything
+local endofstring=lpegpatterns.endofstring
+local nobrace=1-(lbrace+rbrace )
+local noparent=1-(lparent+rparent)
+local escape,left,right=P("\\"),P('{'),P('}')
+lpegpatterns.balanced=P {
+ [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
+ [2]=left*V(1)*right
+}
+local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
+local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
+local spaces=space^0
+local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
+local content=(1-endofstring)^0
+lpegpatterns.nestedbraces=nestedbraces
+lpegpatterns.nestedparents=nestedparents
+lpegpatterns.nested=nestedbraces
+lpegpatterns.argument=argument
+lpegpatterns.content=content
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local key=C((1-equal-comma)^1)
+local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
+local pattern_c=(space+comma)^0*(key*equal*value)
+local key=C((1-space-equal-comma)^1)
+local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
+local hash={}
+local function set(key,value)
+ hash[key]=value
+end
+local pattern_a_s=(pattern_a/set)^1
+local pattern_b_s=(pattern_b/set)^1
+local pattern_c_s=(pattern_c/set)^1
+patterns.settings_to_hash_a=pattern_a_s
+patterns.settings_to_hash_b=pattern_b_s
+patterns.settings_to_hash_c=pattern_c_s
+function parsers.make_settings_to_hash_pattern(set,how)
+ if how=="strict" then
+ return (pattern_c/set)^1
+ elseif how=="tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
+end
+function parsers.settings_to_hash(str,existing)
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return {}
+ end
+end
+function parsers.settings_to_hash_tolerant(str,existing)
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return {}
+ end
+end
+function parsers.settings_to_hash_strict(str,existing)
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
+end
+local separator=comma*space^0
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local pattern=spaces*Ct(value*(separator*value)^0)
+patterns.settings_to_array=pattern
+function parsers.settings_to_array(str,strict)
+ if not str or str=="" then
+ return {}
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
+ else
+ return lpegmatch(pattern,str)
+ end
+end
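+-- A minimal usage sketch of the two main splitters (illustration only):
+--
+--   parsers.settings_to_hash("a=1,b={x,y},c")  --> { a = "1", b = "x,y", c = "" }
+--   parsers.settings_to_array("a,{b,c},d")     --> { "a", "b,c", "d" }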
+local function set(t,v)
+ t[#t+1]=v
+end
+local value=P(Carg(1)*value)/set
+local pattern=value*(separator*value)^0*Carg(1)
+function parsers.add_settings_to_array(t,str)
+ return lpegmatch(pattern,str,nil,t)
+end
+function parsers.hash_to_string(h,separator,yes,no,strict,omit)
+ if h then
+ local t,tn,s={},0,table.sortedkeys(h)
+ omit=omit and table.tohash(omit)
+ for i=1,#s do
+ local key=s[i]
+ if not omit or not omit[key] then
+ local value=h[key]
+ if type(value)=="boolean" then
+ if yes and no then
+ if value then
+ tn=tn+1
+ t[tn]=key..'='..yes
+ elseif not strict then
+ tn=tn+1
+ t[tn]=key..'='..no
+ end
+ elseif value or not strict then
+ tn=tn+1
+ t[tn]=key..'='..tostring(value)
+ end
+ else
+ tn=tn+1
+ t[tn]=key..'='..value
+ end
+ end
+ end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.array_to_string(a,separator)
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.settings_to_set(str,t)
+ t=t or {}
+ for s in gmatch(str,"[^, ]+") do
+ t[s]=true
+ end
+ return t
+end
+function parsers.simple_hash_to_string(h,separator)
+ local t,tn={},0
+ for k,v in sortedhash(h) do
+ if v then
+ tn=tn+1
+ t[tn]=k
+ end
+ end
+ return concat(t,separator or ",")
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
+local pattern_a=spaces*Ct(value*(separator*value)^0)
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s=lpegmatch(pattern_a,str)
+ if n==1 then
+ return unpack(s)
+ else
+ local t,tn={},0
+ for i=1,n do
+ for j=1,#s do
+ tn=tn+1
+ t[tn]=s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
+local pattern_b=spaces*Ct(value*(separator*value)^0)
+function parsers.settings_to_array_with_repeat(str,expand)
+ if expand then
+ return lpegmatch(pattern_b,str) or {}
+ else
+ return lpegmatch(pattern_a,str) or {}
+ end
+end
+local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
+local pattern=Ct((space+value)^0)
+function parsers.arguments_to_table(str)
+ return lpegmatch(pattern,str)
+end
+function parsers.getparameters(self,class,parentclass,settings)
+ local sc=self[class]
+ if not sc then
+ sc={}
+ self[class]=sc
+ if parentclass then
+ local sp=self[parentclass]
+ if not sp then
+ sp={}
+ self[parentclass]=sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
+end
+function parsers.listitem(str)
+ return gmatch(str,"[^, ]+")
+end
+local pattern=Cs { "start",
+ start=V("one")+V("two")+V("three"),
+ rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
+ thousand=digit*digit*digit,
+ one=digit*V("rest"),
+ two=digit*digit*V("rest"),
+ three=V("thousand")*V("rest"),
+}
+lpegpatterns.splitthousands=pattern
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+local optionalwhitespace=whitespace^0
+lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
+lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
+lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
+local dquote=P('"')
+local equal=P('=')
+local escape=P('\\')
+local separator=S(' ,')
+local key=C((1-equal)^1)
+local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
+local pattern=Cf(Ct("")*Cg(key*equal*value)*separator^0,rawset)^0*P(-1)
+patterns.keq_to_hash_c=pattern
+function parsers.keq_to_hash(str)
+ if str and str~="" then
+ return lpegmatch(pattern,str)
+ else
+ return {}
+ end
+end
+local defaultspecification={ separator=",",quote='"' }
+function parsers.csvsplitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local whatever=C((1-separator-newline)^0)
+ if quotechar and quotechar~="" then
+ local quotedata=nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar=P(chr)
+ local quoteword=quotechar*C((1-quotechar)^0)*quotechar
+ if quotedata then
+ quotedata=quotedata+quoteword
+ else
+ quotedata=quoteword
+ end
+ end
+ whatever=quotedata+whatever
+ end
+ local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
+function parsers.rfc4180splitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=P(specification.quote)
+ local dquotechar=quotechar*quotechar/specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
+ local non_escaped=C((1-quotechar-newline-separator)^1)
+ local field=escaped+non_escaped
+ local record=Ct((field*separator^-1)^1)
+ local headerline=record*Cp()
+ local wholeblob=Ct((newline^-1*record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header,position=lpegmatch(headerline,data)
+ local data=lpegmatch(wholeblob,data,position)
+ return data,header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
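+-- A minimal usage sketch (illustration only; the defaults are a comma separator
+-- and double quotes, a doubled quote inside a quoted field becomes one quote):
+--
+--   local split = parsers.rfc4180splitter()
+--   split('a,"b,c"\n1,2\n')  --> { { "a", "b,c" }, { "1", "2" } }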
+local function ranger(first,last,n,action)
+ if not first then
+ elseif last==true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+local cardinal=lpegpatterns.cardinal/tonumber
+local spacers=lpegpatterns.spacer^0
+local endofstring=lpegpatterns.endofstring
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
+function parsers.stepper(str,n,action)
+ if type(n)=="function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+patterns.unittotex=pattern
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+local cache={}
+local spaces=lpeg.patterns.space^0
+local dummy=function() end
+table.setmetatableindex(cache,function(t,k)
+ local separator=P(k)
+ local value=(1-separator)^0
+ local pattern=spaces*C(value)*separator^0*Cp()
+ t[k]=pattern
+ return pattern
+end)
+local commalistiterator=cache[","]
+function utilities.parsers.iterator(str,separator)
+ local n=#str
+ if n==0 then
+ return dummy
+ else
+ local pattern=separator and cache[separator] or commalistiterator
+ local p=1
+ return function()
+ if p<=n then
+ local s,e=lpegmatch(pattern,str,p)
+ if e then
+ p=e
+ return s
+ end
+ end
+ end
+ end
+end
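+-- A minimal usage sketch (illustration only; the default separator is a comma):
+--
+--   for s in utilities.parsers.iterator("a,b,c") do
+--     print(s)   -- prints "a", "b" and "c" in turn
+--   end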
+local function initialize(t,name)
+ local source=t[name]
+ if source then
+ local result={}
+ for k,v in next,t[name] do
+ result[k]=v
+ end
+ return result
+ else
+ return {}
+ end
+end
+local function fetch(t,name)
+ return t[name] or {}
+end
+local function process(result,more)
+ for k,v in next,more do
+ result[k]=v
+ end
+ return result
+end
+local name=C((1-S(", "))^1)
+local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
+local merge=Cf(parser,process)
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
+
+-- original size: 2274, stripped down to: 1781
+
+if not modules then modules={} end modules ['util-fmt']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.formatters=utilities.formatters or {}
+local formatters=utilities.formatters
+local concat,format=table.concat,string.format
+local tostring,type=tostring,type
+local strip=string.strip
+local lpegmatch=lpeg.match
+local stripper=lpeg.patterns.stripzeros
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
+end
+function formatters.formatcolumns(result,between)
+ if result and #result>0 then
+ between=between or " "
+ local widths,numbers={},{}
+ local first=result[1]
+ local n=#first
+ for i=1,n do
+ widths[i]=0
+ end
+ for i=1,#result do
+ local r=result[i]
+ for j=1,n do
+ local rj=r[j]
+ local tj=type(rj)
+ if tj=="number" then
+ numbers[j]=true
+ end
+ if tj~="string" then
+ rj=tostring(rj)
+ r[j]=rj
+ end
+ local w=#rj
+ if w>widths[j] then
+ widths[j]=w
+ end
+ end
+ end
+ for i=1,n do
+ local w=widths[i]
+ if numbers[i] then
+ if w>80 then
+ widths[i]="%s"..between
+ else
+ widths[i]="%0"..w.."i"..between
+ end
+ else
+ if w>80 then
+ widths[i]="%s"..between
+ elseif w>0 then
+ widths[i]="%-"..w.."s"..between
+ else
+ widths[i]="%s"
+ end
+ end
+ end
+ local template=strip(concat(widths))
+ for i=1,#result do
+ local str=format(template,unpack(result[i]))
+ result[i]=strip(str)
+ end
+ end
+ return result
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-set"] = package.loaded["trac-set"] or true
+
+-- original size: 12365, stripped down to: 8799
+
+if not modules then modules={} end modules ['trac-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring=type,next,tostring
+local concat=table.concat
+local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
+local is_boolean=string.is_boolean
+local settings_to_hash=utilities.parsers.settings_to_hash
+local allocate=utilities.storage.allocate
+utilities=utilities or {}
+local utilities=utilities
+local setters=utilities.setters or {}
+utilities.setters=setters
+local data={}
+local trace_initialize=false
+function setters.initialize(filename,name,values)
+ local setter=data[name]
+ if setter then
+ frozen=true
+ local data=setter.data
+ if data then
+ for key,newvalue in next,values do
+ local newvalue=is_boolean(newvalue,newvalue)
+ local functions=data[key]
+ if functions then
+ local oldvalue=functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
+ end
+ elseif #functions>0 and not oldvalue then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value=newvalue
+ functions.frozen=functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ functions={ default=newvalue,frozen=frozen }
+ data[key]=functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
+ end
+ end
+ return true
+ end
+ end
+end
+local function set(t,what,newvalue)
+ local data=t.data
+ if not data.frozen then
+ local done=t.done
+ if type(what)=="string" then
+ what=settings_to_hash(what)
+ end
+ if type(what)~="table" then
+ return
+ end
+ if not done then
+ done={}
+ t.done=done
+ end
+ for w,value in next,what do
+ if value=="" then
+ value=newvalue
+ elseif not value then
+ value=false
+ else
+ value=is_boolean(value,value)
+ end
+ w=topattern(w,true,true)
+ for name,functions in next,data do
+ if done[name] then
+ elseif find(name,w) then
+ done[name]=true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value=value
+ end
+ end
+ end
+ end
+end
+local function reset(t)
+ local data=t.data
+ if not data.frozen then
+ for name,functions in next,data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value=false
+ end
+ end
+end
+local function enable(t,what)
+ set(t,what,true)
+end
+local function disable(t,what)
+ local data=t.data
+ if not what or what=="" then
+ t.done={}
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+function setters.register(t,what,...)
+ local data=t.data
+ what=lower(what)
+ local functions=data[what]
+ if not functions then
+ functions={}
+ data[what]=functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default=functions.default
+ for i=1,select("#",...) do
+ local fnc=select(i,...)
+ local typ=type(fnc)
+ if typ=="string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s=fnc
+ fnc=function(value) set(t,s,value) end
+ elseif typ~="function" then
+ fnc=nil
+ end
+ if fnc then
+ functions[#functions+1]=fnc
+ local value=functions.value or default
+ if value~=nil then
+ fnc(value)
+ functions.value=value
+ end
+ end
+ end
+ return false
+end
+function setters.enable(t,what)
+ local e=t.enable
+ t.enable,t.done=enable,{}
+ enable(t,what)
+ t.enable,t.done=e,{}
+end
+function setters.disable(t,what)
+ local e=t.disable
+ t.disable,t.done=disable,{}
+ disable(t,what)
+ t.disable,t.done=e,{}
+end
+function setters.reset(t)
+ t.done={}
+ reset(t)
+end
+function setters.list(t)
+ local list=table.sortedkeys(t.data)
+ local user,system={},{}
+ for l=1,#list do
+ local what=list[l]
+ if find(what,"^%*") then
+ system[#system+1]=what
+ else
+ user[#user+1]=what
+ end
+ end
+ return user,system
+end
+function setters.show(t)
+ local category=t.name
+ local list=setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name=list[k]
+ local functions=t.data[name]
+ if functions then
+ local value,default,modules=functions.value,functions.default,#functions
+ value=value==nil and "unset" or tostring(value)
+ default=default==nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+local function default(setter,name)
+ local d=setter.data[name]
+ return d and d.default
+end
+local function value(setter,name)
+ local d=setter.data[name]
+ return d and (d.value or d.default)
+end
+function setters.new(name)
+ local setter
+ setter={
+ data=allocate(),
+ name=name,
+ report=function(...) setters.report (setter,...) end,
+ enable=function(...) enable (setter,...) end,
+ disable=function(...) disable (setter,...) end,
+ register=function(...) register(setter,...) end,
+ list=function(...) list (setter,...) end,
+ show=function(...) show (setter,...) end,
+ default=function(...) return default (setter,...) end,
+ value=function(...) return value (setter,...) end,
+ }
+ data[name]=setter
+ return setter
+end
+trackers=setters.new("trackers")
+directives=setters.new("directives")
+experiments=setters.new("experiments")
+local t_enable,t_disable=trackers .enable,trackers .disable
+local d_enable,d_disable=directives .enable,directives .disable
+local e_enable,e_disable=experiments.enable,experiments.disable
+local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
+local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
+function directives.enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
+end
+function directives.disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
+end
+function experiments.enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
+end
+function experiments.disable(...)
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+directives.register("system.nostatistics",function(v)
+ if statistics then
+ statistics.enable=not v
+ else
+ end
+end)
+directives.register("system.nolibraries",function(v)
+ if libraries then
+ libraries=nil
+ else
+ end
+end)
+if environment then
+ local engineflags=environment.engineflags
+ if engineflags then
+ local list=engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
+ end
+ local list=engineflags["c:directives"] or engineflags["directives"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","directives",settings_to_hash(list))
+ end
+ end
+end
+if texconfig then
+ local function set(k,v)
+ v=tonumber(v)
+ if v then
+ texconfig[k]=v
+ end
+ end
+ directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize",function(v) set("param_size",v) end)
+ directives.register("luatex.savesize",function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
+end
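+-- A minimal usage sketch of the setter mechanism above (illustration only;
+-- "mymodule.whatever" is a made-up key):
+--
+--   local trace_whatever = false
+--   trackers.register("mymodule.whatever",function(v) trace_whatever = v end)
+--   trackers.enable("mymodule.*")             -- keys are matched as patterns
+--   directives.enable("system.nostatistics")  -- switches off the statistics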
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-log"] = package.loaded["trac-log"] or true
+
+-- original size: 21795, stripped down to: 14194
+
+if not modules then modules={} end modules ['trac-log']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
+local format,gmatch,find=string.format,string.gmatch,string.find
+local concat,insert,remove=table.concat,table.insert,table.remove
+local topattern=string.topattern
+local texcount=tex and tex.count
+local next,type,select=next,type,select
+local utfchar=utf.char
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+logs=logs or {}
+local logs=logs
+local moreinfo=[[
+More information about ConTeXt and the tools that come with it can be found at:
+]].."\n"..[[
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+utilities.strings.formatters.add (
+ formatters,"unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+utilities.strings.formatters.add (
+ formatters,"chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
+local function ignore() end
+setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
+local report,subreport,status,settarget,setformats,settranslations
+local direct,subdirect,writer,pushtarget,poptarget
+if tex and (tex.jobname or tex.formatname) then
+ local valueiskey={ __index=function(t,k) t[k]=k return k end }
+ local target="term and log"
+ logs.flush=io.flush
+ local formats={} setmetatable(formats,valueiskey)
+ local translations={} setmetatable(translations,valueiskey)
+ writer=function(...)
+ write_nl(target,...)
+ end
+ newline=function()
+ write_nl(target,"\n")
+ end
+ local f_one=formatters["%-15s > %s\n"]
+ local f_two=formatters["%-15s >\n"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s"]
+ local f_two=formatters["%-15s >"]
+ direct=function(a,b,c,...)
+ if c then
+ return f_one(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],formats[b])
+ elseif a then
+ return f_two(translations[a])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s\n"]
+ local f_two=formatters["%-15s > %s >\n"]
+ subreport=function(a,s,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s"]
+ local f_two=formatters["%-15s > %s >"]
+ subdirect=function(a,s,b,c,...)
+ if c then
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],translations[s],formats[b])
+ elseif a then
+ return f_two(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ }
+ settarget=function(whereto)
+ target=targets[whereto or "both"] or targets.both
+ if target=="term" or target=="term and log" then
+ logs.flush=io.flush
+ else
+ logs.flush=ignore
+ end
+ end
+ local stack={}
+ pushtarget=function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+ poptarget=function()
+ if #stack>0 then
+ settarget(remove(stack))
+ end
+ end
+ setformats=function(f)
+ formats=f
+ end
+ settranslations=function(t)
+ translations=t
+ end
+else
+ logs.flush=ignore
+ writer=write_nl
+ newline=function()
+ write_nl("\n")
+ end
+ local f_one=formatters["%-15s | %s"]
+ local f_two=formatters["%-15s |"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s | %s | %s"]
+ local f_two=formatters["%-15s | %s |"]
+ subreport=function(a,sub,b,c,...)
+ if c then
+ write_nl(f_one(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,sub,b))
+ elseif a then
+ write_nl(f_two(a,sub))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("\n")
+ end
+ end
+ direct=ignore
+ subdirect=ignore
+ settarget=ignore
+ pushtarget=ignore
+ poptarget=ignore
+ setformats=ignore
+ settranslations=ignore
+end
+logs.report=report
+logs.subreport=subreport
+logs.status=status
+logs.settarget=settarget
+logs.pushtarget=pushtarget
+logs.poptarget=poptarget
+logs.setformats=setformats
+logs.settranslations=settranslations
+logs.direct=direct
+logs.subdirect=subdirect
+logs.writer=writer
+logs.newline=newline
+local data,states={},nil
+function logs.reporter(category,subcategory)
+ local logger=data[category]
+ if not logger then
+ local state=false
+ if states==true then
+ state=true
+ elseif type(states)=="table" then
+ for c,_ in next,states do
+ if find(category,c) then
+ state=true
+ break
+ end
+ end
+ end
+ logger={
+ reporters={},
+ state=state,
+ }
+ data[category]=logger
+ end
+ local reporter=logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter=function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
+ end
+ end
+ logger.reporters[subcategory]=reporter
+ else
+ local tag=category
+ reporter=function(...)
+ if not logger.state then
+ report(category,...)
+ end
+ end
+ logger.reporters.default=reporter
+ end
+ end
+ return reporter
+end
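+-- A minimal usage sketch (illustration only; "mymodule" is a made-up category):
+--
+--   local report = logs.reporter("mymodule")
+--   report("loading %a","foo.lua")   -- roughly: "mymodule    > loading 'foo.lua'"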
+logs.new=logs.reporter
+local ctxreport=logs.writer
+function logs.setmessenger(m)
+ ctxreport=m
+end
+function logs.messenger(category,subcategory)
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
+ end
+ end
+end
+local function setblocked(category,value)
+ if category==true then
+ category,value="*",true
+ elseif category==false then
+ category,value="*",false
+ elseif value==nil then
+ value=true
+ end
+ if category=="*" then
+ states=value
+ for k,v in next,data do
+ v.state=value
+ end
+ else
+ states=utilities.parsers.settings_to_hash(category)
+ for c,_ in next,states do
+ if data[c] then
+    data[c].state=value
+ else
+ c=topattern(c,true,true)
+ for k,v in next,data do
+ if find(k,c) then
+ v.state=value
+ end
+ end
+ end
+ end
+ end
+end
+function logs.disable(category,value)
+ setblocked(category,value==nil and true or value)
+end
+function logs.enable(category)
+ setblocked(category,false)
+end
+function logs.categories()
+ return table.sortedkeys(data)
+end
+function logs.show()
+ local n,c,s,max=0,0,0,0
+ for category,v in table.sortedpairs(data) do
+ n=n+1
+ local state=v.state
+ local reporters=v.reporters
+ local nc=#category
+ if nc>c then
+ c=nc
+ end
+ for subcategory,_ in next,reporters do
+ local ns=#subcategory
+   if ns>s then
+ s=ns
+ end
+ local m=nc+ns
+ if m>max then
+ max=m
+ end
+ end
+ local subcategories=concat(table.sortedkeys(reporters),", ")
+ if state==true then
+ state="disabled"
+ elseif state==false then
+ state="enabled"
+ else
+ state="unknown"
+ end
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+end
+local delayed_reporters={}
+setmetatableindex(delayed_reporters,function(t,k)
+ local v=logs.reporter(k.name)
+ t[k]=v
+ return v
+end)
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+directives.register("logs.blocked",function(v)
+ setblocked(v,true)
+end)
+directives.register("logs.target",function(v)
+ settarget(v)
+end)
+local report_pages=logs.reporter("pages")
+local real,user,sub
+function logs.start_page_number()
+ real,user,sub=texcount.realpageno,texcount.userpageno,texcount.subpageno
+end
+local timing=false
+local starttime=nil
+local lasttime=nil
+trackers.register("pages.timing",function(v)
+ starttime=os.clock()
+ timing=true
+end)
+function logs.stop_page_number()
+ if timing then
+ local elapsed,average
+ local stoptime=os.clock()
+ if not lasttime or real<2 then
+ elapsed=stoptime
+ average=stoptime
+ starttime=stoptime
+ else
+ elapsed=stoptime-lasttime
+ average=(stoptime-starttime)/(real-1)
+ end
+ lasttime=stoptime
+ if real<=0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user<=0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real<=0 then
+ report_pages("flushing page")
+ elseif user<=0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ end
+ end
+ logs.flush()
+end
+local report_files=logs.reporter("files")
+local nesting=0
+local verbose=false
+local hasscheme=url.hasscheme
+function logs.show_open(name)
+end
+function logs.show_close(name)
+end
+function logs.show_load(name)
+end
+local simple=logs.reporter("comment")
+logs.simple=simple
+logs.simpleline=simple
+function logs.setprogram () end
+function logs.extendbanner() end
+function logs.reportlines () end
+function logs.reportbanner() end
+function logs.reportline () end
+function logs.simplelines () end
+function logs.help () end
+local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
+local p_newline=lpeg.patterns.newline
+local linewise=(
+ Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
+)^1
+local function reportlines(t,str)
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
+end
+local function reportbanner(t)
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
+end
+local function reportversion(t)
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+local function reporthelp(t,...)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo)=="table" then
+  local n=select("#",...)
+  for i=1,n do
+   reportlines(t,t.helpinfo[select(i,...)])
+   if i<n then
+ t.report()
+ end
+ end
+ end
+end
+local function reportinfo(t)
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+local reporters={
+ lines=reportlines,
+ banner=reportbanner,
+ version=reportversion,
+ help=reporthelp,
+ info=reportinfo,
+ export=reportexport,
+}
+local exporters={
+}
+logs.reporters=reporters
+logs.exporters=exporters
+function logs.application(t)
+ t.name=t.name or "unknown"
+ t.banner=t.banner
+ t.moreinfo=moreinfo
+ t.report=logs.reporter(t.name)
+ t.help=function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export=function(...)
+ reporters.export(t,...)
+ end
+ t.identify=function()
+ reporters.banner(t)
+ end
+ t.version=function()
+ reporters.version(t)
+ end
+ return t
+end
+function logs.system(whereto,process,jobname,category,...)
+ local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f=io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
+ end
+ end
+end
+local report_system=logs.reporter("system","logs")
+function logs.obsolete(old,new)
+ local o=loadstring("return "..new)()
+ if type(o)=="function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old.."="..new.." return "..old)()(...)
+ end
+ elseif type(o)=="table" then
+ local t,m={},{}
+ m.__index=function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ return o[k]
+ end
+ m.__newindex=function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ o[k]=v
+ end
+ if libraries then
+ libraries.obsolete[old]=t
+ end
+ setmetatable(t,m)
+ return t
+ end
+end
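+-- Sketch of how logs.obsolete is meant to be used (hypothetical names): the
+-- returned stub reports the old name once and then forwards to the new one:
+--
+--   mylib.oldname = logs.obsolete("mylib.oldname","mylib.newname")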
+if utilities then
+ utilities.report=report_system
+end
+if tex and tex.error then
+ function logs.texerrormessage(...)
+ tex.error(format(...),{})
+ end
+else
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
+end
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
+
+-- original size: 5791, stripped down to: 4540
+
+if not modules then modules={} end modules ['trac-inf']={
+ version=1.001,
+ comment="companion to trac-inf.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+local format,lower=string.format,string.lower
+local concat=table.concat
+local clock=os.gettimeofday or os.clock
+statistics=statistics or {}
+local statistics=statistics
+statistics.enable=true
+statistics.threshold=0.01
+local statusinfo,n,registered,timers={},0,{},{}
+table.setmetatableindex(timers,function(t,k)
+ local v={ timing=0,loadtime=0 }
+ t[k]=v
+ return v
+end)
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+local function resettiming(instance)
+ timers[instance or "notimer"]={ timing=0,loadtime=0 }
+end
+local function starttiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing or 0
+ if it==0 then
+ timer.starttime=clock()
+ if not timer.loadtime then
+ timer.loadtime=0
+ end
+ end
+ timer.timing=it+1
+end
+local function stoptiming(instance,report)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing
+ if it>1 then
+ timer.timing=it-1
+ else
+ local starttime=timer.starttime
+ if starttime then
+ local stoptime=clock()
+ local loadtime=stoptime-starttime
+ timer.stoptime=stoptime
+ timer.loadtime=timer.loadtime+loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
+ end
+ timer.timing=0
+ return loadtime
+ end
+ end
+ return 0
+end
+local function elapsed(instance)
+ if type(instance)=="number" then
+ return instance or 0
+ else
+ local timer=timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+local function elapsedindeed(instance)
+ return elapsed(instance)>statistics.threshold
+end
+local function elapsedseconds(instance,rest)
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s",elapsed(instance),rest or "")
+ end
+end
+statistics.hastiming=hastiming
+statistics.resettiming=resettiming
+statistics.starttiming=starttiming
+statistics.stoptiming=stoptiming
+statistics.elapsed=elapsed
+statistics.elapsedtime=elapsedtime
+statistics.elapsedindeed=elapsedindeed
+statistics.elapsedseconds=elapsedseconds
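+-- Timing sketch (illustrative): timers are keyed by an arbitrary instance;
+-- nested starttiming calls are counted and only the outermost stop measures:
+--
+--   statistics.starttiming("myjob")
+--   -- ... work ...
+--   statistics.stoptiming("myjob")
+--   print(statistics.elapsedtime("myjob")) -- e.g. "0.123"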
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc)=="function" then
+ local rt=registered[tag] or (#statusinfo+1)
+ statusinfo[rt]={ tag,fnc }
+ registered[tag]=rt
+ if #tag>n then n=#tag end
+ end
+end
+local report=logs.reporter("mkiv lua stats")
+function statistics.show()
+ if statistics.enable then
+ local register=statistics.register
+ register("luatex banner",function()
+ return lower(status.banner)
+ end)
+ register("control sequences",function()
+ return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
+ end)
+ register("callbacks",function()
+ local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
+ end)
+ if jit then
+ local status={ jit.status() }
+ if status[1] then
+ register("luajit status",function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ register("current memory usage",statistics.memused)
+ register("runtime",statistics.runtime)
+ logs.newline()
+ for i=1,#statusinfo do
+ local s=statusinfo[i]
+ local r=s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ statistics.enable=false
+ end
+end
+function statistics.memused()
+ local round=math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
+end
+starttiming(statistics)
+function statistics.formatruntime(runtime)
+ return format("%s seconds",runtime)
+end
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+local report=logs.reporter("system")
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+commands=commands or {}
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ context(elapsedtime(name or "whatever"))
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
+
+-- original size: 5773, stripped down to: 3453
+
+if not modules then modules={} end modules ['trac-pro']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
+local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
+local report_system=logs.reporter("system","protection")
+namespaces=namespaces or {}
+local namespaces=namespaces
+local registered={}
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
+end
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
+end
+local function register(name)
+ local data=name=="global" and _G or _G[name]
+ if not data then
+ return
+ end
+ registered[name]=data
+ local m=getmetatable(data)
+ if not m then
+ m={}
+ setmetatable(data,m)
+ end
+ local index,newindex={},{}
+ m.__saved__index=m.__index
+ m.__no__index=function(t,k)
+ if not index[k] then
+ index[k]=true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex=m.__newindex
+ m.__no__newindex=function(t,k,v)
+ if not newindex[k] then
+ newindex[k]=true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth=0
+end
+local function private(name)
+ local data=registered[name]
+ if not data then
+ data=_G[name]
+ if not data then
+ data={}
+ _G[name]=data
+ end
+ register(name)
+ end
+ return data
+end
+local function protect(name)
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>0 then
+ m.__protection__depth=pd+1
+ else
+  m.__saved__index,m.__saved__newindex=m.__index,m.__newindex
+ m.__index,m.__newindex=m.__no__index,m.__no__newindex
+ m.__protection__depth=1
+ end
+end
+local function unprotect(name)
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>1 then
+ m.__protection__depth=pd-1
+ else
+ m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
+ m.__protection__depth=0
+ end
+end
+local function protectall()
+ for name,_ in next,registered do
+ if name~="global" then
+ protect(name)
+ end
+ end
+end
+local function unprotectall()
+ for name,_ in next,registered do
+ if name~="global" then
+ unprotect(name)
+ end
+ end
+end
+namespaces.register=register
+namespaces.private=private
+namespaces.protect=protect
+namespaces.unprotect=unprotect
+namespaces.protectall=protectall
+namespaces.unprotectall=unprotectall
+namespaces.private("namespaces") registered={} register("global")
+directives.register("system.protect",function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+directives.register("system.checkglobals",function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
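+-- Protection sketch (illustrative): after protecting a registered namespace,
+-- reads of unknown keys and new assignments are reported (once per key):
+--
+--   local t = namespaces.private("mydata") -- hypothetical namespace
+--   namespaces.protect("mydata")
+--   local x = t.unknownkey                 -- reported, returns nil
+--   namespaces.unprotect("mydata")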
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lua"] = package.loaded["util-lua"] or true
+
+-- original size: 12575, stripped down to: 8700
+
+if not modules then modules={} end modules ['util-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment="the strip code is written by Peter Cawley",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
+local load,loadfile,type=load,loadfile,type
+utilities=utilities or {}
+utilities.lua=utilities.lua or {}
+local luautilities=utilities.lua
+local report_lua=logs.reporter("system","lua")
+local tracestripping=false
+local forcestupidcompile=true
+luautilities.stripcode=true
+luautilities.alwaysstripcode=false
+luautilities.nofstrippedchunks=0
+luautilities.nofstrippedbytes=0
+local strippedchunks={}
+luautilities.strippedchunks=strippedchunks
+luautilities.suffixes={
+ tma="tma",
+ tmc=jit and "tmb" or "tmc",
+ lua="lua",
+ luc=jit and "lub" or "luc",
+ lui="lui",
+ luv="luv",
+ luj="luj",
+ tua="tua",
+ tuc="tuc",
+}
+if jit or status.luatex_version>=74 then
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ if code and code~="" then
+ code=load(code)
+ if code then
+ code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code~="" then
+     register(luafile)
+ io.savedata(lucfile,code)
+ return true,0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false,0
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ name=name or fullname
+ local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code=dump(code,true)
+ end
+ return load(code),0
+ end
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=stupidcompile(luafile,lucfile,strip~=false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+ function luautilities.loadstripped(...)
+ local l=load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
+else
+ local function register(name,before,after)
+ local delta=before-after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ luautilities.nofstrippedbytes=luautilities.nofstrippedbytes+delta
+ return delta
+ end
+ local strip_code_pc
+ if _MAJORVERSION==5 and _MINORVERSION==1 then
+ strip_code_pc=function(dump,name)
+ local before=#dump
+ local version,format,endian,int,size,ins,num=byte(dump,5,11)
+ local subint
+ if endian==1 then
+ subint=function(dump,i,l)
+ local val=0
+ for n=l,1,-1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ else
+ subint=function(dump,i,l)
+ local val=0
+ for n=1,l,1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ end
+ local strip_function
+ strip_function=function(dump)
+ local count,offset=subint(dump,1,size)
+ local stripped,dirty=rep("\0",size),offset+count
+ offset=offset+count+int*2+4
+ offset=offset+int+subint(dump,offset,int)*ins
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ local t
+ t,offset=subint(dump,offset,1)
+ if t==1 then
+ offset=offset+1
+ elseif t==4 then
+ offset=offset+size+subint(dump,offset,size)
+ elseif t==3 then
+ offset=offset+num
+ end
+ end
+ count,offset=subint(dump,offset,int)
+ stripped=stripped..sub(dump,dirty,offset-1)
+ for n=1,count do
+ local proto,off=strip_function(sub(dump,offset,-1))
+ stripped,offset=stripped..proto,offset+off-1
+ end
+ offset=offset+subint(dump,offset,int)*int+int
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size+int*2
+ end
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size
+ end
+ stripped=stripped..rep("\0",int*3)
+ return stripped,offset
+ end
+ dump=sub(dump,1,12)..strip_function(sub(dump,13,-1))
+ local after=#dump
+ local delta=register(name,before,after)
+ return dump,delta
+ end
+ else
+ strip_code_pc=function(dump,name)
+ return dump,0
+ end
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+  local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip then
+ local code,n=strip_code_pc(dump(code),name)
+ return load(code),n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ local n=0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code,n=strip_code_pc(dump(code),name)
+ end
+ return load(code),n
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ local n=0
+ if code and code~="" then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code=dump(code)
+ if strip then
+ code,n=strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile)
+ end
+ if code and code~="" then
+ io.savedata(lucfile,code)
+ end
+ end
+ return n
+ end
+ local luac_normal="texluac -o %q %q"
+ local luac_strip="texluac -s -o %q %q"
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=false
+ if strip~=false then
+ strip=true
+ end
+ if forcestupidcompile then
+ fallback=true
+ elseif strip then
+ done=os.spawn(format(luac_strip,lucfile,luafile))==0
+ else
+ done=os.spawn(format(luac_normal,lucfile,luafile))==0
+ end
+ if not done and fallback then
+ local n=stupidcompile(luafile,lucfile,strip)
+ if n>0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup=false
+ done=true
+ end
+ if done and cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
+ luautilities.loadstripped=loadstring
+end
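+-- Compilation sketch (illustrative, hypothetical file names): both branches
+-- above expose the same luautilities.compile(luafile,lucfile,cleanup,strip,fallback):
+--
+--   luautilities.compile("whatever.lua","whatever.luc",false,true,true)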
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-deb"] = package.loaded["util-deb"] or true
+
+-- original size: 3708, stripped down to: 2568
+
+if not modules then modules={} end modules ['util-deb']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local debug=require "debug"
+local getinfo=debug.getinfo
+local type,next,tostring=type,next,tostring
+local format,find=string.format,string.find
+local is_boolean=string.is_boolean
+utilities=utilities or {}
+local debugger=utilities.debugger or {}
+utilities.debugger=debugger
+local counters={}
+local names={}
+local report=logs.reporter("debugger")
+local function hook()
+ local f=getinfo(2)
+ if f then
+ local n="unknown"
+ if f.what=="C" then
+ n=f.name or '<anonymous>'
+ if not names[n] then
+ names[n]=format("%42s",n)
+ end
+ else
+ n=f.name or f.namewhat or f.what
+ if not n or n=="" then
+ n="?"
+ end
+ if not names[n] then
+ names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n]=(counters[n] or 0)+1
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer=printer or report
+ threshold=threshold or 0
+ local total,grandtotal,functions=0,0,0
+ local dataset={}
+ for name,count in next,counters do
+ dataset[#dataset+1]={ name,count }
+ end
+ table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
+ for i=1,#dataset do
+ local d=dataset[i]
+ local name=d[1]
+ local count=d[2]
+ if count>threshold and not find(name,"for generator") then
+ printer(format("%8i %s\n",count,names[name]))
+ total=total+count
+ end
+ grandtotal=grandtotal+count
+ functions=functions+1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n",functions))
+ printer(format("total : % 10i\n",total))
+ printer(format("grand total: % 10i\n",grandtotal))
+ printer(format("threshold : % 10i\n",threshold))
+end
+function debugger.savestats(filename,threshold)
+ local f=io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+function debugger.disable()
+ debug.sethook()
+end
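+-- Profiling sketch (illustrative): the call hook counts function entries while
+-- enabled; the stats can then be printed or saved to a (hypothetical) file:
+--
+--   debugger.enable()
+--   -- ... code under investigation ...
+--   debugger.disable()
+--   debugger.savestats("calls.log",10)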
+function traceback()
+ local level=1
+ while true do
+ local info=debug.getinfo(level,"Sl")
+ if not info then
+ break
+ elseif info.what=="C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level=level+1
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
+
+-- original size: 7294, stripped down to: 5798
+
+if not modules then modules={} end modules ['util-mrg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gsub,format=string.gsub,string.format
+local concat=table.concat
+local type,next=type,next
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+utilities=utilities or {}
+local merger=utilities.merger or {}
+utilities.merger=merger
+merger.strip_comment=true
+local report=logs.reporter("system","merge")
+utilities.report=report
+local m_begin_merge="begin library merge"
+local m_end_merge="end library merge"
+local m_begin_closure="do -- create closure to overcome 200 locals limit"
+local m_end_closure="end -- of closure"
+local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
+local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
+local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
+local m_report=[[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
+local function self_fake()
+ return m_faked
+end
+local function self_nothing()
+ return ""
+end
+local function self_load(name)
+ local data=io.loaddata(name) or ""
+ if data=="" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+local space=patterns.space
+local eol=patterns.newline
+local equals=P("=")^0
+local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
+local close=P("]")*C(equals)*P("]")
+local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
+local longstring=open*(1-closeeq)^0*close
+local quoted=patterns.quoted
+local emptyline=space^0*eol
+local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
+local operator2=S("*+/")
+local operator3=S("-")
+local separator=S(",;")
+local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
+local strings=quoted
+local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
+local longstr=longstring
+local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
+local pack=((eol+space)^0/"")*operator1*((eol+space)^0/"")+((eol+space)^0/"")*operator2*((space)^0/"")+((eol+space)^1/"")*operator3*((space)^1/"")+((space)^0/"")*separator*((space)^0/"")
+local lines=emptyline^2/"\n"
+local spaces=(space*space)/" "
+local compact=Cs ((
+ ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
+)^1 )
+local strip=Cs((emptyline^2/"\n"+1)^0)
+local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+local function self_compact(data)
+ local delta=0
+ if merger.strip_comment then
+ local before=#data
+ data=lpegmatch(compact,data)
+ data=lpegmatch(strip,data)
+ local after=#data
+ delta=before-after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data,delta
+end
+local function self_save(name,data)
+ if data~="" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+local function self_swap(data,code)
+ return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
+end
+local function self_libs(libs,list)
+ local result,f,frozen,foundpath={},nil,false,nil
+ result[#result+1]="\n"
+ if type(libs)=='string' then libs={ libs } end
+ if type(list)=='string' then list={ list } end
+ for i=1,#libs do
+ local lib=libs[i]
+ for j=1,#list do
+ local pth=gsub(list[j],"\\","/")
+ report("checking library path %a",pth)
+ local name=pth.."/"..lib
+ if lfs.isfile(name) then
+ foundpath=pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right,wrong,original,stripped={},{},0,0
+ for i=1,#libs do
+ local lib=libs[i]
+ local fullname=foundpath.."/"..lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded=file.nameonly(lib)
+ local data=io.loaddata(fullname,true)
+ original=original+#data
+ local data,delta=self_compact(data)
+ right[#right+1]=lib
+ result[#result+1]=m_begin_closure
+ result[#result+1]=format(m_preloaded,preloaded,preloaded)
+ result[#result+1]=data
+ result[#result+1]=m_end_closure
+ stripped=stripped+delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1]=lib
+ end
+ end
+ right=#right>0 and concat(right," ") or "-"
+ wrong=#wrong>0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1]=format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result,"\n\n")
+end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
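+-- Merging sketch (illustrative, hypothetical names): selfmerge replaces the
+-- section between the "begin/end library merge" markers in the given stub:
+--
+--   merger.selfmerge("mtxrun.lua",{ "l-lpeg.lua","l-table.lua" },{ "data" })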
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
+
+-- original size: 5655, stripped down to: 3242
+
+if not modules then modules={} end modules ['util-tpl']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities.templates=utilities.templates or {}
+local templates=utilities.templates
+local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
+local report_template=logs.reporter("template")
+local tostring=tostring
+local format,sub=string.format,string.sub
+local P,C,Cs,Carg,lpegmatch=lpeg.P,lpeg.C,lpeg.Cs,lpeg.Carg,lpeg.match
+local replacer
+local function replacekey(k,t,how,recursive)
+ local v=t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v=tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+local sqlescape=lpeg.replacer {
+ { "'","''" },
+ { "\\","\\\\" },
+ { "\r\n","\\n" },
+ { "\r","\\n" },
+}
+local sqlquotedescape=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local escapers={
+ lua=function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+local quotedescapers={
+ lua=function(s)
+ return format("%q",s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
+lpeg.patterns.sqlescape=sqlescape
+lpeg.patterns.sqlquotedescape=sqlquotedescape
+local luaescaper=escapers.lua
+local quotedluaescaper=quotedescapers.lua
+local function replacekeyunquoted(s,t,how,recurse)
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local function replacekeyquoted(s,t,how,recurse)
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local single=P("%")
+local double=P("%%")
+local lquoted=P("%[")
+local rquoted=P("]%")
+local lquotedq=P("%(")
+local rquotedq=P(")%")
+local escape=double/'%%'
+local nosingle=single/''
+local nodouble=double/''
+local nolquoted=lquoted/''
+local norquoted=rquoted/''
+local nolquotedq=lquotedq/''
+local norquotedq=rquotedq/''
+local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local any=P(1)
+ replacer=Cs((unquoted+quoted+escape+key+any)^0)
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+templates.replace=replace
+function templates.load(filename,mapping,how,recurse)
+ local data=io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping=t
+ end
+ for k,v in next,t do
+ t[k]=replace(v,mapping,how,recurse)
+ end
+ return t
+end
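+-- Template sketch (illustrative): %key% is replaced as-is, %[key]% is escaped
+-- and %(key)% is quoted, using the "lua" escaper unless "sql" is requested:
+--
+--   templates.replace("hello %name%",{ name = "world" })         -- hello world
+--   templates.replace("... %[value]% ...",{ value = "it's" },"sql")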
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-env"] = package.loaded["util-env"] or true
+
+-- original size: 7702, stripped down to: 4701
+
+if not modules then modules={} end modules ['util-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate,mark=utilities.storage.allocate,utilities.storage.mark
+local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
+local unquoted,quoted=string.unquoted,string.quoted
+local concat,insert,remove=table.concat,table.insert,table.remove
+environment=environment or {}
+local environment=environment
+os.setlocale(nil,nil)
+function os.setlocale()
+end
+local validengines=allocate {
+ ["luatex"]=true,
+ ["luajittex"]=true,
+}
+local basicengines=allocate {
+ ["luatex"]="luatex",
+ ["texlua"]="luatex",
+ ["texluac"]="luatex",
+ ["luajittex"]="luajittex",
+ ["texluajit"]="luajittex",
+}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
+environment.validengines=validengines
+environment.basicengines=basicengines
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
+ end
+ local originalzero=file.basename(arg[0])
+ local specialmapping={ luatools="base" }
+ if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
+ arg[0]=specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+end
+environment.arguments=allocate()
+environment.files=allocate()
+environment.sortedflags=nil
+function environment.initializearguments(arg)
+ local arguments,files={},{}
+ environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
+ for index=1,#arg do
+ local argument=arg[index]
+ if index>0 then
+ local flag,value=match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=unquoted(value or "")
+ else
+ flag=match(argument,"^%-+(.+)")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=true
+ else
+ files[#files+1]=argument
+ end
+ end
+ end
+ end
+ environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
+end
+function environment.setargument(name,value)
+ environment.arguments[name]=value
+end
+function environment.getargument(name,partial)
+ local arguments,sortedflags=environment.arguments,environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags=allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k]="^"..sortedflags[k]
+ end
+ environment.sortedflags=sortedflags
+ end
+ for k=1,#sortedflags do
+ local v=sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
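+-- Argument sketch (illustrative): flags (with or without values) end up in
+-- environment.arguments, everything else in environment.files:
+--
+--   environment.initializearguments { "--verbose","--pattern=foo","input.tex" }
+--   environment.getargument("verbose") -- true
+--   environment.getargument("pattern") -- "foo"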
+environment.argument=environment.getargument
+function environment.splitarguments(separator)
+ local done,before,after=false,{},{}
+ local originalarguments=environment.originalarguments
+ for k=1,#originalarguments do
+ local v=originalarguments[k]
+ if not done and v==separator then
+ done=true
+ elseif done then
+ after[#after+1]=v
+ else
+ before[#before+1]=v
+ end
+ end
+ return before,after
+end
+function environment.reconstructcommandline(arg,noquote)
+ arg=arg or environment.originalarguments
+ if noquote and #arg==1 then
+ local a=arg[1]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ return a
+ elseif #arg>0 then
+ local result={}
+ for i=1,#arg do
+ local a=arg[i]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ a=gsub(a,'"','\\"')
+ if find(a," ") then
+ result[#result+1]=quoted(a)
+ else
+ result[#result+1]=a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
+end
+if arg then
+ local newarg,instring={},false
+ for index=1,#arg do
+ local argument=arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1]=gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring=true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
+ instring=false
+ elseif instring then
+ newarg[#newarg]=newarg[#newarg].." "..argument
+ else
+ newarg[#newarg+1]=argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i]=arg[i]
+ end
+ environment.initializearguments(newarg)
+ environment.originalarguments=mark(newarg)
+ environment.rawarguments=mark(arg)
+ arg={}
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-env"] = package.loaded["luat-env"] or true
+
+-- original size: 5874, stripped down to: 4184
+
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="kind" then
+ local kind=tex.toks and tex.toks.contextkindtoks
+ if kind and kind~="" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
+function environment.texfile(filename)
+ return resolvers.findfile(filename,'tex')
+end
+function environment.luafile(filename)
+ local resolved=resolvers.findfile(filename,'tex') or ""
+ if resolved~="" then
+ return resolved
+ end
+ resolved=resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved~="" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
+local function strippable(filename)
+ if stripindeed then
+ local modu=modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
+end
+function environment.luafilechunk(filename,silent)
+ filename=file.replacesuffix(filename,"lua")
+ local fullname=environment.luafile(filename)
+ if fullname and fullname~="" then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if trace_locating then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ return nil
+ end
+end
+function environment.loadluafile(filename,version)
+ local lucname,luaname,chunk
+ local basename=file.removesuffix(filename)
+ if basename==filename then
+ luaname=file.addsuffix(basename,luasuffixes.lua)
+ lucname=file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname=basename
+ lucname=nil
+ end
+ local fullname=(lucname and environment.luafile(lucname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ local v=version
+ if modules and modules[filename] then
+ v=modules[filename].version
+ elseif versions and versions[filename] then
+ v=versions[filename]
+ end
+ if v==version then
+ return true
+ else
+ if trace_locating then
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname=(luaname and environment.luafile(luaname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
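+-- Loading sketch (illustrative, hypothetical module name): prefer the compiled
+-- luc file and fall back to the lua source when the version does not match:
+--
+--   environment.loadluafile("my-module",1.001)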
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
+
+-- original size: 42495, stripped down to: 26647
+
+if not modules then modules={} end modules ['lxml-tab']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+xml=xml or {}
+local xml=xml
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local utfchar=utf.char
+local lpegmatch=lpeg.match
+local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
+local formatters=string.formatters
+xml.xmlns=xml.xmlns or {}
+local check=P(false)
+local parse=check
+function xml.registerns(namespace,pattern)
+ check=check+C(P(lower(pattern)))/namespace
+ parse=P { P(check)+1*V(1) }
+end
+function xml.checkns(namespace,url)
+ local ns=lpegmatch(parse,lower(url))
+ if ns and namespace~=ns then
+ xml.xmlns[namespace]=ns
+ end
+end
+function xml.resolvens(url)
+ return lpegmatch(parse,lower(url)) or ""
+end
+local nsremap,resolvens=xml.xmlns,xml.resolvens
+local stack={}
+local top={}
+local dt={}
+local at={}
+local xmlns={}
+local errorstr=nil
+local entities={}
+local strip=false
+local cleanup=false
+local utfize=false
+local resolve_predefined=false
+local unify_predefined=false
+local dcache={}
+local hcache={}
+local acache={}
+local mt={}
+local function initialize_mt(root)
+ mt={ __index=root }
+end
+function xml.setproperty(root,k,v)
+ getmetatable(root).__index[k]=v
+end
+function xml.checkerror(top,toclose)
+ return ""
+end
+local function add_attribute(namespace,tag,value)
+ if cleanup and #value>0 then
+ value=cleanup(value)
+ end
+ if tag=="xmlns" then
+ xmlns[#xmlns+1]=resolvens(value)
+ at[tag]=value
+ elseif namespace=="" then
+ at[tag]=value
+ elseif namespace=="xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:"..tag]=value
+ else
+ at[namespace..":"..tag]=value
+ end
+end
+local function add_empty(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top=stack[#stack]
+ dt=top.dt
+ local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
+ dt[#dt+1]=t
+ setmetatable(t,mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at={}
+end
+local function add_begin(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ setmetatable(top,mt)
+ dt=top.dt
+ stack[#stack+1]=top
+ at={}
+end
+local function add_end(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local toclose=remove(stack)
+ top=stack[#stack]
+ if #stack<1 then
+ errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ elseif toclose.tg~=tag then
+ errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ end
+ dt=top.dt
+ dt[#dt+1]=toclose
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
+end
+local function add_text(text)
+ if cleanup and #text>0 then
+ dt[#dt+1]=cleanup(text)
+ else
+ dt[#dt+1]=text
+ end
+end
+local function add_special(what,spacing,text)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ if strip and (what=="@cm@" or what=="@dt@") then
+ else
+ dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ end
+end
+local function set_message(txt)
+ errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
+end
+local reported_attribute_errors={}
+local function attribute_value_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+local function attribute_specification_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+}
+local placeholders=xml.placeholders
+local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
+end
+local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s),true
+ end
+end
+local rest=(1-P(";"))^0
+local many=P(1)^0
+local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
+}
+local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
+}
+local nofprivates=0xF0000
+local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+}
+local privates_p={}
+local privates_n={
+}
+local escaped=utf.remapper(privates_u)
+local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ end
+ return p
+end
+local unprivatized=utf.remapper(privates_p)
+xml.privatetoken=unescaped
+xml.unprivatized=unprivatized
+xml.privatecodes=privates_n
+local function handle_hex_entity(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+   h=(n and utfchar(n)) or placeholders.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
+ end
+ hcache[str]=h
+ end
+ return h
+end
+local function handle_dec_entity(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
+ end
+ dcache[str]=d
+ end
+ return d
+end
+xml.parsedentitylpeg=parsedentity
+local function handle_any_entity(str)
+ if resolve then
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve)=="function" then
+ a=resolve(str) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a="&error;"
+ else
+ a="&"..str..";"
+ end
+ end
+ end
+ end
+ acache[str]=a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str]=a
+ end
+ end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a="&error;"
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
+ end
+end
+local function handle_end_entity(chr)
+ report_xml("error in entity, %a found instead of %a",chr,";")
+end
+local space=S(' \r\n\t')
+local open=P('<')
+local close=P('>')
+local squote=S("'")
+local dquote=S('"')
+local equal=P('=')
+local slash=P('/')
+local colon=P(':')
+local semicolon=P(';')
+local ampersand=P('&')
+local valid=R('az','AZ','09')+S('_-.')
+local name_yes=C(valid^1)*colon*C(valid^1)
+local name_nop=C(P(true))*C(valid^1)
+local name=name_yes+name_nop
+local utfbom=lpeg.patterns.utfbom
+local spacing=C(space^0)
+local anyentitycontent=(1-open-semicolon-space-close)^0
+local hexentitycontent=R("AF","af","09")^0
+local decentitycontent=R("09")^0
+local parsedentity=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity)
+local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local text_unparsed=C((1-open)^1)
+local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local somespace=space^1
+local optionalspace=space^0
+local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
+local endofattributes=slash*close+close
+local whatever=space*name*optionalspace*equal
+local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
+local attributevalue=value+wrongvalue
+local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
+local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
+local parsedtext=text_parsed/add_text
+local unparsedtext=text_unparsed/add_text
+local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
+local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
+local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
+local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
+local begincomment=open*P("!--")
+local endcomment=P("--")*close
+local begininstruction=open*P("?")
+local endinstruction=P("?")*close
+local begincdata=open*P("![CDATA[")
+local endcdata=P("]]")*close
+local someinstruction=C((1-endinstruction)^0)
+local somecomment=C((1-endcomment )^0)
+local somecdata=C((1-endcdata )^0)
+local function normalentity(k,v ) entities[k]=v end
+local function systementity(k,v,n) entities[k]=v end
+local function publicentity(k,v,n) entities[k]=v end
+local begindoctype=open*P("!DOCTYPE")
+local enddoctype=close
+local beginset=P("[")
+local endset=P("]")
+local doctypename=C((1-somespace-close)^0)
+local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
+local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local normalentitytype=(doctypename*somespace*value)/normalentity
+local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
+local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local definitiondoctype=doctypename*somespace*doctypeset
+local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
+local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
+local simpledoctype=(1-close)^1
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
+local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
+local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
+local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local trailer=space^0*(text_unparsed/set_message)^0
+local grammar_parsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+}
+local grammar_unparsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+}
+local function _xmlconvert_(data,settings)
+ settings=settings or {}
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ stack,top,at,xmlns,errorstr={},{},{},{},nil
+ acache,hcache,dcache={},{},{}
+ reported_attribute_errors={}
+ if settings.parent_root then
+ mt=getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1]=top
+ top.dt={}
+ dt=top.dt
+ if not data or data=="" then
+ errorstr="empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr=""
+ else
+ errorstr="invalid xml file - parsed text"
+ end
+ elseif type(data)=="string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr=""
+ else
+ errorstr="invalid xml file - unparsed text"
+ end
+ else
+ errorstr="invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr~="" then
+ result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+ setmetatable(stack,mt)
+ local errorhandler=settings.error_handler
+ if errorhandler==false then
+ else
+ errorhandler=errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource=settings.currentresource
+ if currentresource and currentresource~="" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result=stack[1]
+ end
+ if not settings.no_root then
+ result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
+ setmetatable(result,mt)
+ local rdt=result.dt
+ for k=1,#rdt do
+ local v=rdt[k]
+ if type(v)=="table" and not v.special then
+ result.ri=k
+ v.__p__=result
+ break
+ end
+ end
+ end
+ if errorstr and errorstr~="" then
+ result.error=true
+ end
+ result.statistics={
+ entities={
+ decimals=dcache,
+ hexadecimals=hcache,
+ names=acache,
+ }
+ }
+ strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
+ unify_predefined,cleanup,entities=nil,nil,nil
+ stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
+ acache,hcache,dcache=nil,nil,nil
+ reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ return result
+end
+function xmlconvert(data,settings)
+ local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+xml.convert=xmlconvert
+function xml.inheritedconvert(data,xmldata)
+ local settings=xmldata.settings
+ if settings then
+ settings.parent_root=xmldata
+ end
+ local xc=xmlconvert(data,settings)
+ return xc
+end
+function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
+end
+function xml.package(tag,attributes,data)
+ local ns,tg=match(tag,"^(.-):?([^:]+)$")
+ local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
+ setmetatable(t,mt)
+ return t
+end
+function xml.is_valid(root)
+ return root and not root.error
+end
+xml.errorhandler=report_xml
+function xml.load(filename,settings)
+ local data=""
+ if type(filename)=="string" then
+ local f=io.open(filename,'r')
+ if f then
+ data=f:read("*all")
+ f:close()
+ end
+ elseif filename then
+ data=filename:read("*all")
+ end
+ if settings then
+ settings.currentresource=filename
+ local result=xmlconvert(data,settings)
+ settings.currentresource=nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource=filename })
+ end
+end
+local no_root={ no_root=true }
+function xml.toxml(data)
+ if type(data)=="string" then
+ local root={ xmlconvert(data,no_root) }
+ return (#root>1 and root) or root[1]
+ else
+ return data
+ end
+end
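+-- Parsing sketch (illustrative): convert a string (or load a file) into the
+-- parsed tree and check the result before using it:
+--
+--   local root = xml.convert("<doc><p>hello</p></doc>")
+--   if xml.is_valid(root) then
+--     -- walk root.dt ...
+--   end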
+local function copy(old,tables)
+ if old then
+ tables=tables or {}
+ local new={}
+ if not tables[old] then
+ tables[old]=new
+ end
+ for k,v in next,old do
+ new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ end
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ return new
+ else
+ return {}
+ end
+end
+xml.copy=copy
+function xml.checkbom(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
+ insert(dt,2,"\n" )
+ end
+end
+local function verbose_element(e,handlers)
+ local handle=handlers.handle
+ local serialize=handlers.serialize
+ local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
+ local ats=eat and next(eat) and {}
+ if ats then
+ for k,v in next,eat do
+ ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern~=ens then
+ ens=ern
+ end
+ if ens~="" then
+ if edt and #edt>0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("</",ens,":",etg,">")
+ else
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt>0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
+ end
+ end
+end
+local function verbose_pi(e,handlers)
+ handlers.handle("<?",e.dt[1],"?>")
+end
+local function verbose_comment(e,handlers)
+ handlers.handle("<!--",e.dt[1],"-->")
+end
+local function verbose_cdata(e,handlers)
+ handlers.handle("<![CDATA[",e.dt[1],"]]>")
+end
+local function verbose_doctype(e,handlers)
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
+end
+local function verbose_root(e,handlers)
+ handlers.serialize(e.dt,handlers)
+end
+local function verbose_text(e,handlers)
+ handlers.handle(escaped(e))
+end
+local function verbose_document(e,handlers)
+ local serialize=handlers.serialize
+ local functions=handlers.functions
+ for i=1,#e do
+ local ei=e[i]
+ if type(ei)=="string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
+ end
+ end
+end
+local function serialize(e,handlers,...)
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
+end
+local function xserialize(e,handlers)
+ local functions=handlers.functions
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+local handlers={}
+local function newhandlers(settings)
+ local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
+ if settings then
+ for k,v in next,settings do
+ if type(v)=="table" then
+ local tk=t[k] if not tk then tk={} t[k]=tk end
+ for kk,vv in next,v do
+ tk[kk]=vv
+ end
+ else
+ t[k]=v
+ end
+ end
+ if settings.name then
+ handlers[settings.name]=t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+local nofunction=function() end
+function xml.sethandlersfunction(handler,name,fnc)
+ handler.functions[name]=fnc or nofunction
+end
+function xml.gethandlersfunction(handler,name)
+ return handler.functions[name]
+end
+function xml.gethandlers(name)
+ return handlers[name]
+end
+newhandlers {
+ name="verbose",
+ initialize=false,
+ finalize=false,
+ serialize=xserialize,
+ handle=print,
+ functions={
+ ["@dc@"]=verbose_document,
+ ["@dt@"]=verbose_doctype,
+ ["@rt@"]=verbose_root,
+ ["@el@"]=verbose_element,
+ ["@pi@"]=verbose_pi,
+ ["@cm@"]=verbose_comment,
+ ["@cd@"]=verbose_cdata,
+ ["@tx@"]=verbose_text,
+ }
+}
+local result
+local xmlfilehandler=newhandlers {
+ name="file",
+ initialize=function(name)
+ result=io.open(name,"wb")
+ return result
+ end,
+ finalize=function()
+ result:close()
+ return true
+ end,
+ handle=function(...)
+ result:write(...)
+ end,
+}
+function xml.save(root,name)
+ serialize(root,xmlfilehandler,name)
+end
+local result
+local xmlstringhandler=newhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+}
+local function xmltostring(root)
+ if not root then
+ return ""
+ elseif type(root)=="string" then
+ return root
+ else
+ return serialize(root,xmlstringhandler) or ""
+ end
+end
+local function __tostring(root)
+ return (root and xmltostring(root)) or ""
+end
+initialize_mt=function(root)
+ mt={ __tostring=__tostring,__index=root }
+end
+xml.defaulthandlers=handlers
+xml.newhandlers=newhandlers
+xml.serialize=serialize
+xml.tostring=xmltostring
+local function xmlstring(e,handle)
+ if not handle or (e.special and e.tg~="@rt@") then
+ elseif e.tg then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
+ end
+ else
+ handle(e)
+ end
+end
+xml.string=xmlstring
+function xml.settings(e)
+ while e do
+ local s=e.settings
+ if s then
+ return s
+ else
+ e=e.__p__
+ end
+ end
+ return nil
+end
+function xml.root(e)
+ local r=e
+ while e do
+ e=e.__p__
+ if e then
+ r=e
+ end
+ end
+ return r
+end
+function xml.parent(root)
+ return root.__p__
+end
+function xml.body(root)
+ return root.ri and root.dt[root.ri] or root
+end
+function xml.name(root)
+ if not root then
+ return ""
+ end
+ local ns=root.ns
+ local tg=root.tg
+ if ns=="" then
+ return tg
+ else
+ return ns..":"..tg
+ end
+end
+function xml.erase(dt,k)
+ if dt then
+ if k then
+ dt[k]=""
+ else for k=1,#dt do
+ dt[1]={ "" }
+ end end
+ end
+end
+function xml.assign(dt,k,root)
+ if dt and k then
+ dt[k]=type(root)=="table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+function xml.tocdata(e,wrapper)
+ local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
+ setmetatable(t,getmetatable(e))
+ e.dt={ t }
+end
+function xml.makestandalone(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" then
+ local txt=v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1]=txt.." standalone='yes'"
+ break
+ end
+ end
+ end
+ end
+ return root
+end
+function xml.kind(e)
+ local dt=e and e.dt
+ if dt then
+ local n=#dt
+ if n==1 then
+ local d=dt[1]
+ if d.special then
+ local tg=d.tg
+ if tg=="@cd@" then
+ return "cdata"
+ elseif tg=="@cm@" then
+ return "comment"
+ elseif tg=="@pi@" then
+ return "instruction"
+ elseif tg=="@dt@" then
+ return "declaration"
+ end
+ elseif type(d)=="string" then
+ return "text"
+ end
+ return "element"
+ elseif n>0 then
+ return "mixed"
+ end
+ end
+ return "empty"
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
+
+-- original size: 48956, stripped down to: 30516
+
+if not modules then modules={} end modules ['lxml-lpt']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
+local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
+local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
+local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
+local report_lpath=logs.reporter("xml","lpath")
+local xml=xml
+local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
+local lpathcached=0 function xml.lpathcached() return lpathcached end
+xml.functions=xml.functions or {}
+local functions=xml.functions
+xml.expressions=xml.expressions or {}
+local expressions=xml.expressions
+xml.finalizers=xml.finalizers or {}
+local finalizers=xml.finalizers
+xml.specialhandler=xml.specialhandler or {}
+local specialhandler=xml.specialhandler
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+finalizers.xml=finalizers.xml or {}
+finalizers.tex=finalizers.tex or {}
+local function fallback (t,name)
+ local fn=finalizers[name]
+ if fn then
+ t[name]=fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn=function() end
+ end
+ return fn
+end
+setmetatableindex(finalizers.xml,fallback)
+setmetatableindex(finalizers.tex,fallback)
+xml.defaultprotocol="xml"
+local apply_axis={}
+apply_axis['root']=function(list)
+ local collected={}
+ for l=1,#list do
+ local ll=list[l]
+ local rt=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ rt=ll
+ end
+ end
+ collected[l]=rt
+ end
+ return collected
+end
+apply_axis['self']=function(list)
+ return list
+end
+apply_axis['child']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local dt=ll.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ end
+ end
+ ll.en=en
+ end
+ end
+ return collected
+end
+local function collect(list,collected,c)
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ c=collect(list[l],collected,c)
+ end
+ return collected
+end
+local function collect(list,collected,c)
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ if ll.special~=true then
+ c=c+1
+ collected[c]=ll
+ end
+ c=collect(ll,collected,c)
+ end
+ return collected
+end
+apply_axis['ancestor']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['ancestor-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ c=c+1
+ collected[c]=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['parent']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local pl=list[l].__p__
+ if pl then
+ c=c+1
+ collected[c]=pl
+ end
+ end
+ return collected
+end
+apply_axis['attribute']=function(list)
+ return {}
+end
+apply_axis['namespace']=function(list)
+ return {}
+end
+apply_axis['following']=function(list)
+ return {}
+end
+apply_axis['preceding']=function(list)
+ return {}
+end
+apply_axis['following-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni+1,#d do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['preceding-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=1,ll.ni-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['reverse-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
+apply_axis['auto-descendant']=apply_axis['descendant']
+apply_axis['auto-child']=apply_axis['child']
+apply_axis['auto-self']=apply_axis['self']
+apply_axis['initial-child']=apply_axis['child']
+local function apply_nodes(list,directive,nodes)
+ local maxn=#nodes
+ if maxn==3 then
+ local nns,ntg=nodes[2],nodes[3]
+ if not nns and not ntg then
+ if directive then
+ return list
+ else
+ return {}
+ end
+ else
+ local collected,c,m,p={},0,0,nil
+ if not nns then
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ if directive then
+ if ntg==ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif ntg~=ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ elseif not ntg then
+ for l=1,#list do
+ local ll=list[l]
+ local lns=ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns==nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif lns~=nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ else
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=ltg==ntg and lns==nns
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected,c,m,p={},0,0,nil
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=false
+ for n=1,maxn,3 do
+ local nns,ntg=nodes[n+1],nodes[n+2]
+ ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
+ if ok then
+ break
+ end
+ end
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ return collected
+ end
+end
+local quit_expression=false
+local function apply_expression(list,expression,order)
+ local collected,c={},0
+ quit_expression=false
+ for l=1,#list do
+ local ll=list[l]
+ if expression(list,ll,l,order) then
+ c=c+1
+ collected[c]=ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
+local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
+local spaces=S(" \n\r\t\f")^0
+local lp_space=S(" \n\r\t\f")
+local lp_any=P(1)
+local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
+local lp_doequal=P("=")/"=="
+local lp_or=P("|")/" or "
+local lp_and=P("&")/" and "
+local lp_builtin=P (
+ P("text")/"(ll.dt[1] or '')"+
+ P("content")/"ll.dt"+
+ P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
+ P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
+ P("ns")/"ll.ns"
+ )*((spaces*P("(")*spaces*P(")"))/"")
+local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
+lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos=lp_fastpos_n+lp_fastpos_p
+local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
+local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
+local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
+ if expressions[t] then
+ return "expr."..t.."("
+ else
+ return "expr.error("
+ end
+end
+local lparent=P("(")
+local rparent=P(")")
+local noparent=1-(lparent+rparent)
+local nested=P{lparent*(noparent+V(1))^0*rparent}
+local value=P(lparent*C((noparent+nested)^0)*rparent)
+local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
+local lp_number=S("+-")*R("09")^1
+local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
+local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
+local cleaner
+local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
+ if expressions[t] then
+ s=s and s~="" and lpegmatch(cleaner,s)
+ if s and s~="" then
+ return "expr."..t.."(ll,"..s..")"
+ else
+ return "expr."..t.."(ll)"
+ end
+ else
+ return "expr.error("..t..")"
+ end
+end
+local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
+ lp_child+lp_any
+local converter=Cs (
+ lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
+)
+cleaner=Cs ((
+ lp_reserved+lp_number+lp_string+1 )^1 )
+local template_e=[[
+ local expr = xml.expressions
+ return function(list,ll,l,order)
+ return %s
+ end
+]]
+local template_f_y=[[
+ local finalizer = xml.finalizers['%s']['%s']
+ return function(collection)
+ return finalizer(collection,%s)
+ end
+]]
+local template_f_n=[[
+ return xml.finalizers['%s']['%s']
+]]
+local register_self={ kind="axis",axis="self" }
+local register_parent={ kind="axis",axis="parent" }
+local register_descendant={ kind="axis",axis="descendant" }
+local register_child={ kind="axis",axis="child" }
+local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
+local register_root={ kind="axis",axis="root" }
+local register_ancestor={ kind="axis",axis="ancestor" }
+local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
+local register_attribute={ kind="axis",axis="attribute" }
+local register_namespace={ kind="axis",axis="namespace" }
+local register_following={ kind="axis",axis="following" }
+local register_following_sibling={ kind="axis",axis="following-sibling" }
+local register_preceding={ kind="axis",axis="preceding" }
+local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
+local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
+local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
+local register_auto_descendant={ kind="axis",axis="auto-descendant" }
+local register_auto_self={ kind="axis",axis="auto-self" }
+local register_auto_child={ kind="axis",axis="auto-child" }
+local register_initial_child={ kind="axis",axis="initial-child" }
+local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
+local skip={}
+local function errorrunner_e(str,cnv)
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str]=cnv or str
+ end
+ return false
+end
+local function errorrunner_f(str,arg)
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
+end
+local function register_nodes(nodetest,nodes)
+ return { kind="nodes",nodetest=nodetest,nodes=nodes }
+end
+local function register_expression(expression)
+ local converted=lpegmatch(converter,expression)
+ local runner=load(format(template_e,converted))
+ runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind="expression",expression=expression,converted=converted,evaluator=runner }
+end
+local function register_finalizer(protocol,name,arguments)
+ local runner
+ if arguments and arguments~="" then
+ runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
+end
+local expression=P { "ex",
+ ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
+ sq="'"*(1-S("'"))^0*"'",
+ dq='"'*(1-S('"'))^0*'"',
+}
+local arguments=P { "ar",
+ ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
+ nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
+ sq=P("'")*(1-P("'"))^0*P("'"),
+ dq=P('"')*(1-P('"'))^0*P('"'),
+}
+local function register_error(str)
+ return { kind="error",error=format("unparsed: %s",str) }
+end
+local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
+local special_2=P("/")*Cc(register_auto_self)
+local special_3=P("")*Cc(register_auto_self)
+local no_nextcolon=P(-1)+#(1-P(":"))
+local no_nextlparent=P(-1)+#(1-P("("))
+local pathparser=Ct { "patterns",
+ patterns=spaces*V("protocol")*spaces*(
+ (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
+ ),
+ protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
+ step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
+ axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
+ special=special_1+special_2+special_3,
+ initial=(P("/")*spaces*Cc(register_initial_child))^-1,
+ error=(P(1)^1)/register_error,
+ shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
+ shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
+ s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
+ s_descendant=P("**")*Cc(register_descendant),
+ s_child=P("*")*no_nextcolon*Cc(register_child ),
+ s_parent=P("..")*Cc(register_parent ),
+ s_self=P("." )*Cc(register_self ),
+ s_root=P("^^")*Cc(register_root ),
+ s_ancestor=P("^")*Cc(register_ancestor ),
+ descendant=P("descendant::")*Cc(register_descendant ),
+ child=P("child::")*Cc(register_child ),
+ parent=P("parent::")*Cc(register_parent ),
+ self=P("self::")*Cc(register_self ),
+ root=P('root::')*Cc(register_root ),
+ ancestor=P('ancestor::')*Cc(register_ancestor ),
+ descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
+ ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
+ following=P('following::')*Cc(register_following ),
+ following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
+ preceding=P('preceding::')*Cc(register_preceding ),
+ preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
+ reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
+ nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
+ expressions=expression/register_expression,
+ letters=R("az")^1,
+ name=(1-S("/[]()|:*!"))^1,
+ negate=P("!")*Cc(false),
+ nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
+ nodetest=V("negate")+Cc(true),
+ nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
+ wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
+ nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
+ finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
+}
+xmlpatterns.pathparser=pathparser
+local cache={}
+local function nodesettostring(set,nodetest)
+ local t={}
+ for i=1,#set,3 do
+ local directive,ns,tg=set[i],set[i+1],set[i+2]
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i]=(directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest==false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
+end
+local function tagstostring(list)
+ if #list==0 then
+ return "no elements"
+ else
+ local t={}
+ for i=1,#list do
+ local li=list[i]
+ local ns,tg=li.ns,li.tg
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+xml.nodesettostring=nodesettostring
+local lpath
+local lshowoptions={ functions=false }
+local function lshow(parsed)
+ if type(parsed)=="string" then
+ parsed=lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
+end
+xml.lshow=lshow
+local function add_comment(p,str)
+ local pc=p.comment
+ if not pc then
+ p.comment={ str }
+ else
+ pc[#pc+1]=str
+ end
+end
+lpath=function (pattern)
+ lpathcalls=lpathcalls+1
+ if type(pattern)=="table" then
+ return pattern
+ else
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcached=lpathcached+1
+ else
+ parsed=lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern=pattern
+ local np=#parsed
+ if np==0 then
+ parsed={ pattern=pattern,register_self,state="parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
+ else
+ local pi=parsed[1]
+ if pi.axis=="auto-child" then
+ if false then
+ add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
+ parsed[1]=register_auto_descendant_or_self
+ else
+ add_comment(parsed,"auto-child replaced by auto-descendant")
+ parsed[1]=register_auto_descendant
+ end
+ elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
+ add_comment(parsed,"initial-child removed")
+ remove(parsed,1)
+ end
+ local np=#parsed
+ if np>1 then
+ local pnp=parsed[np]
+ if pnp.kind=="nodes" and pnp.nodetest==true then
+ local nodes=pnp.nodes
+ if nodes[1]==true and nodes[2]==false and nodes[3]==false then
+ add_comment(parsed,"redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed={ pattern=pattern }
+ end
+ cache[pattern]=parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
+ end
+ return parsed
+ end
+end
+xml.lpath=lpath
+local profiled={} xml.profiled=profiled
+local function profiled_apply(list,parsed,nofparsed,order)
+ local p=profiled[parsed.pattern]
+ if p then
+ p.tested=p.tested+1
+ else
+ p={ tested=1,matched=0,finalized=0 }
+ profiled[parsed.pattern]=p
+ end
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ p.matched=p.matched+1
+ p.finalized=p.finalized+1
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ p.finalized=p.finalized+1
+ return collected
+ end
+ return nil
+ end
+ end
+ if collected then
+ p.matched=p.matched+1
+ end
+ return collected
+end
+local function traced_apply(list,parsed,nofparsed,order)
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
+ end
+ end
+ return collected
+end
+local function normal_apply(list,parsed,nofparsed,order)
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ local axis=pi.axis
+ if axis~="self" then
+ collected=apply_axis[axis](collected)
+ end
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected==0 then
+ local pf=i<nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected)
+ end
+ return nil
+ end
+ end
+ return collected
+end
+local function applylpath(list,pattern)
+ if not list then
+ return
+ end
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcalls=lpathcalls+1
+ lpathcached=lpathcached+1
+ elseif type(pattern)=="table" then
+ lpathcalls=lpathcalls+1
+ parsed=pattern
+ else
+ parsed=lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed=#parsed
+ if nofparsed==0 then
+ return
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+xml.applylpath=applylpath
+function xml.filter(root,pattern)
+ return applylpath(root,pattern)
+end
+expressions.child=function(e,pattern)
+ return applylpath(e,pattern)
+end
+expressions.count=function(e,pattern)
+ local collected=applylpath(e,pattern)
+ return pattern and (collected and #collected) or 0
+end
+expressions.oneof=function(s,...)
+ for i=1,select("#",...) do
+ if s==select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+expressions.error=function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+expressions.undefined=function(s)
+ return s==nil
+end
+expressions.quit=function(s)
+ if s or s==nil then
+ quit_expression=true
+ end
+ return true
+end
+expressions.print=function(...)
+ print(...)
+ return true
+end
+expressions.contains=find
+expressions.find=find
+expressions.upper=upper
+expressions.lower=lower
+expressions.number=tonumber
+expressions.boolean=toboolean
+function expressions.contains(str,pattern)
+ local t=type(str)
+ if t=="string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t=="table" then
+ for i=1,#str do
+ local d=str[i]
+ if type(d)=="string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+local function traverse(root,pattern,handle)
+ local collected=applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local r=e.__p__
+ handle(r,r.dt,e.ni)
+ end
+ end
+end
+local function selection(root,pattern,handle)
+ local collected=applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
+ end
+ end
+end
+xml.traverse=traverse
+xml.selection=selection
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f=functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
+ else
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+finalizers.xml["function"]=dofunction
+finalizers.tex["function"]=dofunction
+expressions.text=function(e,n)
+ local rdt=e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+expressions.name=function(e,n)
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=type(e)=="table" and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ if found then
+ local ns,tg=found.rn or found.ns or "",found.tg
+ if ns~="" then
+ return ns..":"..tg
+ else
+ return tg
+ end
+ else
+ return ""
+ end
+end
+expressions.tag=function(e,n)
+ if not e then
+ return ""
+ else
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=(type(e)=="table") and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+local dummy=function() end
+function xml.elements(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ end
+end
+function xml.collected(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ return collected[c]
+ end
+ end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ return collected[c]
+ end
+ end
+ end
+end
+function xml.inspect(collection,pattern)
+ pattern=pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+local function split(e)
+ local dt=e.dt
+ if dt then
+ for i=1,#dt do
+ local dti=dt[i]
+ if type(dti)=="string" then
+ dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti=gsub(dti,"[\n\r]+","\n\n")
+ dt[i]=dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
+
+-- original size: 3684, stripped down to: 1957
+
+if not modules then modules={} end modules ['lxml-mis']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local xml,lpeg,string=xml,lpeg,string
+local concat=table.concat
+local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
+local format,gsub,match=string.format,string.gsub,string.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+local function xmlgsub(t,old,new)
+ local dt=t.dt
+ if dt then
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="string" then
+ dt[k]=gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+function xml.stripleadingspaces(dk,d,k)
+ if d and k then
+ local dkm=d[k-1]
+ if dkm and type(dkm)=="string" then
+ local s=match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
+local normal=(1-S("<&>"))^0
+local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
+local escaped=Cs(normal*(special*normal)^0)
+local normal=(1-S"&")^0
+local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
+local unescaped=Cs(normal*(special*normal)^0)
+local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
+xmlpatterns.escaped=escaped
+xmlpatterns.unescaped=unescaped
+xmlpatterns.cleansed=cleansed
+function xml.escaped (str) return lpegmatch(escaped,str) end
+function xml.unescaped(str) return lpegmatch(unescaped,str) end
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
+function xml.fillin(root,pattern,str,check)
+ local e=xml.first(root,pattern)
+ if e then
+ local n=#e.dt
+ if not check or n==0 or (n==1 and e.dt[1]=="") then
+ e.dt={ str }
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
+
+-- original size: 23804, stripped down to: 16817
+
+if not modules then modules={} end modules ['lxml-aux']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local report_xml=logs.reporter("xml")
+local xml=xml
+local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlinheritedconvert=xml.inheritedconvert
+local xmlapplylpath=xml.applylpath
+local xmlfilter=xml.filter
+local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
+local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
+local utfbyte=utf.byte
+local function report(what,pattern,c,e)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+end
+local function withelements(e,handle,depth)
+ if e and handle then
+ local edt=e.dt
+ if edt then
+ depth=depth or 0
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
+ end
+ end
+ end
+ end
+end
+xml.withelements=withelements
+function xml.withelement(e,n,handle)
+ if e and n~=0 and handle then
+ local edt=e.dt
+ if edt then
+ if n>0 then
+ for i=1,#edt do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==1 then
+ handle(ei)
+ return
+ else
+ n=n-1
+ end
+ end
+ end
+ elseif n<0 then
+ for i=#edt,1,-1 do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==-1 then
+ handle(ei)
+ return
+ else
+ n=n+1
+ end
+ end
+ end
+ end
+ end
+ end
+end
+function xml.each(root,pattern,handle,reverse)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ end
+ return collected
+ end
+end
+function xml.processattributes(root,pattern,handle)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
+ end
+ end
+ return collected
+end
+function xml.collect(root,pattern)
+ return xmlapplylpath(root,pattern)
+end
+function xml.collecttexts(root,pattern,flatten)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring=xml.tostring
+ for c=1,#collected do
+ collected[c]=xmltostring(collected[c].dt)
+ end
+ end
+ return collected or {}
+end
+function xml.collect_tags(root,pattern,nonamespace)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ local t,n={},0
+ for c=1,#collected do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace then
+ t[n]=tg
+ elseif ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
+ end
+ return t
+ end
+end
+local no_root={ no_root=true }
+local function redo_ni(d)
+ for k=1,#d do
+ local dk=d[k]
+ if type(dk)=="table" then
+ dk.ni=k
+ end
+ end
+end
+local function xmltoelement(whatever,root)
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever)=="string" then
+ element=xmlinheritedconvert(whatever,root)
+ else
+ element=whatever
+ end
+ if element.error then
+ return whatever
+ end
+ if element then
+ end
+ return element
+end
+xml.toelement=xmltoelement
+local function copiedelement(element,newparent)
+ if type(element)=="string" then
+ return element
+ else
+ element=xmlcopy(element).dt
+ if newparent and type(element)=="table" then
+ element.__p__=newparent
+ end
+ return element
+ end
+end
+function xml.delete(root,pattern)
+ if not pattern or pattern=="" then
+ local p=root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d=p.dt
+ remove(d,root.ni)
+ redo_ni(d)
+ end
+ else
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d=p.dt
+ remove(d,e.ni)
+ redo_ni(d)
+ end
+ end
+ end
+ end
+end
+function xml.replace(root,pattern,whatever)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
+ end
+ local d=p.dt
+ d[e.ni]=copiedelement(element,p)
+ redo_ni(d)
+ end
+ end
+ end
+end
+local function wrap(e,wrapper)
+ local t={
+ rn=e.rn,
+ tg=e.tg,
+ ns=e.ns,
+ at=e.at,
+ dt=e.dt,
+ __p__=e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn=wrapper.rn or e.rn or ""
+ e.tg=wrapper.tg or e.tg or ""
+ e.ns=wrapper.ns or e.ns or ""
+ e.at=fastcopy(wrapper.at)
+ e.dt={ t }
+end
+function xml.wrap(root,pattern,whatever)
+ if whatever then
+ local wrapper=xmltoelement(whatever,root)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
+ end
+ wrap(e,wrapper)
+ end
+ end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
+end
+local function inject_element(root,pattern,whatever,prepend)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r=e.__p__
+ local d,k,rri=r.dt,e.ni,r.ri
+ local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be,af
+ local cp=copiedelement(element,e)
+ if prepend then
+ be,af=cp,edt
+ else
+ be,af=edt,cp
+ end
+ local bn=#be
+ for i=1,#af do
+ bn=bn+1
+ be[bn]=af[i]
+ end
+ if rri then
+ r.dt[rri].dt=be
+ else
+ d[k].dt=be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ elseif collected.tg then
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+local function insert_element(root,pattern,whatever,before)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r=e.__p__
+ local d,k=r.dt,e.ni
+ if not before then
+ k=k+1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ elseif collected.tg then
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
+ end
+ end
+end
+xml.insert_element=insert_element
+xml.insertafter=insert_element
+xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter=inject_element
+xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
+local function include(xmldata,pattern,attribute,recursive,loaddata)
+ pattern=pattern or 'include'
+ loaddata=loaddata or io.loaddata
+ local collected=xmlapplylpath(xmldata,pattern)
+ if collected then
+ for c=1,#collected do
+ local ek=collected[c]
+ local name=nil
+ local ekdt=ek.dt
+ local ekat=ek.at
+ local epdt=ek.__p__.dt
+ if not attribute or attribute=="" then
+ name=(type(ekdt)=="table" and ekdt[1]) or ekdt
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name=ekat[a]
+ if name then break end
+ end
+ end
+ local data=(name and name~="" and loaddata(name)) or ""
+ if data=="" then
+ epdt[ek.ni]=""
+ elseif ekat["parse"]=="text" then
+ epdt[ek.ni]=xml.escaped(data)
+ else
+ local xi=xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni]=""
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni]=xml.body(xi)
+ end
+ end
+ end
+ end
+end
+xml.include=include
+local function stripelement(e,nolines,anywhere)
+ local edt=e.dt
+ if edt then
+ if anywhere then
+ local t,n={},0
+ for e=1,#edt do
+ local str=edt[e]
+ if type(str)~="string" then
+ n=n+1
+ t[n]=str
+ elseif str~="" then
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s*(.-)%s*$","%1")
+ if str~="" then
+ n=n+1
+ t[n]=str
+ end
+ end
+ end
+ e.dt=t
+ else
+ if #edt>0 then
+ local str=edt[1]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt,1)
+ else
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s+","")
+ if str=="" then
+ remove(edt,1)
+ else
+ edt[1]=str
+ end
+ end
+ end
+ local nedt=#edt
+ if nedt>0 then
+ local str=edt[nedt]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt)
+ else
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"%s+$","")
+ if str=="" then
+ remove(edt)
+ else
+ edt[nedt]=str
+ end
+ end
+ end
+ end
+ end
+ return e
+end
+xml.stripelement=stripelement
+function xml.strip(root,pattern,nolines,anywhere)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
+local function renamespace(root,oldspace,newspace)
+ local ndt=#root.dt
+ for i=1,ndt or 0 do
+ local e=root[i]
+ if type(e)=="table" then
+ if e.ns==oldspace then
+ e.ns=newspace
+ if e.rn then
+ e.rn=newspace
+ end
+ end
+ local edt=e.dt
+ if edt then
+ renamespace(edt,oldspace,newspace)
+ end
+ end
+ end
+end
+xml.renamespace=renamespace
+function xml.remaptag(root,pattern,newtg)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg=newtg
+ end
+ end
+end
+function xml.remapnamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns=newns
+ end
+ end
+end
+function xml.checknamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if (not e.rn or e.rn=="") and e.ns=="" then
+ e.rn=newns
+ end
+ end
+ end
+end
+function xml.remapname(root,pattern,newtg,newns,newrn)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ e.tg,e.ns,e.rn=newtg,newns,newrn
+ end
+ end
+end
+function xml.cdatatotext(e)
+ local dt=e.dt
+ if #dt==1 then
+ local first=dt[1]
+ if first.tg=="@cd@" then
+ e.dt=first.dt
+ end
+ else
+ end
+end
+function xml.texttocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(dt)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+function xml.elementtocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(e)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
+local entities=characters and characters.entities or nil
+local builtinentities=xml.builtinentities
+function xml.addentitiesdoctype(root,option)
+ if not entities then
+ require("char-ent")
+ entities=characters.entities
+ end
+ if entities and root and root.tg=="@rt@" and root.statistics then
+ local list={}
+ local hexify=option=="hexadecimal"
+ for k,v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e=entities[k]
+ if not e then
+ e=format("[%s]",k)
+ elseif hexify then
+ e=format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1]=format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt=root.dt
+ local n=dt[1].tg=="@pi@" and 2 or 1
+ if #list>0 then
+ insert(dt,n,{ "\n" })
+ insert(dt,n,{
+ tg="@dt@",
+ dt={ format("Something [\n%s\n] ",concat(list)) },
+ ns="",
+ special=true,
+ })
+ insert(dt,n,{ "\n\n" })
+ else
+ end
+ end
+end
+xml.all=xml.each
+xml.insert=xml.insertafter
+xml.inject=xml.injectafter
+xml.after=xml.insertafter
+xml.before=xml.insertbefore
+xml.process=xml.each
+xml.obsolete=xml.obsolete or {}
+local obsolete=xml.obsolete
+xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
+xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
+xml.delete_element=xml.delete obsolete.delete_element=xml.delete
+xml.replace_element=xml.replace obsolete.replace_element=xml.replace
+xml.each_element=xml.each obsolete.each_element=xml.each
+xml.process_elements=xml.process obsolete.process_elements=xml.process
+xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
+xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
+xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
+xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
+xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
+xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
+xml.inject_element=xml.inject obsolete.inject_element=xml.inject
+xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
+xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
+xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
+function xml.cdata(e)
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e=collected[1]
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+function xml.insertcomment(e,str,n)
+ table.insert(e.dt,n or 1,{
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcdata(e,str)
+ e.dt={ {
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.separate(x,pattern)
+ local collected=xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local d=e.dt
+ if d==x then
+ report_xml("warning: xml.separate changes root")
+ x=d
+ end
+ local t,n={ "\n" },1
+ local i,nd=1,#d
+ while i<=nd do
+ while i<=nd do
+ local di=d[i]
+ if type(di)=="string" then
+ if di=="\n" or find(di,"^%s+$") then
+ i=i+1
+ else
+ d[i]=strip(di)
+ break
+ end
+ else
+ break
+ end
+ end
+ if i>nd then
+ break
+ end
+ t[n+1]="\n"
+ t[n+2]=d[i]
+ t[n+3]="\n"
+ n=n+3
+ i=i+1
+ end
+ t[n+1]="\n"
+ setmetatable(t,getmetatable(d))
+ e.dt=t
+ end
+ end
+ return x
+end
+local helpers=xml.helpers or {}
+xml.helpers=helpers
+local function normal(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)=="string" and str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+local function recurse(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)~="string" then
+ recurse(str,action)
+ elseif str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
+
+-- original size: 10274, stripped down to: 7538
+
+if not modules then modules={} end modules ['lxml-xml']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local find,lower,upper=string.find,string.lower,string.upper
+local xml=xml
+local finalizers=xml.finalizers.xml
+local xmlfilter=xml.filter
+local xmltostring=xml.tostring
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmlnewhandlers=xml.newhandlers
+local function first(collected)
+ return collected and collected[1]
+end
+local function last(collected)
+ return collected and collected[#collected]
+end
+local function all(collected)
+ return collected
+end
+local reverse=table.reversed
+local function attribute(collected,name)
+ if collected and #collected>0 then
+ local at=collected[1].at
+ return at and at[name]
+ end
+end
+local function att(id,name)
+ local at=id.at
+ return at and at[name]
+end
+local function count(collected)
+ return collected and #collected or 0
+end
+local function position(collected,n)
+ if not collected then
+ return 0
+ end
+ local nc=#collected
+ if nc==0 then
+ return 0
+ end
+ n=tonumber(n) or 0
+ if n<0 then
+ return collected[nc+n+1]
+ elseif n>0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
+end
+local function match(collected)
+ return collected and #collected>0 and collected[1].mi or 0
+end
+local function index(collected)
+ return collected and #collected>0 and collected[1].ni or 0
+end
+local function attributes(collected,arguments)
+ if collected and #collected>0 then
+ local at=collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at
+ end
+ end
+end
+local function chainattribute(collected,arguments)
+ if collected and #collected>0 then
+ local e=collected[1]
+ while e do
+ local at=e.at
+ if at then
+ local a=at[arguments]
+ if a then
+ return a
+ end
+ else
+ break
+ end
+ e=e.__p__
+ end
+ end
+ return ""
+end
+local function raw(collected)
+ if collected and #collected>0 then
+ local e=collected[1] or collected
+ return e and xmltostring(e) or ""
+ else
+ return ""
+ end
+end
+local xmltexthandler=xmlnewhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+ escape=false,
+}
+local function xmltotext(root)
+ local dt=root.dt
+ if not dt then
+ return ""
+ end
+ local nt=#dt
+ if nt==0 then
+ return ""
+ elseif nt==1 and type(dt[1])=="string" then
+ return dt[1]
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+local function text(collected)
+ if collected then
+ local e=collected[1] or collected
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
+end
+local function texts(collected)
+ if not collected then
+ return {}
+ end
+ local nc=#collected
+ if nc==0 then
+ return {}
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ if e and e.dt then
+ n=n+1
+ t[n]=e.dt
+ end
+ end
+ return t
+end
+local function tag(collected,n)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ return c and c.tg
+end
+local function name(collected,n)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ if not c then
+ elseif c.ns=="" then
+ return c.tg
+ else
+ return c.ns..":"..c.tg
+ end
+end
+local function tags(collected,nonamespace)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace or ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
+ end
+ return t
+end
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc=#collected
+ if nc==0 then
+ return true
+ end
+ for c=1,nc do
+ local e=collected[c]
+ if e then
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==1 then
+ local edk=edt[1]
+ local typ=type(edk)
+ if typ=="table" then
+ return false
+ elseif edk~="" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n>1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+finalizers.first=first
+finalizers.last=last
+finalizers.all=all
+finalizers.reverse=reverse
+finalizers.elements=all
+finalizers.default=all
+finalizers.attribute=attribute
+finalizers.att=att
+finalizers.count=count
+finalizers.position=position
+finalizers.match=match
+finalizers.index=index
+finalizers.attributes=attributes
+finalizers.chainattribute=chainattribute
+finalizers.text=text
+finalizers.texts=texts
+finalizers.tag=tag
+finalizers.name=name
+finalizers.tags=tags
+finalizers.empty=empty
+function xml.first(id,pattern)
+ return first(xmlfilter(id,pattern))
+end
+function xml.last(id,pattern)
+ return last(xmlfilter(id,pattern))
+end
+function xml.count(id,pattern)
+ return count(xmlfilter(id,pattern))
+end
+function xml.attribute(id,pattern,a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
+end
+function xml.raw(id,pattern)
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+function xml.text(id,pattern)
+ if pattern then
+ local collected=xmlfilter(id,pattern)
+ return collected and #collected>0 and xmltotext(collected[1]) or ""
+ elseif id then
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
+end
+xml.content=text
+function xml.position(id,pattern,n)
+ return position(xmlfilter(id,pattern),n)
+end
+function xml.match(id,pattern)
+ return match(xmlfilter(id,pattern))
+end
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
+end
+xml.all=xml.filter
+xml.index=xml.position
+xml.found=xml.filter
+local function totable(x)
+ local t={}
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg]=xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+xml.table=totable
+finalizers.table=totable
+local function textonly(e,t)
+ if e then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ textonly(e,t)
+ else
+ t[#t+1]=e
+ end
+ end
+ end
+ end
+ return t
+end
+function xml.textonly(e)
+ return concat(textonly(e,{}))
+end
+function finalizers.lowerall(collected)
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=lower(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[lower(k)]=v
+ end
+ e.at=t
+ end
+ end
+ end
+end
+function finalizers.upperall(collected)
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=upper(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[upper(k)]=v
+ end
+ e.at=t
+ end
+ end
+ end
+end
+
+
+end -- of closure
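+
+-- [editor's illustration, not part of the upstream module] The finalizers and
+-- xml.* wrappers defined above are easiest to read from the calling side.
+-- A minimal, hedged usage sketch (the sample document is made up; xml.convert,
+-- xml.text, xml.count and xml.empty are the functions defined in this file):
+--
+--   local root = xml.convert("<doc><a>one</a><a>two</a></doc>")
+--   xml.text(root,"/doc/a")   -- "one"  (text of the first match)
+--   xml.count(root,"/doc/a")  -- 2
+--   xml.empty(root,"/doc/a")  -- false  (the matches carry content)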
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
+
+-- original size: 6351, stripped down to: 4919
+
+if not modules then modules={} end modules ['trac-xml']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local formatters=string.formatters
+local reporters=logs.reporters
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmltext=xml.text
+local xmlfirst=xml.first
+local function showhelp(specification,...)
+ local root=xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs=xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories=select("#",...)==0 and true or table.tohash {... }
+ local nofcategories=xml.count(root,"/application/flags/category")
+ local report=specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname=category.at.name or ""
+ if wantedcategories==true or wantedcategories[categoryname] then
+ if nofcategories>1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name=flag.at.name
+ local value=flag.at.value
+ local short=xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title=xmltext(xmlfirst(category,"/title"))
+ if title and title~="" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command=xmltext(xmlfirst(example,"/command"))
+ local comment=xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment=xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+local reporthelp=reporters.help
+local exporthelp=reporters.export
+local function xmlfound(t)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="table" then
+ return false
+ end
+ if type(helpinfo)~="string" then
+ helpinfo="Warning: no helpinfo found."
+ t.helpinfo=helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript=environment.ownscript
+ local helpdata=false
+ if ownscript then
+ local helpfile=file.join(file.pathpart(ownscript),helpinfo)
+ helpdata=io.loaddata(helpfile)
+ if helpdata=="" then
+ helpdata=false
+ end
+ end
+ if not helpdata then
+ local helpfile=resolvers.findfile(helpinfo,"tex")
+ helpdata=helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata~="" then
+ helpinfo=helpdata
+ else
+ helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo=helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods=="" then
+ methods=environment.arguments["exporthelp"]
+ end
+ if not filename or filename=="" then
+ filename=environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters=logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods=="all" then
+ methods=table.keys(exporters)
+ elseif type(methods)=="string" then
+ methods=utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename)~="string" or filename=="" then
+ filename=false
+ elseif file.pathpart(filename)=="" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method=methods[i]
+ local exporter=exporters[method]
+ if exporter then
+ local result=exporter(t,method)
+ if result and result~="" then
+ if filename then
+ local fullname=file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
+
+
+end -- of closure
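+
+-- [editor's illustration, not part of the upstream module] reporters.help and
+-- reporters.export above only take over when t.helpinfo is (or resolves to)
+-- an XML blob. A hedged sketch of the layout that showhelp walks; element and
+-- attribute names follow the xmlcollected patterns above, while the concrete
+-- flag/example content is invented:
+--
+--   <?xml version="1.0"?>
+--   <application>
+--     <flags>
+--       <category name="basic">
+--         <subcategory>
+--           <flag name="verbose"><short>be talkative</short></flag>
+--           <flag name="output" value="FILE"><short>write to FILE</short></flag>
+--         </subcategory>
+--       </category>
+--     </flags>
+--     <examples>
+--       <category>
+--         <title>Examples</title>
+--         <subcategory>
+--           <example><command>mtxrun --script context --verbose file</command></example>
+--         </subcategory>
+--       </category>
+--     </examples>
+--   </application>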
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-ini"] = package.loaded["data-ini"] or true
+
+-- original size: 7898, stripped down to: 5501
+
+if not modules then modules={} end modules ['data-ini']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
+local next,type=next,type
+local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_initialization=logs.reporter("resolvers","initialization")
+local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
+resolvers=resolvers or {}
+local resolvers=resolvers
+texconfig.kpse_init=false
+texconfig.shell_escape='t'
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
+ local default_texmfcnf=kpse.default_texmfcnf()
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
+ default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
+ environment.default_texmfcnf=default_texmfcnf
+end
+kpse={ original=kpse }
+setmetatable(kpse,{
+ __index=function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
+} )
+do
+ local osfontdir=osgetenv("OSFONTDIR")
+ if osfontdir and osfontdir~="" then
+ elseif osname=="windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname=="macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+end
+do
+ local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
+ if not homedir or homedir=="" then
+ homedir=char(127)
+ end
+ homedir=file.collapsepath(homedir)
+ ossetenv("HOME",homedir)
+ ossetenv("USERPROFILE",homedir)
+ environment.homedir=homedir
+end
+do
+ local args=environment.originalarguments or arg
+ if not environment.ownmain then
+ environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+ local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath=environment.ownpath or os.selfdir
+ ownbin=file.collapsepath(ownbin)
+ ownpath=file.collapsepath(ownpath)
+ if not ownpath or ownpath=="" or ownpath=="unset" then
+ ownpath=args[-1] or arg[-1]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath=="" then
+ ownpath=args[-0] or arg[-0]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary=ownbin
+ if not ownpath or ownpath=="" then
+ ownpath=ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath=="" then
+ if os.binsuffix~="" then
+ binary=file.replacesuffix(binary,os.binsuffix)
+ end
+ local path=osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b=filejoin(p,binary)
+ if lfs.isfile(b) then
+ local olddir=lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp=lfs.currentdir()
+ if trace_locating and p~=pp then
+ report_initialization("following symlink %a to %a",p,pp)
+ end
+ ownpath=pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path %a",p)
+ end
+ ownpath=p
+ end
+ break
+ end
+ end
+ end
+ end
+ if not ownpath or ownpath=="" then
+ ownpath="."
+ report_initialization("forcing fallback to ownpath %a",ownpath)
+ elseif trace_locating then
+ report_initialization("using ownpath %a",ownpath)
+ end
+ end
+ environment.ownbin=ownbin
+ environment.ownpath=ownpath
+end
+resolvers.ownpath=environment.ownpath
+function resolvers.getownpath()
+ return environment.ownpath
+end
+do
+ local ownpath=environment.ownpath or dir.current()
+ if ownpath then
+ ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
+ ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
+ end
+end
+local texos=environment.texos or osgetenv("TEXOS")
+local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
+if not texos or texos=="" then
+ texos=file.basename(texmfos)
+end
+ossetenv('TEXMFOS',texmfos)
+ossetenv('TEXOS',texos)
+ossetenv('SELFAUTOSYSTEM',os.platform)
+environment.texos=texos
+environment.texmfos=texmfos
+local texroot=environment.texroot or osgetenv("TEXROOT")
+if not texroot or texroot=="" then
+ texroot=osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+environment.texroot=file.collapsepath(texroot)
+if profiler then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
+end
+if not resolvers.resolve then
+ function resolvers.resolve (s) return s end
+ function resolvers.unresolve(s) return s end
+ function resolvers.repath (s) return s end
+end
+
+
+end -- of closure
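+
+-- [editor's note, not part of the upstream module] The module above disables
+-- the kpse library, derives ownpath from the location of the binary and then
+-- defines the usual web2c-style variables relative to it. A hedged example
+-- with a purely hypothetical TeX Live layout:
+--
+--   ownpath        = /opt/texlive/bin/x86_64-linux
+--   SELFAUTOLOC    = /opt/texlive/bin/x86_64-linux   (ownpath)
+--   SELFAUTODIR    = /opt/texlive/bin                (ownpath/..)
+--   SELFAUTOPARENT = /opt/texlive                    (ownpath/../..)
+--   TEXMFOS defaults to SELFAUTODIR, and TEXOS to its basename, when not preset.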
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-exp"] = package.loaded["data-exp"] or true
+
+-- original size: 14643, stripped down to: 9517
+
+if not modules then modules={} end modules ['data-exp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
+local concat,sort=table.concat,table.sort
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local Ct,Cs,Cc,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.P,lpeg.C,lpeg.S
+local type,next=type,next
+local ostype=os.type
+local collapsepath=file.collapsepath
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_expansions=logs.reporter("resolvers","expansions")
+local resolvers=resolvers
+local function f_first(a,b)
+ local t,n={},0
+ for s in gmatch(b,"[^,]+") do
+ n=n+1;t[n]=a..s
+ end
+ return concat(t,",")
+end
+local function f_second(a,b)
+ local t,n={},0
+ for s in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=s..b
+ end
+ return concat(t,",")
+end
+local function f_both(a,b)
+ local t,n={},0
+ for sb in gmatch(b,"[^,]+") do
+ for sa in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=sa..sb
+ end
+ end
+ return concat(t,",")
+end
+local left=P("{")
+local right=P("}")
+local var=P((1-S("{}" ))^0)
+local set=P((1-S("{},"))^0)
+local other=P(1)
+local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
+local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
+local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
+local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
+local stripper_1=lpeg.stripper ("{}@")
+local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
+local function splitpathexpr(str,newlist,validate)
+ if trace_expansions then
+ report_expansions("expanding variable %a",str)
+ end
+ local t,ok,done=newlist or {},false,false
+ local n=#t
+ str=lpegmatch(replacer_1,str)
+ repeat
+ local old=str
+ repeat
+ local old=str
+ str=lpegmatch(l_first,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_second,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_both,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_rest,str)
+ until old==str
+ until old==str
+ str=lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s=validate(s)
+ if s then
+ n=n+1
+ t[n]=s
+ end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ n=n+1
+ t[n]=s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
+ end
+ end
+ return t
+end
+local function validate(s)
+ s=collapsepath(s)
+ return s~="" and not find(s,"^!*unset/*$") and s
+end
+resolvers.validatedpath=validate
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist={}
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ return newlist
+end
+local cleanup=lpeg.replacer {
+ { "!","" },
+ { "\\","/" },
+}
+function resolvers.cleanpath(str)
+ local doslashes=(P("\\")/"/"+1)^0
+ local donegation=(P("!")/"" )^0
+ local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return ""
+ else
+ return lpegmatch(cleanup,str)
+ end
+ end
+ else
+ local dohome=((P("~")+P("$HOME"))/homedir)^0
+ local cleanup=Cs(donegation*dohome*doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
+ end
+ return resolvers.cleanpath(str)
+end
+local expandhome=P("~")/"$HOME"
+local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
+local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
+local dostring=(expandhome+1 )^0
+local stripper=Cs(
+ lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
+)
+function resolvers.checkedvariable(str)
+ return type(str)=="string" and lpegmatch(stripper,str) or str
+end
+local cache={}
+local splitter=lpeg.tsplitat(";")
+local backslashswapper=lpeg.replacer("\\","/")
+local function splitconfigurationpath(str)
+ if str then
+ local found=cache[str]
+ if not found then
+ if str=="" then
+ found={}
+ else
+ local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
+ found={}
+ local noffound=0
+ for i=1,#split do
+ local s=split[i]
+ if not find(s,"^{*unset}*") then
+ noffound=noffound+1
+ found[noffound]=s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification %a",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str]=found
+ end
+ end
+ return found
+ end
+end
+resolvers.splitconfigurationpath=splitconfigurationpath
+function resolvers.splitpath(str)
+ if type(str)=='table' then
+ return str
+ else
+ return splitconfigurationpath(str)
+ end
+end
+function resolvers.joinpath(str)
+ if type(str)=='table' then
+ return file.joinpath(str)
+ else
+ return str
+ end
+end
+local attributes,directory=lfs.attributes,lfs.dir
+local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer={}
+local scanned={}
+local nofscans=0
+local scancache={}
+local function scan(files,spec,path,n,m,r)
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ n=n+1
+ local f=files[name]
+ if f then
+ if type(f)=='string' then
+ files[name]={ f,path }
+ else
+ f[#f+1]=path
+ end
+ else
+ files[name]=path
+ local lower=lower(name)
+ if name~=lower then
+ files["remap:"..lower]=name
+ r=r+1
+ end
+ end
+ elseif mode=='directory' then
+ m=m+1
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
+ end
+ end
+ end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files,n,m,r=scan(files,spec,dirs[i],n,m,r)
+ end
+ end
+ scancache[sub(full,1,-2)]=files
+ return files,n,m,r
+end
+local fullcache={}
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=fullcache[realpath]
+ if files then
+ if trace_locating then
+    report_expansions("using cached scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files,n,m,r=scan({},realpath..'/',"",0,0,0)
+ files.__path__=path
+ files.__files__=n
+ files.__directories__=m
+ files.__remappings__=r
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ fullcache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
+end
+local function simplescan(files,spec,path)
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ if not files[name] then
+ files[name]=path
+ end
+ elseif mode=='directory' then
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
+ end
+ end
+ end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files=simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+local simplecache={}
+local nofsharedscans=0
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=simplecache[realpath]
+ if not files then
+ files=scancache[realpath]
+ if files then
+ nofsharedscans=nofsharedscans+1
+ end
+ end
+ if files then
+ if trace_locating then
+    report_expansions("using cached scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files=simplescan({},realpath..'/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ simplecache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
+end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n=nofscans,
+ shared=nofsharedscans,
+ time=statistics.elapsedtime(timer),
+ paths=scanned,
+ }
+end
+
+
+end -- of closure
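+
+-- [editor's illustration, not part of the upstream module] splitpathexpr and
+-- resolvers.expandedpathfromlist implement kpse-style brace expansion of
+-- comma lists; resolvers.cleanpath then normalises the entries (drops "!",
+-- turns "\" into "/" and expands "~"/"$HOME" when a usable home directory is
+-- known). A hedged sketch with made-up input:
+--
+--   resolvers.expandedpathfromlist { "{/usr/texmf,~/texmf}/fonts" }
+--   --> { "/usr/texmf/fonts", "~/texmf/fonts" }   (validated and collapsed)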
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-env"] = package.loaded["data-env"] or true
+
+-- original size: 8762, stripped down to: 6484
+
+if not modules then modules={} end modules ['data-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local lower,gsub=string.lower,string.gsub
+local resolvers=resolvers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+local suffixonly=file.suffixonly
+local formats=allocate()
+local suffixes=allocate()
+local dangerous=allocate()
+local suffixmap=allocate()
+resolvers.formats=formats
+resolvers.suffixes=suffixes
+resolvers.dangerous=dangerous
+resolvers.suffixmap=suffixmap
+local luasuffixes=utilities.lua.suffixes
+local relations=allocate {
+ core={
+ ofm={
+ names={ "ofm","omega font metric","omega font metrics" },
+ variable='OFMFONTS',
+ suffixes={ 'ofm','tfm' },
+ },
+ ovf={
+ names={ "ovf","omega virtual font","omega virtual fonts" },
+ variable='OVFFONTS',
+ suffixes={ 'ovf','vf' },
+ },
+ tfm={
+ names={ "tfm","tex font metric","tex font metrics" },
+ variable='TFMFONTS',
+ suffixes={ 'tfm' },
+ },
+ vf={
+ names={ "vf","virtual font","virtual fonts" },
+ variable='VFFONTS',
+ suffixes={ 'vf' },
+ },
+ otf={
+ names={ "otf","opentype","opentype font","opentype fonts"},
+ variable='OPENTYPEFONTS',
+ suffixes={ 'otf' },
+ },
+ ttf={
+ names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
+ variable='TTFONTS',
+ suffixes={ 'ttf','ttc','dfont' },
+ },
+ afm={
+ names={ "afm","adobe font metric","adobe font metrics" },
+ variable="AFMFONTS",
+ suffixes={ "afm" },
+ },
+ pfb={
+ names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
+ variable='T1FONTS',
+ suffixes={ 'pfb','pfa' },
+ },
+ fea={
+ names={ "fea","font feature","font features","font feature file","font feature files" },
+ variable='FONTFEATURES',
+ suffixes={ 'fea' },
+ },
+ cid={
+ names={ "cid","cid map","cid maps","cid file","cid files" },
+ variable='FONTCIDMAPS',
+ suffixes={ 'cid','cidmap' },
+ },
+ fmt={
+ names={ "fmt","format","tex format" },
+ variable='TEXFORMATS',
+ suffixes={ 'fmt' },
+ },
+ mem={
+ names={ 'mem',"metapost format" },
+ variable='MPMEMS',
+ suffixes={ 'mem' },
+ },
+ mp={
+ names={ "mp" },
+ variable='MPINPUTS',
+ suffixes={ 'mp','mpvi','mpiv','mpii' },
+ },
+ tex={
+ names={ "tex" },
+ variable='TEXINPUTS',
+ suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ },
+ icc={
+ names={ "icc","icc profile","icc profiles" },
+ variable='ICCPROFILES',
+ suffixes={ 'icc' },
+ },
+ texmfscripts={
+ names={ "texmfscript","texmfscripts","script","scripts" },
+ variable='TEXMFSCRIPTS',
+ suffixes={ 'rb','pl','py' },
+ },
+ lua={
+ names={ "lua" },
+ variable='LUAINPUTS',
+ suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ },
+ lib={
+ names={ "lib" },
+ variable='CLUAINPUTS',
+ suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
+ },
+ bib={
+ names={ 'bib' },
+ suffixes={ 'bib' },
+ },
+ bst={
+ names={ 'bst' },
+ suffixes={ 'bst' },
+ },
+ fontconfig={
+ names={ 'fontconfig','fontconfig file','fontconfig files' },
+ variable='FONTCONFIG_PATH',
+ },
+ },
+ obsolete={
+ enc={
+ names={ "enc","enc files","enc file","encoding files","encoding file" },
+ variable='ENCFONTS',
+ suffixes={ 'enc' },
+ },
+ map={
+ names={ "map","map files","map file" },
+ variable='TEXFONTMAPS',
+ suffixes={ 'map' },
+ },
+ lig={
+ names={ "lig files","lig file","ligature file","ligature files" },
+ variable='LIGFONTS',
+ suffixes={ 'lig' },
+ },
+ opl={
+ names={ "opl" },
+ variable='OPLFONTS',
+ suffixes={ 'opl' },
+ },
+ ovp={
+ names={ "ovp" },
+ variable='OVPFONTS',
+ suffixes={ 'ovp' },
+ },
+ },
+ kpse={
+ base={
+ names={ 'base',"metafont format" },
+ variable='MFBASES',
+ suffixes={ 'base','bas' },
+ },
+ cmap={
+ names={ 'cmap','cmap files','cmap file' },
+ variable='CMAPFONTS',
+ suffixes={ 'cmap' },
+ },
+ cnf={
+ names={ 'cnf' },
+ suffixes={ 'cnf' },
+ },
+ web={
+ names={ 'web' },
+ suffixes={ 'web','ch' }
+ },
+ cweb={
+ names={ 'cweb' },
+ suffixes={ 'w','web','ch' },
+ },
+ gf={
+ names={ 'gf' },
+ suffixes={ '<resolution>gf' },
+ },
+ mf={
+ names={ 'mf' },
+ variable='MFINPUTS',
+ suffixes={ 'mf' },
+ },
+ mft={
+ names={ 'mft' },
+ suffixes={ 'mft' },
+ },
+ pk={
+ names={ 'pk' },
+ suffixes={ '<resolution>pk' },
+ },
+ },
+}
+resolvers.relations=relations
+function resolvers.updaterelations()
+ for category,categories in next,relations do
+ for name,relation in next,categories do
+ local rn=relation.names
+ local rv=relation.variable
+ local rs=relation.suffixes
+ if rn and rv then
+ for i=1,#rn do
+ local rni=lower(gsub(rn[i]," ",""))
+ formats[rni]=rv
+ if rs then
+ suffixes[rni]=rs
+ for i=1,#rs do
+ local rsi=rs[i]
+ suffixmap[rsi]=rni
+ end
+ end
+ end
+ end
+ if rs then
+ end
+ end
+ end
+end
+resolvers.updaterelations()
+local function simplified(t,k)
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
+end
+setmetatableindex(formats,simplified)
+setmetatableindex(suffixes,simplified)
+setmetatableindex(suffixmap,simplified)
+function resolvers.suffixofformat(str)
+ local s=suffixes[str]
+ return s and s[1] or ""
+end
+function resolvers.suffixesofformat(str)
+ return suffixes[str] or {}
+end
+for name,format in next,formats do
+ dangerous[name]=true
+end
+dangerous.tex=nil
+function resolvers.formatofvariable(str)
+ return formats[str] or ''
+end
+function resolvers.formatofsuffix(str)
+ return suffixmap[suffixonly(str)] or 'tex'
+end
+function resolvers.variableofformat(str)
+ return formats[str] or ''
+end
+function resolvers.variableofformatorsuffix(str)
+ local v=formats[str]
+ if v then
+ return v
+ end
+ v=suffixmap[suffixonly(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
+end
+
+
+end -- of closure
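+
+-- [editor's illustration, not part of the upstream module] updaterelations
+-- flattens the relations table above into the formats/suffixes/suffixmap
+-- lookup tables. A hedged sketch of what the accessors then return (the
+-- values follow from the 'otf', 'opentype' and 'tex' entries above):
+--
+--   resolvers.variableofformat("otf")       -- "OPENTYPEFONTS"
+--   resolvers.suffixofformat("opentype")    -- "otf"
+--   resolvers.formatofsuffix("setup.mkiv")  -- "tex"  (via suffixmap)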
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
+
+-- original size: 14308, stripped down to: 10956
+
+if not modules then modules={} end modules ['data-tmp']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
+local serialize,serializetofile=table.serialize,table.tofile
+local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
+local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
+local formatters=string.formatters
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local report_caches=logs.reporter("resolvers","caches")
+local report_resolvers=logs.reporter("resolvers","caching")
+local resolvers=resolvers
+local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
+local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
+local compile=utilities.lua.compile
+function utilities.lua.compile(luafile,lucfile,cleanup,strip)
+ if cleanup==nil then cleanup=directive_cleanup end
+ if strip==nil then strip=directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
+end
+caches=caches or {}
+local caches=caches
+local luasuffixes=utilities.lua.suffixes
+caches.base=caches.base or "luatex-cache"
+caches.more=caches.more or "context"
+caches.direct=false
+caches.tree=false
+caches.force=true
+caches.ask=false
+caches.relocate=false
+caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+local writable,readables,usedreadables=nil,{},{}
+local function identify()
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ cachepath=file.collapsepath(cachepath)
+ local valid=isdir(cachepath)
+ if valid then
+ if is_readable(cachepath) then
+ readables[#readables+1]=cachepath
+ if not writable and is_writable(cachepath) then
+ writable=cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent=file.dirname(cachepath)
+ if is_writable(cacheparent) and true then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
+ writable=cachepath
+ readables[#readables+1]=cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local texmfcaches=caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ cachepath=resolvers.expansion(cachepath)
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ local valid=isdir(cachepath)
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
+ readables[#readables+1]=cachepath
+ writable=cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables==0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ writable=dir.expandname(resolvers.cleanpath(writable))
+ local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
+ if tree then
+ caches.tree=tree
+ writable=mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more,tree)
+ end
+ else
+ writable=mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more)
+ end
+ end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path %a (order %s)",readables[i],i)
+ end
+ report_caches("using writable path %a",writable)
+ end
+ identify=function()
+ return writable,readables
+ end
+ return writable,readables
+end
+function caches.usedpaths()
+ local writable,readables=identify()
+ if #readables>1 then
+ local result={}
+ for i=1,#readables do
+ local readable=readables[i]
+ if usedreadables[i] or readable==writable then
+ result[#result+1]=formatters["readable: %a (order %s)"](readable,i)
+ end
+ end
+ result[#result+1]=formatters["writable: %a"](writable)
+ return result
+ else
+ return writable
+ end
+end
+function caches.configfiles()
+ return concat(resolvers.instance.specification,";")
+end
+function caches.hashed(tree)
+ tree=gsub(tree,"[\\/]+$","")
+ tree=lower(tree)
+ local hash=md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %a, hash %a",tree,hash)
+ end
+ return hash
+end
+function caches.treehash()
+ local tree=caches.configfiles()
+ if not tree or tree=="" then
+ return false
+ else
+ return caches.hashed(tree)
+ end
+end
+local r_cache,w_cache={},{}
+local function getreadablepaths(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=r_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done={}
+ for i=1,#readables do
+ done[i]=file.join(readables[i],...)
+ end
+ else
+ done=readables
+ end
+ r_cache[hash]=done
+ end
+ return done
+end
+local function getwritablepath(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=w_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done=mkdirs(writable,...)
+ else
+ done=writable
+ end
+ w_cache[hash]=done
+ end
+ return done
+end
+caches.getreadablepaths=getreadablepaths
+caches.getwritablepath=getwritablepath
+function caches.getfirstreadablefile(filename,...)
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
+end
+function caches.setfirstwritablefile(filename,...)
+ local wr=getwritablepath(...)
+ local fullname=file.join(wr,filename)
+ return fullname,wr
+end
+function caches.define(category,subcategory)
+ return function()
+ return getwritablepath(category,subcategory)
+ end
+end
+function caches.setluanames(path,name)
+ return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
+end
+function caches.loaddata(readables,name)
+ if type(readables)=="string" then
+ readables={ readables }
+ end
+ for i=1,#readables do
+ local path=readables[i]
+ local tmaname,tmcname=caches.setluanames(path,name)
+ local loader=false
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader then
+ loader=loadfile(tmaname)
+ end
+ end
+ if loader then
+ loader=loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
+end
+function caches.is_writable(filepath,filename)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ return is_writable(tmaname)
+end
+local saveoptions={ compact=true }
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ local reduce,simplify=true,true
+ if raw then
+ reduce,simplify=false,false
+ end
+ data.cache_uuid=os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,serialize(data,true,saveoptions))
+ else
+ serializetofile(tmaname,data,true,saveoptions)
+ end
+ utilities.lua.compile(tmaname,tmcname)
+end
+local content_state={}
+function caches.contentstate()
+ return content_state or {}
+end
+function caches.loadcontent(cachename,dataname)
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
+ if blob then
+ local data=blob()
+ if data and data.content then
+ if data.type==dataname then
+ if data.version==resolvers.cacheversion then
+ content_state[#content_state+1]=data.uuid
+ if trace_locating then
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
+ end
+ else
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
+ end
+end
+function caches.collapsecontent(content)
+ for k,v in next,content do
+ if type(v)=="table" and #v==1 then
+ content[k]=v[1]
+ end
+ end
+end
+function caches.savecontent(cachename,dataname,content)
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local luaname=addsuffix(filename,luasuffixes.lua)
+ local lucname=addsuffix(filename,luasuffixes.luc)
+ if trace_locating then
+ report_resolvers("preparing %a for %a",dataname,cachename)
+ end
+ local data={
+ type=dataname,
+ root=cachename,
+ version=resolvers.cacheversion,
+ date=os.date("%Y-%m-%d"),
+ time=os.date("%H:%M:%S"),
+ content=content,
+ uuid=os.uuid(),
+ }
+ local ok=io.savedata(luaname,serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
+ end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("%a compiled to %a",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
+ end
+end
+
+
+end -- of closure
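+
+-- [editor's illustration, not part of the upstream module] The cache layer
+-- above resolves TEXMFCACHE (falling back to the TMP/HOME style defaults) and
+-- then hands out per-category paths. A hedged usage sketch with hypothetical
+-- category and file names:
+--
+--   local path     = caches.getwritablepath("resolver","files")
+--   local tma,tmc  = caches.setluanames(path,"myhash")    -- the .tma/.tmc pair
+--   caches.savedata(path,"myhash",{ some="data" })        -- serialise, then compile
+--   local data     = caches.loaddata({ path },"myhash")   -- prefers the compiled file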
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-met"] = package.loaded["data-met"] or true
+
+-- original size: 4915, stripped down to: 3942
+
+if not modules then modules={} end modules ['data-met']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,format=string.find,string.format
+local sequenced=table.sequenced
+local addurlscheme,urlhashed=url.addscheme,url.hashed
+local trace_methods=false
+trackers.register("resolvers.locating",function(v) trace_methods=v end)
+trackers.register("resolvers.methods",function(v) trace_methods=v end)
+local report_methods=logs.reporter("resolvers","methods")
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registered={}
+local function splitmethod(filename)
+ if not filename then
+ return { scheme="unknown",original=filename }
+ end
+ if type(filename)=="table" then
+ return filename
+ end
+ filename=file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ end
+ local specification=url.hashed(filename)
+ if not specification.scheme or specification.scheme=="" then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ else
+ return specification
+ end
+end
+resolvers.splitmethod=splitmethod
+local function methodhandler(what,first,...)
+ local method=registered[what]
+ if method then
+ local how,namespace=method.how,method.namespace
+ if how=="uri" or how=="url" then
+ local specification=splitmethod(first)
+ local scheme=specification.scheme
+ local resolver=namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
+ end
+ end
+ elseif how=="tag" then
+ local resolver=namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
+ end
+ end
+ end
+ else
+  report_methods("resolving, invalid method %a",what)
+ end
+end
+resolvers.methodhandler=methodhandler
+function resolvers.registermethod(name,namespace,how)
+ registered[name]={ how=how or "tag",namespace=namespace }
+ namespace["byscheme"]=function(scheme,filename,...)
+ if scheme=="file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
+local concatinators=allocate { notfound=file.join }
+local locators=allocate { notfound=function() end }
+local hashers=allocate { notfound=function() end }
+local generators=allocate { notfound=function() end }
+resolvers.concatinators=concatinators
+resolvers.locators=locators
+resolvers.hashers=hashers
+resolvers.generators=generators
+local registermethod=resolvers.registermethod
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators",locators,"uri")
+registermethod("hashers",hashers,"uri")
+registermethod("generators",generators,"uri")
+
+
+end -- of closure
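+
+-- [editor's note, not part of the upstream module] registermethod/methodhandler
+-- implement a small dispatch-by-scheme (or by tag) mechanism. A hedged sketch
+-- of how a call resolves, assuming scheme handlers such as locators.tree and
+-- a tag handler concatinators.file are registered by later modules:
+--
+--   methodhandler('locators',"tree:///opt/texmf")
+--     -- "uri" mode: splitmethod yields scheme "tree", so locators.tree(spec)
+--     -- is called, falling back to locators.default or locators.file.
+--   methodhandler('concatinators',"file",path,name)
+--     -- "tag" mode: dispatches on the first argument, i.e. concatinators.file.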
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-res"] = package.loaded["data-res"] or true
+
+-- original size: 60821, stripped down to: 42503
+
+if not modules then modules={} end modules ['data-res']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
+local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local next,type,rawget=next,type,rawget
+local os=os
+local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local formatters=string.formatters
+local filedirname=file.dirname
+local filebasename=file.basename
+local suffixonly=file.suffixonly
+local filejoin=file.join
+local collapsepath=file.collapsepath
+local joinpath=file.joinpath
+local allocate=utilities.storage.allocate
+local settings_to_array=utilities.parsers.settings_to_array
+local setmetatableindex=table.setmetatableindex
+local luasuffixes=utilities.lua.suffixes
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_resolving=logs.reporter("resolvers","resolving")
+local resolvers=resolvers
+local expandedpathfromlist=resolvers.expandedpathfromlist
+local checkedvariable=resolvers.checkedvariable
+local splitconfigurationpath=resolvers.splitconfigurationpath
+local methodhandler=resolvers.methodhandler
+local initializesetter=utilities.setters.initialize
+local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
+resolvers.cacheversion='1.0.1'
+resolvers.configbanner=''
+resolvers.homedir=environment.homedir
+resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
+resolvers.luacnfname="texmfcnf.lua"
+resolvers.luacnfstate="unknown"
+if environment.default_texmfcnf then
+ resolvers.luacnfspec=environment.default_texmfcnf
+else
+ resolvers.luacnfspec="{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
+end
+resolvers.luacnfspec='home:texmf/web2c;'..resolvers.luacnfspec
+local unset_variable="unset"
+local formats=resolvers.formats
+local suffixes=resolvers.suffixes
+local dangerous=resolvers.dangerous
+local suffixmap=resolvers.suffixmap
+resolvers.defaultsuffixes={ "tex" }
+resolvers.instance=resolvers.instance or nil
+local instance=resolvers.instance or nil
+function resolvers.setenv(key,value,raw)
+ if instance then
+ instance.environment[key]=value
+ ossetenv(key,raw and value or resolvers.resolve(value))
+ end
+end
+local function getenv(key)
+ local value=rawget(instance.environment,key)
+ if value and value~="" then
+ return value
+ else
+ local e=osgetenv(key)
+ return e~=nil and e~="" and checkedvariable(e) or ""
+ end
+end
+resolvers.getenv=getenv
+resolvers.env=getenv
+local function resolve(k)
+ return instance.expansions[k]
+end
+local dollarstripper=lpeg.stripper("$")
+local inhibitstripper=P("!")^0*Cs(P(1)^0)
+local backslashswapper=lpeg.replacer("\\","/")
+local somevariable=P("$")/""
+local somekey=C(R("az","AZ","09","__","--")^1)
+local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
+local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
+local variablecleaner=Cs((cleaner+P(1))^0)
+local somevariable=R("az","AZ","09","__","--")^1/resolve
+local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
+local variableresolver=Cs((variable+P(1))^0)
+local function expandedvariable(var)
+ return lpegmatch(variableexpander,var) or var
+end
+function resolvers.newinstance()
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+ local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
+ local newinstance={
+ environment=environment,
+ variables=variables,
+ expansions=expansions,
+ order=order,
+ files=allocate(),
+ setups=allocate(),
+ found=allocate(),
+ foundintrees=allocate(),
+ hashes=allocate(),
+ hashed=allocate(),
+ specification=allocate(),
+ lists=allocate(),
+ data=allocate(),
+ fakepaths=allocate(),
+ remember=true,
+ diskcache=true,
+ renewcache=false,
+ renewtree=false,
+ loaderror=false,
+ savelists=true,
+ pattern=nil,
+ force_suffixes=true,
+ }
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v=order[i][k]
+ if v~=nil then
+ t[k]=v
+ return v
+ end
+ end
+ if v==nil then
+ v=""
+ end
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(environment,function(t,k)
+ local v=osgetenv(k)
+ if v==nil then
+ v=variables[k]
+ end
+ if v~=nil then
+ v=checkedvariable(v) or ""
+ end
+ v=resolvers.repath(v)
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(expansions,function(t,k)
+ local v=environment[k]
+ if type(v)=="string" then
+ v=lpegmatch(variableresolver,v)
+ v=lpegmatch(variablecleaner,v)
+ end
+ t[k]=v
+ return v
+ end)
+ return newinstance
+end
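+-- [editor's note, not part of the upstream module] The three metatables above
+-- chain the lookups: expansions[k] is derived from environment[k], which falls
+-- back from the real OS environment to variables[k], which in turn scans the
+-- ordered per-configuration setups. A hedged sketch, assuming an instance is
+-- active and TEXMF is set in a texmfcnf.lua:
+--
+--   resolvers.variable("TEXMF")    -- raw value as found in the configuration
+--   resolvers.expansion("TEXMF")   -- same value with $VAR / ${VAR} expanded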
+function resolvers.setinstance(someinstance)
+ instance=someinstance
+ resolvers.instance=someinstance
+ return someinstance
+end
+function resolvers.reset()
+ return resolvers.setinstance(resolvers.newinstance())
+end
+local function reset_hashes()
+ instance.lists={}
+ instance.found={}
+end
+local slash=P("/")
+local pathexpressionpattern=Cs (
+ Cc("^")*(
+ Cc("%")*S(".-")+slash^2*P(-1)/"/.*"
++slash^2/"/[^/]*/*"+(1-slash)*P(-1)*Cc("/")+P(1)
+ )^1*Cc("$")
+)
+local cache={}
+local function makepathexpression(str)
+ if str=="." then
+ return "^%./$"
+ else
+ local c=cache[str]
+ if not c then
+ c=lpegmatch(pathexpressionpattern,str)
+ cache[str]=c
+ end
+ return c
+ end
+end
+local function reportcriticalvariables(cnfspec)
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k=resolvers.criticalvars[i]
+ local v=resolvers.getenv(k) or "unknown"
+ report_resolving("variable %a set to %a",k,v)
+ end
+ report_resolving()
+ if cnfspec then
+ report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
+ end
+ report_resolving()
+ end
+ reportcriticalvariables=function() end
+end
+local function identify_configuration_files()
+ local specification=instance.specification
+ if #specification==0 then
+ local cnfspec=getenv("TEXMFCNF")
+ if cnfspec=="" then
+ cnfspec=resolvers.luacnfspec
+ resolvers.luacnfstate="default"
+ else
+ resolvers.luacnfstate="environment"
+ end
+ reportcriticalvariables(cnfspec)
+ local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname=resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename=collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname=resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1]=filename
+ if trace_locating then
+ report_resolving("found configuration file %a",realname)
+ end
+ elseif trace_locating then
+ report_resolving("unknown configuration file %a",realname)
+ end
+ end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
+end
+local function load_configuration_files()
+ local specification=instance.specification
+ if #specification>0 then
+ local luacnfname=resolvers.luacnfname
+ for i=1,#specification do
+ local filename=specification[i]
+ local pathname=filedirname(filename)
+ local filename=filejoin(pathname,luacnfname)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local setups=instance.setups
+ local data=blob()
+ local parent=data and data.parent
+ if parent then
+ local filename=filejoin(pathname,parent)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local parentdata=blob()
+ if parentdata then
+ report_resolving("loading configuration file %a",filename)
+ data=table.merged(parentdata,data)
+ end
+ end
+ end
+ data=data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file %a",filename)
+ report_resolving()
+ end
+ local variables=data.variables or {}
+ local warning=false
+ for k,v in next,data do
+ local variant=type(v)
+ if variant=="table" then
+ initializesetter(filename,k,v)
+ elseif variables[k]==nil then
+ if trace_locating and not warning then
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning=true
+ end
+ variables[k]=v
+ end
+ end
+ setups[pathname]=variables
+ if resolvers.luacnfstate=="default" then
+ local cnfspec=variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ resolvers.setenv("TEXMFCNF",cnfspec)
+ instance.specification={}
+ identify_configuration_files()
+ load_configuration_files()
+ resolvers.luacnfstate="configuration"
+ break
+ end
+ end
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file %a (no content)",filename)
+ end
+ setups[pathname]={}
+ instance.loaderror=true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file %a (no valid format)",filename)
+ end
+ instance.order[#instance.order+1]=instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
+ end
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
+ end
+end
+local function load_file_databases()
+ instance.loaderror,instance.files=false,allocate()
+ if not instance.renewcache then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
+ end
+ end
+end
+local function locate_file_databases()
+ local texmfpaths=resolvers.expandedpathlist("TEXMF")
+ if #texmfpaths>0 then
+ for i=1,#texmfpaths do
+ local path=collapsepath(texmfpaths[i])
+ path=gsub(path,"/+$","")
+ local stripped=lpegmatch(inhibitstripper,path)
+ if stripped~="" then
+ local runtime=stripped==path
+ path=resolvers.cleanpath(path)
+ local spec=resolvers.splitmethod(stripped)
+ if runtime and (spec.noscheme or spec.scheme=="file") then
+ stripped="tree:///"..stripped
+ elseif spec.scheme=="cache" or spec.scheme=="file" then
+ stripped=spec.path
+ end
+ if trace_locating then
+ if runtime then
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
+ else
+ report_resolving("locating list of %a (cached)",path)
+ end
+ end
+ methodhandler('locators',stripped)
+ end
+ end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
+ end
+end
+local function generate_file_databases()
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
+end
+local function save_file_databases()
+ for i=1,#instance.hashes do
+ local hash=instance.hashes[i]
+ local cachename=hash.name
+ if hash.cache then
+ local content=instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree %a",cachename)
+ end
+ end
+end
+function resolvers.renew(hashname)
+ if hashname and hashname~="" then
+ local expanded=resolvers.expansion(hashname) or ""
+ if expanded~="" then
+ if trace_locating then
+ report_resolving("identifying tree %a from %a",expanded,hashname)
+ end
+ hashname=expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree %a",hashname)
+ end
+ end
+ local realpath=resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path %a",realpath)
+ end
+ methodhandler('generators',hashname)
+ local content=instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ else
+ report_resolving("invalid path %a",realpath)
+ end
+ end
+end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
+ end
+ end
+end
+function resolvers.appendhash(type,name,cache)
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a appended",name)
+ end
+ insert(instance.hashes,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.prependhash(type,name,cache)
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a prepended",name)
+ end
+ insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.extendtexmfvariable(specification)
+ local t=resolvers.splitpath(getenv("TEXMF"))
+ insert(t,1,specification)
+ local newspec=concat(t,",")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"]=newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"]=newspec
+ else
+ end
+ reset_hashes()
+end
+function resolvers.splitexpansions()
+ local ie=instance.expansions
+ for k,v in next,ie do
+ local t,tn,h,p={},0,{},splitconfigurationpath(v)
+ for kk=1,#p do
+ local vv=p[kk]
+ if vv~="" and not h[vv] then
+ tn=tn+1
+ t[tn]=vv
+ h[vv]=true
+ end
+ end
+ if #t>1 then
+ ie[k]=t
+ else
+ ie[k]=t[1]
+ end
+ end
+end
+function resolvers.datastate()
+ return caches.contentstate()
+end
+function resolvers.variable(name)
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.variables[name]
+ return result~=nil and result or ""
+end
+function resolvers.expansion(name)
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.expansions[name]
+ return result~=nil and result or ""
+end
+function resolvers.unexpandedpathlist(str)
+ local pth=resolvers.variable(str)
+ local lst=resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
+end
+function resolvers.unexpandedpath(str)
+ return joinpath(resolvers.unexpandedpathlist(str))
+end
+local done={}
+function resolvers.resetextrapath()
+ local ep=instance.extra_paths
+ if not ep then
+ ep,done={},{}
+ instance.extra_paths=ep
+ elseif #ep>0 then
+ instance.lists,done={},{}
+ end
+end
+function resolvers.registerextrapath(paths,subpaths)
+ paths=settings_to_array(paths)
+ subpaths=settings_to_array(subpaths)
+ local ep=instance.extra_paths or {}
+ local oldn=#ep
+ local newn=oldn
+ local nofpaths=#paths
+ local nofsubpaths=#subpaths
+ if nofpaths>0 then
+ if nofsubpaths>0 then
+ for i=1,nofpaths do
+ local p=paths[i]
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=p.."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ else
+ for i=1,nofpaths do
+ local p=paths[i]
+ if not done[p] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(p)
+ done[p]=true
+ end
+ end
+ end
+ elseif nofsubpaths>0 then
+ for i=1,oldn do
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=ep[i].."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ end
+ if newn>0 then
+ instance.extra_paths=ep
+ end
+ if newn>oldn then
+ instance.lists={}
+ end
+end
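+-- Editorial sketch, not part of the upstream module: how the extra-path
+-- mechanism above is typically driven (the paths are illustrative only).
+--
+--   resolvers.resetextrapath()
+--   resolvers.registerextrapath("/data/project","styles,fonts")
+--   -- instance.extra_paths now holds "/data/project/styles" and
+--   -- "/data/project/fonts"; made_list() below injects such entries right
+--   -- after any leading "." or "/" items of an expanded path list, and
+--   -- instance.lists is cleared so cached expansions are rebuilt.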
+local function made_list(instance,list)
+ local ep=instance.extra_paths
+ if not ep or #ep==0 then
+ return list
+ else
+ local done,new,newn={},{},0
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ if find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ else
+ break
+ end
+ end
+ end
+ for k=1,#ep do
+ local v=ep[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
+ end
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
+ end
+ return new
+ end
+end
+function resolvers.cleanpathlist(str)
+ local t=resolvers.expandedpathlist(str)
+ if t then
+ for i=1,#t do
+ t[i]=collapsepath(resolvers.cleanpath(t[i]))
+ end
+ end
+ return t
+end
+function resolvers.expandpath(str)
+ return joinpath(resolvers.expandedpathlist(str))
+end
+function resolvers.expandedpathlist(str)
+ if not str then
+ return {}
+ elseif instance.savelists then
+ str=lpegmatch(dollarstripper,str)
+ local lists=instance.lists
+ local lst=lists[str]
+ if not lst then
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst=expandedpathfromlist(l)
+ lists[str]=lst
+ end
+ return lst
+ else
+ local lst=resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
+ end
+end
+function resolvers.expandedpathlistfromvariable(str)
+ str=lpegmatch(dollarstripper,str)
+ local tmp=resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp~="" and tmp or str)
+end
+function resolvers.expandpathfromvariable(str)
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
+end
+function resolvers.expandbraces(str)
+ local ori=str
+ local pth=expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
+end
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name]=content
+ else
+ instance.files[name]={}
+ if someerror==true then
+ instance.loaderror=someerror
+ end
+ end
+end
+local function isreadable(name)
+ local readable=lfs.isfile(name)
+ if trace_detail then
+ if readable then
+ report_resolving("file %a is readable",name)
+ else
+ report_resolving("file %a is not readable",name)
+ end
+ end
+ return readable
+end
+local function collect_files(names)
+ local filelist,noffiles={},0
+ for k=1,#names do
+ local fname=names[k]
+ if trace_detail then
+ report_resolving("checking name %a",fname)
+ end
+ local bname=filebasename(fname)
+ local dname=filedirname(fname)
+ if dname=="" or find(dname,"^%.") then
+ dname=false
+ else
+ dname=gsub(dname,"%*",".*")
+ dname="/"..dname.."$"
+ end
+ local hashes=instance.hashes
+ for h=1,#hashes do
+ local hash=hashes[h]
+ local blobpath=hash.name
+ local files=blobpath and instance.files[blobpath]
+ if files then
+ if trace_detail then
+ report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ end
+ local blobfile=files[bname]
+ if not blobfile then
+ local rname="remap:"..bname
+ blobfile=files[rname]
+ if blobfile then
+ bname=files[rname]
+ blobfile=files[bname]
+ end
+ end
+ if blobfile then
+ local blobroot=files.__path__ or blobpath
+ if type(blobfile)=='string' then
+ if not dname or find(blobfile,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,blobfile,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ else
+ for kk=1,#blobfile do
+ local vv=blobfile[kk]
+ if not dname or find(vv,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,vv,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ end
+ end
+ end
+ elseif trace_locating then
+ report_resolving("no match in %a (%s)",blobpath,bname)
+ end
+ end
+ end
+ return noffiles>0 and filelist or nil
+end
+local fit={}
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees=instance.foundintrees
+ if usedmethod=="direct" and filename==foundname and fit[foundname] then
+ else
+ local t={
+ filename=filename,
+ format=format~="" and format or nil,
+ filetype=filetype~="" and filetype or nil,
+ usedmethod=usedmethod,
+ foundname=foundname,
+ }
+ fit[foundname]=t
+ foundintrees[#foundintrees+1]=t
+ end
+end
+local function can_be_dir(name)
+ local fakepaths=instance.fakepaths
+ if not fakepaths[name] then
+ if lfs.isdir(name) then
+ fakepaths[name]=1
+ else
+ fakepaths[name]=2
+ end
+ end
+ return fakepaths[name]==1
+end
+local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
+local collect_instance_files
+local function find_analyze(filename,askedformat,allresults)
+ local filetype,wantedfiles,ext='',{},suffixonly(filename)
+ wantedfiles[#wantedfiles+1]=filename
+ if askedformat=="" then
+ if ext=="" or not suffixmap[ext] then
+ local defaultsuffixes=resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname=filename..'.'..defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1]=forcedname
+ filetype=resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype %a",filetype)
+ end
+ end
+ else
+ filetype=resolvers.formatofsuffix(filename)
+ if trace_locating then
+ report_resolving("using suffix based filetype %a",filetype)
+ end
+ end
+ else
+ if ext=="" or not suffixmap[ext] then
+ local format_suffixes=suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
+ end
+ end
+ end
+ filetype=askedformat
+ if trace_locating then
+ report_resolving("using given filetype %a",filetype)
+ end
+ end
+ return filetype,wantedfiles
+end
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file %a found directly",filename)
+ end
+ return "direct",{ filename }
+ end
+end
+local function find_wildcard(filename,allresults)
+ if find(filename,'%*') then
+ if trace_locating then
+ report_resolving("checking wildcard %a",filename)
+ end
+ local method,result=resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard",result
+ end
+ end
+end
+local function find_qualified(filename,allresults,askedformat,alsostripped)
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name %a",filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file %a found",filename)
+ end
+ return "qualified",{ filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file %a",filename)
+ end
+ local forcedname,suffix="",suffixonly(filename)
+ if suffix=="" then
+ local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s=format_suffixes[i]
+ forcedname=filename.."."..s
+ if isreadable(forcedname) then
+ if trace_locating then
+ report_resolving("no suffix, forcing format filetype %a",s)
+ end
+ return "qualified",{ forcedname }
+ end
+ end
+ end
+ end
+ if alsostripped and suffix and suffix~="" then
+ local basename=filebasename(filename)
+ local pattern=lpegmatch(preparetreepattern,filename)
+ local savedformat=askedformat
+ local format=savedformat or ""
+ if format=="" then
+ askedformat=resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat="othertextfiles"
+ end
+ if basename~=filename then
+ local resolved=collect_instance_files(basename,askedformat,allresults)
+ if #resolved==0 then
+ local lowered=lower(basename)
+ if filename~=lowered then
+ resolved=collect_instance_files(lowered,askedformat,allresults)
+ end
+ end
+ resolvers.format=savedformat
+ if #resolved>0 then
+ local result={}
+ for r=1,#resolved do
+ local rr=resolved[r]
+ if find(rr,pattern) then
+ result[#result+1]=rr
+ end
+ end
+ if #result>0 then
+ return "qualified",result
+ end
+ end
+ end
+ end
+end
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found %a by deep scanning",fname)
+ end
+ return fname
+ end
+end
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec=resolvers.variableofformat(filetype)
+ local pathlist=resolvers.expandedpathlist(typespec)
+ local method="intree"
+ if pathlist and #pathlist>0 then
+ local filelist=collect_files(wantedfiles)
+ local dirlist={}
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i]=filedirname(filelist[i][3]).."/"
+ end
+ end
+ if trace_detail then
+ report_resolving("checking filename %a",filename)
+ end
+ local resolve=resolvers.resolve
+ local result={}
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local pathname=lpegmatch(inhibitstripper,path)
+ local doscan=path==pathname
+ if not find (pathname,'//$') then
+ doscan=false
+ end
+ local done=false
+ if filelist then
+ local expression=makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern %a for path %a",expression,pathname)
+ end
+ for k=1,#filelist do
+ local fl=filelist[k]
+ local f=fl[2]
+ local d=dirlist[k]
+ if find(d,expression) or find(resolve(d),expression) then
+ result[#result+1]=resolve(fl[3])
+ done=true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
+ end
+ else
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
+ end
+ end
+ end
+ if done then
+ method="database"
+ else
+ method="filesystem"
+ pathname=gsub(pathname,"/+$","")
+ pathname=resolve(pathname)
+ local scheme=url.hasscheme(pathname)
+ if not scheme or scheme=="file" then
+ local pname=gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ end
+ end
+ if done and not allresults then
+ break
+ end
+ end
+ if #result>0 then
+ return method,result
+ end
+ end
+end
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
+ end
+ local result={}
+ for k=1,#wantedfiles do
+ local fname=wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename=fname
+ result[#result+1]=filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
+ end
+ if #result>0 then
+ return "onpath",result
+ end
+end
+local function find_otherwise(filename,filetype,wantedfiles,allresults)
+ local filelist=collect_files(wantedfiles)
+ local fl=filelist and filelist[1]
+ if fl then
+ return "otherwise",{ resolvers.resolve(fl[3]) }
+ end
+end
+collect_instance_files=function(filename,askedformat,allresults)
+ askedformat=askedformat or ""
+ filename=collapsepath(filename)
+ if allresults then
+ local filetype,wantedfiles=find_analyze(filename,askedformat)
+ local results={
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true,askedformat) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result,status,done={},{},{}
+ for k,r in next,results do
+ local method,list=r[1],r[2]
+ if method and list then
+ for i=1,#list do
+ local c=collapsepath(list[i])
+ if not done[c] then
+ result[#result+1]=c
+ done[c]=true
+ end
+ status[#status+1]=formatters["%-10s: %s"](method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result,status
+ else
+ local method,result,stamp,filetype,wantedfiles
+ if instance.remember then
+ stamp=formatters["%s--%s"](filename,askedformat)
+ result=stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file %a",filename)
+ end
+ return result
+ end
+ end
+ method,result=find_direct(filename)
+ if not result then
+ method,result=find_wildcard(filename)
+ if not result then
+ method,result=find_qualified(filename,false,askedformat)
+ if not result then
+ filetype,wantedfiles=find_analyze(filename,askedformat)
+ method,result=find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result>0 then
+ local foundname=collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result={ foundname }
+ else
+ result={}
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file %a",filename)
+ end
+ instance.found[stamp]=result
+ end
+ return result
+ end
+end
+local function findfiles(filename,filetype,allresults)
+ local result,status=collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result==0 then
+ local lowered=lower(filename)
+ if filename~=lowered then
+ result,status=collect_instance_files(lowered,filetype or "",allresults)
+ end
+ end
+ return result or {},status
+end
+function resolvers.findfiles(filename,filetype)
+ return findfiles(filename,filetype,true)
+end
+function resolvers.findfile(filename,filetype)
+ return findfiles(filename,filetype,false)[1] or ""
+end
+function resolvers.findpath(filename,filetype)
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
+end
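+-- Editorial sketch, not part of the upstream module: the public lookup helpers
+-- above wrap collect_instance_files; file names here are illustrative.
+--
+--   local one = resolvers.findfile("context.mkiv")      -- first hit or ""
+--   local all = resolvers.findfiles("texmf.cnf","cnf")  -- all hits (a table)
+--   local dir = resolvers.findpath("plain.tex","tex")   -- directory of first hit
+--
+-- When nothing is found, findfiles() retries once with the lowercased name.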
+local function findgivenfiles(filename,allresults)
+ local bname,result=filebasename(filename),{}
+ local hashes=instance.hashes
+ local noffound=0
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local files=instance.files[hash.name] or {}
+ local blist=files[bname]
+ if not blist then
+ local rname="remap:"..bname
+ blist=files[rname]
+ if blist then
+ bname=files[rname]
+ blist=files[bname]
+ end
+ end
+ if blist then
+ if type(blist)=='string' then
+ local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then
+ break
+ end
+ end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then break end
+ end
+ end
+ end
+ end
+ end
+ return result
+end
+function resolvers.findgivenfiles(filename)
+ return findgivenfiles(filename,true)
+end
+function resolvers.findgivenfile(filename)
+ return findgivenfiles(filename,false)[1] or ""
+end
+local function doit(path,blist,bname,tag,variant,result,allresults)
+ local done=false
+ if blist and variant then
+ local resolve=resolvers.resolve
+ if type(blist)=='string' then
+ if find(lower(blist),path) then
+ local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ if find(lower(vv),path) then
+ local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ if not allresults then break end
+ end
+ end
+ end
+ end
+ return done
+end
+local makewildcard=Cs(
+ (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
+)
+function resolvers.wildcardpattern(pattern)
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+local function findwildcardfiles(filename,allresults,result)
+ result=result or {}
+ local base=filebasename(filename)
+ local dirn=filedirname(filename)
+ local path=lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name=lower(lpegmatch(makewildcard,base) or base)
+ local files,done=instance.files,false
+ if find(name,"%*") then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ for kk,hh in next,files[hashname] do
+ if not find(kk,"^remap:") then
+ if find(lower(kk),name) then
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
+ if done and not allresults then break end
+ end
+ end
+ end
+ end
+ else
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
+ if done and not allresults then break end
+ end
+ end
+ return result
+end
+function resolvers.findwildcardfiles(filename,result)
+ return findwildcardfiles(filename,true,result)
+end
+function resolvers.findwildcardfile(filename)
+ return findwildcardfiles(filename,false)[1] or ""
+end
+function resolvers.automount()
+end
+function resolvers.load(option)
+ statistics.starttiming(instance)
+ identify_configuration_files()
+ load_configuration_files()
+ if option~="nofiles" then
+ load_databases()
+ resolvers.automount()
+ end
+ statistics.stoptiming(instance)
+ local files=instance.files
+ return files and next(files) and true
+end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+local function report(str)
+ if trace_locating then
+ report_resolving(str)
+ else
+ print(str)
+ end
+end
+function resolvers.dowithfilesandreport(command,files,...)
+ if files and #files>0 then
+ if trace_locating then
+ report('')
+ end
+ if type(files)=="string" then
+ files={ files }
+ end
+ for f=1,#files do
+ local file=files[f]
+ local result=command(file,...)
+ if type(result)=='string' then
+ report(result)
+ else
+ for i=1,#result do
+ report(result[i])
+ end
+ end
+ end
+ end
+end
+function resolvers.showpath(str)
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+end
+function resolvers.registerfile(files,name,path)
+ if files[name] then
+ if type(files[name])=='string' then
+ files[name]={ files[name],path }
+ else
+ files[name]=path
+ end
+ else
+ files[name]=path
+ end
+end
+function resolvers.dowithpath(name,func)
+ local pathlist=resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.cleanpath(pathlist[i]))
+ end
+end
+function resolvers.dowithvariable(name,func)
+ func(expandedvariable(name))
+end
+function resolvers.locateformat(name)
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fullname=file.addsuffix(barename,"fmt")
+ local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
+ if fmtname=="" then
+ fmtname=resolvers.findfile(fullname)
+ fmtname=resolvers.cleanpath(fmtname)
+ end
+ if fmtname~="" then
+ local barename=file.removesuffix(fmtname)
+ local luaname=file.addsuffix(barename,luasuffixes.lua)
+ local lucname=file.addsuffix(barename,luasuffixes.luc)
+ local luiname=file.addsuffix(barename,luasuffixes.lui)
+ if lfs.isfile(luiname) then
+ return barename,luiname
+ elseif lfs.isfile(lucname) then
+ return barename,lucname
+ elseif lfs.isfile(luaname) then
+ return barename,luaname
+ end
+ end
+ return nil,nil
+end
+function resolvers.booleanvariable(str,default)
+ local b=resolvers.expansion(str)
+ if b=="" then
+ return default
+ else
+ b=toboolean(b)
+ return (b==nil and default) or b
+ end
+end
+function resolvers.dowithfilesintree(pattern,handle,before,after)
+ local instance=resolvers.instance
+ local hashes=instance.hashes
+ for i=1,#hashes do
+ local hash=hashes[i]
+ local blobtype=hash.type
+ local blobpath=hash.name
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files=instance.files[blobpath]
+ local total,checked,done=0,0,0
+ if files then
+ for k,v in table.sortedhash(files) do
+ total=total+1
+ if find(k,"^remap:") then
+ elseif find(k,pattern) then
+ if type(v)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,v,k) then
+ done=done+1
+ end
+ else
+ checked=checked+#v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done=done+1
+ end
+ end
+ end
+ end
+ end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
+ end
+ end
+end
+resolvers.obsolete=resolvers.obsolete or {}
+local obsolete=resolvers.obsolete
+resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
+resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-pre"] = package.loaded["data-pre"] or true
+
+-- original size: 6430, stripped down to: 4219
+
+if not modules then modules={} end modules ['data-pre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local getenv=resolvers.getenv
+local P,S,R,C,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.match
+local joinpath,basename,dirname=file.join,file.basename,file.dirname
+local getmetatable,rawset,type=getmetatable,rawset,type
+prefixes.environment=function(str)
+ return cleanpath(expansion(str))
+end
+prefixes.relative=function(str,n)
+ if io.exists(str) then
+ elseif io.exists("./"..str) then
+ str="./"..str
+ else
+ local p="../"
+ for i=1,n or 2 do
+ if io.exists(p..str) then
+ str=p..str
+ break
+ else
+ p=p.."../"
+ end
+ end
+ end
+ return cleanpath(str)
+end
+prefixes.auto=function(str)
+ local fullname=prefixes.relative(str)
+ if not lfs.isfile(fullname) then
+ fullname=prefixes.locate(str)
+ end
+ return fullname
+end
+prefixes.locate=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath((fullname~="" and fullname) or str)
+end
+prefixes.filename=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(basename((fullname~="" and fullname) or str))
+end
+prefixes.pathname=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(dirname((fullname~="" and fullname) or str))
+end
+prefixes.selfautoloc=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
+end
+prefixes.selfautoparent=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
+end
+prefixes.selfautodir=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
+end
+prefixes.home=function(str)
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+local function toppath()
+ local inputstack=resolvers.inputstack
+ if not inputstack then
+ return "."
+ end
+ local pathname=dirname(inputstack[#inputstack] or "")
+ if pathname=="" then
+ return "."
+ else
+ return pathname
+ end
+end
+resolvers.toppath=toppath
+prefixes.toppath=function(str)
+ return cleanpath(joinpath(toppath(),str))
+end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
+function resolvers.allprefixes(separator)
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
+ end
+ end
+ return all
+end
+local function _resolve_(method,target)
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+local resolved,abstract={},{}
+function resolvers.resetresolve(str)
+ resolved,abstract={},{}
+end
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local function resolve(str)
+ if type(str)=="table" then
+ local t={}
+ for i=1,#str do
+ t[i]=resolve(str[i])
+ end
+ return t
+ else
+ local res=resolved[str]
+ if not res then
+ res=lpegmatch(pattern,str)
+ resolved[str]=res
+ abstract[res]=str
+ end
+ return res
+ end
+end
+local function unresolve(str)
+ return abstract[str] or str
+end
+resolvers.resolve=resolve
+resolvers.unresolve=unresolve
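+-- Editorial sketch, not part of the upstream module: resolve() expands
+-- "prefix:" notation with the prefixes defined above; arguments are
+-- illustrative.
+--
+--   resolvers.resolve("home:texmf/tex")       -- cleanpath(joinpath($HOME,"texmf/tex"))
+--   resolvers.resolve("selfautoloc:../share") -- relative to the engine binary path
+--   resolvers.resolve("loc:context.mkiv")     -- located via findgivenfile, else the name itself
+--
+-- unresolve() maps a previously resolved string back to its "prefix:" form via
+-- the abstract cache.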
+if type(os.uname)=="function" then
+ for k,v in next,os.uname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
+ end
+ end
+end
+if os.type=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ getmetatable(prefixes).__newindex=makepattern
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-inp"] = package.loaded["data-inp"] or true
+
+-- original size: 910, stripped down to: 823
+
+if not modules then modules={} end modules ['data-inp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local methodhandler=resolvers.methodhandler
+local registermethod=resolvers.registermethod
+local finders=allocate { helpers={},notfound=function() end }
+local openers=allocate { helpers={},notfound=function() end }
+local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
+registermethod("finders",finders,"uri")
+registermethod("openers",openers,"uri")
+registermethod("loaders",loaders,"uri")
+resolvers.finders=finders
+resolvers.openers=openers
+resolvers.loaders=loaders
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-out"] = package.loaded["data-out"] or true
+
+-- original size: 530, stripped down to: 475
+
+if not modules then modules={} end modules ['data-out']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registermethod=resolvers.registermethod
+local savers=allocate { helpers={} }
+resolvers.savers=savers
+registermethod("savers",savers,"uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-fil"] = package.loaded["data-fil"] or true
+
+-- original size: 3801, stripped down to: 3231
+
+if not modules then modules={} end modules ['data-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_files=logs.reporter("resolvers","files")
+local resolvers=resolvers
+local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
+local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
+local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
+function locators.file(specification)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator %a found as %a",name,realname)
+ end
+ resolvers.appendhash('file',name,true)
+ elseif trace_locating then
+ report_files("file locator %a not found",name)
+ end
+end
+function hashers.file(specification)
+ local name=specification.filename
+ local content=caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+function generators.file(specification)
+ local path=specification.filename
+ local content=resolvers.scanfiles(path,false,true)
+ resolvers.registerfilehash(path,content,true)
+end
+concatinators.file=file.join
+function finders.file(specification,filetype)
+ local filename=specification.filename
+ local foundname=resolvers.findfile(filename,filetype)
+ if foundname and foundname~="" then
+ if trace_locating then
+ report_files("file finder: %a found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %a not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader=function() return f:read () end,
+ close=function() logs.show_close(filename) return f:close() end,
+ }
+end
+function openers.file(specification,filetype)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener: %a opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_files("file opener: %a not found",filename)
+ end
+ return openers.notfound()
+end
+function loaders.file(specification,filetype)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader: %a loaded",filename)
+ end
+ local s=f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true,s,#s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader: %a not found",filename)
+ end
+ return loaders.notfound()
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-con"] = package.loaded["data-con"] or true
+
+-- original size: 4940, stripped down to: 3580
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w%d]+","-"))
+end
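+-- Editorial sketch, not part of the upstream module: the usual define/read/
+-- write cycle; category, subcategory and data are illustrative.
+--
+--   local cache = containers.define("fonts","demo",1.001,true)
+--   local data  = containers.read(cache,"somefont")
+--   if not data then
+--    data = { glyphs = { } }              -- expensive work goes here
+--    data = containers.write(cache,"somefont",data)
+--   end
+--
+-- write() stamps data.cache_version with the container version so that a later
+-- read() rejects entries produced by an older caller.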
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-use"] = package.loaded["data-use"] or true
+
+-- original size: 3913, stripped down to: 2998
+
+if not modules then modules={} end modules ['data-use']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_mounts=logs.reporter("resolvers","mounts")
+local resolvers=resolvers
+resolvers.automounted=resolvers.automounted or {}
+function resolvers.automount(usecache)
+ local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
+ if (not mountpaths or #mountpaths==0) and usecache then
+ mountpaths=caches.getreadablepaths("mount")
+ end
+ if mountpaths and #mountpaths>0 then
+ statistics.starttiming(resolvers.instance)
+ for k=1,#mountpaths do
+ local root=mountpaths[k]
+ local f=io.open(root.."/url.tmi")
+ if f then
+ for line in f:lines() do
+ if line then
+ if find(line,"^[%%#%-]") then
+ elseif find(line,"^zip://") then
+ if trace_locating then
+ report_mounts("mounting %a",line)
+ end
+ table.insert(resolvers.automounted,line)
+ resolvers.usezipfile(line)
+ end
+ end
+ end
+ f:close()
+ end
+ end
+ statistics.stoptiming(resolvers.instance)
+ end
+end
+statistics.register("used config file",function() return caches.configfiles() end)
+statistics.register("used cache path",function() return caches.usedpaths() end)
+function statistics.savefmtstatus(texname,formatbanner,sourcefile)
+ local enginebanner=status.list().banner
+ if formatbanner and enginebanner and sourcefile then
+ local luvname=file.replacesuffix(texname,"luv")
+ local luvdata={
+ enginebanner=enginebanner,
+ formatbanner=formatbanner,
+ sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
+ sourcefile=sourcefile,
+ }
+ io.savedata(luvname,table.serialize(luvdata,true))
+ end
+end
+function statistics.checkfmtstatus(texname)
+ local enginebanner=status.list().banner
+ if enginebanner and texname then
+ local luvname=file.replacesuffix(texname,"luv")
+ if lfs.isfile(luvname) then
+ local luv=dofile(luvname)
+ if luv and luv.sourcefile then
+ local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
+ local luvbanner=luv.enginebanner or "?"
+ if luvbanner~=enginebanner then
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
+ end
+ local luvhash=luv.sourcehash or "?"
+ if luvhash~=sourcehash then
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
+ end
+ else
+ return "invalid status file"
+ end
+ else
+ return "missing status file"
+ end
+ end
+ return true
+end
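+-- Editorial sketch, not part of the upstream module: savefmtstatus and
+-- checkfmtstatus act as a pair, at format generation time and at run time;
+-- the names and banner are illustrative.
+--
+--   statistics.savefmtstatus("cont-en","ConTeXt format banner","context.mkiv")
+--   local ok = statistics.checkfmtstatus("cont-en")
+--   if ok ~= true then
+--    print(ok)   -- engine, source or status-file mismatch message
+--   end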
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-zip"] = package.loaded["data-zip"] or true
+
+-- original size: 8489, stripped down to: 6757
+
+if not modules then modules={} end modules ['data-zip']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,find,match=string.format,string.find,string.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_zip=logs.reporter("resolvers","zip")
+local resolvers=resolvers
+zip=zip or {}
+local zip=zip
+zip.archives=zip.archives or {}
+local archives=zip.archives
+zip.registeredfiles=zip.registeredfiles or {}
+local registeredfiles=zip.registeredfiles
+local limited=false
+directives.register("system.inputmode",function(v)
+ if not limited then
+ local i_limiter=io.i_limiter(v)
+ if i_limiter then
+ zip.open=i_limiter.protect(zip.open)
+ limited=true
+ end
+ end
+end)
+local function validzip(str)
+ if not find(str,"^zip://") then
+ return "zip:///"..str
+ else
+ return str
+ end
+end
+function zip.openarchive(name)
+ if not name or name=="" then
+ return nil
+ else
+ local arch=archives[name]
+ if not arch then
+ local full=resolvers.findfile(name) or ""
+ arch=(full~="" and zip.open(full)) or false
+ archives[name]=arch
+ end
+ return arch
+ end
+end
+function zip.closearchive(name)
+ if not name or (name=="" and archives[name]) then
+ zip.close(archives[name])
+ archives[name]=nil
+ end
+end
+function resolvers.locators.zip(specification)
+ local archive=specification.filename
+ local zipfile=archive and archive~="" and zip.openarchive(archive)
+ if trace_locating then
+ if zipfile then
+ report_zip("locator: archive %a found",archive)
+ else
+ report_zip("locator: archive %a not found",archive)
+ end
+ end
+end
+function resolvers.hashers.zip(specification)
+ local archive=specification.filename
+ if trace_locating then
+ report_zip("loading file %a",archive)
+ end
+ resolvers.usezipfile(specification.original)
+end
+function resolvers.concatinators.zip(zipfile,path,name)
+ if not path or path=="" then
+ return format('%s?name=%s',zipfile,name)
+ else
+ return format('%s?name=%s/%s',zipfile,path,name)
+ end
+end
+function resolvers.finders.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("finder: archive %a found",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ dfile=zfile:close()
+ if trace_locating then
+ report_zip("finder: file %a found",queryname)
+ end
+ return specification.original
+ elseif trace_locating then
+ report_zip("finder: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("finder: unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("finder: %a not found",original)
+ end
+ return resolvers.finders.notfound()
+end
+function resolvers.openers.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("opener; archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ if trace_locating then
+ report_zip("opener: file %a found",queryname)
+ end
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
+ elseif trace_locating then
+ report_zip("opener: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("opener: unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("opener: %a not found",original)
+ end
+ return resolvers.openers.notfound()
+end
+function resolvers.loaders.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("loader: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ logs.show_load(original)
+ if trace_locating then
+ report_zip("loader; file %a loaded",original)
+ end
+ local s=dfile:read("*all")
+ dfile:close()
+ return true,s,#s
+ elseif trace_locating then
+ report_zip("loader: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("loader; unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("loader: %a not found",original)
+ end
+ return resolvers.loaders.notfound()
+end
+function resolvers.usezipfile(archive)
+ local specification=resolvers.splitmethod(archive)
+ local archive=specification.filename
+ if archive and not registeredfiles[archive] then
+ local z=zip.openarchive(archive)
+ if z then
+ local instance=resolvers.instance
+ local tree=url.query(specification.query).tree or ""
+ if trace_locating then
+ report_zip("registering: archive %a",archive)
+ end
+ statistics.starttiming(instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive)
+ registeredfiles[archive]=z
+ instance.files[archive]=resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(instance)
+ elseif trace_locating then
+ report_zip("registering: unknown archive %a",archive)
+ end
+ elseif trace_locating then
+ report_zip("registering: archive %a not found",archive)
+ end
+end
+function resolvers.registerzipfile(z,tree)
+ local files,filter={},""
+ if tree=="" then
+ filter="^(.+)/(.-)$"
+ else
+ filter=format("^%s/(.+)/(.-)$",tree)
+ end
+ if trace_locating then
+ report_zip("registering: using filter %a",filter)
+ end
+ local register,n=resolvers.registerfile,0
+ for i in z:files() do
+ local path,name=match(i.filename,filter)
+ if path then
+ if name and name~='' then
+ register(files,name,path)
+ n=n+1
+ else
+ end
+ else
+ register(files,i.filename,'')
+ n=n+1
+ end
+ end
+ report_zip("registering: %s files registered",n)
+ return files
+end
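+-- Editorial sketch, not part of the upstream module: making a zip archive
+-- searchable; the archive name and tree query are illustrative.
+--
+--   resolvers.usezipfile("zip:///data/extra-texmf.zip?tree=texmf")
+--   -- the archive is prepended as a 'zip' hash, TEXMF is extended with the
+--   -- archive spec and its file list is registered, after which lookups can
+--   -- yield entries inside the archive that the zip finders, openers and
+--   -- loaders above know how to handle.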
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tre"] = package.loaded["data-tre"] or true
+
+-- original size: 2508, stripped down to: 2074
+
+if not modules then modules={} end modules ['data-tre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,gsub,format=string.find,string.gsub,string.format
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_trees=logs.reporter("resolvers","trees")
+local resolvers=resolvers
+local done,found,notfound={},{},resolvers.finders.notfound
+function resolvers.finders.tree(specification)
+ local spec=specification.filename
+ local fnd=found[spec]
+ if fnd==nil then
+ if spec~="" then
+ local path,name=file.dirname(spec),file.basename(spec)
+ if path=="" then path="." end
+ local hash=done[path]
+ if not hash then
+ local pattern=path.."/*"
+ hash=dir.glob(pattern)
+ done[path]=hash
+ end
+ local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
+ for k=1,#hash do
+ local v=hash[k]
+ if find(v,pattern) then
+ found[spec]=v
+ return v
+ end
+ end
+ end
+ fnd=notfound()
+ found[spec]=fnd
+ end
+ return fnd
+end
+function resolvers.locators.tree(specification)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_trees("locator %a found",realname)
+ end
+ resolvers.appendhash('tree',name,false)
+ elseif trace_locating then
+ report_trees("locator %a not found",name)
+ end
+end
+function resolvers.hashers.tree(specification)
+ local name=specification.filename
+ if trace_locating then
+ report_trees("analysing %a",name)
+ end
+ resolvers.methodhandler("hashers",name)
+ resolvers.generators.file(specification)
+end
+resolvers.concatinators.tree=resolvers.concatinators.file
+resolvers.generators.tree=resolvers.generators.file
+resolvers.openers.tree=resolvers.openers.file
+resolvers.loaders.tree=resolvers.loaders.file
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-sch"] = package.loaded["data-sch"] or true
+
+-- original size: 6202, stripped down to: 5149
+
+if not modules then modules={} end modules ['data-sch']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local load=load
+local gsub,concat,format=string.gsub,table.concat,string.format
+local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
+local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
+local report_schemes=logs.reporter("resolvers","schemes")
+local http=require("socket.http")
+local ltn12=require("ltn12")
+local resolvers=resolvers
+local schemes=resolvers.schemes or {}
+resolvers.schemes=schemes
+local cleaners={}
+schemes.cleaners=cleaners
+local threshold=24*60*60
+directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
+function cleaners.none(specification)
+ return specification.original
+end
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-"))
+end
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+local cleaner=cleaners.strip
+directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
+function resolvers.schemes.cleanname(specification)
+ local hash=cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %a to %a",specification.original,hash)
+ end
+ return hash
+end
+local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
+local function runcurl(name,cachename)
+ local command="curl --silent --create-dirs --output "..cachename.." "..name
+ os.spawn(command)
+end
+local function fetch(specification)
+ local original=specification.original
+ local scheme=specification.scheme
+ local cleanname=schemes.cleanname(specification)
+ local cachename=caches.setfirstwritablefile(cleanname,"schemes")
+ if not cached[original] then
+ statistics.starttiming(schemes)
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
+ cached[original]=cachename
+ local handler=handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
+ end
+ if io.exists(cachename) then
+ cached[original]=cachename
+ if trace_schemes then
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
+ end
+ else
+ cached[original]=""
+ if trace_schemes then
+ report_schemes("using missing %a, protocol %a",original,scheme)
+ end
+ end
+ loaded[scheme]=loaded[scheme]+1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing %a, protocol %a",original,scheme)
+ end
+ reused[scheme]=reused[scheme]+1
+ end
+ return cached[original]
+end
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
+end
+local opener=openers.file
+local loader=loaders.file
+local function install(scheme,handler,newthreshold)
+ handlers [scheme]=handler
+ loaded [scheme]=0
+ reused [scheme]=0
+ finders [scheme]=finder
+ openers [scheme]=opener
+ loaders [scheme]=loader
+ thresholds[scheme]=newthreshold or threshold
+end
+schemes.install=install
+local function http_handler(specification,cachename)
+ local tempname=cachename..".tmp"
+ local f=io.open(tempname,"wb")
+ local status,message=http.request {
+ url=specification.original,
+ sink=ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
+install('http',http_handler)
+install('https')
+install('ftp')
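+-- Editorial sketch, not part of the upstream module: install() wires a scheme
+-- into the finder, opener and loader tables with a cache threshold in seconds.
+-- The scheme name and handler below are hypothetical.
+--
+--   local function dummy_handler(specification,cachename)
+--    io.savedata(cachename,"fetched: "..specification.original)
+--    return cachename
+--   end
+--   schemes.install("dummy",dummy_handler,60)
+--   -- a "dummy://..." reference is now fetched into the "schemes" cache and
+--   -- reused as long as the cached copy is younger than 60 seconds.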
+statistics.register("scheme handling time",function()
+ local l,r,nl,nr={},{},0,0
+ for k,v in table.sortedhash(loaded) do
+ if v>0 then
+ nl=nl+1
+ l[nl]=k..":"..v
+ end
+ end
+ for k,v in table.sortedhash(reused) do
+ if v>0 then
+ nr=nr+1
+ r[nr]=k..":"..v
+ end
+ end
+ local n=nl+nr
+ if n>0 then
+ l=nl>0 and concat(l) or "none"
+ r=nr>0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes),n,threshold,l,r)
+ else
+ return nil
+ end
+end)
+local httprequest=http.request
+local toquery=url.toquery
+local function fetchstring(url,data)
+ local q=data and toquery(data)
+ if q then
+ url=url.."?"..q
+ end
+ local reply=httprequest(url)
+ return reply
+end
+schemes.fetchstring=fetchstring
+function schemes.fetchtable(url,data)
+ local reply=fetchstring(url,data)
+ if reply then
+ local s=load("return "..reply)
+ if s then
+ return s()
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-lua"] = package.loaded["data-lua"] or true
+
+-- original size: 4861, stripped down to: 3693
+
+if not modules then modules={} end modules ['data-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers,package=resolvers,package
+local gsub=string.gsub
+local concat=table.concat
+local addsuffix=file.addsuffix
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local libsuffixes={ 'tex','lua' }
+local clibsuffixes={ 'lib' }
+local libformats={ 'TEXINPUTS','LUAINPUTS' }
+local clibformats={ 'CLUAINPUTS' }
+local helpers=package.helpers
+trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
+trackers.register("resolvers.locating",function(v) helpers.trace=v end)
+helpers.report=logs.reporter("resolvers","libraries")
+local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
+local function cleanpath(path)
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+helpers.cleanpath=cleanpath
+local loadedaslib=helpers.loadedaslib
+local loadedbylua=helpers.loadedbylua
+local loadedbypath=helpers.loadedbypath
+local notloaded=helpers.notloaded
+local getlibpaths=package.libpaths
+local getclibpaths=package.clibpaths
+function helpers.libpaths(libhash)
+ local libpaths={}
+ for i=1,#libformats do
+ local paths=resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1]=path
+ libhash[path]=true
+ end
+ end
+ end
+ return libpaths
+end
+function helpers.clibpaths(clibhash)
+ local clibpaths={}
+ for i=1,#clibformats do
+ local paths=resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1]=path
+ clibhash[path]=true
+ end
+ end
+ end
+ return clibpaths
+end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating %a as %a using formats %a",rawname,name,suffixes)
+ end
+ for i=1,#suffixes do
+ local format=suffixes[i]
+ local resolved=resolvers.findfile(name,format) or ""
+ if trace then
+ report("checking %a using format %a",name,format)
+ end
+ if resolved~="" then
+ if trace then
+ report("lib %a located on %a",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbyformat=loadedbyformat
+local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+helpers.lualibfile=lualibfile
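+-- Editorial sketch, not part of the upstream module: lualibfile() normalizes a
+-- require()-style name to a path (examples are illustrative).
+--
+--   lualibfile("foo.bar.baz")  --> "foo/bar/baz"
+--
+-- helpers.loaded() below then tries the tex/lua and lib formats via findfile,
+-- the paths derived from TEXINPUTS, LUAINPUTS and CLUAINPUTS, and finally the
+-- regular lua loading, before reporting the module as not loaded.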
+function helpers.loaded(name)
+ local thename=lualibfile(name)
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix)
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbyformat(luaname,name,libsuffixes,false)
+ if done then
+ return result
+ end
+ local done,result=loadedbyformat(libname,name,clibsuffixes,true)
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
+resolvers.loadlualib=require
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-aux"] = package.loaded["data-aux"] or true
+
+-- original size: 2394, stripped down to: 2005
+
+if not modules then modules={} end modules ['data-aux']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find=string.find
+local type,next=type,next
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local resolvers=resolvers
+local report_scripts=logs.reporter("resolvers","scripts")
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
+ newname=file.addsuffix(newname,"lua")
+ local oldscript=resolvers.cleanpath(oldname)
+ if trace_locating then
+ report_scripts("to be replaced old script %a",oldscript)
+ end
+ local newscripts=resolvers.findfiles(newname) or {}
+ if #newscripts==0 then
+ if trace_locating then
+ report_scripts("unable to locate new script")
+ end
+ else
+ for i=1,#newscripts do
+ local newscript=resolvers.cleanpath(newscripts[i])
+ if trace_locating then
+ report_scripts("checking new script %a",newscript)
+ end
+ if oldscript==newscript then
+ if trace_locating then
+ report_scripts("old and new script are the same")
+ end
+ elseif not find(newscript,scriptpath) then
+ if trace_locating then
+ report_scripts("new script should come from %a",scriptpath)
+ end
+ elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
+ if trace_locating then
+ report_scripts("invalid new script name")
+ end
+ else
+ local newdata=io.loaddata(newscript)
+ if newdata then
+ if trace_locating then
+ report_scripts("old script content replaced by new content")
+ end
+ io.savedata(oldscript,newdata)
+ break
+ elseif trace_locating then
+ report_scripts("unable to load new script")
+ end
+ end
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
+
+-- original size: 2600, stripped down to: 1627
+
+if not modules then modules={} end modules ['data-tmf']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local report_tds=logs.reporter("resolvers","tds")
+function resolvers.load_tree(tree,resolve)
+ if type(tree)=="string" and tree~="" then
+ local getenv,setenv=resolvers.getenv,resolvers.setenv
+ local texos="texmf-"..os.platform
+ local oldroot=environment.texroot
+ local newroot=file.collapsepath(tree)
+ local newtree=file.join(newroot,texos)
+ local newpath=file.join(newtree,"bin")
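+ -- for example, tree "/opt/context" resolves to "/opt/context/texmf-<platform>"
+ -- with its "bin" subdirectory as the new binary path (hypothetical location)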
+ if not lfs.isdir(newtree) then
+ report_tds("no %a under tree %a",texos,tree)
+ os.exit()
+ end
+ if not lfs.isdir(newpath) then
+ report_tds("no '%s/bin' under tree %a",texos,tree)
+ os.exit()
+ end
+ local texmfos=newtree
+ environment.texroot=newroot
+ environment.texos=texos
+ environment.texmfos=texmfos
+ if resolve then
+ resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
+ end
+ setenv('SELFAUTOPARENT',newroot)
+ setenv('SELFAUTODIR',newtree)
+ setenv('SELFAUTOLOC',newpath)
+ setenv('TEXROOT',newroot)
+ setenv('TEXOS',texos)
+ setenv('TEXMFOS',texmfos)
+ setenv('TEXMFCNF',resolvers.luacnfspec,true)
+ setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
+ report_tds()
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-lst"] = package.loaded["data-lst"] or true
+
+-- original size: 2654, stripped down to: 2301
+
+if not modules then modules={} end modules ['data-lst']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
+resolvers.listers=resolvers.listers or {}
+local resolvers=resolvers
+local report_lists=logs.reporter("resolvers","lists")
+local function tabstr(str)
+ if type(str)=='table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+function resolvers.listers.variables(pattern)
+ local instance=resolvers.instance
+ local environment=instance.environment
+ local variables=instance.variables
+ local expansions=instance.expansions
+ local pattern=upper(pattern or "")
+ local configured={}
+ local order=instance.order
+ for i=1,#order do
+ for k,v in next,order[i] do
+ if v~=nil and configured[k]==nil then
+ configured[k]=v
+ end
+ end
+ end
+ local env=fastcopy(environment)
+ local var=fastcopy(variables)
+ local exp=fastcopy(expansions)
+ for key,value in sortedpairs(configured) do
+ if key~="" and (pattern=="" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ end
+ end
+ instance.environment=fastcopy(env)
+ instance.variables=fastcopy(var)
+ instance.expansions=fastcopy(exp)
+end
+local report_resolved=logs.reporter("system","resolved")
+function resolvers.listers.configurations()
+ local configurations=resolvers.instance.specification
+ for i=1,#configurations do
+ report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ end
+ report_resolved("")
+ local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+ for i=1,#list do
+ local li=resolvers.resolve(list[i])
+ if lfs.isdir(li) then
+ report_resolved("path - %s",li)
+ else
+ report_resolved("path + %s",li)
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lib"] = package.loaded["util-lib"] or true
+
+-- original size: 8911, stripped down to: 4216
+
+if not modules then modules={} end modules ['util-lib']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find=string.gsub,string.find
+local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
+local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
+local loaded=package.loaded
+local report_swiglib=logs.reporter("swiglib")
+local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
+local function requireswiglib(required,version)
+ local library=loaded[required]
+ if library==nil then
+ local required_full=gsub(required,"%.","/")
+ local required_path=pathpart(required_full)
+ local required_base=nameonly(required_full)
+ local required_name=required_base.."."..os.libsuffix
+ local version=type(version)=="string" and version~="" and version or false
+ local function check(locate,...)
+ local found_library=nil
+ if version then
+ local asked_library=joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ if not found_library or found_library==""then
+ asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ else
+ local asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ return found_library and found_library~="" and found_library or false
+ end
+ local found_library=findfile and check(findfile,"lib")
+ if findfiles and not found_library then
+ local asked_library=joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list=findfiles(asked_library,"lib",true)
+ if list and #list>0 then
+ table.sort(list)
+ found_library=list[#list]
+ end
+ end
+ if not found_library then
+ package.extraclibpath(environment.ownpath)
+ local paths=package.clibpaths()
+ for i=1,#paths do
+ local found_library=check(lfs.isfile)
+ if found_library then
+ break
+ end
+ end
+ end
+ if not found_library then
+ if trace_swiglib then
+ report_swiglib("not found: %a",asked_library)
+ end
+ library=false
+ else
+ local path=pathpart(found_library)
+ local base=nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ library=package.loadlib(found_library,"luaopen_"..required_base)
+ if type(library)=="function" then
+ library=library()
+ else
+ library=false
+ end
+ dir.pop()
+ end
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required]=library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+local savedrequire=require
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
+local swiglibs={}
+function swiglib(name,version)
+ local library=swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ report_swiglib("loading %a",name)
+ library=requireswiglib("swiglib."..name,version)
+ swiglibs[name]=library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+statistics.register("used swiglibs",function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
+
+-- original size: 5703, stripped down to: 2507
+
+if not modules then modules={} end modules ['luat-sta']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gmatch,match=string.gmatch,string.match
+local type=type
+states=states or {}
+local states=states
+states.data=states.data or {}
+local data=states.data
+states.hash=states.hash or {}
+local hash=states.hash
+states.tag=states.tag or ""
+states.filename=states.filename or ""
+function states.save(filename,tag)
+ tag=tag or states.tag
+ filename=file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
+ )
+end
+function states.load(filename,tag)
+ states.filename=filename
+ states.tag=tag or "whatever"
+ states.filename=file.addsuffix(states.filename,'lus')
+ data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
+end
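+-- set_by_tag: dotted keys build nested tables in the data store while the hash
+-- keeps the full key, e.g. set_by_tag("whatever","colors.background","blue")
+-- sets data.whatever.colors.background as well as hash.whatever["colors.background"]
+-- (hypothetical tag and key)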
+local function set_by_tag(tag,key,value,default,persistent)
+ local d,h=data[tag],hash[tag]
+ if d then
+ if type(d)=="table" then
+ local dkey,hkey=key,key
+ local pre,post=match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk=d[k]
+ if not dk then
+ dk={}
+ d[k]=dk
+ elseif type(dk)=="string" then
+ break
+ end
+ d=dk
+ end
+ dkey,hkey=post,key
+ end
+ if value==nil then
+ value=default
+ elseif value==false then
+ elseif persistent then
+ value=value or d[dkey] or default
+ else
+ value=value or default
+ end
+ d[dkey],h[hkey]=value,value
+ elseif type(d)=="string" then
+ data[tag],hash[tag]=value,value
+ end
+ end
+end
+local function get_by_tag(tag,key,default)
+ local h=hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d=data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk=d[k]
+ if dk~=nil then
+ d=dk
+ else
+ return default
+ end
+ end
+ if d==false then
+ return false
+ else
+ return d or default
+ end
+ end
+ end
+end
+states.set_by_tag=set_by_tag
+states.get_by_tag=get_by_tag
+function states.set(key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
+end
+function states.get(key,default)
+ return get_by_tag(states.tag,key,default)
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
+
+-- original size: 5951, stripped down to: 4922
+
+if not modules then modules={} end modules ['luat-fmt']={
+ version=1.001,
+ comment="companion to mtxrun",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format=string.format
+local concat=table.concat
+local quoted=string.quoted
+local luasuffixes=utilities.lua.suffixes
+local report_format=logs.reporter("resolvers","formats")
+local function primaryflags()
+ local trackers=environment.argument("trackers")
+ local directives=environment.argument("directives")
+ local flags={}
+ if trackers and trackers~="" then
+  flags[#flags+1]="--trackers="..quoted(trackers)
+ end
+ if directives and directives~="" then
+  flags[#flags+1]="--directives="..quoted(directives)
+ end
+ if environment.argument("jit") then
+  flags[#flags+1]="--jiton"
+ end
+ return concat(flags," ")
+end
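+-- the returned string is injected verbatim into the engine command line, for
+-- instance --trackers="resolvers.locating" when only that flag is given
+-- (hypothetical value)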
+function environment.make_format(name)
+ local engine=environment.ownmain or "luatex"
+ local olddir=dir.current()
+ local path=caches.getwritablepath("formats",engine) or ""
+ if path~="" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ local texsourcename=file.addsuffix(name,"mkiv")
+ local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename=="" then
+ texsourcename=file.addsuffix(name,"tex")
+ fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename=="" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
+ local specificationname=file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname=="" then
+ specificationname=file.join(texsourcepath,"context.lus")
+ fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname=="" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath=file.dirname(fullspecificationname)
+ local usedluastub=nil
+ local usedlualibs=dofile(fullspecificationname)
+ if type(usedlualibs)=="string" then
+ usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs)=="table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename=file.basename(name)
+ local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub=lucstubname
+ else
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub=luastubname
+ end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ local mp=dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name=mp[i]
+ report_format("removing related mplib format %a",file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
+function environment.run_format(name,data,more)
+ if name and name~="" then
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname=="" then
+ fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname=resolvers.cleanpath(fmtname)
+ if fmtname=="" then
+ report_format("no format with name %a",name)
+ else
+ local barename=file.removesuffix(name)
+ local luaname=file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname=file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
+
+
+end -- of closure
+
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
+-- skipped libraries : -
+-- original bytes : 658276
+-- stripped bytes : 241564
+
+-- end library merge
+
+-- We need this hack till luatex is fixed.
+--
+-- for k,v in pairs(arg) do print(k,v) end
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
+end
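+-- so a call like "luatex --luaonly mtxrun.lua --script fonts" is remapped to
+-- arg[0] == "mtxrun.lua" with arg[1], arg[2] == "--script", "fonts"
+-- (hypothetical command line)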
+
+-- End of hack.
+
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+
+local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = environment and environment.ownpath or ownpath
+
+local ownlibs = { -- order can be made better
+
+ 'l-lua.lua',
+ 'l-lpeg.lua',
+ 'l-function.lua',
+ 'l-string.lua',
+ 'l-table.lua',
+ 'l-io.lua',
+ 'l-number.lua',
+ 'l-set.lua',
+ 'l-os.lua',
+ 'l-file.lua',
+ 'l-md5.lua',
+ 'l-url.lua',
+ 'l-dir.lua',
+ 'l-boolean.lua',
+ 'l-unicode.lua',
+ 'l-math.lua',
+
+ 'util-str.lua', -- code might move to l-string
+ 'util-tab.lua',
+ 'util-sto.lua',
+ 'util-prs.lua',
+ 'util-fmt.lua',
+
+ 'trac-set.lua',
+ 'trac-log.lua',
+ 'trac-inf.lua', -- was before trac-set
+ 'trac-pro.lua', -- not really needed
+ 'util-lua.lua', -- indeed here?
+ 'util-deb.lua',
+
+ 'util-mrg.lua',
+ 'util-tpl.lua',
+
+ 'util-env.lua',
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
+ 'lxml-tab.lua',
+ 'lxml-lpt.lua',
+ -- 'lxml-ent.lua',
+ 'lxml-mis.lua',
+ 'lxml-aux.lua',
+ 'lxml-xml.lua',
+
+ 'trac-xml.lua',
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
+ 'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
+ 'data-pre.lua',
+ 'data-inp.lua',
+ 'data-out.lua',
+ 'data-fil.lua',
+ 'data-con.lua',
+ 'data-use.lua',
+-- 'data-tex.lua',
+-- 'data-bin.lua',
+ 'data-zip.lua',
+ 'data-tre.lua',
+ 'data-sch.lua',
+ 'data-lua.lua',
+ 'data-aux.lua', -- updater
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'util-lib.lua', -- swiglib
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
+
+}
+
+local ownlist = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
+
+local function locate_libs()
+ for l=1,#ownlibs do
+ local lib = ownlibs[l]
+ for p =1,#ownlist do
+ local pth = ownlist[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ return pth
+ end
+ end
+ end
+end
+
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#ownlibs do
+ local filename = found .. "/" .. ownlibs[l]
+ local codeblob = loadfile(filename)
+ if codeblob then
+ codeblob()
+ end
+ end
+ else
+ resolvers = nil
+ end
+end
+
+if not resolvers then
+ load_libs()
+end
+
+if not resolvers then
+ print("")
+ print("Mtxrun is unable to start up due to lack of libraries. You may")
+ print("try to run 'lua mtxrun.lua --selfmerge' in the path where this")
+ print("script is located (normally under ..../scripts/context/lua) which")
+ print("will make this script library independent.")
+ os.exit()
+end
+
+-- verbosity
+
+local e_verbose = environment.arguments["verbose"]
+
+if e_verbose then
+ trackers.enable("resolvers.locating")
+end
+
+-- some common flags (also passed through environment)
+
+local e_silent = environment.argument("silent")
+local e_noconsole = environment.argument("noconsole")
+
+local e_trackers = environment.argument("trackers")
+local e_directives = environment.argument("directives")
+local e_experiments = environment.argument("experiments")
+
+if e_silent == true then
+ e_silent = "*"
+end
+
+if type(e_silent) == "string" then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.blocked={%s}",e_directives,e_silent)
+ else
+ e_directives = format("logs.blocked={%s}",e_silent)
+ end
+end
+
+if e_noconsole then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.target=file",e_directives)
+ else
+ e_directives = format("logs.target=file")
+ end
+end
+
+if e_trackers then trackers .enable(e_trackers) end
+if e_directives then directives .enable(e_directives) end
+if e_experiments then experiments.enable(e_experiments) end
+
+if not environment.trackers then environment.trackers = e_trackers end
+if not environment.directives then environment.directives = e_directives end
+if not environment.experiments then environment.experiments = e_experiments end
+
+--
+
+local instance = resolvers.reset()
+
+local helpinfo = [[
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+    <flag name="script"><short>run an mtx script (lua preferred method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+    <flag name="internal"><short>run script using built-in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+    <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md5 checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+    <flag name="stubpath" value="binpath"><short>paths where stubs will be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local application = logs.application {
+ name = "mtxrun",
+ banner = "ConTeXt TDS Runner Tool 1.31",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+messages = messages or { } -- for the moment
+
+runners = runners or { } -- global (might become local)
+
+runners.applications = {
+ ["lua"] = "luatex --luaonly",
+ ["luc"] = "luatex --luaonly",
+ ["pl"] = "perl",
+ ["py"] = "python",
+ ["rb"] = "ruby",
+}
+
+runners.suffixes = {
+ 'rb', 'lua', 'py', 'pl'
+}
+
+runners.registered = {
+ texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied)
+ texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it)
+ texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files
+ texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma
+ texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied
+ -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma
+ makempy = { 'makempy.pl', true },
+ mptopdf = { 'mptopdf.pl', true },
+ pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced)
+ -- examplex = { 'examplex.rb', false },
+ concheck = { 'concheck.rb', false },
+ runtools = { 'runtools.rb', true },
+ textools = { 'textools.rb', true },
+ tmftools = { 'tmftools.rb', true },
+ ctxtools = { 'ctxtools.rb', true },
+ rlxtools = { 'rlxtools.rb', true },
+ pdftools = { 'pdftools.rb', true },
+ mpstools = { 'mpstools.rb', true },
+ -- exatools = { 'exatools.rb', true },
+ xmltools = { 'xmltools.rb', true },
+ -- luatools = { 'luatools.lua', true },
+ mtxtools = { 'mtxtools.rb', true },
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
+}
+
+runners.launchers = {
+ windows = { },
+ unix = { },
+}
+
+-- like runners.libpath("framework"): looks on script's subpath
+
+function runners.libpath(...)
+ package.prepend_libpath(file.dirname(environment.ownscript),...)
+ package.prepend_libpath(file.dirname(environment.ownname) ,...)
+end
+
+function runners.prepare()
+ local checkname = environment.argument("ifchanged")
+ if type(checkname) == "string" and checkname ~= "" then
+ local oldchecksum = file.loadchecksum(checkname)
+ local newchecksum = file.checksum(checkname)
+ if oldchecksum == newchecksum then
+ if e_verbose then
+ report("file '%s' is unchanged",checkname)
+ end
+ return "skip"
+ elseif e_verbose then
+ report("file '%s' is changed, processing started",checkname)
+ end
+ file.savechecksum(checkname)
+ end
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.splitup(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if e_verbose then
+ report("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif e_verbose then
+ report("file '%s' is older than '%s'",oldname,newname)
+ end
+ end
+ end
+ local runpath = environment.argument("path")
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
+ report("unable to change to path '%s'",runpath)
+ return "error"
+ end
+ runners.prepare = function() end
+ return "run"
+end
+
+function runners.execute_script(fullname,internal,nosplit)
+ local noquote = environment.argument("noquotes")
+ if fullname and fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
+ if path ~= "" then
+ result = fullname
+ elseif name then
+ name = gsub(name,"^int[%a]*:",function()
+ internal = true
+ return ""
+ end )
+ name = gsub(name,"^script:","")
+ if suffix == "" and runners.registered[name] and runners.registered[name][1] then
+ name = runners.registered[name][1]
+ suffix = file.suffix(name)
+ end
+ if suffix == "" then
+ -- loop over known suffixes
+ for _,s in pairs(runners.suffixes) do
+ result = resolvers.findfile(name .. "." .. s, 'texmfscripts')
+ if result ~= "" then
+ break
+ end
+ end
+ elseif runners.applications[suffix] then
+ result = resolvers.findfile(name, 'texmfscripts')
+ else
+ -- maybe look on path
+ result = resolvers.findfile(name, 'other text files')
+ end
+ end
+ if result and result ~= "" then
+          if not nosplit then
+ local before, after = environment.splitarguments(fullname) -- already done
+ environment.arguments_before, environment.arguments_after = before, after
+ end
+ if internal then
+ arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end
+ environment.ownscript = result
+ dofile(result)
+ else
+ local binary = runners.applications[file.suffix(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
+ if binary and binary ~= "" then
+ result = binary .. " " .. result
+ end
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ if e_verbose then
+ report()
+ report("executing: %s",command)
+ report()
+ report()
+ io.flush()
+ end
+ -- no os.exec because otherwise we get the wrong return value
+ local code = os.execute(command) -- maybe spawn
+ if code == 0 then
+ return true
+ else
+ if binary then
+ binary = file.addsuffix(binary,os.binsuffix)
+ for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ if lfs.isfile(file.join(p,binary)) then
+ return false
+ end
+ end
+ report()
+ report("This script needs '%s' which seems not to be installed.",binary)
+ report()
+ end
+ return false
+ end
+ end
+ end
+ end
+ end
+ return false
+end
+
+function runners.execute_program(fullname)
+ local noquote = environment.argument("noquotes")
+ if fullname and fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ local before, after = environment.splitarguments(fullname)
+ for k=1,#after do after[k] = resolvers.resolve(after[k]) end
+ environment.initializearguments(after)
+ fullname = gsub(fullname,"^bin:","")
+ local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
+ report()
+ report("executing: %s",command)
+ report()
+ report()
+ io.flush()
+ local code = os.exec(command) -- (fullname,unpack(after)) does not work / maybe spawn
+ return code == 0
+ end
+ end
+ return false
+end
+
+-- the --usekpse flag will fallback (not default) on kpse (hm, we can better update mtx-stubs)
+
+local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
+local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
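+-- for the registered entry "texexec.rb" the generated unix stub therefore reads:
+--   #!/bin/sh
+--   mtxrun --usekpse --execute texexec.rb "$@"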
+
+function runners.handle_stubs(create)
+    local stubpath = environment.argument('stubpath') or '.' -- 'auto' subpaths no longer supported
+ local windows = environment.argument('windows') or environment.argument('mswin') or false
+ local unix = environment.argument('unix') or environment.argument('linux') or false
+ if not windows and not unix then
+ if os.platform == "unix" then
+ unix = true
+ else
+ windows = true
+ end
+ end
+ for _,v in pairs(runners.registered) do
+ local name, doit = v[1], v[2]
+ if doit then
+ local base = gsub(file.basename(name), "%.(.-)$", "")
+ if create then
+ if windows then
+ io.savedata(file.join(stubpath,base..".bat"),format(windows_stub,name))
+ report("windows stub for '%s' created",base)
+ end
+ if unix then
+ io.savedata(file.join(stubpath,base),format(unix_stub,name))
+ report("unix stub for '%s' created",base)
+ end
+ else
+ if windows and (os.remove(file.join(stubpath,base..'.bat')) or os.remove(file.join(stubpath,base..'.cmd'))) then
+ report("windows stub for '%s' removed", base)
+ end
+ if unix and (os.remove(file.join(stubpath,base)) or os.remove(file.join(stubpath,base..'.sh'))) then
+ report("unix stub for '%s' removed",base)
+ end
+ end
+ end
+ end
+end
+
+function runners.resolve_string(filename)
+ if filename and filename ~= "" then
+ runners.report_location(resolvers.resolve(filename))
+ end
+end
+
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
+ if filename and filename ~= "" then
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
+ end
+end
+
+function runners.locate_platform()
+ runners.report_location(os.platform)
+end
+
+function runners.report_location(result)
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
+ end
+ else
+ io.write(result)
+ end
+end
+
+function runners.edit_script(filename) -- we assume that gvim is present on most systems (todo: also in cnf file)
+ local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'gvim'
+ local rest = resolvers.resolve(filename)
+ if rest ~= "" then
+ local command = editor .. " " .. rest
+ if e_verbose then
+ report()
+ report("starting editor: %s",command)
+ report()
+ report()
+ end
+ os.launch(command)
+ end
+end
+
+function runners.save_script_session(filename, list)
+ local t = { }
+ for i=1,#list do
+ local key = list[i]
+ t[key] = environment.arguments[key]
+ end
+ io.savedata(filename,table.serialize(t,true))
+end
+
+function runners.load_script_session(filename)
+ if lfs.isfile(filename) then
+ local t = io.loaddata(filename)
+ if t then
+ t = loadstring(t)
+ if t then t = t() end
+ for key, value in pairs(t) do
+ environment.arguments[key] = value
+ end
+ end
+ end
+end
+
+function resolvers.launch(str)
+ -- maybe we also need to test on mtxrun.launcher.suffix environment
+ -- variable or on windows consult the assoc and ftype vars and such
+ local launchers = runners.launchers[os.platform] if launchers then
+ local suffix = file.suffix(str) if suffix then
+ local runner = launchers[suffix] if runner then
+ str = runner .. " " .. str
+ end
+ end
+ end
+ os.launch(str)
+end
+
+function runners.launch_file(filename)
+ trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
+ local pattern = environment.arguments["pattern"]
+ if not pattern or pattern == "" then
+ pattern = filename
+ end
+ if not pattern or pattern == "" then
+ report("provide name or --pattern=")
+ else
+ local t = resolvers.findfiles(pattern,nil,allresults)
+ if not t or #t == 0 then
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
+ end
+ if not t or #t == 0 then
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
+ end
+ if t and #t > 0 then
+ if allresults then
+ for _, v in pairs(t) do
+ report("launching %s", v)
+ resolvers.launch(v)
+ end
+ else
+ report("launching %s", t[1])
+ resolvers.launch(t[1])
+ end
+ else
+ report("no match for %s", pattern)
+ end
+ end
+end
+
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
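+-- e.g. asking for "fonts" first checks ./fonts.lua, then mtx-fonts.lua,
+-- mtx-fontss.lua and mtx-font.lua (plus the mtx-t- variants), and finally a
+-- plain fonts.lua lookup in the tree (hypothetical script name)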
+
+function runners.find_mtx_script(filename)
+ local function found(name)
+ local path = file.dirname(name)
+ if path and path ~= "" then
+ return false
+ else
+ local fullname = own and own.path and file.join(own.path,name)
+ return io.exists(fullname) and fullname
+ end
+ end
+ filename = file.addsuffix(filename,"lua")
+ local basename = file.removesuffix(file.basename(filename))
+ local suffix = file.suffix(filename)
+ -- qualified path, raw name
+ local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- current path, raw name
+ fullname = "./" .. filename
+ fullname = io.exists(fullname) and fullname
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- mtx- prefix checking
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ end
+ -- context namespace, just <filename>
+ fullname = resolvers.findfile(filename)
+ return fullname
+end
+
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local fullname = runners.find_mtx_script(filename) or ""
+ if file.suffix(fullname) == "cld" then
+ -- handy in editors where we force --autopdf
+ report("running cld script: %s",filename)
+ table.insert(arguments,1,fullname)
+ table.insert(arguments,"--autopdf")
+ fullname = runners.find_mtx_script("context") or ""
+ end
+ -- retry after generate but only if --autogenerate
+ if fullname == "" and environment.argument("autogenerate") then -- might become the default
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ --
+ fullname = runners.find_mtx_script(filename) or ""
+ end
+ -- that should do it
+ if fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ -- load and save ... kind of undocumented
+ arg = { } for _,v in pairs(arguments) do arg[#arg+1] = resolvers.resolve(v) end
+ environment.initializearguments(arg)
+ local loadname = environment.arguments['load']
+ if loadname then
+ if type(loadname) ~= "string" then loadname = file.basename(fullname) end
+ loadname = file.replacesuffix(loadname,"cfg")
+ runners.load_script_session(loadname)
+ end
+ filename = environment.files[1]
+ if e_verbose then
+ report("using script: %s\n",fullname)
+ end
+ environment.ownscript = fullname
+ dofile(fullname)
+ local savename = environment.arguments['save']
+ if savename then
+ local save_list = runners.save_list
+ if save_list and next(save_list) then
+ if type(savename) ~= "string" then savename = file.basename(fullname) end
+ savename = file.replacesuffix(savename,"cfg")
+ runners.save_script_session(savename,save_list)
+ end
+ end
+ return true
+ end
+ else
+ if filename == "" or filename == "help" then
+ local context = resolvers.findfile("mtx-context.lua")
+ trackers.enable("resolvers.locating")
+ if context ~= "" then
+ local result = dir.glob((gsub(context,"mtx%-context","mtx-*"))) -- () needed
+ local valid = { }
+ table.sort(result)
+ for i=1,#result do
+ local scriptname = result[i]
+ local scriptbase = match(scriptname,".*mtx%-([^%-]-)%.lua")
+ if scriptbase then
+ local data = io.loaddata(scriptname)
+ local banner, version = match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)")
+ if banner then
+ valid[#valid+1] = { scriptbase, version, banner }
+ end
+ end
+ end
+ if #valid > 0 then
+ application.identify()
+ report("no script name given, known scripts:")
+ report()
+ for k=1,#valid do
+ local v = valid[k]
+ report("%-12s %4s %s",v[1],v[2],v[3])
+ end
+ end
+ else
+ report("no script name given")
+ end
+ else
+ filename = file.addsuffix(filename,"lua")
+ if file.is_qualified_path(filename) then
+ report("unknown script '%s'",filename)
+ else
+ report("unknown script '%s' or 'mtx-%s'",filename,filename)
+ end
+ end
+ return false
+ end
+end
+
+function runners.prefixes()
+ application.identify()
+ report()
+ report(concat(resolvers.allprefixes(true)," "))
+end
+
+function runners.timedrun(filename) -- just for me
+ if filename and filename ~= "" then
+ runners.timed(function() os.execute(filename) end)
+ end
+end
+
+function runners.timed(action)
+ statistics.timed(action)
+end
+
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
+
+-- this is a bit dirty ... first we store the first filename and next we
+-- split the arguments so that we only see the ones meant for this script
+-- ... later we will use the second half
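+-- e.g. for "mtxrun --verbose --script fonts --list" the "--verbose --script"
+-- part ends up in arguments_before and "--list" in arguments_after, with
+-- "fonts" as the stored filename (hypothetical invocation)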
+
+local filename = environment.files[1] or ""
+local ok = true
+
+local before, after = environment.splitarguments(filename)
+environment.arguments_before, environment.arguments_after = before, after
+environment.initializearguments(before)
+
+instance.lsrmode = environment.argument("lsr") or false
+
+-- maybe the unset has to go to this level
+
+local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
+
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
+
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
+
+ os.setenv("engine","")
+ os.setenv("progname","")
+
+ local remapper = {
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ pfb = "type1 fonts",
+ other = "other text files",
+ }
+
+ local progname = e_argument("progname") or 'context'
+
+ local function kpse_initialized()
+ texconfig.kpse_init = true
+ local t = os.clock()
+ local k = kpse.original.new("luatex",progname)
+ local dummy = k:find_file("mtxrun.lua") -- so that we're initialized
+ report("kpse fallback with progname '%s' initialized in %s seconds",progname,os.clock()-t)
+ kpse_initialized = function() return k end
+ return k
+ end
+
+ local findfile = resolvers.findfile
+ local showpath = resolvers.showpath
+
+ if e_argument("forcekpse") then
+
+ function resolvers.findfile(name,kind)
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ end
+ function resolvers.showpath(name)
+ return (kpse_initialized():show_path(name)) or ""
+ end
+
+ elseif e_argument("usekpse") or is_mkii_stub then
+
+ resolvers.load()
+
+ function resolvers.findfile(name,kind)
+ local found = findfile(name,kind) or ""
+ if found ~= "" then
+ return found
+ else
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ end
+ end
+ function resolvers.showpath(name)
+ local found = showpath(name) or ""
+ if found ~= "" then
+ return found
+ else
+ return (kpse_initialized():show_path(name)) or ""
+ end
+ end
+
+ end
+
+ function runners.loadbase()
+ end
+
+else
+
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ report("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ report("the resolver databases are not present or outdated")
+ end
+ end
+ end
+
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
+
+end
+
+if e_argument("script") or e_argument("scripts") then
+
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
+
+ -- embed used libraries
+
+ runners.loadbase()
+ local found = locate_libs()
+ if found then
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ end
+
+elseif e_argument("selfclean") then
+
+ -- remove embedded libraries
+
+ runners.loadbase()
+ utilities.merger.selfclean(own.name)
+
+elseif e_argument("selfupdate") then
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
+ resolvers.updatescript(own.name,"mtxrun")
+
+elseif e_argument("ctxlua") or e_argument("internal") then
+
+ -- run a script by loading it (using libs)
+
+ runners.loadbase()
+ ok = runners.execute_script(filename,true)
+
+elseif e_argument("execute") then
+
+ -- execute script
+
+ runners.loadbase()
+ ok = runners.execute_script(filename)
+
+elseif e_argument("direct") then
+
+ -- equals bin:
+
+ runners.loadbase()
+ ok = runners.execute_program(filename)
+
+elseif e_argument("edit") then
+
+ -- edit file
+
+ runners.loadbase()
+ runners.edit_script(filename)
+
+elseif e_argument("launch") then
+
+ runners.loadbase()
+ runners.launch_file(filename)
+
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
+elseif e_argument("makestubs") then
+
+    -- make stubs (deprecated)
+
+ runners.handle_stubs(true)
+
+elseif e_argument("removestubs") then
+
+    -- remove stubs (deprecated)
+
+ runners.loadbase()
+ runners.handle_stubs(false)
+
+elseif e_argument("resolve") then
+
+ -- resolve string
+
+ runners.loadbase()
+ runners.resolve_string(filename)
+
+elseif e_argument("locate") then
+
+ -- locate file (only database)
+
+ runners.loadbase()
+ runners.locate_file(filename)
+
+elseif e_argument("platform") or e_argument("show-platform") then
+
+ -- locate platform
+
+ runners.loadbase()
+ runners.locate_platform()
+
+elseif e_argument("prefixes") then
+
+ runners.loadbase()
+ runners.prefixes()
+
+elseif e_argument("timedrun") then
+
+    -- timed run
+
+ runners.loadbase()
+ runners.timedrun(filename)
+
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(e_argument("pattern"))
+
+elseif e_argument("configurations") or e_argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+elseif e_argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
+ if not e_pattern then
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
+ elseif type(e_pattern) == "string" then
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
+ end
+
+elseif e_argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.findpath(filename, instance.my_format)
+ if e_verbose then
+ report(path)
+ else
+ print(path)
+ end
+
+elseif e_argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
+
+elseif e_argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
+
+elseif e_argument("expand-var") or e_argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
+
+elseif e_argument("show-path") or e_argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
+
+elseif e_argument("var-value") or e_argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.variable,environment.files)
+
+elseif e_argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ report(caches.getwritablepath("format"))
+
+elseif e_argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
+
+elseif e_argument("generate") then
+
+ -- luatools
+
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
+
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif e_argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif e_argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif e_argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("directives") then
+
+ directives.show()
+
+elseif e_argument("trackers") then
+
+ trackers.show()
+
+elseif e_argument("experiments") then
+
+ experiments.show()
+
+elseif e_argument("exporthelp") then
+
+ runners.loadbase()
+ application.export(e_argument("exporthelp"),filename)
+
+elseif e_argument("help") or filename=='help' or filename == "" then
+
+ application.help()
+
+elseif find(filename,"^bin:") then
+
+ runners.loadbase()
+ ok = runners.execute_program(filename)
+
+elseif is_mkii_stub then
+
+ -- execute mkii script
+
+ runners.loadbase()
+ ok = runners.execute_script(filename,false,true)
+
+elseif false then
+
+ runners.loadbase()
+ ok = runners.execute_ctx_script(filename)
+ if not ok then
+ ok = runners.execute_script(filename)
+ end
+
+elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to load mtx-base
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+else
+ runners.loadbase()
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if e_verbose then
+ report()
+ report("runtime: %0.3f seconds",os.runtime())
+end
+
+if os.type ~= "windows" then
+ texio.write("\n") -- is this still valid?
+end
+
+if ok == false then ok = 1 elseif ok == true then ok = 0 end
+
+os.exit(ok,true) -- true forces a cleanup in 5.2+
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/pstopdf b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/pstopdf
new file mode 100644
index 00000000000..116f5f4a32e
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/pstopdf
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script pstopdf "$@"
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texexec b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texexec
new file mode 100644
index 00000000000..cd5900ff84c
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texexec
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --usekpse --execute texexec "$@"
diff --git a/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texmfstart b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texmfstart
new file mode 100644
index 00000000000..1799b357915
--- /dev/null
+++ b/Build/source/texk/texlive/linked_scripts/context/stubs/unix/texmfstart
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --usekpse "$@"
diff --git a/Master/texmf-dist/bibtex/bst/context/cont-ab.bst b/Master/texmf-dist/bibtex/bst/context/cont-ab.bst
index c524ff8ee08..e09da7ca56b 100644
--- a/Master/texmf-dist/bibtex/bst/context/cont-ab.bst
+++ b/Master/texmf-dist/bibtex/bst/context/cont-ab.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/Master/texmf-dist/bibtex/bst/context/cont-au.bst b/Master/texmf-dist/bibtex/bst/context/cont-au.bst
index 2eba98b0ae2..d0b87186c66 100644
--- a/Master/texmf-dist/bibtex/bst/context/cont-au.bst
+++ b/Master/texmf-dist/bibtex/bst/context/cont-au.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/Master/texmf-dist/bibtex/bst/context/cont-no.bst b/Master/texmf-dist/bibtex/bst/context/cont-no.bst
index dbeb8a45dcc..393d13db812 100644
--- a/Master/texmf-dist/bibtex/bst/context/cont-no.bst
+++ b/Master/texmf-dist/bibtex/bst/context/cont-no.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/Master/texmf-dist/bibtex/bst/context/cont-ti.bst b/Master/texmf-dist/bibtex/bst/context/cont-ti.bst
index 734cbb4b6f3..34175a0ba84 100644
--- a/Master/texmf-dist/bibtex/bst/context/cont-ti.bst
+++ b/Master/texmf-dist/bibtex/bst/context/cont-ti.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-context.lua b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-context.lua
index aa36277b72b..dc44a6a597f 100644
--- a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-context.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", "mathsupcramped", 
"mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "startmodule", "stopmodule", "usemodule", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "donothing", "dontcomplain", "donetrue", "donefalse", "htdp", "unvoidbox", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "tracingall", "tracingnone", "loggingall", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", 
"definemeasure", "measure", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "begstrut", "endstrut" },
+ ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", 
"doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "installcorenamespace", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", 
"dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "Umathbotaccent" },
}
\ No newline at end of file
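
The data file above simply returns a Lua table whose fields ("constants", "helpers") are flat lists of ConTeXt keywords used by the SciTE lexer. A minimal sketch, with an illustrative path and independent of the real lexer API, of turning those lists into constant-time lookup sets:

    -- load the keyword data (the path here is illustrative)
    local data = dofile("scite-context-data-context.lua")

    local function toset(list)
        -- convert a flat array of words into a hash set
        local set = { }
        for _, word in ipairs(list) do
            set[word] = true
        end
        return set
    end

    local constants = toset(data.constants)
    local helpers   = toset(data.helpers)

    print(constants["zerocount"])   -- true
    print(helpers["startsetups"])   -- true
    print(constants["startsetups"]) -- nil
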
diff --git a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-interfaces.lua b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-interfaces.lua
index ee7b7571841..b2c09b62a82 100644
--- a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-interfaces.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-interfaces.lua
@@ -1,10 +1,10 @@
return {
["cs"]={ "CAP", "Cap", "Caps", "Cisla", "KAP", "Kap", "Kaps", "MESIC", "Rimskecislice", "SLOVA", "SLOVO", "Slova", "Slovo", "VSEDNIDEN", "Znak", "Znaky", "aktualnicislonadpisu", "aktualnidatum", "appendix", "arg", "atleftmargin", "atrightmargin", "barevnalista", "barva", "bilemisto", "bottomspace", "bublinkovanapoveda", "bydliste", "bypassblocks", "cap", "celkovypocetstran", "cernalinka", "cernelinky", "chapter", "chem", "cisla", "cislonadpisu", "cislopodrovnice", "cislorovnice", "cislostrany", "citace", "citovat", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "coupledregister", "crlf", "cutspace", "datum", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "definepagebreak", "defineplacement", "definerawfont", "definerule", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definuj", "definujakcent", "definujbarvu", "definujblok", "definujbloksekce", "definujbuffer", "definujfont", "definujformatodkazu", "definujhbox", "definujhlavnipole", "definujinterakcnimenu", "definujkombinovanyseznam", "definujkonverzi", "definujlogo", "definujnadpis", "definujobrazeksymbol", "definujodkaz", "definujodstavce", "definujopis", "definujoramovani", "definujoramovanytext", "definujpaletu", "definujplvouciobjekt", "definujpodpole", "definujpole", "definujpopis", "definujpopisek", "definujprekryv", "definujpreskok", "definujprikaz", "definujprofil", "definujprogram", "definujprostredizakladnihofontu", "definujrejstrik", "definujsablonutabulky", "definujsekci", "definujseznam", "definujseznamodkazu", "definujskupinubarev", "definujstartstop", "definujstyl", "definujstylfontu", "definujsymbol", "definujsynonumumfontu", "definujsynonyma", "definujtabelaci", "definujtext", "definujtrideni", "definujupravu", "definujvelikostpapiru", "definujverzi", "definujvycet", "definujvystup", "definujzakladnifont", "definujzasobnikpoli", "definujznaceni", "definujznak", "delkaseznamu", "description", "dodrzujprofil", "dodrzujverzi", "dodrzujverziprofilu", "dvoustrannypapir", "emptylines", "enumeration", "externiobraz", "fakt", "footnotetext", "forceblocks", "framedtext", "getnumber", "headsym", "hl", "hlavnijazyk", "hlavniuroven", "hodnotabarvy", "hodnotasedi", "immediatebetweenlist", "immediatetolist", "indentation", "ininner", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "instalacejazyka", "interakcnilista", "interakcnitlacitka", "interaktivnimenu", "jazyk", "jdidolu", "jdina", "jdinabox", "jdinastranu", "jmeno", "kap", "klonujpole", "komponenta", "konvertujcislo", "kopirujpole", "korekcebilehomista", "labeling", "leg", "listsymbol", "loadsorts", "loadsynonyms", "maoramovani", "mapfontsize", "marginalnilinka", "marginalninadpis", "marginalnislovo", "marginalnitext", "matematika", "mazaramovani", "mediaeval", "meritko", "mesic", "mezera", "moveformula", "movesidefloat", "mrizka", "nadpis", "nadruhyokraj", "nalevo", "nalevyokraj", "name", "naokraj", "napravo", "napravyokraj", "nastavbarvu", "nastavbarvy", "nastavbilamista", "nastavblok", "nastavbloksekce", "nastavbuffer", "nastavcernelinky", "nastavcislonadpisu", "nastavcislostrany", "nastavcislovani", "nastavcislovaniodstavcu", "nastavcislovaniradku", 
"nastavcislovanistran", "nastavcitaci", "nastavdefinicipoznamekpodcarou", "nastavdeleniplvoucichobjektu", "nastavdelitko", "nastavdolnitexty", "nastaveni", "nastavexterniobrazy", "nastavhorejsek", "nastavhornitexty", "nastavinterakci", "nastavinterakcnilistu", "nastavinterakcnimenu", "nastavinterakcniobrazovku", "nastavjazyk", "nastavkapitalky", "nastavkombinovanyseznam", "nastavkomentar", "nastavkomentarstrany", "nastavlegendu", "nastavmarginalie", "nastavmarginalniblok", "nastavmarginalnilinky", "nastavmeziradkovoumezeru", "nastavnadpis", "nastavnadpisy", "nastavodkazovani", "nastavodsazovani", "nastavodstavce", "nastavopis", "nastavoramovanetexty", "nastavoramovani", "nastavorez", "nastavotoceni", "nastavpaletu", "nastavplvouciobjekt", "nastavplvouciobjekty", "nastavpodcislostrany", "nastavpodtrzeni", "nastavpole", "nastavpolozky", "nastavpopisek", "nastavpopisky", "nastavpopisy", "nastavpozadi", "nastavpoznamkypodcarou", "nastavprechodstrany", "nastavpreskok", "nastavprofily", "nastavprogramy", "nastavprostredizakladnihofontu", "nastavpublikace", "nastavradkovani", "nastavradky", "nastavrastr", "nastavrejstrik", "nastavrovnice", "nastavsadusymbolu", "nastavsekci", "nastavseznam", "nastavseznamodkazu", "nastavsirkucary", "nastavsloupce", "nastavspodek", "nastavspojeni", "nastavsynchronizaci", "nastavsynchronizacnilistu", "nastavsynonyma", "nastavsystem", "nastavtab", "nastavtabelaci", "nastavtabulky", "nastavtenkelinky", "nastavtext", "nastavtexthlavicky", "nastavtextovelinky", "nastavtextpopisku", "nastavtexttexty", "nastavtextyupati", "nastavtextyzahlavi", "nastavtlacitka", "nastavtoleranci", "nastavtrideni", "nastavtype", "nastavumisteniprotejsku", "nastavumistovani", "nastavupati", "nastavupravu", "nastavurl", "nastavusporadani", "nastavvelikostpapiru", "nastavverze", "nastavvsechnapole", "nastavvycty", "nastavvyplnovelinky", "nastavvyplnoveradky", "nastavvystup", "nastavvzhled", "nastavzahlavi", "nastavzakladnifont", "nastavzarovnani", "nastavznaceni", "nastavzuzeni", "nastrane", "navigating", "nejakyradek", "nekde", "nextsection", "neznamo", "nivy", "nizky", "nocap", "nokap", "nop", "numberofsubpages", "obrazovka", "odkaz", "odkaznadatum", "odkaznastranu", "odkaznatext", "odkazujici", "odsazenishora", "odsazenizleva", "odsazovani", "okr", "opakovat", "opis", "opissoubor", "oramovani", "oref", "orez", "otocit", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "oznaceni", "oznacverzi", "pagedepth", "pageoffset", "paragraph", "parovastrana", "part", "pis", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placepagenumber", "placerawlist", "placereferencelist", "placerule", "placetextvariable", "plnezneni", "pol", "pole", "polozka", "polozky", "popisky", "poppisek", "porovnejpaletu", "porovnejskupinubarev", "positiontext", "pozadi", "pozice", "poznamka", "poznamkapodcarou", "pref", "prelozit", "premistinamrizku", "prepninazakladnifont", "preskoc", "prizpusobivepole", "prizpusobvzhled", "produkt", "program", "projekt", "propojeneznaceni", "propojenydokument", "propojenyrejstrik", "prostredi", "publikace", "ran", "ref", "register", "reservefloat", "reset", "resetnumber", "resettextcontent", "resetznaceni", "rimskecislice", "rozdelplvouciobjekt", "rozmer", "rozpojeneznaceni", "roztazene", "schovejbloky", "section", "sedabarva", "seeregister", "setnumber", "settextcontent", "settextvariable", "setupanswerarea", "setupcolumnset", 
"setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupforms", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setuprule", "setupstartstop", "setupstrut", "setuptextposition", "setuptextvariable", "sirkalevehookraje", "sirkalevemarginalie", "sirkamarginalie", "sirkaokraje", "sirkapapiru", "sirkapravehookraje", "sirkapravemarginalie", "sirkasazby", "sirkaseznamu", "sirkatextu", "sirkatiskpapiru", "sloupec", "slovovpravo", "sort", "spodek", "stanovcharakteristickuseznamu", "stanovcislonadpisu", "startalignment", "startbarva", "startbuffer", "startcislovaniradku", "startcitace", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startdescription", "startdocument", "startdokument", "startenumeration", "startfakt", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobalni", "starthiding", "startinteraktivnimenu", "startitemgroup", "startkodovani", "startkomponenta", "startkorekceradku", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokalni", "startlokalnipoznamkypodcarou", "startmakeup", "startmarginalniblok", "startmarginalnilinka", "startmarginblock", "startnamemakeup", "startnarrower", "startnezhustene", "startobraz", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startpozadi", "startprehled", "startprekryv", "startprodukt", "startprofil", "startprofile", "startprojekt", "startprostredi", "startprotejsek", "startradek", "startradky", "startrastr", "startregister", "startsadasymbolu", "startsloupce", "startspojeni", "startsymbolset", "startsynchronizace", "startsynchronization", "starttable", "starttables", "starttabulate", "starttabulka", "starttabulky", "starttext", "starttextovalinka", "starttyping", "startumistovani", "startunpacked", "startuprava", "startverze", "startzarovnavani", "startzhustene", "startzuzeni", "stopalignment", "stopbarva", "stopbuffer", "stopcislovaniradku", "stopcitace", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopdokument", "stopenumeration", "stopfakt", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobalni", "stophiding", "stopinteraktivnimenu", "stopitemgroup", "stopkodovani", "stopkomponenta", "stopkorekceradku", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokalni", "stoplokalnipoznamkypodcarou", "stopmakeup", "stopmarginalniblok", "stopmarginalnilinka", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopnezhustene", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozadi", "stopprehled", "stopprekryv", "stopprodukt", "stopprofil", "stopprofile", "stopprojekt", "stopprostredi", "stopprotejsek", "stopradek", "stopradky", "stoprastr", "stopsloupce", "stopspojeni", "stopsymbolset", "stopsynchronizace", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptabulka", "stoptabulky", "stoptext", "stoptextovalinka", "stoptyping", "stopumistovani", "stopunpacked", "stopuprava", "stopverze", "stopzarovnavani", "stopzhustene", "stopzuzeni", "strana", "sub", "subject", "subpagenumber", "subsection", "subsubject", 
"subsubsection", "subsubsubject", "switchtorawfont", "sym", "symbol", "synchronizacnilista", "synchronizovat", "synonym", "tab", "tecky", "tenkalinka", "tenkelinky", "testcolumn", "testpage", "tex", "texthlavicky", "textovalinka", "textpopisku", "textvariable", "title", "tlacitko", "tlacitkomenu", "tloustkacary", "tref", "tvrdamezera", "tvrdemezery", "txt", "typebuffer", "ukazbarvu", "ukazexterniobrazy", "ukazmrizku", "ukaznastaveni", "ukazpaletu", "ukazpodpery", "ukazpole", "ukazpostredizakladnihofontu", "ukazramecek", "ukazsadusymbolu", "ukazskupinubarev", "ukazupravu", "ukazvytisk", "ukazvzhled", "ukazzakladnifont", "umistikombinovanyseznam", "umistilegendu", "umistiloga", "umistilokalnipoznamkypodcarou", "umistinadsebe", "umistinamrizku", "umistipodrovnici", "umistipoznamkypodcarou", "umistirejstrik", "umistirovnici", "umistiseznam", "umistivedlesebe", "umistizalozky", "underbar", "underbars", "urcicharakteristikurejstriku", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "uzijJSscripts", "uzijURL", "uzijadresar", "uzijbloky", "uzijexternidokument", "uzijexterniobraz", "uzijexternisoubor", "uzijexternisoubory", "uzijexternizvuk", "uzijkodovani", "uzijmodul", "uzijmoduly", "uzijodkazy", "uzijprikazy", "uzijspeciality", "uzijsymbol", "uzijurl", "verze", "vl", "vlasovalinka", "vlevo", "vpravo", "vradku", "vsedniden", "vyberbloky", "vyberpapir", "vyberverzi", "vyplnenytext", "vyplnovelinky", "vyplnovepole", "vyplnovyradek", "vyskahorejsku", "vyskapapiru", "vyskasazby", "vyskaseznamu", "vyskaspodku", "vyskatextu", "vyskatiskpapiru", "vyskaupati", "vyskazahlavi", "vysoky", "vyznam", "vzdalenosthorejsku", "vzdalenostlevehookraje", "vzdalenostlevemarginalie", "vzdalenostmarginalie", "vzdalenostokraje", "vzdalenostpravehookraje", "vzdalenostpravemarginalie", "vzdalenostspodku", "vzdalenostupati", "vzdalenostzahlavi", "zablokujinterakcnimenu", "zachovejbloky", "zadnamezera", "zadnebilemisto", "zadnedalsibloky", "zadnedalsisoubory", "zadnehorniadolniradky", "zadneodsazovani", "zadnezahlaviaupati", "zadneznaceni", "zadnyrozmer", "zadnyseznam", "zadnytest", "zalozka", "zapisdorejstriku", "zapisdoseznamu", "zapisdoseznamuodkazu", "zapismeziseznam", "zaramovani", "zarovnanonastred", "zarovnanovlevo", "zarovnanovpravo", "zasobnikpoli", "zaznamovepole", "zhustene", "ziskejbuffer", "ziskejznaceni", "zlomek", "znaceni", "znak", "znaky", "zpracujbloky", "zpracujstranu", "zrcadlit", "zref", "zvysujicicislo" },
["de"]={ "Buchstabe", "Buchstaben", "CAP", "Cap", "Caps", "KAP", "Kap", "Kaps", "MONAT", "Roemischezahlen", "WOCHENTAG", "WOERTER", "WORT", "Woerter", "Wort", "Ziffern", "abstandlinkerrand", "abstandoben", "abstandrechterrand", "abstandunten", "amgitterausrichten", "amgitterneuausrichten", "appendix", "arg", "atleftmargin", "atrightmargin", "aufseite", "ausfuellfeld", "ausfuelltext", "ausschnitt", "bearbeitebloecke", "bearbeiteseite", "bedeutung", "behaltebloecke", "bei", "bemerkung", "benutzekodierung", "benutzespezielles", "benutzeverzeichnis", "beschrifteversion", "beschriftung", "bestimmekopfnummer", "bestimmelistencharakeristika", "bestimmeregistercharakteristika", "bildschirm", "blanko", "bookmark", "bottomspace", "breitelinkerrand", "breiterechterrand", "bruch", "buchstabe", "buchstaben", "but", "bypassblocks", "cap", "chapter", "chem", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "coupledregister", "crlf", "cutspace", "datum", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "defineoutput", "definepagebreak", "defineplacement", "definerawfont", "definerule", "defineschriftsynonym", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definiereabbsymbol", "definiereabsaetze", "definiereabschnitt", "definiereabschnittsblock", "definiereakzent", "definierebefehl", "definierebeschreibung", "definierebeschreibungen", "definierebeschriftung", "definiereblanko", "definiereblock", "definierefarbe", "definierefarbengruppe", "definierefeld", "definierefeldstapel", "definierefliesstext", "definierefliesstextumgebung", "definieregleitobjekt", "definierehauptfeld", "definierehbox", "definiereinteraktionsmenue", "definierekonversion", "definierelabel", "definiereliste", "definierelogo", "definieren", "definierenummerierung", "definiereoverlay", "definierepalette", "definierepapierformat", "definiereprofil", "definiereprogramme", "definierepuffer", "definierereferenz", "definierereferenzformat", "definierereferenzliste", "definiereregister", "definiereschrift", "definiereschriftstil", "definieresortieren", "definierestartstop", "definierestil", "definieresubfeld", "definieresymbol", "definieresynonyme", "definieretabellenvorlage", "definieretabulator", "definieretext", "definieretippen", "definiereueberschrift", "definiereumbruch", "definiereumrahmt", "definiereumrahmtertext", "definiereversion", "definierezeichen", "definierezusammengestellteliste", "description", "dimension", "doppelseite", "doppelseitigespapier", "drehen", "duennelinie", "duennerumriss", "einezeile", "einstellungen", "einziehen", "emptylines", "entknuepfebeschriftung", "enumeration", "externeabbildung", "farbbalken", "farbe", "farbewert", "feld", "feldstapel", "festesspatium", "folgeprofil", "folgeprofilversion", "folgeversion", "footnotetext", "forceblocks", "format", "formelnummer", "framedtext", "fussnote", "fusszeileabstand", "fusszeilenhoehe", "gefuelltesrechteck", "gefuelltezeile", "geg", "gesamtseitenanzahl", "gestreckt", "getnumber", "gitter", "graufarbe", "grauwert", "haarlinie", "hauptsprache", "headsym", "heutigesdatum", "heutigeskopfnummer", "hintergrund", "hl", "hoch", "hoeheoben", "hoeheunten", "holebeschriftung", "holepuffer", "imlinken", 
"imlinkenrand", "immaumrise", "immediatebetweenlist", "immediatetolist", "imrechten", "imrechtenrand", "imumriss", "in", "inaktiviereinteraktionsmenue", "inanderermarginale", "indentation", "ininner", "inlinkermarginale", "inmarginalie", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inrechtermarginale", "installieresprache", "interaktionsbalken", "interaktionsknopfe", "interaktionsmenue", "inzeile", "irgendwo", "its", "kap", "keindimension", "keinebeschriftung", "keinebloeckemehr", "keinedateienmehr", "keinekopfundfusszeilen", "keineliste", "keinspatium", "keintest", "keinzeilenobenundunten", "keinzwischenraum", "kleinerdurchschuss", "klonierefeld", "knopf", "komponente", "konvertierezahl", "kopf", "kopfniveau", "kopfnummer", "kopfweite", "kopfzeilenabstand", "kopfzeilenhoehe", "kopierefeld", "korrigierezwischenraum", "label", "labeling", "labels", "labeltext", "leg", "liniendicke", "linkemarginalafstand", "linkemarginalbreite", "linksbuendig", "listenbreite", "listenhoehe", "listenlaenge", "listsymbol", "loadsorts", "loadsynonyms", "mapfontsize", "mar", "marginalafstand", "marginalbreite", "marginallinie", "marginaltext", "marginaltitel", "marginalwort", "mathematik", "maumrise", "mediaeval", "menueknopf", "monat", "moveformula", "movesidefloat", "nachunten", "name", "navigating", "nextsection", "nichteinziehen", "nocap", "nokap", "nop", "notiz", "numberofsubpages", "nummererhoehen", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "papierbreite", "papierhoehe", "paragraph", "part", "passelayoutan", "passendfeld", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placepagenumber", "placerawlist", "placereferencelist", "placerule", "placetextvariable", "platzierebookmarks", "platziereformel", "platzierefussnoten", "platzierelegende", "platziereliste", "platzierelogo", "platzierelokalefussnoten", "platzierenebeneinander", "platziereregister", "platziereuntereinander", "platziereunterformel", "platzierezusammengestellteliste", "pos", "position", "positiontext", "posten", "printpapierbreite", "printpapierhoehe", "produkt", "programm", "projekt", "publikation", "punkt", "ran", "randabstand", "randbreite", "rechteck", "rechtecke", "rechtemarginalafstand", "rechtemarginalbreite", "rechtsbuendig", "ref", "referenz", "referieren", "register", "registrierefelder", "reservefloat", "resetnumber", "resettextcontent", "roemischezahlen", "ruecksetzten", "ruecksetztenbeschriftung", "rumpfweite", "satzbreite", "satzhoehe", "schreibezumregister", "schreibezurliste", "schreibezurreferenzliste", "schreibezwischenliste", "section", "seeregister", "seite", "seitenreferenz", "seitenummer", "setnumber", "settext", "settextvariable", "setupanswerarea", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupforms", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setuprule", "setupstartstop", "setupstrut", "setuptextposition", "setuptextvariable", "showsymbolset", "sort", "spalte", "spatium", "spiegeln", "sprache", "startabbildung", "startalignment", "startausrichtung", "startbuffer", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startdescription", "startdocument", "startdokument", 
"startenger", "startenumeration", "startfarbe", "startfigure", "startfloattext", "startformula", "startframedtext", "startgeg", "startgegenueber", "startglobal", "startgrosserdurchschuss", "starthiding", "starthintergrund", "startinteraktionsmenue", "startitemgroup", "startkleinerdurchschuss", "startkodierung", "startkombination", "startkomponente", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokal", "startlokalefussnoten", "startmakeup", "startmarginalblock", "startmarginallinie", "startmarginblock", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositionieren", "startpositioning", "startpostponing", "startprodukt", "startprofil", "startprofile", "startprojekt", "startraster", "startregister", "startspalten", "startsymbolset", "startsynchronisation", "startsynchronization", "starttabelle", "starttabellen", "starttable", "starttables", "starttabulate", "starttext", "starttextlinie", "starttyping", "startueberblick", "startumbruch", "startumgebung", "startunpacked", "startversion", "startzeile", "startzeilen", "startzeilenkorrektur", "startzeilennumerierung", "startzitat", "stelleabsaetzeein", "stelleabsatznummerierungein", "stelleabschnittein", "stelleabschnittsblockein", "stelleanordnenein", "stelleaufzaehlungenein", "stelleausgabeein", "stelleausrichtungein", "stelleausschnittein", "stellebeschreibungein", "stellebeschriftungein", "stellebilderunterschriftein", "stellebildunterschriftein", "stellebindestrichein", "stelleblankoein", "stelleblockein", "stelledrehenein", "stelleduennerumrissein", "stelleeinziehenein", "stelleengerein", "stelleexterneabbildungenein", "stellefarbeein", "stellefarbenein", "stellefeldein", "stellefelderin", "stellefliesstextein", "stellefliesstextumgebungein", "stelleformelnein", "stellefussnotendefinitionein", "stellefussnotenein", "stellefusszeileein", "stellefusszeilentextein", "stellegefuelltesrechteckein", "stellegefuelltezeileein", "stellegegenueberplatzierenein", "stellegleitobjekteein", "stellegleitobjektein", "stellehintergruendeein", "stellehintergrundein", "stelleinmarginalieein", "stelleinteraktionein", "stelleinteraktionsbalkenein", "stelleinteraktionsbildschirmein", "stelleinteraktionsmenueein", "stelleknopfein", "stellekombinationein", "stellekommentarein", "stellekopfzahlein", "stellekopfzeileein", "stellekopfzeilentextein", "stellelabeltextein", "stellelayoutein", "stellelegendeein", "stellelinienbreiteein", "stellelisteein", "stellemarginalblockein", "stellemarginallinieein", "stellenobenein", "stellenummerierungein", "stellepaletteein", "stellepapierformatein", "stelleplatziegeteiltegleitobjekt", "stellepositionierenein", "stellepostenein", "stelleprofilein", "stelleprogrammein", "stellepublikationein", "stellepufferein", "stellerasterein", "stellerechteckein", "stellereferenzierenein", "stellereferenzlisteein", "stelleregisterein", "stelleseitenkommentarein", "stelleseitennummerein", "stelleseitennummeriernungein", "stelleseitenuebergangein", "stellesortierenein", "stellespaltenein", "stellespatiumein", "stellespracheein", "stellesymbolsetein", "stellesynchronisationein", "stellesynchronisationsbalkenein", "stellesynonymein", "stellesystemein", "stelletabein", "stelletabellenein", "stelletabulatorein", "stelletextein", "stelletextobenein", "stelletexttexteein", "stelletextumrissein", "stelletextuntenein", "stelletipein", "stelletippenein", "stelletoleranzein", 
"stelleueberschriftein", "stelleueberschriftenein", "stelleueberschrifttextein", "stelleumbruchein", "stelleumrahmtein", "stelleumrahmtetexteein", "stelleuntenein", "stelleunterseitennummerein", "stelleunterstreichenein", "stelleurlein", "stelleversalienein", "stelleversionein", "stellezeilenabstandein", "stellezeilenein", "stellezeilennumerierungein", "stellezitierenein", "stellezusammengestelltelisteein", "stellezwischenraumein", "stopalignment", "stopausrichtung", "stopbuffer", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopdokument", "stopenger", "stopenumeration", "stopfarbe", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgeg", "stopgegenueber", "stopglobal", "stopgrosserdurchschuss", "stophiding", "stophintergrund", "stopinteraktionsmenue", "stopitemgroup", "stopkleinerdurchschuss", "stopkodierung", "stopkombination", "stopkomponente", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokal", "stoplokalefussnoten", "stopmakeup", "stopmarginalblock", "stopmarginallinie", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositionieren", "stoppositioning", "stoppostponing", "stopprodukt", "stopprofil", "stopprofile", "stopprojekt", "stopraster", "stopspalten", "stopsymbolset", "stopsynchronisation", "stopsynchronization", "stoptabelle", "stoptabellen", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextlinie", "stoptyping", "stopueberblick", "stopumbruch", "stopumgebung", "stopunpacked", "stopversion", "stopzeile", "stopzeilen", "stopzeilenkorrektur", "stopzeilennumerierung", "stopzitat", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtorawfont", "sym", "symbol", "synchronisationsbalken", "synchronisieren", "synonym", "tab", "teilegleitobjekt", "testcolumn", "testpage", "tex", "textbreite", "texthoehe", "textlinie", "textreferenz", "textvariable", "tief", "tiho", "tip", "tippedatei", "tippen", "tippepuffer", "title", "tooltip", "txt", "ueber", "ueberschrifttext", "uebersetzten", "umgebung", "umrahmt", "unbekant", "underbar", "underbars", "unterformelnummer", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "verbergebloecke", "vergleichefarbengruppe", "vergleichepalette", "verknuepfebeschriftung", "verknuepfedokument", "verknuepfregister", "version", "verweis", "verweisdatum", "verwendeJSscript", "verwendeURL", "verwendebefehl", "verwendebloecke", "verwendeexteresdokument", "verwendeexterneabbildung", "verwendeexternedatei", "verwendeexternedateien", "verwendeexternestonstueck", "verwendemodul", "verwendemodule", "verwendereferenzen", "verwendesymbole", "verwendeurl", "vl", "volleswort", "von", "waehlebloeckeaus", "waehlepapieraus", "waehleversionaus", "wechselezumfliesstext", "wiederholen", "wochentag", "wohnort", "wortrechts", "zeigedruck", "zeigeeinstellungen", "zeigeexterneabbildungen", "zeigefarbe", "zeigefarbengruppe", "zeigefelder", "zeigefliesstext", "zeigefliesstextumgebung", "zeigegitter", "zeigelayout", "zeigepalette", "zeigerahmen", "zeigestruts", "zeigeumbruch", "zentriert", "ziffern", "zitat", "zitieren", "zu", "zurbox", "zurseite", "zwischenraum" },
- ["en"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "adaptlayout", "adding", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "backspace", "blackrule", "blackrules", "blank", "bookmark", "bottomdistance", "bottomheight", "bottomspace", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "colorbar", "colorvalue", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "component", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "decouplemarking", "decrementnumber", "define", "defineaccent", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecharacter", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "definecommand", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontstyle", "definefontsynonym", "defineframed", "defineframedtext", "definehbox", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemainfield", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "definesection", "definesectionblock", "definesorting", "definestartstop", "definestyle", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "description", "determineheadnumber", "determinelistcharacteristics", "determineregistercharacteristics", "dimension", "disableinteractionmenu", "domicile", "donttest", "edgedistance", "edgewidth", "emptylines", "enumeration", "environment", "externalfigure", "fact", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspace", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footerdistance", "footerheight", "footnote", "footnotetext", "forceblocks", "formulanumber", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "gotopage", "graycolor", "greyvalue", "grid", "hairline", "head", "headerdistance", "headerheight", "headlevel", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "in", "incrementnumber", "indentation", "indenting", "inframed", "infull", "ininner", "inleft", "inleftedge", "inleftmargin", "inline", "inmaframed", "inmargin", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", 
"inouter", "inright", "inrightedge", "inrightmargin", "installlanguage", "interactionbar", "interactionbuttons", "interactionmenu", "item", "items", "its", "keepblocks", "label", "labeling", "labels", "labeltext", "language", "leftaligned", "leftedgedistance", "leftedgewidth", "leftmargindistance", "leftmarginwidth", "leg", "linethickness", "listheight", "listlength", "listsymbol", "listwidth", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "maframed", "mainlanguage", "makeupheight", "makeupwidth", "mapfontsize", "mar", "margindistance", "marginrule", "margintext", "margintitle", "marginwidth", "marginword", "marking", "markversion", "mathematics", "mediaeval", "menubutton", "midaligned", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "name", "navigating", "nextsection", "nocap", "nodimension", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pagenumber", "pageoffset", "pagereference", "paperheight", "paperwidth", "paragraph", "part", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "position", "positiontext", "printpaperheight", "printpaperwidth", "processblocks", "processpage", "product", "program", "project", "publication", "quotation", "quote", "ran", "redo", "ref", "reference", "referral", "referraldate", "referring", "register", "remark", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "rightedgedistance", "rightedgewidth", "rightmargindistance", "rightmarginwidth", "romannumerals", "rotate", "scale", "screen", "section", "seeregister", "selectblocks", "selectpaper", "selectversion", "setnumber", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulae", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitemizations", 
"setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagecomment", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setups", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcoding", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobal", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttext", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcoding", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobal", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextrule", "stoptyping", 
"stopunpacked", "stopversion", "stretched", "sub", "subformulanumber", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "synchronizationbar", "synchronize", "synonym", "tab", "testcolumn", "testpage", "tex", "textheight", "textreference", "textrule", "textvariable", "textwidth", "thinrule", "thinrules", "title", "tooltip", "topdistance", "topheight", "topspace", "totalnumberofpages", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "unitmeaning", "unknown", "useJSscripts", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usemodules", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "useurl", "version", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
+ ["en"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "adaptlayout", "adding", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "backspace", "blackrule", "blackrules", "blank", "bookmark", "bottomdistance", "bottomheight", "bottomspace", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "colorbar", "colorvalue", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "component", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "decouplemarking", "decrementnumber", "define", "defineaccent", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecharacter", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "definecommand", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontstyle", "definefontsynonym", "defineframed", "defineframedtext", "definehbox", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemainfield", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "definesection", "definesectionblock", "definesorting", "definestartstop", "definestyle", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "description", "determineheadnumber", "determinelistcharacteristics", "determineregistercharacteristics", "dimension", "disableinteractionmenu", "domicile", "donttest", "edgedistance", "edgewidth", "emptylines", "enumeration", "environment", "externalfigure", "fact", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspace", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footerdistance", "footerheight", "footnote", "footnotetext", "forceblocks", "formulanumber", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "gotopage", "graycolor", "greyvalue", "grid", "hairline", "head", "headerdistance", "headerheight", "headlevel", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "in", "incrementnumber", "indentation", "indenting", "inframed", "infull", "ininner", "inleft", "inleftedge", "inleftmargin", "inline", "inmaframed", "inmargin", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", 
"inouter", "inright", "inrightedge", "inrightmargin", "installlanguage", "interactionbar", "interactionbuttons", "interactionmenu", "item", "items", "its", "keepblocks", "label", "labeling", "labels", "labeltext", "language", "leftaligned", "leftedgedistance", "leftedgewidth", "leftmargindistance", "leftmarginwidth", "leg", "linethickness", "listheight", "listlength", "listsymbol", "listwidth", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "maframed", "mainlanguage", "makeupheight", "makeupwidth", "mapfontsize", "mar", "margindistance", "marginrule", "margintext", "margintitle", "marginwidth", "marginword", "marking", "markversion", "mathematics", "mediaeval", "menubutton", "midaligned", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "name", "navigating", "nextsection", "nocap", "nodimension", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pagenumber", "pageoffset", "pagereference", "paperheight", "paperwidth", "paragraph", "part", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "position", "positiontext", "printpaperheight", "printpaperwidth", "processblocks", "processpage", "product", "program", "project", "publication", "quotation", "quote", "ran", "redo", "ref", "reference", "referral", "referraldate", "referring", "register", "remark", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "rightedgedistance", "rightedgewidth", "rightmargindistance", "rightmarginwidth", "romannumerals", "rotate", "scale", "screen", "section", "seeregister", "selectblocks", "selectpaper", "selectversion", "setnumber", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulae", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitemizations", 
"setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagecomment", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setups", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcoding", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobal", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttext", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcoding", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobal", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextrule", "stoptyping", 
"stopunpacked", "stopversion", "stretched", "sub", "subformulanumber", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "symoffset", "synchronizationbar", "synchronize", "synonym", "tab", "testcolumn", "testpage", "tex", "textheight", "textreference", "textrule", "textvariable", "textwidth", "thinrule", "thinrules", "title", "tooltip", "topdistance", "topheight", "topspace", "totalnumberofpages", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "unitmeaning", "unknown", "useJSscripts", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usemodules", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "useurl", "version", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
["fr"]={ "CAP", "Cap", "Caps", "Caractere", "Caracteres", "Chiffresromains", "JOURSEMAINE", "MOIS", "MOT", "MOTS", "Mot", "Mots", "Numeros", "a", "adaptedisposition", "affectenumero", "affectevariabletexte", "ajustechamp", "alaligne", "alapage", "aligneadroite", "aligneagauche", "aligneaumilieu", "appendix", "arg", "arriereplan", "atleftmargin", "atrightmargin", "baha", "barrecouleur", "barreinteraction", "barresynchronisation", "bas", "bouton", "boutonmenu", "boutonsinteraction", "but", "cacheblocs", "cap", "caractere", "caracteres", "champ", "changepolicebrute", "changepolicecorps", "chapter", "chem", "chiffresromains", "citation", "citer", "clip", "clonechamp", "colonne", "comment", "commentaire", "comparegroupecouleur", "comparepalette", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completenumeropage", "completeregistre", "composant", "composeenalinea", "concernant", "convertitnumero", "copitchamp", "corrigeespaceblanc", "couleur", "couleurgrise", "coupledocument", "coupledregister", "couplemarquage", "couplepapier", "coupleregistre", "crlf", "cutspace", "dactylographier", "dans", "dansautremarge", "dansborddroit", "dansbordgauche", "dansdroite", "dansgauche", "dansmarge", "dansmargedroite", "dansmargegauche", "date", "datecourante", "daterecommandation", "de", "decouplemarquage", "decrementenumero", "definebodyfontDEF", "definebodyfontREF", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineframed", "defineframedtext", "defineindentedtext", "defineitemgroup", "definemathalignment", "defineplacement", "definetypeface", "definicaractere", "definit", "definitaccent", "definitbloc", "definitblocsection", "definitbuffer", "definitcalque", "definitchamp", "definitchampprincipal", "definitcommande", "definitconversion", "definitcouleur", "definitdactylo", "definitdansmarge", "definitdemarrestoppe", "definitdescription", "definitdisposition", "definitenumeration", "definitenvironnementpolicecorps", "definitetiquette", "definitflottant", "definitformatreference", "definitgroupecouleur", "definithbox", "definitjeucolonne", "definitliste", "definitlisteimbriquee", "definitlistereference", "definitlogo", "definitmakeup", "definitmarquage", "definitmenuinteraction", "definitnotepdp", "definitpalette", "definitparagraphes", "definitpilechamp", "definitpolice", "definitpolicebrute", "definitpolicecorps", "definitpositiontexte", "definitprofil", "definitprogramme", "definitreference", "definitregistre", "definitregle", "definitrevetement", "definitsautdecolonne", "definitsautdepage", "definitsection", "definitsortie", "definitsouschamp", "definitstyle", "definitstylepolice", "definitsymbole", "definitsymbolefigure", "definitsynonymepolice", "definitsynonymes", "definittabulation", "definittaillepapier", "definittete", "definittexte", "definittrametableau", "definittri", "definittype", "definitvariabletexte", "definitversion", "definitvide", "demarrealignement", "demarrearriereplan", "demarreblocmarge", "demarrecitation", "demarreciter", "demarrecodage", "demarrecolonnes", "demarrecombinaison", "demarrecompoetroite", "demarrecomposant", "demarrecorrectionligne", "demarrecouleur", "demarredegroupe", "demarredocument", "demarreenvironement", "demarrefigure", "demarreglobal", "demarregroupe", "demarrejeucolonne", "demarrejeusymboles", "demarreligne", "demarreligneregleetexte", "demarrelignes", "demarrelocal", "demarremakeup", "demarremargereglee", "demarrenotespdplocales", "demarrenumerotationligne", 
"demarreopposition", "demarrepositionnement", "demarreproduit", "demarreprofil", "demarreprojet", "demarreraster", "demarrerevetement", "demarresynchronisation", "demarretableau", "demarretableaux", "demarretexte", "demarreversion", "demarrevuedensemble", "deplaceformule", "deplacesurgrille", "description", "determinecaracteristiqueliste", "determinecaracteristiquesregistre", "determinenumerotete", "dimension", "distancebord", "distanceborddroit", "distancebordgauche", "distanceentete", "distanceinf", "distancemarge", "distancemargedroite", "distancemargegauche", "distancepdp", "distancesup", "domicile", "echelle", "ecran", "ecritdansliste", "ecritdanslistereference", "ecritentreliste", "ecritregistre", "el", "element", "elements", "emptylines", "enumeration", "environement", "espace", "espaceblanc", "espacefixe", "espaceinf", "espacesfixes", "espacesup", "etiquette", "etiquettes", "etire", "fait", "faitreference", "fichierdactylo", "figureexterne", "forceblocs", "fraction", "framed", "framedtext", "gardeblocs", "getnumber", "grille", "groupe", "haut", "hauteureditionpapier", "hauteurentete", "hauteurinf", "hauteurliste", "hauteurmakeup", "hauteurpapier", "hauteurpdp", "hauteursup", "hauteurtexte", "headsym", "hl", "immediatebetweenlist", "immediatetolist", "inconnu", "incrementenumero", "indentation", "inframed", "infull", "inhibemenuinteraction", "ininner", "inmframed", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "installelangue", "joursemaine", "labeling", "labeltexte", "langue", "langueprincipale", "largeurbord", "largeurborddroit", "largeurbordgauche", "largeureditionpapier", "largeurligne", "largeurliste", "largeurmakeup", "largeurmarge", "largeurmargedroite", "largeurmargegauche", "largeurpapier", "largeurtexte", "leg", "ligneh", "lignenoire", "ligneregleetexte", "lignesnoires", "listesymbole", "llongueurliste", "loadsorts", "loadsynonyms", "logchamp", "mapfontsize", "mar", "margereglee", "marquage", "marquageversion", "marquepage", "mathematique", "mediaeval", "menuinteraction", "mframed", "mois", "montrecadre", "montrechamps", "montrecouleur", "montredisposition", "montreedition", "montreenvironnementpolicecorps", "montrefiguresexternes", "montregrille", "montregroupecouleur", "montrejeusymboles", "montremakeup", "montrepalette", "montrepolicecorps", "montrereglages", "montrestruts", "motdroit", "motmarge", "movesidefloat", "name", "navigating", "nextsection", "niveautete", "nocap", "nombredesouspages", "nombretotaldepages", "nommacro", "nop", "note", "notepdp", "numeroformule", "numeropage", "numeros", "numerosousformule", "numerotete", "numerotetecourant", "obtientmarquage", "oriente", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "page", "pagedepth", "pagedouble", "pageoffset", "paragraph", "part", "pasplusdeblocs", "pasplusdefichiers", "periodes", "pilechamp", "placecoteacote", "placeflottant", "placeformule", "placelegende", "placelesunsaudessusdesautres", "placeliste", "placelisteinmbriquee", "placelistereference", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelogos", "placemarquespages", "placenotespdp", "placenotespdplocales", "placenumeropage", "placenumerotete", "placerawlist", "placeregistre", "placeregle", "placesousformule", "placesurgrille", "placetextetete", "placevariabletexte", "position", "positionnetexte", "prendbuffer", "produit", "programme", "projet", "publication", "qqpart", "ran", "raz", 
"razmarquage", "raznumero", "recommandation", "ref", "refait", "reference", "referencepage", "referencetexte", "reflete", "register", "reglages", "reglealignement", "reglearrangement", "reglearriereplan", "reglearriereplans", "reglebarreinteraction", "reglebarresynchronisation", "reglebloc", "regleblocmarge", "regleblocsection", "regleboutons", "reglebuffer", "reglecapitales", "reglechamp", "reglechamps", "regleclipping", "reglecolonnes", "reglecombinaisons", "reglecommentaire", "reglecommentairepage", "reglecompoetroite", "reglecomposeenalinea", "reglecouleur", "reglecouleurs", "regledactylo", "regledansmarge", "regledemarrestoppe", "regledescriptions", "regledisposition", "regleecraninteraction", "regleecrans", "regleelements", "regleencadre", "regleentete", "regleenumerations", "regleenvironnementpolicecorps", "regleepaisseurligne", "regleespaceblanc", "regleespacement", "regleespacementinterligne", "reglefiguresexternes", "regleflottant", "regleflottants", "regleformulaires", "regleformules", "reglegroupeselements", "regleinf", "regleinteraction", "regleintitule", "regleintitules", "reglejeucolonne", "reglejeusymboles", "reglelabeltexte", "reglelangue", "reglelegende", "reglelignes", "reglelignesnoires", "reglelignesreglestexte", "regleliste", "reglelisteimbriquee", "reglelistereference", "reglemakeup", "reglemargereglee", "reglemarquage", "reglemarquagehyphenation", "reglemenuinteraction", "reglenotepdp", "reglenumero", "reglenumeropage", "reglenumerotation", "reglenumerotationligne", "reglenumerotationpage", "reglenumerotationparagraphe", "reglenumerotete", "regleoriente", "reglepalette", "reglepapier", "regleparagraphes", "reglepdp", "regleplacementopposition", "reglepolicecorps", "reglepositionnement", "reglepositiontexte", "regleprofils", "regleprogrammes", "reglepublications", "reglereferencage", "regleregistre", "regleregle", "regleremplitligne", "regleremplitlignesreglees", "reglesection", "regleseparationflottant", "reglesortie", "reglesouslignage", "reglesousnumeropage", "reglestrut", "reglesup", "reglesynchronisation", "reglesynonymes", "reglesysteme", "regletab", "regletableaux", "regletabulation", "regletaillepapier", "regletete", "regletetes", "regletexte", "regletextesentete", "regletextesinf", "regletextespdp", "regletextessup", "regletextestexte", "regletextetete", "regletolerance", "regletraitsfins", "regletransitionspage", "regletri", "regletype", "regleurl", "reglevariabletexte", "regleversions", "remplitchamp", "remplitligne", "remplitlignesreglees", "remplittexte", "reservefloat", "resettextcontent", "retourarriere", "sansalinea", "sansdimension", "sansespace", "sansespaceblanc", "sanslignesenteteetpdp", "sanslignessupetinf", "sansliste", "sansmarquage", "sanstest", "sauteblocs", "section", "seeregister", "selectionneblocs", "selectionnepapier", "selectionneversion", "sensunite", "separeflottant", "settext", "setupanswerarea", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupframedtexts", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupplacement", "sort", "sousnumeropage", "startalignment", "startarriereplan", "startbuffer", "startcitation", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomposant", "startcouleur", "startdescription", "startdocument", "startenumeration", "startenvironement", "startfait", "startfigure", "startfloattext", "startformula", "startframedtext", "startgroupe", "starthiding", "startitemgroup", 
"startlegend", "startligneregleetexte", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmargereglee", "startmarginblock", "startmenuinteraction", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startproduit", "startprofile", "startprojet", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startversion", "stopalignment", "stoparriereplan", "stopbuffer", "stopcitation", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcompoetroite", "stopcomposant", "stopcouleur", "stopdescription", "stopdocument", "stopenumeration", "stopenvironement", "stopfait", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgroupe", "stophiding", "stopitemgroup", "stoplegend", "stopligneregleetexte", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmargereglee", "stopmarginblock", "stopmenuinteraction", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppealignement", "stoppearriereplan", "stoppeblocmarge", "stoppecitation", "stoppecodage", "stoppecolonnes", "stoppecombinaison", "stoppecomposant", "stoppecorrectionligne", "stoppecouleur", "stoppedegroupe", "stoppedocument", "stoppeenvironement", "stoppeglobal", "stoppegroupe", "stoppejeucolonne", "stoppeligne", "stoppeligneregleetexte", "stoppelignes", "stoppelocal", "stoppemakeup", "stoppemargereglee", "stoppenotespdplocales", "stoppenumerotationligne", "stoppeopposition", "stoppepositionnement", "stoppeproduit", "stoppeprofil", "stoppeprojet", "stopperaster", "stopperevetement", "stoppesynchronisation", "stoppetableau", "stoppetableaux", "stoppetexte", "stoppeversion", "stoppevuedensemble", "stoppositioning", "stoppostponing", "stopproduit", "stopprofile", "stopprojet", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopversion", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestion", "suivantprofil", "suivantversion", "suivantversionprofil", "sym", "symbole", "synchronise", "synonym", "tab", "tapebuffer", "testcolumn", "testpage", "tete", "tex", "textemarge", "textenotepdp", "textetete", "title", "titremarge", "traduire", "traiteblocs", "traitepage", "traitfin", "traitsfins", "txt", "typ", "underbar", "underbars", "uneligne", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "utiliseJSscripts", "utiliseURL", "utiliseblocs", "utilisechemin", "utilisecommandes", "utilisedocumentexterne", "utiliseencodage", "utilisefichierexterne", "utilisefichiersexternes", "utilisefigureexterne", "utilisemodule", "utilisemodules", "utilisepsiteaudioexterne", "utilisereferences", "utilisespecialites", "utilisesymboles", "utiliseurl", "va", "vaalaboite", "vaalapage", "vaenbas", "valeurcouleur", "valeurgris", "variabletexte", "version", "vide", "vl" },
["it"]={ "CAP", "Cap", "Caps", "GIORNOSETTIMANA", "Lettera", "Lettere", "MESE", "Numeri", "Numeriromani", "PAROLA", "PAROLE", "Parola", "Parole", "accoppiacarta", "accoppiadocumento", "accoppiamarcatura", "accoppiapagina", "accoppiaregistro", "adattacampo", "adattalayout", "al", "allineacentro", "allineadestra", "allineasinistra", "altezzacarta", "altezzacartastampa", "altezzacima", "altezzaelenco", "altezzafondo", "altezzaintestazione", "altezzamakeup", "altezzapdp", "altezzatesto", "ambiente", "ampiezzabordo", "ampiezzabordodestro", "ampiezzabordosinistro", "ampiezzacarta", "ampiezzacartastampa", "ampiezzaelenco", "ampiezzamakeup", "ampiezzamargine", "ampiezzamarginedestro", "ampiezzamarginesinistro", "ampiezzatesto", "ap", "apagina", "appendix", "arg", "atleftmargin", "atrightmargin", "barracolori", "barrainterazione", "barrasincronizzazione", "bastablocchi", "bastafile", "cambiaafontdeltesto", "campi", "camporiempimento", "cap", "capello", "chapter", "chim", "circondato", "citazione", "clip", "clonacampo", "colonna", "colore", "coloregrigio", "comment", "commento", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenet", "confrontagruppocolori", "confrontatavolozza", "convertinumero", "copiacampo", "correggispaziobianco", "coupledregister", "crlf", "cutspace", "da", "daqualcheparte", "data", "datadioggi", "datareferral", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definemathalignment", "definepagebreak", "defineplacement", "definetypeface", "definisci", "definisciaccento", "definisciambientefontdeltesto", "definisciblocco", "definiscibloccosezione", "definiscibuffer", "definiscicampo", "definiscicampoprincipale", "definiscicapoversi", "definiscicarattere", "definiscicolore", "definiscicomando", "definisciconversione", "definiscidescrizione", "definiscidimensionicarta", "definiscielenco", "definiscielencocombinato", "definiscienumerazione", "definiscietichetta", "definiscifigurasimbolo", "definiscifont", "definiscifontdeltesto", "definiscifontgrezzo", "definisciformatoriferimento", "definiscigruppocolonne", "definiscigruppocolori", "definiscihbox", "definisciincorniciato", "definisciiniziatermina", "definiscilayout", "definiscilinea", "definiscilistariferimenti", "definiscilogo", "definiscimakeup", "definiscimarcatura", "definiscimenuinterazione", "definiscimodellotabella", "definiscioggettomobile", "definisciordinamento", "definiscioutput", "definisciposizionetesto", "definisciprofilo", "definisciprogramma", "definisciregistro", "definisciriferimento", "definiscirigovuoto", "definiscisezione", "definiscisimbolo", "definiscisinonimi", "definiscisinonimofont", "definiscisottocampo", "definiscisovrapposizione", "definiscistackcampi", "definiscistile", "definiscistilefont", "definiscitabulato", "definiscitavolozza", "definiscitesta", "definiscitesto", "definiscitestoincorniciato", "definiscitype", "definiscityping", "definiscivariabiletesto", "definisciversion", "description", "determinacaratteristicheregistro", "determinacarattersticheelenco", "determinanumerotesta", "dimensione", "disabilitamenuinterazione", "distanzabordo", "distanzabordodestro", "distanzabordosinistro", "distanzacima", "distanzafondo", "distanzaintestazione", "distanzamargine", "distanzamarginedestro", "distanzamarginesinistro", "distanzapdp", "domicilio", 
"el", "elaborablocchi", "elaborapagina", "elementi", "elemento", "emptylines", "enumeration", "etichetta", "etichette", "fatto", "figuraesterna", "fondo", "forzablocchi", "framedtext", "frazione", "getnumber", "giornosettimana", "griglia", "headsym", "hl", "ignoto", "immediatebetweenlist", "immediatetolist", "impaccato", "impostaallineamento", "impostaambientefontdeltesto", "impostaampiezzariga", "impostabarrainterazione", "impostabarrasincronizzazione", "impostablocchimargine", "impostablocco", "impostabloccosezione", "impostabuffer", "impostacampi", "impostacampo", "impostacapoversi", "impostacaption", "impostacaptions", "impostacima", "impostaclippling", "impostacolonne", "impostacolore", "impostacolori", "impostacombinazioni", "impostacommento", "impostacommentopagina", "impostadefinizionenotepdp", "impostadescrizioni", "impostadimensionicarta", "impostaelementi", "impostaelencazioni", "impostaelenco", "impostaelencocombinato", "impostaenumerazioni", "impostafigureesterne", "impostafondo", "impostafontdeltesto", "impostaforms", "impostaformule", "impostagruppocolonne", "impostaincorniciato", "impostainiziatermina", "impostainmargine", "impostainstestazione", "impostainterazione", "impostainterlinea", "impostalayout", "impostalegenda", "impostalinea", "impostalineemargine", "impostalineenere", "impostalineeriempimento", "impostalineesottili", "impostalineetesto", "impostalingua", "impostalistariferimenti", "impostamaiuscole", "impostamakeup", "impostamarcatura", "impostamenuinterazione", "impostamenzione", "impostanotepdp", "impostanumerazione", "impostanumerazionecapoversi", "impostanumerazionepagina", "impostanumerazionerighe", "impostanumeropagina", "impostanumerosottopagina", "impostanumerotesta", "impostaoggettimobili", "impostaoggettomobile", "impostaordinamento", "impostaoutput", "impostaparranging", "impostapdp", "impostapiustretto", "impostaposizionamento", "impostaposizionamentoopposti", "impostaposizionetesto", "impostaprofili", "impostaprogrammi", "impostapubblicazioni", "impostapulsanti", "impostaregistro", "impostarientro", "impostariferimento", "impostarighe", "impostarigheriempimento", "impostarigovuoto", "impostarotazione", "impostaschermi", "impostaschermointerazione", "impostasegnosillabazione", "impostasetsimboli", "impostasezione", "impostasfondi", "impostasfondo", "impostasincronizzazione", "impostasinonimi", "impostasistema", "impostasottolinea", "impostaspaziatura", "impostaspaziobianco", "impostaspezzamentooggettomobile", "impostastrut", "impostatab", "impostatabelle", "impostatabulato", "impostatavolozza", "impostatesta", "impostateste", "impostatesticima", "impostatestifondo", "impostatestiincorniciati", "impostatestiintestazioni", "impostatestipdp", "impostatesto", "impostatestoetichette", "impostatestointestazioni", "impostatestotesti", "impostatolleranza", "impostatransizionepagina", "impostatype", "impostatyping", "impostaurl", "impostavariabiletesto", "impostaversioni", "impostazioni", "in", "inaltromargine", "incorniciato", "incrementanumero", "indentation", "indestra", "ininner", "iniziaallineamento", "iniziaambiente", "iniziabloccomargine", "iniziacitazione", "iniziacodifica", "iniziacolonne", "iniziacolore", "iniziacombinazione", "iniziacomponente", "iniziacorrezioneriga", "iniziadocumento", "iniziafigura", "iniziaglobale", "iniziagruppocolonne", "iniziaimpaccato", "inizialineamargine", "inizialineatesto", "inizialocale", "iniziamakeup", "inizianotepdplocali", "inizianumerazionerighe", "iniziaopposto", "iniziaoverview", "iniziapiustretto", 
"iniziaposizionamento", "iniziaprodotto", "iniziaprofilo", "iniziaprogetto", "iniziaraster", "iniziariga", "iniziarighe", "iniziasetsimboli", "iniziasfondo", "iniziasincronizzazione", "iniziasovrapposizione", "iniziatabella", "iniziatabelle", "iniziatesto", "iniziaunpacked", "iniziaversione", "inlatodestro", "inlatosinistro", "inmaframed", "inmargine", "inmarginedestro", "inmarginesinistro", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inriga", "insinistra", "installalingua", "intorno", "labeling", "leg", "lettera", "lettere", "lineamargine", "lineanera", "lineasottile", "lineatesto", "lineenere", "lineeriempimento", "lineesottili", "lingua", "linguaprincipale", "listsymbol", "livellotesta", "loadsorts", "loadsynonyms", "logcampi", "lunghezzaelenco", "maframed", "mapfontsize", "mar", "marcatura", "marcaversione", "matematica", "mediaeval", "menuinterattivo", "menzione", "mese", "mettielenco", "mettielencocombinato", "mettifiancoafianco", "mettiformula", "mettiingriglia", "mettilegenda", "mettilinea", "mettiloghi", "mettinotepdp", "mettinotepdplocali", "mettinumeropagina", "mettiregistro", "mettisegnalibro", "mettisottoformula", "mettiunosullaltro", "mettivariabiletesto", "mostraambientefontdeltesto", "mostracampi", "mostracolore", "mostracornice", "mostrafiguresterne", "mostrafontdeltesto", "mostragriglia", "mostragruppocolori", "mostraimpostazioni", "mostralyout", "mostramakeup", "mostrasetsimboli", "mostrastampa", "mostrastruts", "mostratavolozza", "movesidefloat", "name", "nascondiblocchi", "navigating", "nextsection", "nientedimensioni", "nienteelenco", "nientelineecimafondo", "nientelineintestazionepdp", "nientemarcatura", "nienterientro", "nientespazio", "nientespaziobianco", "nocap", "nome", "nomeunita", "nop", "nota", "notapdp", "notest", "numberofsubpages", "numeri", "numeriromani", "numeroformula", "numeropagina", "numeropaginacompleto", "numerosottoformula", "numerotesta", "numerotestacorrente", "numerototaledipagine", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "pagina", "paragraph", "paroladestra", "parolainmargine", "part", "passaafontgrezzo", "ped", "pedap", "perlungo", "placefloat", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placerawlist", "placereferencelist", "posizionanumerotesta", "posizionatesto", "posizionatestotesta", "posizione", "prendibuffer", "prendimarcatura", "prodotto", "progetto", "programma", "pubblicazione", "pulsante", "pulsantemenu", "pulsantinterazione", "punti", "qualcheriga", "ran", "referral", "referring", "register", "reimposta", "reimpostamarcatura", "reservefloat", "resetnumber", "resettextcontent", "rientro", "rif", "rifai", "riferimento", "riferimentopagina", "riferimentotesto", "riflessione", "rigariempimento", "rigovuoto", "ruota", "saltablocchi", "scala", "schermo", "scrividentroelenco", "scriviinelenco", "scriviinlistariferimenti", "scriviinregistro", "section", "seeregister", "segnalibro", "seguiprofilo", "seguiversione", "seguiversioneprofilo", "selezionablocchi", "selezionacarta", "selezionaversione", "separamarcatura", "setnumber", "settext", "setupanswerarea", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setuppaper", "setupplacement", "setvariabiletesto", "sfondo", "sim", "simbolo", "sincronizza", 
"sort", "spazifissi", "spazio", "spaziobianco", "spaziocima", "spaziodietro", "spaziofisso", "spaziofondo", "spessoreriga", "spezzaoggettomobile", "spostaagriglia", "spostaformula", "stackcampi", "startalignment", "startambiente", "startbuffer", "startcitazione", "startcolore", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomponenet", "startdescription", "startdocument", "startenumeration", "startfatto", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startimpaccato", "startitemgroup", "startlegend", "startline", "startlineamargine", "startlineatesto", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmenuinterattivo", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startprodotto", "startprofile", "startprogetto", "startregister", "startsfondo", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startversione", "stirato", "stopalignment", "stopambiente", "stopbuffer", "stopcitazione", "stopcolore", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcomponenet", "stopdescription", "stopdocument", "stopenumeration", "stopfatto", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopimpaccato", "stopitemgroup", "stoplegend", "stopline", "stoplineamargine", "stoplineatesto", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmenuinterattivo", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stopprodotto", "stopprofile", "stopprogetto", "stopsfondo", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopversione", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "tab", "terminaallineamento", "terminaambiente", "terminabloccomargine", "terminacitazione", "terminacodifica", "terminacolonne", "terminacolore", "terminacombinazione", "terminacomponente", "terminacorrezioneriga", "terminadocumento", "terminaglobale", "terminagruppocolonne", "terminaimpaccato", "terminalineamargine", "terminalineatesto", "terminalocale", "terminamakeup", "terminanotepdplocali", "terminanumerazionerighe", "terminaopposto", "terminaoverview", "terminapiustretto", "terminaposizionamento", "terminaprodotto", "terminaprofili", "terminaprogetto", "terminaraster", "terminariga", "terminarighe", "terminasfondo", "terminasincronizzazione", "terminasovrapposizione", "terminatabella", "terminatabelle", "terminatesto", "terminaunpacked", "terminaversioni", "testa", "testcolumn", "testoetichetta", "testoinmargine", "testoinstestazioni", "testonotapdp", "testoriempimento", "testpage", "tex", "tieniblocchi", "title", "titoloinmargine", "tooltip", "traduci", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "usaJSscripts", "usaURL", "usablocco", "usacartella", "usacodifica", "usacolonnasonoraesterna", "usacomandi", "usadocumentoesterno", "usafiguraesterna", "usafileesterni", "usafileesterno", "usamoduli", "usamodulo", "usariferimenti", "usasimboli", "usaspecialita", "usaurl", "useXMLfilter", "usedirectory", 
"usetypescript", "usetypescriptfile", "vaia", "vaiabox", "vaiapagina", "vaigiu", "valorecolore", "valoregrigio", "variabiletesto", "versione", "vl" },
- ["nl"]={ "CAP", "Cap", "Caps", "Cijfers", "KAP", "Kap", "Kaps", "Letter", "Letters", "MAAND", "Romeins", "WEEKDAG", "WOORD", "WOORDEN", "Woord", "Woorden", "aantalsubpaginas", "about", "achtergrond", "appendix", "arg", "bepaalkopnummer", "bepaallijstkenmerken", "bepaalregisterkenmerken", "betekenis", "binnenmargeafstand", "binnenmargebreedte", "binnenrandafstand", "binnenrandbreedte", "blanko", "blokje", "blokjes", "blokkeerinteractiemenu", "bodemwit", "bookmark", "bovenafstand", "bovenhoogte", "breuk", "buitenmargeafstand", "buitenmargebreedte", "buitenrandafstand", "buitenrandbreedte", "but", "button", "cap", "chapter", "chem", "cijfers", "citaat", "citeer", "clip", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "converteernummer", "copieerveld", "corrigeerwitruimte", "coupledregister", "crlf", "datum", "definebodyfontDEF", "definebodyfontREF", "definedfont", "definefontfeature", "definefonthandling", "definerawfont", "definetypeface", "definieer", "definieeraccent", "definieeralineas", "definieerbeeldmerk", "definieerblanko", "definieerblok", "definieerbuffer", "definieercombinatie", "definieercommando", "definieerconversie", "definieerfiguursymbool", "definieerfont", "definieerfontstijl", "definieerfontsynoniem", "definieerhbox", "definieerhoofdveld", "definieeringesprongentext", "definieerinmarge", "definieerinteractiemenu", "definieeritemgroep", "definieerkadertekst", "definieerkarakter", "definieerkleur", "definieerkleurgroep", "definieerkolomgroep", "definieerkolomovergang", "definieerkop", "definieerkorps", "definieerkorpsomgeving", "definieerlayer", "definieerlayout", "definieerletter", "definieerlijn", "definieerlijst", "definieermarkering", "definieeromlijnd", "definieeropmaak", "definieeroverlay", "definieerpaginaovergang", "definieerpalet", "definieerpapierformaat", "definieerplaats", "definieerplaatsblok", "definieerprofiel", "definieerprogramma", "definieerreferentie", "definieerreferentieformaat", "definieerreferentielijst", "definieerregister", "definieersamengesteldelijst", "definieersectie", "definieersectieblok", "definieersorteren", "definieerstartstop", "definieersubveld", "definieersymbool", "definieersynoniemen", "definieertabelvorm", "definieertabulatie", "definieertekst", "definieertekstpositie", "definieertekstvariabele", "definieertype", "definieertypen", "definieeruitvoer", "definieerveld", "definieerveldstapel", "definieerversie", "definieerwiskundeuitlijnen", "description", "dimensie", "directnaarlijst", "directtussenlijst", "doordefinieren", "doorlabelen", "doornummeren", "dunnelijn", "dunnelijnen", "eenregel", "enumeration", "ergens", "externfiguur", "forceerblokken", "formulenummer", "framedtext", "gebruikJSscripts", "gebruikURL", "gebruikXMLfilter", "gebruikblokken", "gebruikcommandos", "gebruikexterndocument", "gebruikexternefile", "gebruikexternefiles", "gebruikexternfiguur", "gebruikexterngeluidsfragment", "gebruikgebied", "gebruikmodule", "gebruikmodules", "gebruikreferenties", "gebruikspecials", "gebruiksymbolen", "gebruiktypescript", "gebruiktypescriptfile", "gebruikurl", "geenblokkenmeer", "geenbovenenonderregels", "geendimensie", "geenfilesmeer", "geenhoofdenvoetregels", "geenlijst", "geenmarkering", "geenspatie", "geentest", "geenwitruimte", "geg", "grijskleur", "grijswaarde", "haalbuffer", "haalmarkering", "haalnummer", "haarlijn", "handhaafblokken", "herhaal", "hl", "hoofdafstand", "hoofdhoogte", "hoofdtaal", "hoog", "huidigedatum", "huidigekopnummer", "in", "inanderemarge", 
"inbinnen", "inbuiten", "indentation", "inlijnd", "inlinker", "inlinkermarge", "inlinkerrand", "inmarge", "inrechter", "inrechtermarge", "inrechterrand", "inregel", "inspringen", "installeertaal", "instellingen", "interactiebalk", "interactiebuttons", "interactiemenu", "invullijnen", "invulregel", "invultekst", "invulveld", "inwilijnd", "items", "its", "kantlijn", "kap", "kenmerk", "kenmerkdatum", "kentekstvariabeletoe", "kleur", "kleurenbalk", "kleurwaarde", "kloonveld", "kolom", "kop", "kopniveau", "kopnummer", "koppeldocument", "koppelmarkering", "koppelpagina", "koppelpapier", "koppelregister", "kopsym", "koptekst", "kopwit", "laag", "label", "labeling", "labels", "labeltekst", "laho", "leg", "legeregels", "letter", "letters", "lijndikte", "lijstbreedte", "lijsthoogte", "lijstlengte", "lijstsymbool", "linkermargeafstand", "linkermargebreedte", "linkerrandafstand", "linkerrandbreedte", "loadsorts", "loadsynonyms", "maand", "mapfontsize", "mar", "margeafstand", "margebreedte", "margetekst", "margetitel", "margewoord", "markeer", "markeerversie", "mediaeval", "menubutton", "naam", "naar", "naarbox", "naarpagina", "name", "navigerend", "nextsection", "nietinspringen", "nocap", "nokap", "noot", "nop", "omgeving", "omlaag", "omlijnd", "onbekend", "onderafstand", "onderdeel", "onderhoogte", "ontkoppelmarkering", "op", "opelkaar", "oplinkermarge", "oppagina", "oprechtermarge", "overbar", "overbars", "overstrike", "overstrikes", "pagina", "paginadiepte", "paginanummer", "paginaoffset", "paginareferentie", "papierbreedte", "papierhoogte", "paragraph", "part", "paslayoutaan", "passeerblokken", "passendveld", "plaatsbeeldmerken", "plaatsbookmarks", "plaatsformule", "plaatskopnummer", "plaatskoptekst", "plaatslegenda", "plaatslijn", "plaatslijst", "plaatslokalevoetnoten", "plaatsnaastelkaar", "plaatsonderelkaar", "plaatsopgrid", "plaatspaginanummer", "plaatsplaatsblok", "plaatsreferentielijst", "plaatsregister", "plaatsruwelijst", "plaatssamengesteldelijst", "plaatssubformule", "plaatstekstvariabele", "plaatsvoetnoten", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "positioneer", "positioneertekst", "printpapierbreedte", "printpapierhoogte", "produkt", "programma", "projekt", "publicatie", "punten", "ran", "randafstand", "randbreedte", "rechtermargeafstand", "rechtermargebreedte", "rechterrandafstand", "rechterrandbreedte", "ref", "refereer", "referentie", "regellinks", "regelmidden", "regelrechts", "register", "registreervelden", "reservefloat", "reset", "resetmarkering", "resetnummer", "resettekstinhoud", "resettextcontent", "romeins", "rooster", "roteer", "rugwit", "schaal", "scherm", "schrijfnaarlijst", "schrijfnaarreferentielijst", "schrijfnaarregister", "schrijftussenlijst", "section", "seeregister", "selecteerblokken", "selecteerpapier", "selecteerversie", "setnummer", "setupfonthandling", "setupfontsynonym", "setupinterlinespace2", "setuplistalternative", "snijwit", "som", "sort", "spatie", "spiegel", "splitsplaatsblok", "startachtergrond", "startalignment", "startbuffer", "startcitaat", "startcodering", "startcolumns", "startcombinatie", "startcombination", "startcomment", "startdescription", "startdocument", "startenumeration", "startfigure", "startfiguur", "startfloattext", "startformula", "startframedtext", "startgeg", "startglobaal", "starthiding", "startinteractiemenu", "startitemgroup", "startkantlijn", "startkleur", "startkolomgroep", "startkolommen", "startkolomopmaak", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", 
"startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokaal", "startlokalevoetnoten", "startmakeup", "startmargeblok", "startmarginblock", "startnaast", "startnamemakeup", "startnarrower", "startomgeving", "startonderdeel", "startopelkaar", "startopmaak", "startopposite", "startoverlay", "startoverview", "startoverzicht", "startparagraph", "startpositioneren", "startpositioning", "startpostponing", "startprodukt", "startprofiel", "startprofile", "startprojekt", "startraster", "startregel", "startregelcorrectie", "startregelnummeren", "startregels", "startregister", "startsmaller", "startsymbolset", "startsymboolset", "startsynchronisatie", "startsynchronization", "starttabel", "starttabellen", "starttable", "starttables", "starttabulate", "starttekst", "starttekstlijn", "starttyping", "startuitlijnen", "startunpacked", "startvanelkaar", "startversie", "stelachtergrondenin", "stelachtergrondin", "stelalineasin", "stelantwoordgebiedin", "stelarrangerenin", "stelblankoin", "stelblokin", "stelblokjesin", "stelblokkopjein", "stelblokkopjesin", "stelbovenin", "stelboventekstenin", "stelbufferin", "stelbuttonsin", "stelciterenin", "stelclipin", "stelcombinatiesin", "stelcommentaarin", "steldoordefinierenin", "steldoornummerenin", "steldunnelijnenin", "stelexternefigurenin", "stelformulesin", "stelformulierenin", "stelhoofdin", "stelhoofdtekstenin", "stelingesprongentextin", "stelinmargein", "stelinspringenin", "stelinteractiebalkin", "stelinteractiein", "stelinteractiemenuin", "stelinteractieschermin", "stelinterliniein", "stelinvullijnenin", "stelinvulregelsin", "stelitemgroepin", "stelitemsin", "stelkadertekstenin", "stelkantlijnin", "stelkapitalenin", "stelkleurenin", "stelkleurin", "stelkolomgroepin", "stelkolomgroepregelsin", "stelkolomgroepstartin", "stelkolommenin", "stelkopin", "stelkopnummerin", "stelkoppeltekenin", "stelkoppenin", "stelkoptekstin", "stelkorpsin", "stelkorpsomgevingin", "stellabeltekstin", "stellayoutin", "stellegendain", "stellijndiktein", "stellijnin", "stellijstin", "stelmargeblokkenin", "stelmarkeringin", "stelnaastplaatsenin", "stelnummerenin", "stelnummerin", "stelomlijndin", "stelonderin", "stelonderstrepenin", "stelondertekstenin", "stelopmaakin", "stelopsommingenin", "stelpaginacommentaarin", "stelpaginanummerin", "stelpaginanummeringin", "stelpaginaovergangenin", "stelpaletin", "stelpapierformaatin", "stelpapierin", "stelparagraafnummerenin", "stelplaatsblokin", "stelplaatsblokkenin", "stelplaatsbloksplitsenin", "stelplaatsin", "stelpositionerenin", "stelprofielenin", "stelprogrammasin", "stelpublicatiesin", "stelrastersin", "stelreferentielijstin", "stelrefererenin", "stelregelnummerenin", "stelregelsin", "stelregisterin", "stelroterenin", "stelsamengesteldelijstin", "stelsectieblokin", "stelsectiein", "stelsmallerin", "stelsorterenin", "stelspatieringin", "stelstartstopin", "stelstrutin", "stelsubpaginanummerin", "stelsymboolsetin", "stelsynchronisatiebalkin", "stelsynchronisatiein", "stelsynoniemenin", "stelsysteemin", "steltaalin", "steltabellenin", "steltabin", "steltabulatiein", "steltekstin", "steltekstinhoudin", "steltekstlijnenin", "steltekstpositiein", "stelteksttekstenin", "steltekstvariabelein", "steltolerantiein", "steltypein", "steltypenin", "steluitlijnenin", "steluitvoerin", "stelurlin", "stelveldenin", "stelveldin", "stelversiesin", "stelvoetin", "stelvoetnootdefinitiein", "stelvoetnotenin", "stelvoettekstenin", "stelwiskundeuitlijnenin", "stelwitruimtein", "stopachtergrond", "stopalignment", "stopbuffer", "stopcitaat", 
"stopcodering", "stopcolumns", "stopcombinatie", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopenumeration", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgeg", "stopglobaal", "stophiding", "stopinteractiemenu", "stopitemgroup", "stopkantlijn", "stopkleur", "stopkolomgroep", "stopkolommen", "stopkolomopmaak", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokaal", "stoplokalevoetnoten", "stopmakeup", "stopmargeblok", "stopmarginblock", "stopnaast", "stopnamemakeup", "stopnarrower", "stopomgeving", "stoponderdeel", "stopopelkaar", "stopopmaak", "stopopposite", "stopoverlay", "stopoverview", "stopoverzicht", "stopparagraph", "stoppositioneren", "stoppositioning", "stoppostponing", "stopprodukt", "stopprofiel", "stopprofile", "stopprojekt", "stopraster", "stopregel", "stopregelcorrectie", "stopregelnummeren", "stopregels", "stopsmaller", "stopsymbolset", "stopsynchronisatie", "stopsynchronization", "stoptabel", "stoptabellen", "stoptable", "stoptables", "stoptabulate", "stoptekst", "stoptekstlijn", "stoptyping", "stopuitlijnen", "stopunpacked", "stopvanelkaar", "stopversie", "sub", "subformulenummer", "subject", "subpaginanummer", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestie", "switchnaarkorps", "switchtorawfont", "sym", "symbool", "synchronisatiebalk", "synchroniseer", "synonym", "taal", "tab", "tekstbreedte", "teksthoogte", "tekstlijn", "tekstreferentie", "tekstvariabele", "testkolom", "testpagina", "tex", "title", "toelichting", "toonexternefiguren", "toongrid", "tooninstellingen", "toonkader", "toonkleur", "toonkleurgroep", "toonkorps", "toonkorpsomgeving", "toonlayout", "toonopmaak", "toonpalet", "toonprint", "toonstruts", "toonsymboolset", "toonvelden", "totaalaantalpaginas", "txt", "typ", "type", "typebuffer", "typefile", "uit", "uitgerekt", "underbar", "underbars", "usecodering", "usedirectory", "vastespatie", "vastespaties", "veld", "veldstapel", "verbergblokken", "vergelijkkleurgroep", "vergelijkpalet", "verhoognummer", "verlaagnummer", "verplaatsformule", "verplaatsopgrid", "verplaatszijblok", "versie", "vertaal", "verwerkblokken", "verwerkpagina", "vl", "voetafstand", "voethoogte", "voetnoot", "voetnoottekst", "volgprofiel", "volgprofielversie", "volgversie", "volledigepaginanummer", "volledigregister", "voluit", "weekdag", "wilijnd", "wiskunde", "witruimte", "woonplaats", "woordrechts", "zetbreedte", "zethoogte" },
+ ["nl"]={ "CAP", "Cap", "Caps", "Cijfers", "KAP", "Kap", "Kaps", "Letter", "Letters", "MAAND", "Romeins", "WEEKDAG", "WOORD", "WOORDEN", "Woord", "Woorden", "aantalsubpaginas", "about", "achtergrond", "appendix", "arg", "bepaalkopnummer", "bepaallijstkenmerken", "bepaalregisterkenmerken", "betekenis", "binnenmargeafstand", "binnenmargebreedte", "binnenrandafstand", "binnenrandbreedte", "blanko", "blokje", "blokjes", "blokkeerinteractiemenu", "bodemwit", "bookmark", "bovenafstand", "bovenhoogte", "breuk", "buitenmargeafstand", "buitenmargebreedte", "buitenrandafstand", "buitenrandbreedte", "but", "button", "cap", "chapter", "chem", "cijfers", "citaat", "citeer", "clip", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "converteernummer", "copieerveld", "corrigeerwitruimte", "coupledregister", "crlf", "datum", "definebodyfontDEF", "definebodyfontREF", "definedfont", "definefontfeature", "definefonthandling", "definerawfont", "definetypeface", "definieer", "definieeraccent", "definieeralineas", "definieerbeeldmerk", "definieerblanko", "definieerblok", "definieerbuffer", "definieercombinatie", "definieercommando", "definieerconversie", "definieerfiguursymbool", "definieerfont", "definieerfontstijl", "definieerfontsynoniem", "definieerhbox", "definieerhoofdveld", "definieeringesprongentext", "definieerinmarge", "definieerinteractiemenu", "definieeritemgroep", "definieerkadertekst", "definieerkarakter", "definieerkleur", "definieerkleurgroep", "definieerkolomgroep", "definieerkolomovergang", "definieerkop", "definieerkorps", "definieerkorpsomgeving", "definieerlayer", "definieerlayout", "definieerletter", "definieerlijn", "definieerlijst", "definieermarkering", "definieeromlijnd", "definieeropmaak", "definieeroverlay", "definieerpaginaovergang", "definieerpalet", "definieerpapierformaat", "definieerplaats", "definieerplaatsblok", "definieerprofiel", "definieerprogramma", "definieerreferentie", "definieerreferentieformaat", "definieerreferentielijst", "definieerregister", "definieersamengesteldelijst", "definieersectie", "definieersectieblok", "definieersorteren", "definieerstartstop", "definieersubveld", "definieersymbool", "definieersynoniemen", "definieertabelvorm", "definieertabulatie", "definieertekst", "definieertekstpositie", "definieertekstvariabele", "definieertype", "definieertypen", "definieeruitvoer", "definieerveld", "definieerveldstapel", "definieerversie", "definieerwiskundeuitlijnen", "description", "dimensie", "directnaarlijst", "directtussenlijst", "doordefinieren", "doorlabelen", "doornummeren", "dunnelijn", "dunnelijnen", "eenregel", "enumeration", "ergens", "externfiguur", "forceerblokken", "formulenummer", "framedtext", "gebruikJSscripts", "gebruikURL", "gebruikXMLfilter", "gebruikblokken", "gebruikcommandos", "gebruikexterndocument", "gebruikexternefile", "gebruikexternefiles", "gebruikexternfiguur", "gebruikexterngeluidsfragment", "gebruikgebied", "gebruikmodule", "gebruikmodules", "gebruikreferenties", "gebruikspecials", "gebruiksymbolen", "gebruiktypescript", "gebruiktypescriptfile", "gebruikurl", "geenblokkenmeer", "geenbovenenonderregels", "geendimensie", "geenfilesmeer", "geenhoofdenvoetregels", "geenlijst", "geenmarkering", "geenspatie", "geentest", "geenwitruimte", "geg", "grijskleur", "grijswaarde", "haalbuffer", "haalmarkering", "haalnummer", "haarlijn", "handhaafblokken", "herhaal", "hl", "hoofdafstand", "hoofdhoogte", "hoofdtaal", "hoog", "huidigedatum", "huidigekopnummer", "in", "inanderemarge", 
"inbinnen", "inbuiten", "indentation", "inlijnd", "inlinker", "inlinkermarge", "inlinkerrand", "inmarge", "inrechter", "inrechtermarge", "inrechterrand", "inregel", "inspringen", "installeertaal", "instellingen", "interactiebalk", "interactiebuttons", "interactiemenu", "invullijnen", "invulregel", "invultekst", "invulveld", "inwilijnd", "items", "its", "kantlijn", "kap", "kenmerk", "kenmerkdatum", "kentekstvariabeletoe", "kleur", "kleurenbalk", "kleurwaarde", "kloonveld", "kolom", "kop", "kopniveau", "kopnummer", "koppeldocument", "koppelmarkering", "koppelpagina", "koppelpapier", "koppelregister", "kopsym", "koptekst", "kopwit", "laag", "label", "labeling", "labels", "labeltekst", "laho", "leg", "legeregels", "letter", "letters", "lijndikte", "lijstbreedte", "lijsthoogte", "lijstlengte", "lijstsymbool", "linkermargeafstand", "linkermargebreedte", "linkerrandafstand", "linkerrandbreedte", "loadsorts", "loadsynonyms", "maand", "mapfontsize", "mar", "margeafstand", "margebreedte", "margetekst", "margetitel", "margewoord", "markeer", "markeerversie", "mediaeval", "menubutton", "naam", "naar", "naarbox", "naarpagina", "name", "navigerend", "nextsection", "nietinspringen", "nocap", "nokap", "noot", "nop", "omgeving", "omlaag", "omlijnd", "onbekend", "onderafstand", "onderdeel", "onderhoogte", "ontkoppelmarkering", "op", "opelkaar", "oplinkermarge", "oppagina", "oprechtermarge", "overbar", "overbars", "overstrike", "overstrikes", "pagina", "paginadiepte", "paginanummer", "paginaoffset", "paginareferentie", "papierbreedte", "papierhoogte", "paragraph", "part", "paslayoutaan", "passeerblokken", "passendveld", "plaatsbeeldmerken", "plaatsbookmarks", "plaatsformule", "plaatskopnummer", "plaatskoptekst", "plaatslegenda", "plaatslijn", "plaatslijst", "plaatslokalevoetnoten", "plaatsnaastelkaar", "plaatsonderelkaar", "plaatsopgrid", "plaatspaginanummer", "plaatsplaatsblok", "plaatsreferentielijst", "plaatsregister", "plaatsruwelijst", "plaatssamengesteldelijst", "plaatssubformule", "plaatstekstvariabele", "plaatsvoetnoten", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "positioneer", "positioneertekst", "printpapierbreedte", "printpapierhoogte", "produkt", "programma", "projekt", "publicatie", "punten", "ran", "randafstand", "randbreedte", "rechtermargeafstand", "rechtermargebreedte", "rechterrandafstand", "rechterrandbreedte", "ref", "refereer", "referentie", "regellinks", "regelmidden", "regelrechts", "register", "registreervelden", "reservefloat", "reset", "resetmarkering", "resetnummer", "resettekstinhoud", "resettextcontent", "romeins", "rooster", "roteer", "rugwit", "schaal", "scherm", "schrijfnaarlijst", "schrijfnaarreferentielijst", "schrijfnaarregister", "schrijftussenlijst", "section", "seeregister", "selecteerblokken", "selecteerpapier", "selecteerversie", "setnummer", "setupfonthandling", "setupfontsynonym", "setupinterlinespace2", "setuplistalternative", "snijwit", "som", "sort", "spatie", "spiegel", "splitsplaatsblok", "startachtergrond", "startalignment", "startbuffer", "startcitaat", "startcodering", "startcolumns", "startcombinatie", "startcombination", "startcomment", "startdescription", "startdocument", "startenumeration", "startfigure", "startfiguur", "startfloattext", "startformula", "startframedtext", "startgeg", "startglobaal", "starthiding", "startinteractiemenu", "startitemgroup", "startkantlijn", "startkleur", "startkolomgroep", "startkolommen", "startkolomopmaak", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", 
"startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokaal", "startlokalevoetnoten", "startmakeup", "startmargeblok", "startmarginblock", "startnaast", "startnamemakeup", "startnarrower", "startomgeving", "startonderdeel", "startopelkaar", "startopmaak", "startopposite", "startoverlay", "startoverview", "startoverzicht", "startparagraph", "startpositioneren", "startpositioning", "startpostponing", "startprodukt", "startprofiel", "startprofile", "startprojekt", "startraster", "startregel", "startregelcorrectie", "startregelnummeren", "startregels", "startregister", "startsmaller", "startsymbolset", "startsymboolset", "startsynchronisatie", "startsynchronization", "starttabel", "starttabellen", "starttable", "starttables", "starttabulate", "starttekst", "starttekstlijn", "starttyping", "startuitlijnen", "startunpacked", "startvanelkaar", "startversie", "stelachtergrondenin", "stelachtergrondin", "stelalineasin", "stelantwoordgebiedin", "stelarrangerenin", "stelblankoin", "stelblokin", "stelblokjesin", "stelblokkopjein", "stelblokkopjesin", "stelbovenin", "stelboventekstenin", "stelbufferin", "stelbuttonsin", "stelciterenin", "stelclipin", "stelcombinatiesin", "stelcommentaarin", "steldoordefinierenin", "steldoornummerenin", "steldunnelijnenin", "stelexternefigurenin", "stelformulesin", "stelformulierenin", "stelhoofdin", "stelhoofdtekstenin", "stelingesprongentextin", "stelinmargein", "stelinspringenin", "stelinteractiebalkin", "stelinteractiein", "stelinteractiemenuin", "stelinteractieschermin", "stelinterliniein", "stelinvullijnenin", "stelinvulregelsin", "stelitemgroepin", "stelitemsin", "stelkadertekstenin", "stelkantlijnin", "stelkapitalenin", "stelkleurenin", "stelkleurin", "stelkolomgroepin", "stelkolomgroepregelsin", "stelkolomgroepstartin", "stelkolommenin", "stelkopin", "stelkopnummerin", "stelkoppeltekenin", "stelkoppenin", "stelkoptekstin", "stelkorpsin", "stelkorpsomgevingin", "stellabeltekstin", "stellayoutin", "stellegendain", "stellijndiktein", "stellijnin", "stellijstin", "stelmargeblokkenin", "stelmarkeringin", "stelnaastplaatsenin", "stelnummerenin", "stelnummerin", "stelomlijndin", "stelonderin", "stelonderstrepenin", "stelondertekstenin", "stelopmaakin", "stelopsommingenin", "stelpaginacommentaarin", "stelpaginanummerin", "stelpaginanummeringin", "stelpaginaovergangenin", "stelpaletin", "stelpapierformaatin", "stelpapierin", "stelparagraafnummerenin", "stelplaatsblokin", "stelplaatsblokkenin", "stelplaatsbloksplitsenin", "stelplaatsin", "stelpositionerenin", "stelprofielenin", "stelprogrammasin", "stelpublicatiesin", "stelrastersin", "stelreferentielijstin", "stelrefererenin", "stelregelnummerenin", "stelregelsin", "stelregisterin", "stelroterenin", "stelsamengesteldelijstin", "stelsectieblokin", "stelsectiein", "stelsmallerin", "stelsorterenin", "stelspatieringin", "stelstartstopin", "stelstrutin", "stelsubpaginanummerin", "stelsymboolsetin", "stelsynchronisatiebalkin", "stelsynchronisatiein", "stelsynoniemenin", "stelsysteemin", "steltaalin", "steltabellenin", "steltabin", "steltabulatiein", "steltekstin", "steltekstinhoudin", "steltekstlijnenin", "steltekstpositiein", "stelteksttekstenin", "steltekstvariabelein", "steltolerantiein", "steltypein", "steltypenin", "steluitlijnenin", "steluitvoerin", "stelurlin", "stelveldenin", "stelveldin", "stelversiesin", "stelvoetin", "stelvoetnootdefinitiein", "stelvoetnotenin", "stelvoettekstenin", "stelwiskundeuitlijnenin", "stelwitruimtein", "stopachtergrond", "stopalignment", "stopbuffer", "stopcitaat", 
"stopcodering", "stopcolumns", "stopcombinatie", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopenumeration", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgeg", "stopglobaal", "stophiding", "stopinteractiemenu", "stopitemgroup", "stopkantlijn", "stopkleur", "stopkolomgroep", "stopkolommen", "stopkolomopmaak", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokaal", "stoplokalevoetnoten", "stopmakeup", "stopmargeblok", "stopmarginblock", "stopnaast", "stopnamemakeup", "stopnarrower", "stopomgeving", "stoponderdeel", "stopopelkaar", "stopopmaak", "stopopposite", "stopoverlay", "stopoverview", "stopoverzicht", "stopparagraph", "stoppositioneren", "stoppositioning", "stoppostponing", "stopprodukt", "stopprofiel", "stopprofile", "stopprojekt", "stopraster", "stopregel", "stopregelcorrectie", "stopregelnummeren", "stopregels", "stopsmaller", "stopsymbolset", "stopsynchronisatie", "stopsynchronization", "stoptabel", "stoptabellen", "stoptable", "stoptables", "stoptabulate", "stoptekst", "stoptekstlijn", "stoptyping", "stopuitlijnen", "stopunpacked", "stopvanelkaar", "stopversie", "sub", "subformulenummer", "subject", "subpaginanummer", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestie", "switchnaarkorps", "switchtorawfont", "sym", "symbool", "symoffset", "synchronisatiebalk", "synchroniseer", "synonym", "taal", "tab", "tekstbreedte", "teksthoogte", "tekstlijn", "tekstreferentie", "tekstvariabele", "testkolom", "testpagina", "tex", "title", "toelichting", "toonexternefiguren", "toongrid", "tooninstellingen", "toonkader", "toonkleur", "toonkleurgroep", "toonkorps", "toonkorpsomgeving", "toonlayout", "toonopmaak", "toonpalet", "toonprint", "toonstruts", "toonsymboolset", "toonvelden", "totaalaantalpaginas", "txt", "typ", "type", "typebuffer", "typefile", "uit", "uitgerekt", "underbar", "underbars", "usecodering", "usedirectory", "vastespatie", "vastespaties", "veld", "veldstapel", "verbergblokken", "vergelijkkleurgroep", "vergelijkpalet", "verhoognummer", "verlaagnummer", "verplaatsformule", "verplaatsopgrid", "verplaatszijblok", "versie", "vertaal", "verwerkblokken", "verwerkpagina", "vl", "voetafstand", "voethoogte", "voetnoot", "voetnoottekst", "volgprofiel", "volgprofielversie", "volgversie", "volledigepaginanummer", "volledigregister", "voluit", "weekdag", "wilijnd", "wiskunde", "witruimte", "woonplaats", "woordrechts", "zetbreedte", "zethoogte" },
["pe"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "appendix", "cap", "chapter", "chem", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "coupledregister", "crlf", "definebodyfontDEF", "definebodyfontREF", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "definetypeface", "description", "enumeration", "framedtext", "indentation", "inmframed", "its", "labeling", "loadsorts", "loadsynonyms", "mapfontsize", "mediaeval", "mframed", "name", "nextsection", "nocap", "overbar", "overbars", "overstrike", "overstrikes", "paragraph", "part", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "ran", "register", "reservefloat", "resettextcontent", "section", "seeregister", "setupanswerarea", "setupcapitals", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setuplistalternative", "setupurl", "sort", "startalignment", "startbuffer", "startcolumns", "startcombination", "startcomment", "startdescription", "startdocument", "startenumeration", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startprofile", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startتولید", "startحقیقت", "startخط‌حاشیه", "startخط‌متن", "startرنگ", "startفشرده", "startمحیط", "startمنوی‌پانل", "startمولفه", "startنسخه", "startنقل‌قول", "startپروژه", "startپس‌زمینه", "stopalignment", "stopbuffer", "stopcolumns", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopenumeration", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stopprofile", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopتولید", "stopحقیقت", "stopخط‌حاشیه", "stopخط‌متن", "stopرنگ", "stopفشرده", "stopمحیط", "stopمنوی‌پانل", "stopمولفه", "stopنسخه", "stopنقل‌قول", "stopپروژه", "stopپس‌زمینه", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "title", "tooltip", "txt", "typ", "underbar", "underbars", "useJSscripts", "useURL", "useXMLfilter", "usedirectory", "useurl", "آفست‌صفحه", "آیتم", "آیتمها", "آینه", "اجباربلوکها", "ارتفاع‌آرایش", "ارتفاع‌بالا", "ارتفاع‌برگ", "ارتفاع‌ته‌برگ", "ارتفاع‌خط", "ارتفاع‌سربرگ", "ارتفاع‌متن", "ارتفاع‌پایین", "از", "ازکارانداختن‌منوی‌پانل", "استفاده‌بلوکها", "استفاده‌دستخط‌تایپ", "استفاده‌رمزینه", "استفاده‌شکل‌خارجی", "استفاده‌فرمانها", "استفاده‌قطعه‌موزیک‌خارجی", "استفاده‌مدول", "استفاده‌مدولها", "استفاده‌مرجعها", "استفاده‌مسیر", "استفاده‌نمادها", "استفاده‌نوشتارخارجی", "استفاده‌ویژگیها", "استفاده‌پرونده‌خارجی", "استفاده‌پرونده‌دستخط‌تایپ", 
"استفاده‌پرونده‌های‌خارجی", "اعدادلاتین", "افزودن", "اما", "امتحان‌نکن", "انتخاب‌برگ", "انتخاب‌بلوکها", "انتخاب‌نسخه", "انتقال‌به‌توری", "انتقال‌فرمول", "انتقال‌کنار‌شناور", "انجام‌دوباره", "بارگذاریها", "بارگذاری‌آرایش", "بارگذاری‌آیتمها", "بارگذاری‌ارجاع", "بارگذاری‌اندازه‌برگ", "بارگذاری‌باریکتر", "بارگذاری‌بافر", "بارگذاری‌بالا", "بارگذاری‌بخش", "بارگذاری‌بردباری", "بارگذاری‌برنامه‌ها", "بارگذاری‌برگ", "بارگذاری‌بست", "بارگذاری‌بلوک", "بارگذاری‌بلوکهای‌حاشیه", "بارگذاری‌بلوک‌بخش", "بارگذاری‌تایپ", "بارگذاری‌تایپ‌کردن", "بارگذاری‌تب", "بارگذاری‌ترتیب", "بارگذاری‌ترکیب‌ها", "بارگذاری‌تطابق", "بارگذاری‌تعریف‌پانوشت", "بارگذاری‌تنظیم", "بارگذاری‌تنظیم‌ریاضی", "بارگذاری‌ته‌برگ", "بارگذاری‌تورفتگی", "بارگذاری‌توضیح", "بارگذاری‌توضیح‌صفحه", "بارگذاری‌ثبت", "بارگذاری‌جانشانی", "بارگذاری‌جدولها", "بارگذاری‌جدول‌بندی", "بارگذاری‌خالی", "بارگذاری‌خروجی", "بارگذاری‌خط", "بارگذاری‌خطها", "بارگذاری‌خطهای‌حاشیه", "بارگذاری‌خطهای‌سیاه", "بارگذاری‌خطهای‌متن", "بارگذاری‌خطهای‌مجموعه‌ستون", "بارگذاری‌خطها‌ی‌نازک", "بارگذاری‌درج‌درخطها", "بارگذاری‌درج‌مخالف", "بارگذاری‌درون‌حاشیه", "بارگذاری‌دوران", "بارگذاری‌دکمه‌ها", "بارگذاری‌راهنما", "بارگذاری‌رنگ", "بارگذاری‌رنگها", "بارگذاری‌زبان", "بارگذاری‌ستونها", "بارگذاری‌سر", "بارگذاری‌سربرگ", "بارگذاری‌سرها", "بارگذاری‌سیستم", "بارگذاری‌شرح", "بارگذاری‌شرحها", "بارگذاری‌شروع‌مجموعه‌ستون", "بارگذاری‌شروع‌پایان", "بارگذاری‌شماره", "بارگذاری‌شماره‌زیرصفحه", "بارگذاری‌شماره‌سر", "بارگذاری‌شماره‌صفحه", "بارگذاری‌شماره‌گذاری", "بارگذاری‌شماره‌گذاریها", "بارگذاری‌شماره‌گذاری‌صفحه", "بارگذاری‌شماره‌گذاری‌پاراگراف", "بارگذاری‌شماره‌‌گذاری‌خط", "بارگذاری‌شناور", "بارگذاری‌شناورها", "بارگذاری‌شکافتن‌شناورها", "بارگذاری‌شکلهای‌خارجی", "بارگذاری‌طرح", "بارگذاری‌طرح‌بندی", "بارگذاری‌عرض‌خط", "بارگذاری‌فاصله‌بین‌خط", "بارگذاری‌فرمولها", "بارگذاری‌فضای‌سفید", "بارگذاری‌فضا‌گذاری", "بارگذاری‌قالبی", "بارگذاری‌قلم‌متن", "بارگذاری‌لوح", "بارگذاری‌لیست", "بارگذاری‌لیست‌ترکیبی", "بارگذاری‌لیست‌مرجع", "بارگذاری‌مترادفها", "بارگذاری‌متغیر‌متن", "بارگذاری‌متن", "بارگذاری‌متنهای‌بالا", "بارگذاری‌متن‌سر", "بارگذاری‌متن‌سربرگ", "بارگذاری‌متن‌قالبی", "بارگذاری‌متن‌متنها", "بارگذاری‌متن‌پانوشت", "بارگذاری‌متن‌پایین", "بارگذاری‌مجموعه‌ستون", "بارگذاری‌مجموعه‌نماد", "بارگذاری‌محیط‌قلم‌متن", "بارگذاری‌منوی‌پانل", "بارگذاری‌مکان‌متن", "بارگذاری‌مکان‌گذاری", "بارگذاری‌میدان", "بارگذاری‌میدانها", "بارگذاری‌میله‌تطابق", "بارگذاری‌میله‌زیر", "بارگذاری‌میله‌پانل", "بارگذاری‌نسخه‌ها", "بارگذاری‌نشانه‌شکستن", "بارگذاری‌نشانه‌گذاری", "بارگذاری‌نشرها", "بارگذاری‌نقل", "بارگذاری‌پاراگرافها", "بارگذاری‌پانل", "بارگذاری‌پانوشتها", "بارگذاری‌پایین", "بارگذاری‌پرده‌ها", "بارگذاری‌پرده‌پانل", "بارگذاری‌پروفایلها", "بارگذاری‌پرکردن‌خطها", "بارگذاری‌پس‌زمینه", "بارگذاری‌پس‌زمینه‌ها", "بارگذاری‌چیدن", "بارگذاری‌گذارصفحه", "بارگذاری‌گروههای‌آیتم", "بارگذاری‌گروه‌آیتم", "بازنشانی", "بازنشانی‌شماره", "بازنشانی‌متن", "بازنشانی‌نشانه‌گذاری", "باگذاری‌متن‌برچسب", "بدون‌بعد", "بدون‌بلوکهای‌بیشتر", "بدون‌تورفتگی", "بدون‌خط‌بالاوپایین", "بدون‌خط‌سروته‌برگ", "بدون‌فایلهای‌بیشتر", "بدون‌فضا", "بدون‌فضای‌سفید", "بدون‌لیست", "بدون‌نشانه‌گذاری", "برنامه", "بروبه", "بروبه‌جعبه", "بروبه‌صفحه", "بروپایین", "برچسب", "برچسبها", "بعد", "بلند", "بلوکهای‌پردازش", "بلوکها‌پنهان", "بنویس‌بین‌لیست", "بنویس‌درثبت", "بنویس‌درلیست‌مرجع", "بنویس‌در‌لیست", "تاریخ", "تاریخ‌جاری", "تاریخ‌رجوع", "تایپ", "تایپ‌بافر", "تایپ‌پرونده", "تب", "ترجمه", "تطابق", "تعریف", "تعریف‌آرایش", "تعریف‌آرم", "تعریف‌الگوی‌جدول", "تعریف‌اندازه‌برگ", "تعریف‌بافر", "تعریف‌بخش", "تعریف‌برنامه", "تعریف‌برچسب", "تعریف‌بلوک", "تعریف‌بلوک‌بخش", "تعریف‌تایپ", 
"تعریف‌تایپ‌کردن", "تعریف‌تبدیل", "تعریف‌ترتیب", "تعریف‌ترکیب", "تعریف‌تنظیم‌ریاضی", "تعریف‌توده‌میدان", "تعریف‌ثبت", "تعریف‌جانشانی", "تعریف‌جدول‌بندی", "تعریف‌جعبه‌‌افقی", "تعریف‌حرف", "تعریف‌خالی", "تعریف‌خروجی", "تعریف‌خط‌حائل", "تعریف‌درون‌حاشیه", "تعریف‌رنگ", "تعریف‌زیرمیدان", "تعریف‌سبک", "تعریف‌سبک‌قلم", "تعریف‌سر", "تعریف‌شرح", "تعریف‌شروع‌پایان", "تعریف‌شماره‌بندی", "تعریف‌شمایل‌مرجع", "تعریف‌شناور", "تعریف‌شکستن‌ستون", "تعریف‌شکست‌صفحه", "تعریف‌طرح‌بندی", "تعریف‌فرمان", "تعریف‌قالبی", "تعریف‌قلم", "تعریف‌قلم‌خام", "تعریف‌قلم‌متن", "تعریف‌لایه", "تعریف‌لهجه", "تعریف‌لوح", "تعریف‌لیست", "تعریف‌لیست‌ترکیبی", "تعریف‌لیست‌مرجع", "تعریف‌مترادفها", "تعریف‌مترادف‌قلم", "تعریف‌متغیرمتن", "تعریف‌متن", "تعریف‌متن‌قالبی", "تعریف‌مجموعه‌ستون", "تعریف‌محیط‌قلم‌بدنه", "تعریف‌مرجع", "تعریف‌منوی‌پانل", "تعریف‌مکان‌متن", "تعریف‌میدان", "تعریف‌میدان‌اصلی", "تعریف‌نسخه", "تعریف‌نشانه‌گذاری", "تعریف‌نماد", "تعریف‌نمادشکل", "تعریف‌پاراگرافها", "تعریف‌پروفایل", "تعریف‌پوشش", "تعریف‌گروه‌آیتم", "تعریف‌گروه‌رنگ", "تعیین‌شماره", "تعیین‌شماره‌سر", "تعیین‌متغیر‌متن", "تعیین‌محتوای‌متن", "تعیین‌مشخصات‌ثبت", "تعیین‌مشخصات‌لیست", "تغییربه‌قلم‌بدنه", "تغییربه‌قلم‌خام", "تنظیم‌راست", "تنظیم‌طرح‌بندی", "تنظیم‌وسط", "توجه", "تورفتگی", "توری", "تولید", "تک", "ثبت‌زوج", "ثبت‌کامل", "جداسازی‌نشانه‌گذاری", "حاش", "حرف", "حرفها", "حفظ‌بلوکها", "حقیقت", "خالی", "خطهای‌سیاه", "خطهای‌نازک", "خطها‌خالی", "خط‌حاشیه", "خط‌سیاه", "خط‌متن", "خط‌مو", "خط‌نازک", "خ‌ا", "خ‌ع", "در", "درج‌آرمها", "درج‌ثبت", "درج‌خط", "درج‌درخط", "درج‌درخطها", "درج‌درمتن", "درج‌درمیدان", "درج‌در‌بالای‌یکدیگر", "درج‌در‌توری", "درج‌راهنما", "درج‌زیرفرمول", "درج‌شماره‌سر", "درج‌شماره‌صفحه", "درج‌شناور", "درج‌فرمول", "درج‌لیست", "درج‌لیست‌خام", "درج‌لیست‌مختلط", "درج‌لیست‌مرجع", "درج‌متغیرمتن", "درج‌متن‌سر", "درج‌پانوشتها", "درج‌پانوشتهای‌موضعی", "درج‌چوب‌خط", "درج‌کنار‌به‌کنار", "درحاشیه", "درحاشیه‌دیگر", "درحاشیه‌راست", "درحاشیه‌چپ", "درخارجی", "درخط", "درداخلی", "درراست", "درصفحه", "درقالبی", "درلبه‌راست", "درلبه‌چپ", "درمورد", "درون", "درپر", "درچپ", "دریافت‌بافر", "دریافت‌شماره", "دریافت‌نشانه", "دوران", "دکمه", "دکمه‌منو", "دکمه‌پانل", "رج", "رجوع", "رنگ", "رنگ‌خاکستری", "روزهفته", "ریاضی", "زبان", "زبان‌اصلی", "ستون", "ستون‌امتحان", "سر", "سرپوش‌کوچک‌نه", "شروع‌آرایش", "شروع‌آرایش‌ستون", "شروع‌باریکتر", "شروع‌بازبینی", "شروع‌بلوک‌حاشیه", "شروع‌ترکیب", "شروع‌تصحیح‌خط", "شروع‌تطابق", "شروع‌تنظیم", "شروع‌تولید", "شروع‌جدول", "شروع‌جدولها", "شروع‌خط", "شروع‌خطها", "شروع‌خط‌حاشیه", "شروع‌خط‌متن", "شروع‌رنگ", "شروع‌ستونها", "شروع‌سراسری", "شروع‌شماره‌گذاری‌خط", "شروع‌شکل", "شروع‌غیر‌فشرده", "شروع‌فشرده", "شروع‌متن", "شروع‌مجموعه‌ستون", "شروع‌مجموعه‌نماد", "شروع‌محیط", "شروع‌مخالف", "شروع‌موضعی", "شروع‌مولفه", "شروع‌مکان‌گذاری", "شروع‌نسخه", "شروع‌نقل‌قول", "شروع‌نوشتار", "شروع‌پانوشتهای‌موضعی", "شروع‌پروفایل", "شروع‌پروژه", "شروع‌پس‌زمینه", "شروع‌پوشش", "شروع‌کد", "شماره‌افزایش", "شماره‌زیرصفحه", "شماره‌زیرفرمول", "شماره‌سر", "شماره‌سرجاری", "شماره‌صفحه", "شماره‌صفحه‌کامل", "شماره‌فرمول", "شماره‌مبدل", "شماره‌ها", "شماره‌کاهش", "شماره‌کل‌صفحه‌ها", "شکافتن‌شناور", "شکل‌خارجی", "صفحه", "صفحه‌تست", "صفحه‌زوج", "صفحه‌پردازش", "طول‌لیست", "عبوربلوکها", "عرض‌آرایش", "عرض‌برگ", "عرض‌حاشیه", "عرض‌حاشیه‌خارجی", "عرض‌حاشیه‌داخلی", "عرض‌حاشیه‌راست", "عرض‌حاشیه‌چپ", "عرض‌خط", "عرض‌لبه", "عرض‌لبه‌خارجی", "عرض‌لبه‌داخلی", "عرض‌لبه‌راست", "عرض‌لبه‌چپ", "عرض‌لیست", "عرض‌متن", "عمق‌صفحه", "عنوان‌حاشیه", "فاصله‌بالا", "فاصله‌ته‌برگ", "فاصله‌حاشیه", "فاصله‌حاشیه‌خارجی", "فاصله‌حاشیه‌داخلی", "فاصله‌حاشیه‌راست", "فاصله‌حاشیه‌چپ", "فاصله‌سربرگ", "فاصله‌لبه", 
"فاصله‌لبه‌خارجی", "فاصله‌لبه‌داخلی", "فاصله‌لبه‌راست", "فاصله‌لبه‌چپ", "فاصله‌پایین", "فاصله‌پشت", "فشرده", "فضا", "فضاهای‌ثابت", "فضای‌بالا", "فضای‌برش", "فضای‌ثابت", "فضای‌سفید", "فضای‌سفیدصحیح", "فضای‌پایین", "فوری‌به‌لیست", "فوری‌بین‌لیست", "قالبی", "لوح‌مقایسه", "ماه", "متغیر متن", "متن‌برچسب", "متن‌حاشیه", "متن‌سر", "متن‌پانوشت", "محیط", "مراجعه", "مرجع", "مرجع‌صفحه", "مرجع‌متن", "مرحله‌سر", "مسکن", "معنی‌واحد", "مقایسه‌گروه‌رنگ", "مقدارخاکستری", "مقداررنگ", "مقیاس", "منفی", "منوی‌پانل", "مولفه", "مکان", "مکان‌متن", "میدان", "میدانهای‌گزارش", "میدان‌شبیه‌سازی", "میدان‌پشته", "میدان‌کپی", "میله‌تطابق", "میله‌رنگ", "میله‌پانل", "ناشناس", "نام‌ماکرو", "نسخه", "نسخه‌نشانه", "نشانه‌گذاری", "نشانه‌گذاری‌زوج", "نشر", "نصب‌زبان", "نقطه‌ها", "نقل", "نقل‌قول", "نم", "نماد", "نمادسر", "نمادلیست", "نمایش‌آرایش", "نمایش‌بارگذاریها", "نمایش‌بستها", "نمایش‌توری", "نمایش‌رنگ", "نمایش‌شکلهای‌خارجی", "نمایش‌طرح‌بندی", "نمایش‌قالب", "نمایش‌قلم‌بدنه", "نمایش‌لوح", "نمایش‌مجموعه‌علامت", "نمایش‌محیط‌قلم‌بدنه", "نمایش‌میدانها", "نمایش‌چاپ", "نمایش‌گروه‌رنگ", "نوشتارزوج", "هدایت", "پا", "پابا", "پانوشت", "پایان‌آرایش", "پایان‌آرایش‌ستون", "پایان‌بازبینی", "پایان‌بلوک‌حاشیه", "پایان‌ترکیب", "پایان‌تصحیح‌خط", "پایان‌تطابق", "پایان‌تنظیم", "پایان‌تولید", "پایان‌جدول", "پایان‌جدولها", "پایان‌خط", "پایان‌خطها", "پایان‌خط‌حاشیه", "پایان‌خط‌متن", "پایان‌رنگ", "پایان‌ستونها", "پایان‌سراسری", "پایان‌شماره‌گذاری‌خط", "پایان‌غیرفشرده", "پایان‌فشرده", "پایان‌متن", "پایان‌مجموعه‌ستون", "پایان‌محیط", "پایان‌مخالف", "پایان‌موضعی", "پایان‌مولفه", "پایان‌مکان‌گذاری", "پایان‌نازکتر", "پایان‌نسخه", "پایان‌نقل‌قول", "پایان‌نوشتار", "پایان‌پانوشتهای‌موضعی", "پایان‌پروفایل", "پایان‌پروژه", "پایان‌پس‌زمینه", "پایان‌پوشش", "پایان‌کد", "پایین", "پرده", "پروژه", "پرکردن‌میدان", "پس‌زمینه", "پیروی‌نسخه", "پیروی‌نسخه‌پروفایل", "پیروی‌پروفایل", "چاپ‌ارتفاع‌برگ", "چاپ‌عرض‌برگ", "چوبخط", "چپ‌چین", "کاغذزوج", "کسر", "کشیده", "کلمه‌حاشیه", "کلمه‌راست", "گیره", "یادداشت", "یک‌جا", "یک‌خط" },
["ro"]={ "CAP", "CUVANT", "CUVINTE", "Cap", "Caps", "Cuvant", "Cuvinte", "KAP", "Kap", "Kaps", "LUNA", "Litera", "Litere", "Numere", "Numereromane", "ZIDINSAPTAMANA", "adapteazaaspect", "adubuffer", "adumarcaje", "afiseazaaspect", "afiseazacampuri", "afiseazaculoare", "afiseazafiguriexterne", "afiseazafonttext", "afiseazagrid", "afiseazagrupculoare", "afiseazamakeup", "afiseazamediufonttext", "afiseazapaleta", "afiseazarama", "afiseazasetari", "afiseazasetsimboluri", "afiseazastruts", "afiseazatiparire", "aliniat", "aliniatcentru", "aliniatdreapta", "aliniatstanga", "appendix", "arg", "ascundeblocuri", "atleftmargin", "atrightmargin", "baraculoare", "barainteractiune", "barasincronizare", "blanc", "but", "butoaneinteractiune", "buton", "butonmeniu", "camp", "campumplere", "cap", "chapter", "chem", "citat", "clip", "cloneazacamp", "coloana", "comment", "comparagrupculoare", "comparapaleta", "completeazanumarpagina", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenta", "convertestenumar", "copiazacamp", "corecteazaspatiualb", "coupledregister", "crlf", "culoare", "culoaregri", "cupleazadocument", "cupleazamarcaje", "cupleazaregistru", "cutspace", "cuvantdreapta", "cuvantmarginal", "data", "datacurenta", "datareferit", "decrementnumber", "decupleazamarcaje", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "definepagebreak", "defineplacement", "defineste", "definesteaccent", "definesteantet", "definesteblanc", "definestebloc", "definesteblocsectiune", "definestebuffer", "definestecamp", "definestecampprincipal", "definestecaracter", "definestecomanda", "definesteconversie", "definesteculoare", "definestedescriere", "definestedimensiunehartie", "definesteenumerare", "definesteeticheta", "definestefloat", "definestefont", "definestefontraw", "definestefonttext", "definesteformatreferinte", "definestegrupculori", "definestehbox", "definesteinconjurare", "definestelista", "definestelistacombinata", "definestelistareferinte", "definestelogo", "definestemakeup", "definestemarcaje", "definestemediulfonttext", "definestemeniuinteractiune", "definesteoutput", "definesteoverlay", "definestepaleta", "definesteparagraf", "definestepozitietext", "definesteprofil", "definesteprogram", "definestereferinte", "definesteregistru", "definesterigla", "definestesablontabel", "definestesectiune", "definestesimbol", "definestesimbolfigura", "definestesinonim", "definestesinonimfont", "definestesortare", "definestestartstop", "definestestil", "definestestilfont", "definestestivacampuri", "definestesubcamp", "definestetabulatori", "definestetext", "definestetexteinconjurate", "definestetextinconjurat", "definestetyping", "definestevariabilatext", "definesteversiune", "definetype", "definetypeface", "description", "despre", "determinacaracteristicilelistei", "determinacaracteristiciregistru", "determinanumartitlu", "dezactiveazameniuinteractiune", "dimensiune", "din", "distantaantet", "distantacolt", "distantacoltdreapta", "distantacoltstanga", "distantajos", "distantamargine", "distantamarginedreapta", "distantamarginestanga", "distantasubsol", "distantasus", "domiciliu", "dute", "dutebox", "dutepagina", "ecran", "el", "element", "emptylines", "enumeration", "eticheta", "etichete", "fact", "faraaliniat", 
"faradimensiune", "farafisiere", "faraliniiantetsisubsol", "faraliniisussijos", "faralista", "faramarcaje", "faraspatiu", "faraspatiualb", "figuraexterna", "firdepar", "folosesteURL", "folosestebloc", "folosestecodificarea", "folosestecomenzi", "folosestedirector", "folosestedocumentextern", "folosestefiguraexterna", "folosestefisiereexterne", "folosestefisierextern", "folosestemodul", "folosestemodule", "folosestemuzicaexterna", "folosestereferinte", "folosestescriptJS", "folosestesimboluri", "folosestespeciale", "folosesteurl", "footnotetext", "forteazablocuri", "fractie", "framed", "framedtext", "fundal", "gatablocuri", "getnumber", "grid", "grosimelinie", "hartiedubla", "headsym", "hl", "immediatebetweenlist", "immediatetolist", "impachetat", "impartefloat", "in", "inalt", "inaltamargine", "inaltimeantet", "inaltimehartie", "inaltimehartieimprimanta", "inaltimejos", "inaltimelista", "inaltimemakeup", "inaltimesubsol", "inaltimesus", "inaltimetext", "indentation", "indreapta", "inframed", "ininner", "injos", "inlinie", "inmaframed", "inmargineadreapta", "inmargineastanga", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inparteadreapta", "inparteastanga", "instalarelimba", "instanga", "intins", "jos", "jossus", "kap", "la", "labeling", "lapagina", "latimecoltdreapta", "latimecoltstanga", "latimecolturi", "latimehartie", "latimehartieimprimanta", "latimelista", "latimemakeup", "latimemargine", "latimemarginedreapta", "latimemarginestanga", "latimetext", "leg", "limba", "limbaprincipala", "liniemargine", "linieneagra", "liniesubtire", "linieumplere", "liniinegre", "liniisubtiri", "listsymbol", "litera", "litere", "loadsorts", "loadsynonyms", "logcampuri", "luna", "lungimelista", "maframed", "mapfontsize", "mar", "marcaje", "marcheazaversiune", "marginal", "matematica", "mediaeval", "mediu", "meniuinteractiune", "minicitat", "moveformula", "movesidefloat", "mutapegrid", "name", "navigating", "necunoscut", "nextsection", "niveltitlu", "nocap", "nokap", "nop", "nota", "notasubsol", "numarformula", "numarincrement", "numarpagina", "numarsubformula", "numartitlu", "numartitlucurent", "numartotalpagini", "numberofsubpages", "nume", "numere", "numereromane", "numeunitate", "nutesta", "olinie", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "pagina", "paginadubla", "paragraph", "part", "pastreazablocuri", "pelung", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placerawlist", "placereferencelist", "plaseazapegrid", "plaseazasemnecarte", "potrivestecamp", "pozitie", "pozitietext", "proceseazabloc", "proceseazapagina", "produs", "program", "proiect", "publicatie", "puncte", "punedeasuprafiecareia", "punefatainfata", "puneformula", "punelegenda", "punelista", "punelistacombinata", "punelogouri", "punenotesubsol", "punenotesubsollocale", "punenumarpagina", "puneregistru", "punerigla", "punesubformula", "punevariabilatext", "ran", "ref", "refa", "referinta", "referintapagina", "referintatext", "referit", "referring", "reflexie", "register", "remarca", "reservefloat", "reset", "reseteazamarcaje", "resetnumber", "resettextcontent", "riglatext", "rigleumplere", "roteste", "saripesteblocuri", "scala", "scriebuffer", "scrieinlista", "scrieinlistareferinte", "scrieinregistru", "scrieintreliste", "section", "seeregister", "selecteazablocuri", "selecteazahartie", "selecteazaversiune", 
"semncarte", "setarebarasincronizare", "setareitemization", "setarelimba", "setareoutput", "setarepozitie", "setaresincronizare", "setari", "seteazaaliniat", "seteazaalinierea", "seteazaantet", "seteazaaranjareapag", "seteazaaspect", "seteazabarainteractiune", "seteazablanc", "seteazabloc", "seteazablocsectiune", "seteazablocurimarginale", "seteazabuffer", "seteazabutoane", "seteazacamp", "seteazacampuri", "seteazaclipping", "seteazacoloane", "seteazacombinari", "seteazacomentariu", "seteazacomentariupagina", "seteazaculoare", "seteazaculori", "seteazadefinireanotasubsol", "seteazadescriere", "seteazadimensiunihartie", "seteazaecrane", "seteazaecraninteractiune", "seteazaelemente", "seteazaenumerare", "seteazafiguriexterne", "seteazafloat", "seteazafloats", "seteazafonttext", "seteazaformulare", "seteazaformule", "seteazafundal", "seteazafundaluri", "seteazagrosimelinie", "seteazaimpartireafloat", "seteazainconjurat", "seteazaingust", "seteazainteractiunea", "seteazajos", "seteazalegenda", "seteazalegendele", "seteazaliniesilabe", "seteazaliniesubtire", "seteazalinii", "seteazaliniimargine", "seteazaliniinegre", "seteazaliniiumplere", "seteazalista", "seteazalistacombinata", "seteazalistareferinte", "seteazamajuscule", "seteazamakeup", "seteazamarcaje", "seteazamarginal", "seteazamediulfonttext", "seteazameniuinteractiune", "seteazaminicitat", "seteazanotasubsol", "seteazanumarpagina", "seteazanumarsubpagina", "seteazanumartitlu", "seteazanumerotare", "seteazanumerotarelinii", "seteazanumerotarepagina", "seteazanumerotareparagrafe", "seteazapaleta", "seteazaparagrafe", "seteazaplasareaopozita", "seteazapozitietext", "seteazaprofile", "seteazaprograme", "seteazapublicatii", "seteazareferinte", "seteazaregistru", "seteazarigla", "seteazarigletext", "seteazarigleumplere", "seteazarotare", "seteazasectiune", "seteazasimbol", "seteazasinonime", "seteazasistem", "seteazasortare", "seteazaspatiu", "seteazaspatiualb", "seteazaspatiuinterliniar", "seteazastrut", "seteazasublinie", "seteazasubsol", "seteazasus", "seteazatab", "seteazatabele", "seteazatabulatori", "seteazatext", "seteazatexteantet", "seteazatextejos", "seteazatextesubsol", "seteazatextesus", "seteazatextetext", "seteazatexteticheta", "seteazatexttitlu", "seteazatitlu", "seteazatitluri", "seteazatoleranta", "seteazatranzitiepagina", "seteazatype", "seteazatyping", "seteazaurl", "seteazavariabilatext", "seteazaversiuni", "setnumber", "settextcontent", "setupanswerarea", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setupstartstop", "setvariabilatext", "sim", "simbol", "sincronizeaza", "sort", "spatiifixate", "spatiu", "spatiualb", "spatiufixat", "spatiujos", "spatiuspate", "spatiusus", "startalignment", "startaliniere", "startblocmarginal", "startbuffer", "startcitat", "startcodificare", "startcoloane", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombinare", "startcombination", "startcomment", "startcomponenta", "startcorectielinie", "startculoare", "startdescription", "startdocument", "startenumeration", "startfact", "startfigura", "startfigure", "startfloattext", "startformula", "startframedtext", "startfundal", "startglobal", "starthiding", "startimpachetat", "startingust", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlinie", 
"startliniemargine", "startlinii", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmediu", "startmeniuinteractiune", "startnamemakeup", "startnarrower", "startneimpachetat", "startnotesubsollocale", "startnumerotarelinii", "startopozit", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startpozitionare", "startprodus", "startprofil", "startprofile", "startproiect", "startraster", "startregister", "startriglatext", "startsetsimboluri", "startsincronizare", "startsymbolset", "startsynchronization", "starttabel", "starttabele", "starttable", "starttables", "starttabulate", "starttext", "starttyping", "startunpacked", "startversiune", "stivacampuri", "stopalignment", "stopaliniere", "stopblobal", "stopblocmarginal", "stopbuffer", "stopcitat", "stopcodificare", "stopcoloane", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombinare", "stopcombination", "stopcomment", "stopcomponenta", "stopcorectielinie", "stopculoare", "stopdescription", "stopdocument", "stopenumeration", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopfundal", "stophiding", "stopimpachetat", "stopingust", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplinie", "stopliniemargine", "stoplinii", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmediu", "stopmeniuinteractiune", "stopnamemakeup", "stopnarrower", "stopneimpachetat", "stopnotesubsollocale", "stopnumerotarelinii", "stopopozit", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozitionare", "stopprodus", "stopprofil", "stopprofile", "stopproiect", "stopraster", "stopriglatext", "stopsincronizare", "stopsymbolset", "stopsynchronization", "stoptabel", "stoptabele", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptyping", "stopunpacked", "stopversiune", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "tab", "testcolumn", "testpage", "tex", "texteticheta", "textmarginal", "texttitlu", "textumplere", "title", "titlu", "titlumarginal", "tooltip", "traduce", "trecilafontraw", "trecilafonttext", "txt", "typ", "type", "typefile", "underbar", "underbars", "undeva", "urmeazaprofil", "urmeazaversiune", "urmeazaversiuneprofil", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "valoareculoare", "valoaregri", "variabilatext", "versiune", "vl", "zidinsaptamana" },
} \ No newline at end of file
diff --git a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metafun.lua b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metafun.lua
index 749be7663c1..524a1ca494f 100644
--- a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metafun.lua
@@ -1,4 +1,4 @@
return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "transparent", "withtransparency", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions" },
- ["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent" },
+ ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "decorated", "redecorated", "undecorated" },
+ ["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
} \ No newline at end of file
diff --git a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metapost.lua b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metapost.lua
index d3c19908956..df97c0a268a 100644
--- a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metapost.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-metapost.lua
@@ -1,7 +1,7 @@
return {
["commands"]={ "beginfig", "endfig", "rotatedaround", "reflectedabout", "arrowhead", "currentpen", "currentpicture", "cuttings", "defaultfont", "extra_beginfig", "extra_endfig", "ditto", "EOF", "down", "evenly", "fullcircle", "halfcircle", "identity", "in", "left", "origin", "pensquare", "quartercircle", "right", "unitsquare", "up", "withdots", "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", "round", "unitvector", "whatever", "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", "loggingall", "interact", "tracingall", "tracingnone", "pickup", "undraw", "unfill", "unfilldraw", "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", "incr", "label", "labels", "max", "min", "thelabel", "z", "beginchar", "blacker", "capsule_end", "change_width", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "endchar", "extra_beginchar", "extra_endchar", "extra_setup", "font_coding_scheme", "clearxy", "clearit", "clearpen", "shipit", "font_extra_space", "exitunless", "relax", "hide", "gobble", "gobbled", "stop", "blankpicture", "counterclockwise", "tensepath", "takepower", "direction", "softjoin", "makelabel", "rotatedabout", "flex", "superellipse", "erase", "image", "nullpen", "savepen", "clearpen", "penpos", "penlabels", "range", "numtok", "thru", "z", "laboff", "bye", "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", "graypart", "graycolor", "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in" },
["internals"]={ "mitered", "rounded", "beveled", "butt", "squared", "eps", "epsilon", "infinity", "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", "pen_lft", "pen_rt", "pen_top", "pen_bot" },
- ["primitives"]={ "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", "tracingcommands", "tracingequations", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", "tracingspecs", "tracingstats", "tracingtitles", "truecorners", "warningcheck", "year", "false", "nullpicture", "pencircle", "true", "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", "hex", "infont", "intersectiontimes", "known", "length", "llcorner", "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", "postcontrol", "precontrol", "reverse", "rotated", "scaled", "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", "yypart", "zscaled", "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", "shipout", "show", "showdependencies", "showtoken", "showvariable", "special", "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", "interpath", "on", "off", "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", "tertiary", "primarydef", "secondarydef", "tertiarydef", "randomseed", "also", "contour", "doublepath", "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "forsuffixes", "downto", "upto", "step", "until", "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", "fontmaking", "charexists", "cullit", "currenttransform", "gfcorners", "grayfont", "hround", "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", "displaying", "currentwindow", "screen_rows", "screen_cols", "pixels_per_inch", "cull", "display", "openwindow", "numspecial", "totalweight", "autorounding", "fillin", "proofing", "tracingpens", "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", "chardy", "hppp", "tracingedges", "vppp", "extra_beginfig", "extra_endfig", "mpxbreak", "endinput", "message", "delimiters", "turningnumber", "errmessage", "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", "withprescript", "withpostscript", "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", "rgbcolor", "cmykcolor", "greycolor", "graycolor", "colormodel", "graypart", "expandafter" },
+ ["primitives"]={ "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", "tracingcommands", "tracingequations", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", "tracingspecs", "tracingstats", "tracingtitles", "truecorners", "warningcheck", "year", "false", "nullpicture", "pencircle", "true", "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", "hex", "infont", "intersectiontimes", "known", "length", "llcorner", "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", "postcontrol", "precontrol", "reverse", "rotated", "scaled", "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", "yypart", "zscaled", "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", "shipout", "show", "showdependencies", "showtoken", "showvariable", "special", "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", "interpath", "on", "off", "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", "tertiary", "primarydef", "secondarydef", "tertiarydef", "randomseed", "also", "contour", "doublepath", "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", "forsuffixes", "downto", "upto", "step", "until", "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", "fontmaking", "charexists", "cullit", "currenttransform", "gfcorners", "grayfont", "hround", "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", "displaying", "currentwindow", "screen_rows", "screen_cols", "pixels_per_inch", "cull", "display", "openwindow", "numspecial", "totalweight", "autorounding", "fillin", "proofing", "tracingpens", "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", "chardy", "hppp", "tracingedges", "vppp", "extra_beginfig", "extra_endfig", "mpxbreak", "endinput", "message", "delimiters", "turningnumber", "errmessage", "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", "withprescript", "withpostscript", "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", "rgbcolor", "cmykcolor", "greycolor", "graycolor", "colormodel", "graypart", "dashpart", "penpart", "stroked", "filled", "textual", "clipped", "bounded", "expandafter" },
["shortcuts"]={ "..", "...", "--", "---", "&" },
["tex"]={ "btex", "etex", "verbatimtex" },
 }
\ No newline at end of file
diff --git a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-tex.lua b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-tex.lua
index 0dd0e1764b0..7d710740c29 100644
--- a/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-tex.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/data/scite-context-data-tex.lua
@@ -1,9 +1,9 @@
return {
["aleph"]={ "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "Omegaminorversion", "Omegarevision", "Omegaversion", "boxdir", "pagebottomoffset", "pagerightoffset" },
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
- ["luatex"]={ "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaccents", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathbotaccent", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "alignmark", "aligntab", "attribute", "attributedef", "catcodetable", "clearmarks", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "fontid", "formatname", "gleaders", "ifabsdim", "ifabsnum", "ifprimitive", "initcatcodetable", "latelua", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mathstyle", "nokerns", "noligs", "outputbox", "pageleftoffset", "pagetopoffset", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "savecatcodetable", "scantextokens", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", 
"suppressoutererror", "synctex" },
+ ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "alignmark", "aligntab", "attribute", "attributedef", "catcodetable", "clearmarks", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "fontid", "formatname", "gleaders", "ifabsdim", "ifabsnum", "ifprimitive", "initcatcodetable", "latelua", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mathstyle", "nokerns", "noligs", "outputbox", "pageleftoffset", "pagetopoffset", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "savecatcodetable", "scantextokens", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex" },
["omega"]={ "OmegaVersion", "bodydir", "chardp", "charht", "charit", "charwd", "leftghost", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "mathdir", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "oradical", "pagedir", "pageheight", "pagewidth", "pardir", "rightghost", "textdir" },
["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
- ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaccents", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathbotaccent", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "chardp", "charht", "charit", "charwd", "cleaders", 
"clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastskip", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathstyle", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "noligs", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", 
"pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "quitvmode", "radical", "raise", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", 
"uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year" },
+ ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "chardp", "charht", "charit", "charwd", "cleaders", "clearmarks", "closein", "closeout", 
"clubpenalties", "clubpenalty", "copy", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastskip", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathstyle", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "noligs", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", 
"pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "quitvmode", "radical", "raise", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", 
"unhbox", "unhcopy", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year" },
["xetex"]={ "XeTeXversion" },
 }
\ No newline at end of file
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-cld.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-cld.lua
index 4aa2901d232..1e30c18a287 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-cld.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-cld.lua
@@ -7,7 +7,6 @@ local info = {
}
local lexer = lexer
-local token = lexer.token
local cldlexer = { _NAME = "cld", _FILENAME = "scite-context-lexer-cld" }
local whitespace = lexer.WHITESPACE -- maybe we need to fix this
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
index 26bdb8dbccb..fdec301bef2 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
@@ -1,3 +1,11 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
local lexer = lexer
local token = lexer.token
local P = lpeg.P
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua.lua
index 0ace7afc6ff..9672e110e4f 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-lua.lua
@@ -6,6 +6,8 @@ local info = {
license = "see context related readme files",
}
+-- todo: _G.print (keep _G colored)
+
if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
local lexer = lexer
@@ -34,18 +36,32 @@ local keywords = {
}
local functions = {
- 'assert', 'collectgarbage', 'dofile', 'error', 'getfenv', 'getmetatable',
- 'ipairs', 'load', 'loadfile', 'loadstring', 'module', 'next', 'pairs',
- 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require', 'setfenv',
- 'setmetatable', 'tonumber', 'tostring', 'type', 'unpack', 'xpcall', "select",
+ 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable',
+ 'ipairs', 'load', 'loadfile', 'module', 'next', 'pairs',
+ 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require',
+ 'setmetatable', 'tonumber', 'tostring', 'type', 'unpack', 'xpcall', 'select',
+
+ "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
}
local constants = {
- '_G', '_VERSION', '_M', "...",
+ '_G', '_VERSION', '_M', '...', '_ENV',
+ -- here too
+ '__add', '__call', '__concat', '__div', '__eq', '__gc', '__index',
+ '__le', '__lt', '__metatable', '__mode', '__mul', '__newindex',
+ '__pow', '__sub', '__tostring', '__unm',
+}
+
+local internals = { -- __
+ 'add', 'call', 'concat', 'div', 'eq', 'gc', 'index',
+ 'le', 'lt', 'metatable', 'mode', 'mul', 'newindex',
+ 'pow', 'sub', 'tostring', 'unm',
}
local depricated = {
"arg", "arg.n",
+ "loadstring", "setfenv", "getfenv",
+ "pack",
}
local csnames = { -- todo: option
@@ -147,7 +163,7 @@ local operator = token("special", S('+-*/%^#=<>;:,{}[]().') + P('~=') ) --
local structure = token("special", S('{}[]()'))
local optionalspace = spacing^0
-local hasargument = #S("{(")
+local hasargument = #S("{([")
local gotokeyword = token("keyword", P("goto"))
* spacing
@@ -156,14 +172,28 @@ local gotolabel = token("keyword", P("::"))
* token("grouping",validword)
* token("keyword", P("::"))
-local keyword = token("keyword", exact_match(keywords ))
-local builtin = token("plain", exact_match(functions))
-local constant = token("data", exact_match(constants))
-local csname = token("user", exact_match(csnames ))
+local p_keywords = exact_match(keywords)
+local p_functions = exact_match(functions)
+local p_constants = exact_match(constants)
+local p_internals = P("__")
+ * exact_match(internals)
+local p_csnames = exact_match(csnames)
+
+local keyword = token("keyword", p_keywords)
+local builtin = token("plain", p_functions)
+local constant = token("data", p_constants)
+local internal = token("data", p_internals)
+local csname = token("user", p_csnames)
* (
optionalspace * hasargument
- + ( optionalspace * token("special", P(".")) * optionalspace * token("user", validword) )^1
+ + ( optionalspace * token("special", S(".:")) * optionalspace * token("user", validword) )^1
)
+local identifier = token("default", validword)
+ * ( optionalspace * token("special", S(".:")) * optionalspace * (
+ token("warning", p_keywords) +
+ token("data", p_internals) +
+ token("default", validword )
+ ) )^0
lualexer._rules = {
{ 'whitespace', spacing },
@@ -178,36 +208,83 @@ lualexer._rules = {
{ 'number', number },
{ 'longcomment', longcomment },
{ 'shortcomment', shortcomment },
--- { 'number', number },
{ 'label', gotolabel },
{ 'operator', operator },
{ 'rest', rest },
}
+-- -- experiment
+--
+-- local idtoken = R("az","AZ","__")
+--
+-- function context.one_of_match(specification)
+-- local pattern = idtoken -- the concat catches _ etc
+-- local list = { }
+-- for i=1,#specification do
+-- local style = specification[i][1]
+-- local words = specification[i][2]
+-- pattern = pattern + S(table.concat(words))
+-- for i=1,#words do
+-- list[words[i]] = style
+-- end
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- local style = list[s]
+-- if style then
+-- return true, { style, i } -- and i or nil
+-- else
+-- -- fail
+-- end
+-- end)
+-- end
+--
+-- local whatever = context.one_of_match {
+-- { "keyword", keywords }, -- keyword
+-- { "plain", functions }, -- builtin
+-- { "data", constants }, -- constant
+-- }
+--
+-- lualexer._rules = {
+-- { 'whitespace', spacing },
+-- { 'whatever', whatever },
+-- { 'csname', csname },
+-- { 'goto', gotokeyword },
+-- { 'identifier', identifier },
+-- { 'string', string },
+-- { 'number', number },
+-- { 'longcomment', longcomment },
+-- { 'shortcomment', shortcomment },
+-- { 'label', gotolabel },
+-- { 'operator', operator },
+-- { 'rest', rest },
+-- }
+
lualexer._tokenstyles = context.styleset
+-- lualexer._foldpattern = R("az")^2 + S("{}[]") -- separate entry else interference
+
+lualexer._foldpattern = (P("end") + P("if") + P("do") + P("function") + P("repeat") + P("until")) * P(#(1 - R("az")))
+ + S("{}[]")
+
lualexer._foldsymbols = {
_patterns = {
- -- '%l+', -- costly
- -- '%l%l+',
'[a-z][a-z]+',
- -- '[%({%)}%[%]]',
'[{}%[%]]',
},
['keyword'] = { -- challenge: if=0 then=1 else=-1 elseif=-1
- ['if'] = 1,
- ['end'] = -1,
- ['do'] = 1,
- ['function'] = 1,
- ['repeat'] = 1,
+ ['if'] = 1, -- if .. [then|else] .. end
+ ['do'] = 1, -- [while] do .. end
+ ['function'] = 1, -- function .. end
+ ['repeat'] = 1, -- repeat .. until
['until'] = -1,
+ ['end'] = -1,
},
['comment'] = {
['['] = 1, [']'] = -1,
},
- ['quote'] = { -- to be tested
- ['['] = 1, [']'] = -1,
- },
+ -- ['quote'] = { -- confusing
+ -- ['['] = 1, [']'] = -1,
+ -- },
['special'] = {
-- ['('] = 1, [')'] = -1,
['{'] = 1, ['}'] = -1,
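
The new _foldpattern above only accepts a fold word when the next character is not another lowercase letter, so "end" opens or closes a fold while "ending" is ignored; braces and brackets fold on their own. A minimal sketch of that lookahead in plain lpeg, outside the SciTE lexer framework (the print calls are only there to show the behaviour):

    local lpeg = require("lpeg")
    local P, R, S = lpeg.P, lpeg.R, lpeg.S

    -- a fold word must not run on into more lowercase letters,
    -- so "end" is a fold point but "ending" is not
    local foldword = ( P("end") + P("if") + P("do") + P("function") + P("repeat") + P("until") )
                   * P(#(1 - R("az")))
    local foldpattern = foldword + S("{}[]")

    print(lpeg.match(foldpattern,"end "))    --> 4
    print(lpeg.match(foldpattern,"ending"))  --> nil
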
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-mps.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-mps.lua
index 1a2a2571d4c..96c5e9c3cb9 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -70,6 +70,9 @@ local any = lexer.any
local dquote = P('"')
local cstoken = R("az","AZ") + P("_")
+local mptoken = R("az","AZ")
+local leftbrace = P("{")
+local rightbrace = P("}")
local number = context.patterns.real
local cstokentex = R("az","AZ","\127\255") + S("@!?_")
@@ -97,6 +100,14 @@ local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc s
local texlike = token('warning', P("\\") * cstokentex^1)
local extra = token('extra', S("`~%^&_-+*/\'|\\"))
+local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
+local texlike = token('embedded', P("\\") * (P("MP") + P("mp")) * mptoken^1)
+ * spacing^0
+ * token('grouping', leftbrace)
+ * token('rest', (nested + (1-rightbrace))^0 )
+ * token('grouping', rightbrace)
+ + token('warning', P("\\") * cstokentex^1)
+
metafunlexer._rules = {
{ 'whitespace', spacing },
{ 'comment', comment },
@@ -118,9 +129,11 @@ metafunlexer._rules = {
metafunlexer._tokenstyles = context.styleset
+metafunlexer._foldpattern = R("az")^2 -- separate entry else interference
+
metafunlexer._foldsymbols = {
_patterns = {
- "%l+",
+ '[a-z][a-z]+',
},
["primitive"] = {
["beginfig"] = 1,
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
index 1ff68750fb2..e39ca2baaf2 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -59,6 +59,8 @@ local constants = { }
do -- todo: only once, store in global
+ -- commands helpers primitives
+
local definitions = context.loaddefinitions("scite-context-data-interfaces")
if definitions then
@@ -80,10 +82,17 @@ do -- todo: only once, store in global
end
local definitions = context.loaddefinitions("scite-context-data-context")
+ local overloaded = { }
if definitions then
helpers = definitions.helpers or { }
constants = definitions.constants or { }
+ for i=1,#helpers do
+ overloaded[helpers[i]] = true
+ end
+ for i=1,#constants do
+ overloaded[constants[i]] = true
+ end
end
local definitions = context.loaddefinitions("scite-context-data-tex")
@@ -92,20 +101,25 @@ do -- todo: only once, store in global
local function add(data,normal)
for k, v in next, data do
if v ~= "/" and v ~= "-" then
- primitives[#primitives+1] = v
+ if not overloaded[v] then
+ primitives[#primitives+1] = v
+ end
if normal then
- primitives[#primitives+1] = "normal" .. v
+ v = "normal" .. v
+ if not overloaded[v] then
+ primitives[#primitives+1] = v
+ end
end
end
end
end
add(definitions.tex,true)
- add(definitions.etex)
- add(definitions.pdftex)
- add(definitions.aleph)
- add(definitions.omega)
- add(definitions.luatex)
- add(definitions.xetex)
+ add(definitions.etex,true)
+ add(definitions.pdftex,true)
+ add(definitions.aleph,true)
+ add(definitions.omega,true)
+ add(definitions.luatex,true)
+ add(definitions.xetex,true)
end
end
@@ -128,12 +142,13 @@ local checkedword = context.checkedword
local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
+local validminimum = 3
-- % language=uk
local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
if i < 10 then
- validwords = false
+ validwords, validminimum = false, 3
local s, e, word = find(input,'^(.+)[\n\r]',i) -- combine with match
if word then
local interface = match(word,"interface=([a-z]+)")
@@ -141,7 +156,7 @@ local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
currentcommands = commands[interface] or commands.en or { }
end
local language = match(word,"language=([a-z]+)")
- validwords = language and setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
return false
@@ -224,15 +239,17 @@ local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") +
--
-- local p_word = Cmt(iwordpattern, function(_,i,s)
-- if validwords then
--- return checkedword(validwords,s,i)
+-- return checkedword(validwords,validminimum,s,i)
-- else
--- return true, { "text", i }
+-- -- return true, { "text", i }
+-- return true, "text", i
-- end
-- end)
--
-- So we use this one instead:
-local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+----- p_word = Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+local p_word = iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
----- p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
@@ -343,7 +360,7 @@ local function stopinlinelua_b(_,i,s) -- {
if luastatus == "display" then
return false
elseif luastatus == "inline" then
- lualevel = lualevel + 1
+ lualevel = lualevel + 1 -- ?
return false
else
return true
@@ -372,15 +389,17 @@ contextlexer._reset_parser = function()
lualevel = 0
end
-local luaenvironment = P("luacode")
+local luaenvironment = P("lua") * (P("setups") + P("code") + P(true))
local inlinelua = P("\\") * (
P("ctx") * ( P("lua") + P("command") + P("late") * (P("lua") + P("command")) )
+ P("cld") * ( P("command") + P("context") )
+ + P("luaexpr")
+ + (P("direct") + P("late")) * P("lua")
)
local startlua = P("\\start") * Cmt(luaenvironment,startdisplaylua)
- + inlinelua * space^0 * Cmt(P("{"),startinlinelua)
+ + inlinelua * space^0 * ( Cmt(P("{"),startinlinelua) )
local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
+ Cmt(P("{"),stopinlinelua_b)
@@ -389,7 +408,8 @@ local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
local startluacode = token("embedded", startlua)
local stopluacode = #stoplua * token("embedded", stoplua)
-local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") ) * ("MPgraphic")
+local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") + P("reuse") ) * ("MPgraphic")
+ + P("uniqueMPpagegraphic")
local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("unique") ) * ("MPgraphic")
+ P("MP") * ( P("code")+ P("page") + P("inclusions") + P("initializations") + P("definitions") + P("extensions") + P("graphic") )
@@ -443,22 +463,33 @@ contextlexer._tokenstyles = context.styleset
-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { cldlexer._NAME..'_whitespace', lexer.style_whitespace }
-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { mpslexer._NAME..'_whitespace', lexer.style_whitespace }
+local environment = {
+ ["\\start"] = 1, ["\\stop"] = -1,
+ -- ["\\begin"] = 1, ["\\end" ] = -1,
+}
+
+-- local block = {
+-- ["\\begin"] = 1, ["\\end" ] = -1,
+-- }
-local folds = {
- ["\\start"] = 1, ["\\stop" ] = -1,
- ["\\begin"] = 1, ["\\end" ] = -1,
+local group = {
+ ["{"] = 1, ["}"] = -1,
}
-contextlexer._foldsymbols = {
+contextlexer._foldpattern = P("\\" ) * (P("start") + P("stop")) + S("{}") -- separate entry else interference
+
+contextlexer._foldsymbols = { -- these need to be style references
_patterns = {
"\\start", "\\stop", -- regular environments
- "\\begin", "\\end", -- (moveable) blocks
+ -- "\\begin", "\\end", -- (moveable) blocks
+ "[{}]",
},
- ["helper"] = folds,
- ["data"] = folds,
- ["command"] = folds,
- ["user"] = folds, -- csname
- ["grouping"] = folds,
+ ["command"] = environment,
+ ["constant"] = environment,
+ ["data"] = environment,
+ ["user"] = environment,
+ ["embedded"] = environment,
+ ["grouping"] = group,
}
return contextlexer
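
The primitive lists fed to the tex lexer are now filtered against an overloaded table: any primitive that ConTeXt already provides as a helper or constant is dropped, and the "normal.." alias gets the same check for every engine table, not only for the plain tex one. A self-contained sketch of that filter; the helper, constant and primitive names below are made up purely for illustration:

    -- sample data, invented for the example
    local helpers    = { "starttext", "stoptext" }
    local constants  = { "zerocount" }
    local overloaded = { }

    for i=1,#helpers   do overloaded[helpers  [i]] = true end
    for i=1,#constants do overloaded[constants[i]] = true end

    local primitives = { }

    local function add(data,normal)
        for _, v in next, data do
            if v ~= "/" and v ~= "-" then
                if not overloaded[v] then
                    primitives[#primitives+1] = v
                end
                if normal then
                    v = "normal" .. v
                    if not overloaded[v] then
                        primitives[#primitives+1] = v
                    end
                end
            end
        end
    end

    add({ "hbox", "starttext" },true)
    -- primitives now holds "hbox", "normalhbox" and "normalstarttext";
    -- "starttext" itself is skipped because it is already taken
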
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-txt.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-txt.lua
index 012167aeb4c..fe062fb94e5 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-txt.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-txt.lua
@@ -13,19 +13,20 @@ local token = lexer.token
local P, S, Cmt, Cp, Ct = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp, lpeg.Ct
local find, match = string.find, string.match
-local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
-local whitespace = lexer.WHITESPACE
-local context = lexer.context
+local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
+local whitespace = lexer.WHITESPACE
+local context = lexer.context
-local space = lexer.space
-local any = lexer.any
+local space = lexer.space
+local any = lexer.any
-local wordtoken = context.patterns.wordtoken
-local wordpattern = context.patterns.wordpattern
-local checkedword = context.checkedword
-local styleofword = context.styleofword
-local setwordlist = context.setwordlist
-local validwords = false
+local wordtoken = context.patterns.wordtoken
+local wordpattern = context.patterns.wordpattern
+local checkedword = context.checkedword
+local styleofword = context.styleofword
+local setwordlist = context.setwordlist
+local validwords = false
+local validminimum = 3
-- local styleset = context.newstyleset {
-- "default",
@@ -37,12 +38,12 @@ local validwords = false
local p_preamble = Cmt(#(S("#!-%") * P(" ")), function(input,i,_) -- todo: utf bomb
if i == 1 then -- < 10 then
- validwords = false
+ validwords, validminimum = false, 3
local s, e, line = find(input,'^[#!%-%%](.+)[\n\r]',i)
if line then
local language = match(line,"language=([a-z]+)")
if language then
- validwords = setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
end
@@ -52,17 +53,9 @@ end)
local t_preamble =
token("preamble", p_preamble)
--- local t_word =
--- Cmt(wordpattern, function(_,i,s)
--- if validwords then
--- return checkedword(validwords,s,i)
--- else
--- return true, { "text", i }
--- end
--- end)
-
local t_word =
- Ct( wordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+-- Ct( wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+ wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
local t_text =
token("default", wordtoken^1)
@@ -81,6 +74,7 @@ textlexer._rules = {
{ "rest", t_rest },
}
+textlexer._LEXBYLINE = true -- new (needs testing, not yet as the system changed in 3.24)
textlexer._tokenstyles = context.styleset
return textlexer
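
Both the tex and the txt lexer peek at the first line for directives, and setwordlist now hands back a minimum word length next to the word list (validwords, validminimum). The directive extraction itself is plain Lua string matching; a small sketch, with an ad-hoc function name, of the two match calls used in those preambles:

    -- ad-hoc helper around the match calls seen in the preamble checks above
    local function directives(firstline)
        return string.match(firstline,"interface=([a-z]+)"),
               string.match(firstline,"language=([a-z]+)")
    end

    print(directives("% interface=en language=uk"))  --> en   uk
    print(directives("# language=nl"))               --> nil  nl
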
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-web.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-web.lua
new file mode 100644
index 00000000000..f59a3205dd8
--- /dev/null
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-web.lua
@@ -0,0 +1,155 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for w",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- this will be extended
+
+if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
+
+local lexer = lexer
+local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing
+local P, R, S, C, Cg, Cb, Cs, Cmt, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt, lpeg.match
+local setmetatable = setmetatable
+
+local weblexer = { _NAME = "web", _FILENAME = "scite-context-lexer-web" }
+local whitespace = lexer.WHITESPACE
+local context = lexer.context
+
+local keywords = { -- copied from cpp.lua
+ -- c
+ 'asm', 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 'else',
+ 'extern', 'false', 'for', 'goto', 'if', 'inline', 'register', 'return',
+ 'sizeof', 'static', 'switch', 'true', 'typedef', 'volatile', 'while',
+ 'restrict',
+ -- hm
+ '_Bool', '_Complex', '_Pragma', '_Imaginary',
+ -- c++.
+ 'catch', 'class', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'export', 'friend', 'mutable', 'namespace', 'new', 'operator', 'private',
+ 'protected', 'public', 'signals', 'slots', 'reinterpret_cast',
+ 'static_assert', 'static_cast', 'template', 'this', 'throw', 'try', 'typeid',
+ 'typename', 'using', 'virtual'
+}
+
+local datatypes = { -- copied from cpp.lua
+ 'bool', 'char', 'double', 'enum', 'float', 'int', 'long', 'short', 'signed',
+ 'struct', 'union', 'unsigned', 'void'
+}
+
+local macros = { -- copied from cpp.lua
+ 'define', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 'import',
+ 'include', 'line', 'pragma', 'undef', 'using', 'warning'
+}
+
+local space = lexer.space -- S(" \n\r\t\f\v")
+local any = lexer.any
+local patterns = context.patterns
+local restofline = patterns.restofline
+local startofline = patterns.startofline
+
+local squote = P("'")
+local dquote = P('"')
+local escaped = P("\\") * P(1)
+local slashes = P('//')
+local begincomment = P("/*")
+local endcomment = P("*/")
+local percent = P("%")
+
+local spacing = token(whitespace, space^1)
+local rest = token("default", any)
+
+local shortcomment = token("comment", slashes * restofline^0)
+local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)
+local texcomment = token("comment", percent * restofline^0)
+
+local shortstring = token("quote", dquote) -- can be shared
+ * token("string", (escaped + (1-dquote))^0)
+ * token("quote", dquote)
+ + token("quote", squote)
+ * token("string", (escaped + (1-squote))^0)
+ * token("quote", squote)
+
+local integer = P("-")^-1 * (lexer.hex_num + lexer.dec_num)
+local number = token("number", lexer.float + integer)
+
+local validword = R("AZ","az","__") * R("AZ","az","__","09")^0
+
+local identifier = token("default",validword)
+
+local operator = token("special", S('+-*/%^!=<>;:{}[]().&|?~'))
+
+----- optionalspace = spacing^0
+
+local p_keywords = exact_match(keywords )
+local p_datatypes = exact_match(datatypes)
+local p_macros = exact_match(macros)
+
+local keyword = token("keyword", p_keywords)
+local datatype = token("keyword", p_datatypes)
+local identifier = token("default", validword)
+
+local macro = token("data", #P('#') * startofline * P('#') * S('\t ')^0 * p_macros)
+
+local beginweb = P("@")
+local endweb = P("@c")
+
+local webcomment = token("comment", #beginweb * startofline * beginweb * (1-endweb)^0 * endweb)
+
+local texlexer = lexer.load('scite-context-lexer-tex')
+
+lexer.embed_lexer(weblexer, texlexer, #beginweb * startofline * token("comment",beginweb), token("comment",endweb))
+
+weblexer._rules = {
+ { 'whitespace', spacing },
+ { 'keyword', keyword },
+ { 'type', datatype },
+ { 'identifier', identifier },
+ { 'string', shortstring },
+ -- { 'webcomment', webcomment },
+ { 'texcomment', texcomment },
+ { 'longcomment', longcomment },
+ { 'shortcomment', shortcomment },
+ { 'number', number },
+ { 'macro', macro },
+ { 'operator', operator },
+ { 'rest', rest },
+}
+
+weblexer._tokenstyles = context.styleset
+
+weblexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
+
+weblexer._foldsymbols = {
+ _patterns = {
+ '[{}]',
+ '/%*',
+ '%*/',
+ },
+ -- ["data"] = { -- macro
+ -- ['region'] = 1,
+ -- ['endregion'] = -1,
+ -- ['if'] = 1,
+ -- ['ifdef'] = 1,
+ -- ['ifndef'] = 1,
+ -- ['endif'] = -1,
+ -- },
+ ["special"] = { -- operator
+ ['{'] = 1,
+ ['}'] = -1,
+ },
+ ["comment"] = {
+ ['/*'] = 1,
+ ['*/'] = -1,
+ }
+}
+
+-- -- by indentation:
+--
+weblexer._foldpatterns = nil
+weblexer._foldsymbols = nil
+
+return weblexer
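
The shortstring pattern in the new web lexer lets a backslash escape the next character, so an embedded \" does not terminate the string; for that to work the escaped alternative has to be tried before the plain (1 - dquote) one. A stand-alone lpeg sketch of just the double-quoted branch:

    local lpeg = require("lpeg")
    local P = lpeg.P

    local dquote  = P('"')
    local escaped = P("\\") * P(1)   -- a backslash plus whatever follows it

    local shortstring = dquote * (escaped + (1 - dquote))^0 * dquote

    print(lpeg.match(shortstring,[["a\"b"]]))  --> 7
    print(lpeg.match(shortstring,[["a\"b]]))   --> nil (unterminated)
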
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-comment.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
index eab3b2a61c8..104310f9417 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
@@ -27,12 +27,15 @@ xmlcommentlexer._rules = {
xmlcommentlexer._tokenstyles = context.styleset
+xmlcommentlexer._foldpattern = P("<!--") + P("-->")
+
xmlcommentlexer._foldsymbols = {
_patterns = {
"<%!%-%-", "%-%->", -- comments
},
["comment"] = {
- ["<!--"] = 1, ["-->" ] = -1,
+ ["<!--"] = 1,
+ ["-->" ] = -1,
}
}
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-script.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-script.lua
new file mode 100644
index 00000000000..fd1aae7f70c
--- /dev/null
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml-script.lua
@@ -0,0 +1,30 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for xml script",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local lexer = lexer
+local token = lexer.token
+local P = lpeg.P
+
+local xmlscriptlexer = { _NAME = "xml-script", _FILENAME = "scite-context-lexer-xml-script" }
+local whitespace = lexer.WHITESPACE -- triggers states
+local context = lexer.context
+
+local space = lexer.space
+local nospace = 1 - space - P("</") * (P("script") + P("SCRIPT")) * P(">") -- stop before the closing script tag
+
+local p_spaces = token(whitespace, space ^1)
+local p_cdata = token("default", nospace^1)
+
+xmlscriptlexer._rules = {
+ { "whitespace", p_spaces },
+ { "script", p_cdata },
+}
+
+xmlscriptlexer._tokenstyles = context.styleset
+
+return xmlscriptlexer
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml.lua
index 34636127fc7..241e2259123 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-xml.lua
@@ -1,6 +1,6 @@
local info = {
version = 1.002,
- comment = "scintilla lpeg lexer for metafun",
+ comment = "scintilla lpeg lexer for xml",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
@@ -27,6 +27,8 @@ local context = lexer.context
local xmlcommentlexer = lexer.load("scite-context-lexer-xml-comment") -- indirect (some issue with the lexer framework)
local xmlcdatalexer = lexer.load("scite-context-lexer-xml-cdata") -- indirect (some issue with the lexer framework)
+local xmlscriptlexer = lexer.load("scite-context-lexer-xml-script") -- indirect (some issue with the lexer framework)
+local lualexer = lexer.load("scite-context-lexer-lua") --
local space = lexer.space -- S(" \t\n\r\v\f")
local any = lexer.any -- P(1)
@@ -51,6 +53,11 @@ local opencdata = P("<![CDATA[")
local closecdata = P("]]>")
local opendoctype = P("<!DOCTYPE") -- could grab the whole doctype
local closedoctype = P("]>") + P(">")
+local openscript = openbegin * (P("script") + P("SCRIPT")) * (1-closeend)^0 * closeend -- begin
+local closescript = openend * (P("script") + P("SCRIPT")) * closeend
+
+local openlua = "<?lua"
+local closelua = "?>"
-- <!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
-- <!DOCTYPE Something PUBLIC "... ..." "..." >
@@ -71,7 +78,7 @@ local checkedword = context.checkedword
local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
-
+local validminimum = 3
-- <?xml version="1.0" encoding="UTF-8" language="uk" ?>
--
@@ -79,29 +86,21 @@ local validwords = false
local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: utf bomb
if i < 200 then
- validwords = false
+ validwords, validminimum = false, 3
local language = match(input,"^<%?xml[^>]*%?>%s*<%?context%-directive%s+editor%s+language%s+(..)%s+%?>")
-- if not language then
-- language = match(input,'^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>',i)
-- end
if language then
- validwords = setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
return false
end)
--- local p_word =
--- Cmt(iwordpattern, function(_,i,s)
--- if validwords then
--- return checkedword(validwords,s,i)
--- else
--- return true, { "text", i } -- or default
--- end
--- end)
-
local p_word =
- Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+-- Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+ iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
local p_rest =
token("default", any)
@@ -227,8 +226,10 @@ local p_doctype = token("command",P("<!DOCTYPE"))
* p_optionalwhitespace
* token("command",P(">"))
-lexer.embed_lexer(xmllexer, xmlcommentlexer, token("command",opencomment), token("command",closecomment))
-lexer.embed_lexer(xmllexer, xmlcdatalexer, token("command",opencdata), token("command",closecdata))
+lexer.embed_lexer(xmllexer, lualexer, token("command", openlua), token("command", closelua))
+lexer.embed_lexer(xmllexer, xmlcommentlexer, token("command", opencomment), token("command", closecomment))
+lexer.embed_lexer(xmllexer, xmlcdatalexer, token("command", opencdata), token("command", closecdata))
+lexer.embed_lexer(xmllexer, xmlscriptlexer, token("command", openscript), token("command", closescript))
-- local p_name =
-- token("plain",name)
@@ -308,9 +309,9 @@ xmllexer._rules = {
{ "whitespace", p_spacing },
{ "preamble", p_preamble },
{ "word", p_word },
--- { "text", p_text },
--- { "comment", p_comment },
--- { "cdata", p_cdata },
+ -- { "text", p_text },
+ -- { "comment", p_comment },
+ -- { "cdata", p_cdata },
{ "doctype", p_doctype },
{ "instruction", p_instruction },
{ "close", p_close },
@@ -322,12 +323,18 @@ xmllexer._rules = {
xmllexer._tokenstyles = context.styleset
+xmllexer._foldpattern = P("</") + P("<") + P("/>") -- separate entry else interference
+
xmllexer._foldsymbols = { -- somehow doesn't work yet
_patterns = {
- "[<>]",
+ "</",
+ "/>",
+ "<",
},
["keyword"] = {
- ["<"] = 1, [">"] = -1,
+ ["</"] = -1,
+ ["/>"] = -1,
+ ["<"] = 1,
},
}
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer.lua
index 2db37e26b41..7883177b41e 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer.lua
@@ -1,5 +1,5 @@
local info = {
- version = 1.002,
+ version = 1.324,
comment = "basics for scintilla lpeg lexer for context/metafun",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -8,9 +8,26 @@ local info = {
}
+-- todo: move all code here
+-- todo: explore adapted dll ... properties + init
+
-- The fold and lex functions are copied and patched from original code by Mitchell (see
-- lexer.lua). All errors are mine.
--
+-- Starting with SciTE version 3.20 there is an issue with coloring. As we still lack
+-- a connection with scite itself (properties as well as printing to the log pane) we
+-- cannot trace this (on windows). As far as I can see, there are no fundamental
+-- changes in lexer.lua or LexLPeg.cxx so it must be in scintilla itself. So for the
+-- moment I stick to 3.10. Indicators are: no lexing of 'next' and 'goto <label>' in the
+-- Lua lexer and no brace highlighting either. Interestingly, it does work ok in
+-- the cld lexer (so the Lua code is okay). Also the fact that char-def.lua lexes fast
+-- is a signal that the lexer quits somewhere halfway.
+--
+-- After checking 3.24 and adapting to the new lexer tables things are okay again. So,
+-- this version assumes 3.24 or higher. In 3.24 we have a different token result, i.e. no
+-- longer a { tag, pattern } but just two return values. I didn't check other changes but
+-- will do that when I run into issues.
+--
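+-- For example (sketch, derived from the token/lex code below): where a match used to
+-- yield a table like { "keyword", 12 }, it now yields the two values "keyword" and 12
+-- directly, so a lexed line ends up as a flat list alternating token name and position.
+--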
-- I've considered making a whole copy and patching the other functions too as we need
-- an extra nesting model. However, I don't want to maintain too much. An unfortunate
-- change in 3.03 is that a script can no longer be specified. This means that instead
@@ -29,7 +46,8 @@ local info = {
-- and the cursor is at the last line of a 200K line file. Optimizing the fold function
-- brought down loading of char-def.lua from 14 sec => 8 sec. Replacing the word_match
-- function and optimizing the lex function gained another 2+ seconds. A 6 second load
--- is quite ok for me.
+-- is quite ok for me. The changed lexer table structure (no subtables) brings loading
+-- down to a few seconds.
--
-- When the lexer path is copied to the textadept lexer path, and the theme definition to
-- theme path (as lexer.lua), the lexer works there as well. When I have time and motive
@@ -52,7 +70,7 @@ local info = {
--
-- Eventually it might be safer to copy the other methods from lexer.lua here as well so
-- that we have no dependencies, apart from the c library (for which at some point the api
--- will be stable I guess).
+-- will be stable I hope).
--
-- It's a pity that there is no scintillua library for the OSX version of scite. Even
-- better would be to have the scintillua library as an integral part of scite as that way I
@@ -64,7 +82,7 @@ local info = {
local lpeg = require 'lpeg'
-local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
local lpegmatch = lpeg.match
local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub
local concat = table.concat
@@ -173,6 +191,12 @@ patterns.spacing = space^1
patterns.nospacing = (1-space)^1
patterns.anything = P(1)
+local endof = S("\n\r\f")
+
+patterns.startofline = P(function(input,index)
+ return (index == 1 or lpegmatch(endof,input,index-1)) and index
+end)
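+-- (a zero-width test: the match-time function returns the current index without consuming
+-- anything, so it only succeeds at position 1 or right after a newline; the web lexer
+-- uses it to anchor line-initial #-macros and @ markers)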
+
function context.exact_match(words,word_chars,case_insensitive)
local characters = concat(words)
local pattern -- the concat catches _ etc
@@ -210,8 +234,11 @@ function context.exact_match(words,word_chars,case_insensitive)
end
-- spell checking (we can only load lua files)
-
+--
-- return {
+-- min = 3,
+-- max = 40,
+-- n = 12345,
-- words = {
-- ["someword"] = "someword",
-- ["anotherword"] = "Anotherword",
@@ -220,66 +247,81 @@ end
local lists = { }
-local splitter = (Cf(Ct("") * (Cg(C((1-S(" \t\n\r"))^1 * Cc(true))) + P(1))^1,rawset) )^0
-local splitter = (Cf(Ct("") * (Cg(C(R("az","AZ","\127\255")^1) * Cc(true)) + P(1))^1,rawset) )^0
-
-local function splitwords(words)
- return lpegmatch(splitter,words)
-end
-
function context.setwordlist(tag,limit) -- returns hash (lowercase keys and original values)
if not tag or tag == "" then
- return false
- elseif lists[tag] ~= nil then
- return lists[tag]
- else
- local list = context.loaddefinitions("spell-" .. tag)
+ return false, 3
+ end
+ local list = lists[tag]
+ if not list then
+ list = context.loaddefinitions("spell-" .. tag)
if not list or type(list) ~= "table" then
- lists[tag] = false
- return false
- elseif type(list.words) == "string" then
- list = splitwords(list.words) or false
- lists[tag] = list
- return list
+ list = { words = false, min = 3 }
else
- list = list.words or false
- lists[tag] = list
- return list
+ list.words = list.words or false
+ list.min = list.min or 3
end
+ lists[tag] = list
end
+ return list.words, list.min
end
patterns.wordtoken = R("az","AZ","\127\255")
patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-function context.checkedword(validwords,s,i) -- ,limit
- if not validwords then
- return true, { "text", i }
--- return true, { "default", i }
+-- -- pre 3.24:
+--
+-- function context.checkedword(validwords,validminimum,s,i) -- ,limit
+-- if not validwords then -- or #s < validminimum then
+-- return true, { "text", i } -- { "default", i }
+-- else
+-- -- keys are lower
+-- local word = validwords[s]
+-- if word == s then
+-- return true, { "okay", i } -- exact match
+-- elseif word then
+-- return true, { "warning", i } -- case issue
+-- else
+-- local word = validwords[lower(s)]
+-- if word == s then
+-- return true, { "okay", i } -- exact match
+-- elseif word then
+-- return true, { "warning", i } -- case issue
+-- elseif upper(s) == s then
+-- return true, { "warning", i } -- probably a logo or acronym
+-- else
+-- return true, { "error", i }
+-- end
+-- end
+-- end
+-- end
+
+function context.checkedword(validwords,validminimum,s,i) -- ,limit
+ if not validwords then -- or #s < validminimum then
+ return true, "text", i -- { "default", i }
else
-- keys are lower
local word = validwords[s]
if word == s then
- return true, { "okay", i } -- exact match
+ return true, "okay", i -- exact match
elseif word then
- return true, { "warning", i } -- case issue
+ return true, "warning", i -- case issue
else
local word = validwords[lower(s)]
if word == s then
- return true, { "okay", i } -- exact match
+ return true, "okay", i -- exact match
elseif word then
- return true, { "warning", i } -- case issue
+ return true, "warning", i -- case issue
elseif upper(s) == s then
- return true, { "warning", i } -- probably a logo or acronym
+ return true, "warning", i -- probably a logo or acronym
else
- return true, { "error", i }
+ return true, "error", i
end
end
end
end
-function context.styleofword(validwords,s) -- ,limit
- if not validwords then
+function context.styleofword(validwords,validminimum,s) -- ,limit
+ if not validwords or #s < validminimum then
return "text"
else
-- keys are lower
@@ -319,29 +361,57 @@ setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEAD
setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })
--- local newline = P("\r\n") + S("\r\n")
--- local splitlines = Ct( ( Ct ( (Cp() * Cs((1-newline)^1) * newline^-1) + (Cp() * Cc("") * newline) ) )^0)
+-- -- todo: move the local functions outside (see below) .. old variant < 3.24
--
--- local lines = lpegmatch(splitlines,text) -- iterating over lines is faster
--- for i=1, #lines do
--- local li = lines[i]
--- local line = li[2]
--- if line ~= "" then
--- local pos = li[1]
+-- local newline = P("\r\n") + S("\r\n")
+-- local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
+-- local p_nop = newline
+--
+-- local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+-- local foldsymbols = lexer._foldsymbols
+-- if not foldsymbols then
+-- return { }
+-- end
+-- local patterns = foldsymbols._patterns
+-- if not patterns then
+-- return { }
+-- end
+-- local nofpatterns = #patterns
+-- if nofpatterns == 0 then
+-- return { }
+-- end
+-- local folds = { }
+-- local line_num = start_line
+-- local prev_level = start_level
+-- local current_level = prev_level
+-- local validmatches = foldsymbols._validmatches
+-- if not validmatches then
+-- validmatches = { }
+-- for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
+-- if not find(symbol,"^_") then -- brrr
+-- for s, _ in next, matches do
+-- validmatches[s] = true
+-- end
+-- end
+-- end
+-- foldsymbols._validmatches = validmatches
+-- end
+-- -- of course we could instead build a nice lpeg checker .. something for
+-- -- a rainy day with a stack of new cd's at hand
+-- local function action_y(pos,line)
-- for i=1,nofpatterns do
-- for s, m in gmatch(line,patterns[i]) do
--- if hash[m] then
--- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+-- if validmatches[m] then
+-- local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
-- if symbols then
--- local l = symbols[m]
--- if l then
--- local t = type(l)
--- if t == 'number' then
--- current_level = current_level + l
--- elseif t == 'function' then
--- current_level = current_level + l(text, pos, line, s, match)
+-- local action = symbols[m]
+-- if action then
+-- if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
+-- current_level = current_level + action
+-- else
+-- current_level = current_level + action(text,pos,line,s,m)
-- end
--- if current_level < FOLD_BASE then -- integrate in previous
+-- if current_level < FOLD_BASE then
-- current_level = FOLD_BASE
-- end
-- end
@@ -355,128 +425,298 @@ setmetatable(n_table, { __index = function(t,level) local v = { level
-- folds[line_num] = n_table[prev_level] -- { prev_level }
-- end
-- prev_level = current_level
--- else
+-- line_num = line_num + 1
+-- end
+-- local function action_n()
-- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+-- line_num = line_num + 1
-- end
--- line_num = line_num + 1
+-- if lexer._reset_parser then
+-- lexer._reset_parser()
+-- end
+-- local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
+-- lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
+-- return folds
-- end
+-- The 3.24 variant; no longer subtable optimization is needed:
+
local newline = P("\r\n") + S("\r\n")
local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
local p_nop = newline
+local folders = { }
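+-- one folder closure per lexer, built on first use and then reused; the closure keeps
+-- folds, line_num, prev_level etc as upvalues that are (re)initialized at each call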
+
local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- local foldsymbols = lexer._foldsymbols
- if not foldsymbols then
- return { }
- end
- local patterns = foldsymbols._patterns
- if not patterns then
- return { }
- end
- local nofpatterns = #patterns
- if nofpatterns == 0 then
- return { }
- end
- local folds = { }
- local line_num = start_line
- local prev_level = start_level
- local current_level = prev_level
- local validmatches = foldsymbols._validmatches
- if not validmatches then
- validmatches = { }
- for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
- if not find(symbol,"^_") then -- brrr
- for s, _ in next, matches do
- validmatches[s] = true
+ local folder = folders[lexer]
+ if not folder then
+ --
+ local pattern, folds, text, start_pos, line_num, prev_level, current_level
+ --
+ local fold_symbols = lexer._foldsymbols
+ local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
+ --
+ if fold_pattern then
+ -- if no functions are found then we could have a faster one
+
+ -- fold_pattern = Cp() * C(fold_pattern) * Carg(1) / function(s,match,pos)
+ -- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+ -- local l = symbols and symbols[match]
+ -- if l then
+ -- local t = type(l)
+ -- if t == 'number' then
+ -- current_level = current_level + l
+ -- elseif t == 'function' then
+ -- current_level = current_level + l(text, pos, line, s, match)
+ -- end
+ -- end
+ -- end
+ -- fold_pattern = (fold_pattern + P(1))^0
+ -- local action_y = function(pos,line)
+ -- lpegmatch(fold_pattern,line,1,pos)
+ -- folds[line_num] = prev_level
+ -- if current_level > prev_level then
+ -- folds[line_num] = prev_level + FOLD_HEADER
+ -- end
+ -- if current_level < FOLD_BASE then
+ -- current_level = FOLD_BASE
+ -- end
+ -- prev_level = current_level
+ -- line_num = line_num + 1
+ -- end
+ -- local action_n = function()
+ -- folds[line_num] = prev_level + FOLD_BLANK
+ -- line_num = line_num + 1
+ -- end
+ -- pattern = (p_yes/action_y + p_nop/action_n)^0
+
+ fold_pattern = Cp() * C(fold_pattern) / function(s,match)
+ local symbols = fold_symbols[get_style_at(start_pos + s)]
+ if symbols then
+ local l = symbols[match]
+ if l then
+ current_level = current_level + l
+ end
end
end
- end
- foldsymbols._validmatches = validmatches
- end
- local function action_y(pos,line) -- we can consider moving the local functions outside (drawback: folds is kept)
- for i=1,nofpatterns do
- for s, m in gmatch(line,patterns[i]) do
- if validmatches[m] then
- local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
- if symbols then
- local action = symbols[m]
- if action then
- if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
- current_level = current_level + action
- else
- current_level = current_level + action(text,pos,line,s,m)
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
+ local action_y = function()
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
+ end
+ pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0
+
+ else
+ -- the traditional one but a bit optimized
+ local fold_symbols_patterns = fold_symbols._patterns
+ local action_y = function(pos,line)
+ for j = 1, #fold_symbols_patterns do
+ for s, match in gmatch(line,fold_symbols_patterns[j]) do -- '()('..patterns[i]..')'
+ local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+ local l = symbols and symbols[match]
+ local t = type(l)
+ if t == 'number' then
+ current_level = current_level + l
+ elseif t == 'function' then
+ current_level = current_level + l(text, pos, line, s, match)
end
end
end
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
+ end
+ pattern = (p_yes/action_y + p_nop/action_n)^0
end
- if current_level > prev_level then
- folds[line_num] = h_table[prev_level] -- { prev_level, FOLD_HEADER }
- else
- folds[line_num] = n_table[prev_level] -- { prev_level }
+ --
+ local reset_parser = lexer._reset_parser
+ --
+ folder = function(_text_,_start_pos_,_start_line_,_start_level_)
+ if reset_parser then
+ reset_parser()
+ end
+ folds = { }
+ text = _text_
+ start_pos = _start_pos_
+ line_num = _start_line_
+ prev_level = _start_level_
+ current_level = prev_level
+ lpegmatch(pattern,text)
+-- return folds
+local t = folds
+folds = nil
+return t -- so folds can be collected
end
- prev_level = current_level
- line_num = line_num + 1
+ folders[lexer] = folder
end
- local function action_n()
- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
- line_num = line_num + 1
- end
- if lexer._reset_parser then
- lexer._reset_parser()
- end
- local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
- lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
- return folds
+ return folder(text,start_pos,start_line,start_level,lexer)
end
-local function fold_by_indentation(text,start_pos,start_line,start_level)
- local folds = { }
- local current_line = start_line
- local prev_level = start_level
- for _, line in gmatch(text,'([\t ]*)(.-)\r?\n') do
- if line ~= "" then
- local current_level = FOLD_BASE + get_indent_amount(current_line)
- if current_level > prev_level then -- next level
- local i = current_line - 1
- while true do
- local f = folds[i]
- if f and f[2] == FOLD_BLANK then
- i = i - 1
- else
- break
- end
- end
- local f = folds[i]
- if f then
- f[2] = FOLD_HEADER
- end -- low indent
- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
- elseif current_level < prev_level then -- prev level
- local f = folds[current_line - 1]
- if f then
- f[1] = prev_level -- high indent
- end
- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
- else -- same level
- folds[current_line] = n_table[prev_level] -- { prev_level }
+-- local function fold_by_indentation(text,start_pos,start_line,start_level)
+-- local folds = { }
+-- local current_line = start_line
+-- local prev_level = start_level
+-- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
+-- if line ~= "" then
+-- local current_level = FOLD_BASE + get_indent_amount(current_line)
+-- if current_level > prev_level then -- next level
+-- local i = current_line - 1
+-- while true do
+-- local f = folds[i]
+-- if f and f[2] == FOLD_BLANK then
+-- i = i - 1
+-- else
+-- break
+-- end
+-- end
+-- local f = folds[i]
+-- if f then
+-- f[2] = FOLD_HEADER
+-- end -- low indent
+-- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
+-- elseif current_level < prev_level then -- prev level
+-- local f = folds[current_line - 1]
+-- if f then
+-- f[1] = prev_level -- high indent
+-- end
+-- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
+-- else -- same level
+-- folds[current_line] = n_table[prev_level] -- { prev_level }
+-- end
+-- prev_level = current_level
+-- else
+-- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+-- end
+-- current_line = current_line + 1
+-- end
+-- return folds
+-- end
+
+-- local function fold_by_indentation(text,start_pos,start_line,start_level)
+-- local folds = { }
+-- local current_line = start_line
+-- local prev_level = start_level
+-- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
+-- if line ~= '' then
+-- local current_level = FOLD_BASE + get_indent_amount(current_line)
+-- if current_level > prev_level then -- next level
+-- local i = current_line - 1
+-- local f
+-- while true do
+-- f = folds[i]
+-- if not f then
+-- break
+-- elseif f[2] == FOLD_BLANK then
+-- i = i - 1
+-- else
+-- f[2] = FOLD_HEADER -- low indent
+-- break
+-- end
+-- end
+-- folds[current_line] = { current_level } -- high indent
+-- elseif current_level < prev_level then -- prev level
+-- local f = folds[current_line - 1]
+-- if f then
+-- f[1] = prev_level -- high indent
+-- end
+-- folds[current_line] = { current_level } -- low indent
+-- else -- same level
+-- folds[current_line] = { prev_level }
+-- end
+-- prev_level = current_level
+-- else
+-- folds[current_line] = { prev_level, FOLD_BLANK }
+-- end
+-- current_line = current_line + 1
+-- end
+-- for line, level in next, folds do
+-- folds[line] = level[1] + (level[2] or 0)
+-- end
+-- return folds
+-- end
+
+local folds, current_line, prev_level
+
+local function action_y()
+ local current_level = FOLD_BASE + get_indent_amount(current_line)
+ if current_level > prev_level then -- next level
+ local i = current_line - 1
+ local f
+ while true do
+ f = folds[i]
+ if not f then
+ break
+ elseif f[2] == FOLD_BLANK then
+ i = i - 1
+ else
+ f[2] = FOLD_HEADER -- low indent
+ break
end
- prev_level = current_level
- else
- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
end
- current_line = current_line + 1
+ folds[current_line] = { current_level } -- high indent
+ elseif current_level < prev_level then -- prev level
+ local f = folds[current_line - 1]
+ if f then
+ f[1] = prev_level -- high indent
+ end
+ folds[current_line] = { current_level } -- low indent
+ else -- same level
+ folds[current_line] = { prev_level }
end
- return folds
+ prev_level = current_level
+ current_line = current_line + 1
+end
+
+local function action_n()
+ folds[current_line] = { prev_level, FOLD_BLANK }
+ current_line = current_line + 1
+end
+
+local pattern = ( S("\t ")^0 * ( (1-S("\n\r"))^1 / action_y + P(true) / action_n) * newline )^0
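+-- i.e. per line: skip leading tabs and spaces, then either some content (action_y, which
+-- derives the fold level from get_indent_amount) or an empty line (action_n), then the newline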
+
+local function fold_by_indentation(text,start_pos,start_line,start_level)
+ -- initialize
+ folds = { }
+ current_line = start_line
+ prev_level = start_level
+ -- define
+ -- -- not here .. pattern binds and local functions are not frozen
+ -- analyze
+ lpegmatch(pattern,text)
+ -- flatten
+ for line, level in next, folds do
+ folds[line] = level[1] + (level[2] or 0)
+ end
+ -- done
+-- return folds
+local t = folds
+folds = nil
+return t -- so folds can be collected
end
local function fold_by_line(text,start_pos,start_line,start_level)
local folds = { }
+ -- can also be lpeg'd
for _ in gmatch(text,".-\r?\n") do
folds[start_line] = n_table[start_level] -- { start_level }
start_line = start_line + 1
@@ -501,7 +741,7 @@ function context.fold(text,start_pos,start_line,start_level) -- hm, we had size
if filesize <= threshold_by_lexer then
return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
end
- elseif fold_by_symbols and get_property('fold.by.parsing',1) > 0 then
+ elseif fold_by_symbols then -- and get_property('fold.by.parsing',1) > 0 then
if filesize <= threshold_by_parsing then
return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
end
@@ -589,6 +829,10 @@ local function build_grammar(lexer, initial_rule)
end
-- so far. We need these local functions in the next one.
+--
+-- Before 3.24 we had tokens[..] = { category, position }, now it's just two values.
+
+local lineparsers = { }
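+-- same caching idea as the folders table above: one line splitting lpeg per lexer, built
+-- lazily in context.lex and reused afterwards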
function context.lex(text,init_style)
local lexer = global._LEXER
@@ -599,50 +843,75 @@ function context.lex(text,init_style)
local tokens = { }
local offset = 0
local noftokens = 0
- if true then
- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
- local line_tokens = lpegmatch(grammar,line)
+ -- -- pre 3.24
+ --
+ -- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
+ -- local line_tokens = lpegmatch(grammar,line)
+ -- if line_tokens then
+ -- for i=1,#line_tokens do
+ -- local token = line_tokens[i]
+ -- token[2] = token[2] + offset
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = token
+ -- end
+ -- end
+ -- offset = offset + #line
+ -- if noftokens > 0 and tokens[noftokens][2] ~= offset then
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = { 'default', offset + 1 }
+ -- end
+ -- end
+
+ -- for line in gmatch(text,'[^\r\n]*\r?\n?') do
+ -- local line_tokens = lpegmatch(grammar,line)
+ -- if line_tokens then
+ -- for i=1,#line_tokens,2 do
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = line_tokens[i]
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = line_tokens[i + 1] + offset
+ -- end
+ -- end
+ -- offset = offset + #line
+ -- if noftokens > 0 and tokens[noftokens] ~= offset then
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = 'default'
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = offset + 1
+ -- end
+ -- end
+
+ local lineparser = lineparsers[lexer]
+ if not lineparser then -- probably a cmt is more efficient
+ lineparser = C((1-newline)^0 * newline) / function(line)
+ local length = #line
+ local line_tokens = length > 0 and lpegmatch(grammar,line)
if line_tokens then
- for i=1,#line_tokens do
- local token = line_tokens[i]
- token[2] = token[2] + offset
+ for i=1,#line_tokens,2 do
noftokens = noftokens + 1
- tokens[noftokens] = token
- end
- end
- offset = offset + #line
- if noftokens > 0 and tokens[noftokens][2] ~= offset then
- noftokens = noftokens + 1
- tokens[noftokens] = { 'default', offset + 1 }
- end
- end
- else -- alternative
- local lasttoken, lastoffset
- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
- local line_tokens = lpegmatch(grammar,line)
- if line_tokens then
- for i=1,#line_tokens do
- lasttoken = line_tokens[i]
- lastoffset = lasttoken[2] + offset
- lasttoken[2] = lastoffset
+ tokens[noftokens] = line_tokens[i]
noftokens = noftokens + 1
- tokens[noftokens] = lasttoken
+ tokens[noftokens] = line_tokens[i + 1] + offset
end
end
- offset = offset + #line
- if lastoffset ~= offset then
- lastoffset = offset + 1
- lasttoken = { 'default', lastoffset }
+ offset = offset + length
+ if noftokens > 0 and tokens[noftokens] ~= offset then
+ noftokens = noftokens + 1
+ tokens[noftokens] = 'default'
noftokens = noftokens + 1
- tokens[noftokens] = lasttoken
+ tokens[noftokens] = offset + 1
end
end
+ lineparser = lineparser^0
+ lineparsers[lexer] = lineparser
end
+ lpegmatch(lineparser,text)
return tokens
+
elseif lexer._CHILDREN then
-- as we cannot print, tracing is not possible ... this might change as we can as well
-- generate them all in one go (sharing as much as possible)
- local _hash = lexer._HASH
+ local hash = lexer._HASH -- hm, was _hash
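+-- presumably the (tex) documentation part of a web source, between a line-initial @ and
+-- the next @c, is handed to the embedded tex lexer here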
if not hash then
hash = { }
lexer._HASH = hash
@@ -678,8 +947,14 @@ end
-- todo: keywords: one lookup and multiple matches
+-- function context.token(name, patt)
+-- return Ct(patt * Cc(name) * Cp())
+-- end
+--
+-- -- hm, changed in 3.24 .. no longer a table
+
function context.token(name, patt)
- return Ct(patt * Cc(name) * Cp())
+ return patt * Cc(name) * Cp()
end
lexer.fold = context.fold
diff --git a/Master/texmf-dist/context/data/scite/lexers/themes/scite-context-theme.lua b/Master/texmf-dist/context/data/scite/lexers/themes/scite-context-theme.lua
index 7b305d3e5d5..e32fe9dee15 100644
--- a/Master/texmf-dist/context/data/scite/lexers/themes/scite-context-theme.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/themes/scite-context-theme.lua
@@ -107,7 +107,6 @@ local style_tag = style { fore = colors.cyan }
----- style_standout = style { fore = colors.orange, bold = true }
local style_command = style { fore = colors.green, bold = true }
local style_internal = style { fore = colors.orange, bold = true }
-local style_internal = style { fore = colors.orange, bold = true }
local style_preamble = style { fore = colors.yellow }
local style_grouping = style { fore = colors.red }
diff --git a/Master/texmf-dist/context/data/scite/scite-context-data-context.properties b/Master/texmf-dist/context/data/scite/scite-context-data-context.properties
index 37a39e1e0c8..4af84cde5c9 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-data-context.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-data-context.properties
@@ -1,130 +1,178 @@
+keywordclass.context.constants=\
+zerocount minusone minustwo plusone \
+plustwo plusthree plusfour plusfive plussix \
+plusseven pluseight plusnine plusten plussixteen \
+plushundred plusthousand plustenthousand plustwentythousand medcard \
+maxcard zeropoint onepoint halfapoint onebasepoint \
+maxdimen scaledpoint thousandpoint points halfpoint \
+zeroskip zeromuskip onemuskip pluscxxvii pluscxxviii \
+pluscclv pluscclvi normalpagebox endoflinetoken outputnewlinechar \
+emptytoks empty undefined voidbox emptybox \
+emptyvbox emptyhbox bigskipamount medskipamount smallskipamount \
+fmtname fmtversion texengine texenginename texengineversion \
+luatexengine pdftexengine xetexengine unknownengine etexversion \
+pdftexversion xetexversion xetexrevision activecatcode bgroup \
+egroup endline conditionaltrue conditionalfalse attributeunsetvalue \
+uprotationangle rightrotationangle downrotationangle leftrotationangle inicatcodes \
+ctxcatcodes texcatcodes notcatcodes txtcatcodes vrbcatcodes \
+prtcatcodes nilcatcodes luacatcodes tpacatcodes tpbcatcodes \
+xmlcatcodes escapecatcode begingroupcatcode endgroupcatcode mathshiftcatcode \
+alignmentcatcode endoflinecatcode parametercatcode superscriptcatcode subscriptcatcode \
+ignorecatcode spacecatcode lettercatcode othercatcode activecatcode \
+commentcatcode invalidcatcode tabasciicode newlineasciicode formfeedasciicode \
+endoflineasciicode endoffileasciicode spaceasciicode hashasciicode dollarasciicode \
+commentasciicode ampersandasciicode colonasciicode backslashasciicode circumflexasciicode \
+underscoreasciicode leftbraceasciicode barasciicode rightbraceasciicode tildeasciicode \
+delasciicode lessthanasciicode morethanasciicode doublecommentsignal atsignasciicode \
+exclamationmarkasciicode questionmarkasciicode doublequoteasciicode singlequoteasciicode forwardslashasciicode \
+primeasciicode activemathcharcode activetabtoken activeformfeedtoken activeendoflinetoken \
+batchmodecode nonstopmodecode scrollmodecode errorstopmodecode bottomlevelgroupcode \
+simplegroupcode hboxgroupcode adjustedhboxgroupcode vboxgroupcode vtopgroupcode \
+aligngroupcode noaligngroupcode outputgroupcode mathgroupcode discretionarygroupcode \
+insertgroupcode vcentergroupcode mathchoicegroupcode semisimplegroupcode mathshiftgroupcode \
+mathleftgroupcode vadjustgroupcode charnodecode hlistnodecode vlistnodecode \
+rulenodecode insertnodecode marknodecode adjustnodecode ligaturenodecode \
+discretionarynodecode whatsitnodecode mathnodecode gluenodecode kernnodecode \
+penaltynodecode unsetnodecode mathsnodecode charifcode catifcode \
+numifcode dimifcode oddifcode vmodeifcode hmodeifcode \
+mmodeifcode innerifcode voidifcode hboxifcode vboxifcode \
+xifcode eofifcode trueifcode falseifcode caseifcode \
+definedifcode csnameifcode fontcharifcode fontslantperpoint fontinterwordspace \
+fontinterwordstretch fontinterwordshrink fontexheight fontemwidth fontextraspace \
+slantperpoint interwordspace interwordstretch interwordshrink exheight \
+emwidth extraspace mathsupdisplay mathsupnormal mathsupcramped \
+mathsubnormal mathsubcombined mathaxisheight startmode stopmode \
+startnotmode stopnotmode startmodeset stopmodeset doifmode \
+doifmodeelse doifnotmode startallmodes stopallmodes startnotallmodes \
+stopnotallmodes doifallmodes doifallmodeselse doifnotallmodes startenvironment \
+stopenvironment environment startcomponent stopcomponent component \
+startproduct stopproduct product startproject stopproject \
+project starttext stoptext startnotext stopnotext \
+startdocument stopdocument documentvariable setupdocument startmodule \
+stopmodule usemodule usetexmodule useluamodule startTEXpage \
+stopTEXpage enablemode disablemode preventmode globalenablemode \
+globaldisablemode globalpreventmode pushmode popmode typescriptone \
+typescripttwo typescriptthree mathsizesuffix mathordcode mathopcode \
+mathbincode mathrelcode mathopencode mathclosecode mathpunctcode \
+mathalphacode mathinnercode mathnothingcode mathlimopcode mathnolopcode \
+mathboxcode mathchoicecode mathaccentcode mathradicalcode constantnumber \
+constantnumberargument constantdimen constantdimenargument constantemptyargument continueifinputfile \
+luastringsep !!bs !!es
+
keywordclass.context.helpers=\
startsetups stopsetups startxmlsetups stopxmlsetups \
startluasetups stopluasetups starttexsetups stoptexsetups startrawsetups \
stoprawsetups startlocalsetups stoplocalsetups starttexdefinition stoptexdefinition \
-starttexcode stoptexcode doifsetupselse doifsetups doifnotsetups \
-setup setups texsetup xmlsetup luasetup \
-directsetup newmode setmode resetmode newsystemmode \
+starttexcode stoptexcode startcontextcode stopcontextcode doifsetupselse \
+doifsetups doifnotsetups setup setups texsetup \
+xmlsetup luasetup directsetup doifelsecommandhandler doifnotcommandhandler \
+doifcommandhandler newmode setmode resetmode newsystemmode \
setsystemmode resetsystemmode pushsystemmode popsystemmode booleanmodevalue \
newcount newdimen newskip newmuskip newbox \
newtoks newread newwrite newmarks newinsert \
newattribute newif newlanguage newfamily newfam \
-newhelp then donothing dontcomplain donetrue \
-donefalse htdp unvoidbox vfilll mathbox \
-mathlimop mathnolop mathnothing mathalpha currentcatcodetable \
-defaultcatcodetable catcodetablename newcatcodetable startcatcodetable stopcatcodetable \
-startextendcatcodetable stopextendcatcodetable pushcatcodetable popcatcodetable restorecatcodes \
-setcatcodetable letcatcodecommand defcatcodecommand uedcatcodecommand hglue \
-vglue hfillneg vfillneg hfilllneg vfilllneg \
-ruledhss ruledhfil ruledhfill ruledhfilneg ruledhfillneg \
-normalhfillneg ruledvss ruledvfil ruledvfill ruledvfilneg \
-ruledvfillneg normalvfillneg ruledhbox ruledvbox ruledvtop \
-ruledvcenter ruledhskip ruledvskip ruledkern ruledmskip \
-ruledmkern ruledhglue ruledvglue normalhglue normalvglue \
-ruledpenalty scratchcounter globalscratchcounter scratchdimen globalscratchdimen \
+newhelp then firstargumentfalse firstargumenttrue secondargumentfalse \
+secondargumenttrue thirdargumentfalse thirdargumenttrue fourthargumentfalse fourthargumenttrue \
+fifthargumentfalse fifthsargumenttrue sixthargumentfalse sixtsargumenttrue doglobal \
+dodoglobal redoglobal resetglobal donothing dontcomplain \
+forgetall donetrue donefalse htdp unvoidbox \
+hfilll vfilll mathbox mathlimop mathnolop \
+mathnothing mathalpha currentcatcodetable defaultcatcodetable catcodetablename \
+newcatcodetable startcatcodetable stopcatcodetable startextendcatcodetable stopextendcatcodetable \
+pushcatcodetable popcatcodetable restorecatcodes setcatcodetable letcatcodecommand \
+defcatcodecommand uedcatcodecommand hglue vglue hfillneg \
+vfillneg hfilllneg vfilllneg ruledhss ruledhfil \
+ruledhfill ruledhfilneg ruledhfillneg normalhfillneg ruledvss \
+ruledvfil ruledvfill ruledvfilneg ruledvfillneg normalvfillneg \
+ruledhbox ruledvbox ruledvtop ruledvcenter ruledhskip \
+ruledvskip ruledkern ruledmskip ruledmkern ruledhglue \
+ruledvglue normalhglue normalvglue ruledpenalty filledhboxb \
+filledhboxr filledhboxg filledhboxc filledhboxm filledhboxy \
+filledhboxk scratchcounter globalscratchcounter scratchdimen globalscratchdimen \
scratchskip globalscratchskip scratchmuskip globalscratchmuskip scratchtoks \
-globalscratchtoks scratchbox globalscratchbox nextbox dowithnextbox \
-dowithnextboxcs dowithnextboxcontent dowithnextboxcontentcs scratchwidth scratchheight \
-scratchdepth scratchoffset scratchdistance scratchhsize scratchvsize \
-scratchcounterone scratchcountertwo scratchcounterthree scratchdimenone scratchdimentwo \
-scratchdimenthree scratchskipone scratchskiptwo scratchskipthree scratchmuskipone \
-scratchmuskiptwo scratchmuskipthree scratchtoksone scratchtokstwo scratchtoksthree \
-scratchboxone scratchboxtwo scratchboxthree doif doifnot \
-doifelse doifinset doifnotinset doifinsetelse doifnextcharelse \
-doifnextoptionalelse doifnextbgroupelse doifnextparenthesiselse doiffastoptionalcheckelse doifundefinedelse \
-doifdefinedelse doifundefined doifdefined doifelsevalue doifvalue \
-doifnotvalue doifnothing doifsomething doifelsenothing doifsomethingelse \
-doifvaluenothing doifvaluesomething doifelsevaluenothing doifdimensionelse doifnumberelse \
-doifcommonelse doifcommon doifnotcommon doifinstring doifnotinstring \
-doifinstringelse doifassignmentelse tracingall tracingnone loggingall \
-appendtoks prependtoks appendtotoks prependtotoks to \
-endgraf empty null space quad \
-enspace obeyspaces obeylines normalspace executeifdefined \
-singleexpandafter doubleexpandafter tripleexpandafter dontleavehmode removelastspace \
-removeunwantedspaces wait writestatus define redefine \
-setmeasure setemeasure setgmeasure setxmeasure definemeasure \
-measure getvalue setvalue setevalue setgvalue \
+globalscratchtoks scratchbox globalscratchbox availablehsize localhsize \
+setlocalhsize nextbox dowithnextbox dowithnextboxcs dowithnextboxcontent \
+dowithnextboxcontentcs scratchwidth scratchheight scratchdepth scratchoffset \
+scratchdistance scratchhsize scratchvsize scratchxoffset scratchyoffset \
+scratchhoffset scratchvoffset scratchxposition scratchyposition scratchtopoffset \
+scratchbottomoffset scratchleftoffset scratchrightoffset scratchcounterone scratchcountertwo \
+scratchcounterthree scratchdimenone scratchdimentwo scratchdimenthree scratchskipone \
+scratchskiptwo scratchskipthree scratchmuskipone scratchmuskiptwo scratchmuskipthree \
+scratchtoksone scratchtokstwo scratchtoksthree scratchboxone scratchboxtwo \
+scratchboxthree scratchnx scratchny scratchmx scratchmy \
+scratchunicode scratchleftskip scratchrightskip scratchtopskip scratchbottomskip \
+doif doifnot doifelse doifinset doifnotinset \
+doifinsetelse doifnextcharelse doifnextoptionalelse doifnextbgroupelse doifnextparenthesiselse \
+doiffastoptionalcheckelse doifundefinedelse doifdefinedelse doifundefined doifdefined \
+doifelsevalue doifvalue doifnotvalue doifnothing doifsomething \
+doifelsenothing doifsomethingelse doifvaluenothing doifvaluesomething doifelsevaluenothing \
+doifdimensionelse doifnumberelse doifnumber doifnotnumber doifcommonelse \
+doifcommon doifnotcommon doifinstring doifnotinstring doifinstringelse \
+doifassignmentelse docheckassignment tracingall tracingnone loggingall \
+removetoks appendtoks prependtoks appendtotoks prependtotoks \
+to endgraf endpar everyendpar reseteverypar \
+finishpar empty null space quad \
+enspace obeyspaces obeylines obeyedspace obeyedline \
+normalspace executeifdefined singleexpandafter doubleexpandafter tripleexpandafter \
+dontleavehmode removelastspace removeunwantedspaces keepunwantedspaces wait \
+writestatus define redefine setmeasure setemeasure \
+setgmeasure setxmeasure definemeasure freezemeasure measure \
+installcorenamespace getvalue setvalue setevalue setgvalue \
setxvalue letvalue letgvalue resetvalue undefinevalue \
ignorevalue setuvalue setuevalue setugvalue setuxvalue \
-globallet glet getparameters geteparameters getgparameters \
-getxparameters forgetparameters copyparameters processcommalist processcommacommand \
-quitcommalist quitprevcommalist processaction processallactions processfirstactioninset \
-processallactionsinset unexpanded expanded startexpanded stopexpanded \
-protected protect unprotect firstofoneargument firstoftwoarguments \
-secondoftwoarguments firstofthreearguments secondofthreearguments thirdofthreearguments firstoffourarguments \
-secondoffourarguments thirdoffourarguments fourthoffourarguments firstoffivearguments secondoffivearguments \
-thirdoffivearguments fourthoffivearguments fifthoffivearguments firstofsixarguments secondofsixarguments \
-thirdofsixarguments fourthofsixarguments fifthofsixarguments sixthofsixarguments firstofoneunexpanded \
-gobbleoneargument gobbletwoarguments gobblethreearguments gobblefourarguments gobblefivearguments \
-gobblesixarguments gobblesevenarguments gobbleeightarguments gobbleninearguments gobbletenarguments \
-gobbleoneoptional gobbletwooptionals gobblethreeoptionals gobblefouroptionals gobblefiveoptionals \
-dorecurse doloop exitloop dostepwiserecurse recurselevel \
-recursedepth dofastloopcs newconstant setnewconstant newconditional \
-settrue setfalse setconstant newmacro setnewmacro \
-newfraction dosingleempty dodoubleempty dotripleempty doquadrupleempty \
-doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \
-dotripleargument doquadrupleargument dosinglegroupempty dodoublegroupempty dotriplegroupempty \
-doquadruplegroupempty doquintuplegroupempty nopdfcompression maximumpdfcompression normalpdfcompression \
-modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \
-stopnointerference strut setstrut strutbox strutht \
-strutdp strutwd begstrut endstrut
-
-keywordclass.context.constants=\
-zerocount minusone minustwo plusone \
-plustwo plusthree plusfour plusfive plussix \
-plusseven pluseight plusnine plusten plussixteen \
-plushundred plusthousand plustenthousand plustwentythousand medcard \
-maxcard zeropoint onepoint halfapoint onebasepoint \
-maxdimen scaledpoint thousandpoint points halfpoint \
-zeroskip pluscxxvii pluscxxviii pluscclv pluscclvi \
-normalpagebox endoflinetoken outputnewlinechar emptytoks empty \
-undefined voidbox emptybox emptyvbox emptyhbox \
-bigskipamount medskipamount smallskipamount fmtname fmtversion \
-texengine texenginename texengineversion luatexengine pdftexengine \
-xetexengine unknownengine etexversion pdftexversion xetexversion \
-xetexrevision activecatcode bgroup egroup endline \
-conditionaltrue conditionalfalse attributeunsetvalue uprotationangle rightrotationangle \
-downrotationangle leftrotationangle inicatcodes ctxcatcodes texcatcodes \
-notcatcodes txtcatcodes vrbcatcodes prtcatcodes nilcatcodes \
-luacatcodes tpacatcodes tpbcatcodes xmlcatcodes escapecatcode \
-begingroupcatcode endgroupcatcode mathshiftcatcode alignmentcatcode endoflinecatcode \
-parametercatcode superscriptcatcode subscriptcatcode ignorecatcode spacecatcode \
-lettercatcode othercatcode activecatcode commentcatcode invalidcatcode \
-tabasciicode newlineasciicode formfeedasciicode endoflineasciicode endoffileasciicode \
-spaceasciicode hashasciicode dollarasciicode commentasciicode ampersandasciicode \
-colonasciicode backslashasciicode circumflexasciicode underscoreasciicode leftbraceasciicode \
-barasciicode rightbraceasciicode tildeasciicode delasciicode lessthanasciicode \
-morethanasciicode doublecommentsignal atsignasciicode exclamationmarkasciicode questionmarkasciicode \
-doublequoteasciicode singlequoteasciicode forwardslashasciicode primeasciicode activemathcharcode \
-activetabtoken activeformfeedtoken activeendoflinetoken batchmodecode nonstopmodecode \
-scrollmodecode errorstopmodecode bottomlevelgroupcode simplegroupcode hboxgroupcode \
-adjustedhboxgroupcode vboxgroupcode vtopgroupcode aligngroupcode noaligngroupcode \
-outputgroupcode mathgroupcode discretionarygroupcode insertgroupcode vcentergroupcode \
-mathchoicegroupcode semisimplegroupcode mathshiftgroupcode mathleftgroupcode vadjustgroupcode \
-charnodecode hlistnodecode vlistnodecode rulenodecode insertnodecode \
-marknodecode adjustnodecode ligaturenodecode discretionarynodecode whatsitnodecode \
-mathnodecode gluenodecode kernnodecode penaltynodecode unsetnodecode \
-mathsnodecode charifcode catifcode numifcode dimifcode \
-oddifcode vmodeifcode hmodeifcode mmodeifcode innerifcode \
-voidifcode hboxifcode vboxifcode xifcode eofifcode \
-trueifcode falseifcode caseifcode definedifcode csnameifcode \
-fontcharifcode fontslantperpoint fontinterwordspace fontinterwordstretch fontinterwordshrink \
-fontexheight fontemwidth fontextraspace slantperpoint interwordspace \
-interwordstretch interwordshrink exheight emwidth extraspace \
-mathsupdisplay mathsupnormal mathsupcramped mathsubnormal mathsubcombined \
-mathaxisheight startmode stopmode startnotmode stopnotmode \
-startmodeset stopmodeset doifmode doifmodeelse doifnotmode \
-startallmodes stopallmodes startnotallmodes stopnotallmodes doifallmodes \
-doifallmodeselse doifnotallmodes startenvironment stopenvironment environment \
-startcomponent stopcomponent component startproduct stopproduct \
-product startproject stopproject project starttext \
-stoptext startnotext stopnotext startdocument stopdocument \
-documentvariable startmodule stopmodule usemodule startTEXpage \
-stopTEXpage enablemode disablemode preventmode pushmode \
-popmode typescriptone typescripttwo typescriptthree mathsizesuffix \
-mathordcode mathopcode mathbincode mathrelcode mathopencode \
-mathclosecode mathpunctcode mathalphacode mathinnercode mathnothingcode \
-mathlimopcode mathnolopcode mathboxcode mathchoicecode mathaccentcode \
-mathradicalcode constantnumber constantnumberargument constantdimen constantdimenargument \
-constantemptyargument continueifinputfile
+globallet glet udef ugdef uedef \
+uxdef getparameters geteparameters getgparameters getxparameters \
+forgetparameters copyparameters getdummyparameters dummyparameter directdummyparameter \
+setdummyparameter letdummyparameter usedummystyleandcolor usedummystyleparameter usedummycolorparameter \
+processcommalist processcommacommand quitcommalist quitprevcommalist processaction \
+processallactions processfirstactioninset processallactionsinset unexpanded expanded \
+startexpanded stopexpanded protected protect unprotect \
+firstofoneargument firstoftwoarguments secondoftwoarguments firstofthreearguments secondofthreearguments \
+thirdofthreearguments firstoffourarguments secondoffourarguments thirdoffourarguments fourthoffourarguments \
+firstoffivearguments secondoffivearguments thirdoffivearguments fourthoffivearguments fifthoffivearguments \
+firstofsixarguments secondofsixarguments thirdofsixarguments fourthofsixarguments fifthofsixarguments \
+sixthofsixarguments firstofoneunexpanded gobbleoneargument gobbletwoarguments gobblethreearguments \
+gobblefourarguments gobblefivearguments gobblesixarguments gobblesevenarguments gobbleeightarguments \
+gobbleninearguments gobbletenarguments gobbleoneoptional gobbletwooptionals gobblethreeoptionals \
+gobblefouroptionals gobblefiveoptionals dorecurse doloop exitloop \
+dostepwiserecurse recurselevel recursedepth dofastloopcs newconstant \
+setnewconstant newconditional settrue setfalse setconstant \
+newmacro setnewmacro newfraction newsignal dosingleempty \
+dodoubleempty dotripleempty doquadrupleempty doquintupleempty dosixtupleempty \
+doseventupleempty dosingleargument dodoubleargument dotripleargument doquadrupleargument \
+doquintupleargument dosixtupleargument doseventupleargument dosinglegroupempty dodoublegroupempty \
+dotriplegroupempty doquadruplegroupempty doquintuplegroupempty permitspacesbetweengroups dontpermitspacesbetweengroups \
+nopdfcompression maximumpdfcompression normalpdfcompression modulonumber dividenumber \
+getfirstcharacter doiffirstcharelse startnointerference stopnointerference twodigits \
+threedigits strut setstrut strutbox strutht \
+strutdp strutwd struthtdp begstrut endstrut \
+lineheight ordordspacing ordopspacing ordbinspacing ordrelspacing \
+ordopenspacing ordclosespacing ordpunctspacing ordinnerspacing opordspacing \
+opopspacing opbinspacing oprelspacing opopenspacing opclosespacing \
+oppunctspacing opinnerspacing binordspacing binopspacing binbinspacing \
+binrelspacing binopenspacing binclosespacing binpunctspacing bininnerspacing \
+relordspacing relopspacing relbinspacing relrelspacing relopenspacing \
+relclosespacing relpunctspacing relinnerspacing openordspacing openopspacing \
+openbinspacing openrelspacing openopenspacing openclosespacing openpunctspacing \
+openinnerspacing closeordspacing closeopspacing closebinspacing closerelspacing \
+closeopenspacing closeclosespacing closepunctspacing closeinnerspacing punctordspacing \
+punctopspacing punctbinspacing punctrelspacing punctopenspacing punctclosespacing \
+punctpunctspacing punctinnerspacing innerordspacing inneropspacing innerbinspacing \
+innerrelspacing inneropenspacing innerclosespacing innerpunctspacing innerinnerspacing \
+normalreqno startimath stopimath normalstartimath normalstopimath \
+startdmath stopdmath normalstartdmath normalstopdmath uncramped \
+cramped triggermathstyle mathstylefont mathsmallstylefont mathstyleface \
+mathsmallstyleface mathstylecommand mathpalette mathstylehbox mathstylevbox \
+mathstylevcenter mathstylevcenteredhbox mathstylevcenteredvbox mathtext setmathsmalltextbox \
+setmathtextbox triggerdisplaystyle triggertextstyle triggerscriptstyle triggerscriptscriptstyle \
+triggeruncrampedstyle triggercrampedstyle triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle \
+triggerbigstyle triggeruncrampedbigstyle triggercrampedbigstyle luaexpr expdoifelse \
+expdoif expdoifnot expdoifcommonelse expdoifinsetelse ctxdirectlua \
+ctxlatelua ctxsprint ctxwrite ctxcommand ctxdirectcommand \
+ctxlatecommand ctxreport ctxlua luacode lateluacode \
+directluacode registerctxluafile ctxloadluafile luaversion luamajorversion \
+luaminorversion ctxluacode luaconditional luaexpanded startluaparameterset \
+stopluaparameterset luaparameterset definenamedlua obeylualines obeyluatokens \
+startluacode stopluacode startlua stoplua carryoverpar \
+Umathbotaccent
diff --git a/Master/texmf-dist/context/data/scite/scite-context-data-interfaces.properties b/Master/texmf-dist/context/data/scite/scite-context-data-interfaces.properties
index c205bb568d9..9c2ca4623fc 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-data-interfaces.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-data-interfaces.properties
@@ -1,488 +1,3 @@
-keywordclass.context.it=\
-CAP Cap Caps GIORNOSETTIMANA \
-Lettera Lettere MESE Numeri Numeriromani \
-PAROLA PAROLE Parola Parole accoppiacarta \
-accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \
-adattalayout al allineacentro allineadestra allineasinistra \
-altezzacarta altezzacartastampa altezzacima altezzaelenco altezzafondo \
-altezzaintestazione altezzamakeup altezzapdp altezzatesto ambiente \
-ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta ampiezzacartastampa \
-ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro ampiezzamarginesinistro \
-ampiezzatesto ap apagina appendix arg \
-atleftmargin atrightmargin barracolori barrainterazione barrasincronizzazione \
-bastablocchi bastafile cambiaafontdeltesto campi camporiempimento \
-cap capello chapter chim circondato \
-citazione clip clonacampo colonna colore \
-coloregrigio comment commento completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms completeregister componenet confrontagruppocolori \
-confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister \
-crlf cutspace da daqualcheparte data \
-datadioggi datareferral decrementnumber definebodyfontDEF definebodyfontREF \
-definecolumnbreak definecombination definedfont definefontfeature definefonthandling \
-defineindentedtext defineinmargin defineitemgroup definelayer definemathalignment \
-definepagebreak defineplacement definetypeface definisci definisciaccento \
-definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo \
-definiscicampoprincipale definiscicapoversi definiscicarattere definiscicolore definiscicomando \
-definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato \
-definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto \
-definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne definiscigruppocolori definiscihbox \
-definisciincorniciato definisciiniziatermina definiscilayout definiscilinea definiscilistariferimenti \
-definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimodellotabella \
-definiscioggettomobile definisciordinamento definiscioutput definisciposizionetesto definisciprofilo \
-definisciprogramma definisciregistro definisciriferimento definiscirigovuoto definiscisezione \
-definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione \
-definiscistackcampi definiscistile definiscistilefont definiscitabulato definiscitavolozza \
-definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \
-definiscivariabiletesto definisciversion description determinacaratteristicheregistro determinacarattersticheelenco \
-determinanumerotesta dimensione disabilitamenuinterazione distanzabordo distanzabordodestro \
-distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
-distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
-elaborablocchi elaborapagina elementi elemento emptylines \
-enumeration etichetta etichette fatto figuraesterna \
-fondo forzablocchi framedtext frazione getnumber \
-giornosettimana griglia headsym hl ignoto \
-immediatebetweenlist immediatetolist impaccato impostaallineamento impostaambientefontdeltesto \
-impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco \
-impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi \
-impostacaption impostacaptions impostacima impostaclippling impostacolonne \
-impostacolore impostacolori impostacombinazioni impostacommento impostacommentopagina \
-impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelencazioni \
-impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo \
-impostafontdeltesto impostaforms impostaformule impostagruppocolonne impostaincorniciato \
-impostainiziatermina impostainmargine impostainstestazione impostainterazione impostainterlinea \
-impostalayout impostalegenda impostalinea impostalineemargine impostalineenere \
-impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti \
-impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione \
-impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe \
-impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \
-impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto \
-impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili impostaprogrammi \
-impostapubblicazioni impostapulsanti impostaregistro impostarientro impostariferimento \
-impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi \
-impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi \
-impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \
-impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab \
-impostatabelle impostatabulato impostatavolozza impostatesta impostateste \
-impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp \
-impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza \
-impostatransizionepagina impostatype impostatyping impostaurl impostavariabiletesto \
-impostaversioni impostazioni in inaltromargine incorniciato \
-incrementanumero indentation indestra ininner iniziaallineamento \
-iniziaambiente iniziabloccomargine iniziacitazione iniziacodifica iniziacolonne \
-iniziacolore iniziacombinazione iniziacomponente iniziacorrezioneriga iniziadocumento \
-iniziafigura iniziaglobale iniziagruppocolonne iniziaimpaccato inizialineamargine \
-inizialineatesto inizialocale iniziamakeup inizianotepdplocali inizianumerazionerighe \
-iniziaopposto iniziaoverview iniziapiustretto iniziaposizionamento iniziaprodotto \
-iniziaprofilo iniziaprogetto iniziaraster iniziariga iniziarighe \
-iniziasetsimboli iniziasfondo iniziasincronizzazione iniziasovrapposizione iniziatabella \
-iniziatabelle iniziatesto iniziaunpacked iniziaversione inlatodestro \
-inlatosinistro inmaframed inmargine inmarginedestro inmarginesinistro \
-inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
-inriga insinistra installalingua intorno labeling \
-leg lettera lettere lineamargine lineanera \
-lineasottile lineatesto lineenere lineeriempimento lineesottili \
-lingua linguaprincipale listsymbol livellotesta loadsorts \
-loadsynonyms logcampi lunghezzaelenco maframed mapfontsize \
-mar marcatura marcaversione matematica mediaeval \
-menuinterattivo menzione mese mettielenco mettielencocombinato \
-mettifiancoafianco mettiformula mettiingriglia mettilegenda mettilinea \
-mettiloghi mettinotepdp mettinotepdplocali mettinumeropagina mettiregistro \
-mettisegnalibro mettisottoformula mettiunosullaltro mettivariabiletesto mostraambientefontdeltesto \
-mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
-mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
-mostrasetsimboli mostrastampa mostrastruts mostratavolozza movesidefloat \
-name nascondiblocchi navigating nextsection nientedimensioni \
-nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro \
-nientespazio nientespaziobianco nocap nome nomeunita \
-nop nota notapdp notest numberofsubpages \
-numeri numeriromani numeroformula numeropagina numeropaginacompleto \
-numerosottoformula numerotesta numerotestacorrente numerototaledipagine outeredgedistance \
-outeredgewidth outermargindistance outermarginwidth overbar overbars \
-overstrike overstrikes pagedepth pageoffset pagina \
-paragraph paroladestra parolainmargine part passaafontgrezzo \
-ped pedap perlungo placefloat placelistoffloats \
-placelistofsorts placelistofsynonyms placerawlist placereferencelist posizionanumerotesta \
-posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
-prodotto progetto programma pubblicazione pulsante \
-pulsantemenu pulsantinterazione punti qualcheriga ran \
-referral referring register reimposta reimpostamarcatura \
-reservefloat resetnumber resettextcontent rientro rif \
-rifai riferimento riferimentopagina riferimentotesto riflessione \
-rigariempimento rigovuoto ruota saltablocchi scala \
-schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro \
-section seeregister segnalibro seguiprofilo seguiversione \
-seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura \
-setnumber settext setupanswerarea setupcolumnsetlines setupcolumnsetstart \
-setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setuppaper setupplacement setvariabiletesto \
-sfondo sim simbolo sincronizza sort \
-spazifissi spazio spaziobianco spaziocima spaziodietro \
-spaziofisso spaziofondo spessoreriga spezzaoggettomobile spostaagriglia \
-spostaformula stackcampi startalignment startambiente startbuffer \
-startcitazione startcolore startcolumnmakeup startcolumns startcombination \
-startcomment startcomponenet startdescription startdocument startenumeration \
-startfatto startfigure startfloattext startformula startframedtext \
-starthiding startimpaccato startitemgroup startlegend startline \
-startlineamargine startlineatesto startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositioning startpostponing startprodotto \
-startprofile startprogetto startregister startsfondo startsymbolset \
-startsynchronization starttable starttables starttabulate starttyping \
-startunpacked startversione stirato stopalignment stopambiente \
-stopbuffer stopcitazione stopcolore stopcolumnmakeup stopcolumns \
-stopcombination stopcomment stopcomponenet stopdescription stopdocument \
-stopenumeration stopfatto stopfigure stopfloattext stopformula \
-stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \
-stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering \
-stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
-stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset \
-stopsynchronization stoptable stoptables stoptabulate stoptyping \
-stopunpacked stopversione sub subject subpagenumber \
-subsection subsubject subsubsection subsubsubject synonym \
-tab terminaallineamento terminaambiente terminabloccomargine terminacitazione \
-terminacodifica terminacolonne terminacolore terminacombinazione terminacomponente \
-terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne terminaimpaccato \
-terminalineamargine terminalineatesto terminalocale terminamakeup terminanotepdplocali \
-terminanumerazionerighe terminaopposto terminaoverview terminapiustretto terminaposizionamento \
-terminaprodotto terminaprofili terminaprogetto terminaraster terminariga \
-terminarighe terminasfondo terminasincronizzazione terminasovrapposizione terminatabella \
-terminatabelle terminatesto terminaunpacked terminaversioni testa \
-testcolumn testoetichetta testoinmargine testoinstestazioni testonotapdp \
-testoriempimento testpage tex tieniblocchi title \
-titoloinmargine tooltip traduci txt typ \
-type typebuffer typefile underbar underbars \
-usaJSscripts usaURL usablocco usacartella usacodifica \
-usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni \
-usafileesterno usamoduli usamodulo usariferimenti usasimboli \
-usaspecialita usaurl useXMLfilter usedirectory usetypescript \
-usetypescriptfile vaia vaiabox vaiapagina vaigiu \
-valorecolore valoregrigio variabiletesto versione vl
-
-keywordclass.context.fr=\
-CAP Cap Caps Caractere \
-Caracteres Chiffresromains JOURSEMAINE MOIS MOT \
-MOTS Mot Mots Numeros a \
-adaptedisposition affectenumero affectevariabletexte ajustechamp alaligne \
-alapage aligneadroite aligneagauche aligneaumilieu appendix \
-arg arriereplan atleftmargin atrightmargin baha \
-barrecouleur barreinteraction barresynchronisation bas bouton \
-boutonmenu boutonsinteraction but cacheblocs cap \
-caractere caracteres champ changepolicebrute changepolicecorps \
-chapter chem chiffresromains citation citer \
-clip clonechamp colonne comment commentaire \
-comparegroupecouleur comparepalette completecombinedlist completelistoffloats completelistofsorts \
-completelistofsynonyms completenumeropage completeregistre composant composeenalinea \
-concernant convertitnumero copitchamp corrigeespaceblanc couleur \
-couleurgrise coupledocument coupledregister couplemarquage couplepapier \
-coupleregistre crlf cutspace dactylographier dans \
-dansautremarge dansborddroit dansbordgauche dansdroite dansgauche \
-dansmarge dansmargedroite dansmargegauche date datecourante \
-daterecommandation de decouplemarquage decrementenumero definebodyfontDEF \
-definebodyfontREF definecombination definedfont definefontfeature definefonthandling \
-defineframed defineframedtext defineindentedtext defineitemgroup definemathalignment \
-defineplacement definetypeface definicaractere definit definitaccent \
-definitbloc definitblocsection definitbuffer definitcalque definitchamp \
-definitchampprincipal definitcommande definitconversion definitcouleur definitdactylo \
-definitdansmarge definitdemarrestoppe definitdescription definitdisposition definitenumeration \
-definitenvironnementpolicecorps definitetiquette definitflottant definitformatreference definitgroupecouleur \
-definithbox definitjeucolonne definitliste definitlisteimbriquee definitlistereference \
-definitlogo definitmakeup definitmarquage definitmenuinteraction definitnotepdp \
-definitpalette definitparagraphes definitpilechamp definitpolice definitpolicebrute \
-definitpolicecorps definitpositiontexte definitprofil definitprogramme definitreference \
-definitregistre definitregle definitrevetement definitsautdecolonne definitsautdepage \
-definitsection definitsortie definitsouschamp definitstyle definitstylepolice \
-definitsymbole definitsymbolefigure definitsynonymepolice definitsynonymes definittabulation \
-definittaillepapier definittete definittexte definittrametableau definittri \
-definittype definitvariabletexte definitversion definitvide demarrealignement \
-demarrearriereplan demarreblocmarge demarrecitation demarreciter demarrecodage \
-demarrecolonnes demarrecombinaison demarrecompoetroite demarrecomposant demarrecorrectionligne \
-demarrecouleur demarredegroupe demarredocument demarreenvironement demarrefigure \
-demarreglobal demarregroupe demarrejeucolonne demarrejeusymboles demarreligne \
-demarreligneregleetexte demarrelignes demarrelocal demarremakeup demarremargereglee \
-demarrenotespdplocales demarrenumerotationligne demarreopposition demarrepositionnement demarreproduit \
-demarreprofil demarreprojet demarreraster demarrerevetement demarresynchronisation \
-demarretableau demarretableaux demarretexte demarreversion demarrevuedensemble \
-deplaceformule deplacesurgrille description determinecaracteristiqueliste determinecaracteristiquesregistre \
-determinenumerotete dimension distancebord distanceborddroit distancebordgauche \
-distanceentete distanceinf distancemarge distancemargedroite distancemargegauche \
-distancepdp distancesup domicile echelle ecran \
-ecritdansliste ecritdanslistereference ecritentreliste ecritregistre el \
-element elements emptylines enumeration environement \
-espace espaceblanc espacefixe espaceinf espacesfixes \
-espacesup etiquette etiquettes etire fait \
-faitreference fichierdactylo figureexterne forceblocs fraction \
-framed framedtext gardeblocs getnumber grille \
-groupe haut hauteureditionpapier hauteurentete hauteurinf \
-hauteurliste hauteurmakeup hauteurpapier hauteurpdp hauteursup \
-hauteurtexte headsym hl immediatebetweenlist immediatetolist \
-inconnu incrementenumero indentation inframed infull \
-inhibemenuinteraction ininner inmframed inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter installelangue joursemaine \
-labeling labeltexte langue langueprincipale largeurbord \
-largeurborddroit largeurbordgauche largeureditionpapier largeurligne largeurliste \
-largeurmakeup largeurmarge largeurmargedroite largeurmargegauche largeurpapier \
-largeurtexte leg ligneh lignenoire ligneregleetexte \
-lignesnoires listesymbole llongueurliste loadsorts loadsynonyms \
-logchamp mapfontsize mar margereglee marquage \
-marquageversion marquepage mathematique mediaeval menuinteraction \
-mframed mois montrecadre montrechamps montrecouleur \
-montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille \
-montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps \
-montrereglages montrestruts motdroit motmarge movesidefloat \
-name navigating nextsection niveautete nocap \
-nombredesouspages nombretotaldepages nommacro nop note \
-notepdp numeroformule numeropage numeros numerosousformule \
-numerotete numerotetecourant obtientmarquage oriente outeredgedistance \
-outeredgewidth outermargindistance outermarginwidth overbar overbars \
-overstrike overstrikes page pagedepth pagedouble \
-pageoffset paragraph part pasplusdeblocs pasplusdefichiers \
-periodes pilechamp placecoteacote placeflottant placeformule \
-placelegende placelesunsaudessusdesautres placeliste placelisteinmbriquee placelistereference \
-placelistoffloats placelistofsorts placelistofsynonyms placelogos placemarquespages \
-placenotespdp placenotespdplocales placenumeropage placenumerotete placerawlist \
-placeregistre placeregle placesousformule placesurgrille placetextetete \
-placevariabletexte position positionnetexte prendbuffer produit \
-programme projet publication qqpart ran \
-raz razmarquage raznumero recommandation ref \
-refait reference referencepage referencetexte reflete \
-register reglages reglealignement reglearrangement reglearriereplan \
-reglearriereplans reglebarreinteraction reglebarresynchronisation reglebloc regleblocmarge \
-regleblocsection regleboutons reglebuffer reglecapitales reglechamp \
-reglechamps regleclipping reglecolonnes reglecombinaisons reglecommentaire \
-reglecommentairepage reglecompoetroite reglecomposeenalinea reglecouleur reglecouleurs \
-regledactylo regledansmarge regledemarrestoppe regledescriptions regledisposition \
-regleecraninteraction regleecrans regleelements regleencadre regleentete \
-regleenumerations regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement \
-regleespacementinterligne reglefiguresexternes regleflottant regleflottants regleformulaires \
-regleformules reglegroupeselements regleinf regleinteraction regleintitule \
-regleintitules reglejeucolonne reglejeusymboles reglelabeltexte reglelangue \
-reglelegende reglelignes reglelignesnoires reglelignesreglestexte regleliste \
-reglelisteimbriquee reglelistereference reglemakeup reglemargereglee reglemarquage \
-reglemarquagehyphenation reglemenuinteraction reglenotepdp reglenumero reglenumeropage \
-reglenumerotation reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe reglenumerotete \
-regleoriente reglepalette reglepapier regleparagraphes reglepdp \
-regleplacementopposition reglepolicecorps reglepositionnement reglepositiontexte regleprofils \
-regleprogrammes reglepublications reglereferencage regleregistre regleregle \
-regleremplitligne regleremplitlignesreglees reglesection regleseparationflottant reglesortie \
-reglesouslignage reglesousnumeropage reglestrut reglesup reglesynchronisation \
-reglesynonymes reglesysteme regletab regletableaux regletabulation \
-regletaillepapier regletete regletetes regletexte regletextesentete \
-regletextesinf regletextespdp regletextessup regletextestexte regletextetete \
-regletolerance regletraitsfins regletransitionspage regletri regletype \
-regleurl reglevariabletexte regleversions remplitchamp remplitligne \
-remplitlignesreglees remplittexte reservefloat resettextcontent retourarriere \
-sansalinea sansdimension sansespace sansespaceblanc sanslignesenteteetpdp \
-sanslignessupetinf sansliste sansmarquage sanstest sauteblocs \
-section seeregister selectionneblocs selectionnepapier selectionneversion \
-sensunite separeflottant settext setupanswerarea setupcolumnsetlines \
-setupcolumnsetstart setupfonthandling setupfontsynonym setupframedtexts setupindentedtext \
-setupinterlinespace2 setupitemgroup setuplistalternative setupmathalignment setupplacement \
-sort sousnumeropage startalignment startarriereplan startbuffer \
-startcitation startcolumnmakeup startcolumns startcombination startcomment \
-startcomposant startcouleur startdescription startdocument startenumeration \
-startenvironement startfait startfigure startfloattext startformula \
-startframedtext startgroupe starthiding startitemgroup startlegend \
-startligneregleetexte startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmargereglee \
-startmarginblock startmenuinteraction startnamemakeup startnarrower startopposite \
-startoverlay startoverview startparagraph startpositioning startpostponing \
-startproduit startprofile startprojet startregister startsymbolset \
-startsynchronization starttable starttables starttabulate starttyping \
-startunpacked startversion stopalignment stoparriereplan stopbuffer \
-stopcitation stopcolumnmakeup stopcolumns stopcombination stopcomment \
-stopcompoetroite stopcomposant stopcouleur stopdescription stopdocument \
-stopenumeration stopenvironement stopfait stopfigure stopfloattext \
-stopformula stopframedtext stopgroupe stophiding stopitemgroup \
-stoplegend stopligneregleetexte stopline stoplinecorrection stoplinenumbering \
-stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmargereglee stopmarginblock stopmenuinteraction stopnamemakeup stopnarrower \
-stopopposite stopoverlay stopoverview stopparagraph stoppealignement \
-stoppearriereplan stoppeblocmarge stoppecitation stoppecodage stoppecolonnes \
-stoppecombinaison stoppecomposant stoppecorrectionligne stoppecouleur stoppedegroupe \
-stoppedocument stoppeenvironement stoppeglobal stoppegroupe stoppejeucolonne \
-stoppeligne stoppeligneregleetexte stoppelignes stoppelocal stoppemakeup \
-stoppemargereglee stoppenotespdplocales stoppenumerotationligne stoppeopposition stoppepositionnement \
-stoppeproduit stoppeprofil stoppeprojet stopperaster stopperevetement \
-stoppesynchronisation stoppetableau stoppetableaux stoppetexte stoppeversion \
-stoppevuedensemble stoppositioning stoppostponing stopproduit stopprofile \
-stopprojet stopsymbolset stopsynchronization stoptable stoptables \
-stoptabulate stoptyping stopunpacked stopversion sub \
-subject subsection subsubject subsubsection subsubsubject \
-suggestion suivantprofil suivantversion suivantversionprofil sym \
-symbole synchronise synonym tab tapebuffer \
-testcolumn testpage tete tex textemarge \
-textenotepdp textetete title titremarge traduire \
-traiteblocs traitepage traitfin traitsfins txt \
-typ underbar underbars uneligne useXMLfilter \
-usedirectory usetypescript usetypescriptfile utiliseJSscripts utiliseURL \
-utiliseblocs utilisechemin utilisecommandes utilisedocumentexterne utiliseencodage \
-utilisefichierexterne utilisefichiersexternes utilisefigureexterne utilisemodule utilisemodules \
-utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles utiliseurl \
-va vaalaboite vaalapage vaenbas valeurcouleur \
-valeurgris variabletexte version vide vl
-
-keywordclass.context.de=\
-Buchstabe Buchstaben CAP Cap \
-Caps KAP Kap Kaps MONAT \
-Roemischezahlen WOCHENTAG WOERTER WORT Woerter \
-Wort Ziffern abstandlinkerrand abstandoben abstandrechterrand \
-abstandunten amgitterausrichten amgitterneuausrichten appendix arg \
-atleftmargin atrightmargin aufseite ausfuellfeld ausfuelltext \
-ausschnitt bearbeitebloecke bearbeiteseite bedeutung behaltebloecke \
-bei bemerkung benutzekodierung benutzespezielles benutzeverzeichnis \
-beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika \
-bildschirm blanko bookmark bottomspace breitelinkerrand \
-breiterechterrand bruch buchstabe buchstaben but \
-bypassblocks cap chapter chem comment \
-completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completepagenumber \
-completeregister coupledregister crlf cutspace datum \
-decrementnumber definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset \
-definecombination definedfont definefontfeature definefonthandling defineindentedtext \
-defineinmargin defineitemgroup definelayer definelayout definemathalignment \
-defineoutput definepagebreak defineplacement definerawfont definerule \
-defineschriftsynonym definetextposition definetextvariable definetype definetypeface \
-definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definiereakzent \
-definierebefehl definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko \
-definiereblock definierefarbe definierefarbengruppe definierefeld definierefeldstapel \
-definierefliesstext definierefliesstextumgebung definieregleitobjekt definierehauptfeld definierehbox \
-definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo \
-definieren definierenummerierung definiereoverlay definierepalette definierepapierformat \
-definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \
-definierereferenzliste definiereregister definiereschrift definiereschriftstil definieresortieren \
-definierestartstop definierestil definieresubfeld definieresymbol definieresynonyme \
-definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift \
-definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezeichen \
-definierezusammengestellteliste description dimension doppelseite doppelseitigespapier \
-drehen duennelinie duennerumriss einezeile einstellungen \
-einziehen emptylines entknuepfebeschriftung enumeration externeabbildung \
-farbbalken farbe farbewert feld feldstapel \
-festesspatium folgeprofil folgeprofilversion folgeversion footnotetext \
-forceblocks format formelnummer framedtext fussnote \
-fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
-gesamtseitenanzahl gestreckt getnumber gitter graufarbe \
-grauwert haarlinie hauptsprache headsym heutigesdatum \
-heutigeskopfnummer hintergrund hl hoch hoeheoben \
-hoeheunten holebeschriftung holepuffer imlinken imlinkenrand \
-immaumrise immediatebetweenlist immediatetolist imrechten imrechtenrand \
-imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation \
-ininner inlinkermarginale inmarginalie inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter inrechtermarginale installieresprache \
-interaktionsbalken interaktionsknopfe interaktionsmenue inzeile irgendwo \
-its kap keindimension keinebeschriftung keinebloeckemehr \
-keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keintest \
-keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf \
-komponente konvertierezahl kopf kopfniveau kopfnummer \
-kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld korrigierezwischenraum \
-label labeling labels labeltext leg \
-liniendicke linkemarginalafstand linkemarginalbreite linksbuendig listenbreite \
-listenhoehe listenlaenge listsymbol loadsorts loadsynonyms \
-mapfontsize mar marginalafstand marginalbreite marginallinie \
-marginaltext marginaltitel marginalwort mathematik maumrise \
-mediaeval menueknopf monat moveformula movesidefloat \
-nachunten name navigating nextsection nichteinziehen \
-nocap nokap nop notiz numberofsubpages \
-nummererhoehen outeredgedistance outeredgewidth outermargindistance outermarginwidth \
-overbar overbars overstrike overstrikes pagedepth \
-pageoffset papierbreite papierhoehe paragraph part \
-passelayoutan passendfeld placefloat placeheadnumber placeheadtext \
-placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
-placereferencelist placerule placetextvariable platzierebookmarks platziereformel \
-platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten \
-platzierenebeneinander platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste \
-pos position positiontext posten printpapierbreite \
-printpapierhoehe produkt programm projekt publikation \
-punkt ran randabstand randbreite rechteck \
-rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
-referenz referieren register registrierefelder reservefloat \
-resetnumber resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung \
-rumpfweite satzbreite satzhoehe schreibezumregister schreibezurliste \
-schreibezurreferenzliste schreibezwischenliste section seeregister seite \
-seitenreferenz seitenummer setnumber settext settextvariable \
-setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart setupfonthandling \
-setupfontsynonym setupforms setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
-setuprule setupstartstop setupstrut setuptextposition setuptextvariable \
-showsymbolset sort spalte spatium spiegeln \
-sprache startabbildung startalignment startausrichtung startbuffer \
-startcolumnmakeup startcolumns startcolumnset startcombination startcomment \
-startdescription startdocument startdokument startenger startenumeration \
-startfarbe startfigure startfloattext startformula startframedtext \
-startgeg startgegenueber startglobal startgrosserdurchschuss starthiding \
-starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkodierung \
-startkombination startkomponente startlegend startline startlinecorrection \
-startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
-startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
-startmarginblock startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositionieren startpositioning startpostponing \
-startprodukt startprofil startprofile startprojekt startraster \
-startregister startspalten startsymbolset startsynchronisation startsynchronization \
-starttabelle starttabellen starttable starttables starttabulate \
-starttext starttextlinie starttyping startueberblick startumbruch \
-startumgebung startunpacked startversion startzeile startzeilen \
-startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein stelleabsatznummerierungein \
-stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein stelleausgabeein \
-stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein \
-stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein \
-stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein stellefarbeein \
-stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein \
-stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein \
-stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \
-stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein \
-stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein \
-stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein \
-stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein \
-stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt \
-stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \
-stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein \
-stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein \
-stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein \
-stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein \
-stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \
-stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein \
-stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein \
-stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein \
-stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein \
-stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopausrichtung \
-stopbuffer stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
-stopcomment stopdescription stopdocument stopdokument stopenger \
-stopenumeration stopfarbe stopfigure stopfloattext stopformula \
-stopframedtext stopgeg stopgegenueber stopglobal stopgrosserdurchschuss \
-stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \
-stopkodierung stopkombination stopkomponente stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
-stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositionieren stoppositioning \
-stoppostponing stopprodukt stopprofil stopprofile stopprojekt \
-stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
-stoptabelle stoptabellen stoptable stoptables stoptabulate \
-stoptext stoptextlinie stoptyping stopueberblick stopumbruch \
-stopumgebung stopunpacked stopversion stopzeile stopzeilen \
-stopzeilenkorrektur stopzeilennumerierung stopzitat sub subject \
-subpagenumber subsection subsubject subsubsection subsubsubject \
-switchtorawfont sym symbol synchronisationsbalken synchronisieren \
-synonym tab teilegleitobjekt testcolumn testpage \
-tex textbreite texthoehe textlinie textreferenz \
-textvariable tief tiho tip tippedatei \
-tippen tippepuffer title tooltip txt \
-ueber ueberschrifttext uebersetzten umgebung umrahmt \
-unbekant underbar underbars unterformelnummer useXMLfilter \
-usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \
-vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version \
-verweis verweisdatum verwendeJSscript verwendeURL verwendebefehl \
-verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien \
-verwendeexternestonstueck verwendemodul verwendemodule verwendereferenzen verwendesymbole \
-verwendeurl vl volleswort von waehlebloeckeaus \
-waehlepapieraus waehleversionaus wechselezumfliesstext wiederholen wochentag \
-wohnort wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen \
-zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung \
-zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts \
-zeigeumbruch zentriert ziffern zitat zitieren \
-zu zurbox zurseite zwischenraum
-
keywordclass.context.cs=\
CAP Cap Caps Cisla \
KAP Kap Kaps MESIC Rimskecislice \
@@ -643,6 +158,165 @@ ziskejbuffer ziskejznaceni zlomek znaceni znak \
znaky zpracujbloky zpracujstranu zrcadlit zref \
zvysujicicislo
+keywordclass.context.de=\
+Buchstabe Buchstaben CAP Cap \
+Caps KAP Kap Kaps MONAT \
+Roemischezahlen WOCHENTAG WOERTER WORT Woerter \
+Wort Ziffern abstandlinkerrand abstandoben abstandrechterrand \
+abstandunten amgitterausrichten amgitterneuausrichten appendix arg \
+atleftmargin atrightmargin aufseite ausfuellfeld ausfuelltext \
+ausschnitt bearbeitebloecke bearbeiteseite bedeutung behaltebloecke \
+bei bemerkung benutzekodierung benutzespezielles benutzeverzeichnis \
+beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika \
+bildschirm blanko bookmark bottomspace breitelinkerrand \
+breiterechterrand bruch buchstabe buchstaben but \
+bypassblocks cap chapter chem comment \
+completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completepagenumber \
+completeregister coupledregister crlf cutspace datum \
+decrementnumber definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset \
+definecombination definedfont definefontfeature definefonthandling defineindentedtext \
+defineinmargin defineitemgroup definelayer definelayout definemathalignment \
+defineoutput definepagebreak defineplacement definerawfont definerule \
+defineschriftsynonym definetextposition definetextvariable definetype definetypeface \
+definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definiereakzent \
+definierebefehl definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko \
+definiereblock definierefarbe definierefarbengruppe definierefeld definierefeldstapel \
+definierefliesstext definierefliesstextumgebung definieregleitobjekt definierehauptfeld definierehbox \
+definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo \
+definieren definierenummerierung definiereoverlay definierepalette definierepapierformat \
+definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \
+definierereferenzliste definiereregister definiereschrift definiereschriftstil definieresortieren \
+definierestartstop definierestil definieresubfeld definieresymbol definieresynonyme \
+definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift \
+definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezeichen \
+definierezusammengestellteliste description dimension doppelseite doppelseitigespapier \
+drehen duennelinie duennerumriss einezeile einstellungen \
+einziehen emptylines entknuepfebeschriftung enumeration externeabbildung \
+farbbalken farbe farbewert feld feldstapel \
+festesspatium folgeprofil folgeprofilversion folgeversion footnotetext \
+forceblocks format formelnummer framedtext fussnote \
+fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
+gesamtseitenanzahl gestreckt getnumber gitter graufarbe \
+grauwert haarlinie hauptsprache headsym heutigesdatum \
+heutigeskopfnummer hintergrund hl hoch hoeheoben \
+hoeheunten holebeschriftung holepuffer imlinken imlinkenrand \
+immaumrise immediatebetweenlist immediatetolist imrechten imrechtenrand \
+imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation \
+ininner inlinkermarginale inmarginalie inneredgedistance inneredgewidth \
+innermargindistance innermarginwidth inouter inrechtermarginale installieresprache \
+interaktionsbalken interaktionsknopfe interaktionsmenue inzeile irgendwo \
+its kap keindimension keinebeschriftung keinebloeckemehr \
+keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keintest \
+keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf \
+komponente konvertierezahl kopf kopfniveau kopfnummer \
+kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld korrigierezwischenraum \
+label labeling labels labeltext leg \
+liniendicke linkemarginalafstand linkemarginalbreite linksbuendig listenbreite \
+listenhoehe listenlaenge listsymbol loadsorts loadsynonyms \
+mapfontsize mar marginalafstand marginalbreite marginallinie \
+marginaltext marginaltitel marginalwort mathematik maumrise \
+mediaeval menueknopf monat moveformula movesidefloat \
+nachunten name navigating nextsection nichteinziehen \
+nocap nokap nop notiz numberofsubpages \
+nummererhoehen outeredgedistance outeredgewidth outermargindistance outermarginwidth \
+overbar overbars overstrike overstrikes pagedepth \
+pageoffset papierbreite papierhoehe paragraph part \
+passelayoutan passendfeld placefloat placeheadnumber placeheadtext \
+placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
+placereferencelist placerule placetextvariable platzierebookmarks platziereformel \
+platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten \
+platzierenebeneinander platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste \
+pos position positiontext posten printpapierbreite \
+printpapierhoehe produkt programm projekt publikation \
+punkt ran randabstand randbreite rechteck \
+rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
+referenz referieren register registrierefelder reservefloat \
+resetnumber resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung \
+rumpfweite satzbreite satzhoehe schreibezumregister schreibezurliste \
+schreibezurreferenzliste schreibezwischenliste section seeregister seite \
+seitenreferenz seitenummer setnumber settext settextvariable \
+setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart setupfonthandling \
+setupfontsynonym setupforms setupindentedtext setupinterlinespace2 setupitemgroup \
+setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
+setuprule setupstartstop setupstrut setuptextposition setuptextvariable \
+showsymbolset sort spalte spatium spiegeln \
+sprache startabbildung startalignment startausrichtung startbuffer \
+startcolumnmakeup startcolumns startcolumnset startcombination startcomment \
+startdescription startdocument startdokument startenger startenumeration \
+startfarbe startfigure startfloattext startformula startframedtext \
+startgeg startgegenueber startglobal startgrosserdurchschuss starthiding \
+starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkodierung \
+startkombination startkomponente startlegend startline startlinecorrection \
+startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
+startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
+startmarginblock startnamemakeup startnarrower startopposite startoverlay \
+startoverview startparagraph startpositionieren startpositioning startpostponing \
+startprodukt startprofil startprofile startprojekt startraster \
+startregister startspalten startsymbolset startsynchronisation startsynchronization \
+starttabelle starttabellen starttable starttables starttabulate \
+starttext starttextlinie starttyping startueberblick startumbruch \
+startumgebung startunpacked startversion startzeile startzeilen \
+startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein stelleabsatznummerierungein \
+stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein stelleausgabeein \
+stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein \
+stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein \
+stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein stellefarbeein \
+stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein \
+stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein \
+stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \
+stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein \
+stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein \
+stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein \
+stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein \
+stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt \
+stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \
+stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein \
+stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein \
+stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein \
+stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein \
+stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \
+stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein \
+stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein \
+stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein \
+stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein \
+stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopausrichtung \
+stopbuffer stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
+stopcomment stopdescription stopdocument stopdokument stopenger \
+stopenumeration stopfarbe stopfigure stopfloattext stopformula \
+stopframedtext stopgeg stopgegenueber stopglobal stopgrosserdurchschuss \
+stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \
+stopkodierung stopkombination stopkomponente stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
+stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
+stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite \
+stopoverlay stopoverview stopparagraph stoppositionieren stoppositioning \
+stoppostponing stopprodukt stopprofil stopprofile stopprojekt \
+stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
+stoptabelle stoptabellen stoptable stoptables stoptabulate \
+stoptext stoptextlinie stoptyping stopueberblick stopumbruch \
+stopumgebung stopunpacked stopversion stopzeile stopzeilen \
+stopzeilenkorrektur stopzeilennumerierung stopzitat sub subject \
+subpagenumber subsection subsubject subsubsection subsubsubject \
+switchtorawfont sym symbol synchronisationsbalken synchronisieren \
+synonym tab teilegleitobjekt testcolumn testpage \
+tex textbreite texthoehe textlinie textreferenz \
+textvariable tief tiho tip tippedatei \
+tippen tippepuffer title tooltip txt \
+ueber ueberschrifttext uebersetzten umgebung umrahmt \
+unbekant underbar underbars unterformelnummer useXMLfilter \
+usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \
+vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version \
+verweis verweisdatum verwendeJSscript verwendeURL verwendebefehl \
+verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien \
+verwendeexternestonstueck verwendemodul verwendemodule verwendereferenzen verwendesymbole \
+verwendeurl vl volleswort von waehlebloeckeaus \
+waehlepapieraus waehleversionaus wechselezumfliesstext wiederholen wochentag \
+wohnort wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen \
+zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung \
+zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts \
+zeigeumbruch zentriert ziffern zitat zitieren \
+zu zurbox zurseite zwischenraum
+
keywordclass.context.en=\
CAP Cap Caps Character \
Characters MONTH Numbers Romannumerals WEEKDAY \
@@ -779,177 +453,505 @@ stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
stoptext stoptextrule stoptyping stopunpacked stopversion \
stretched sub subformulanumber subject subpagenumber \
subsection subsubject subsubsection subsubsubject switchtobodyfont \
-switchtorawfont sym symbol synchronizationbar synchronize \
-synonym tab testcolumn testpage tex \
-textheight textreference textrule textvariable textwidth \
-thinrule thinrules title tooltip topdistance \
-topheight topspace totalnumberofpages translate txt \
-typ type typebuffer typefile underbar \
-underbars unitmeaning unknown useJSscripts useURL \
-useXMLfilter useblocks usecommands usedirectory useencoding \
-useexternaldocument useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack \
-usemodule usemodules usereferences usespecials usesymbols \
-usetypescript usetypescriptfile useurl version vl \
-weekday whitespace wordright writebetweenlist writetolist \
-writetoreferencelist writetoregister
+switchtorawfont sym symbol symoffset synchronizationbar \
+synchronize synonym tab testcolumn testpage \
+tex textheight textreference textrule textvariable \
+textwidth thinrule thinrules title tooltip \
+topdistance topheight topspace totalnumberofpages translate \
+txt typ type typebuffer typefile \
+underbar underbars unitmeaning unknown useJSscripts \
+useURL useXMLfilter useblocks usecommands usedirectory \
+useencoding useexternaldocument useexternalfigure useexternalfile useexternalfiles \
+useexternalsoundtrack usemodule usemodules usereferences usespecials \
+usesymbols usetypescript usetypescriptfile useurl version \
+vl weekday whitespace wordright writebetweenlist \
+writetolist writetoreferencelist writetoregister
-keywordclass.context.ro=\
-CAP CUVANT CUVINTE Cap \
-Caps Cuvant Cuvinte KAP Kap \
-Kaps LUNA Litera Litere Numere \
-Numereromane ZIDINSAPTAMANA adapteazaaspect adubuffer adumarcaje \
-afiseazaaspect afiseazacampuri afiseazaculoare afiseazafiguriexterne afiseazafonttext \
-afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext afiseazapaleta \
-afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts afiseazatiparire \
-aliniat aliniatcentru aliniatdreapta aliniatstanga appendix \
-arg ascundeblocuri atleftmargin atrightmargin baraculoare \
-barainteractiune barasincronizare blanc but butoaneinteractiune \
-buton butonmeniu camp campumplere cap \
-chapter chem citat clip cloneazacamp \
-coloana comment comparagrupculoare comparapaleta completeazanumarpagina \
-completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister \
-componenta convertestenumar copiazacamp corecteazaspatiualb coupledregister \
-crlf culoare culoaregri cupleazadocument cupleazamarcaje \
-cupleazaregistru cutspace cuvantdreapta cuvantmarginal data \
-datacurenta datareferit decrementnumber decupleazamarcaje definebodyfontDEF \
-definebodyfontREF definecolumnbreak definecolumnset definecombination definedfont \
-definefontfeature definefonthandling defineindentedtext defineinmargin defineitemgroup \
-definelayer definelayout definemathalignment definepagebreak defineplacement \
-defineste definesteaccent definesteantet definesteblanc definestebloc \
-definesteblocsectiune definestebuffer definestecamp definestecampprincipal definestecaracter \
-definestecomanda definesteconversie definesteculoare definestedescriere definestedimensiunehartie \
-definesteenumerare definesteeticheta definestefloat definestefont definestefontraw \
-definestefonttext definesteformatreferinte definestegrupculori definestehbox definesteinconjurare \
-definestelista definestelistacombinata definestelistareferinte definestelogo definestemakeup \
-definestemarcaje definestemediulfonttext definestemeniuinteractiune definesteoutput definesteoverlay \
-definestepaleta definesteparagraf definestepozitietext definesteprofil definesteprogram \
-definestereferinte definesteregistru definesterigla definestesablontabel definestesectiune \
-definestesimbol definestesimbolfigura definestesinonim definestesinonimfont definestesortare \
-definestestartstop definestestil definestestilfont definestestivacampuri definestesubcamp \
-definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat definestetyping \
-definestevariabilatext definesteversiune definetype definetypeface description \
-despre determinacaracteristicilelistei determinacaracteristiciregistru determinanumartitlu dezactiveazameniuinteractiune \
-dimensiune din distantaantet distantacolt distantacoltdreapta \
-distantacoltstanga distantajos distantamargine distantamarginedreapta distantamarginestanga \
-distantasubsol distantasus domiciliu dute dutebox \
-dutepagina ecran el element emptylines \
-enumeration eticheta etichete fact faraaliniat \
-faradimensiune farafisiere faraliniiantetsisubsol faraliniisussijos faralista \
-faramarcaje faraspatiu faraspatiualb figuraexterna firdepar \
-folosesteURL folosestebloc folosestecodificarea folosestecomenzi folosestedirector \
-folosestedocumentextern folosestefiguraexterna folosestefisiereexterne folosestefisierextern folosestemodul \
-folosestemodule folosestemuzicaexterna folosestereferinte folosestescriptJS folosestesimboluri \
-folosestespeciale folosesteurl footnotetext forteazablocuri fractie \
-framed framedtext fundal gatablocuri getnumber \
-grid grosimelinie hartiedubla headsym hl \
-immediatebetweenlist immediatetolist impachetat impartefloat in \
-inalt inaltamargine inaltimeantet inaltimehartie inaltimehartieimprimanta \
-inaltimejos inaltimelista inaltimemakeup inaltimesubsol inaltimesus \
-inaltimetext indentation indreapta inframed ininner \
-injos inlinie inmaframed inmargineadreapta inmargineastanga \
+keywordclass.context.fr=\
+CAP Cap Caps Caractere \
+Caracteres Chiffresromains JOURSEMAINE MOIS MOT \
+MOTS Mot Mots Numeros a \
+adaptedisposition affectenumero affectevariabletexte ajustechamp alaligne \
+alapage aligneadroite aligneagauche aligneaumilieu appendix \
+arg arriereplan atleftmargin atrightmargin baha \
+barrecouleur barreinteraction barresynchronisation bas bouton \
+boutonmenu boutonsinteraction but cacheblocs cap \
+caractere caracteres champ changepolicebrute changepolicecorps \
+chapter chem chiffresromains citation citer \
+clip clonechamp colonne comment commentaire \
+comparegroupecouleur comparepalette completecombinedlist completelistoffloats completelistofsorts \
+completelistofsynonyms completenumeropage completeregistre composant composeenalinea \
+concernant convertitnumero copitchamp corrigeespaceblanc couleur \
+couleurgrise coupledocument coupledregister couplemarquage couplepapier \
+coupleregistre crlf cutspace dactylographier dans \
+dansautremarge dansborddroit dansbordgauche dansdroite dansgauche \
+dansmarge dansmargedroite dansmargegauche date datecourante \
+daterecommandation de decouplemarquage decrementenumero definebodyfontDEF \
+definebodyfontREF definecombination definedfont definefontfeature definefonthandling \
+defineframed defineframedtext defineindentedtext defineitemgroup definemathalignment \
+defineplacement definetypeface definicaractere definit definitaccent \
+definitbloc definitblocsection definitbuffer definitcalque definitchamp \
+definitchampprincipal definitcommande definitconversion definitcouleur definitdactylo \
+definitdansmarge definitdemarrestoppe definitdescription definitdisposition definitenumeration \
+definitenvironnementpolicecorps definitetiquette definitflottant definitformatreference definitgroupecouleur \
+definithbox definitjeucolonne definitliste definitlisteimbriquee definitlistereference \
+definitlogo definitmakeup definitmarquage definitmenuinteraction definitnotepdp \
+definitpalette definitparagraphes definitpilechamp definitpolice definitpolicebrute \
+definitpolicecorps definitpositiontexte definitprofil definitprogramme definitreference \
+definitregistre definitregle definitrevetement definitsautdecolonne definitsautdepage \
+definitsection definitsortie definitsouschamp definitstyle definitstylepolice \
+definitsymbole definitsymbolefigure definitsynonymepolice definitsynonymes definittabulation \
+definittaillepapier definittete definittexte definittrametableau definittri \
+definittype definitvariabletexte definitversion definitvide demarrealignement \
+demarrearriereplan demarreblocmarge demarrecitation demarreciter demarrecodage \
+demarrecolonnes demarrecombinaison demarrecompoetroite demarrecomposant demarrecorrectionligne \
+demarrecouleur demarredegroupe demarredocument demarreenvironement demarrefigure \
+demarreglobal demarregroupe demarrejeucolonne demarrejeusymboles demarreligne \
+demarreligneregleetexte demarrelignes demarrelocal demarremakeup demarremargereglee \
+demarrenotespdplocales demarrenumerotationligne demarreopposition demarrepositionnement demarreproduit \
+demarreprofil demarreprojet demarreraster demarrerevetement demarresynchronisation \
+demarretableau demarretableaux demarretexte demarreversion demarrevuedensemble \
+deplaceformule deplacesurgrille description determinecaracteristiqueliste determinecaracteristiquesregistre \
+determinenumerotete dimension distancebord distanceborddroit distancebordgauche \
+distanceentete distanceinf distancemarge distancemargedroite distancemargegauche \
+distancepdp distancesup domicile echelle ecran \
+ecritdansliste ecritdanslistereference ecritentreliste ecritregistre el \
+element elements emptylines enumeration environement \
+espace espaceblanc espacefixe espaceinf espacesfixes \
+espacesup etiquette etiquettes etire fait \
+faitreference fichierdactylo figureexterne forceblocs fraction \
+framed framedtext gardeblocs getnumber grille \
+groupe haut hauteureditionpapier hauteurentete hauteurinf \
+hauteurliste hauteurmakeup hauteurpapier hauteurpdp hauteursup \
+hauteurtexte headsym hl immediatebetweenlist immediatetolist \
+inconnu incrementenumero indentation inframed infull \
+inhibemenuinteraction ininner inmframed inneredgedistance inneredgewidth \
+innermargindistance innermarginwidth inouter installelangue joursemaine \
+labeling labeltexte langue langueprincipale largeurbord \
+largeurborddroit largeurbordgauche largeureditionpapier largeurligne largeurliste \
+largeurmakeup largeurmarge largeurmargedroite largeurmargegauche largeurpapier \
+largeurtexte leg ligneh lignenoire ligneregleetexte \
+lignesnoires listesymbole llongueurliste loadsorts loadsynonyms \
+logchamp mapfontsize mar margereglee marquage \
+marquageversion marquepage mathematique mediaeval menuinteraction \
+mframed mois montrecadre montrechamps montrecouleur \
+montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille \
+montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps \
+montrereglages montrestruts motdroit motmarge movesidefloat \
+name navigating nextsection niveautete nocap \
+nombredesouspages nombretotaldepages nommacro nop note \
+notepdp numeroformule numeropage numeros numerosousformule \
+numerotete numerotetecourant obtientmarquage oriente outeredgedistance \
+outeredgewidth outermargindistance outermarginwidth overbar overbars \
+overstrike overstrikes page pagedepth pagedouble \
+pageoffset paragraph part pasplusdeblocs pasplusdefichiers \
+periodes pilechamp placecoteacote placeflottant placeformule \
+placelegende placelesunsaudessusdesautres placeliste placelisteinmbriquee placelistereference \
+placelistoffloats placelistofsorts placelistofsynonyms placelogos placemarquespages \
+placenotespdp placenotespdplocales placenumeropage placenumerotete placerawlist \
+placeregistre placeregle placesousformule placesurgrille placetextetete \
+placevariabletexte position positionnetexte prendbuffer produit \
+programme projet publication qqpart ran \
+raz razmarquage raznumero recommandation ref \
+refait reference referencepage referencetexte reflete \
+register reglages reglealignement reglearrangement reglearriereplan \
+reglearriereplans reglebarreinteraction reglebarresynchronisation reglebloc regleblocmarge \
+regleblocsection regleboutons reglebuffer reglecapitales reglechamp \
+reglechamps regleclipping reglecolonnes reglecombinaisons reglecommentaire \
+reglecommentairepage reglecompoetroite reglecomposeenalinea reglecouleur reglecouleurs \
+regledactylo regledansmarge regledemarrestoppe regledescriptions regledisposition \
+regleecraninteraction regleecrans regleelements regleencadre regleentete \
+regleenumerations regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement \
+regleespacementinterligne reglefiguresexternes regleflottant regleflottants regleformulaires \
+regleformules reglegroupeselements regleinf regleinteraction regleintitule \
+regleintitules reglejeucolonne reglejeusymboles reglelabeltexte reglelangue \
+reglelegende reglelignes reglelignesnoires reglelignesreglestexte regleliste \
+reglelisteimbriquee reglelistereference reglemakeup reglemargereglee reglemarquage \
+reglemarquagehyphenation reglemenuinteraction reglenotepdp reglenumero reglenumeropage \
+reglenumerotation reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe reglenumerotete \
+regleoriente reglepalette reglepapier regleparagraphes reglepdp \
+regleplacementopposition reglepolicecorps reglepositionnement reglepositiontexte regleprofils \
+regleprogrammes reglepublications reglereferencage regleregistre regleregle \
+regleremplitligne regleremplitlignesreglees reglesection regleseparationflottant reglesortie \
+reglesouslignage reglesousnumeropage reglestrut reglesup reglesynchronisation \
+reglesynonymes reglesysteme regletab regletableaux regletabulation \
+regletaillepapier regletete regletetes regletexte regletextesentete \
+regletextesinf regletextespdp regletextessup regletextestexte regletextetete \
+regletolerance regletraitsfins regletransitionspage regletri regletype \
+regleurl reglevariabletexte regleversions remplitchamp remplitligne \
+remplitlignesreglees remplittexte reservefloat resettextcontent retourarriere \
+sansalinea sansdimension sansespace sansespaceblanc sanslignesenteteetpdp \
+sanslignessupetinf sansliste sansmarquage sanstest sauteblocs \
+section seeregister selectionneblocs selectionnepapier selectionneversion \
+sensunite separeflottant settext setupanswerarea setupcolumnsetlines \
+setupcolumnsetstart setupfonthandling setupfontsynonym setupframedtexts setupindentedtext \
+setupinterlinespace2 setupitemgroup setuplistalternative setupmathalignment setupplacement \
+sort sousnumeropage startalignment startarriereplan startbuffer \
+startcitation startcolumnmakeup startcolumns startcombination startcomment \
+startcomposant startcouleur startdescription startdocument startenumeration \
+startenvironement startfait startfigure startfloattext startformula \
+startframedtext startgroupe starthiding startitemgroup startlegend \
+startligneregleetexte startline startlinecorrection startlinenumbering startlines \
+startlocal startlocalenvironment startlocalfootnotes startmakeup startmargereglee \
+startmarginblock startmenuinteraction startnamemakeup startnarrower startopposite \
+startoverlay startoverview startparagraph startpositioning startpostponing \
+startproduit startprofile startprojet startregister startsymbolset \
+startsynchronization starttable starttables starttabulate starttyping \
+startunpacked startversion stopalignment stoparriereplan stopbuffer \
+stopcitation stopcolumnmakeup stopcolumns stopcombination stopcomment \
+stopcompoetroite stopcomposant stopcouleur stopdescription stopdocument \
+stopenumeration stopenvironement stopfait stopfigure stopfloattext \
+stopformula stopframedtext stopgroupe stophiding stopitemgroup \
+stoplegend stopligneregleetexte stopline stoplinecorrection stoplinenumbering \
+stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
+stopmargereglee stopmarginblock stopmenuinteraction stopnamemakeup stopnarrower \
+stopopposite stopoverlay stopoverview stopparagraph stoppealignement \
+stoppearriereplan stoppeblocmarge stoppecitation stoppecodage stoppecolonnes \
+stoppecombinaison stoppecomposant stoppecorrectionligne stoppecouleur stoppedegroupe \
+stoppedocument stoppeenvironement stoppeglobal stoppegroupe stoppejeucolonne \
+stoppeligne stoppeligneregleetexte stoppelignes stoppelocal stoppemakeup \
+stoppemargereglee stoppenotespdplocales stoppenumerotationligne stoppeopposition stoppepositionnement \
+stoppeproduit stoppeprofil stoppeprojet stopperaster stopperevetement \
+stoppesynchronisation stoppetableau stoppetableaux stoppetexte stoppeversion \
+stoppevuedensemble stoppositioning stoppostponing stopproduit stopprofile \
+stopprojet stopsymbolset stopsynchronization stoptable stoptables \
+stoptabulate stoptyping stopunpacked stopversion sub \
+subject subsection subsubject subsubsection subsubsubject \
+suggestion suivantprofil suivantversion suivantversionprofil sym \
+symbole synchronise synonym tab tapebuffer \
+testcolumn testpage tete tex textemarge \
+textenotepdp textetete title titremarge traduire \
+traiteblocs traitepage traitfin traitsfins txt \
+typ underbar underbars uneligne useXMLfilter \
+usedirectory usetypescript usetypescriptfile utiliseJSscripts utiliseURL \
+utiliseblocs utilisechemin utilisecommandes utilisedocumentexterne utiliseencodage \
+utilisefichierexterne utilisefichiersexternes utilisefigureexterne utilisemodule utilisemodules \
+utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles utiliseurl \
+va vaalaboite vaalapage vaenbas valeurcouleur \
+valeurgris variabletexte version vide vl
+
+keywordclass.context.it=\
+CAP Cap Caps GIORNOSETTIMANA \
+Lettera Lettere MESE Numeri Numeriromani \
+PAROLA PAROLE Parola Parole accoppiacarta \
+accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \
+adattalayout al allineacentro allineadestra allineasinistra \
+altezzacarta altezzacartastampa altezzacima altezzaelenco altezzafondo \
+altezzaintestazione altezzamakeup altezzapdp altezzatesto ambiente \
+ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta ampiezzacartastampa \
+ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro ampiezzamarginesinistro \
+ampiezzatesto ap apagina appendix arg \
+atleftmargin atrightmargin barracolori barrainterazione barrasincronizzazione \
+bastablocchi bastafile cambiaafontdeltesto campi camporiempimento \
+cap capello chapter chim circondato \
+citazione clip clonacampo colonna colore \
+coloregrigio comment commento completecombinedlist completelistoffloats \
+completelistofsorts completelistofsynonyms completeregister componenet confrontagruppocolori \
+confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister \
+crlf cutspace da daqualcheparte data \
+datadioggi datareferral decrementnumber definebodyfontDEF definebodyfontREF \
+definecolumnbreak definecombination definedfont definefontfeature definefonthandling \
+defineindentedtext defineinmargin defineitemgroup definelayer definemathalignment \
+definepagebreak defineplacement definetypeface definisci definisciaccento \
+definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo \
+definiscicampoprincipale definiscicapoversi definiscicarattere definiscicolore definiscicomando \
+definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato \
+definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto \
+definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne definiscigruppocolori definiscihbox \
+definisciincorniciato definisciiniziatermina definiscilayout definiscilinea definiscilistariferimenti \
+definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimodellotabella \
+definiscioggettomobile definisciordinamento definiscioutput definisciposizionetesto definisciprofilo \
+definisciprogramma definisciregistro definisciriferimento definiscirigovuoto definiscisezione \
+definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione \
+definiscistackcampi definiscistile definiscistilefont definiscitabulato definiscitavolozza \
+definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \
+definiscivariabiletesto definisciversion description determinacaratteristicheregistro determinacarattersticheelenco \
+determinanumerotesta dimensione disabilitamenuinterazione distanzabordo distanzabordodestro \
+distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
+distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
+elaborablocchi elaborapagina elementi elemento emptylines \
+enumeration etichetta etichette fatto figuraesterna \
+fondo forzablocchi framedtext frazione getnumber \
+giornosettimana griglia headsym hl ignoto \
+immediatebetweenlist immediatetolist impaccato impostaallineamento impostaambientefontdeltesto \
+impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco \
+impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi \
+impostacaption impostacaptions impostacima impostaclippling impostacolonne \
+impostacolore impostacolori impostacombinazioni impostacommento impostacommentopagina \
+impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelencazioni \
+impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo \
+impostafontdeltesto impostaforms impostaformule impostagruppocolonne impostaincorniciato \
+impostainiziatermina impostainmargine impostainstestazione impostainterazione impostainterlinea \
+impostalayout impostalegenda impostalinea impostalineemargine impostalineenere \
+impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti \
+impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione \
+impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe \
+impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \
+impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto \
+impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili impostaprogrammi \
+impostapubblicazioni impostapulsanti impostaregistro impostarientro impostariferimento \
+impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi \
+impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi \
+impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \
+impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab \
+impostatabelle impostatabulato impostatavolozza impostatesta impostateste \
+impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp \
+impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza \
+impostatransizionepagina impostatype impostatyping impostaurl impostavariabiletesto \
+impostaversioni impostazioni in inaltromargine incorniciato \
+incrementanumero indentation indestra ininner iniziaallineamento \
+iniziaambiente iniziabloccomargine iniziacitazione iniziacodifica iniziacolonne \
+iniziacolore iniziacombinazione iniziacomponente iniziacorrezioneriga iniziadocumento \
+iniziafigura iniziaglobale iniziagruppocolonne iniziaimpaccato inizialineamargine \
+inizialineatesto inizialocale iniziamakeup inizianotepdplocali inizianumerazionerighe \
+iniziaopposto iniziaoverview iniziapiustretto iniziaposizionamento iniziaprodotto \
+iniziaprofilo iniziaprogetto iniziaraster iniziariga iniziarighe \
+iniziasetsimboli iniziasfondo iniziasincronizzazione iniziasovrapposizione iniziatabella \
+iniziatabelle iniziatesto iniziaunpacked iniziaversione inlatodestro \
+inlatosinistro inmaframed inmargine inmarginedestro inmarginesinistro \
inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
-inparteadreapta inparteastanga instalarelimba instanga intins \
-jos jossus kap la labeling \
-lapagina latimecoltdreapta latimecoltstanga latimecolturi latimehartie \
-latimehartieimprimanta latimelista latimemakeup latimemargine latimemarginedreapta \
-latimemarginestanga latimetext leg limba limbaprincipala \
-liniemargine linieneagra liniesubtire linieumplere liniinegre \
-liniisubtiri listsymbol litera litere loadsorts \
-loadsynonyms logcampuri luna lungimelista maframed \
-mapfontsize mar marcaje marcheazaversiune marginal \
-matematica mediaeval mediu meniuinteractiune minicitat \
-moveformula movesidefloat mutapegrid name navigating \
-necunoscut nextsection niveltitlu nocap nokap \
-nop nota notasubsol numarformula numarincrement \
-numarpagina numarsubformula numartitlu numartitlucurent numartotalpagini \
-numberofsubpages nume numere numereromane numeunitate \
-nutesta olinie outeredgedistance outeredgewidth outermargindistance \
-outermarginwidth overbar overbars overstrike overstrikes \
-pagedepth pageoffset pagina paginadubla paragraph \
-part pastreazablocuri pelung placefloat placeheadnumber \
-placeheadtext placelistoffloats placelistofsorts placelistofsynonyms placerawlist \
-placereferencelist plaseazapegrid plaseazasemnecarte potrivestecamp pozitie \
-pozitietext proceseazabloc proceseazapagina produs program \
-proiect publicatie puncte punedeasuprafiecareia punefatainfata \
-puneformula punelegenda punelista punelistacombinata punelogouri \
-punenotesubsol punenotesubsollocale punenumarpagina puneregistru punerigla \
-punesubformula punevariabilatext ran ref refa \
-referinta referintapagina referintatext referit referring \
-reflexie register remarca reservefloat reset \
-reseteazamarcaje resetnumber resettextcontent riglatext rigleumplere \
-roteste saripesteblocuri scala scriebuffer scrieinlista \
-scrieinlistareferinte scrieinregistru scrieintreliste section seeregister \
-selecteazablocuri selecteazahartie selecteazaversiune semncarte setarebarasincronizare \
-setareitemization setarelimba setareoutput setarepozitie setaresincronizare \
-setari seteazaaliniat seteazaalinierea seteazaantet seteazaaranjareapag \
-seteazaaspect seteazabarainteractiune seteazablanc seteazabloc seteazablocsectiune \
-seteazablocurimarginale seteazabuffer seteazabutoane seteazacamp seteazacampuri \
-seteazaclipping seteazacoloane seteazacombinari seteazacomentariu seteazacomentariupagina \
-seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie \
-seteazaecrane seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne \
-seteazafloat seteazafloats seteazafonttext seteazaformulare seteazaformule \
-seteazafundal seteazafundaluri seteazagrosimelinie seteazaimpartireafloat seteazainconjurat \
-seteazaingust seteazainteractiunea seteazajos seteazalegenda seteazalegendele \
-seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine seteazaliniinegre \
-seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule \
-seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune \
-seteazaminicitat seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu \
-seteazanumerotare seteazanumerotarelinii seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta \
-seteazaparagrafe seteazaplasareaopozita seteazapozitietext seteazaprofile seteazaprograme \
-seteazapublicatii seteazareferinte seteazaregistru seteazarigla seteazarigletext \
-seteazarigleumplere seteazarotare seteazasectiune seteazasimbol seteazasinonime \
-seteazasistem seteazasortare seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar \
-seteazastrut seteazasublinie seteazasubsol seteazasus seteazatab \
-seteazatabele seteazatabulatori seteazatext seteazatexteantet seteazatextejos \
-seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu \
-seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype \
-seteazatyping seteazaurl seteazavariabilatext seteazaversiuni setnumber \
-settextcontent setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart \
+inriga insinistra installalingua intorno labeling \
+leg lettera lettere lineamargine lineanera \
+lineasottile lineatesto lineenere lineeriempimento lineesottili \
+lingua linguaprincipale listsymbol livellotesta loadsorts \
+loadsynonyms logcampi lunghezzaelenco maframed mapfontsize \
+mar marcatura marcaversione matematica mediaeval \
+menuinterattivo menzione mese mettielenco mettielencocombinato \
+mettifiancoafianco mettiformula mettiingriglia mettilegenda mettilinea \
+mettiloghi mettinotepdp mettinotepdplocali mettinumeropagina mettiregistro \
+mettisegnalibro mettisottoformula mettiunosullaltro mettivariabiletesto mostraambientefontdeltesto \
+mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
+mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
+mostrasetsimboli mostrastampa mostrastruts mostratavolozza movesidefloat \
+name nascondiblocchi navigating nextsection nientedimensioni \
+nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro \
+nientespazio nientespaziobianco nocap nome nomeunita \
+nop nota notapdp notest numberofsubpages \
+numeri numeriromani numeroformula numeropagina numeropaginacompleto \
+numerosottoformula numerotesta numerotestacorrente numerototaledipagine outeredgedistance \
+outeredgewidth outermargindistance outermarginwidth overbar overbars \
+overstrike overstrikes pagedepth pageoffset pagina \
+paragraph paroladestra parolainmargine part passaafontgrezzo \
+ped pedap perlungo placefloat placelistoffloats \
+placelistofsorts placelistofsynonyms placerawlist placereferencelist posizionanumerotesta \
+posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
+prodotto progetto programma pubblicazione pulsante \
+pulsantemenu pulsantinterazione punti qualcheriga ran \
+referral referring register reimposta reimpostamarcatura \
+reservefloat resetnumber resettextcontent rientro rif \
+rifai riferimento riferimentopagina riferimentotesto riflessione \
+rigariempimento rigovuoto ruota saltablocchi scala \
+schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro \
+section seeregister segnalibro seguiprofilo seguiversione \
+seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura \
+setnumber settext setupanswerarea setupcolumnsetlines setupcolumnsetstart \
setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
-setupstartstop setvariabilatext sim simbol sincronizeaza \
-sort spatiifixate spatiu spatiualb spatiufixat \
-spatiujos spatiuspate spatiusus startalignment startaliniere \
-startblocmarginal startbuffer startcitat startcodificare startcoloane \
-startcolumnmakeup startcolumns startcolumnset startcombinare startcombination \
-startcomment startcomponenta startcorectielinie startculoare startdescription \
-startdocument startenumeration startfact startfigura startfigure \
-startfloattext startformula startframedtext startfundal startglobal \
-starthiding startimpachetat startingust startitemgroup startlegend \
-startline startlinecorrection startlinenumbering startlines startlinie \
-startliniemargine startlinii startlocal startlocalenvironment startlocalfootnotes \
-startmakeup startmarginblock startmediu startmeniuinteractiune startnamemakeup \
-startnarrower startneimpachetat startnotesubsollocale startnumerotarelinii startopozit \
-startopposite startoverlay startoverview startparagraph startpositioning \
-startpostponing startpozitionare startprodus startprofil startprofile \
-startproiect startraster startregister startriglatext startsetsimboluri \
-startsincronizare startsymbolset startsynchronization starttabel starttabele \
-starttable starttables starttabulate starttext starttyping \
-startunpacked startversiune stivacampuri stopalignment stopaliniere \
-stopblobal stopblocmarginal stopbuffer stopcitat stopcodificare \
-stopcoloane stopcolumnmakeup stopcolumns stopcolumnset stopcombinare \
-stopcombination stopcomment stopcomponenta stopcorectielinie stopculoare \
-stopdescription stopdocument stopenumeration stopfact stopfigure \
-stopfloattext stopformula stopframedtext stopfundal stophiding \
-stopimpachetat stopingust stopitemgroup stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplinie stopliniemargine \
-stoplinii stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmarginblock stopmediu stopmeniuinteractiune stopnamemakeup stopnarrower \
-stopneimpachetat stopnotesubsollocale stopnumerotarelinii stopopozit stopopposite \
+setuplistalternative setupmathalignment setuppaper setupplacement setvariabiletesto \
+sfondo sim simbolo sincronizza sort \
+spazifissi spazio spaziobianco spaziocima spaziodietro \
+spaziofisso spaziofondo spessoreriga spezzaoggettomobile spostaagriglia \
+spostaformula stackcampi startalignment startambiente startbuffer \
+startcitazione startcolore startcolumnmakeup startcolumns startcombination \
+startcomment startcomponenet startdescription startdocument startenumeration \
+startfatto startfigure startfloattext startformula startframedtext \
+starthiding startimpaccato startitemgroup startlegend startline \
+startlineamargine startlineatesto startlinecorrection startlinenumbering startlines \
+startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
+startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay \
+startoverview startparagraph startpositioning startpostponing startprodotto \
+startprofile startprogetto startregister startsfondo startsymbolset \
+startsynchronization starttable starttables starttabulate starttyping \
+startunpacked startversione stirato stopalignment stopambiente \
+stopbuffer stopcitazione stopcolore stopcolumnmakeup stopcolumns \
+stopcombination stopcomment stopcomponenet stopdescription stopdocument \
+stopenumeration stopfatto stopfigure stopfloattext stopformula \
+stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \
+stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering \
+stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
+stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite \
stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
-stoppozitionare stopprodus stopprofil stopprofile stopproiect \
-stopraster stopriglatext stopsincronizare stopsymbolset stopsynchronization \
-stoptabel stoptabele stoptable stoptables stoptabulate \
-stoptext stoptyping stopunpacked stopversiune sub \
-subject subpagenumber subsection subsubject subsubsection \
-subsubsubject synonym tab testcolumn testpage \
-tex texteticheta textmarginal texttitlu textumplere \
-title titlu titlumarginal tooltip traduce \
-trecilafontraw trecilafonttext txt typ type \
-typefile underbar underbars undeva urmeazaprofil \
-urmeazaversiune urmeazaversiuneprofil useXMLfilter usedirectory usetypescript \
-usetypescriptfile valoareculoare valoaregri variabilatext versiune \
-vl zidinsaptamana
+stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset \
+stopsynchronization stoptable stoptables stoptabulate stoptyping \
+stopunpacked stopversione sub subject subpagenumber \
+subsection subsubject subsubsection subsubsubject synonym \
+tab terminaallineamento terminaambiente terminabloccomargine terminacitazione \
+terminacodifica terminacolonne terminacolore terminacombinazione terminacomponente \
+terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne terminaimpaccato \
+terminalineamargine terminalineatesto terminalocale terminamakeup terminanotepdplocali \
+terminanumerazionerighe terminaopposto terminaoverview terminapiustretto terminaposizionamento \
+terminaprodotto terminaprofili terminaprogetto terminaraster terminariga \
+terminarighe terminasfondo terminasincronizzazione terminasovrapposizione terminatabella \
+terminatabelle terminatesto terminaunpacked terminaversioni testa \
+testcolumn testoetichetta testoinmargine testoinstestazioni testonotapdp \
+testoriempimento testpage tex tieniblocchi title \
+titoloinmargine tooltip traduci txt typ \
+type typebuffer typefile underbar underbars \
+usaJSscripts usaURL usablocco usacartella usacodifica \
+usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni \
+usafileesterno usamoduli usamodulo usariferimenti usasimboli \
+usaspecialita usaurl useXMLfilter usedirectory usetypescript \
+usetypescriptfile vaia vaiabox vaiapagina vaigiu \
+valorecolore valoregrigio variabiletesto versione vl
+
+keywordclass.context.nl=\
+CAP Cap Caps Cijfers \
+KAP Kap Kaps Letter Letters \
+MAAND Romeins WEEKDAG WOORD WOORDEN \
+Woord Woorden aantalsubpaginas about achtergrond \
+appendix arg bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken \
+betekenis binnenmargeafstand binnenmargebreedte binnenrandafstand binnenrandbreedte \
+blanko blokje blokjes blokkeerinteractiemenu bodemwit \
+bookmark bovenafstand bovenhoogte breuk buitenmargeafstand \
+buitenmargebreedte buitenrandafstand buitenrandbreedte but button \
+cap chapter chem cijfers citaat \
+citeer clip comment completecombinedlist completelistoffloats \
+completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte \
+coupledregister crlf datum definebodyfontDEF definebodyfontREF \
+definedfont definefontfeature definefonthandling definerawfont definetypeface \
+definieer definieeraccent definieeralineas definieerbeeldmerk definieerblanko \
+definieerblok definieerbuffer definieercombinatie definieercommando definieerconversie \
+definieerfiguursymbool definieerfont definieerfontstijl definieerfontsynoniem definieerhbox \
+definieerhoofdveld definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep \
+definieerkadertekst definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep \
+definieerkolomovergang definieerkop definieerkorps definieerkorpsomgeving definieerlayer \
+definieerlayout definieerletter definieerlijn definieerlijst definieermarkering \
+definieeromlijnd definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet \
+definieerpapierformaat definieerplaats definieerplaatsblok definieerprofiel definieerprogramma \
+definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst \
+definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld \
+definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst \
+definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
+definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
+dimensie directnaarlijst directtussenlijst doordefinieren doorlabelen \
+doornummeren dunnelijn dunnelijnen eenregel enumeration \
+ergens externfiguur forceerblokken formulenummer framedtext \
+gebruikJSscripts gebruikURL gebruikXMLfilter gebruikblokken gebruikcommandos \
+gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment \
+gebruikgebied gebruikmodule gebruikmodules gebruikreferenties gebruikspecials \
+gebruiksymbolen gebruiktypescript gebruiktypescriptfile gebruikurl geenblokkenmeer \
+geenbovenenonderregels geendimensie geenfilesmeer geenhoofdenvoetregels geenlijst \
+geenmarkering geenspatie geentest geenwitruimte geg \
+grijskleur grijswaarde haalbuffer haalmarkering haalnummer \
+haarlijn handhaafblokken herhaal hl hoofdafstand \
+hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
+in inanderemarge inbinnen inbuiten indentation \
+inlijnd inlinker inlinkermarge inlinkerrand inmarge \
+inrechter inrechtermarge inrechterrand inregel inspringen \
+installeertaal instellingen interactiebalk interactiebuttons interactiemenu \
+invullijnen invulregel invultekst invulveld inwilijnd \
+items its kantlijn kap kenmerk \
+kenmerkdatum kentekstvariabeletoe kleur kleurenbalk kleurwaarde \
+kloonveld kolom kop kopniveau kopnummer \
+koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister \
+kopsym koptekst kopwit laag label \
+labeling labels labeltekst laho leg \
+legeregels letter letters lijndikte lijstbreedte \
+lijsthoogte lijstlengte lijstsymbool linkermargeafstand linkermargebreedte \
+linkerrandafstand linkerrandbreedte loadsorts loadsynonyms maand \
+mapfontsize mar margeafstand margebreedte margetekst \
+margetitel margewoord markeer markeerversie mediaeval \
+menubutton naam naar naarbox naarpagina \
+name navigerend nextsection nietinspringen nocap \
+nokap noot nop omgeving omlaag \
+omlijnd onbekend onderafstand onderdeel onderhoogte \
+ontkoppelmarkering op opelkaar oplinkermarge oppagina \
+oprechtermarge overbar overbars overstrike overstrikes \
+pagina paginadiepte paginanummer paginaoffset paginareferentie \
+papierbreedte papierhoogte paragraph part paslayoutaan \
+passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule \
+plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
+plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatspaginanummer \
+plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst plaatssamengesteldelijst \
+plaatssubformule plaatstekstvariabele plaatsvoetnoten placelistoffloats placelistofsorts \
+placelistofsynonyms positioneer positioneertekst printpapierbreedte printpapierhoogte \
+produkt programma projekt publicatie punten \
+ran randafstand randbreedte rechtermargeafstand rechtermargebreedte \
+rechterrandafstand rechterrandbreedte ref refereer referentie \
+regellinks regelmidden regelrechts register registreervelden \
+reservefloat reset resetmarkering resetnummer resettekstinhoud \
+resettextcontent romeins rooster roteer rugwit \
+schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister \
+schrijftussenlijst section seeregister selecteerblokken selecteerpapier \
+selecteerversie setnummer setupfonthandling setupfontsynonym setupinterlinespace2 \
+setuplistalternative snijwit som sort spatie \
+spiegel splitsplaatsblok startachtergrond startalignment startbuffer \
+startcitaat startcodering startcolumns startcombinatie startcombination \
+startcomment startdescription startdocument startenumeration startfigure \
+startfiguur startfloattext startformula startframedtext startgeg \
+startglobaal starthiding startinteractiemenu startitemgroup startkantlijn \
+startkleur startkolomgroep startkolommen startkolomopmaak startlegend \
+startline startlinecorrection startlinenumbering startlines startlocal \
+startlocalenvironment startlocalfootnotes startlokaal startlokalevoetnoten startmakeup \
+startmargeblok startmarginblock startnaast startnamemakeup startnarrower \
+startomgeving startonderdeel startopelkaar startopmaak startopposite \
+startoverlay startoverview startoverzicht startparagraph startpositioneren \
+startpositioning startpostponing startprodukt startprofiel startprofile \
+startprojekt startraster startregel startregelcorrectie startregelnummeren \
+startregels startregister startsmaller startsymbolset startsymboolset \
+startsynchronisatie startsynchronization starttabel starttabellen starttable \
+starttables starttabulate starttekst starttekstlijn starttyping \
+startuitlijnen startunpacked startvanelkaar startversie stelachtergrondenin \
+stelachtergrondin stelalineasin stelantwoordgebiedin stelarrangerenin stelblankoin \
+stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin \
+stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin \
+stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldunnelijnenin \
+stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin \
+stelingesprongentextin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein \
+stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \
+stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin \
+stelkleurenin stelkleurin stelkolomgroepin stelkolomgroepregelsin stelkolomgroepstartin \
+stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \
+stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin \
+stellegendain stellijndiktein stellijnin stellijstin stelmargeblokkenin \
+stelmarkeringin stelnaastplaatsenin stelnummerenin stelnummerin stelomlijndin \
+stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelopsommingenin \
+stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin \
+stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin \
+stelplaatsbloksplitsenin stelplaatsin stelpositionerenin stelprofielenin stelprogrammasin \
+stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin \
+stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin \
+stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstartstopin \
+stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein \
+stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \
+steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin steltekstpositiein \
+stelteksttekstenin steltekstvariabelein steltolerantiein steltypein steltypenin \
+steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin \
+stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \
+stelwiskundeuitlijnenin stelwitruimtein stopachtergrond stopalignment stopbuffer \
+stopcitaat stopcodering stopcolumns stopcombinatie stopcombination \
+stopcomment stopdescription stopdocument stopenumeration stopfigure \
+stopfloattext stopformula stopframedtext stopgeg stopglobaal \
+stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur \
+stopkolomgroep stopkolommen stopkolomopmaak stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
+stoplocalfootnotes stoplokaal stoplokalevoetnoten stopmakeup stopmargeblok \
+stopmarginblock stopnaast stopnamemakeup stopnarrower stopomgeving \
+stoponderdeel stopopelkaar stopopmaak stopopposite stopoverlay \
+stopoverview stopoverzicht stopparagraph stoppositioneren stoppositioning \
+stoppostponing stopprodukt stopprofiel stopprofile stopprojekt \
+stopraster stopregel stopregelcorrectie stopregelnummeren stopregels \
+stopsmaller stopsymbolset stopsynchronisatie stopsynchronization stoptabel \
+stoptabellen stoptable stoptables stoptabulate stoptekst \
+stoptekstlijn stoptyping stopuitlijnen stopunpacked stopvanelkaar \
+stopversie sub subformulenummer subject subpaginanummer \
+subsection subsubject subsubsection subsubsubject suggestie \
+switchnaarkorps switchtorawfont sym symbool symoffset \
+synchronisatiebalk synchroniseer synonym taal tab \
+tekstbreedte teksthoogte tekstlijn tekstreferentie tekstvariabele \
+testkolom testpagina tex title toelichting \
+toonexternefiguren toongrid tooninstellingen toonkader toonkleur \
+toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak \
+toonpalet toonprint toonstruts toonsymboolset toonvelden \
+totaalaantalpaginas txt typ type typebuffer \
+typefile uit uitgerekt underbar underbars \
+usecodering usedirectory vastespatie vastespaties veld \
+veldstapel verbergblokken vergelijkkleurgroep vergelijkpalet verhoognummer \
+verlaagnummer verplaatsformule verplaatsopgrid verplaatszijblok versie \
+vertaal verwerkblokken verwerkpagina vl voetafstand \
+voethoogte voetnoot voetnoottekst volgprofiel volgprofielversie \
+volgversie volledigepaginanummer volledigregister voluit weekdag \
+wilijnd wiskunde witruimte woonplaats woordrechts \
+zetbreedte zethoogte
keywordclass.context.pe=\
CAP Cap Caps Character \
@@ -1114,163 +1116,161 @@ useXMLfilter usedirectory useurl آفست‌صفحه آیتم \
کسر کشیده کلمه‌حاشیه کلمه‌راست گیره \
یادداشت یک‌جا یک‌خط
-keywordclass.context.nl=\
-CAP Cap Caps Cijfers \
-KAP Kap Kaps Letter Letters \
-MAAND Romeins WEEKDAG WOORD WOORDEN \
-Woord Woorden aantalsubpaginas about achtergrond \
-appendix arg bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken \
-betekenis binnenmargeafstand binnenmargebreedte binnenrandafstand binnenrandbreedte \
-blanko blokje blokjes blokkeerinteractiemenu bodemwit \
-bookmark bovenafstand bovenhoogte breuk buitenmargeafstand \
-buitenmargebreedte buitenrandafstand buitenrandbreedte but button \
-cap chapter chem cijfers citaat \
-citeer clip comment completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte \
-coupledregister crlf datum definebodyfontDEF definebodyfontREF \
-definedfont definefontfeature definefonthandling definerawfont definetypeface \
-definieer definieeraccent definieeralineas definieerbeeldmerk definieerblanko \
-definieerblok definieerbuffer definieercombinatie definieercommando definieerconversie \
-definieerfiguursymbool definieerfont definieerfontstijl definieerfontsynoniem definieerhbox \
-definieerhoofdveld definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep \
-definieerkadertekst definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep \
-definieerkolomovergang definieerkop definieerkorps definieerkorpsomgeving definieerlayer \
-definieerlayout definieerletter definieerlijn definieerlijst definieermarkering \
-definieeromlijnd definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet \
-definieerpapierformaat definieerplaats definieerplaatsblok definieerprofiel definieerprogramma \
-definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst \
-definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld \
-definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst \
-definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
-definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
-dimensie directnaarlijst directtussenlijst doordefinieren doorlabelen \
-doornummeren dunnelijn dunnelijnen eenregel enumeration \
-ergens externfiguur forceerblokken formulenummer framedtext \
-gebruikJSscripts gebruikURL gebruikXMLfilter gebruikblokken gebruikcommandos \
-gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment \
-gebruikgebied gebruikmodule gebruikmodules gebruikreferenties gebruikspecials \
-gebruiksymbolen gebruiktypescript gebruiktypescriptfile gebruikurl geenblokkenmeer \
-geenbovenenonderregels geendimensie geenfilesmeer geenhoofdenvoetregels geenlijst \
-geenmarkering geenspatie geentest geenwitruimte geg \
-grijskleur grijswaarde haalbuffer haalmarkering haalnummer \
-haarlijn handhaafblokken herhaal hl hoofdafstand \
-hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
-in inanderemarge inbinnen inbuiten indentation \
-inlijnd inlinker inlinkermarge inlinkerrand inmarge \
-inrechter inrechtermarge inrechterrand inregel inspringen \
-installeertaal instellingen interactiebalk interactiebuttons interactiemenu \
-invullijnen invulregel invultekst invulveld inwilijnd \
-items its kantlijn kap kenmerk \
-kenmerkdatum kentekstvariabeletoe kleur kleurenbalk kleurwaarde \
-kloonveld kolom kop kopniveau kopnummer \
-koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister \
-kopsym koptekst kopwit laag label \
-labeling labels labeltekst laho leg \
-legeregels letter letters lijndikte lijstbreedte \
-lijsthoogte lijstlengte lijstsymbool linkermargeafstand linkermargebreedte \
-linkerrandafstand linkerrandbreedte loadsorts loadsynonyms maand \
-mapfontsize mar margeafstand margebreedte margetekst \
-margetitel margewoord markeer markeerversie mediaeval \
-menubutton naam naar naarbox naarpagina \
-name navigerend nextsection nietinspringen nocap \
-nokap noot nop omgeving omlaag \
-omlijnd onbekend onderafstand onderdeel onderhoogte \
-ontkoppelmarkering op opelkaar oplinkermarge oppagina \
-oprechtermarge overbar overbars overstrike overstrikes \
-pagina paginadiepte paginanummer paginaoffset paginareferentie \
-papierbreedte papierhoogte paragraph part paslayoutaan \
-passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule \
-plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
-plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatspaginanummer \
-plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst plaatssamengesteldelijst \
-plaatssubformule plaatstekstvariabele plaatsvoetnoten placelistoffloats placelistofsorts \
-placelistofsynonyms positioneer positioneertekst printpapierbreedte printpapierhoogte \
-produkt programma projekt publicatie punten \
-ran randafstand randbreedte rechtermargeafstand rechtermargebreedte \
-rechterrandafstand rechterrandbreedte ref refereer referentie \
-regellinks regelmidden regelrechts register registreervelden \
-reservefloat reset resetmarkering resetnummer resettekstinhoud \
-resettextcontent romeins rooster roteer rugwit \
-schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister \
-schrijftussenlijst section seeregister selecteerblokken selecteerpapier \
-selecteerversie setnummer setupfonthandling setupfontsynonym setupinterlinespace2 \
-setuplistalternative snijwit som sort spatie \
-spiegel splitsplaatsblok startachtergrond startalignment startbuffer \
-startcitaat startcodering startcolumns startcombinatie startcombination \
-startcomment startdescription startdocument startenumeration startfigure \
-startfiguur startfloattext startformula startframedtext startgeg \
-startglobaal starthiding startinteractiemenu startitemgroup startkantlijn \
-startkleur startkolomgroep startkolommen startkolomopmaak startlegend \
-startline startlinecorrection startlinenumbering startlines startlocal \
-startlocalenvironment startlocalfootnotes startlokaal startlokalevoetnoten startmakeup \
-startmargeblok startmarginblock startnaast startnamemakeup startnarrower \
-startomgeving startonderdeel startopelkaar startopmaak startopposite \
-startoverlay startoverview startoverzicht startparagraph startpositioneren \
-startpositioning startpostponing startprodukt startprofiel startprofile \
-startprojekt startraster startregel startregelcorrectie startregelnummeren \
-startregels startregister startsmaller startsymbolset startsymboolset \
-startsynchronisatie startsynchronization starttabel starttabellen starttable \
-starttables starttabulate starttekst starttekstlijn starttyping \
-startuitlijnen startunpacked startvanelkaar startversie stelachtergrondenin \
-stelachtergrondin stelalineasin stelantwoordgebiedin stelarrangerenin stelblankoin \
-stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin \
-stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin \
-stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldunnelijnenin \
-stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin \
-stelingesprongentextin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein \
-stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \
-stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin \
-stelkleurenin stelkleurin stelkolomgroepin stelkolomgroepregelsin stelkolomgroepstartin \
-stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \
-stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin \
-stellegendain stellijndiktein stellijnin stellijstin stelmargeblokkenin \
-stelmarkeringin stelnaastplaatsenin stelnummerenin stelnummerin stelomlijndin \
-stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelopsommingenin \
-stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin \
-stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin \
-stelplaatsbloksplitsenin stelplaatsin stelpositionerenin stelprofielenin stelprogrammasin \
-stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin \
-stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin \
-stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstartstopin \
-stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein \
-stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \
-steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin steltekstpositiein \
-stelteksttekstenin steltekstvariabelein steltolerantiein steltypein steltypenin \
-steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin \
-stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \
-stelwiskundeuitlijnenin stelwitruimtein stopachtergrond stopalignment stopbuffer \
-stopcitaat stopcodering stopcolumns stopcombinatie stopcombination \
-stopcomment stopdescription stopdocument stopenumeration stopfigure \
-stopfloattext stopformula stopframedtext stopgeg stopglobaal \
-stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur \
-stopkolomgroep stopkolommen stopkolomopmaak stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokaal stoplokalevoetnoten stopmakeup stopmargeblok \
-stopmarginblock stopnaast stopnamemakeup stopnarrower stopomgeving \
-stoponderdeel stopopelkaar stopopmaak stopopposite stopoverlay \
-stopoverview stopoverzicht stopparagraph stoppositioneren stoppositioning \
-stoppostponing stopprodukt stopprofiel stopprofile stopprojekt \
-stopraster stopregel stopregelcorrectie stopregelnummeren stopregels \
-stopsmaller stopsymbolset stopsynchronisatie stopsynchronization stoptabel \
-stoptabellen stoptable stoptables stoptabulate stoptekst \
-stoptekstlijn stoptyping stopuitlijnen stopunpacked stopvanelkaar \
-stopversie sub subformulenummer subject subpaginanummer \
-subsection subsubject subsubsection subsubsubject suggestie \
-switchnaarkorps switchtorawfont sym symbool synchronisatiebalk \
-synchroniseer synonym taal tab tekstbreedte \
-teksthoogte tekstlijn tekstreferentie tekstvariabele testkolom \
-testpagina tex title toelichting toonexternefiguren \
-toongrid tooninstellingen toonkader toonkleur toonkleurgroep \
-toonkorps toonkorpsomgeving toonlayout toonopmaak toonpalet \
-toonprint toonstruts toonsymboolset toonvelden totaalaantalpaginas \
-txt typ type typebuffer typefile \
-uit uitgerekt underbar underbars usecodering \
-usedirectory vastespatie vastespaties veld veldstapel \
-verbergblokken vergelijkkleurgroep vergelijkpalet verhoognummer verlaagnummer \
-verplaatsformule verplaatsopgrid verplaatszijblok versie vertaal \
-verwerkblokken verwerkpagina vl voetafstand voethoogte \
-voetnoot voetnoottekst volgprofiel volgprofielversie volgversie \
-volledigepaginanummer volledigregister voluit weekdag wilijnd \
-wiskunde witruimte woonplaats woordrechts zetbreedte \
-zethoogte
+keywordclass.context.ro=\
+CAP CUVANT CUVINTE Cap \
+Caps Cuvant Cuvinte KAP Kap \
+Kaps LUNA Litera Litere Numere \
+Numereromane ZIDINSAPTAMANA adapteazaaspect adubuffer adumarcaje \
+afiseazaaspect afiseazacampuri afiseazaculoare afiseazafiguriexterne afiseazafonttext \
+afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext afiseazapaleta \
+afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts afiseazatiparire \
+aliniat aliniatcentru aliniatdreapta aliniatstanga appendix \
+arg ascundeblocuri atleftmargin atrightmargin baraculoare \
+barainteractiune barasincronizare blanc but butoaneinteractiune \
+buton butonmeniu camp campumplere cap \
+chapter chem citat clip cloneazacamp \
+coloana comment comparagrupculoare comparapaleta completeazanumarpagina \
+completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister \
+componenta convertestenumar copiazacamp corecteazaspatiualb coupledregister \
+crlf culoare culoaregri cupleazadocument cupleazamarcaje \
+cupleazaregistru cutspace cuvantdreapta cuvantmarginal data \
+datacurenta datareferit decrementnumber decupleazamarcaje definebodyfontDEF \
+definebodyfontREF definecolumnbreak definecolumnset definecombination definedfont \
+definefontfeature definefonthandling defineindentedtext defineinmargin defineitemgroup \
+definelayer definelayout definemathalignment definepagebreak defineplacement \
+defineste definesteaccent definesteantet definesteblanc definestebloc \
+definesteblocsectiune definestebuffer definestecamp definestecampprincipal definestecaracter \
+definestecomanda definesteconversie definesteculoare definestedescriere definestedimensiunehartie \
+definesteenumerare definesteeticheta definestefloat definestefont definestefontraw \
+definestefonttext definesteformatreferinte definestegrupculori definestehbox definesteinconjurare \
+definestelista definestelistacombinata definestelistareferinte definestelogo definestemakeup \
+definestemarcaje definestemediulfonttext definestemeniuinteractiune definesteoutput definesteoverlay \
+definestepaleta definesteparagraf definestepozitietext definesteprofil definesteprogram \
+definestereferinte definesteregistru definesterigla definestesablontabel definestesectiune \
+definestesimbol definestesimbolfigura definestesinonim definestesinonimfont definestesortare \
+definestestartstop definestestil definestestilfont definestestivacampuri definestesubcamp \
+definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat definestetyping \
+definestevariabilatext definesteversiune definetype definetypeface description \
+despre determinacaracteristicilelistei determinacaracteristiciregistru determinanumartitlu dezactiveazameniuinteractiune \
+dimensiune din distantaantet distantacolt distantacoltdreapta \
+distantacoltstanga distantajos distantamargine distantamarginedreapta distantamarginestanga \
+distantasubsol distantasus domiciliu dute dutebox \
+dutepagina ecran el element emptylines \
+enumeration eticheta etichete fact faraaliniat \
+faradimensiune farafisiere faraliniiantetsisubsol faraliniisussijos faralista \
+faramarcaje faraspatiu faraspatiualb figuraexterna firdepar \
+folosesteURL folosestebloc folosestecodificarea folosestecomenzi folosestedirector \
+folosestedocumentextern folosestefiguraexterna folosestefisiereexterne folosestefisierextern folosestemodul \
+folosestemodule folosestemuzicaexterna folosestereferinte folosestescriptJS folosestesimboluri \
+folosestespeciale folosesteurl footnotetext forteazablocuri fractie \
+framed framedtext fundal gatablocuri getnumber \
+grid grosimelinie hartiedubla headsym hl \
+immediatebetweenlist immediatetolist impachetat impartefloat in \
+inalt inaltamargine inaltimeantet inaltimehartie inaltimehartieimprimanta \
+inaltimejos inaltimelista inaltimemakeup inaltimesubsol inaltimesus \
+inaltimetext indentation indreapta inframed ininner \
+injos inlinie inmaframed inmargineadreapta inmargineastanga \
+inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
+inparteadreapta inparteastanga instalarelimba instanga intins \
+jos jossus kap la labeling \
+lapagina latimecoltdreapta latimecoltstanga latimecolturi latimehartie \
+latimehartieimprimanta latimelista latimemakeup latimemargine latimemarginedreapta \
+latimemarginestanga latimetext leg limba limbaprincipala \
+liniemargine linieneagra liniesubtire linieumplere liniinegre \
+liniisubtiri listsymbol litera litere loadsorts \
+loadsynonyms logcampuri luna lungimelista maframed \
+mapfontsize mar marcaje marcheazaversiune marginal \
+matematica mediaeval mediu meniuinteractiune minicitat \
+moveformula movesidefloat mutapegrid name navigating \
+necunoscut nextsection niveltitlu nocap nokap \
+nop nota notasubsol numarformula numarincrement \
+numarpagina numarsubformula numartitlu numartitlucurent numartotalpagini \
+numberofsubpages nume numere numereromane numeunitate \
+nutesta olinie outeredgedistance outeredgewidth outermargindistance \
+outermarginwidth overbar overbars overstrike overstrikes \
+pagedepth pageoffset pagina paginadubla paragraph \
+part pastreazablocuri pelung placefloat placeheadnumber \
+placeheadtext placelistoffloats placelistofsorts placelistofsynonyms placerawlist \
+placereferencelist plaseazapegrid plaseazasemnecarte potrivestecamp pozitie \
+pozitietext proceseazabloc proceseazapagina produs program \
+proiect publicatie puncte punedeasuprafiecareia punefatainfata \
+puneformula punelegenda punelista punelistacombinata punelogouri \
+punenotesubsol punenotesubsollocale punenumarpagina puneregistru punerigla \
+punesubformula punevariabilatext ran ref refa \
+referinta referintapagina referintatext referit referring \
+reflexie register remarca reservefloat reset \
+reseteazamarcaje resetnumber resettextcontent riglatext rigleumplere \
+roteste saripesteblocuri scala scriebuffer scrieinlista \
+scrieinlistareferinte scrieinregistru scrieintreliste section seeregister \
+selecteazablocuri selecteazahartie selecteazaversiune semncarte setarebarasincronizare \
+setareitemization setarelimba setareoutput setarepozitie setaresincronizare \
+setari seteazaaliniat seteazaalinierea seteazaantet seteazaaranjareapag \
+seteazaaspect seteazabarainteractiune seteazablanc seteazabloc seteazablocsectiune \
+seteazablocurimarginale seteazabuffer seteazabutoane seteazacamp seteazacampuri \
+seteazaclipping seteazacoloane seteazacombinari seteazacomentariu seteazacomentariupagina \
+seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie \
+seteazaecrane seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne \
+seteazafloat seteazafloats seteazafonttext seteazaformulare seteazaformule \
+seteazafundal seteazafundaluri seteazagrosimelinie seteazaimpartireafloat seteazainconjurat \
+seteazaingust seteazainteractiunea seteazajos seteazalegenda seteazalegendele \
+seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine seteazaliniinegre \
+seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule \
+seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune \
+seteazaminicitat seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu \
+seteazanumerotare seteazanumerotarelinii seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta \
+seteazaparagrafe seteazaplasareaopozita seteazapozitietext seteazaprofile seteazaprograme \
+seteazapublicatii seteazareferinte seteazaregistru seteazarigla seteazarigletext \
+seteazarigleumplere seteazarotare seteazasectiune seteazasimbol seteazasinonime \
+seteazasistem seteazasortare seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar \
+seteazastrut seteazasublinie seteazasubsol seteazasus seteazatab \
+seteazatabele seteazatabulatori seteazatext seteazatexteantet seteazatextejos \
+seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu \
+seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype \
+seteazatyping seteazaurl seteazavariabilatext seteazaversiuni setnumber \
+settextcontent setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart \
+setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
+setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
+setupstartstop setvariabilatext sim simbol sincronizeaza \
+sort spatiifixate spatiu spatiualb spatiufixat \
+spatiujos spatiuspate spatiusus startalignment startaliniere \
+startblocmarginal startbuffer startcitat startcodificare startcoloane \
+startcolumnmakeup startcolumns startcolumnset startcombinare startcombination \
+startcomment startcomponenta startcorectielinie startculoare startdescription \
+startdocument startenumeration startfact startfigura startfigure \
+startfloattext startformula startframedtext startfundal startglobal \
+starthiding startimpachetat startingust startitemgroup startlegend \
+startline startlinecorrection startlinenumbering startlines startlinie \
+startliniemargine startlinii startlocal startlocalenvironment startlocalfootnotes \
+startmakeup startmarginblock startmediu startmeniuinteractiune startnamemakeup \
+startnarrower startneimpachetat startnotesubsollocale startnumerotarelinii startopozit \
+startopposite startoverlay startoverview startparagraph startpositioning \
+startpostponing startpozitionare startprodus startprofil startprofile \
+startproiect startraster startregister startriglatext startsetsimboluri \
+startsincronizare startsymbolset startsynchronization starttabel starttabele \
+starttable starttables starttabulate starttext starttyping \
+startunpacked startversiune stivacampuri stopalignment stopaliniere \
+stopblobal stopblocmarginal stopbuffer stopcitat stopcodificare \
+stopcoloane stopcolumnmakeup stopcolumns stopcolumnset stopcombinare \
+stopcombination stopcomment stopcomponenta stopcorectielinie stopculoare \
+stopdescription stopdocument stopenumeration stopfact stopfigure \
+stopfloattext stopformula stopframedtext stopfundal stophiding \
+stopimpachetat stopingust stopitemgroup stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplinie stopliniemargine \
+stoplinii stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
+stopmarginblock stopmediu stopmeniuinteractiune stopnamemakeup stopnarrower \
+stopneimpachetat stopnotesubsollocale stopnumerotarelinii stopopozit stopopposite \
+stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
+stoppozitionare stopprodus stopprofil stopprofile stopproiect \
+stopraster stopriglatext stopsincronizare stopsymbolset stopsynchronization \
+stoptabel stoptabele stoptable stoptables stoptabulate \
+stoptext stoptyping stopunpacked stopversiune sub \
+subject subpagenumber subsection subsubject subsubsection \
+subsubsubject synonym tab testcolumn testpage \
+tex texteticheta textmarginal texttitlu textumplere \
+title titlu titlumarginal tooltip traduce \
+trecilafontraw trecilafonttext txt typ type \
+typefile underbar underbars undeva urmeazaprofil \
+urmeazaversiune urmeazaversiuneprofil useXMLfilter usedirectory usetypescript \
+usetypescriptfile valoareculoare valoaregri variabilatext versiune \
+vl zidinsaptamana
diff --git a/Master/texmf-dist/context/data/scite/scite-context-data-metafun.properties b/Master/texmf-dist/context/data/scite/scite-context-data-metafun.properties
index e0413b2dd75..b20b3e856e8 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-data-metafun.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-data-metafun.properties
@@ -1,10 +1,3 @@
-keywordclass.metafun.internals=\
-nocolormodel greycolormodel graycolormodel rgbcolormodel \
-cmykcolormodel shadefactor textextoffset normaltransparent multiplytransparent \
-screentransparent overlaytransparent softlighttransparent hardlighttransparent colordodgetransparent \
-colorburntransparent darkentransparent lightentransparent differencetransparent exclusiontransparent \
-huetransparent saturationtransparent colortransparent luminositytransparent
-
keywordclass.metafun.commands=\
sqr log ln exp \
inv pow pi radian tand \
@@ -33,21 +26,30 @@ grayed greyed onlayer along graphictext \
loadfigure externalfigure withmask figure register \
bitmapimage colordecimals ddecimal dddecimal ddddecimal \
textext thetextext rawtextext textextoffset verbatim \
-thelabel label transparent withtransparency asgroup \
-infont set_linear_vector linear_shade define_linear_shade define_circular_linear_shade \
-define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade define_circular_linear_shade \
-define_sampled_circular_shade space CRLF grayscale greyscale \
-withgray withgrey colorpart readfile clearxy \
-unitvector center epsed anchored originpath \
-infinite break xstretched ystretched snapped \
-pathconnectors function constructedpath constructedpairs punkedfunction \
-curvedfunction tightfunction punkedpath curvedpath tightpath \
-punkedpairs curvedpairs tightpairs evenly oddly \
-condition pushcurrentpicture popcurrentpicture arrowpath tensecircle \
-roundedsquare colortype whitecolor blackcolor normalfill \
-normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
-visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
-drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
-drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
-drawboundoptions drawpathoptions resetdrawoptions
+thelabel label autoalign transparent withtransparency \
+property properties withproperties asgroup infont \
+set_linear_vector linear_shade define_linear_shade define_circular_linear_shade define_sampled_linear_shade \
+set_circular_vector circular_shade define_circular_shade define_circular_linear_shade define_sampled_circular_shade \
+space CRLF grayscale greyscale withgray \
+withgrey colorpart readfile clearxy unitvector \
+center epsed anchored originpath infinite \
+break xstretched ystretched snapped pathconnectors \
+function constructedpath constructedpairs punkedfunction curvedfunction \
+tightfunction punkedpath curvedpath tightpath punkedpairs \
+curvedpairs tightpairs evenly oddly condition \
+pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
+colortype whitecolor blackcolor normalfill normaldraw \
+visualizepaths naturalizepaths drawboundary drawwholepath visualizeddraw \
+visualizedfill draworigin drawboundingbox drawpath drawpoint \
+drawpoints drawcontrolpoints drawcontrollines drawpointlabels drawlineoptions \
+drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions drawboundoptions \
+drawpathoptions resetdrawoptions decorated redecorated undecorated
+
+keywordclass.metafun.internals=\
+nocolormodel greycolormodel graycolormodel rgbcolormodel \
+cmykcolormodel shadefactor textextoffset normaltransparent multiplytransparent \
+screentransparent overlaytransparent softlighttransparent hardlighttransparent colordodgetransparent \
+colorburntransparent darkentransparent lightentransparent differencetransparent exclusiontransparent \
+huetransparent saturationtransparent colortransparent luminositytransparent metapostversion \
+maxdimensions
diff --git a/Master/texmf-dist/context/data/scite/scite-context-data-metapost.properties b/Master/texmf-dist/context/data/scite/scite-context-data-metapost.properties
index d2027c9564e..5566a3865ea 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-data-metapost.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-data-metapost.properties
@@ -1,54 +1,3 @@
-keywordclass.metapost.primitives=\
-charcode day linecap linejoin \
-miterlimit month pausing prologues showstopping \
-time tracingcapsules tracingchoices mpprocset tracingcommands \
-tracingequations tracinglostchars tracingmacros tracingonline tracingoutput \
-tracingrestores tracingspecs tracingstats tracingtitles truecorners \
-warningcheck year false nullpicture pencircle \
-true and angle arclength arctime \
-ASCII boolean bot char color \
-cosd cycle decimal directiontime floor \
-fontsize hex infont intersectiontimes known \
-length llcorner lrcorner makepath makepen \
-mexp mlog normaldeviate not numeric \
-oct odd or path pair \
-pen penoffset picture point postcontrol \
-precontrol reverse rotated scaled shifted \
-sind slanted sqrt str string \
-subpath substring transform transformed ulcorner \
-uniformdeviate unknown urcorner xpart xscaled \
-xxpart xypart ypart yscaled yxpart \
-yypart zscaled addto clip input \
-interim let newinternal save setbounds \
-shipout show showdependencies showtoken showvariable \
-special begingroup endgroup of curl \
-tension and controls interpath on \
-off def vardef enddef expr \
-suffix text primary secondary tertiary \
-primarydef secondarydef tertiarydef randomseed also \
-contour doublepath withcolor withpen dashed \
-if else elseif fi for \
-endfor forever exitif forsuffixes downto \
-upto step until charlist extensible \
-fontdimen headerbyte kern ligtable boundarychar \
-chardp charext charht charic charwd \
-designsize fontmaking charexists cullit currenttransform \
-gfcorners grayfont hround imagerules lowres_fix \
-nodisplays notransforms openit displaying currentwindow \
-screen_rows screen_cols pixels_per_inch cull display \
-openwindow numspecial totalweight autorounding fillin \
-proofing tracingpens xoffset chardx granularity \
-smoothing turningcheck yoffset chardy hppp \
-tracingedges vppp extra_beginfig extra_endfig mpxbreak \
-endinput message delimiters turningnumber errmessage \
-readstring scantokens end outer inner \
-write to readfrom withprescript withpostscript \
-top bot lft rt ulft \
-urt llft lrt redpart greenpart \
-bluepart cyanpart magentapart yellowpart blackpart \
-greypart rgbcolor cmykcolor greycolor graycolor \
-colormodel graypart expandafter
-
keywordclass.metapost.commands=\
beginfig endfig rotatedaround reflectedabout \
arrowhead currentpen currentpicture cuttings defaultfont \
@@ -80,9 +29,6 @@ yellow black white background graypart \
graycolor mm pt dd bp \
cm pc cc in
-keywordclass.metapost.tex=\
-btex etex verbatimtex
-
keywordclass.metapost.internals=\
mitered rounded beveled butt \
squared eps epsilon infinity bboxmargin \
@@ -90,7 +36,63 @@ ahlength ahangle labeloffset dotlabeldiam defaultpen \
defaultscale join_radius pen_lft pen_rt pen_top \
pen_bot
+keywordclass.metapost.primitives=\
+charcode day linecap linejoin \
+miterlimit month pausing prologues showstopping \
+time tracingcapsules tracingchoices mpprocset tracingcommands \
+tracingequations tracinglostchars tracingmacros tracingonline tracingoutput \
+tracingrestores tracingspecs tracingstats tracingtitles truecorners \
+warningcheck year false nullpicture pencircle \
+true and angle arclength arctime \
+ASCII boolean bot char color \
+cosd cycle decimal directiontime floor \
+fontsize hex infont intersectiontimes known \
+length llcorner lrcorner makepath makepen \
+mexp mlog normaldeviate not numeric \
+oct odd or path pair \
+pen penoffset picture point postcontrol \
+precontrol reverse rotated scaled shifted \
+sind slanted sqrt str string \
+subpath substring transform transformed ulcorner \
+uniformdeviate unknown urcorner xpart xscaled \
+xxpart xypart ypart yscaled yxpart \
+yypart zscaled addto clip input \
+interim let newinternal save setbounds \
+shipout show showdependencies showtoken showvariable \
+special begingroup endgroup of curl \
+tension and controls interpath on \
+off def vardef enddef expr \
+suffix text primary secondary tertiary \
+primarydef secondarydef tertiarydef randomseed also \
+contour doublepath withcolor withpen dashed \
+if else elseif fi for \
+endfor forever exitif within forsuffixes \
+downto upto step until charlist \
+extensible fontdimen headerbyte kern ligtable \
+boundarychar chardp charext charht charic \
+charwd designsize fontmaking charexists cullit \
+currenttransform gfcorners grayfont hround imagerules \
+lowres_fix nodisplays notransforms openit displaying \
+currentwindow screen_rows screen_cols pixels_per_inch cull \
+display openwindow numspecial totalweight autorounding \
+fillin proofing tracingpens xoffset chardx \
+granularity smoothing turningcheck yoffset chardy \
+hppp tracingedges vppp extra_beginfig extra_endfig \
+mpxbreak endinput message delimiters turningnumber \
+errmessage readstring scantokens end outer \
+inner write to readfrom withprescript \
+withpostscript top bot lft rt \
+ulft urt llft lrt redpart \
+greenpart bluepart cyanpart magentapart yellowpart \
+blackpart greypart rgbcolor cmykcolor greycolor \
+graycolor colormodel graypart dashpart penpart \
+stroked filled textual clipped bounded \
+expandafter
+
keywordclass.metapost.shortcuts=\
.. ... -- --- \
&
+keywordclass.metapost.tex=\
+btex etex verbatimtex
+
diff --git a/Master/texmf-dist/context/data/scite/scite-context-data-tex.properties b/Master/texmf-dist/context/data/scite/scite-context-data-tex.properties
index 0f63b994dee..195125433e1 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-data-tex.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-data-tex.properties
@@ -1,3 +1,8 @@
+keywordclass.tex.aleph=\
+AlephVersion Alephminorversion Alephrevision Alephversion \
+Omegaminorversion Omegarevision Omegaversion boxdir pagebottomoffset \
+pagerightoffset
+
keywordclass.tex.etex=\
botmarks clubpenalties currentgrouplevel currentgrouptype \
currentifbranch currentiflevel currentiftype detokenize dimexpr \
@@ -13,6 +18,51 @@ showtokens splitbotmarks splitdiscards splitfirstmarks topmarks \
tracingassigns tracinggroups tracingifs tracingnesting tracingscantokens \
unexpanded unless widowpenalties
+keywordclass.tex.luatex=\
+Uchar Udelcode Udelcodenum Udelimiter \
+Udelimiterover Udelimiterunder Umathaccent Umathaxis Umathbinbinspacing \
+Umathbinclosespacing Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing \
+Umathbinpunctspacing Umathbinrelspacing Umathchar Umathchardef Umathcharnum \
+Umathclosebinspacing Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing Umathcloseopspacing \
+Umathcloseordspacing Umathclosepunctspacing Umathcloserelspacing Umathcode Umathcodenum \
+Umathconnectoroverlapmin Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap Umathfractionnumup \
+Umathfractionnumvgap Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing Umathinnerinnerspacing \
+Umathinneropenspacing Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing Umathinnerrelspacing \
+Umathlimitabovebgap Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap Umathlimitbelowkern \
+Umathlimitbelowvgap Umathopbinspacing Umathopclosespacing Umathopenbinspacing Umathopenclosespacing \
+Umathopeninnerspacing Umathopenopenspacing Umathopenopspacing Umathopenordspacing Umathopenpunctspacing \
+Umathopenrelspacing Umathoperatorsize Umathopinnerspacing Umathopopenspacing Umathopopspacing \
+Umathopordspacing Umathoppunctspacing Umathoprelspacing Umathordbinspacing Umathordclosespacing \
+Umathordinnerspacing Umathordopenspacing Umathordopspacing Umathordordspacing Umathordpunctspacing \
+Umathordrelspacing Umathoverbarkern Umathoverbarrule Umathoverbarvgap Umathoverdelimiterbgap \
+Umathoverdelimitervgap Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing Umathpunctopenspacing \
+Umathpunctopspacing Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing Umathquad \
+Umathradicaldegreeafter Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern Umathradicalrule \
+Umathradicalvgap Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing Umathrelopenspacing \
+Umathrelopspacing Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing Umathspaceafterscript \
+Umathstackdenomdown Umathstacknumup Umathstackvgap Umathsubshiftdown Umathsubshiftdrop \
+Umathsubsupshiftdown Umathsubsupvgap Umathsubtopmax Umathsupbottommin Umathsupshiftdrop \
+Umathsupshiftup Umathsupsubbottommax Umathunderbarkern Umathunderbarrule Umathunderbarvgap \
+Umathunderdelimiterbgap Umathunderdelimitervgap Uoverdelimiter Uradical Uroot \
+Ustack Ustartdisplaymath Ustartmath Ustopdisplaymath Ustopmath \
+Usubscript Usuperscript Uunderdelimiter alignmark aligntab \
+attribute attributedef catcodetable clearmarks crampeddisplaystyle \
+crampedscriptscriptstyle crampedscriptstyle crampedtextstyle fontid formatname \
+gleaders ifabsdim ifabsnum ifprimitive initcatcodetable \
+latelua luaescapestring luastartup luatexdatestamp luatexrevision \
+luatexversion mathstyle nokerns noligs outputbox \
+pageleftoffset pagetopoffset postexhyphenchar posthyphenchar preexhyphenchar \
+prehyphenchar primitive savecatcodetable scantextokens suppressfontnotfounderror \
+suppressifcsnameerror suppresslongerror suppressoutererror synctex
+
+keywordclass.tex.omega=\
+OmegaVersion bodydir chardp charht \
+charit charwd leftghost localbrokenpenalty localinterlinepenalty \
+localleftbox localrightbox mathdir odelcode odelimiter \
+omathaccent omathchar omathchardef omathcode oradical \
+pagedir pageheight pagewidth pardir rightghost \
+textdir
+
keywordclass.tex.pdftex=\
efcode expanded ifincsname ifpdfabsdim \
ifpdfabsnum ifpdfprimitive leftmarginkern letterspacefont lpcode \
@@ -38,201 +88,151 @@ pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform \
pdfxformattr pdfxformname pdfxformresources pdfximage pdfximagebbox \
quitvmode rightmarginkern rpcode tagcode
-keywordclass.tex.xetex=\
-XeTeXversion
-
keywordclass.tex.tex=\
- / AlephVersion Alephminorversion \
Alephrevision Alephversion OmegaVersion Omegaminorversion Omegarevision \
Omegaversion Udelcode Udelcodenum Udelimiter Udelimiterover \
-Udelimiterunder Umathaccent Umathaccents Umathaxis Umathbinbinspacing \
-Umathbinclosespacing Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing \
-Umathbinpunctspacing Umathbinrelspacing Umathbotaccent Umathchar Umathchardef \
-Umathcharnum Umathclosebinspacing Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing \
-Umathcloseopspacing Umathcloseordspacing Umathclosepunctspacing Umathcloserelspacing Umathcode \
-Umathcodenum Umathconnectoroverlapmin Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap \
-Umathfractionnumup Umathfractionnumvgap Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing \
-Umathinnerinnerspacing Umathinneropenspacing Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing \
-Umathinnerrelspacing Umathlimitabovebgap Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap \
-Umathlimitbelowkern Umathlimitbelowvgap Umathopbinspacing Umathopclosespacing Umathopenbinspacing \
-Umathopenclosespacing Umathopeninnerspacing Umathopenopenspacing Umathopenopspacing Umathopenordspacing \
-Umathopenpunctspacing Umathopenrelspacing Umathoperatorsize Umathopinnerspacing Umathopopenspacing \
-Umathopopspacing Umathopordspacing Umathoppunctspacing Umathoprelspacing Umathordbinspacing \
-Umathordclosespacing Umathordinnerspacing Umathordopenspacing Umathordopspacing Umathordordspacing \
-Umathordpunctspacing Umathordrelspacing Umathoverbarkern Umathoverbarrule Umathoverbarvgap \
-Umathoverdelimiterbgap Umathoverdelimitervgap Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing \
-Umathpunctopenspacing Umathpunctopspacing Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing \
-Umathquad Umathradicaldegreeafter Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern \
-Umathradicalrule Umathradicalvgap Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing \
-Umathrelopenspacing Umathrelopspacing Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing \
-Umathspaceafterscript Umathstackdenomdown Umathstacknumup Umathstackvgap Umathsubshiftdown \
-Umathsubshiftdrop Umathsubsupshiftdown Umathsubsupvgap Umathsubtopmax Umathsupbottommin \
-Umathsupshiftdrop Umathsupshiftup Umathsupsubbottommax Umathunderbarkern Umathunderbarrule \
-Umathunderbarvgap Umathunderdelimiterbgap Umathunderdelimitervgap Uoverdelimiter Uradical \
-Uroot Ustack Ustartdisplaymath Ustartmath Ustopdisplaymath \
-Ustopmath Usubscript Usuperscript Uunderdelimiter above \
-abovedisplayshortskip abovedisplayskip abovewithdelims accent adjdemerits \
-advance afterassignment aftergroup alignmark aligntab \
-atop atopwithdelims attribute attributedef badness \
-baselineskip batchmode begingroup belowdisplayshortskip belowdisplayskip \
-binoppenalty bodydir botmark botmarks box \
-boxdir boxmaxdepth brokenpenalty catcode catcodetable \
-char chardef chardp charht charit \
-charwd cleaders clearmarks closein closeout \
-clubpenalties clubpenalty copy count countdef \
-cr crampeddisplaystyle crampedscriptscriptstyle crampedscriptstyle crampedtextstyle \
-crcr csname currentgrouplevel currentgrouptype currentifbranch \
-currentiflevel currentiftype day deadcycles def \
-defaulthyphenchar defaultskewchar delcode delimiter delimiterfactor \
-delimitershortfall detokenize dimen dimendef dimexpr \
-directlua discretionary displayindent displaylimits displaystyle \
-displaywidowpenalties displaywidowpenalty displaywidth divide doublehyphendemerits \
-dp dump eTeXVersion eTeXminorversion eTeXrevision \
-eTeXversion edef efcode else emergencystretch \
-end endcsname endgroup endinput endlinechar \
-eqno errhelp errmessage errorcontextlines errorstopmode \
-escapechar everycr everydisplay everyeof everyhbox \
-everyjob everymath everypar everyvbox exhyphenchar \
-exhyphenpenalty expandafter expanded fam fi \
-finalhyphendemerits firstmark firstmarks floatingpenalty font \
-fontchardp fontcharht fontcharic fontcharwd fontdimen \
-fontid fontname formatname futurelet gdef \
-gleaders global globaldefs glueexpr glueshrink \
-glueshrinkorder gluestretch gluestretchorder gluetomu halign \
-hangafter hangindent hbadness hbox hfil \
-hfill hfilneg hfuzz hoffset holdinginserts \
-hrule hsize hskip hss ht \
-hyphenation hyphenchar hyphenpenalty if ifabsdim \
-ifabsnum ifcase ifcat ifcsname ifdefined \
-ifdim ifeof iffalse iffontchar ifhbox \
-ifhmode ifincsname ifinner ifmmode ifnum \
-ifodd ifpdfabsdim ifpdfabsnum ifpdfprimitive ifprimitive \
-iftrue ifvbox ifvmode ifvoid ifx \
-ignorespaces immediate indent initcatcodetable input \
-inputlineno insert insertpenalties interactionmode interlinepenalties \
-interlinepenalty jobname kern language lastbox \
-lastkern lastlinefit lastnodetype lastpenalty lastskip \
-latelua lccode leaders left leftghost \
-lefthyphenmin leftmarginkern leftskip leqno let \
-letterspacefont limits linepenalty lineskip lineskiplimit \
-localbrokenpenalty localinterlinepenalty localleftbox localrightbox long \
-looseness lower lowercase lpcode luaescapestring \
-luastartup luatexdatestamp luatexrevision luatexversion mag \
-mark marks mathaccent mathbin mathchar \
-mathchardef mathchoice mathclose mathcode mathdir \
-mathinner mathop mathopen mathord mathpunct \
-mathrel mathstyle mathsurround maxdeadcycles maxdepth \
-meaning medmuskip message middle mkern \
-month moveleft moveright mskip muexpr \
-multiply muskip muskipdef mutoglue newlinechar \
-noalign noboundary noexpand noindent nokerns \
-noligs nolimits nolocaldirs nolocalwhatsits nonscript \
-nonstopmode nulldelimiterspace nullfont number numexpr \
-odelcode odelimiter omathaccent omathchar omathchardef \
-omathcode omit openin openout or \
-oradical outer output outputbox outputpenalty \
-over overfullrule overline overwithdelims pagebottomoffset \
-pagedepth pagedir pagediscards pagefilllstretch pagefillstretch \
-pagefilstretch pagegoal pageheight pageleftoffset pagerightoffset \
-pageshrink pagestretch pagetopoffset pagetotal pagewidth \
-par pardir parfillskip parindent parshape \
-parshapedimen parshapeindent parshapelength parskip patterns \
-pausing pdfadjustspacing pdfannot pdfcatalog pdfcolorstack \
-pdfcolorstackinit pdfcompresslevel pdfcopyfont pdfcreationdate pdfdecimaldigits \
-pdfdest pdfdestmargin pdfdraftmode pdfeachlinedepth pdfeachlineheight \
-pdfendlink pdfendthread pdffirstlineheight pdffontattr pdffontexpand \
-pdffontname pdffontobjnum pdffontsize pdfgamma pdfgentounicode \
-pdfglyphtounicode pdfhorigin pdfignoreddimen pdfimageapplygamma pdfimagegamma \
-pdfimagehicolor pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel \
-pdfinfo pdfinsertht pdflastannot pdflastlinedepth pdflastlink \
-pdflastobj pdflastxform pdflastximage pdflastximagecolordepth pdflastximagepages \
-pdflastxpos pdflastypos pdflinkmargin pdfliteral pdfmapfile \
-pdfmapline pdfminorversion pdfnames pdfnoligatures pdfnormaldeviate \
-pdfobj pdfobjcompresslevel pdfoptionpdfminorversion pdfoutline pdfoutput \
-pdfpageattr pdfpagebox pdfpageheight pdfpageref pdfpageresources \
-pdfpagesattr pdfpagewidth pdfpkmode pdfpkresolution pdfprimitive \
-pdfprotrudechars pdfpxdimen pdfrandomseed pdfrefobj pdfrefxform \
-pdfrefximage pdfreplacefont pdfrestore pdfretval pdfsave \
-pdfsavepos pdfsetmatrix pdfsetrandomseed pdfstartlink pdfstartthread \
-pdftexbanner pdftexrevision pdftexversion pdfthread pdfthreadmargin \
-pdftracingfonts pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin \
-pdfxform pdfxformattr pdfxformname pdfxformresources pdfximage \
-pdfximagebbox penalty postdisplaypenalty postexhyphenchar posthyphenchar \
-predisplaydirection predisplaypenalty predisplaysize preexhyphenchar prehyphenchar \
-pretolerance prevdepth prevgraf primitive protected \
-quitvmode radical raise read readline \
-relax relpenalty right rightghost righthyphenmin \
-rightmarginkern rightskip romannumeral rpcode savecatcodetable \
-savinghyphcodes savingvdiscards scantextokens scantokens scriptfont \
-scriptscriptfont scriptscriptstyle scriptspace scriptstyle scrollmode \
-setbox setlanguage sfcode shipout show \
-showbox showboxbreadth showboxdepth showgroups showifs \
-showlists showthe showtokens skewchar skip \
-skipdef spacefactor spaceskip span special \
-splitbotmark splitbotmarks splitdiscards splitfirstmark splitfirstmarks \
-splitmaxdepth splittopskip string suppressfontnotfounderror suppressifcsnameerror \
-suppresslongerror suppressoutererror synctex tabskip tagcode \
-textdir textfont textstyle the thickmuskip \
-thinmuskip time toks toksdef tolerance \
-topmark topmarks topskip tracingassigns tracingcommands \
-tracinggroups tracingifs tracinglostchars tracingmacros tracingnesting \
-tracingonline tracingoutput tracingpages tracingparagraphs tracingrestores \
-tracingscantokens tracingstats uccode uchyph underline \
-unexpanded unhbox unhcopy unkern unless \
-unpenalty unskip unvbox unvcopy uppercase \
-vadjust valign vbadness vbox vcenter \
-vfil vfill vfilneg vfuzz voffset \
-vrule vsize vskip vsplit vss \
-vtop wd widowpenalties widowpenalty write \
-xdef xleaders xspaceskip year
-
-keywordclass.tex.aleph=\
-AlephVersion Alephminorversion Alephrevision Alephversion \
-Omegaminorversion Omegarevision Omegaversion boxdir pagebottomoffset \
-pagerightoffset
-
-keywordclass.tex.luatex=\
-Udelcode Udelcodenum Udelimiter Udelimiterover \
-Udelimiterunder Umathaccent Umathaccents Umathaxis Umathbinbinspacing \
-Umathbinclosespacing Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing \
-Umathbinpunctspacing Umathbinrelspacing Umathbotaccent Umathchar Umathchardef \
-Umathcharnum Umathclosebinspacing Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing \
-Umathcloseopspacing Umathcloseordspacing Umathclosepunctspacing Umathcloserelspacing Umathcode \
-Umathcodenum Umathconnectoroverlapmin Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap \
-Umathfractionnumup Umathfractionnumvgap Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing \
-Umathinnerinnerspacing Umathinneropenspacing Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing \
-Umathinnerrelspacing Umathlimitabovebgap Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap \
-Umathlimitbelowkern Umathlimitbelowvgap Umathopbinspacing Umathopclosespacing Umathopenbinspacing \
-Umathopenclosespacing Umathopeninnerspacing Umathopenopenspacing Umathopenopspacing Umathopenordspacing \
-Umathopenpunctspacing Umathopenrelspacing Umathoperatorsize Umathopinnerspacing Umathopopenspacing \
-Umathopopspacing Umathopordspacing Umathoppunctspacing Umathoprelspacing Umathordbinspacing \
-Umathordclosespacing Umathordinnerspacing Umathordopenspacing Umathordopspacing Umathordordspacing \
-Umathordpunctspacing Umathordrelspacing Umathoverbarkern Umathoverbarrule Umathoverbarvgap \
-Umathoverdelimiterbgap Umathoverdelimitervgap Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing \
-Umathpunctopenspacing Umathpunctopspacing Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing \
-Umathquad Umathradicaldegreeafter Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern \
-Umathradicalrule Umathradicalvgap Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing \
-Umathrelopenspacing Umathrelopspacing Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing \
-Umathspaceafterscript Umathstackdenomdown Umathstacknumup Umathstackvgap Umathsubshiftdown \
-Umathsubshiftdrop Umathsubsupshiftdown Umathsubsupvgap Umathsubtopmax Umathsupbottommin \
-Umathsupshiftdrop Umathsupshiftup Umathsupsubbottommax Umathunderbarkern Umathunderbarrule \
-Umathunderbarvgap Umathunderdelimiterbgap Umathunderdelimitervgap Uoverdelimiter Uradical \
-Uroot Ustack Ustartdisplaymath Ustartmath Ustopdisplaymath \
-Ustopmath Usubscript Usuperscript Uunderdelimiter alignmark \
-aligntab attribute attributedef catcodetable clearmarks \
-crampeddisplaystyle crampedscriptscriptstyle crampedscriptstyle crampedtextstyle fontid \
-formatname gleaders ifabsdim ifabsnum ifprimitive \
-initcatcodetable latelua luaescapestring luastartup luatexdatestamp \
-luatexrevision luatexversion mathstyle nokerns noligs \
-outputbox pageleftoffset pagetopoffset postexhyphenchar posthyphenchar \
-preexhyphenchar prehyphenchar primitive savecatcodetable scantextokens \
-suppressfontnotfounderror suppressifcsnameerror suppresslongerror suppressoutererror synctex
+Udelimiterunder Umathaccent Umathaxis Umathbinbinspacing Umathbinclosespacing \
+Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing Umathbinpunctspacing \
+Umathbinrelspacing Umathchar Umathchardef Umathcharnum Umathclosebinspacing \
+Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing Umathcloseopspacing Umathcloseordspacing \
+Umathclosepunctspacing Umathcloserelspacing Umathcode Umathcodenum Umathconnectoroverlapmin \
+Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap Umathfractionnumup Umathfractionnumvgap \
+Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing Umathinnerinnerspacing Umathinneropenspacing \
+Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing Umathinnerrelspacing Umathlimitabovebgap \
+Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap Umathlimitbelowkern Umathlimitbelowvgap \
+Umathopbinspacing Umathopclosespacing Umathopenbinspacing Umathopenclosespacing Umathopeninnerspacing \
+Umathopenopenspacing Umathopenopspacing Umathopenordspacing Umathopenpunctspacing Umathopenrelspacing \
+Umathoperatorsize Umathopinnerspacing Umathopopenspacing Umathopopspacing Umathopordspacing \
+Umathoppunctspacing Umathoprelspacing Umathordbinspacing Umathordclosespacing Umathordinnerspacing \
+Umathordopenspacing Umathordopspacing Umathordordspacing Umathordpunctspacing Umathordrelspacing \
+Umathoverbarkern Umathoverbarrule Umathoverbarvgap Umathoverdelimiterbgap Umathoverdelimitervgap \
+Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing Umathpunctopenspacing Umathpunctopspacing \
+Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing Umathquad Umathradicaldegreeafter \
+Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern Umathradicalrule Umathradicalvgap \
+Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing Umathrelopenspacing Umathrelopspacing \
+Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing Umathspaceafterscript Umathstackdenomdown \
+Umathstacknumup Umathstackvgap Umathsubshiftdown Umathsubshiftdrop Umathsubsupshiftdown \
+Umathsubsupvgap Umathsubtopmax Umathsupbottommin Umathsupshiftdrop Umathsupshiftup \
+Umathsupsubbottommax Umathunderbarkern Umathunderbarrule Umathunderbarvgap Umathunderdelimiterbgap \
+Umathunderdelimitervgap Uoverdelimiter Uradical Uroot Ustack \
+Ustartdisplaymath Ustartmath Ustopdisplaymath Ustopmath Usubscript \
+Usuperscript Uunderdelimiter above abovedisplayshortskip abovedisplayskip \
+abovewithdelims accent adjdemerits advance afterassignment \
+aftergroup alignmark aligntab atop atopwithdelims \
+attribute attributedef badness baselineskip batchmode \
+begingroup belowdisplayshortskip belowdisplayskip binoppenalty bodydir \
+botmark botmarks box boxdir boxmaxdepth \
+brokenpenalty catcode catcodetable char chardef \
+chardp charht charit charwd cleaders \
+clearmarks closein closeout clubpenalties clubpenalty \
+copy count countdef cr crampeddisplaystyle \
+crampedscriptscriptstyle crampedscriptstyle crampedtextstyle crcr csname \
+currentgrouplevel currentgrouptype currentifbranch currentiflevel currentiftype \
+day deadcycles def defaulthyphenchar defaultskewchar \
+delcode delimiter delimiterfactor delimitershortfall detokenize \
+dimen dimendef dimexpr directlua discretionary \
+displayindent displaylimits displaystyle displaywidowpenalties displaywidowpenalty \
+displaywidth divide doublehyphendemerits dp dump \
+eTeXVersion eTeXminorversion eTeXrevision eTeXversion edef \
+efcode else emergencystretch end endcsname \
+endgroup endinput endlinechar eqno errhelp \
+errmessage errorcontextlines errorstopmode escapechar everycr \
+everydisplay everyeof everyhbox everyjob everymath \
+everypar everyvbox exhyphenchar exhyphenpenalty expandafter \
+expanded fam fi finalhyphendemerits firstmark \
+firstmarks floatingpenalty font fontchardp fontcharht \
+fontcharic fontcharwd fontdimen fontid fontname \
+formatname futurelet gdef gleaders global \
+globaldefs glueexpr glueshrink glueshrinkorder gluestretch \
+gluestretchorder gluetomu halign hangafter hangindent \
+hbadness hbox hfil hfill hfilneg \
+hfuzz hoffset holdinginserts hrule hsize \
+hskip hss ht hyphenation hyphenchar \
+hyphenpenalty if ifabsdim ifabsnum ifcase \
+ifcat ifcsname ifdefined ifdim ifeof \
+iffalse iffontchar ifhbox ifhmode ifincsname \
+ifinner ifmmode ifnum ifodd ifpdfabsdim \
+ifpdfabsnum ifpdfprimitive ifprimitive iftrue ifvbox \
+ifvmode ifvoid ifx ignorespaces immediate \
+indent initcatcodetable input inputlineno insert \
+insertpenalties interactionmode interlinepenalties interlinepenalty jobname \
+kern language lastbox lastkern lastlinefit \
+lastnodetype lastpenalty lastskip latelua lccode \
+leaders left leftghost lefthyphenmin leftmarginkern \
+leftskip leqno let letterspacefont limits \
+linepenalty lineskip lineskiplimit localbrokenpenalty localinterlinepenalty \
+localleftbox localrightbox long looseness lower \
+lowercase lpcode luaescapestring luastartup luatexdatestamp \
+luatexrevision luatexversion mag mark marks \
+mathaccent mathbin mathchar mathchardef mathchoice \
+mathclose mathcode mathdir mathinner mathop \
+mathopen mathord mathpunct mathrel mathstyle \
+mathsurround maxdeadcycles maxdepth meaning medmuskip \
+message middle mkern month moveleft \
+moveright mskip muexpr multiply muskip \
+muskipdef mutoglue newlinechar noalign noboundary \
+noexpand noindent nokerns noligs nolimits \
+nolocaldirs nolocalwhatsits nonscript nonstopmode nulldelimiterspace \
+nullfont number numexpr odelcode odelimiter \
+omathaccent omathchar omathchardef omathcode omit \
+openin openout or oradical outer \
+output outputbox outputpenalty over overfullrule \
+overline overwithdelims pagebottomoffset pagedepth pagedir \
+pagediscards pagefilllstretch pagefillstretch pagefilstretch pagegoal \
+pageheight pageleftoffset pagerightoffset pageshrink pagestretch \
+pagetopoffset pagetotal pagewidth par pardir \
+parfillskip parindent parshape parshapedimen parshapeindent \
+parshapelength parskip patterns pausing pdfadjustspacing \
+pdfannot pdfcatalog pdfcolorstack pdfcolorstackinit pdfcompresslevel \
+pdfcopyfont pdfcreationdate pdfdecimaldigits pdfdest pdfdestmargin \
+pdfdraftmode pdfeachlinedepth pdfeachlineheight pdfendlink pdfendthread \
+pdffirstlineheight pdffontattr pdffontexpand pdffontname pdffontobjnum \
+pdffontsize pdfgamma pdfgentounicode pdfglyphtounicode pdfhorigin \
+pdfignoreddimen pdfimageapplygamma pdfimagegamma pdfimagehicolor pdfimageresolution \
+pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel pdfinfo pdfinsertht \
+pdflastannot pdflastlinedepth pdflastlink pdflastobj pdflastxform \
+pdflastximage pdflastximagecolordepth pdflastximagepages pdflastxpos pdflastypos \
+pdflinkmargin pdfliteral pdfmapfile pdfmapline pdfminorversion \
+pdfnames pdfnoligatures pdfnormaldeviate pdfobj pdfobjcompresslevel \
+pdfoptionpdfminorversion pdfoutline pdfoutput pdfpageattr pdfpagebox \
+pdfpageheight pdfpageref pdfpageresources pdfpagesattr pdfpagewidth \
+pdfpkmode pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen \
+pdfrandomseed pdfrefobj pdfrefxform pdfrefximage pdfreplacefont \
+pdfrestore pdfretval pdfsave pdfsavepos pdfsetmatrix \
+pdfsetrandomseed pdfstartlink pdfstartthread pdftexbanner pdftexrevision \
+pdftexversion pdfthread pdfthreadmargin pdftracingfonts pdftrailer \
+pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform pdfxformattr \
+pdfxformname pdfxformresources pdfximage pdfximagebbox penalty \
+postdisplaypenalty postexhyphenchar posthyphenchar predisplaydirection predisplaypenalty \
+predisplaysize preexhyphenchar prehyphenchar pretolerance prevdepth \
+prevgraf primitive protected quitvmode radical \
+raise read readline relax relpenalty \
+right rightghost righthyphenmin rightmarginkern rightskip \
+romannumeral rpcode savecatcodetable savinghyphcodes savingvdiscards \
+scantextokens scantokens scriptfont scriptscriptfont scriptscriptstyle \
+scriptspace scriptstyle scrollmode setbox setlanguage \
+sfcode shipout show showbox showboxbreadth \
+showboxdepth showgroups showifs showlists showthe \
+showtokens skewchar skip skipdef spacefactor \
+spaceskip span special splitbotmark splitbotmarks \
+splitdiscards splitfirstmark splitfirstmarks splitmaxdepth splittopskip \
+string suppressfontnotfounderror suppressifcsnameerror suppresslongerror suppressoutererror \
+synctex tabskip tagcode textdir textfont \
+textstyle the thickmuskip thinmuskip time \
+toks toksdef tolerance topmark topmarks \
+topskip tracingassigns tracingcommands tracinggroups tracingifs \
+tracinglostchars tracingmacros tracingnesting tracingonline tracingoutput \
+tracingpages tracingparagraphs tracingrestores tracingscantokens tracingstats \
+uccode uchyph underline unexpanded unhbox \
+unhcopy unkern unless unpenalty unskip \
+unvbox unvcopy uppercase vadjust valign \
+vbadness vbox vcenter vfil vfill \
+vfilneg vfuzz voffset vrule vsize \
+vskip vsplit vss vtop wd \
+widowpenalties widowpenalty write xdef xleaders \
+xspaceskip year
-keywordclass.tex.omega=\
-OmegaVersion bodydir chardp charht \
-charit charwd leftghost localbrokenpenalty localinterlinepenalty \
-localleftbox localrightbox mathdir odelcode odelimiter \
-omathaccent omathchar omathchardef omathcode oradical \
-pagedir pageheight pagewidth pardir rightghost \
-textdir
+keywordclass.tex.xetex=\
+XeTeXversion
diff --git a/Master/texmf-dist/context/data/scite/scite-context-external.properties b/Master/texmf-dist/context/data/scite/scite-context-external.properties
index 6bba2b7ff91..5c71493412a 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-external.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-external.properties
@@ -30,23 +30,28 @@ if PLAT_GTK
lexer.*.lpeg=lpeg
+file.patterns.cweb=*.h;*.c;*.w;*.hh;*.cc;*.ww;*.hpp;*.cpp;*.hxx;*.cxx;
+
lexer.$(file.patterns.metapost)=lpeg_scite-context-lexer-mps
lexer.$(file.patterns.metafun)=lpeg_scite-context-lexer-mps
lexer.$(file.patterns.context)=lpeg_scite-context-lexer-tex
-lexer.$(file.patterns.tex)=lpeg_scite-context-lexer-tex
lexer.$(file.patterns.lua)=lpeg_scite-context-lexer-lua
lexer.$(file.patterns.example)=lpeg_scite-context-lexer-xml
lexer.$(file.patterns.text)=lpeg_scite-context-lexer-txt
lexer.$(file.patterns.pdf)=lpeg_scite-context-lexer-pdf
+lexer.$(file.patterns.cweb)=lpeg_scite-context-lexer-web
+
+lexer.$(file.patterns.tex)=lpeg_scite-context-lexer-tex
+lexer.$(file.patterns.xml)=lpeg_scite-context-lexer-xml
+lexer.$(file.patterns.html)=lpeg_scite-context-lexer-xml
+lexer.$(file.patterns.cpp)=lpeg_scite-context-lexer-web
+
+# It's a real pity that we cannot overload the errorlist lexer. That would
+# make scite even more interesting. Add to that bundling lpeg and the lpeg
+# lexer, thereby providing an interface to properties.
-#~ lexer.$(file.patterns.metapost)=lpeg_ctx-mps
-#~ lexer.$(file.patterns.metafun)=lpeg_ctx-mps
-#~ lexer.$(file.patterns.context)=lpeg_ctx-tex
-#~ lexer.$(file.patterns.tex)=lpeg_ctx-tex
-#~ lexer.$(file.patterns.lua)=lpeg_ctx-lua
-#~ lexer.$(file.patterns.example)=lpeg_ctx-xml
-#~ lexer.$(file.patterns.text)=lpeg_ctx-txt
-#~ lexer.$(file.patterns.pdf)=lpeg_ctx-pdf
+# lexer.errorlist=lpeg_scite-context-lexer-txt
+# lexer.output=lpeg_scite-context-lexer-txt
comment.block.lpeg_scite-context-lexer-tex=%
comment.block.at.line.start.lpeg_scite-context-lexer-tex=1
diff --git a/Master/texmf-dist/context/data/scite/scite-context-internal.properties b/Master/texmf-dist/context/data/scite/scite-context-internal.properties
index 92806b8e22e..130e64f1e97 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-internal.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context-internal.properties
@@ -80,3 +80,7 @@ lexer.context.auto.if=1
lexer.$(file.patterns.context)=tex
lexer.$(file.patterns.tex)=tex
+
+lexer.$(file.patterns.example)=xml
+lexer.$(file.patterns.lua)=lua
+lexer.$(file.patterns.metafun)=metapost
diff --git a/Master/texmf-dist/context/data/scite/scite-context-readme.pdf b/Master/texmf-dist/context/data/scite/scite-context-readme.pdf
index 32c9628b60d..5e68aecc783 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-readme.pdf
+++ b/Master/texmf-dist/context/data/scite/scite-context-readme.pdf
Binary files differ
diff --git a/Master/texmf-dist/context/data/scite/scite-context-readme.tex b/Master/texmf-dist/context/data/scite/scite-context-readme.tex
index 6221d7cde94..0789401b83c 100644
--- a/Master/texmf-dist/context/data/scite/scite-context-readme.tex
+++ b/Master/texmf-dist/context/data/scite/scite-context-readme.tex
@@ -200,17 +200,19 @@ can move it around. There are no special dependencies on the
operating system.
Next you need to install the lpeg lexers. \footnote {Versions
-later than 2.11 will not run on Windows 2K. In that case you need
+later than 2.11 will not run on \MSWINDOWS\ 2K. In that case you need
to comment out the external lexer import.}  These can be fetched from:
\starttyping
-code.google.com/p/scintilla
+http://foicica.com/scintillua/
\stoptyping
-On windows you need to copy the \type {lexers} subfolder to the
-\type {wscite} folder. For Linux the place depends on the
-distribution and I just copy them in the same path as where the
-regular properties files live.
+On \MSWINDOWS\ you need to copy the \type {lexers} subfolder to the \type
+{wscite} folder. For \LINUX\ the place depends on the distribution and I just
+copy them in the same path as where the regular properties files live. \footnote
+{If you update, don't do so without testing first. Sometimes there are changes in
+\SCITE\ that influence the lexers, in which case you have to wait until we have
+updated them to suit those changes.}
For \UNIX, one can take a precompiled version as well. Here we
need to split the set of files into:
@@ -231,7 +233,7 @@ If you want to use \CONTEXT, you need to copy the relevant files from
to the path where \SCITE\ keeps its property files (\type {*.properties}).
There is a file called \type {SciteGlobal.properties}. At the end
-of that file (on windows it is in the path where the Scite binary)
+of that file (on \MSWINDOWS\ it is in the path where the Scite binary lives)
you then add a line to the end:
\starttyping
@@ -312,7 +314,7 @@ property files instead.
These files go to the \type {lexers} subpath in your \SCITE\
installation. Normally this sits in the binary path. The
-following files provide some extensions. On windows you can copy
+following files provide some extensions. On \MSWINDOWS\ you can copy
these files to the path where the \SCITE\ binary lives.
\starttyping
@@ -344,7 +346,7 @@ scite-ctx-context.properties
scite-ctx-example.properties
\stoptyping
-On Windows these go to:
+On \MSWINDOWS\ these go to:
\starttyping
c:/Users/YourName
@@ -444,7 +446,7 @@ But probably better is to use the next directive just below the
usual \XML\ marker line:
\starttyping
-<?context-xml-directive editor language uk ?>
+<?context-directive editor language uk ?>
\stoptyping
\subject{Interface selection}
diff --git a/Master/texmf-dist/context/data/scite/scite-context.properties b/Master/texmf-dist/context/data/scite/scite-context.properties
index 1a4f0f5e822..571396b58a6 100644
--- a/Master/texmf-dist/context/data/scite/scite-context.properties
+++ b/Master/texmf-dist/context/data/scite/scite-context.properties
@@ -56,45 +56,41 @@ textwrapper.length=68
file.patterns.tex=
file.patterns.latex=
-file.patterns.context=*.tex;*.mkii;*.mkiv;*.mkvi;
+file.patterns.context=*.tex;*.mkii;*.mkiv;*.mkvi;*.mkix;*.mkxi;
open.suffix.$(file.patterns.context)=.tex
# Example : patterns
file.patterns.xml=
-file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.ctx;*.export;
-
+file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.htm;*.html;*.xhtml;*.ctx;*.export;
open.suffix.$(file.patterns.example)=.xml
-
filter.example=eXaMpLe|$(file.patterns.example)|
-
-lexer.$(file.patterns.example)=xml
+#~ lexer.$(file.patterns.example)=xml
# Lua : patterns
-file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.lum;*.tma;*.lfg
-
+file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.lum;*.tma;*.lfg;*.luv;*.lui
open.suffix.$(file.patterns.lua)=.lua
-
filter.lua=Lua MkIV|$(file.patterns.lua)|
-lexer.$(file.patterns.lua)=lua
+#~ lexer.$(file.patterns.lua)=lua
command.compile.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)"
command.go.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)"
+# W: patterns
+#
+# file.patterns.w=*.c;*.w;*.cc;*.cpp;*.cxx;*.h;*.hh;*.hpp;*.hxx;
+
# Test: patterns
file.patterns.text=*.txt
-
filter.text=Text |$(file.patterns.text)|
+# lexer.$(file.patterns.txt)=txt
file.patterns.pdf=*.pdf
-
filter.pdf=PDF |$(file.patterns.pdf)|
-# lexer.$(file.patterns.txt)=txt
-
# Commands: some scripts
if PLAT_WIN
@@ -140,6 +136,8 @@ if PLAT_GTK
# Commands: help info, e:\websites\www.pragma-ade.com\showcase.pdf / todo: manuals
command.help.$(file.patterns.context)=$(name.context.texshow) $(CurrentWord)
+command.help.$(file.patterns.context)=mtxrun --gethelp --url="http://localhost:31415/mtx-server-ctx-help.lua?command=%command%" --command="$(CurrentWord)"
+command.help.$(file.patterns.context)=mtxrun --gethelp --url="http://www.contextgarden.net/Command/%command%" --command="$(CurrentWord)"
command.help.$(file.patterns.example)=
command.help.$(file.patterns.metafun)=
@@ -169,9 +167,9 @@ command.compile.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
command.compile.*.fo=$(name.example.xmlcheck) $(FileNameExt)
command.build.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
-command.build.$(file.patterns.metafun)=$(name.metafun.mptopdf) $(FileNameExt)
-command.build.$(file.patterns.example)=$(name.context.run) --xml $(FileNameExt)
-command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --xml --use=foxet $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
+command.build.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
+command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
command.build.subsystem.$(file.patterns.context)=1
command.build.subsystem.$(file.patterns.metafun)=1
@@ -551,16 +549,12 @@ style.errorlist.16=fore:$(colors.red)
# Metapost: patterns
-file.patterns.metafun=
-
file.patterns.metafun=*.mp;*.mpx;*.mpy;*.mpii;*.mpiv
-
filter.metafun=MetaFun|$(file.patterns.metafun)|
-
-lexer.$(file.patterns.metafun)=metapost
+#~ lexer.$(file.patterns.metafun)=metapost
command.compile.$(file.patterns.metafun)=
-command.build.$(file.patterns.metafun)=context $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
command.go.$(file.patterns.metafun)=gv $(FileName).1
command.0.$(file.patterns.metafun)=
@@ -664,3 +658,5 @@ fold.margin.colour=#CCCCCC
# testing
#~ cache.layout=
+
+find.command=mtxrun --script grep "$(find.what)" "$(find.files)"
diff --git a/Master/texmf-dist/context/data/scite/scite-ctx.lua b/Master/texmf-dist/context/data/scite/scite-ctx.lua
index 3557994f383..546cac41197 100644
--- a/Master/texmf-dist/context/data/scite/scite-ctx.lua
+++ b/Master/texmf-dist/context/data/scite/scite-ctx.lua
@@ -70,7 +70,7 @@
props = props or { } -- setmetatable(props,{ __index = function(k,v) props[k] = "unknown" return "unknown" end } )
-local byte, lower, upper, gsub, sub, find, rep, match, gmatch = string.byte, string.lower, string.upper, string.gsub, string.sub, string.find, string.rep, string.match, string.gmatch
+local byte, lower, upper, gsub, sub, find, rep, match, gmatch, format = string.byte, string.lower, string.upper, string.gsub, string.sub, string.find, string.rep, string.match, string.gmatch, string.format
local sort, concat = table.sort, table.concat
local crlf = "\n"
@@ -80,7 +80,7 @@ function traceln(str)
io.flush()
end
-function string.grab(str,delimiter)
+local function grab(str,delimiter)
local list = { }
for snippet in gmatch(str,delimiter) do
list[#list+1] = snippet
@@ -88,15 +88,15 @@ function string.grab(str,delimiter)
return list
end
-function string.expand(str)
+local function expand(str)
return (gsub(str,"ENV%((%w+)%)", os.envvar))
end
-function string.strip(str)
+local function strip(str)
return (gsub(str,"^%s*(.-)%s*$", "%1"))
end
-function table.alphasort(list,i)
+local function alphasort(list,i)
if i and i > 0 then
local function alphacmp(a,b)
return lower(gsub(sub(a,i),'0',' ')) < lower(gsub(sub(b,i),'0',' '))
@@ -181,13 +181,16 @@ function extend_to_end() -- editor:LineEndExtend() does not work
while line == editor:LineFromPosition(selectionend+1) do
selectionend = selectionend + 1
editor:SetSel(selectionstart,selectionend)
+ if selectionend ~= editor.SelectionEnd then
+ break -- no progress
+ end
end
editor:SetSel(selectionstart,selectionend)
return selectionend
end
function getfiletype()
- local firstline = editor:GetLine(0)
+ local firstline = editor:GetLine(0) or ""
if editor.Lexer == SCLEX_TEX then
return 'tex'
elseif editor.Lexer == SCLEX_XML then
@@ -238,12 +241,12 @@ do
else
print("- 'ctxspellpath is not set")
end
- print("- ctx.spellcheck.wordpath expands to " .. string.expand(props['ctx.spellcheck.wordpath']))
+ print("- ctx.spellcheck.wordpath expands to " .. expand(props['ctx.spellcheck.wordpath']))
end
print("\n- ctx.wraptext.length is set to " .. props['ctx.wraptext.length'])
if props['ctx.helpinfo'] ~= '' then
print("\n- key bindings:\n")
- print((gsub(string.strip(props['ctx.helpinfo']),"%s*\|%s*","\n")))
+ print((gsub(strip(props['ctx.helpinfo']),"%s*|%s*","\n")))
end
print("\n- recognized first lines:\n")
print("xml <?xml version='1.0' language='nl'")
@@ -371,13 +374,13 @@ function sort_text()
local startcolumn = props['SelectionStartColumn'] - 1
local endcolumn = props['SelectionEndColumn'] - 1
+
startposition = extend_to_start()
endposition = extend_to_end()
local selection = gsub(editor:GetSelText(), "%s*$", '')
-
- list = string.grab(selection,"[^\n\r]+")
- table.alphasort(list, startcolumn)
+ local list = grab(selection,"[^\n\r]+")
+ alphasort(list, startcolumn)
local replacement = concat(list, "\n")
editor:GotoPos(startposition)
@@ -462,9 +465,9 @@ function quote_text()
end
local replacement = editor:GetSelText()
- replacement = gsub(replacement,"\`\`(.-)\'\'", leftquotation .. "%1" .. rightquotation)
+ replacement = gsub(replacement,"``(.-)\'\'", leftquotation .. "%1" .. rightquotation)
replacement = gsub(replacement,"\"(.-)\"", leftquotation .. "%1" .. rightquotation)
- replacement = gsub(replacement,"\`(.-)\'", leftquote .. "%1" .. rightquote )
+ replacement = gsub(replacement,"`(.-)`", leftquote .. "%1" .. rightquote )
replacement = gsub(replacement,"\'(.-)\'", leftquote .. "%1" .. rightquote )
editor:ReplaceSel(replacement)
@@ -475,9 +478,9 @@ function compound_text()
local filetype = getfiletype()
if filetype == 'xml' then
- editor:ReplaceSel(gsub(editor:GetSelText(),"(>[^<%-][^<%-]+)([-\/])(%w%w+)","%1<compound token='%2'/>%3"))
+ editor:ReplaceSel(gsub(editor:GetSelText(),"(>[^<%-][^<%-]+)([-/])(%w%w+)","%1<compound token='%2'/>%3"))
else
- editor:ReplaceSel(gsub(editor:GetSelText(),"([^\|])([-\/]+)([^\|])","%1|%2|%3"))
+ editor:ReplaceSel(gsub(editor:GetSelText(),"([^|])([-/]+)([^|])","%1|%2|%3"))
end
end
@@ -570,12 +573,12 @@ function check_text() -- obsolete, replaced by lexer
wordfile, worddone, wordlist = fname, 0, {}
for filename in gmatch(wordfile,"[^%,]+") do
if wordpath ~= '' then
- filename = string.expand(wordpath) .. '/' .. filename
+ filename = expand(wordpath) .. '/' .. filename
end
if io.exists(filename) then
traceln("loading " .. filename)
for line in io.lines(filename) do
- if not find(line,"^[\%\#\-]") then
+ if not find(line,"^[%#-]") then
str = gsub(line,"%s*$", '')
rawset(wordlist,str,true)
worddone = worddone + 1
@@ -644,6 +647,34 @@ function reset_text()
editor:SetStyling(editor.TextLength,INDIC_PLAIN)
end
+function add_text()
+
+ local startposition = editor.SelectionStart
+ local endposition = editor.SelectionEnd
+
+ if startposition == endposition then return end
+
+ local selection = gsub(editor:GetSelText(), "%s*$", '')
+
+ local n, sum = 0, 0
+ for s in gmatch(selection,"[%d%.%,]+") do
+ s = gsub(s,",",".")
+ local m = tonumber(s)
+ if m then
+ n = n + 1
+ sum = sum + m
+ traceln(format("%4i : %s",n,m))
+ end
+ end
+ if n > 0 then
+ traceln("")
+ traceln(format("sum : %s",sum))
+ else
+ traceln("no numbers selected")
+ end
+
+end
+
-- menu
local menuactions = {}
@@ -651,7 +682,7 @@ local menufunctions = {}
function UserListShow(menutrigger, menulist)
local menuentries = {}
- local list = string.grab(menulist,"[^%|]+")
+ local list = grab(menulist,"[^%|]+")
menuactions = {}
for i=1, #list do
if list[i] ~= '' then
diff --git a/Master/texmf-dist/context/data/scite/scite-ctx.properties b/Master/texmf-dist/context/data/scite/scite-ctx.properties
index 8b2651c0d01..e7223d762b2 100644
--- a/Master/texmf-dist/context/data/scite/scite-ctx.properties
+++ b/Master/texmf-dist/context/data/scite/scite-ctx.properties
@@ -25,6 +25,7 @@ ctx.menulist.default=\
wrap=wrap_text|\
unwrap=unwrap_text|\
sort=sort_text|\
+ add=add_text|\
check=check_text|\
reset=reset_text
@@ -35,6 +36,7 @@ ctx.menulist.context=\
document=document_text|\
quote=quote_text|\
compound=compound_text|\
+ add=add_text|\
check=check_text|\
reset=reset_text
@@ -46,6 +48,7 @@ ctx.menulist.example=\
document=document_text|\
quote=quote_text|\
compound=compound_text|\
+ add=add_text|\
check=check_text|\
reset=reset_text
diff --git a/Master/texmf-dist/context/data/scite/scite-pragma.properties b/Master/texmf-dist/context/data/scite/scite-pragma.properties
index 450c609b042..7308f1fb60e 100644
--- a/Master/texmf-dist/context/data/scite/scite-pragma.properties
+++ b/Master/texmf-dist/context/data/scite/scite-pragma.properties
@@ -35,4 +35,6 @@ Metapost|metapost||\
XML|xml||\
Lua|lua||\
Text|txt||\
-PDF|pdf||
+PDF|pdf||\
+CWeb|web||\
+Text|txt||
diff --git a/Master/texmf-dist/doc/context/bib/bibmod-doc.pdf b/Master/texmf-dist/doc/context/bib/bibmod-doc.pdf
deleted file mode 100644
index ea1db316004..00000000000
--- a/Master/texmf-dist/doc/context/bib/bibmod-doc.pdf
+++ /dev/null
Binary files differ
diff --git a/Master/texmf-dist/doc/context/bib/bibmod-doc.tex b/Master/texmf-dist/doc/context/bib/bibmod-doc.tex
deleted file mode 100644
index 2ffdfa9f2f2..00000000000
--- a/Master/texmf-dist/doc/context/bib/bibmod-doc.tex
+++ /dev/null
@@ -1,750 +0,0 @@
-
-\usemodule[int-load]
-\def\loadsetups{}
-\setupinteraction[state=start]
-\setupcolors[state=start]
-
-\usemodule[bib,set-11,mod-01]
-
-
-\startXMLmapping[zero]
-\processXMLfilegrouped{t-bib.xml}
-\stopXMLmapping
-
-\setupitemize[each][packed]
-
-\setuphead[section][page=]
-
-\setupoutput[pdftex]
-
-\def\BIBTEX{Bib\TeX}
-\def\MAPS{Maps}
-
-
-\startbuffer[bibexample]
-\startpublication[k=me,
- t=manual,
- a=Hoekwater,
- y=2006,
- s=TH2006,
- n=1,
- u=http://contextgarden.net/Bibliography]
-\author{Taco}[T.]{}{Hoekwater}
-\title{\CONTEXT\ Publication Module, The user documentation}
-\pubyear{2006}
-\note{In case you didn't know: it's the document you are reading now}
-\pages{14}
-\stoppublication
-\stopbuffer
-
-\getbuffer[bibexample]
-
-\startmodule[type=tex]
-
-\startdocumentation
-
-\module
- [ file=bibmod-doc,
- version=2006.09.15,
- title=Module Documentation,
- subtitle=Bibliographies,
- author={Taco Hoekwater},
- date=\currentdate,
- copyright=Taco Hoekwater]
-
-\completecontent
-
-\section{Introduction}
-
-The bibliographic module (\type{t-bib.tex}) takes care of references
-to publications and the typesetting of publication lists, as well as
-providing an interface between \BIBTEX\ and \CONTEXT. This manual
-documents version 2009.03.02.
-
-The bibliographic subsystem consists of the main module
-\type{t-bib.tex}; four \BIBTEX\ styles (\type{cont-xx.bst}); and a set
-of example configuration files (\type{bibl-xxx.tex}) that set up
-specific formatting styles for both the citations and the list of
-references.
-
-
-\subsection{General overview}
-
-A typical input file obeys the following structure (a minimal skeleton is
-sketched after the list):
-\startitemize[n]
-\item A call to \type{\usemodule[bib]}.
-\item Optionally, a few setup commands for the bibliographic module.
-\item A number of definitions of publications to be referenced in the
-main text of the article. The source of these definitions can be
-a combination of:
- \startitemize
- \item The \type{\jobname.bbl} file (automatically read at \type{\starttext})
- \item extra bbl files
- \item a file or inline macros before \type{\starttext}
- \stopitemize
- These possibilities will be explained below. For now, it is
- only important to realize that all of these definitions have to be known
- {\it before} the first citation in the text.
-\item \type{\starttext}
-\item The body text, with a number of \type{\cite} and \type{\nocite} commands.
-\item The list of publications, called using the command
- \type{\placepublications} or the command\break \type{\completepublications}.
-\item \type{\stoptext}
-\stopitemize
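-
-A minimal skeleton along these lines, for illustration only (the database
-name \type{mybib} and the citation key \type{hoekwater2006} are mere
-placeholders), could look like this:
-\starttyping
-\usemodule[bib]
-\setupbibtex[database=mybib]
-
-\starttext
-As argued in \cite[hoekwater2006], ...
-
-\completepublications
-\stoptext
-\stoptyping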
-
-\section{Setup commands}
-
-Bibliographic references use a specific `style', a collection of rules
-for the use of \type{\cite} as well as for the formatting that is
-applied to the publication list. The \CONTEXT\ bibliographic module
-expects you to define all of these style options in one single
-file of which the name starts with the prefix \type{bibl-}.
-
-Unlike the normal situation in \LATEX, this style {\it also\/}
-includes the formatting of the items themselves. Because of this, the
-\type{.bbl} file is set up as a database of entries with fields.
-
-\subsection{Global settings: \type{\setuppublications}}
-
-The most important user-level command is
-\type{\setuppublications}. Most of the options to this command
-should be set by the bibliography style file, but a few of them are of
-immediate interest to the casual user as well.
-
-Like all setup commands, this command should be given before
-\type{\starttext}, as it sets up global information about the
-bibliographic references used in the document. \CONTEXT\ needs this
-information in order to function correctly.
-
-\setup{setuppublications}
-
-\starttabulate[|l|p|]
-\NC alternative\NC This gives the name of a bibliography style. \crlf
- The chosen style defines the other default options; the options
- given in this documentation are the defaults as they are set up
- by the `apa' style. When this argument is given, the newly selected
-style is read in first, before the other options are processed, thus
-allowing you to override specific settings from the chosen style.\NC\NR
-\NC refcommand \NC the default option for \type{\cite}\NC \NR
-\NC sorttype\NC How the publications in the final publication
- list should be sorted. `cite' means: by the order in which
- they were first cited in your text. `bbl' tells the
- module to keep the relative ordering in which the publication
- definitions were found.\crlf
- The current default for apa is `cite'\NC\NR
-\NC criterium\NC Whether to list only the referenced
- publications or all of them.\crlf
- If this value is `all', then if `sorttype' equals `cite', this
- means that all referred-to publications are listed
- before all others, otherwise (if `sorttype' equals `bbl') you will
- just get a typeset version of the used database(s).\crlf
- The default for apa is `used'.\NC\NR
-\NC numbering\NC Whether or not the publication list
- should be labelled and if so, how. \type{yes} uses the item number in
- the publication list as label. \type{short} uses the short
- label. \type{bib}
- uses the original number in the \BIBTEX\ database as a label.
- Anything else turns labelling off.\crlf
- The default for apa is `no'\NC\NR
-\NC autohang\NC Whether or not the
- hanging indent should be re-calculated based on the real size of the
- label. This option only applies if numbering is turned on.\crlf
- The default is `no'.\NC\NR
-\NC monthconversion\NC The presentation form of any month field, if it
- is entered in the database as a numeric value. The default is to
- typeset the number without any conversion\NC\NR
-\stoptabulate
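-
-Purely as an illustration (the keys and values are simply taken from the
-table above, not from any particular style file), a document could override
-a few of these defaults like this:
-\starttyping
-\setuppublications
-  [alternative=apa,
-   sorttype=cite,
-   criterium=all,
-   numbering=yes]
-\stoptyping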
-
-\subsection{How the entries are formatted: \type{\setuppublicationlist}}
-
-\setup{setuppublicationlist}
-
-The list of publications at the end of the article is comparable with
-a sequence of normal \CONTEXT\ `list items' that behaves much like the
-list that defines the table of contents. {\it In previous versions, it was
-in fact implemented as a `normal' \CONTEXT\ list, but this is no
-longer true.\/}
-
-The module defines a set of extra options. These option names are static; they
-do {\it not} change to follow the selected \CONTEXT\ interface language.
-
-The first two options provide default widths for `autohang':
-\starttabulate[|l|p|]
-\NC totalnumber\NC The total number of items in the following list (used for autohang).\NC\NR
-\NC samplesize\NC The longest short label in the list (used for autohang)\NC\NR
-\stoptabulate
-
-A third option can be used to overrule the use of \type{\title} as
-heading for \type{\completepublications}
-
-\starttabulate[|l|p|]
-\NC title\NC The sectioning command.\NC\NR
-\stoptabulate
-
-A fourth option can be used to nullify the printing of `year suffixes'
-in cases where the author(s) has written multiple works within a
-single year.
-
-\starttabulate[|l|p|]
-\NC maybeyear\NC either \type{on} or \type{off}. Default is \type{on}\NC\NR
-\stoptabulate
-
-The other extra options are needed to control micro||typesetting
-of things that are buried deep within macros. There is a separate
-command to handle the larger layout options
-(\type{\setuppublicationlayout}, explained below), but the options
-here are the only way to make changes in the formatting used for
-editors', authors', and article authors' names.
-\starttabulate[|l|p|]
-\NC author \NC command to typeset one author in the publication list.\NC \NR
-\NC artauthor \NC command to typeset one article author in the publication list.\NC \NR
-\NC editor \NC command to typeset one editor in the publication list.\NC \NR
-\NC namesep \NC the separation between consecutive names (either
- editors, authors or artauthors).\NC \NR
-\NC lastnamesep \NC the separation before the last name in a list of names.\NC \NR
-\NC firstnamesep \NC the separation following the firstname or inits
- within a name in the publication list.\NC \NR
-\NC juniorsep \NC likewise for `junior'.\NC \NR
-\NC vonsep \NC likewise for `von'.\NC \NR
-\NC surnamesep \NC likewise for surname.\NC \NR
-\NC authoretallimit \NC Number of authors needed to trigger `et al.' handling.\NC \NR
-\NC authoretaltext \NC Text to show at the end of an abbreviated list.\NC \NR
-\NC authoretaldisplay \NC Number of authors to actually display in an abbreviated list.\NC \NR
-\NC artauthoretallimit \NC Number of authors needed to trigger `et al.' handling.\NC \NR
-\NC artauthoretaltext \NC Text to show at the end of an abbreviated list.\NC \NR
-\NC artauthoretaldisplay \NC Number of authors to actually display in an abbreviated list.\NC \NR
-\NC editoretallimit \NC Number of editors needed to trigger `et al.' handling.\NC \NR
-\NC editoretaltext \NC Text to show at the end of an abbreviated list.\NC \NR
-\NC editoretaldisplay \NC Number of editors to actually display in an abbreviated list.\NC \NR
-\NC authorcommand \NC A three-argument macro to typeset the list of authors.\NC\NR
-\NC artauthorcommand \NC A three-argument macro to typeset the list of article authors.\NC\NR
-\NC editorcommand \NC A three-argument macro to typeset the list of editors.\NC \NR
-\stoptabulate
-
-The commands given for `author' etc. are predefined
-macros that control how a single name is typeset. The four supplied
-macros provide formatting that looks like this:
-
-{\setvalue{@@currentalternative}{data}
-\starttabulate[|l|p|]
-\NC\tex{invertedauthor}\NC \invertedauthor{Taco}{von}{Hoekwater}{T}{jr}\NC\NR
-\NC\tex{invertedshortauthor}\NC \invertedshortauthor{Taco}{von}{Hoekwater}{T}{jr}\NC\NR
-\NC\tex{normalauthor}\NC \normalauthor{Taco}{von}{Hoekwater}{T}{jr}\NC\NR
-\NC\tex{normalshortauthor}\NC \normalshortauthor{Taco}{von}{Hoekwater}{T}{jr}\NC\NR
-\stoptabulate
-}
-As you can see in the examples, there is a connection between certain
-styles of displaying a name and the punctuation used. Punctuation in
-this document has been set up by the `apa' style, and that style makes
-sure that \type{\invertedshortauthor} looks good, since that is the default
-command for `apa' style. (Keep in mind that the comma at the end of the
-author will be inserted by either `namesep' or `lastnamesep'.)
-
-In case you are not happy with the predefined macros, it is quite simple to
-define one of these macros yourself: it is a simple macro with 5
-arguments: firstnames, von-part, surname, inits, junior.
-
-For example, here is the definition of \type{\normalauthor},
-\starttyping
-\def\normalauthor#1#2#3#4#5%
- {\bibdoif{#1}{#1\bibalternative{firstnamesep}}%
- \bibdoif{#2}{#2\bibalternative{vonsep}}%
- #3%
- \bibdoif{#5}{\bibalternative{surnamesep}#5\unskip}}
-\stoptyping
-but commands can be a lot simpler, like this:
-\starttyping
-\def\surnameonly#1#2#3#4#5{#3}
-\setuppublicationlist[editor=\surnameonly]
-\stoptyping
-
-The three-argument macro after `authorcommand' etc. can be used to
-overrule the typesetting of the list of authors (normally done by the
-internal macro \type{\dospecialbibinsert}). This is mostly a hook for
-duplicated author lists in the publication list, which can be handled
-like so:
-
-\starttyping
-\def\oldlist{}
-\def\AbbreviateAuthors#1#2#3%
- {\xdef\newlist{#3}%
- \ifx\oldlist\newlist \hbox to 2em{\hss---\hss}%
- \else \dospecialbibinsert{#1}{#2}{#3}\fi
- \global\let\oldlist\newlist }
-
-\setuppublicationlist
- [artauthorcommand=\AbbreviateAuthors]
-\stoptyping
-The first argument is a list type `author', `artauthor', or `editor',
-the second argument is the number of items that should be typeset,
-and the third argument is a macro containing the commalist of persons,
-in a form suitable for \tex{invertedauthor} and friends.
-
-
-The following options are initialized depending on the
-global settings for `numbering' and `autohang':
-\starttabulate[|l|p|]
-\NC width\NC Set to the calculated width of the largest label, but only if autohang is `yes'\NC\NR
-\NC distance\NC Set to 0pt, but only if autohang is `yes'\NC\NR
-\NC numbercommand\NC A command given in `setuppublications' if numbering is turned on, otherwise empty.\NC\NR
-\NC textcommand\NC Set to a macro that outdents the body text if numbering is turned off, otherwise empty\NC\NR
-\stoptabulate
-
-
-\subsection{Setting citation options: \type{\setupcite}}
-
-The \type{\cite} command has a lot of alternatives, as could be seen
-above in the setting of `refcommand'. And these alternatives have
-their own options:
-
-\setup{setupcite}
-
-\starttabulate[|l|p|]
-\NC andtext \NC separation between two authors (for \type{\cite[author]} styles)\NC \NR
-\NC otherstext \NC text used for `et.al.' (for \type{\cite[author]} styles)\NC \NR
-\NC namesep \NC the separation between consecutive authors (for \type{\cite[author]} styles)\NC \NR
-\NC pubsep \NC separator between publication references in a
- \type{\cite} command.\NC \NR
-\NC lastpubsep \NC same, but for the
- last publication in the list.\NC \NR
-\NC left \NC left side of a \type{\cite} (like \type{[})\NC \NR
-\NC inbetween \NC the separator between parts of a single citation.\NC\NR
-\NC right \NC right side of a \type{\cite} (like \type{]})\NC \NR
-\NC compress \NC Whether \type{\cite} should try to
-compress its argument list. \NC\NR
-\stoptabulate
-Not all options apply to all types of \type{\cite} commands.
-For example, `compress' does not apply to the citation
-list for every variant of \type{\cite}, since sometimes compression does
-not make sense or is not possible. The `num' version compresses
-into a condensed sorted list, and the various `author' styles try
-to compress all publications by one author, but e.g. years are
-never compressed.
-
-Likewise, `inbetween' only applies to three types: `authoryear' (a
-space), `authoryears' (a comma followed by a space), and `num' (where
-it is `--' (an endash), the character used to separate number ranges).
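-
-As a sketch only (the values are invented for the sake of the example and
-not taken from any shipped style file), one could adjust the `authoryear'
-alternative like this:
-\starttyping
-\setupcite
-  [authoryear]
-  [left=(,
-   right=),
-   inbetween={ },
-   pubsep={; }]
-\stoptyping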
-
-\subsection{Setting up \BIBTEX: \type{\setupbibtex}}
-
-\BIBTEX\ bibliographic databases are converted into \type{.bbl} files,
-and the generated file is just a more \TEX-minded representation of
-the full database(s).
-
-The four \type{.bst} files do not do any actual formatting on the
-entries, and they do not subset the database either. Instead, the
-{\it entire} database is converted into \TEX-parseable records. About the
-only thing the \type{.bst} files do is sorting the entries (and
-\BIBTEX\ itself resolves any `STRING' specifications, of course).
-
-The module will read the created \type{\jobname.bbl} file
-and select the parts that are needed for the current article.
-
-\setup{setupbibtex}
-
-\starttabulate[|l|p|]
-\NC database\NC List of bibtex database file names to be
- used. The module will write a very short \type{.aux} file instructing
- \BIBTEX\ to create a (possibly very large) \type{\jobname.bbl} file,
- that will be \type{\input} by the module (at \type{\starttext}).\NC\NR
-\NC sort\NC How the publications in the
- \BIBTEX\ database file should be sorted.\crlf
- The default here is `no' (\type{cont-no.bst}), meaning no sorting at all.
- `author' (\type{cont-au.bst}) sorts alphabetically on author and within that on year,
- `title' (\type{cont-ti.bst}) sorts alphabetically on title and then on author and
- year, and `short' (\type{cont-ab.bst}) sorts on the short key that is generated
- by \BIBTEX. If \type{FILE} is given, it specifies an individual \type{.bst} file name used
- by \BIBTEX.\NC\NR
-\stoptabulate
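-
-A typical call, shown here only as an illustration (\type{mybib} is a
-placeholder database name, and `author' is one of the sort values listed
-above), could be:
-\starttyping
-\setupbibtex
-  [database=mybib,
-   sort=author]
-\stoptyping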
-
-Starting with version 2006.08.08, the module registers \BIBTEX\ as a
-program to be run by texexec, so you no longer need to run \BIBTEX\ by
-hand (and in MkIV, the module runs \BIBTEX\ on the fly using Lua).
-
-Still, you may want to create the \type{\jobname.bbl} yourself. The
-\type{.bbl} syntax is explained below. There is no default
-database of course, and you do not {\it have} to use one: it is
-perfectly OK to just \type{\input} a file with the bibliographic
-records, as long as it has the right input syntax. Or even to include
-the definitions themselves in the preamble of your document.
-
-\subsection{Borrowing publications: \type{\usepublications}}
-
-It is also possible to instruct the module to use the bibliographic
-references belonging to another document. This is done by using the command
-\type{\usepublications[files]}, where \type{files} is a list of other
-\CONTEXT\ documents (without extension).
-
-\setup{usepublications}
-
-To be precise, this command will use the \type{.bbl} and \type{.tuo}
-files from the other document(s), and will therefore not work if these
-files cannot be found (the \type{.tuo} file is needed to get correct
-page references for \type{\cite[page]}).
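-
-For instance, with \type{otherdoc} standing in for the job name of the
-other document:
-\starttyping
-\usepublications[otherdoc]
-\stoptyping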
-
-
-\subsection{Legacy database support}
-
-Old \BIBTEX\ databases tend to contain \LATEX-specific commands and,
-especially, command||definitions. To make it easier to handle these
-databases, a support module that defines a simplified version of
-\LATEX's \type{\newcommand} is shipped alongside the bib module.
-You can load this support code by adding
-\starttyping
-\usemodule[bibltx]
-\stoptyping
-to your document preamble.
-
-\section{Citations}
-
-Citations are normally handled through the \type{\cite} command.
-
-\type{\cite} has two basic appearances:
-
-\subsection{Default and explicit citations}
-
-\setup{cite}
-
-The single-argument form executes the style-defined default citation
-command. This is the preferred way of usage, since some styles might
-use numeric citations while others might use a variation of the
-(author,year) style.
-
-The two-argument form allows you to manually select the style you want.
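-
-For instance, using the example entry with key \type{me} that was defined
-earlier:
-\starttyping
-\cite[me]
-\cite[authoryear][me]
-\stoptyping
-The first call uses whatever the chosen style defines as its default; the
-second one explicitly asks for the (author, year) form.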
-
-\subsubsection{Citation types}
-
-Following is the full list of recognized keywords for \type{\cite},
-with a short explanation where the data comes from. Most of the
-information that is usable within \type{\cite} comes from the argument
-to \type{\startpublication}. This command is covered in detail below.
-
-
-All of these options are {\it valid} in all publication styles, since
-\CONTEXT\ always has the needed information available. But not all of
-these are {\it sensible} in a particular style: using numbered references if
-the list of publications itself is not numbered is not a good idea, for
-instance. Also, some of the keys are somewhat strange and only
-provided for future extensions.
-
-First, here are the simple ones:
-\starttabulate[|l|l|p|]
-\NC author\NC \cite[author][me] \NC(from `a')\hfil\NC\NR
-\NC doi\NC \cite[doi][me]\NC (from `d')\hfil\NC\NR
-\NC key\NC \cite[key][me]\NC (from `k')\hfil\NC\NR
-\NC serial\NC \cite[serial][me]\NC (from `n')\hfil\NC\NR
-\NC short\NC \cite[short][me]\NC (from `s')\hfil\NC\NR
-\NC type\NC \cite[type][me]\NC (from `t')\hfil\NC\NR
-\NC year\NC \cite[year][me]\NC (from `y')\hfil\NC\NR
-\NC url\NC \cite[url][me]\NC (from `u')\hfil\NC\NR
-\stoptabulate
-Keep in mind that `n' is a database sequence number, and not
-necessarily the same number that is used in the list of
-publications. For instance, if `sorttype' is cite, the list will be
-re-ordered, but the `n' value will remain the same. To get to the
-number that is finally used, use
-\starttabulate[|l|l|p|]
-\NC num\NC \cite[num][me]\NC (this is a reference to
- the sequence number used in the publication list)\hfil\NC\NR
-\stoptabulate
-If the list of publications is not numbered visually, there will still
-be a number available.
-
-Three of the options are combinations:
-\starttabulate[|l|l|p|]
-\NC authoryear\NC \cite[authoryear][me]\NC(from `a' and `y')\hfil\NC\NR
-\NC authoryears\NC \cite[authoryears][me]\NC(from `a' and `y')\hfil\NC\NR
-\NC authornum\NC \cite[authornum][me]\NC(from `a' and `num')\hfil\NC\NR
-\NC data\NC \vtop{\hsize .45\hsize \cite[data][me]}\NC The data content of the entry\hfil\NC\NR
-\stoptabulate
-
-And the last one is a page reference to the page where the
-entry is typeset within the publication list.
-
-\starttabulate[|l|l|p|]
-\NC page\NC \cite[page][me]\NC (a page reference)\hfil\NC\NR
-\stoptabulate
-
-\subsection{Citations with local setups}
-
-\setup{citealt}
-
-The arguments in this form are inherited from \type{\setupcite},
-except for \type{extras}. The argument of `\type{extras}' is typeset
-at the end of the reference, but before a potential `\type{right}', so
-it can be used for e.g. page or chapter specifiers.
-
-\subsection{Invisible citations}
-
-\setup{nocite}
-
-This command registers the references in the argument list, but does
-not generate typeset material. It can be used to force certain entries
-from the database to appear in the typeset list of publications.
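-
-For example, to force the example entry with key \type{me} into the
-publication list without a visible citation:
-\starttyping
-\nocite[me]
-\stoptyping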
-
-\section{Placing the publication list}
-
-To typeset the list of publications, use \type{\completepublications}
-or \type{\placepublications} at the location in your text where you
-want the publication list to appear. As is normal in \CONTEXT,
-\type{\placepublications} gives you a raw list, and
-\type{\completepublications} a list with a title.
-
-
-The default for the publication list is to contain only the `locally'
-referenced items, so if you want to use your own heading instead of
-the default one, you most likely want to call
-\type{\placepublications} with an explicit criterium, like so:
-\starttyping
-\placepublications[criterium=all]
-\stoptyping
-
-If you use a numeric list style combined with multiple
-\type{\placepublications} or \type{\completepublications} commands,
-by default each one restarts the displayed number. If you do not
-like that, you can add an option argument like so:
-\starttyping
-\placepublications[option=continue]
-\stoptyping
-
-
-The module uses the following defaults for the generated head:
-\starttyping
-\setupheadtext[en][pubs=References]
-\setupheadtext[nl][pubs=Literatuur]
-\setupheadtext[de][pubs=Literatur]
-\setupheadtext[it][pubs=Bibliografia]
-\setupheadtext[sl][pubs=Literatura]
-\setupheadtext[fr][pubs=Bibliographie]
-\stoptyping
-These (or new ones) can be redefined as needed.
-
-\section{The bbl file}
-
-A typical bbl file consists of one initial command
-(\type{\setuppublicationlist}) that sets some information
-about the number of entries in the bbl file and the widths
-of the labels for the list, and that command is followed by a number of
-appearances of \type{\startpublication ... \stoppublication}.
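-
-Such an initial command could, purely as an illustration for the single
-example entry given earlier, look like this:
-\starttyping
-\setuppublicationlist[samplesize=TH2006,totalnumber=1]
-\stoptyping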
-
-The full appearance version of \type{\cite}
-accepts a number of option keywords, and we saw earlier that
-the argument of the \type{\startpublication} command
-defines most of the things we can reference to. This section explains
-the precise syntax for \type{\startpublication}.
-
-Each single block defines one bibliographic entry. I apologise
-for the use of single||letter keys, but these have the advantage of
-being a)\quad short and b)\quad safe w.r.t. the multi-lingual interface.
-
-\setup{startpublication}
-
-Here is the full example that has been used throughout this document:
-
-\typebuffer[bibexample]
-
-\subsection{Defining a publication}
-
-The list of commands that is allowed to appear between \type{\startpublication}
-and \type{\stoppublication} is given below.
-
-Order within an entry is irrelevant, except for the relative ordering
-within each of the three commands that might appear more than once:
-\type{\artauthor}, \type{\author} and \type{\editor}.
-
-Most of these are `normal' \BIBTEX\ field names (in lowercase), but
-some are extra special, either because they come from non-standard
-databases that I know of, or because the bst file has pre-processed
-the contents of the field.
-
-\subsubsection{Complex fields}
-
-The three fields that contain names are extra special, because they
-have more than one argument. These are: \type{\artauthor},
-\type{\author} and \type{\editor}. These commands require three
-arguments, and there can be two extra optional arguments as well.
-
-
-\starttabulate[|l|l|p|]
-\NC\tex{artauthor[]\{\}[]\{\}\{\}}\NC\tfx AUTHOR\NC For an author of any publication
- that appears within a larger publication, like an article that appears
- within a journal or as part of a proceedings. \NC\NR
-\NC\tex{author[]\{\}[]\{\}\{\}}\NC\tfx AUTHOR\NC The author of a standalone
- publication, like a monograph.\NC\NR
-\NC\tex{editor[]\{\}[]\{\}\{\}}\NC\tfx EDITOR\NC The editor of e.g.
- an edited volume.\NC\NR
-\stoptabulate
-
-The argument lists have this form:
-
-\starttyping
-\author[junior]{firstnames}[inits]{von}{surname}
-\stoptyping
-
-and the meanings are as follows:
-\starttabulate[|l|p|]
-\NC \type{junior} \NC a designation of lineage, only used if confusion is possible (due to family members having the same name).\NC\NR
-\NC \type{firstnames} \NC individual (given) name(s)\NC\NR
-\NC \type{inits} \NC abbreviated form(s) of \type{firstnames}.\NC\NR
-\NC \type{von} \NC any bits of the family name that are normally disregarded in sorting\NC\NR
-\NC \type{surname} \NC remainder of the family (last) name\NC\NR
-\stoptabulate
-
-
-\subsubsection{Simple fields}
-
-This is rather a large list, caused by the desire to support as many
-existing \BIBTEX\ databases as possible. Please note that a few of
-the fields have names that are not the same as in \BIBTEX, because a
-one||to||one mapping causes conflicts with predefined macro names in
-\CONTEXT.
-
-\starttabulate[|l|p(2.5cm)|p|]
-\NC\type{\abstract}\NC\tfx ABSTRACT\NC just text.\NC\NR
-\NC\type{\annotate}\NC\tfx ANNOTATE \NC just text.\NC\NR
-\NC\type{\arttitle}\NC\tfx TITLE\NC The title of a partial publication (one that has \type{\artauthor}s).\NC\NR
-\NC\type{\assignee}\NC\tfx ASSIGNEE\NC Assigned person for a patent\NC\NR
-\NC\type{\bibnumber}\NC\tfx NUMBER \NC \NC\NR
-\NC\type{\bibtype}\NC\tfx TYPE \NC See the \BIBTEX\
- documentation for its use. This is {\it not} related
- to the type of entry that is used for deciding on the
- layout.\NC\NR
-\NC\type{\biburl}\NC\tfx URL \NC Location on the internet. \NC\NR
-\NC\type{\chapter}\NC\tfx CHAPTER \NC the chapter number, if this entry is
-referring to a smaller section of a publication. It might actually
-be a part number or a (sub)section number. The field \type{\bibtype} (above)
-differentiates between these.\NC\NR
-\NC\type{\city}\NC\tfx CITY\NC city of publication.\NC\NR
-\NC\type{\comment}\NC\tfx COMMENT\NC just text.\NC\NR
-\NC\type{\country}\NC\tfx COUNTRY\NC country of publication.\NC\NR
-\NC\type{\crossref}\NC\tfx CROSSREF\NC A cross-reference to another
- bibliographic entry. It will insert a citation
- to that entry, forcing it to be typeset as well.\NC\NR
-\NC\type{\day}\NC\tfx DAY \NC Date of publication (for a patent)\NC\NR
-\NC\type{\dayfiled}\NC\tfx DAYFILED\NC Filing date for a patent\NC\NR
-\NC\type{\doi}\NC\tfx DOI \NC Document Object Identifier\NC\NR
-\NC\type{\edition}\NC\tfx EDITION\NC The edition.\NC\NR
-\NC\type{\eprint}\NC\tfx EPRINT\NC E-print information\NC\NR
-\NC\type{\howpublished}\NC\tfx HOWPUBLISHED\NC \NC\NR
-\NC\type{\isbn}\NC\tfx ISBN\NC isbn number (for books)\NC\NR
-\NC\type{\issn}\NC\tfx ISSN\NC issn number (for journals)\NC\NR
-\NC\type{\issue}\NC\tfx ISSUE\NC issue number (for journals)\NC\NR
-\NC\type{\journal}\NC\tfx JOURNAL \NC The journal's name.\NC\NR
-\NC\type{\keyword}\NC\tfx KEYWORD \NC just text (for use in indices).\NC\NR
-\NC\type{\keywords}\NC\tfx KEYWORDS \NC just text (for use in indices).\NC\NR
-\NC\type{\lang}\NC\tfx LANGUAGE \NC The language of the
- current bibliographic record\NC\NR
-\NC\type{\month}\NC\tfx MONTH\NC Month of publication\NC\NR
-\NC\type{\monthfiled}\NC\tfx MONTHFILED\NC Filing month for a patent\NC\NR
-\NC\type{\names}\NC\tfx NAMES\NC just text (for use in indices).\NC\NR
-\NC\type{\nationality}\NC\tfx NATIONALITY\NC Nationality information for a patent\NC\NR
-\NC\type{\note}\NC\tfx NOTE \NC just text (this is the `standard' \BIBTEX\ commentary field).\NC\NR
-\NC\type{\notes}\NC\tfx NOTES \NC just text.\NC\NR
-\NC\type{\organization}\NC\tfx ORGANIZATION\NC Like institute, but for e.g. companies.\NC\NR
-\NC\type{\pages}\NC\tfx PAGES\NC Either the number of pages, or the page range
- for a partial publication. The `t' key to startpublication
- will decide automatically what is meant.\NC\NR
-\NC\type{\pubname}\NC\tfx INSTITUTION,\crlf PUBLISHER,\crlf SCHOOL\NC Publisher or institution name.\NC\NR
-\NC\type{\pubyear}\NC\tfx YEAR \NC Year of publication. Within this command,
- the \BIBTEX\ bst files will sometimes insert the command
- \type{\maybeyear}, which is needed to make sure that
- the bbl file stays flexible enough to allow all styles of
- formatting.\NC\NR
-\NC\type{\revision}\NC\tfx REVISION \NC Release version\NC\NR
-\NC\type{\series}\NC\tfx SERIES \NC Possible book series information.\NC\NR
-\NC\type{\size}\NC\tfx SIZE \NC Size in KB of a PDF file (this came from
- the NTG \MAPS\ database)\NC\NR
-\NC\type{\thekey}\NC\tfx KEY \NC See the \BIBTEX\
- documentation for its use. This is {\it not} related to
- the key used for citing this entry.\NC\NR
-\NC\type{\title}\NC\tfx TITLE,\crlf BOOKTITLE \NC The title of a book.\NC\NR
-\NC\type{\volume}\NC\tfx VOLUME \NC Volume number for multi-part books or
- journals.\NC\NR
-\NC\type{\yearfiled}\NC\tfx YEARFILED\NC Filing year for a patent\NC\NR
-\stoptabulate
-
-When the \type{\lang} field's content is a full word instead of a
-two||letter code, correct processing depends on an auxiliary command
-\type{\setbiblanguage}, to be used like this:
-\starttyping
-\setbiblanguage{English}{en}
-\stoptyping
-The first argument is a literal \type{\lang} argument, the second
-argument has to be a two||letter language abbreviation understood by
-\CONTEXT.
-
-Adding in one of your own fields is reasonably simple:
-
-\starttyping
-\newbibfield[mycommand]
-\stoptyping
-This will define \type{\mycommand} for use within
-a publication (plus \type{\bib@mycommand}, its internal form) as
-well as the command \type{\insertmycommand} that can be used
-within \type{\setuppublicationlayout} to fetch the supplied
-value (see below).
-
-
-\section{Defining a publication type layout}
-
-Publication style files of course take care of setting defaults for the
-commands as explained earlier, but the largest part of such a
-publication style is concerned with specifying layouts for various
-types of publications.
-
-The command that does the work is \type{\setuppublicationlayout}:
-
-\setup{setuppublicationlayout}
-
-The first argument is a publication (\BIBTEX\ entry) type, and
-all publications that have this type given as argument to the `t' key
-of \type{\startpublication} will be typeset by executing the commands
-that appear in the group following the command.
-
-For example, here is a possible way to typeset an article, from \type{bibl-apa}:
-\starttyping
-\setuppublicationlayout[article]{%
- \insertartauthors{}{ }{\insertthekey{}{ }{}}%
- \insertpubyear{(}{). }{\unskip.}%
- \insertarttitle{\bgroup }{\egroup. }{}%
- \insertjournal{\bgroup \it}{\egroup}
- {\insertcrossref{In }{}{}}%
- \insertvolume
- {, }
- {\insertissue{(}{)}{}\insertpages{:}{.}{.}}
- {\insertpages{, pages }{.}{.}}%
- \insertnote{ }{.}{}%
- \insertcomment{}{.}{}%
-}
-\stoptyping
-For every command in the long list given in the previous paragraph, there is
-a corresponding \type{\insertxxx} command. (As usual, \type{\author}
-etc. are special: they have a macro called \type{\insertxxxs}
-instead). All of these \type{\insertxxx} macros use the same logic:
-
-\starttyping
-\insertartauthors{<before>}{<after>}{<not found>}
-\stoptyping
-
-Sounds easy? It is! But it is also often tedious, since database entries can
-be tricky things: some without issue numbers, others without page
-numbers, some even without authors. So, you often need to nest rather
-a lot of commands in the \type{<not found>} section of the `upper'
-command, and \type{\unskip} and \type{\ignorespaces} are good friends
-as well.
-
-Incidentally, the distributed \type{bibl-xxx} files define layouts for
-the `standard' publication types that are defined in the example
-bibliography that comes with \BIBTEX. The list of possible types is in
-no way limited to that list, but it provides a reasonable starting
-point.
-
-\section{References}
-
-\placepublications[criterium=all]
-
-\stopdocumentation
-
-\stopmodule
-
-\stoptext
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.html b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.html
new file mode 100644
index 00000000000..0cb8799c261
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.html
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>CtxTools 1.3.5</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">CtxTools 1.3.5 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--touchcontextfile</th><td></td><td>update context version</td></tr>
+ <tr><th>--contextversion</th><td></td><td>report context version</td></tr>
+ <tr><th>--jeditinterface</th><td></td><td>generate jedit syntax files [--pipe]</td></tr>
+ <tr><th>--bbeditinterface</th><td></td><td>generate bbedit syntax files [--pipe]</td></tr>
+ <tr><th>--sciteinterface</th><td></td><td>generate scite syntax files [--pipe]</td></tr>
+ <tr><th>--rawinterface</th><td></td><td>generate raw syntax files [--pipe]</td></tr>
+ <tr><th>--translateinterface</th><td></td><td>generate interface files (xml) [nl de ..]</td></tr>
+ <tr><th>--purgefiles</th><td></td><td>remove temporary files [--all --recurse] [basename]</td></tr>
+ <tr><th>--documentation generate documentation [--type</th><td>]</td><td>[filename]</td></tr>
+ <tr><th>--filterpages'</th><td></td><td>) # no help, hidden temporary feature</td></tr>
+ <tr><th>--dpxmapfiles</th><td></td><td>convert pdftex mapfiles to dvipdfmx [--force] [texmfroot]</td></tr>
+ <tr><th>--listentities</th><td></td><td>create doctype entity definition from enco-uc.tex</td></tr>
+ <tr><th>--brandfiles</th><td></td><td>add context copyright notice [--force]</td></tr>
+ <tr><th>--platformize</th><td></td><td>replace line-endings [--recurse --force] [pattern]</td></tr>
+  <tr><th>--dependencies analyze dependencies within context [--save --compact --filter</th><td>[macros|filenames] ]</td><td>[filename]</td></tr>
+ <tr><th>--updatecontext</th><td></td><td>download latest version and remake formats [--proxy]</td></tr>
+ <tr><th>--disarmutfbom</th><td></td><td>remove utf bom [--force]</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.man b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.man
new file mode 100644
index 00000000000..93218f2295f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.man
@@ -0,0 +1,75 @@
+.TH "ctxtools" "1" "01-01-2013" "version 1.3.5" "CtxTools"
+.SH NAME
+.B ctxtools
+.SH SYNOPSIS
+.B ctxtools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B CtxTools
+.SH OPTIONS
+.TP
+.B --touchcontextfile
+update context version
+.TP
+.B --contextversion
+report context version
+.TP
+.B --jeditinterface
+generate jedit syntax files [--pipe]
+.TP
+.B --bbeditinterface
+generate bbedit syntax files [--pipe]
+.TP
+.B --sciteinterface
+generate scite syntax files [--pipe]
+.TP
+.B --rawinterface
+generate raw syntax files [--pipe]
+.TP
+.B --translateinterface
+generate interface files (xml) [nl de ..]
+.TP
+.B --purgefiles
+remove temporary files [--all --recurse] [basename]
+.TP
+.B --documentation generate documentation [--type=]
+[filename]
+.TP
+.B --filterpages'
+) # no help, hidden temporary feature
+.TP
+.B --dpxmapfiles
+convert pdftex mapfiles to dvipdfmx [--force] [texmfroot]
+.TP
+.B --listentities
+create doctype entity definition from enco-uc.tex
+.TP
+.B --brandfiles
+add context copyright notice [--force]
+.TP
+.B --platformize
+replace line-endings [--recurse --force] [pattern]
+.TP
+.B --dependencies analyze dependencies within context [--save --compact --filter=[macros|filenames] ]
+[filename]
+.TP
+.B --updatecontext
+download latest version and remake formats [--proxy]
+.TP
+.B --disarmutfbom
+remove utf bom [--force]
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.xml b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.xml
new file mode 100644
index 00000000000..8f51cd93a0d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/ctxtools.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">ctxtools</entry>
+ <entry name="detail">CtxTools</entry>
+ <entry name="version">1.3.5</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="touchcontextfile"><short>update context version</short></flag>
+ <flag name="contextversion"><short>report context version</short></flag>
+ <flag name="jeditinterface"><short>generate jedit syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="bbeditinterface"><short>generate bbedit syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="sciteinterface"><short>generate scite syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="rawinterface"><short>generate raw syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="translateinterface"><short>generate interface files (xml) [nl de ..]</short></flag>
+ <flag name="purgefiles"><short>remove temporary files [<ref name="all"/> <ref name="recurse]"/> [basename]</short></flag>
+ <flag name="documentation generate documentation [--type" value="]"><short>[filename]</short></flag>
+ <flag name="filterpages'"><short>) # no help, hidden temporary feature</short></flag>
+ <flag name="dpxmapfiles"><short>convert pdftex mapfiles to dvipdfmx [<ref name="force]"/> [texmfroot]</short></flag>
+ <flag name="listentities"><short>create doctype entity definition from enco-uc.tex</short></flag>
+ <flag name="brandfiles"><short>add context copyright notice [<ref name="force]"/></short></flag>
+ <flag name="platformize"><short>replace line-endings [<ref name="recurse"/> <ref name="force]"/> [pattern]</short></flag>
+ <flag name="dependencies analyze depedencies within context [--save --compact --filter" value="[macros|filenames] ]"><short>[filename]</short></flag>
+ <flag name="updatecontext"><short>download latest version and remake formats [<ref name="proxy]"/></short></flag>
+ <flag name="disarmutfbom"><short>remove utf bom [<ref name="force]"/></short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.html b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.html
new file mode 100644
index 00000000000..37d447aef12
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.html
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ImgToPdf 1.1.2</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ImgToPdf 1.1.2 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--convert</th><td></td><td>convert image into pdf</td></tr>
+ <tr><th>--compression</th><td></td><td>level of compression in percent</td></tr>
+ <tr><th>--depth</th><td></td><td>image depth in bits</td></tr>
+ <tr><th>--colorspace</th><td></td><td> colorspace (rgb,cmyk,gray)</td></tr>
+ <tr><th>--quality</th><td></td><td>quality in percent</td></tr>
+ <tr><th>--inputpath</th><td></td><td>path where files are looked for</td></tr>
+ <tr><th>--outputpath</th><td></td><td>path where files end up</td></tr>
+ <tr><th>--auto</th><td></td><td>determine settings automatically</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.man b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.man
new file mode 100644
index 00000000000..5f165363a77
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.man
@@ -0,0 +1,48 @@
+.TH "imgtopdf" "1" "01-01-2013" "version 1.1.2" "ImgToPdf"
+.SH NAME
+.B imgtopdf
+.SH SYNOPSIS
+.B imgtopdf [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ImgToPdf
+.SH OPTIONS
+.TP
+.B --convert
+convert image into pdf
+.TP
+.B --compression
+level of compression in percent
+.TP
+.B --depth
+image depth in bits
+.TP
+.B --colorspace
+ colorspace (rgb,cmyk,gray)
+.TP
+.B --quality
+quality in percent
+.TP
+.B --inputpath
+path where files are looked for
+.TP
+.B --outputpath
+path where files end up
+.TP
+.B --auto
+determine settings automatically
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.xml b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.xml
new file mode 100644
index 00000000000..b516852c3d2
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/imgtopdf.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">imgtopdf</entry>
+ <entry name="detail">ImgToPdf</entry>
+ <entry name="version">1.1.2</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>convert image into pdf</short></flag>
+ <flag name="compression"><short>level of compression in percent</short></flag>
+ <flag name="depth"><short>image depth in bits</short></flag>
+ <flag name="colorspace"><short> colorspace (rgb,cmyk,gray)</short></flag>
+ <flag name="quality"><short>quality in percent</short></flag>
+ <flag name="inputpath"><short>path where files are looked for</short></flag>
+ <flag name="outputpath"><short>path where files end up</short></flag>
+ <flag name="auto"><short>determine settings automatically</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.html b/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.html
new file mode 100644
index 00000000000..e80a3a342ac
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>MPtoPDF 1.4.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">MPtoPDF 1.4.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--metafun</th><td></td><td>use the metafun format to process the file (default is mpost)</td></tr>
+ <tr><th>--texexec</th><td></td><td>use texexec (context) to process text snippets</td></tr>
+ <tr><th>--latex</th><td></td><td>use latex to process text snippets</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.xml b/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.xml
new file mode 100644
index 00000000000..a3b06a23136
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/mptopdf.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mptopdf</entry>
+ <entry name="detail">convert MetaPost to PDF</entry>
+ <entry name="version">1.4.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="metafun"><short>use the metafun format to process the file (default is mpost)</short></flag>
+ <flag name="texexec"><short>use texexec (context) to process text snippets</short></flag>
+ <flag name="latex"><short>use latex to process text snippets</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
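The application XML above follows the same application/metadata/flags/category/subcategory/flag nesting used by every mkii script description added in this commit; the .html and .man files carry the same flag data in rendered form. As a minimal sketch (illustration only, not ConTeXt code; the local path is an assumption), the flag summaries can be read back with Python's standard xml.etree module:

import xml.etree.ElementTree as ET

# Parse one of the application description files added above; every mkii
# tool in this commit uses the same <application>/<flags>/.../<flag> nesting.
root = ET.parse("mptopdf.xml").getroot()
tool = root.findtext("metadata/entry[@name='name']")
for flag in root.iter("flag"):
    short = flag.find("short")
    text = "".join(short.itertext()) if short is not None else ""
    print(f"{tool} --{flag.get('name')}: {text}")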
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pdftools.html b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.html
new file mode 100644
index 00000000000..4c1e7628100
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.html
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>PDFTools 1.2.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">PDFTools 1.2.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--spotimage filename --colorspec</th><td></td><td>--colorname= [--retain --invert --subpath=]</td></tr>
+ <tr><th>--colorimage filename --colorspec</th><td></td><td>[--retain --invert --colorname= ]</td></tr>
+ <tr><th>--convertimage</th><td></td><td>filename [--retain --subpath]</td></tr>
+ <tr><th>--downsampleimage</th><td></td><td>filename [--retain --subpath --lowres --normal]</td></tr>
+ <tr><th>--info</th><td></td><td>filename</td></tr>
+ <tr><th>--countpages</th><td></td><td>[--pattern --threshold]</td></tr>
+ <tr><th>--checkembedded</th><td></td><td>[--pattern]</td></tr>
+ <tr><th>--analyzefile</th><td></td><td>filename</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pdftools.man b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.man
new file mode 100644
index 00000000000..2f182ed0235
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.man
@@ -0,0 +1,48 @@
+.TH "pdftools" "1" "01-01-2013" "version 1.2.1" "PDFTools"
+.SH NAME
+.B pdftools
+.SH SYNOPSIS
+.B pdftools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B PDFTools
+.SH OPTIONS
+.TP
+.B --spotimage filename --colorspec
+--colorname= [--retain --invert --subpath=]
+.TP
+.B --colorimage filename --colorspec
+[--retain --invert --colorname= ]
+.TP
+.B --convertimage
+filename [--retain --subpath]
+.TP
+.B --downsampleimage
+filename [--retain --subpath --lowres --normal]
+.TP
+.B --info
+filename
+.TP
+.B --countpages
+[--pattern --threshold]
+.TP
+.B --checkembedded
+[--pattern]
+.TP
+.B --analyzefile
+filename
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
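The options above translate directly into command-line invocations of the pdftools stub. A hedged usage sketch, assuming the linked script is on PATH and using a placeholder file name (only the --info flag from the listing above is exercised):

import subprocess

# Placeholder invocation of a flag documented above; "sample.pdf" is an
# assumed example file, not something shipped with this commit.
subprocess.run(["pdftools", "--info", "sample.pdf"], check=True)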
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pdftools.xml b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.xml
new file mode 100644
index 00000000000..292ef4f88f2
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pdftools.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">pdftools</entry>
+ <entry name="detail">PDFTools</entry>
+ <entry name="version">1.2.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="spotimage filename --colorspec" value=""><short><ref name="colorname="/> [<ref name="retain"/> <ref name="invert"/> <ref name="subpath=]"/></short></flag>
+ <flag name="colorimage filename --colorspec" value=""><short>[<ref name="retain"/> <ref name="invert"/> <ref name="colorname="/> ]</short></flag>
+ <flag name="convertimage"><short>filename [<ref name="retain"/> <ref name="subpath]"/></short></flag>
+ <flag name="downsampleimage"><short>filename [<ref name="retain"/> <ref name="subpath"/> <ref name="lowres"/> <ref name="normal]"/></short></flag>
+ <flag name="info"><short>filename</short></flag>
+ <flag name="countpages"><short>[<ref name="pattern"/> <ref name="threshold]"/></short></flag>
+ <flag name="checkembedded"><short>[<ref name="pattern]"/></short></flag>
+ <flag name="analyzefile"><short>filename</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.html b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.html
new file mode 100644
index 00000000000..17f9c002dd8
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.html
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>PStoPDF 2.0.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">PStoPDF 2.0.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--request</th><td></td><td>handles exa request file</td></tr>
+ <tr><th>--watch</th><td></td><td>watch folders for conversions (untested)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.man b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.man
new file mode 100644
index 00000000000..71786ce8b79
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.man
@@ -0,0 +1,30 @@
+.TH "pstopdf" "1" "01-01-2013" "version 2.0.1" "PStoPDF"
+.SH NAME
+.B pstopdf
+.SH SYNOPSIS
+.B pstopdf [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B PStoPDF
+.SH OPTIONS
+.TP
+.B --request
+handles exa request file
+.TP
+.B --watch
+watch folders for conversions (untested)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.xml b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.xml
new file mode 100644
index 00000000000..d7a7de0ae87
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/pstopdf.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">pstopdf</entry>
+ <entry name="detail">PStoPDF</entry>
+ <entry name="version">2.0.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="request"><short>handles exa request file</short></flag>
+ <flag name="watch"><short>watch folders for conversions (untested)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.html b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.html
new file mode 100644
index 00000000000..a2e39d28677
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.html
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>RlxTools 1.0.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">RlxTools 1.0.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--manipulate</th><td></td><td>[--test] manipulatorfile resourselog</td></tr>
+ <tr><th>--identify</th><td></td><td>[--collect] filename</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.man b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.man
new file mode 100644
index 00000000000..f1bc70b1cc9
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.man
@@ -0,0 +1,30 @@
+.TH "rlxtools" "1" "01-01-2013" "version 1.0.1" "RlxTools"
+.SH NAME
+.B rlxtools
+.SH SYNOPSIS
+.B rlxtools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B RlxTools
+.SH OPTIONS
+.TP
+.B --manipulate
+[--test] manipulatorfile resourselog
+.TP
+.B --identify
+[--collect] filename
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.xml b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.xml
new file mode 100644
index 00000000000..8a221eccf63
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/rlxtools.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">rlxtools</entry>
+ <entry name="detail">RlxTools</entry>
+ <entry name="version">1.0.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="manipulate"><short>[<ref name="test]"/> manipulatorfile resourselog</short></flag>
+ <flag name="identify"><short>[<ref name="collect]"/> filename</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texexec.html b/Master/texmf-dist/doc/context/scripts/mkii/texexec.html
new file mode 100644
index 00000000000..6b0d7c4bec2
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texexec.html
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>TeXExec 6.2.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">TeXExec 6.2.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--make</th><td></td><td>make formats</td></tr>
+ <tr><th>--check</th><td></td><td>check versions</td></tr>
+ <tr><th>--process</th><td></td><td>process file</td></tr>
+ <tr><th>--mptex</th><td></td><td>process mp file</td></tr>
+ <tr><th>--mpxtex</th><td></td><td>process mpx file</td></tr>
+ <tr><th>--mpgraphic</th><td></td><td>process mp file to stand-alone graphics</td></tr>
+ <tr><th>--mpstatic</th><td></td><td>process mp/ctx file to stand-alone graphics</td></tr>
+ <tr><th>--listing</th><td></td><td>list of file content</td></tr>
+ <tr><th>--figures</th><td></td><td>generate overview of figures</td></tr>
+ <tr><th>--modules</th><td></td><td>generate module documentation</td></tr>
+ <tr><th>--pdfarrange</th><td></td><td>impose pages (booklets)</td></tr>
+ <tr><th>--pdfselect</th><td></td><td>select pages from file(s)</td></tr>
+ <tr><th>--pdfcopy</th><td></td><td>copy pages from file(s)</td></tr>
+ <tr><th>--pdftrim</th><td></td><td>trim pages from file(s)</td></tr>
+ <tr><th>--pdfcombine</th><td></td><td>combine multiple pages</td></tr>
+ <tr><th>--pdfsplit</th><td></td><td>split file into pages</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texexec.man b/Master/texmf-dist/doc/context/scripts/mkii/texexec.man
new file mode 100644
index 00000000000..54213a30776
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texexec.man
@@ -0,0 +1,72 @@
+.TH "texexec" "1" "01-01-2013" "version 6.2.1" "TeXExec"
+.SH NAME
+.B texexec
+.SH SYNOPSIS
+.B texexec [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXExec
+.SH OPTIONS
+.TP
+.B --make
+make formats
+.TP
+.B --check
+check versions
+.TP
+.B --process
+process file
+.TP
+.B --mptex
+process mp file
+.TP
+.B --mpxtex
+process mpx file
+.TP
+.B --mpgraphic
+process mp file to stand-alone graphics
+.TP
+.B --mpstatic
+process mp/ctx file to stand-alone graphics
+.TP
+.B --listing
+list of file content
+.TP
+.B --figures
+generate overview of figures
+.TP
+.B --modules
+generate module documentation
+.TP
+.B --pdfarrange
+impose pages (booklets)
+.TP
+.B --pdfselect
+select pages from file(s)
+.TP
+.B --pdfcopy
+copy pages from file(s)
+.TP
+.B --pdftrim
+trim pages from file(s)
+.TP
+.B --pdfcombine
+combine multiple pages
+.TP
+.B --pdfsplit
+split file into pages
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texexec.xml b/Master/texmf-dist/doc/context/scripts/mkii/texexec.xml
new file mode 100644
index 00000000000..8f85a4f2502
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texexec.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">texexec</entry>
+ <entry name="detail">TeXExec</entry>
+ <entry name="version">6.2.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="make"><short>make formats</short></flag>
+ <flag name="check"><short>check versions</short></flag>
+ <flag name="process"><short>process file</short></flag>
+ <flag name="mptex"><short>process mp file</short></flag>
+ <flag name="mpxtex"><short>process mpx file</short></flag>
+ <flag name="mpgraphic"><short>process mp file to stand-alone graphics</short></flag>
+ <flag name="mpstatic"><short>process mp/ctx file to stand-alone graphics</short></flag>
+ <flag name="listing"><short>list of file content</short></flag>
+ <flag name="figures"><short>generate overview of figures</short></flag>
+ <flag name="modules"><short>generate module documentation</short></flag>
+ <flag name="pdfarrange"><short>impose pages (booklets)</short></flag>
+ <flag name="pdfselect"><short>select pages from file(s)</short></flag>
+ <flag name="pdfcopy"><short>copy pages from file(s)</short></flag>
+ <flag name="pdftrim"><short>trim pages from file(s)</short></flag>
+ <flag name="pdfcombine"><short>combine multiple pages</short></flag>
+ <flag name="pdfsplit"><short>split file into pages</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
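The texexec.xml description carries the same flag data that appears in texexec.man and texexec.html earlier in this diff. A hypothetical helper (an illustration only, not part of ConTeXt) that rebuilds a man-style OPTIONS block from such an XML file:

import xml.etree.ElementTree as ET

def options_from_xml(path: str) -> str:
    # Emit .TP / .B / description lines in the layout used by the .man
    # files in this commit; flags with a value render as --name=value.
    root = ET.parse(path).getroot()
    lines = [".SH OPTIONS"]
    for flag in root.iter("flag"):
        name, value = flag.get("name"), flag.get("value")
        head = f"--{name}={value}" if value else f"--{name}"
        short = flag.find("short")
        desc = "".join(short.itertext()) if short is not None else ""
        lines += [".TP", f".B {head}", desc]
    return "\n".join(lines)

print(options_from_xml("texexec.xml"))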
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.html b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.html
new file mode 100644
index 00000000000..534e95671de
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.html
@@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>texmfstart 7.0.0</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">texmfstart 7.0.0 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--script</th><td></td><td>run an mtx script (lua preferred method) (--noquotes), no script gives list</td></tr>
+ <tr><th>--execute</th><td></td><td>run a script or program (texmfstart method) (--noquotes)</td></tr>
+ <tr><th>--resolve</th><td></td><td>resolve prefixed arguments</td></tr>
+ <tr><th>--ctxlua</th><td></td><td>run internally (using preloaded libs)</td></tr>
+ <tr><th>--internal</th><td></td><td>run script using built-in libraries (same as --ctxlua)</td></tr>
+ <tr><th>--locate</th><td></td><td>locate given filename in database (default) or system (--first --all --detail)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--autotree</th><td></td><td>use texmf tree cf. env texmfstart_tree or texmfstarttree</td></tr>
+ <tr><th>--tree</th><td>pathtotree</td><td>use given texmf tree (default file: setuptex.tmf)</td></tr>
+ <tr><th>--environment</th><td>name</td><td>use given (tmf) environment file</td></tr>
+ <tr><th>--path</th><td>runpath</td><td>go to given path before execution</td></tr>
+ <tr><th>--ifchanged</th><td>filename</td><td>only execute when given file has changed (md checksum)</td></tr>
+ <tr><th>--iftouched</th><td>old,new</td><td>only execute when given file has changed (time stamp)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--makestubs</th><td></td><td>create stubs for (context related) scripts</td></tr>
+ <tr><th>--removestubs</th><td></td><td>remove stubs for (context related) scripts</td></tr>
+ <tr><th>--stubpath</th><td>binpath</td><td>paths where stubs will be written</td></tr>
+ <tr><th>--windows</th><td></td><td>create windows (mswin) stubs</td></tr>
+ <tr><th>--unix</th><td></td><td>create unix (linux) stubs</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
+ <tr><th>--progname</th><td>str</td><td>format or backend</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--edit</th><td></td><td>launch editor with found file</td></tr>
+ <tr><th>--launch</th><td></td><td>launch files like manuals, assumes os support (--all)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--timedrun</th><td></td><td>run a script and time its run</td></tr>
+ <tr><th>--autogenerate</th><td></td><td>regenerate databases if needed (handy when used to run context in an editor)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--usekpse</th><td></td><td>use kpse as fallback (when no mkiv and cache installed, often slower)</td></tr>
+ <tr><th>--forcekpse</th><td></td><td>force using kpse (handy when no mkiv and cache installed but less functionality)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--prefixes</th><td></td><td>show supported prefixes</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--variables</th><td></td><td>show configuration variables</td></tr>
+ <tr><th>--configurations</th><td></td><td>show configuration order</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--directives</th><td></td><td>show (known) directives</td></tr>
+ <tr><th>--trackers</th><td></td><td>show (known) trackers</td></tr>
+ <tr><th>--experiments</th><td></td><td>show (known) experiments</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--expand-braces</th><td></td><td>expand complex variable</td></tr>
+ <tr><th>--expand-path</th><td></td><td>expand variable (resolve paths)</td></tr>
+ <tr><th>--expand-var</th><td></td><td>expand variable (resolve references)</td></tr>
+ <tr><th>--show-path</th><td></td><td>show path expansion of ...</td></tr>
+ <tr><th>--var-value</th><td></td><td>report value of variable</td></tr>
+ <tr><th>--find-file</th><td></td><td>report file location</td></tr>
+ <tr><th>--find-path</th><td></td><td>report path of file</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pattern</th><td>string</td><td>filter variables</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.man b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.man
new file mode 100644
index 00000000000..212097ff153
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.man
@@ -0,0 +1,147 @@
+.TH "mtxrun" "1" "01-01-2013" "version 1.31" "ConTeXt TDS Runner Tool"
+.SH NAME
+.B mtxrun
+.SH SYNOPSIS
+.B mtxrun [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Runner Tool
+.SH OPTIONS
+.TP
+.B --script
+run an mtx script (lua preferred method) (--noquotes), no script gives list
+.TP
+.B --execute
+run a script or program (texmfstart method) (--noquotes)
+.TP
+.B --resolve
+resolve prefixed arguments
+.TP
+.B --ctxlua
+run internally (using preloaded libs)
+.TP
+.B --internal
+run script using built-in libraries (same as --ctxlua)
+.TP
+.B --locate
+locate given filename in database (default) or system (--first --all --detail)
+.TP
+.B --autotree
+use texmf tree cf. env texmfstart_tree or texmfstarttree
+.TP
+.B --tree=pathtotree
+use given texmf tree (default file: setuptex.tmf)
+.TP
+.B --environment=name
+use given (tmf) environment file
+.TP
+.B --path=runpath
+go to given path before execution
+.TP
+.B --ifchanged=filename
+only execute when given file has changed (md checksum)
+.TP
+.B --iftouched=old,new
+only execute when given file has changed (time stamp)
+.TP
+.B --makestubs
+create stubs for (context related) scripts
+.TP
+.B --removestubs
+remove stubs for (context related) scripts
+.TP
+.B --stubpath=binpath
+paths where stubs will be written
+.TP
+.B --windows
+create windows (mswin) stubs
+.TP
+.B --unix
+create unix (linux) stubs
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --trackers=list
+enable given trackers
+.TP
+.B --progname=str
+format or backend
+.TP
+.B --edit
+launch editor with found file
+.TP
+.B --launch
+launch files like manuals, assumes os support (--all)
+.TP
+.B --timedrun
+run a script and time its run
+.TP
+.B --autogenerate
+regenerate databases if needed (handy when used to run context in an editor)
+.TP
+.B --usekpse
+use kpse as fallback (when no mkiv and cache installed, often slower)
+.TP
+.B --forcekpse
+force using kpse (handy when no mkiv and cache installed but less functionality)
+.TP
+.B --prefixes
+show supported prefixes
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --directives
+show (known) directives
+.TP
+.B --trackers
+show (known) trackers
+.TP
+.B --experiments
+show (known) experiments
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --pattern=string
+filter variables
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
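The --ifchanged=filename option above only reruns work when the given file's checksum differs from the one recorded earlier. A rough sketch of that idea, reading "md checksum" as MD5 (the file names and the .md5 stamp convention are assumptions for illustration, not mtxrun internals):

import hashlib
import pathlib
import subprocess

def run_if_changed(filename: str, command: list[str]) -> None:
    # Run the command only when the file's MD5 checksum has changed,
    # mirroring the behaviour described for --ifchanged above.
    src = pathlib.Path(filename)
    stamp = pathlib.Path(filename + ".md5")   # assumed stamp file location
    digest = hashlib.md5(src.read_bytes()).hexdigest()
    if not stamp.exists() or stamp.read_text() != digest:
        subprocess.run(command, check=True)
        stamp.write_text(digest)

run_if_changed("product.tex", ["texexec", "product.tex"])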
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.xml b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.xml
new file mode 100644
index 00000000000..249a9bb54ad
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texmfstart.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0"?><application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="script"><short>run an mtx script (lua preferred method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built-in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+ <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs for (context related) scripts</short></flag>
+ <flag name="stubpath" value="binpath"><short>paths where stubs will be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application> \ No newline at end of file
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/textools.html b/Master/texmf-dist/doc/context/scripts/mkii/textools.html
new file mode 100644
index 00000000000..8874050fb8b
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/textools.html
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>TeXTools 1.3.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">TeXTools 1.3.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--removemapnames</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--restoremapnames</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--hidemapnames</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--videmapnames</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--findfile</th><td></td><td>filename [--recurse]</td></tr>
+ <tr><th>--unzipfiles</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--fixafmfiles</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--mactodos</th><td></td><td>[pattern] [--recurse]</td></tr>
+ <tr><th>--fixtexmftrees</th><td></td><td>[texmfroot] [--force]</td></tr>
+ <tr><th>--replacefile</th><td></td><td>filename [--force]</td></tr>
+ <tr><th>--updatetree</th><td></td><td>fromroot toroot [--force --nocheck --merge --delete]</td></tr>
+ <tr><th>--downcasefilenames</th><td></td><td>[--recurse] [--force]</td></tr>
+ <tr><th>--stripformfeeds</th><td></td><td>[--recurse] [--force]</td></tr>
+ <tr><th>--showfont</th><td></td><td>filename</td></tr>
+ <tr><th>--encmake</th><td></td><td>afmfile encodingname</td></tr>
+ <tr><th>--tpmmake</th><td></td><td>tpm file (run in texmf root)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/textools.man b/Master/texmf-dist/doc/context/scripts/mkii/textools.man
new file mode 100644
index 00000000000..f8aba4683ea
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/textools.man
@@ -0,0 +1,72 @@
+.TH "textools" "1" "01-01-2013" "version 1.3.1" "TeXTools"
+.SH NAME
+.B textools
+.SH SYNOPSIS
+.B textools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXTools
+.SH OPTIONS
+.TP
+.B --removemapnames
+[pattern] [--recurse]
+.TP
+.B --restoremapnames
+[pattern] [--recurse]
+.TP
+.B --hidemapnames
+[pattern] [--recurse]
+.TP
+.B --videmapnames
+[pattern] [--recurse]
+.TP
+.B --findfile
+filename [--recurse]
+.TP
+.B --unzipfiles
+[pattern] [--recurse]
+.TP
+.B --fixafmfiles
+[pattern] [--recurse]
+.TP
+.B --mactodos
+[pattern] [--recurse]
+.TP
+.B --fixtexmftrees
+[texmfroot] [--force]
+.TP
+.B --replacefile
+filename [--force]
+.TP
+.B --updatetree
+fromroot toroot [--force --nocheck --merge --delete]
+.TP
+.B --downcasefilenames
+[--recurse] [--force]
+.TP
+.B --stripformfeeds
+[--recurse] [--force]
+.TP
+.B --showfont
+filename
+.TP
+.B --encmake
+afmfile encodingname
+.TP
+.B --tpmmake
+tpm file (run in texmf root)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/textools.xml b/Master/texmf-dist/doc/context/scripts/mkii/textools.xml
new file mode 100644
index 00000000000..963c4a3a4d9
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/textools.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">textools</entry>
+ <entry name="detail">TeXTools</entry>
+ <entry name="version">1.3.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+     <flag name="removemapnames"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="restoremapnames"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="hidemapnames"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="videmapnames"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="findfile"><short>filename [<ref name="recurse"/>]</short></flag>
+     <flag name="unzipfiles"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="fixafmfiles"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="mactodos"><short>[pattern] [<ref name="recurse"/>]</short></flag>
+     <flag name="fixtexmftrees"><short>[texmfroot] [<ref name="force"/>]</short></flag>
+     <flag name="replacefile"><short>filename [<ref name="force"/>]</short></flag>
+     <flag name="updatetree"><short>fromroot toroot [<ref name="force"/> <ref name="nocheck"/> <ref name="merge"/> <ref name="delete"/>]</short></flag>
+     <flag name="downcasefilenames"><short>[<ref name="recurse"/>] [<ref name="force"/>]</short></flag>
+     <flag name="stripformfeeds"><short>[<ref name="recurse"/>] [<ref name="force"/>]</short></flag>
+ <flag name="showfont"><short>filename</short></flag>
+ <flag name="encmake"><short>afmfile encodingname</short></flag>
+ <flag name="tpmmake"><short>tpm file (run in texmf root)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
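The three files above (HTML page, man page, and XML spec) all describe the same textools command-line interface. As a rough usage illustration of the --updatetree flag documented there, here is a minimal Python sketch; it assumes textools is installed and on PATH, and the two tree roots are placeholder paths, not paths taken from this commit.

    import subprocess

    # Hedged sketch: synchronise one (placeholder) texmf tree into another via
    # the --updatetree flag documented above; --force permits overwriting.
    subprocess.run(
        ["textools", "--updatetree", "/path/to/source-texmf", "/path/to/target-texmf", "--force"],
        check=True,
    )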
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texutil.html b/Master/texmf-dist/doc/context/scripts/mkii/texutil.html
new file mode 100644
index 00000000000..05880e2788a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texutil.html
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>TeXUtil 9.1.0</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">TeXUtil 9.1.0 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--references</th><td></td><td>convert tui file into tuo file</td></tr>
+ <tr><th>--figures</th><td></td><td>generate figure dimensions file</td></tr>
+ <tr><th>--logfile</th><td></td><td>filter essential log messages</td></tr>
+ <tr><th>--purgefiles</th><td></td><td>remove most temporary files</td></tr>
+ <tr><th>--purgeallfiles</th><td></td><td>remove all temporary files</td></tr>
+ <tr><th>--documentation</th><td></td><td>generate documentation file from source</td></tr>
+ <tr><th>--analyzefile</th><td></td><td>analyze pdf file</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texutil.man b/Master/texmf-dist/doc/context/scripts/mkii/texutil.man
new file mode 100644
index 00000000000..9268e7b19c7
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texutil.man
@@ -0,0 +1,45 @@
+.TH "texutil" "1" "01-01-2013" "version 9.1.0" "TeXUtil"
+.SH NAME
+.B texutil
+.SH SYNOPSIS
+.B texutil [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXUtil
+.SH OPTIONS
+.TP
+.B --references
+convert tui file into tuo file
+.TP
+.B --figures
+generate figure dimensions file
+.TP
+.B --logfile
+filter essential log messages
+.TP
+.B --purgefiles
+remove most temporary files
+.TP
+.B --purgeallfiles
+remove all temporary files
+.TP
+.B --documentation
+generate documentation file from source
+.TP
+.B --analyzefile
+analyze pdf file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/texutil.xml b/Master/texmf-dist/doc/context/scripts/mkii/texutil.xml
new file mode 100644
index 00000000000..fde9abd586a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/texutil.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">texutil</entry>
+ <entry name="detail">TeXUtil</entry>
+ <entry name="version">9.1.0</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="references"><short>convert tui file into tuo file</short></flag>
+ <flag name="figures"><short>generate figure dimensions file</short></flag>
+ <flag name="logfile"><short>filter essential log messages</short></flag>
+ <flag name="purgefiles"><short>remove most temporary files</short></flag>
+ <flag name="purgeallfiles"><short>remove all temporary files</short></flag>
+ <flag name="documentation"><short>generate documentation file from source</short></flag>
+ <flag name="analyzefile"><short>analyze pdf file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application> \ No newline at end of file
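The texutil options above are housekeeping actions run alongside a ConTeXt run. A minimal sketch, assuming texutil is on PATH and the script is started from the directory holding the temporary files:

    import subprocess

    # Hedged sketch: remove most temporary files left behind by a run,
    # per the --purgefiles entry in the table above.
    subprocess.run(["texutil", "--purgefiles"], check=True)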
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/tmftools.html b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.html
new file mode 100644
index 00000000000..d3db5fd16b7
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>TMFTools 1.2.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">TMFTools 1.2.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--analyze</th><td></td><td>[--strict --sort --rootpath --treepath --delete --force] [pattern]</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--serve</th><td></td><td>act as kpse server</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/tmftools.man b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.man
new file mode 100644
index 00000000000..9a88f4c4d4a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.man
@@ -0,0 +1,30 @@
+.TH "tmftools" "1" "01-01-2013" "version 1.1.0" "TMFTools"
+.SH NAME
+.B tmftools
+.SH SYNOPSIS
+.B tmftools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TMFTools
+.SH OPTIONS
+.TP
+.B --analyze
+[--strict --sort --rootpath --treepath --delete --force] [pattern]
+.TP
+.B --serve
+act as kpse server
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/tmftools.xml b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.xml
new file mode 100644
index 00000000000..c52aa30673d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/tmftools.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">tmftools</entry>
+ <entry name="detail">TMFTools</entry>
+ <entry name="version">1.1.0</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="analyze"><short>[<ref name="strict"/> <ref name="sort"/> <ref name="rootpath"/> <ref name="treepath"/> <ref name="delete"/> <ref name="force"/>] [pattern]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="serve"><short>act as kpse server</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
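For tmftools, the optional sub-flags and pattern listed above are combined on a single --analyze call. A minimal sketch, assuming tmftools is on PATH; the pattern "type1" is a hypothetical example, not something taken from this commit:

    import subprocess

    # Hedged sketch: analyze the TDS trees strictly and with sorted output,
    # restricted to a hypothetical filename pattern, per the flags above.
    subprocess.run(["tmftools", "--analyze", "--strict", "--sort", "type1"], check=True)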
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/xmltools.html b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.html
new file mode 100644
index 00000000000..7712336097f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.html
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>XMLTools 1.2.1</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">XMLTools 1.2.1 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--dir</th><td></td><td>generate directory listing</td></tr>
+ <tr><th>--mmlpages</th><td></td><td>generate graphic from mathml</td></tr>
+ <tr><th>--analyze</th><td></td><td>report entities and elements [--utf --process]</td></tr>
+ <tr><th>--cleanup</th><td></td><td>cleanup xml file [--force]</td></tr>
+ <tr><th>--enhance</th><td></td><td>enhance xml file (partial)</td></tr>
+ <tr><th>--filter</th><td></td><td>filter elements from xml file [--element]</td></tr>
+  <tr><th>--dir</th><td></td><td>generate directory listing</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/xmltools.man b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.man
new file mode 100644
index 00000000000..3d2ca856ff3
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.man
@@ -0,0 +1,45 @@
+.TH "xmltools" "1" "01-01-2013" "version 1.2.2" "XMLTools"
+.SH NAME
+.B xmltools
+.SH SYNOPSIS
+.B xmltools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B XMLTools
+.SH OPTIONS
+.TP
+.B --dir
+generate directory listing
+.TP
+.B --mmlpages
+generate graphic from mathml
+.TP
+.B --analyze
+report entities and elements [--utf --process]
+.TP
+.B --cleanup
+cleanup xml file [--force]
+.TP
+.B --enhance
+enhance xml file (partial)
+.TP
+.B --filter
+filter elements from xml file [--element]
+.TP
+.B --dir
+generate directory listing
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkii/xmltools.xml b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.xml
new file mode 100644
index 00000000000..4b784c4e721
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkii/xmltools.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">xmltools</entry>
+ <entry name="detail">XMLTools</entry>
+ <entry name="version">1.2.2</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="dir"><short>generate directory listing</short></flag>
+ <flag name="mmlpages"><short>generate graphic from mathml</short></flag>
+ <flag name="analyze"><short>report entities and elements [<ref name="utf"/> <ref name="process"/>]</short></flag>
+ <flag name="cleanup"><short>cleanup xml file [<ref name="force"/>]</short></flag>
+ <flag name="enhance"><short>enhance xml file (partial)</short></flag>
+ <flag name="filter"><short>filter elements from xml file [<ref name="element"/>]</short></flag>
+     <flag name="dir"><short>generate directory listing</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
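A minimal usage sketch for the xmltools --analyze entry above, assuming xmltools is on PATH; somefile.xml is a placeholder filename:

    import subprocess

    # Hedged sketch: report the entities and elements used in an XML file,
    # per the --analyze flag documented above.
    subprocess.run(["xmltools", "--analyze", "somefile.xml"], check=True)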
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/context.html b/Master/texmf-dist/doc/context/scripts/mkiv/context.html
new file mode 100644
index 00000000000..40e479d300f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/context.html
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Process Management 0.60</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Process Management 0.60 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th colspan="3">basic</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--run</th><td></td><td>process (one or more) files (default action)</td></tr>
+ <tr><th>--make</th><td></td><td>create context formats</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--ctx=name</th><td></td><td>use ctx file (process management specification)</td></tr>
+ <tr><th>--interface</th><td></td><td>use specified user interface (default: en)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--autopdf</th><td></td><td>close pdf file in viewer and start pdf viewer afterwards</td></tr>
+  <tr><th>--purge</th><td></td><td>purge files after a run (optionally filtered with --pattern=...)</td></tr>
+  <tr><th>--purgeall</th><td></td><td>purge all files after a run (optionally filtered with --pattern=...)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--usemodule</th><td>list</td><td>load the given module or style, normally part of the distribution</td></tr>
+ <tr><th>--environment</th><td>list</td><td>load the given environment file first (document styles)</td></tr>
+  <tr><th>--mode</th><td>list</td><td>enable the given modes (conditional processing in styles)</td></tr>
+ <tr><th>--path</th><td>list</td><td>also consult the given paths when files are looked for</td></tr>
+ <tr><th>--arguments</th><td>list</td><td>set variables that can be consulted during a run (key/value pairs)</td></tr>
+ <tr><th>--randomseed</th><td>number</td><td>set the randomseed</td></tr>
+ <tr><th>--result</th><td>name</td><td>rename the resulting output to the given name</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>set tracker variables (show list with --showtrackers)</td></tr>
+ <tr><th>--directives</th><td>list</td><td>set directive variables (show list with --showdirectives)</td></tr>
+  <tr><th>--silent</th><td>list</td><td>disable log categories (show list with --showlogcategories)</td></tr>
+ <tr><th>--noconsole</th><td></td><td>disable logging to the console (logfile only)</td></tr>
+ <tr><th>--purgeresult</th><td></td><td>purge result file before run</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--forcexml</th><td></td><td>force xml stub</td></tr>
+ <tr><th>--forcecld</th><td></td><td>force cld (context lua document) stub</td></tr>
+ <tr><th>--forcelua</th><td></td><td>force lua stub (like texlua)</td></tr>
+ <tr><th>--forcemp</th><td></td><td>force mp stub</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--arrange</th><td></td><td>run extra imposition pass, given that the style sets up imposition</td></tr>
+ <tr><th>--noarrange</th><td></td><td>ignore imposition specifications in the style</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--jit</th><td></td><td>use luajittex with jit turned off (only use the faster virtual machine)</td></tr>
+ <tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--once</th><td></td><td>only run once (no multipass data file is produced)</td></tr>
+ <tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr>
+ <tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr>
+ <tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database etc. (as luatools does)</td></tr>
+ <tr><th>--paranoid</th><td></td><td>do not descend to .. and ../..</td></tr>
+ <tr><th>--version</th><td></td><td>report installed context version</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--global</th><td></td><td>assume given file present elsewhere</td></tr>
+ <tr><th>--nofile</th><td></td><td>use dummy file as jobname</td></tr>
+ <tr><th colspan="3">expert</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--touch</th><td></td><td>update context version number (remake needed afterwards, also provide --expert)</td></tr>
+ <tr><th>--nostatistics</th><td></td><td>omit runtime statistics at the end of the run</td></tr>
+ <tr><th>--update</th><td></td><td>update context from website (not to be confused with contextgarden)</td></tr>
+ <tr><th>--profile</th><td></td><td>profile job (use: mtxrun --script profile --analyze)</td></tr>
+ <tr><th>--timing</th><td></td><td>generate timing and statistics overview</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--extra=name</th><td></td><td>process extra (mtx-context-... in distribution)</td></tr>
+ <tr><th>--extras</th><td></td><td>show extras</td></tr>
+ <tr><th colspan="3">special</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pdftex</th><td></td><td>process file with texexec using pdftex</td></tr>
+ <tr><th>--xetex</th><td></td><td>process file with texexec using xetex</td></tr>
+ <tr><th>--mkii</th><td></td><td>process file with texexec</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pipe</th><td></td><td>do not check for file and enter scroll mode (--dummyfile=whatever.tmp)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/context.man b/Master/texmf-dist/doc/context/scripts/mkiv/context.man
new file mode 100644
index 00000000000..e1fb28a994e
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/context.man
@@ -0,0 +1,167 @@
+.TH "mtx-context" "1" "01-01-2013" "version 0.60" "ConTeXt Process Management"
+.SH NAME
+.B mtx-context
+.SH SYNOPSIS
+.B mtxrun --script context [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Process Management
+.SH OPTIONS: BASIC
+.TP
+.B --run
+process (one or more) files (default action)
+.TP
+.B --make
+create context formats
+.TP
+.B --ctx=name
+use ctx file (process management specification)
+.TP
+.B --interface
+use specified user interface (default: en)
+.TP
+.B --autopdf
+close pdf file in viewer and start pdf viewer afterwards
+.TP
+.B --purge
+purge files after a run (optionally filtered with --pattern=...)
+.TP
+.B --purgeall
+purge all files after a run (optionally filtered with --pattern=...)
+.TP
+.B --usemodule=list
+load the given module or style, normally part of the distribution
+.TP
+.B --environment=list
+load the given environment file first (document styles)
+.TP
+.B --mode=list
+enable the given modes (conditional processing in styles)
+.TP
+.B --path=list
+also consult the given paths when files are looked for
+.TP
+.B --arguments=list
+set variables that can be consulted during a run (key/value pairs)
+.TP
+.B --randomseed=number
+set the randomseed
+.TP
+.B --result=name
+rename the resulting output to the given name
+.TP
+.B --trackers=list
+set tracker variables (show list with --showtrackers)
+.TP
+.B --directives=list
+set directive variables (show list with --showdirectives)
+.TP
+.B --silent=list
+disable log categories (show list with --showlogcategories)
+.TP
+.B --noconsole
+disable logging to the console (logfile only)
+.TP
+.B --purgeresult
+purge result file before run
+.TP
+.B --forcexml
+force xml stub
+.TP
+.B --forcecld
+force cld (context lua document) stub
+.TP
+.B --forcelua
+force lua stub (like texlua)
+.TP
+.B --forcemp
+force mp stub
+.TP
+.B --arrange
+run extra imposition pass, given that the style sets up imposition
+.TP
+.B --noarrange
+ignore imposition specifications in the style
+.TP
+.B --jit
+use luajittex with jit turned off (only use the faster virtual machine)
+.TP
+.B --jiton
+use luajittex with jit turned on (in most cases not faster, even slower)
+.TP
+.B --once
+only run once (no multipass data file is produced)
+.TP
+.B --batchmode
+run without stopping and do not show messages on the console
+.TP
+.B --nonstopmode
+run without stopping
+.TP
+.B --synctex
+run with synctex enabled (optional value: zipped, unzipped, 1, -1)
+.TP
+.B --generate
+generate file database etc. (as luatools does)
+.TP
+.B --paranoid
+do not descend to .. and ../..
+.TP
+.B --version
+report installed context version
+.TP
+.B --global
+assume given file present elsewhere
+.TP
+.B --nofile
+use dummy file as jobname
+.SH OPTIONS: EXPERT
+.TP
+.B --touch
+update context version number (remake needed afterwards, also provide --expert)
+.TP
+.B --nostatistics
+omit runtime statistics at the end of the run
+.TP
+.B --update
+update context from website (not to be confused with contextgarden)
+.TP
+.B --profile
+profile job (use: mtxrun --script profile --analyze)
+.TP
+.B --timing
+generate timing and statistics overview
+.TP
+.B --extra=name
+process extra (mtx-context-... in distribution)
+.TP
+.B --extras
+show extras
+.SH OPTIONS: SPECIAL
+.TP
+.B --pdftex
+process file with texexec using pdftex
+.TP
+.B --xetex
+process file with texexec using xetex
+.TP
+.B --mkii
+process file with texexec
+.TP
+.B --pipe
+do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/context.xml b/Master/texmf-dist/doc/context/scripts/mkiv/context.xml
new file mode 100644
index 00000000000..6eb8afeb105
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/context.xml
@@ -0,0 +1,187 @@
+<?xml version="1.0" ?>
+
+<application>
+ <metadata>
+ <entry name="name">mtx-context</entry>
+ <entry name="detail">ConTeXt Process Management</entry>
+ <entry name="version">0.60</entry>
+ <entry name="comment">external helpinfo file</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="run">
+ <short>process (one or more) files (default action)</short>
+ </flag>
+ <flag name="make">
+ <short>create context formats</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="ctx=name">
+ <short>use ctx file (process management specification)</short>
+ </flag>
+ <flag name="interface">
+ <short>use specified user interface (default: en)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autopdf">
+ <short>close pdf file in viewer and start pdf viewer afterwards</short>
+ </flag>
+ <flag name="purge">
+      <short>purge files after a run (optionally filtered with <ref name="pattern"/>=...)</short>
+ </flag>
+ <flag name="purgeall">
+      <short>purge all files after a run (optionally filtered with <ref name="pattern"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usemodule" value="list">
+ <short>load the given module or style, normally part of the distribution</short>
+ </flag>
+ <flag name="environment" value="list">
+ <short>load the given environment file first (document styles)</short>
+ </flag>
+ <flag name="mode" value="list">
+      <short>enable the given modes (conditional processing in styles)</short>
+ </flag>
+ <flag name="path" value="list">
+ <short>also consult the given paths when files are looked for</short>
+ </flag>
+ <flag name="arguments" value="list">
+ <short>set variables that can be consulted during a run (key/value pairs)</short>
+ </flag>
+ <flag name="randomseed" value="number">
+ <short>set the randomseed</short>
+ </flag>
+ <flag name="result" value="name">
+ <short>rename the resulting output to the given name</short>
+ </flag>
+ <flag name="trackers" value="list">
+ <short>set tracker variables (show list with <ref name="showtrackers"/>)</short>
+ </flag>
+ <flag name="directives" value="list">
+ <short>set directive variables (show list with <ref name="showdirectives"/>)</short>
+ </flag>
+ <flag name="silent" value="list">
+      <short>disable log categories (show list with <ref name="showlogcategories"/>)</short>
+ </flag>
+ <flag name="noconsole">
+ <short>disable logging to the console (logfile only)</short>
+ </flag>
+ <flag name="purgeresult">
+ <short>purge result file before run</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="forcexml">
+ <short>force xml stub</short>
+ </flag>
+ <flag name="forcecld">
+ <short>force cld (context lua document) stub</short>
+ </flag>
+ <flag name="forcelua">
+ <short>force lua stub (like texlua)</short>
+ </flag>
+ <flag name="forcemp">
+ <short>force mp stub</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="arrange">
+ <short>run extra imposition pass, given that the style sets up imposition</short>
+ </flag>
+ <flag name="noarrange">
+ <short>ignore imposition specifications in the style</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="jit">
+ <short>use luajittex with jit turned off (only use the faster virtual machine)</short>
+ </flag>
+ <flag name="jiton">
+ <short>use luajittex with jit turned on (in most cases not faster, even slower)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="once">
+ <short>only run once (no multipass data file is produced)</short>
+ </flag>
+ <flag name="batchmode">
+ <short>run without stopping and do not show messages on the console</short>
+ </flag>
+ <flag name="nonstopmode">
+ <short>run without stopping</short>
+ </flag>
+ <flag name="synctex">
+ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate">
+ <short>generate file database etc. (as luatools does)</short>
+ </flag>
+ <flag name="paranoid">
+ <short>do not descend to .. and ../..</short>
+ </flag>
+ <flag name="version">
+ <short>report installed context version</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="global">
+ <short>assume given file present elsewhere</short>
+ </flag>
+ <flag name="nofile">
+ <short>use dummy file as jobname</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="expert">
+ <subcategory>
+ <flag name="touch">
+ <short>update context version number (remake needed afterwards, also provide <ref name="expert"/>)</short>
+ </flag>
+ <flag name="nostatistics">
+ <short>omit runtime statistics at the end of the run</short>
+ </flag>
+ <flag name="update">
+ <short>update context from website (not to be confused with contextgarden)</short>
+ </flag>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ </flag>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="extra=name">
+ <short>process extra (mtx-context-... in distribution)</short>
+ </flag>
+ <flag name="extras">
+ <short>show extras</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="special">
+ <subcategory>
+ <flag name="pdftex">
+ <short>process file with texexec using pdftex</short>
+ </flag>
+ <flag name="xetex">
+ <short>process file with texexec using xetex</short>
+ </flag>
+ <flag name="mkii">
+ <short>process file with texexec</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pipe">
+ <short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
+ </flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
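The context.xml description above is machine readable, so the same flag list can be extracted programmatically. A minimal Python sketch, assuming the file is available locally as context.xml (the path is a placeholder); note that <ref/> elements inside <short> carry no text and are simply dropped here:

    import xml.etree.ElementTree as ET

    # Hedged sketch: list every flag and its one-line description from one of
    # the mkiv *.xml help files added in this commit.
    root = ET.parse("context.xml").getroot()
    name = root.findtext("metadata/entry[@name='name']")
    print("flags for", name)
    for flag in root.iter("flag"):
        short = flag.find("short")
        text = "".join(short.itertext()).strip() if short is not None else ""
        print("  --%-16s %s" % (flag.get("name"), text))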
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/luatools.html b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.html
new file mode 100644
index 00000000000..6cb869a2bac
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.html
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>luatools 1.35</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">luatools 1.35 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database</td></tr>
+ <tr><th>--variables</th><td></td><td>show configuration variables</td></tr>
+ <tr><th>--configurations</th><td></td><td>show configuration order</td></tr>
+ <tr><th>--expand-braces</th><td></td><td>expand complex variable</td></tr>
+ <tr><th>--expand-path</th><td></td><td>expand variable (resolve paths)</td></tr>
+ <tr><th>--expand-var</th><td></td><td>expand variable (resolve references)</td></tr>
+ <tr><th>--show-path</th><td></td><td>show path expansion of ...</td></tr>
+ <tr><th>--var-value</th><td></td><td>report value of variable</td></tr>
+ <tr><th>--find-file</th><td></td><td>report file location</td></tr>
+ <tr><th>--find-path</th><td></td><td>report path of file</td></tr>
+ <tr><th>--make</th><td></td><td>[or --ini] make luatex format</td></tr>
+ <tr><th>--run</th><td></td><td>[or --fmt] run luatex format</td></tr>
+ <tr><th>--compile</th><td></td><td>assemble and compile lua inifile</td></tr>
+ <tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
+ <tr><th>--all</th><td></td><td>show all found files</td></tr>
+ <tr><th>--format</th><td>str</td><td>filter cf format specification (default 'tex', use 'any' for any match)</td></tr>
+ <tr><th>--pattern</th><td>str</td><td>filter variables</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/luatools.man b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.man
new file mode 100644
index 00000000000..145e9f6083b
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.man
@@ -0,0 +1,78 @@
+.TH "luatools" "1" "01-01-2013" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.SH NAME
+.B luatools
+.SH SYNOPSIS
+.B luatools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Management Tool (aka luatools)
+.SH OPTIONS
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --make
+[or --ini] make luatex format
+.TP
+.B --run
+[or --fmt] run luatex format
+.TP
+.B --compile
+assemble and compile lua inifile
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --all
+show all found files
+.TP
+.B --format=str
+filter cf format specification (default 'tex', use 'any' for any match)
+.TP
+.B --pattern=str
+filter variables
+.TP
+.B --trackers=list
+enable given trackers
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/luatools.xml b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.xml
new file mode 100644
index 00000000000..34460fffec4
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/luatools.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0"?><application>
+ <metadata>
+ <entry name="name">luatools</entry>
+ <entry name="detail">ConTeXt TDS Management Tool (aka luatools)</entry>
+ <entry name="version">1.35</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ <flag name="make"><short>[or <ref name="ini"/>] make luatex format</short></flag>
+ <flag name="run"><short>[or <ref name="fmt"/>] run luatex format</short></flag>
+ <flag name="compile"><short>assemble and compile lua inifile</short></flag>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="all"><short>show all found files</short></flag>
+    <flag name="format" value="str"><short>filter according to format specification (default 'tex', use 'any' for any match)</short></flag>
+ <flag name="pattern" value="str"><short>filter variables</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application> \ No newline at end of file
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.html
new file mode 100644
index 00000000000..21de59c086f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Babel Input To UTF Conversion 1.20</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Babel Input To UTF Conversion 1.20 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--language</th><td>string</td><td>conversion language (e.g. greek)</td></tr>
+ <tr><th>--structure</th><td>string</td><td>obey given structure (e.g. 'document', default: 'context')</td></tr>
+ <tr><th>--convert</th><td></td><td>convert babel codes into utf</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.man
new file mode 100644
index 00000000000..cd0b007fb3d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.man
@@ -0,0 +1,33 @@
+.TH "mtx-babel" "1" "01-01-2013" "version 1.20" "Babel Input To UTF Conversion"
+.SH NAME
+.B mtx-babel
+.SH SYNOPSIS
+.B mtxrun --script babel [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Babel Input To UTF Conversion
+.SH OPTIONS
+.TP
+.B --language=string
+conversion language (e.g. greek)
+.TP
+.B --structure=string
+obey given structure (e.g. 'document', default: 'context')
+.TP
+.B --convert
+convert babel codes into utf
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
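
A brief sketch of the documented flags in combination (the input file name is a placeholder):

    mtxrun --script babel --language=greek --convert myfile.tex   # convert babel codes in myfile.tex to utf
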
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.xml
new file mode 100644
index 00000000000..6a6efde1591
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-babel.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-babel</entry>
+ <entry name="detail">Babel Input To UTF Conversion</entry>
+ <entry name="version">1.20</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="language" value="string"><short>conversion language (e.g. greek)</short></flag>
+ <flag name="structure" value="string"><short>obey given structure (e.g. 'document', default: 'context')</short></flag>
+ <flag name="convert"><short>convert babel codes into utf</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.html
new file mode 100644
index 00000000000..2d97a2811db
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.html
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt TDS Management Tool (aka luatools) 1.35</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt TDS Management Tool (aka luatools) 1.35 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database</td></tr>
+ <tr><th>--variables</th><td></td><td>show configuration variables</td></tr>
+ <tr><th>--configurations</th><td></td><td>show configuration order</td></tr>
+ <tr><th>--expand-braces</th><td></td><td>expand complex variable</td></tr>
+ <tr><th>--expand-path</th><td></td><td>expand variable (resolve paths)</td></tr>
+ <tr><th>--expand-var</th><td></td><td>expand variable (resolve references)</td></tr>
+ <tr><th>--show-path</th><td></td><td>show path expansion of ...</td></tr>
+ <tr><th>--var-value</th><td></td><td>report value of variable</td></tr>
+ <tr><th>--find-file</th><td></td><td>report file location</td></tr>
+ <tr><th>--find-path</th><td></td><td>report path of file</td></tr>
+ <tr><th>--make</th><td></td><td>[or --ini] make luatex format</td></tr>
+ <tr><th>--run</th><td></td><td>[or --fmt] run luatex format</td></tr>
+ <tr><th>--compile</th><td></td><td>assemble and compile lua inifile</td></tr>
+ <tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
+ <tr><th>--all</th><td></td><td>show all found files</td></tr>
+  <tr><th>--format</th><td>str</td><td>filter according to format specification (default 'tex', use 'any' for any match)</td></tr>
+ <tr><th>--pattern</th><td>str</td><td>filter variables</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.man
new file mode 100644
index 00000000000..6c72dcb07f1
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.man
@@ -0,0 +1,78 @@
+.TH "mtx-base" "1" "01-01-2013" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.SH NAME
+.B mtx-base
+.SH SYNOPSIS
+.B mtxrun --script base [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Management Tool (aka luatools)
+.SH OPTIONS
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --make
+[or --ini] make luatex format
+.TP
+.B --run
+[or --fmt] run luatex format
+.TP
+.B --compile
+assemble and compile lua inifile
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --all
+show all found files
+.TP
+.B --format=str
+filter according to format specification (default 'tex', use 'any' for any match)
+.TP
+.B --pattern=str
+filter variables
+.TP
+.B --trackers=list
+enable given trackers
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
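
Since mtx-base exposes the same options as luatools through mtxrun, a short sketch of the inspection flags documented above:

    mtxrun --script base --variables        # show configuration variables
    mtxrun --script base --configurations   # show the configuration order
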
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.xml
new file mode 100644
index 00000000000..de15b018815
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-base.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-base</entry>
+ <entry name="detail">ConTeXt TDS Management Tool (aka luatools)</entry>
+ <entry name="version">1.35</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ <flag name="make"><short>[or <ref name="ini"/>] make luatex format</short></flag>
+ <flag name="run"><short>[or <ref name="fmt"/>] run luatex format</short></flag>
+ <flag name="compile"><short>assemble and compile lua inifile</short></flag>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="all"><short>show all found files</short></flag>
+    <flag name="format" value="str"><short>filter according to format specification (default 'tex', use 'any' for any match)</short></flag>
+ <flag name="pattern" value="str"><short>filter variables</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.html
new file mode 100644
index 00000000000..a344d2394a9
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.html
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+    <title>ConTeXt &amp; MetaTeX Cache Management 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+      <div id="top-two">ConTeXt &amp; MetaTeX Cache Management 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+  <tr><th>--purge</th><td></td><td>remove unused files</td></tr>
+ <tr><th>--erase</th><td></td><td>completely remove cache</td></tr>
+ <tr><th>--list</th><td></td><td>show cache</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--all</th><td></td><td>all (not yet implemented)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.man
new file mode 100644
index 00000000000..26f3793fc2b
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.man
@@ -0,0 +1,36 @@
+.TH "mtx-cache" "1" "01-01-2013" "version 0.10" "ConTeXt & MetaTeX Cache Management"
+.SH NAME
+.B mtx-cache
+.SH SYNOPSIS
+.B mtxrun --script cache [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt & MetaTeX Cache Management
+.SH OPTIONS
+.TP
+.B --purge
+remove unused files
+.TP
+.B --erase
+completely remove cache
+.TP
+.B --list
+show cache
+.TP
+.B --all
+all (not yet implemented)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
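
A short sketch of a typical cleanup using only the flags documented above:

    mtxrun --script cache --list    # inspect what is currently cached
    mtxrun --script cache --purge   # remove files that are no longer used
    mtxrun --script cache --erase   # or wipe the cache completely
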
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.xml
new file mode 100644
index 00000000000..2e37f036c3e
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-cache.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-cache</entry>
+  <entry name="detail">ConTeXt &amp; MetaTeX Cache Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+    <flag name="purge"><short>remove unused files</short></flag>
+ <flag name="erase"><short>completely remove cache</short></flag>
+ <flag name="list"><short>show cache</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="all"><short>all (not yet implemented)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.html
new file mode 100644
index 00000000000..64bb3703886
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>MkII Character Table Generators 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">MkII Character Table Generators 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--xtx</th><td></td><td>generate xetx-*.tex (used by xetex)</td></tr>
+ <tr><th>--pdf</th><td></td><td>generate pdfr-def.tex (used by pdftex)</td></tr>
+ <tr><th>--entities</th><td></td><td>generate entities table</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.man
new file mode 100644
index 00000000000..5d3df23e6a4
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.man
@@ -0,0 +1,33 @@
+.TH "mtx-chars" "1" "01-01-2013" "version 0.10" "MkII Character Table Generators"
+.SH NAME
+.B mtx-chars
+.SH SYNOPSIS
+.B mtxrun --script chars [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MkII Character Table Generators
+.SH OPTIONS
+.TP
+.B --xtx
+generate xetx-*.tex (used by xetex)
+.TP
+.B --pdf
+generate pdfr-def.tex (used by pdftex)
+.TP
+.B --entities
+generate entities table
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
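
As a sketch, regenerating the MkII character tables with the documented flags:

    mtxrun --script chars --xtx   # write xetx-*.tex for use with xetex
    mtxrun --script chars --pdf   # write pdfr-def.tex for use with pdftex
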
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.xml
new file mode 100644
index 00000000000..dd774923569
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-chars.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-chars</entry>
+ <entry name="detail">MkII Character Table Generators</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="xtx"><short>generate xetx-*.tex (used by xetex)</short></flag>
+ <flag name="pdf"><short>generate pdfr-def.tex (used by pdftex)</short></flag>
+ <flag name="entities"><short>generate entities table</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.html
new file mode 100644
index 00000000000..88cbc875948
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.html
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Basic ConTeXt Syntax Checking 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Basic ConTeXt Syntax Checking 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--convert</th><td></td><td>check tex file for errors</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.man
new file mode 100644
index 00000000000..72e33b088eb
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.man
@@ -0,0 +1,27 @@
+.TH "mtx-check" "1" "01-01-2013" "version 0.10" "Basic ConTeXt Syntax Checking"
+.SH NAME
+.B mtx-check
+.SH SYNOPSIS
+.B mtxrun --script check [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Basic ConTeXt Syntax Checking
+.SH OPTIONS
+.TP
+.B --convert
+check tex file for errors
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
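
A one-line sketch of the single documented option (the file name is a placeholder):

    mtxrun --script check --convert myfile.tex   # report basic syntax problems in myfile.tex
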
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.xml
new file mode 100644
index 00000000000..a8dcb82fdca
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-check.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-check</entry>
+ <entry name="detail">Basic ConTeXt Syntax Checking</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>check tex file for errors</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.html
new file mode 100644
index 00000000000..3628fb66aab
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Color Management 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Color Management 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--table</th><td></td><td>show icc table</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>mtxrun --script color --table somename</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.man
new file mode 100644
index 00000000000..d466b5ea30c
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.man
@@ -0,0 +1,27 @@
+.TH "mtx-colors" "1" "01-01-2013" "version 0.10" "ConTeXt Color Management"
+.SH NAME
+.B mtx-colors
+.SH SYNOPSIS
+.B mtxrun --script colors [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Color Management
+.SH OPTIONS
+.TP
+.B --table
+show icc table
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.xml
new file mode 100644
index 00000000000..ee6418b29d4
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-colors.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-colors</entry>
+ <entry name="detail">ConTeXt Color Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="table"><short>show icc table</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script color --table somename</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.html
new file mode 100644
index 00000000000..40e479d300f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.html
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Process Management 0.60</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Process Management 0.60 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th colspan="3">basic</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--run</th><td></td><td>process (one or more) files (default action)</td></tr>
+ <tr><th>--make</th><td></td><td>create context formats</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--ctx=name</th><td></td><td>use ctx file (process management specification)</td></tr>
+ <tr><th>--interface</th><td></td><td>use specified user interface (default: en)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--autopdf</th><td></td><td>close pdf file in viewer and start pdf viewer afterwards</td></tr>
+  <tr><th>--purge</th><td></td><td>purge files, with or without a run (--pattern=...)</td></tr>
+  <tr><th>--purgeall</th><td></td><td>purge all files, with or without a run (--pattern=...)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--usemodule</th><td>list</td><td>load the given module or style, normally part of the distribution</td></tr>
+ <tr><th>--environment</th><td>list</td><td>load the given environment file first (document styles)</td></tr>
+  <tr><th>--mode</th><td>list</td><td>enable the given modes (conditional processing in styles)</td></tr>
+ <tr><th>--path</th><td>list</td><td>also consult the given paths when files are looked for</td></tr>
+ <tr><th>--arguments</th><td>list</td><td>set variables that can be consulted during a run (key/value pairs)</td></tr>
+ <tr><th>--randomseed</th><td>number</td><td>set the randomseed</td></tr>
+ <tr><th>--result</th><td>name</td><td>rename the resulting output to the given name</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>set tracker variables (show list with --showtrackers)</td></tr>
+ <tr><th>--directives</th><td>list</td><td>set directive variables (show list with --showdirectives)</td></tr>
+  <tr><th>--silent</th><td>list</td><td>disable logcategories (show list with --showlogcategories)</td></tr>
+ <tr><th>--noconsole</th><td></td><td>disable logging to the console (logfile only)</td></tr>
+ <tr><th>--purgeresult</th><td></td><td>purge result file before run</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--forcexml</th><td></td><td>force xml stub</td></tr>
+ <tr><th>--forcecld</th><td></td><td>force cld (context lua document) stub</td></tr>
+ <tr><th>--forcelua</th><td></td><td>force lua stub (like texlua)</td></tr>
+ <tr><th>--forcemp</th><td></td><td>force mp stub</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--arrange</th><td></td><td>run extra imposition pass, given that the style sets up imposition</td></tr>
+ <tr><th>--noarrange</th><td></td><td>ignore imposition specifications in the style</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--jit</th><td></td><td>use luajittex with jit turned off (only use the faster virtual machine)</td></tr>
+ <tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--once</th><td></td><td>only run once (no multipass data file is produced)</td></tr>
+ <tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr>
+ <tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr>
+ <tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database etc. (as luatools does)</td></tr>
+ <tr><th>--paranoid</th><td></td><td>do not descend to .. and ../..</td></tr>
+ <tr><th>--version</th><td></td><td>report installed context version</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--global</th><td></td><td>assume given file present elsewhere</td></tr>
+ <tr><th>--nofile</th><td></td><td>use dummy file as jobname</td></tr>
+ <tr><th colspan="3">expert</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--touch</th><td></td><td>update context version number (remake needed afterwards, also provide --expert)</td></tr>
+ <tr><th>--nostatistics</th><td></td><td>omit runtime statistics at the end of the run</td></tr>
+ <tr><th>--update</th><td></td><td>update context from website (not to be confused with contextgarden)</td></tr>
+ <tr><th>--profile</th><td></td><td>profile job (use: mtxrun --script profile --analyze)</td></tr>
+ <tr><th>--timing</th><td></td><td>generate timing and statistics overview</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--extra=name</th><td></td><td>process extra (mtx-context-... in distribution)</td></tr>
+ <tr><th>--extras</th><td></td><td>show extras</td></tr>
+ <tr><th colspan="3">special</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pdftex</th><td></td><td>process file with texexec using pdftex</td></tr>
+ <tr><th>--xetex</th><td></td><td>process file with texexec using xetex</td></tr>
+ <tr><th>--mkii</th><td></td><td>process file with texexec</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pipe</th><td></td><td>do not check for file and enter scroll mode (--dummyfile=whatever.tmp)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
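The option table above (and the man page and xml file that follow) describe the same flag set for the context runner. A few illustrative invocations, assuming only that mtxrun is on the PATH; myfile.tex and the name draft are placeholders, the flags are the ones documented above:

    mtxrun --script context myfile.tex                              (default --run, normal multipass run)
    mtxrun --script context --once myfile.tex                       (single pass, no multipass data file)
    mtxrun --script context --result=draft --purgeall myfile.tex    (rename the result, purge files afterwards)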
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.man
new file mode 100644
index 00000000000..e1fb28a994e
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.man
@@ -0,0 +1,167 @@
+.TH "mtx-context" "1" "01-01-2013" "version 0.60" "ConTeXt Process Management"
+.SH NAME
+.B mtx-context
+.SH SYNOPSIS
+.B mtxrun --script context [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Process Management
+.SH OPTIONS: BASIC
+.TP
+.B --run
+process (one or more) files (default action)
+.TP
+.B --make
+create context formats
+.TP
+.B --ctx=name
+use ctx file (process management specification)
+.TP
+.B --interface
+use specified user interface (default: en)
+.TP
+.B --autopdf
+close pdf file in viewer and start pdf viewer afterwards
+.TP
+.B --purge
+purge files, whether or not a run is done (--pattern=...)
+.TP
+.B --purgeall
+purge all files, whether or not a run is done (--pattern=...)
+.TP
+.B --usemodule=list
+load the given module or style, normally part of the distribution
+.TP
+.B --environment=list
+load the given environment file first (document styles)
+.TP
+.B --mode=list
+enable the given modes (conditional processing in styles)
+.TP
+.B --path=list
+also consult the given paths when files are looked for
+.TP
+.B --arguments=list
+set variables that can be consulted during a run (key/value pairs)
+.TP
+.B --randomseed=number
+set the randomseed
+.TP
+.B --result=name
+rename the resulting output to the given name
+.TP
+.B --trackers=list
+set tracker variables (show list with --showtrackers)
+.TP
+.B --directives=list
+set directive variables (show list with --showdirectives)
+.TP
+.B --silent=list
+disable log categories (show list with --showlogcategories)
+.TP
+.B --noconsole
+disable logging to the console (logfile only)
+.TP
+.B --purgeresult
+purge result file before run
+.TP
+.B --forcexml
+force xml stub
+.TP
+.B --forcecld
+force cld (context lua document) stub
+.TP
+.B --forcelua
+force lua stub (like texlua)
+.TP
+.B --forcemp
+force mp stub
+.TP
+.B --arrange
+run extra imposition pass, given that the style sets up imposition
+.TP
+.B --noarrange
+ignore imposition specifications in the style
+.TP
+.B --jit
+use luajittex with jit turned off (only use the faster virtual machine)
+.TP
+.B --jiton
+use luajittex with jit turned on (in most cases not faster, even slower)
+.TP
+.B --once
+only run once (no multipass data file is produced)
+.TP
+.B --batchmode
+run without stopping and do not show messages on the console
+.TP
+.B --nonstopmode
+run without stopping
+.TP
+.B --synctex
+run with synctex enabled (optional value: zipped, unzipped, 1, -1)
+.TP
+.B --generate
+generate file database etc. (as luatools does)
+.TP
+.B --paranoid
+do not descend to .. and ../..
+.TP
+.B --version
+report installed context version
+.TP
+.B --global
+assume given file present elsewhere
+.TP
+.B --nofile
+use dummy file as jobname
+.SH OPTIONS: EXPERT
+.TP
+.B --touch
+update context version number (remake needed afterwards, also provide --expert)
+.TP
+.B --nostatistics
+omit runtime statistics at the end of the run
+.TP
+.B --update
+update context from website (not to be confused with contextgarden)
+.TP
+.B --profile
+profile job (use: mtxrun --script profile --analyze)
+.TP
+.B --timing
+generate timing and statistics overview
+.TP
+.B --extra=name
+process extra (mtx-context-... in distribution)
+.TP
+.B --extras
+show extras
+.SH OPTIONS: SPECIAL
+.TP
+.B --pdftex
+process file with texexec using pdftex
+.TP
+.B --xetex
+process file with texexec using xetex
+.TP
+.B --mkii
+process file with texexec
+.TP
+.B --pipe
+do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.xml
new file mode 100644
index 00000000000..6eb8afeb105
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-context.xml
@@ -0,0 +1,187 @@
+<?xml version="1.0" ?>
+
+<application>
+ <metadata>
+ <entry name="name">mtx-context</entry>
+ <entry name="detail">ConTeXt Process Management</entry>
+ <entry name="version">0.60</entry>
+ <entry name="comment">external helpinfo file</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="run">
+ <short>process (one or more) files (default action)</short>
+ </flag>
+ <flag name="make">
+ <short>create context formats</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="ctx=name">
+ <short>use ctx file (process management specification)</short>
+ </flag>
+ <flag name="interface">
+ <short>use specified user interface (default: en)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autopdf">
+ <short>close pdf file in viewer and start pdf viewer afterwards</short>
+ </flag>
+ <flag name="purge">
+     <short>purge files, whether or not a run is done (<ref name="pattern"/>=...)</short>
+ </flag>
+ <flag name="purgeall">
+     <short>purge all files, whether or not a run is done (<ref name="pattern"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usemodule" value="list">
+ <short>load the given module or style, normally part of the distribution</short>
+ </flag>
+ <flag name="environment" value="list">
+ <short>load the given environment file first (document styles)</short>
+ </flag>
+ <flag name="mode" value="list">
+     <short>enable the given modes (conditional processing in styles)</short>
+ </flag>
+ <flag name="path" value="list">
+ <short>also consult the given paths when files are looked for</short>
+ </flag>
+ <flag name="arguments" value="list">
+ <short>set variables that can be consulted during a run (key/value pairs)</short>
+ </flag>
+ <flag name="randomseed" value="number">
+ <short>set the randomseed</short>
+ </flag>
+ <flag name="result" value="name">
+ <short>rename the resulting output to the given name</short>
+ </flag>
+ <flag name="trackers" value="list">
+ <short>set tracker variables (show list with <ref name="showtrackers"/>)</short>
+ </flag>
+ <flag name="directives" value="list">
+ <short>set directive variables (show list with <ref name="showdirectives"/>)</short>
+ </flag>
+ <flag name="silent" value="list">
+     <short>disable log categories (show list with <ref name="showlogcategories"/>)</short>
+ </flag>
+ <flag name="noconsole">
+ <short>disable logging to the console (logfile only)</short>
+ </flag>
+ <flag name="purgeresult">
+ <short>purge result file before run</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="forcexml">
+ <short>force xml stub</short>
+ </flag>
+ <flag name="forcecld">
+ <short>force cld (context lua document) stub</short>
+ </flag>
+ <flag name="forcelua">
+ <short>force lua stub (like texlua)</short>
+ </flag>
+ <flag name="forcemp">
+ <short>force mp stub</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="arrange">
+ <short>run extra imposition pass, given that the style sets up imposition</short>
+ </flag>
+ <flag name="noarrange">
+ <short>ignore imposition specifications in the style</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="jit">
+ <short>use luajittex with jit turned off (only use the faster virtual machine)</short>
+ </flag>
+ <flag name="jiton">
+ <short>use luajittex with jit turned on (in most cases not faster, even slower)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="once">
+ <short>only run once (no multipass data file is produced)</short>
+ </flag>
+ <flag name="batchmode">
+ <short>run without stopping and do not show messages on the console</short>
+ </flag>
+ <flag name="nonstopmode">
+ <short>run without stopping</short>
+ </flag>
+ <flag name="synctex">
+ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate">
+ <short>generate file database etc. (as luatools does)</short>
+ </flag>
+ <flag name="paranoid">
+ <short>do not descend to .. and ../..</short>
+ </flag>
+ <flag name="version">
+ <short>report installed context version</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="global">
+ <short>assume given file present elsewhere</short>
+ </flag>
+ <flag name="nofile">
+ <short>use dummy file as jobname</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="expert">
+ <subcategory>
+ <flag name="touch">
+ <short>update context version number (remake needed afterwards, also provide <ref name="expert"/>)</short>
+ </flag>
+ <flag name="nostatistics">
+ <short>omit runtime statistics at the end of the run</short>
+ </flag>
+ <flag name="update">
+ <short>update context from website (not to be confused with contextgarden)</short>
+ </flag>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ </flag>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="extra=name">
+ <short>process extra (mtx-context-... in distribution)</short>
+ </flag>
+ <flag name="extras">
+ <short>show extras</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="special">
+ <subcategory>
+ <flag name="pdftex">
+ <short>process file with texexec using pdftex</short>
+ </flag>
+ <flag name="xetex">
+ <short>process file with texexec using xetex</short>
+ </flag>
+ <flag name="mkii">
+ <short>process file with texexec</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pipe">
+ <short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
+ </flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.html
new file mode 100644
index 00000000000..6c8bb99d845
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt EPUB Helpers 0.12</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt EPUB Helpers 0.12 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--make</th><td></td><td>create epub zip file</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>mtxrun --script epub --make mydocument</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.man
new file mode 100644
index 00000000000..518435d1e2b
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.man
@@ -0,0 +1,27 @@
+.TH "mtx-epub" "1" "01-01-2013" "version 0.12" "ConTeXt EPUB Helpers"
+.SH NAME
+.B mtx-epub
+.SH SYNOPSIS
+.B mtxrun --script epub [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt EPUB Helpers
+.SH OPTIONS
+.TP
+.B --make
+create epub zip file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.xml
new file mode 100644
index 00000000000..5ef5dc81b39
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-epub.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-epub</entry>
+ <entry name="detail">ConTeXt EPUB Helpers</entry>
+ <entry name="version">0.12</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="make"><short>create epub zip file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script epub --make mydocument</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.html
new file mode 100644
index 00000000000..0719d9c0b2c
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.html
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Fast Directory Change 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Fast Directory Change 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--clear</th><td></td><td>clear the cache</td></tr>
+ <tr><th>--clear</th><td></td><td>--history [entry] clear the history</td></tr>
+ <tr><th>--scan</th><td></td><td>clear the cache and add given path(s)</td></tr>
+ <tr><th>--add</th><td></td><td>add given path(s)</td></tr>
+ <tr><th>--find</th><td></td><td>find given path (can be substring)</td></tr>
+ <tr><th>--find</th><td></td><td>--nohistory find given path (can be substring) but don&apos;t use history</td></tr>
+ <tr><th>--stub</th><td></td><td>print platform stub file</td></tr>
+ <tr><th>--list</th><td></td><td>show roots of cached dirs</td></tr>
+ <tr><th>--list</th><td></td><td>--history show history of chosen dirs</td></tr>
+ <tr><th>--help</th><td></td><td>show this help</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>fcd --scan t:\</tt>
+<br/><tt>fcd --add f:\project</tt>
+<br/><tt>fcd [--find] whatever</tt>
+<br/><tt>fcd --list</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.man
new file mode 100644
index 00000000000..43de2a3a4de
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.man
@@ -0,0 +1,54 @@
+.TH "mtx-fcd" "1" "01-01-2013" "version 1.00" "Fast Directory Change"
+.SH NAME
+.B mtx-fcd
+.SH SYNOPSIS
+.B mtxrun --script fcd [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Fast Directory Change
+.SH OPTIONS
+.TP
+.B --clear
+clear the cache
+.TP
+.B --clear
+--history [entry] clear the history
+.TP
+.B --scan
+clear the cache and add given path(s)
+.TP
+.B --add
+add given path(s)
+.TP
+.B --find
+find given path (can be substring)
+.TP
+.B --find
+--nohistory find given path (can be substring) but don't use history
+.TP
+.B --stub
+print platform stub file
+.TP
+.B --list
+show roots of cached dirs
+.TP
+.B --list
+--history show history of chosen dirs
+.TP
+.B --help
+show this help
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.xml
new file mode 100644
index 00000000000..f20975efdf3
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fcd.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-fcd</entry>
+ <entry name="detail">Fast Directory Change</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="clear"><short>clear the cache</short></flag>
+ <flag name="clear"><short><ref name="history"/> [entry] clear the history</short></flag>
+ <flag name="scan"><short>clear the cache and add given path(s)</short></flag>
+ <flag name="add"><short>add given path(s)</short></flag>
+ <flag name="find"><short>find given path (can be substring)</short></flag>
+ <flag name="find"><short><ref name="nohistory"/> find given path (can be substring) but don't use history</short></flag>
+ <flag name="stub"><short>print platform stub file</short></flag>
+ <flag name="list"><short>show roots of cached dirs</short></flag>
+ <flag name="list"><short><ref name="history"/> show history of chosen dirs</short></flag>
+ <flag name="help"><short>show this help</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>fcd --scan t:\</command></example>
+ <example><command>fcd --add f:\project</command></example>
+ <example><command>fcd [--find] whatever</command></example>
+ <example><command>fcd --list</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.html
new file mode 100644
index 00000000000..293b1cb95e8
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.html
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Flac Helpers 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Flac Helpers 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--collect</th><td></td><td>collect albums in xml file</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>mtxrun --script flac --collect somename.flac</tt>
+<br/><tt>mtxrun --script flac --collect --pattern="m:/music/**"</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.man
new file mode 100644
index 00000000000..ef914f2acff
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.man
@@ -0,0 +1,27 @@
+.TH "mtx-flac" "1" "01-01-2013" "version 0.10" "ConTeXt Flac Helpers"
+.SH NAME
+.B mtx-flac
+.SH SYNOPSIS
+.B mtxrun --script flac [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Flac Helpers
+.SH OPTIONS
+.TP
+.B --collect
+collect albums in xml file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.xml
new file mode 100644
index 00000000000..bd5fc9bc9e7
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-flac.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-flac</entry>
+ <entry name="detail">ConTeXt Flac Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="collect"><short>collect albums in xml file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script flac --collect somename.flac</command></example>
+    <example><command>mtxrun --script flac --collect --pattern="m:/music/**"</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.html
new file mode 100644
index 00000000000..5fdfc5feb74
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.html
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Font Database Management 0.21</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Font Database Management 0.21 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+    <tr><th>--save</th><td></td><td>save OpenType font in a raw table</td></tr>
+    <tr><th>--unpack</th><td></td><td>save a tma file in a more readable format</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--reload</th><td></td><td>generate new font database (use --force when in doubt)</td></tr>
+    <tr><th>--reload</th><td></td><td>--simple: generate luatex-fonts-names.lua (not for context!)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--list</th><td></td><td>--name: list installed fonts, filter by name [--pattern]</td></tr>
+ <tr><th>--list</th><td></td><td>--spec: list installed fonts, filter by spec [--filter]</td></tr>
+ <tr><th>--list</th><td></td><td>--file: list installed fonts, filter by file [--pattern]</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pattern</th><td>str</td><td>filter files using pattern</td></tr>
+ <tr><th>--filter</th><td>list</td><td>key-value pairs</td></tr>
+ <tr><th>--all</th><td></td><td>show all found instances (combined with other flags)</td></tr>
+ <tr><th>--info</th><td></td><td>give more details</td></tr>
+ <tr><th>--track</th><td>list</td><td>enable trackers</td></tr>
+ <tr><th>--statistics</th><td></td><td>some info about the database</td></tr>
+ </table>
+<br/>
+<h1>Examples</h1>
+<tt>mtxrun --script font --list somename (== --pattern=*somename*)</tt>
+<br/><br/><tt>mtxrun --script font --list --name somename</tt>
+<br/><tt>mtxrun --script font --list --name --pattern=*somename*</tt>
+<br/><br/><tt>mtxrun --script font --list --spec somename</tt>
+<br/><tt>mtxrun --script font --list --spec somename-bold-italic</tt>
+<br/><tt>mtxrun --script font --list --spec --pattern=*somename*</tt>
+<br/><tt>mtxrun --script font --list --spec --filter="fontname=somename"</tt>
+<br/><tt>mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed"</tt>
+<br/><tt>mtxrun --script font --list --spec --filter="familyname=crap*,weight=bold,style=italic"</tt>
+<br/><br/><tt>mtxrun --script font --list --all</tt>
+<br/><tt>mtxrun --script font --list --file somename</tt>
+<br/><tt>mtxrun --script font --list --file --all somename</tt>
+<br/><tt>mtxrun --script font --list --file --pattern=*somename*</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
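In addition to the --list examples above, the database maintenance flags are typically combined as sketched below; the pattern is a placeholder, the flags are the documented ones:

    mtxrun --script fonts --reload                             (rebuild the font database)
    mtxrun --script fonts --reload --force                     (rebuild even when in doubt about the cache)
    mtxrun --script fonts --list --info --pattern=*dejavu*     (list matching fonts with extra details)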
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.man
new file mode 100644
index 00000000000..b576b9de35d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.man
@@ -0,0 +1,63 @@
+.TH "mtx-fonts" "1" "01-01-2013" "version 0.21" "ConTeXt Font Database Management"
+.SH NAME
+.B mtx-fonts
+.SH SYNOPSIS
+.B mtxrun --script fonts [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Font Database Management
+.SH OPTIONS
+.TP
+.B --save
+save OpenType font in a raw table
+.TP
+.B --unpack
+save a tma file in a more readable format
+.TP
+.B --reload
+generate new font database (use --force when in doubt)
+.TP
+.B --reload
+--simple: generate luatex-fonts-names.lua (not for context!)
+.TP
+.B --list
+--name: list installed fonts, filter by name [--pattern]
+.TP
+.B --list
+--spec: list installed fonts, filter by spec [--filter]
+.TP
+.B --list
+--file: list installed fonts, filter by file [--pattern]
+.TP
+.B --pattern=str
+filter files using pattern
+.TP
+.B --filter=list
+key-value pairs
+.TP
+.B --all
+show all found instances (combined with other flags)
+.TP
+.B --info
+give more details
+.TP
+.B --track=list
+enable trackers
+.TP
+.B --statistics
+some info about the database
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.xml
new file mode 100644
index 00000000000..f38633e5100
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-fonts.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-fonts</entry>
+ <entry name="detail">ConTeXt Font Database Management</entry>
+ <entry name="version">0.21</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+    <flag name="save"><short>save OpenType font in a raw table</short></flag>
+    <flag name="unpack"><short>save a tma file in a more readable format</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="reload"><short>generate new font database (use <ref name="force"/> when in doubt)</short></flag>
+    <flag name="reload"><short><ref name="simple"/>: generate luatex-fonts-names.lua (not for context!)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="list"><short><ref name="name"/>: list installed fonts, filter by name [<ref name="pattern"/>]</short></flag>
+ <flag name="list"><short><ref name="spec"/>: list installed fonts, filter by spec [<ref name="filter"/>]</short></flag>
+ <flag name="list"><short><ref name="file"/>: list installed fonts, filter by file [<ref name="pattern"/>]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="str"><short>filter files using pattern</short></flag>
+ <flag name="filter" value="list"><short>key-value pairs</short></flag>
+ <flag name="all"><short>show all found instances (combined with other flags)</short></flag>
+ <flag name="info"><short>give more details</short></flag>
+ <flag name="track" value="list"><short>enable trackers</short></flag>
+ <flag name="statistics"><short>some info about the database</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script font --list somename (== --pattern=*somename*)</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --name somename</command></example>
+ <example><command>mtxrun --script font --list --name --pattern=*somename*</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --spec somename</command></example>
+ <example><command>mtxrun --script font --list --spec somename-bold-italic</command></example>
+ <example><command>mtxrun --script font --list --spec --pattern=*somename*</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="fontname=somename"</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed"</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="familyname=crap*,weight=bold,style=italic"</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --all</command></example>
+ <example><command>mtxrun --script font --list --file somename</command></example>
+ <example><command>mtxrun --script font --list --file --all somename</command></example>
+ <example><command>mtxrun --script font --list --file --pattern=*somename*</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.html
new file mode 100644
index 00000000000..3df541ba847
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.html
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Simple Grepper 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Simple Grepper 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pattern</th><td></td><td>search for pattern (optional)</td></tr>
+ <tr><th>--count</th><td></td><td>count matches only</td></tr>
+ <tr><th>--nocomment</th><td></td><td>skip lines that start with %% or #</td></tr>
+ <tr><th>--xml</th><td></td><td>pattern is lpath expression</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
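The grepper takes its (optional) --pattern flag plus file names, and, as the xml file below notes, patterns are Lua patterns. Two sketches with placeholder pattern and file names; the exact calling convention is best checked with --help:

    mtxrun --script grep --pattern=startchapter *.tex          (show matching lines)
    mtxrun --script grep --count --pattern=startchapter *.tex  (only report the number of matches)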
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.man
new file mode 100644
index 00000000000..6b86ec75591
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.man
@@ -0,0 +1,36 @@
+.TH "mtx-grep" "1" "01-01-2013" "version 0.10" "Simple Grepper"
+.SH NAME
+.B mtx-grep
+.SH SYNOPSIS
+.B mtxrun --script grep [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Grepper
+.SH OPTIONS
+.TP
+.B --pattern
+search for pattern (optional)
+.TP
+.B --count
+count matches only
+.TP
+.B --nocomment
+skip lines that start with %% or #
+.TP
+.B --xml
+pattern is lpath expression
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.xml
new file mode 100644
index 00000000000..d8567e03576
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-grep.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-grep</entry>
+ <entry name="detail">Simple Grepper</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="pattern"><short>search for pattern (optional)</short></flag>
+ <flag name="count"><short>count matches only</short></flag>
+ <flag name="nocomment"><short>skip lines that start with %% or #</short></flag>
+ <flag name="xml"><short>pattern is lpath expression</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <comments>
+ <comment>patterns are lua patterns and need to be escaped accordingly</comment>
+ </comments>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.html
new file mode 100644
index 00000000000..25d26392538
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.html
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Interface Related Goodies 0.13</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Interface Related Goodies 0.13 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--interfaces</th><td></td><td>generate context interface files</td></tr>
+ <tr><th>--messages</th><td></td><td>generate context message files</td></tr>
+ <tr><th>--labels</th><td></td><td>generate context label files</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--context</th><td></td><td>equals --interfaces --messages --languages</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--scite</th><td></td><td>generate scite interface</td></tr>
+ <tr><th>--bbedit</th><td></td><td>generate bbedit interface files</td></tr>
+ <tr><th>--jedit</th><td></td><td>generate jedit interface files</td></tr>
+ <tr><th>--textpad</th><td></td><td>generate textpad interface files</td></tr>
+ <tr><th>--text</th><td></td><td>create text files for commands and environments</td></tr>
+ <tr><th>--raw</th><td></td><td>report commands to the console</td></tr>
+ <tr><th>--check</th><td></td><td>generate check file</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--toutf</th><td></td><td>replace named characters by utf</td></tr>
+ <tr><th>--preprocess</th><td></td><td>preprocess mkvi files to tex files [force,suffix]</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--suffix</th><td></td><td>use given suffix for output files</td></tr>
+ <tr><th>--force</th><td></td><td>force action even when in doubt</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.man
new file mode 100644
index 00000000000..232451621a6
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.man
@@ -0,0 +1,69 @@
+.TH "mtx-interface" "1" "01-01-2013" "version 0.13" "ConTeXt Interface Related Goodies"
+.SH NAME
+.B mtx-interface
+.SH SYNOPSIS
+.B mtxrun --script interface [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Interface Related Goodies
+.SH OPTIONS
+.TP
+.B --interfaces
+generate context interface files
+.TP
+.B --messages
+generate context message files
+.TP
+.B --labels
+generate context label files
+.TP
+.B --context
+equals --interfaces --messages --languages
+.TP
+.B --scite
+generate scite interface
+.TP
+.B --bbedit
+generate bbedit interface files
+.TP
+.B --jedit
+generate jedit interface files
+.TP
+.B --textpad
+generate textpad interface files
+.TP
+.B --text
+create text files for commands and environments
+.TP
+.B --raw
+report commands to the console
+.TP
+.B --check
+generate check file
+.TP
+.B --toutf
+replace named characters by utf
+.TP
+.B --preprocess
+preprocess mkvi files to tex files [force,suffix]
+.TP
+.B --suffix
+use given suffix for output files
+.TP
+.B --force
+force action even when in doubt
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.xml
new file mode 100644
index 00000000000..6150215f6bb
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-interface.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-interface</entry>
+ <entry name="detail">ConTeXt Interface Related Goodies</entry>
+ <entry name="version">0.13</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="interfaces"><short>generate context interface files</short></flag>
+ <flag name="messages"><short>generate context message files</short></flag>
+ <flag name="labels"><short>generate context label files</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="context"><short>equals <ref name="interfaces"/> <ref name="messages"/> <ref name="languages"/></short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="scite"><short>generate scite interface</short></flag>
+ <flag name="bbedit"><short>generate bbedit interface files</short></flag>
+ <flag name="jedit"><short>generate jedit interface files</short></flag>
+ <flag name="textpad"><short>generate textpad interface files</short></flag>
+ <flag name="text"><short>create text files for commands and environments</short></flag>
+ <flag name="raw"><short>report commands to the console</short></flag>
+ <flag name="check"><short>generate check file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="toutf"><short>replace named characters by utf</short></flag>
+ <flag name="preprocess"><short>preprocess mkvi files to tex files [force,suffix]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="suffix"><short>use given suffix for output files</short></flag>
+ <flag name="force"><short>force action even when in doubt</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
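
The mtx-interface pages above only enumerate the flags; as a rough orientation, invocations follow the usual mtxrun pattern shown in the synopsis. The lines below are an illustrative sketch only — the .mkvi file name is made up and is not part of this changeset:

  mtxrun --script interface --interfaces                       # regenerate the context interface files
  mtxrun --script interface --scite                            # generate the scite editor interface
  mtxrun --script interface --preprocess --force myfile.mkvi   # 'myfile.mkvi' is a hypothetical input
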
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.html
new file mode 100644
index 00000000000..d9be7074423
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.html
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>MetaPost to PDF processor 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">MetaPost to PDF processor 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--rawmp</th><td></td><td>raw metapost run</td></tr>
+ <tr><th>--metafun</th><td></td><td>use metafun instead of plain</td></tr>
+ <tr><th>--latex</th><td></td><td>force --tex=latex</td></tr>
+ <tr><th>--texexec</th><td></td><td>force texexec usage (mkii)</td></tr>
+ <tr><th>--split</th><td></td><td>split single result file into pages</td></tr>
+ </table>
+<br/>
+<h1>Examples</h1>
+<tt>mtxrun --script metapost yourfile.mp</tt>
+<br/><tt>mtxrun --script metapost --split yourfile.mp</tt>
+<br/><tt>mtxrun --script metapost yourfile.123 myfile.mps</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.man
new file mode 100644
index 00000000000..9ccaddc9f2d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.man
@@ -0,0 +1,39 @@
+.TH "mtx-metapost" "1" "01-01-2013" "version 0.10" "MetaPost to PDF processor"
+.SH NAME
+.B mtx-metapost
+.SH SYNOPSIS
+.B mtxrun --script metapost [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MetaPost to PDF processor
+.SH OPTIONS
+.TP
+.B --rawmp
+raw metapost run
+.TP
+.B --metafun
+use metafun instead of plain
+.TP
+.B --latex
+force --tex=latex
+.TP
+.B --texexec
+force texexec usage (mkii)
+.TP
+.B --split
+split single result file into pages
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.xml
new file mode 100644
index 00000000000..dbeb77a2a94
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metapost.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-metapost</entry>
+ <entry name="detail">MetaPost to PDF processor</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="rawmp"><short>raw metapost run</short></flag>
+ <flag name="metafun"><short>use metafun instead of plain</short></flag>
+ <flag name="latex"><short>force <ref name="tex=latex"/></short></flag>
+ <flag name="texexec"><short>force texexec usage (mkii)</short></flag>
+ <flag name="split"><short>split single result file into pages</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script metapost yourfile.mp</command></example>
+ <example><command>mtxrun --script metapost --split yourfile.mp</command></example>
+ <example><command>mtxrun --script metapost yourfile.123 myfile.mps</command></example>
+ </subcategory>
+ </category>
+ </examples>
+ <comments>
+ <comment>other usage resembles mptopdf.pl</comment>
+ </comments>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.html
new file mode 100644
index 00000000000..ac6c33b98db
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.html
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>MetaTeX Process Management 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">MetaTeX Process Management 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--run</th><td></td><td>process (one or more) files (default action)</td></tr>
+ <tr><th>--make</th><td></td><td>create metatex format(s)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.man
new file mode 100644
index 00000000000..bd67ff4d321
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.man
@@ -0,0 +1,30 @@
+.TH "mtx-metatex" "1" "01-01-2013" "version 0.10" "MetaTeX Process Management"
+.SH NAME
+.B mtx-metatex
+.SH SYNOPSIS
+.B mtxrun --script metatex [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MetaTeX Process Management
+.SH OPTIONS
+.TP
+.B --run
+process (one or more) files (default action)
+.TP
+.B --make
+create metatex format(s)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.xml
new file mode 100644
index 00000000000..649673fbf90
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-metatex.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-metatex</entry>
+ <entry name="detail">MetaTeX Process Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="run"><short>process (one or more) files (default action)</short></flag>
+ <flag name="make"><short>create metatex format(s)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
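
A minimal usage sketch for the metatex runner documented above (the .tex name is hypothetical; per the flag table, --run is the default action):

  mtxrun --script metatex --make             # create the metatex format(s)
  mtxrun --script metatex somefile.tex       # process a file; --run is the default action
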
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.html
new file mode 100644
index 00000000000..4edfbdb8278
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Module Documentation Generators 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Module Documentation Generators 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--convert</th><td></td><td>convert source files (tex, mkii, mkiv, mp) to 'ted' files</td></tr>
+ <tr><th>--process</th><td></td><td>process source files (tex, mkii, mkiv, mp) to 'pdf' files</td></tr>
+ <tr><th>--prep</th><td></td><td>use original name with suffix 'prep' appended</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.man
new file mode 100644
index 00000000000..a719c16a077
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.man
@@ -0,0 +1,33 @@
+.TH "mtx-modules" "1" "01-01-2013" "version 1.00" "ConTeXt Module Documentation Generators"
+.SH NAME
+.B mtx-modules
+.SH SYNOPSIS
+.B mtxrun --script modules [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Module Documentation Generators
+.SH OPTIONS
+.TP
+.B --convert
+convert source files (tex, mkii, mkiv, mp) to 'ted' files
+.TP
+.B --process
+process source files (tex, mkii, mkiv, mp) to 'pdf' files
+.TP
+.B --prep
+use original name with suffix 'prep' appended
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.xml
new file mode 100644
index 00000000000..fe0311d1632
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-modules.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-modules</entry>
+ <entry name="detail">ConTeXt Module Documentation Generators</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>convert source files (tex, mkii, mkiv, mp) to 'ted' files</short></flag>
+ <flag name="process"><short>process source files (tex, mkii, mkiv, mp) to 'pdf' files</short></flag>
+ <flag name="prep"><short>use original name with suffix 'prep' appended</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
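
The module documentation generator follows the same calling convention; a sketch with an invented module name:

  mtxrun --script modules --convert mymodule.mkiv   # convert the source to a 'ted' file
  mtxrun --script modules --process mymodule.mkiv   # process the source to a 'pdf' file
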
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.html
new file mode 100644
index 00000000000..42e54a3ac9a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.html
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Distribution Related Goodies 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Distribution Related Goodies 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--merge</th><td></td><td>merge 'loadmodule' into merge file</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.man
new file mode 100644
index 00000000000..af11e6aec01
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.man
@@ -0,0 +1,27 @@
+.TH "mtx-package" "1" "01-01-2013" "version 0.10" "Distribution Related Goodies"
+.SH NAME
+.B mtx-package
+.SH SYNOPSIS
+.B mtxrun --script package [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Distribution Related Goodies
+.SH OPTIONS
+.TP
+.B --merge
+merge 'loadmodule' into merge file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.xml
new file mode 100644
index 00000000000..0443704266a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-package.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-package</entry>
+ <entry name="detail">Distribution Related Goodies</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="merge"><short>merge 'loadmodule' into merge file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
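
For mtx-package the help above lists a single flag; a hedged sketch (the file name is invented, and the exact argument handling is not spelled out in this help text):

  mtxrun --script package --merge mystyle.mkiv   # merge 'loadmodule' files into a merge file
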
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.html
new file mode 100644
index 00000000000..427bee3bd2a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.html
@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Pattern File Management 0.20</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Pattern File Management 0.20 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--convert</th><td></td><td>generate context language files (mnemonic driven, if not given then all)</td></tr>
+ <tr><th>--check</th><td></td><td>check pattern file (or those used by context when no file given)</td></tr>
+ <tr><th>--path</th><td></td><td>source path where hyph-foo.tex files are stored</td></tr>
+ <tr><th>--destination</th><td></td><td>destination path</td></tr>
+ <tr><th>--specification</th><td></td><td>additional patterns: e.g.: =cy,hyph-cy,welsh</td></tr>
+ </table>
+<br/>
+<h1>Examples</h1>
+<tt>mtxrun --script pattern --check hyph-*.tex</tt>
+<br/><tt>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</tt>
+<br/><tt>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</tt>
+<br/><tt>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.man
new file mode 100644
index 00000000000..96e3c4d4879
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.man
@@ -0,0 +1,39 @@
+.TH "mtx-patterns" "1" "01-01-2013" "version 0.20" "ConTeXt Pattern File Management"
+.SH NAME
+.B mtx-patterns
+.SH SYNOPSIS
+.B mtxrun --script patterns [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Pattern File Management
+.SH OPTIONS
+.TP
+.B --convert
+generate context language files (mnemonic driven, if not given then all)
+.TP
+.B --check
+check pattern file (or those used by context when no file given)
+.TP
+.B --path
+source path where hyph-foo.tex files are stored
+.TP
+.B --destination
+destination path
+.TP
+.B --specification
+additional patterns: e.g.: =cy,hyph-cy,welsh
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.xml
new file mode 100644
index 00000000000..f3255aebee2
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-patterns.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-patterns</entry>
+ <entry name="detail">ConTeXt Pattern File Management</entry>
+ <entry name="version">0.20</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>generate context language files (mnemonic driven, if not given then all)</short></flag>
+ <flag name="check"><short>check pattern file (or those used by context when no file given)</short></flag>
+ <flag name="path"><short>source path where hyph-foo.tex files are stored</short></flag>
+ <flag name="destination"><short>destination path</short></flag>
+ <flag name="specification"><short>additional patterns: e.g.: =cy,hyph-cy,welsh</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script pattern --check hyph-*.tex</command></example>
+ <example><command>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</command></example>
+ <example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</command></example>
+ <example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.html
new file mode 100644
index 00000000000..be923581f7e
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt PDF Helpers 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt PDF Helpers 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--info</th><td></td><td>show some info about the given file</td></tr>
+ <tr><th>--metadata</th><td></td><td>show metadata xml blob</td></tr>
+ <tr><th>--fonts</th><td></td><td>show used fonts (--detail)</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.man
new file mode 100644
index 00000000000..7ac50df0d63
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.man
@@ -0,0 +1,33 @@
+.TH "mtx-pdf" "1" "01-01-2013" "version 0.10" "ConTeXt PDF Helpers"
+.SH NAME
+.B mtx-pdf
+.SH SYNOPSIS
+.B mtxrun --script pdf [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt PDF Helpers
+.SH OPTIONS
+.TP
+.B --info
+show some info about the given file
+.TP
+.B --metadata
+show metadata xml blob
+.TP
+.B --fonts
+show used fonts (--detail)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.xml
new file mode 100644
index 00000000000..22e85988b52
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-pdf.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-pdf</entry>
+ <entry name="detail">ConTeXt PDF Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="info"><short>show some info about the given file</short></flag>
+ <flag name="metadata"><short>show metadata xml blob</short></flag>
+    <flag name="fonts"><short>show used fonts (<ref name="detail"/>)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
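
The three mtx-pdf flags translate directly into calls such as the following ('somefile.pdf' is a placeholder):

  mtxrun --script pdf --info somefile.pdf       # show some info about the given file
  mtxrun --script pdf --metadata somefile.pdf   # show the metadata xml blob
  mtxrun --script pdf --fonts somefile.pdf      # show used fonts; --detail gives more, per the table above
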
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.html
new file mode 100644
index 00000000000..843349e4d70
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.html
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt MkIV LuaTeX Profiler 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt MkIV LuaTeX Profiler 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--analyze</th><td></td><td>analyze lua calls</td></tr>
+ <tr><th>--trace</th><td></td><td>analyze tex calls</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.man
new file mode 100644
index 00000000000..5eba4861aa4
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.man
@@ -0,0 +1,30 @@
+.TH "mtx-profile" "1" "01-01-2013" "version 1.00" "ConTeXt MkIV LuaTeX Profiler"
+.SH NAME
+.B mtx-profile
+.SH SYNOPSIS
+.B mtxrun --script profile [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt MkIV LuaTeX Profiler
+.SH OPTIONS
+.TP
+.B --analyze
+analyze lua calls
+.TP
+.B --trace
+analyze tex calls
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.xml
new file mode 100644
index 00000000000..48252c2c8d0
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-profile.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-profile</entry>
+ <entry name="detail">ConTeXt MkIV LuaTeX Profiler</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="analyze"><short>analyze lua calls</short></flag>
+ <flag name="trace"><short>analyze tex calls</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
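For reference, a possible invocation assembled from the synopsis and flags documented above; the trailing file name is illustrative and not taken from the patch:

    mtxrun --script profile --analyze myrun.lua   # file name is illustrative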
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.html
new file mode 100644
index 00000000000..0f805ff8367
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Rsync Helpers 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Rsync Helpers 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--job</th><td></td><td>use given file as specification</td></tr>
+ <tr><th>--dryrun</th><td></td><td>show what would happen</td></tr>
+ <tr><th>--force</th><td></td><td>force run</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.man
new file mode 100644
index 00000000000..5f3bc1fea00
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.man
@@ -0,0 +1,33 @@
+.TH "mtx-rsync" "1" "01-01-2013" "version 0.10" "Rsync Helpers"
+.SH NAME
+.B mtx-rsync
+.SH SYNOPSIS
+.B mtxrun --script rsync [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Rsync Helpers
+.SH OPTIONS
+.TP
+.B --job
+use given file as specification
+.TP
+.B --dryrun
+show what would happen
+.TP
+.B --force
+force run
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.xml
new file mode 100644
index 00000000000..28352cc368e
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-rsync.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-rsync</entry>
+ <entry name="detail">Rsync Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="job"><short>use given file as specification</short></flag>
+ <flag name="dryrun"><short>show what would happen</short></flag>
+ <flag name="force"><short>force run</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
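A dry run sketched from the flags above; how the --job specification file is supplied (as a value or as a trailing file name) is not spelled out in these pages, so only the value-less flag is shown:

    mtxrun --script rsync --dryrun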
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.html
new file mode 100644
index 00000000000..041fbaa5197
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.html
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Scite Helper Script 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Scite Helper Script 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--words</th><td></td><td>convert spell-*.txt into spell-*.lua</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.man
new file mode 100644
index 00000000000..118179e9184
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.man
@@ -0,0 +1,27 @@
+.TH "mtx-scite" "1" "01-01-2013" "version 1.00" "Scite Helper Script"
+.SH NAME
+.B mtx-scite
+.SH SYNOPSIS
+.B mtxrun --script scite [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Scite Helper Script
+.SH OPTIONS
+.TP
+.B --words
+convert spell-*.txt into spell-*.lua
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.xml
new file mode 100644
index 00000000000..87fe506dc8f
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-scite.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-scite</entry>
+ <entry name="detail">Scite Helper Script</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="words"><short>convert spell-*.txt into spell-*.lua</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
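The single documented flag can be exercised directly; this invocation uses only what the pages above state:

    mtxrun --script scite --words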
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.html
new file mode 100644
index 00000000000..74d1d40f14a
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.html
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Simple Webserver For Helpers 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Simple Webserver For Helpers 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--start</th><td></td><td>start server</td></tr>
+ <tr><th>--port</th><td></td><td>port to listen to</td></tr>
+ <tr><th>--root</th><td></td><td>server root</td></tr>
+ <tr><th>--scripts</th><td></td><td>scripts sub path</td></tr>
+ <tr><th>--index</th><td></td><td>index file</td></tr>
+ <tr><th>--auto</th><td></td><td>start on own path</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.man
new file mode 100644
index 00000000000..18373ad4b1c
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.man
@@ -0,0 +1,42 @@
+.TH "mtx-server" "1" "01-01-2013" "version 0.10" "Simple Webserver For Helpers"
+.SH NAME
+.B mtx-server
+.SH SYNOPSIS
+.B mtxrun --script server [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Webserver For Helpers
+.SH OPTIONS
+.TP
+.B --start
+start server
+.TP
+.B --port
+port to listen to
+.TP
+.B --root
+server root
+.TP
+.B --scripts
+scripts sub path
+.TP
+.B --index
+index file
+.TP
+.B --auto
+start on own path
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.xml
new file mode 100644
index 00000000000..d7f3988baee
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-server.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-server</entry>
+ <entry name="detail">Simple Webserver For Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="start"><short>start server</short></flag>
+ <flag name="port"><short>port to listen to</short></flag>
+ <flag name="root"><short>server root</short></flag>
+ <flag name="scripts"><short>scripts sub path</short></flag>
+ <flag name="index"><short>index file</short></flag>
+ <flag name="auto"><short>start on own path</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
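A possible start-up using only the value-less flags listed above (--port, --root, --scripts and --index would normally take values, but their value syntax is not shown in these pages):

    mtxrun --script server --auto --start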
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.html
new file mode 100644
index 00000000000..996fe90eba1
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.html
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>TeXworks Startup Script 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">TeXworks Startup Script 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--start</th><td></td><td>[--verbose] start texworks</td></tr>
+ <tr><th>--test</th><td></td><td>report what will happen</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.man
new file mode 100644
index 00000000000..df2d4b84367
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.man
@@ -0,0 +1,30 @@
+.TH "mtx-texworks" "1" "01-01-2013" "version 1.00" "TeXworks Startup Script"
+.SH NAME
+.B mtx-texworks
+.SH SYNOPSIS
+.B mtxrun --script texworks [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXworks Startup Script
+.SH OPTIONS
+.TP
+.B --start
+[--verbose] start texworks
+.TP
+.B --test
+report what will happen
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.xml
new file mode 100644
index 00000000000..0dcb6b9dc22
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-texworks.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-texworks</entry>
+ <entry name="detail">TeXworks Startup Script</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="start"><short>[<ref name="verbose]"/> start texworks</short></flag>
+ <flag name="test"><short>report what will happen</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
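Possible invocations based on the flags above; --verbose is only hinted at in the --start description:

    mtxrun --script texworks --test
    mtxrun --script texworks --start --verbose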
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.html
new file mode 100644
index 00000000000..ebad34be5fe
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Timing Tools 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Timing Tools 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--xhtml</th><td></td><td>make xhtml file</td></tr>
+ <tr><th>--launch</th><td></td><td>launch after conversion</td></tr>
+ <tr><th>--remove</th><td></td><td>remove after launching</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.man
new file mode 100644
index 00000000000..831ce60eda4
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.man
@@ -0,0 +1,33 @@
+.TH "mtx-timing" "1" "01-01-2013" "version 0.10" "ConTeXt Timing Tools"
+.SH NAME
+.B mtx-timing
+.SH SYNOPSIS
+.B mtxrun --script timing [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Timing Tools
+.SH OPTIONS
+.TP
+.B --xhtml
+make xhtml file
+.TP
+.B --launch
+launch after conversion
+.TP
+.B --remove
+remove after launching
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.xml
new file mode 100644
index 00000000000..d41322799dd
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-timing.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-timing</entry>
+ <entry name="detail">ConTeXt Timing Tools</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="xhtml"><short>make xhtml file</short></flag>
+ <flag name="launch"><short>launch after conversion</short></flag>
+ <flag name="remove"><short>remove after launching</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
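A possible invocation built from the flags above; the job name is illustrative:

    mtxrun --script timing --xhtml --launch myjob   # job name is illustrative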
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.html
new file mode 100644
index 00000000000..c4f25b2ed13
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.html
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Some File Related Goodies 1.01</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Some File Related Goodies 1.01 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--disarmutfbomb</th><td></td><td>remove utf bomb if present</td></tr>
+ <tr><th>--force</th><td></td><td>remove indeed</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--dirtoxml</th><td></td><td>glob directory into xml</td></tr>
+ <tr><th>--pattern</th><td></td><td>glob pattern (default: *)</td></tr>
+ <tr><th>--url</th><td></td><td>url attribute (no processing)</td></tr>
+ <tr><th>--root</th><td></td><td>the root of the globbed path (default: .)</td></tr>
+ <tr><th>--output</th><td></td><td>output filename (console by default)</td></tr>
+ <tr><th>--recurse</th><td></td><td>recurse into subdirectories</td></tr>
+ <tr><th>--stripname</th><td></td><td>take pathpart of given pattern</td></tr>
+ <tr><th>--longname</th><td></td><td>set name attributes to full path name</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pattern</th><td></td><td>glob pattern (default: *)</td></tr>
+ <tr><th>--recurse</th><td></td><td>recurse into subdirectories</td></tr>
+ <tr><th>--force</th><td></td><td>downcase indeed</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.man
new file mode 100644
index 00000000000..f56b27bde55
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.man
@@ -0,0 +1,63 @@
+.TH "mtx-tools" "1" "01-01-2013" "version 1.01" "Some File Related Goodies"
+.SH NAME
+.B mtx-tools
+.SH SYNOPSIS
+.B mtxrun --script tools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Some File Related Goodies
+.SH OPTIONS
+.TP
+.B --disarmutfbomb
+remove utf bomb if present
+.TP
+.B --force
+remove indeed
+.TP
+.B --dirtoxml
+glob directory into xml
+.TP
+.B --pattern
+glob pattern (default: *)
+.TP
+.B --url
+url attribute (no processing)
+.TP
+.B --root
+the root of the globbed path (default: .)
+.TP
+.B --output
+output filename (console by default)
+.TP
+.B --recurse
+recurse into subdirectories
+.TP
+.B --stripname
+take pathpart of given pattern
+.TP
+.B --longname
+set name attributes to full path name
+.TP
+.B --pattern
+glob pattern (default: *)
+.TP
+.B --recurse
+recurse into subdirectories
+.TP
+.B --force
+downcase indeed
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.xml
new file mode 100644
index 00000000000..a2ce84d5b32
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-tools.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-tools</entry>
+ <entry name="detail">Some File Related Goodies</entry>
+ <entry name="version">1.01</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="disarmutfbomb"><short>remove utf bomb if present</short></flag>
+ <flag name="force"><short>remove indeed</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="dirtoxml"><short>glob directory into xml</short></flag>
+ <flag name="pattern"><short>glob pattern (default: *)</short></flag>
+ <flag name="url"><short>url attribute (no processing)</short></flag>
+ <flag name="root"><short>the root of the globbed path (default: .)</short></flag>
+ <flag name="output"><short>output filename (console by default)</short></flag>
+ <flag name="recurse"><short>recurse into subdirecories</short></flag>
+ <flag name="stripname"><short>take pathpart of given pattern</short></flag>
+ <flag name="longname"><short>set name attributes to full path name</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern"><short>glob pattern (default: *)</short></flag>
+ <flag name="recurse"><short>recurse into subdirecories</short></flag>
+ <flag name="force"><short>downcase indeed</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
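A possible invocation using the first subcategory of flags above; the file name is illustrative:

    mtxrun --script tools --disarmutfbomb --force somefile.tex   # file name is illustrative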
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.html
new file mode 100644
index 00000000000..58c124487db
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.html
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>Simple Unzipper 0.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">Simple Unzipper 0.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--list</th><td></td><td>list files in archive</td></tr>
+ <tr><th>--junk</th><td></td><td>flatten unzipped directory structure</td></tr>
+ <tr><th>--extract</th><td></td><td>extract files</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.man
new file mode 100644
index 00000000000..5e1a369b2f7
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.man
@@ -0,0 +1,33 @@
+.TH "mtx-unzip" "1" "01-01-2013" "version 0.10" "Simple Unzipper"
+.SH NAME
+.B mtx-unzip
+.SH SYNOPSIS
+.B mtxrun --script unzip [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Unzipper
+.SH OPTIONS
+.TP
+.B --list
+list files in archive
+.TP
+.B --junk
+flatten unzipped directory structure
+.TP
+.B --extract
+extract files
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.xml
new file mode 100644
index 00000000000..9f56ca076d1
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-unzip.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-unzip</entry>
+ <entry name="detail">Simple Unzipper</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="list"><short>list files in archive</short></flag>
+ <flag name="junk"><short>flatten unzipped directory structure</short></flag>
+ <flag name="extract"><short>extract files</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
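A possible listing run based on the flags above; the archive name is illustrative:

    mtxrun --script unzip --list archive.zip   # archive name is illustrative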
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.html
new file mode 100644
index 00000000000..ea04dcfb36d
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.html
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Minimals Updater 0.31</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Minimals Updater 0.31 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--platform</th><td>string</td><td>platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)</td></tr>
+ <tr><th>--server</th><td>string</td><td>repository url (rsync://contextgarden.net)</td></tr>
+ <tr><th>--module</th><td>string</td><td>repository url (minimals)</td></tr>
+ <tr><th>--repository</th><td>string</td><td>specify version (current, experimental)</td></tr>
+ <tr><th>--context</th><td>string</td><td>specify version (current, latest, beta, yyyy.mm.dd)</td></tr>
+ <tr><th>--rsync</th><td>string</td><td>rsync binary (rsync)</td></tr>
+ <tr><th>--texroot</th><td>string</td><td>installation directory (not guessed for the moment)</td></tr>
+ <tr><th>--engine</th><td>string</td><td>tex engine (luatex, pdftex, xetex)</td></tr>
+ <tr><th>--modules</th><td>string</td><td>extra modules (can be list or 'all')</td></tr>
+ <tr><th>--fonts</th><td>string</td><td>additional fonts (can be list or 'all')</td></tr>
+ <tr><th>--goodies</th><td>string</td><td>extra binaries (like scite and texworks)</td></tr>
+ <tr><th>--force</th><td></td><td>instead of a dryrun, do the real thing</td></tr>
+ <tr><th>--update</th><td></td><td>update minimal tree</td></tr>
+ <tr><th>--make</th><td></td><td>also make formats and generate file databases</td></tr>
+ <tr><th>--keep</th><td></td><td>don't delete unused or obsolete files</td></tr>
+ <tr><th>--state</th><td></td><td>update tree using saved state</td></tr>
+ <tr><th>--cygwin</th><td></td><td>adapt drive specs to cygwin</td></tr>
+ <tr><th>--mingw</th><td></td><td>assume mingw binaries being used</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.man
new file mode 100644
index 00000000000..72e7ce1b5ae
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.man
@@ -0,0 +1,78 @@
+.TH "mtx-update" "1" "01-01-2013" "version 0.31" "ConTeXt Minimals Updater"
+.SH NAME
+.B mtx-update
+.SH SYNOPSIS
+.B mtxrun --script update [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Minimals Updater
+.SH OPTIONS
+.TP
+.B --platform=string
+platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)
+.TP
+.B --server=string
+repository url (rsync://contextgarden.net)
+.TP
+.B --module=string
+repository url (minimals)
+.TP
+.B --repository=string
+specify version (current, experimental)
+.TP
+.B --context=string
+specify version (current, latest, beta, yyyy.mm.dd)
+.TP
+.B --rsync=string
+rsync binary (rsync)
+.TP
+.B --texroot=string
+installation directory (not guessed for the moment)
+.TP
+.B --engine=string
+tex engine (luatex, pdftex, xetex)
+.TP
+.B --modules=string
+extra modules (can be list or 'all')
+.TP
+.B --fonts=string
+additional fonts (can be list or 'all')
+.TP
+.B --goodies=string
+extra binaries (like scite and texworks)
+.TP
+.B --force
+instead of a dryrun, do the real thing
+.TP
+.B --update
+update minimal tree
+.TP
+.B --make
+also make formats and generate file databases
+.TP
+.B --keep
+don't delete unused or obsolete files
+.TP
+.B --state
+update tree using saved state
+.TP
+.B --cygwin
+adapt drive specs to cygwin
+.TP
+.B --mingw
+assume mingw binaries being used
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
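For reference, a typical invocation of the updater documented above could look like the following sketch, assembled from the flags listed in this man page; the platform and installation root are assumed examples. Without --force the script performs a dry run, and adding --force does the real update:

    mtxrun --script update --platform=linux-64 --texroot=/opt/context/tex --update
    mtxrun --script update --platform=linux-64 --texroot=/opt/context/tex --update --make --force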
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.xml
new file mode 100644
index 00000000000..95e99134156
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-update.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-update</entry>
+ <entry name="detail">ConTeXt Minimals Updater</entry>
+ <entry name="version">0.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="platform" value="string"><short>platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)</short></flag>
+ <flag name="server" value="string"><short>repository url (rsync://contextgarden.net)</short></flag>
+ <flag name="module" value="string"><short>repository url (minimals)</short></flag>
+ <flag name="repository" value="string"><short>specify version (current, experimental)</short></flag>
+ <flag name="context" value="string"><short>specify version (current, latest, beta, yyyy.mm.dd)</short></flag>
+ <flag name="rsync" value="string"><short>rsync binary (rsync)</short></flag>
+ <flag name="texroot" value="string"><short>installation directory (not guessed for the moment)</short></flag>
+ <flag name="engine" value="string"><short>tex engine (luatex, pdftex, xetex)</short></flag>
+ <flag name="modules" value="string"><short>extra modules (can be list or 'all')</short></flag>
+ <flag name="fonts" value="string"><short>additional fonts (can be list or 'all')</short></flag>
+ <flag name="goodies" value="string"><short>extra binaries (like scite and texworks)</short></flag>
+ <flag name="force"><short>instead of a dryrun, do the real thing</short></flag>
+ <flag name="update"><short>update minimal tree</short></flag>
+ <flag name="make"><short>also make formats and generate file databases</short></flag>
+ <flag name="keep"><short>don't delete unused or obsolete files</short></flag>
+ <flag name="state"><short>update tree using saved state</short></flag>
+ <flag name="cygwin"><short>adapt drive specs to cygwin</short></flag>
+ <flag name="mingw"><short>assume mingw binaries being used</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.html
new file mode 100644
index 00000000000..a4f783e0449
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.html
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt Request Watchdog 1.00</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt Request Watchdog 1.00 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--logpath</th><td></td><td>optional path for log files</td></tr>
+ <tr><th>--watch</th><td></td><td>watch given path [--delay]</td></tr>
+ <tr><th>--pipe</th><td></td><td>use pipe instead of execute</td></tr>
+ <tr><th>--delay</th><td></td><td>delay between sweeps</td></tr>
+ <tr><th>--automachine</th><td></td><td>replace /machine/ in path /servername/</td></tr>
+ <tr><th>--collect</th><td></td><td>condense log files</td></tr>
+ <tr><th>--cleanup</th><td>delay</td><td>remove files in given path [--force]</td></tr>
+ <tr><th>--showlog</th><td></td><td>show log data</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.man
new file mode 100644
index 00000000000..9f4a0d3b9d7
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.man
@@ -0,0 +1,48 @@
+.TH "mtx-watch" "1" "01-01-2013" "version 1.00" "ConTeXt Request Watchdog"
+.SH NAME
+.B mtx-watch
+.SH SYNOPSIS
+.B mtxrun --script watch [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Request Watchdog
+.SH OPTIONS
+.TP
+.B --logpath
+optional path for log files
+.TP
+.B --watch
+watch given path [--delay]
+.TP
+.B --pipe
+use pipe instead of execute
+.TP
+.B --delay
+delay between sweeps
+.TP
+.B --automachine
+replace /machine/ in path /servername/
+.TP
+.B --collect
+condense log files
+.TP
+.B --cleanup=delay
+remove files in given path [--force]
+.TP
+.B --showlog
+show log data
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
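As an illustration of the watchdog described above, a hypothetical hot-folder setup might be started as follows; the watched path and log directory are assumed examples:

    mtxrun --script watch --logpath=/var/log/context --watch /data/hotfolder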
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.xml
new file mode 100644
index 00000000000..14a3a5f1690
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtx-watch.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-watch</entry>
+ <entry name="detail">ConTeXt Request Watchdog</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="logpath"><short>optional path for log files</short></flag>
+ <flag name="watch"><short>watch given path [<ref name="delay]"/></short></flag>
+ <flag name="pipe"><short>use pipe instead of execute</short></flag>
+ <flag name="delay"><short>delay between sweeps</short></flag>
+ <flag name="automachine"><short>replace /machine/ in path /servername/</short></flag>
+ <flag name="collect"><short>condense log files</short></flag>
+ <flag name="cleanup" value="delay"><short>remove files in given path [<ref name="force]"/></short></flag>
+ <flag name="showlog"><short>show log data</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.html b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.html
new file mode 100644
index 00000000000..37731369499
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.html
@@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+
+
+
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt TDS Runner Tool 1.31</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt TDS Runner Tool 1.31 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+    <tr><th>--script</th><td></td><td>run an mtx script (lua preferred method) (--noquotes), no script gives list</td></tr>
+ <tr><th>--execute</th><td></td><td>run a script or program (texmfstart method) (--noquotes)</td></tr>
+ <tr><th>--resolve</th><td></td><td>resolve prefixed arguments</td></tr>
+ <tr><th>--ctxlua</th><td></td><td>run internally (using preloaded libs)</td></tr>
+ <tr><th>--internal</th><td></td><td>run script using built in libraries (same as --ctxlua)</td></tr>
+ <tr><th>--locate</th><td></td><td>locate given filename in database (default) or system (--first --all --detail)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--autotree</th><td></td><td>use texmf tree cf. env texmfstart_tree or texmfstarttree</td></tr>
+ <tr><th>--tree</th><td>pathtotree</td><td>use given texmf tree (default file: setuptex.tmf)</td></tr>
+ <tr><th>--environment</th><td>name</td><td>use given (tmf) environment file</td></tr>
+ <tr><th>--path</th><td>runpath</td><td>go to given path before execution</td></tr>
+ <tr><th>--ifchanged</th><td>filename</td><td>only execute when given file has changed (md checksum)</td></tr>
+ <tr><th>--iftouched</th><td>old,new</td><td>only execute when given file has changed (time stamp)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--makestubs</th><td></td><td>create stubs for (context related) scripts</td></tr>
+ <tr><th>--removestubs</th><td></td><td>remove stubs (context related) scripts</td></tr>
+    <tr><th>--stubpath</th><td>binpath</td><td>paths where stubs will be written</td></tr>
+ <tr><th>--windows</th><td></td><td>create windows (mswin) stubs</td></tr>
+ <tr><th>--unix</th><td></td><td>create unix (linux) stubs</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
+ <tr><th>--progname</th><td>str</td><td>format or backend</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--edit</th><td></td><td>launch editor with found file</td></tr>
+ <tr><th>--launch</th><td></td><td>launch files like manuals, assumes os support (--all)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--timedrun</th><td></td><td>run a script and time its run</td></tr>
+ <tr><th>--autogenerate</th><td></td><td>regenerate databases if needed (handy when used to run context in an editor)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--usekpse</th><td></td><td>use kpse as fallback (when no mkiv and cache installed, often slower)</td></tr>
+ <tr><th>--forcekpse</th><td></td><td>force using kpse (handy when no mkiv and cache installed but less functionality)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--prefixes</th><td></td><td>show supported prefixes</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--generate</th><td></td><td>generate file database</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--variables</th><td></td><td>show configuration variables</td></tr>
+ <tr><th>--configurations</th><td></td><td>show configuration order</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--directives</th><td></td><td>show (known) directives</td></tr>
+ <tr><th>--trackers</th><td></td><td>show (known) trackers</td></tr>
+ <tr><th>--experiments</th><td></td><td>show (known) experiments</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--expand-braces</th><td></td><td>expand complex variable</td></tr>
+ <tr><th>--expand-path</th><td></td><td>expand variable (resolve paths)</td></tr>
+ <tr><th>--expand-var</th><td></td><td>expand variable (resolve references)</td></tr>
+ <tr><th>--show-path</th><td></td><td>show path expansion of ...</td></tr>
+ <tr><th>--var-value</th><td></td><td>report value of variable</td></tr>
+ <tr><th>--find-file</th><td></td><td>report file location</td></tr>
+ <tr><th>--find-path</th><td></td><td>report path of file</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--pattern</th><td>string</td><td>filter variables</td></tr>
+ </table>
+<br/>
+ </div>
+ </div>
+ </body>
+ </html>
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.man b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.man
new file mode 100644
index 00000000000..212097ff153
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.man
@@ -0,0 +1,147 @@
+.TH "mtxrun" "1" "01-01-2013" "version 1.31" "ConTeXt TDS Runner Tool"
+.SH NAME
+.B mtxrun
+.SH SYNOPSIS
+.B mtxrun [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Runner Tool
+.SH OPTIONS
+.TP
+.B --script
+run an mtx script (lua preferred method) (--noquotes), no script gives list
+.TP
+.B --execute
+run a script or program (texmfstart method) (--noquotes)
+.TP
+.B --resolve
+resolve prefixed arguments
+.TP
+.B --ctxlua
+run internally (using preloaded libs)
+.TP
+.B --internal
+run script using built in libraries (same as --ctxlua)
+.TP
+.B --locate
+locate given filename in database (default) or system (--first --all --detail)
+.TP
+.B --autotree
+use texmf tree cf. env texmfstart_tree or texmfstarttree
+.TP
+.B --tree=pathtotree
+use given texmf tree (default file: setuptex.tmf)
+.TP
+.B --environment=name
+use given (tmf) environment file
+.TP
+.B --path=runpath
+go to given path before execution
+.TP
+.B --ifchanged=filename
+only execute when given file has changed (md checksum)
+.TP
+.B --iftouched=old,new
+only execute when given file has changed (time stamp)
+.TP
+.B --makestubs
+create stubs for (context related) scripts
+.TP
+.B --removestubs
+remove stubs (context related) scripts
+.TP
+.B --stubpath=binpath
+paths where stubs will be written
+.TP
+.B --windows
+create windows (mswin) stubs
+.TP
+.B --unix
+create unix (linux) stubs
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --trackers=list
+enable given trackers
+.TP
+.B --progname=str
+format or backend
+.TP
+.B --edit
+launch editor with found file
+.TP
+.B --launch
+launch files like manuals, assumes os support (--all)
+.TP
+.B --timedrun
+run a script and time its run
+.TP
+.B --autogenerate
+regenerate databases if needed (handy when used to run context in an editor)
+.TP
+.B --usekpse
+use kpse as fallback (when no mkiv and cache installed, often slower)
+.TP
+.B --forcekpse
+force using kpse (handy when no mkiv and cache installed but less functionality)
+.TP
+.B --prefixes
+show supported prefixes
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --directives
+show (known) directives
+.TP
+.B --trackers
+show (known) trackers
+.TP
+.B --experiments
+show (known) experiments
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --pattern=string
+filter variables
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
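Two invocations that follow directly from the option list above are regenerating the file database and locating a file in it; the queried file name is only an example:

    mtxrun --generate
    mtxrun --find-file context.mkiv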
diff --git a/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.xml b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.xml
new file mode 100644
index 00000000000..f1b1e01f4b6
--- /dev/null
+++ b/Master/texmf-dist/doc/context/scripts/mkiv/mtxrun.xml
@@ -0,0 +1,79 @@
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+ <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+ <flag name="stubpath" value="binpath"><short>paths where stubs wil be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/doc/context/scripts/perl/texshow.1 b/Master/texmf-dist/doc/context/scripts/perl/texshow.1
deleted file mode 100644
index 618928b8f4b..00000000000
--- a/Master/texmf-dist/doc/context/scripts/perl/texshow.1
+++ /dev/null
@@ -1,70 +0,0 @@
-.TH "texshow" "1" "Jul 2006" "ConTeXt" "CONTEXT"
-.PP
-.SH "NAME"
-texshow \- ConTeXt command and parameter reference
-.PP
-.SH "SYNOPSIS"
-\fBtexshow\fP [ \fIOPTION\fP ] [ \fICOMMAND\fP ]
-[ \fILANGUAGE\fP ]
-.PP
-.SH "DESCRIPTION"
-.PP
-\fBtexshow\fP uses \fBperl\fP(1) and PerlTk (see \fBTk\fP(3pm)) to
-show a overview of the commands and parameters of those\&. In the Tk
-window you can search for a command and by switching the interface,
-you can see the equivalent in another interface language\&.
-.PP
-\fICOMMAND\fP is a \fBConTeXt\fP command to search for in the
-database\&. Note that Plain TeX commands are not (yet) in this database
-although they can be used in \fBConTeXt\fP source files\&.
-.PP
-\fILANGUAGE\fP can be one of \fBcz\fP (Czech), \fBde\fP
-(German), \fBen\fP (US-English) or \fBnl\fP (Dutch)\&. The default
-language is English\&.
-.PP
-You can also set the interface language with the
-\fB--interface\fP switch\&.
-.PP
-.SH "BINDINGS"
-.PP
-Most keyboard events are bound to the search widget, with the
-exception of \fB<Page Up>\fP (aka \fB<Prior>\fP) and \fB<Page Down>\fP
-(aka \fB<Next>\fP). \fB<Control-q>\fP, \fB<Control-x>\fP, and
-\fB<Alt-F4>\fP quit the application\&.
-.PP
-.SH "OPTIONS"
-.PP
-.IP "\fB--help\fP"
-Print a brief syntax summary
-.IP "\fB--interface=\fP\fILANGUAGE\fP"
-Primary interface language\&.
-See DESCRIPTION for \fILANGUAGE\fP options\&.
-.PP
-.SH "FILES"
-.PP
-.IP "\fITEXMF/tex/context/interface/cont-\fIXX\fP\&.xml\fP"
-The database
-file\&. At this writing, \fIXX\fP can be one of \fBcz\fP (Czech),
-\fBde\fP (German), \fBen\fP (English), \fBfr\fP (French),
-\fBit\fP (Italian), \fBnl\fP (Dutch), or \fBro\fP (Romanian)\&.
-Which file is used is determined by the
-\fILANGUAGE\fP specified on the command line\&.
-.PP
-.SH "SEE ALSO"
-.PP
-\fBtexexec\fP(1), \fBpdfetex\fP(1), \fBpdftex\fP(1)\&.
-.PP
-Web page: <http://www\&.pragma-ade\&.com/>
-.PP
-.SH "AUTHOR"
-.PP
-This version of \fBtexshow\fP was written by Taco Hoekwater
-<taco@elvenkind\&.com>\&.
-.PP
-This manpage was written by Tobias Burnus
-<burnus@gmx\&.de> and C\&.M\&. Connelly
-<c@eskimo\&.com>\&.
-.PP
-\fBtexshow\fP is part of \fBConTeXt\fP and is available
-from <http://www\&.pragma-ade\&.com/pragma-ade/>\&.
-.PP
diff --git a/Master/texmf-dist/doc/context/scripts/perl/texshow.html b/Master/texmf-dist/doc/context/scripts/perl/texshow.html
deleted file mode 100644
index a9af24284c6..00000000000
--- a/Master/texmf-dist/doc/context/scripts/perl/texshow.html
+++ /dev/null
@@ -1,137 +0,0 @@
-<HTML><HEAD><TITLE>Manpage of texshow</TITLE>
-</HEAD><BODY>
-<H1>texshow</H1>
-Section: CONTEXT (1)<BR>Updated: Jul 2006<BR><A HREF="#index">Index</A>
-<A HREF="http://localhost/cgi-bin/man/man2html">Return to Main Contents</A><HR>
-
-<P>
-
-<A NAME="lbAB">&nbsp;</A>
-<H2>NAME </H2>
-
-texshow - ConTeXt command and parameter reference
-<P>
-
-<A NAME="lbAC">&nbsp;</A>
-<H2>SYNOPSIS </H2>
-
-<B>texshow</B> [ <I>OPTION</I> ] [ <I>COMMAND</I> ]
-[ <I>LANGUAGE</I> ]
-<P>
-
-<A NAME="lbAD">&nbsp;</A>
-<H2>DESCRIPTION </H2>
-
-<P>
-
-<B>texshow</B> uses <B><A HREF="http://localhost/cgi-bin/man/man2html?1+perl">perl</A></B>(1) and PerlTk (see <B>Tk</B>(3pm)) to
-show a overview of the commands and parameters of those. In the Tk
-window you can search for a command and by switching the interface,
-you can see the equivalent in another interface language.
-<P>
-
-<I>COMMAND</I> is a <B>ConTeXt</B> command to search for in the
-database. Note that Plain TeX commands are not (yet) in this database
-although they can be used in <B>ConTeXt</B> source files.
-<P>
-
-<I>LANGUAGE</I> can be one of <B>cz</B> (Czech), <B>de</B>
-(German), <B>en</B> (US-English) or <B>nl</B> (Dutch). The default
-language is English.
-<P>
-
-You can also set the interface language with the
-<B>--interface</B> switch.
-<P>
-
-<A NAME="lbAE">&nbsp;</A>
-<H2>BINDINGS </H2>
-
-<P>
-
-Most keyboard events are bound to the search widget, with the
-exception of <B>&lt;Page Up&gt;</B> (aka <B>&lt;Prior&gt;</B>) and <B>&lt;Page Down&gt;</B>
-(aka <B>&lt;Next&gt;</B>). <B>&lt;Control-q&gt;</B>, <B>&lt;Control-x&gt;</B>, and
-<B>&lt;Alt-F4&gt;</B> quit the application.
-<P>
-
-<A NAME="lbAF">&nbsp;</A>
-<H2>OPTIONS </H2>
-
-<P>
-
-<DL COMPACT>
-<DT><B>--help</B><DD>
-Print a brief syntax summary
-<DT><B>--interface=</B><I>LANGUAGE</I><DD>
-Primary interface language.
-See DESCRIPTION for <I>LANGUAGE</I> options.
-</DL>
-<P>
-
-<A NAME="lbAG">&nbsp;</A>
-<H2>FILES </H2>
-
-<P>
-
-<DL COMPACT>
-<DT><I>TEXMF/tex/context/interface/cont-XX</I>.xml<DD>
-The database
-file. At this writing, <I>XX</I> can be one of <B>cz</B> (Czech),
-<B>de</B> (German), <B>en</B> (English), <B>fr</B> (French),
-<B>it</B> (Italian), <B>nl</B> (Dutch), or <B>ro</B> (Romanian).
-Which file is used is determined by the
-<I>LANGUAGE</I> specified on the command line.
-</DL>
-<P>
-
-<A NAME="lbAH">&nbsp;</A>
-<H2>SEE ALSO </H2>
-
-<P>
-
-<B><A HREF="http://localhost/cgi-bin/man/man2html?1+texexec">texexec</A></B>(1), <B><A HREF="http://localhost/cgi-bin/man/man2html?1+pdfetex">pdfetex</A></B>(1), <B><A HREF="http://localhost/cgi-bin/man/man2html?1+pdftex">pdftex</A></B>(1).
-<P>
-
-Web page: &lt;<A HREF="http://www.pragma-ade.com/">http://www.pragma-ade.com/</A>&gt;
-<P>
-
-<A NAME="lbAI">&nbsp;</A>
-<H2>AUTHOR </H2>
-
-<P>
-
-This version of <B>texshow</B> was written by Taco Hoekwater
-&lt;<A HREF="mailto:taco@elvenkind.com">taco@elvenkind.com</A>&gt;.
-<P>
-
-This manpage was written by Tobias Burnus
-&lt;<A HREF="mailto:burnus@gmx.de">burnus@gmx.de</A>&gt; and C.M. Connelly
-&lt;c@eskimo.com&gt;.
-<P>
-
-<B>texshow</B> is part of <B>ConTeXt</B> and is available
-from &lt;<A HREF="http://www.pragma-ade.com/pragma-ade/">http://www.pragma-ade.com/pragma-ade/</A>&gt;.
-<P>
-
-<P>
-
-<HR>
-<A NAME="index">&nbsp;</A><H2>Index</H2>
-<DL>
-<DT><A HREF="#lbAB">NAME </A><DD>
-<DT><A HREF="#lbAC">SYNOPSIS </A><DD>
-<DT><A HREF="#lbAD">DESCRIPTION </A><DD>
-<DT><A HREF="#lbAE">BINDINGS </A><DD>
-<DT><A HREF="#lbAF">OPTIONS </A><DD>
-<DT><A HREF="#lbAG">FILES </A><DD>
-<DT><A HREF="#lbAH">SEE ALSO </A><DD>
-<DT><A HREF="#lbAI">AUTHOR </A><DD>
-</DL>
-<HR>
-This document was created by
-<A HREF="http://localhost/cgi-bin/man/man2html">man2html</A>,
-using the manual pages.<BR>
-Time: 09:31:15 GMT, July 19, 2006
-</BODY>
-</HTML>
diff --git a/Master/texmf-dist/doc/man/man1/context.1 b/Master/texmf-dist/doc/man/man1/context.1
index 254d9b87468..e1fb28a994e 100644
--- a/Master/texmf-dist/doc/man/man1/context.1
+++ b/Master/texmf-dist/doc/man/man1/context.1
@@ -1,140 +1,167 @@
-.TH "CONTEXT" "1" "June 2011" "context 0.52" "ConTeXt"
-.de URL
-\\$2 \(laURL: \\$1 \(ra\\$3
-..
-.if \n[.g] .mso www.tmac
-.de EX
-.in +3
-.nf
-.ft CW
-..
-.de EE
-.in -3
-.ft R
-.fi
-..
-
-.SH "NAME"
-context \- front end to the new ConTeXt typesetting system
-
-.SH "SYNOPSIS"
-\fBcontext\fP [ \fIOPTION\fP ... ] \fIFILE\fP [ ... ]
-
-.SH "DESCRIPTION"
-
-\fBcontext\fP, a \fBtexlua\fP(1) script,
-is the command-line front end to the new version of the
-ConTeXt typesetting system, an extensive macro package
-built on the \fBtex\fP(1) family of typesetting programs.
-\fBcontext\fP provides several facilties:
-.IP \(bu
-Process a ConTeXt source file,
-performing as many runs as
-necessary of \fBluatex\fP(1). Options control the output
-format, the pages to process, paper size, and so forth.
-.IP \(bu
-Create new ConTeXt formats, useful during installation.
-.IP \(bu
-Post-process existing PDF files, including merging multiple
-files, and extracting and rearranging pages within a file,
-similar to the facilities provided for PostScript files by
-\fBpsnup\fP(1) or for PDF files by \fBpdftk\fP(1).
-.IP
-.SH "OPTIONS"
-
-All switches should always be specified in full. With no options,
-\fBcontext\fP assumes that
-the file on the command line is a ConTeXt source file, i.e. a TeX file
-in the ConTeXt dialect, and produces a PDF file using the source file.
-
-.IP "\fB--run\fP"
+.TH "mtx-context" "1" "01-01-2013" "version 0.60" "ConTeXt Process Management"
+.SH NAME
+.B mtx-context
+.SH SYNOPSIS
+.B mtxrun --script context [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Process Management
+.SH OPTIONS: BASIC
+.TP
+.B --run
process (one or more) files (default action)
-.IP "\fB--make\fP"
+.TP
+.B --make
create context formats
-.IP "\fB--ctx\fP\fI=\fP\fIname\fP"
+.TP
+.B --ctx=name
use ctx file (process management specification)
-.IP "\fB--interface\fP"
+.TP
+.B --interface
use specified user interface (default: en)
-.IP "\fB--autopdf\fP"
+.TP
+.B --autopdf
close pdf file in viewer and start pdf viewer afterwards
-.IP "\fB--purge(all)\fP"
-purge files either or not after a run (\fB--pattern\fP\fI=\fP\fI...\fP)
-.IP "\fB--usemodule\fP\fI=\fP\fIlist\fP"
-load the given module or style, normally part o fthe distribution
-.IP "\fB--environment\fP\fI=\fP\fIlist\fP"
+.TP
+.B --purge
+purge files, whether or not a run is made (--pattern=...)
+.TP
+.B --purgeall
+purge all files, whether or not a run is made (--pattern=...)
+.TP
+.B --usemodule=list
+load the given module or style, normally part of the distribution
+.TP
+.B --environment=list
load the given environment file first (document styles)
-.IP "\fB--mode\fP\fI=\fP\fIlist\fP"
+.TP
+.B --mode=list
enable given the modes (conditional processing in styles)
-.IP "\fB--path\fP\fI=\fP\fIlist\fP"
+.TP
+.B --path=list
also consult the given paths when files are looked for
-.IP "\fB--arguments\fP\fI=\fP\fIlist\fP"
+.TP
+.B --arguments=list
set variables that can be consulted during a run (key/value pairs)
-.IP "\fB--randomseed\fP\fI=\fP\fInumber\fP"
+.TP
+.B --randomseed=number
set the randomseed
-.IP "\fB--result\fP\fI=\fP\fIname\fP"
+.TP
+.B --result=name
rename the resulting output to the given name
-.IP "\fB--trackers\fP\fI=\fP\fIlist\fP"
-set tracker variables (show list with \fB--showtrackers\fP)
-.IP "\fB--directives\fP\fI=\fP\fIlist\fP"
-set directive variables (show list with \fB--showdirectives\fP)
-.IP "\fB--silent\fP\fI=\fP\fIlist\fP"
-disable logcatgories (show list with \fB--showlogcategories\fP)
-.IP "\fB--noconsole\fP"
+.TP
+.B --trackers=list
+set tracker variables (show list with --showtrackers)
+.TP
+.B --directives=list
+set directive variables (show list with --showdirectives)
+.TP
+.B --silent=list
+disable log categories (show list with --showlogcategories)
+.TP
+.B --noconsole
disable logging to the console (logfile only)
-.IP "\fB--purgeresult\fP"
+.TP
+.B --purgeresult
purge result file before run
-.IP "\fB--forcexml\fP"
-force xml stub (optional flag: \fB--mkii\fP)
-.IP "\fB--forcecld\fP"
+.TP
+.B --forcexml
+force xml stub
+.TP
+.B --forcecld
force cld (context lua document) stub
-.IP "\fB--arrange\fP"
+.TP
+.B --forcelua
+force lua stub (like texlua)
+.TP
+.B --forcemp
+force mp stub
+.TP
+.B --arrange
run extra imposition pass, given that the style sets up imposition
-.IP "\fB--noarrange\fP"
+.TP
+.B --noarrange
ignore imposition specifications in the style
-.IP "\fB--once\fP"
+.TP
+.B --jit
+use luajittex with jit turned off (only use the faster virtual machine)
+.TP
+.B --jiton
+use luajittex with jit turned on (in most cases not faster, even slower)
+.TP
+.B --once
only run once (no multipass data file is produced)
-.IP "\fB--batchmode\fP"
-run without stopping and don't show messages on the console
-.IP "\fB--nonstopmode\fP"
+.TP
+.B --batchmode
+run without stopping and do not show messages on the console
+.TP
+.B --nonstopmode
run without stopping
-.IP "\fB--generate\fP"
-generate file database etc. (as \fBmtxrun\fP does)
-.IP "\fB--paranoid\fP"
-don't descend to .. and ../..
-.IP "\fB--version\fP"
+.TP
+.B --synctex
+run with synctex enabled (optional value: zipped, unzipped, 1, -1)
+.TP
+.B --generate
+generate file database etc. (as luatools does)
+.TP
+.B --paranoid
+do not descend to .. and ../..
+.TP
+.B --version
report installed context version
-.IP "\fB--expert\fP"
-show expert options
-
-
-.SH "INITIALIZATION"
-
-.PP
-The \fBcontext\fP command is just a stub that runs the larger \fBmtxrun\fP
-script, which is a scripting framework written for \fBtexlua\fP(1) that
-supports loading of extra modules, of which \fBcontext\fP is one. The
-implementation actually resides in a file named \fBmtx-context.lua\fP
-deep within the TDS structure.
-
-If \fBcontext\fP complains about \fBUnknown script 'context'\fP, this
-can usually be fixed by executing \fBmtxrun --generate\fP on the
-command line: this command regenerates the database that \fBmtxrun\fP
-uses to find its extra modules.
+.TP
+.B --global
+assume given file present elsewhere
+.TP
+.B --nofile
+use dummy file as jobname
+.SH OPTIONS: EXPERT
+.TP
+.B --touch
+update context version number (remake needed afterwards, also provide --expert)
+.TP
+.B --nostatistics
+omit runtime statistics at the end of the run
+.TP
+.B --update
+update context from website (not to be confused with contextgarden)
+.TP
+.B --profile
+profile job (use: mtxrun --script profile --analyze)
+.TP
+.B --timing
+generate timing and statistics overview
+.TP
+.B --extra=name
+process extra (mtx-context-... in distribution)
+.TP
+.B --extras
+show extras
+.SH OPTIONS: SPECIAL
+.TP
+.B --pdftex
+process file with texexec using pdftex
+.TP
+.B --xetex
+process file with texexec using xetex
+.TP
+.B --mkii
+process file with texexec
+.TP
+.B --pipe
+do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
-.SH "SEE ALSO"
-.PP
-\fBtex\fP(1), \fBluatex\fP(1), \fBtexlua\fP(1), \fBpsnup\fP(1), \fBpdftk\fP(1).
-.PP
-.URL "http://www.contextgarden.net" "ConTeXt wiki" .
-.PP
-.URL "http://www.ntg.nl/mailman/listinfo/ntg-context" "ConTeXt mailing list home" .
-.SH "BUGS"
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-.PP
-This manpage is horribly incomplete.
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
-.SH "AUTHOR"
-This partial manpage was written by Taco Hoekwater
-<taco@metatex.org>.
-It is based on the \fB--help\fP output of \fBcontext\fP.
+.B "wiki:"
+http://contextgarden.net
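A minimal sketch of the workflow this page documents, composed from the flags listed above; the input file name and result name are placeholders:

    mtxrun --script context --autopdf --purgeall myfile.tex
    mtxrun --script context --once --result=preview myfile.tex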
diff --git a/Master/texmf-dist/doc/man/man1/ctxtools.1 b/Master/texmf-dist/doc/man/man1/ctxtools.1
index c22263d4267..93218f2295f 100644
--- a/Master/texmf-dist/doc/man/man1/ctxtools.1
+++ b/Master/texmf-dist/doc/man/man1/ctxtools.1
@@ -1,89 +1,75 @@
-.de URL
-\\$2 \(laURL: \\$1 \(ra\\$3
-..
-.if \n[.g] .mso www.tmac
-.de EX
-.in +3
-.nf
-.ft CW
-..
-.de EE
-.in -3
-.ft R
-.fi
-..
-.TH CTXTOOLS "1" "January 2007" "ctxtools 1.3.3" "ConTeXt"
-
+.TH "ctxtools" "1" "01-01-2013" "version 1.3.5" "CtxTools"
.SH NAME
-ctxtools \- clean up temporary ConTeXt files, install latest ConTeXt
-
-.SH "SYNOPSIS"
-\fBctxtools\fP [ \fIOPTION\fP ... ]
-
+.B ctxtools
+.SH SYNOPSIS
+.B ctxtools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
.SH DESCRIPTION
-\fBctxtools\fP is useful for dealing with ConTeXt source and generated
-files and for installing new versions of ConTeXt.
-
-.SH "OPTIONS"
-.TP
-.B --help
-Print the available options.
-.TP
-.B --purgefiles
-Get rid of many temporary files generated by ConTeXt. You can give the
-.B --all
-option to remove a larger fraction of those files,
-and the
-.B --recurse
-option to remove these files also in subdirectories.
-.TP
+.B CtxTools
+.SH OPTIONS
+.TP
+.B --touchcontextfile
+update context version
+.TP
.B --contextversion
-Show the date of the ConTeXt macros and the format file. Useful after
-upgrading ConTeXt (perhaps using \fB--updatecontext\fP) to check
-whether the new formats and macros are visible to the TeX path
-searches (see
-.BR kpsewhich (1).
-.TP
+report context version
+.TP
+.B --jeditinterface
+generate jedit syntax files [--pipe]
+.TP
+.B --bbeditinterface
+generate bbedit syntax files [--pipe]
+.TP
+.B --sciteinterface
+generate scite syntax files [--pipe]
+.TP
+.B --rawinterface
+generate raw syntax files [--pipe]
+.TP
+.B --translateinterface
+generate interface files (xml) [nl de ..]
+.TP
+.B --purgefiles
+remove temporary files [--all --recurse] [basename]
+.TP
+.B --documentation
+generate documentation [--type=] [filename]
+.TP
+.B --filterpages
+(hidden temporary feature, no help available)
+.TP
+.B --dpxmapfiles
+convert pdftex mapfiles to dvipdfmx [--force] [texmfroot]
+.TP
+.B --listentities
+create doctype entity definition from enco-uc.tex
+.TP
+.B --brandfiles
+add context copyright notice [--force]
+.TP
+.B --platformize
+replace line-endings [--recurse --force] [pattern]
+.TP
+.B --dependencies
+analyze dependencies within context [--save --compact --filter=[macros|filenames]] [filename]
+.TP
.B --updatecontext
-Download the latest ConTeXt distribution and install it in the
-TEXMFLOCAL tree (see
-.BR kpsewhich (1)
-for information on
-TeX path searching). TEXMFLOCAL usually points to a system-wide
-location. If you want to install the new ConTeXt in your home
-directory (i.e. an install for one user), then set the TEXMFLOCAL
-environment variable while running
-.BR ctxtools .
-For example, using the syntax of the Bourne shell
-.BR sh (1):
-.EX
-TEXMFLOCAL=$HOME/texmf ctxtools --updatecontext
-.EE
-.TP
-.B --documentation
-Generate a \fI.ted\fP file from the source file. See also the
-\fB--module\fP option to
-.BR texexec (1).
-.TP
-\fB--patternfiles\fP [languagecode]
-Generate pattern files. You can also give the
-\fB--all\fP, \fB--xml\fP, and \fB--utf8\fP options.
-
-.SH "ENVIRONMENT"
+download latest version and remake formats [--proxy]
.TP
-TEXMFLOCAL
-Used by \f(CW--updatecontext\fP to decide where to install the distribution.
+.B --disarmutfbom
+remove utf bom [--force]
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
-.SH "SEE ALSO"
-.PP
-.BR texexec (1),
-.BR texmfstart (1).
-.PP
-.URL "http://www.contextgarden.net" "ConTeXt wiki" .
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-.SH BUGS
-This manual page is not complete.
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
-.SH "AUTHOR"
-This manual page was written by Sanjoy Mahajan <sanjoy@mit.edu>. It is in
-the public domain.
+.B "wiki:"
+http://contextgarden.net
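For instance, the cleanup and version options documented above can be combined as follows; the purge command acts on the current directory when no basename is given:

    ctxtools --purgefiles --all --recurse
    ctxtools --contextversion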
diff --git a/Master/texmf-dist/doc/man/man1/luatools.1 b/Master/texmf-dist/doc/man/man1/luatools.1
new file mode 100644
index 00000000000..145e9f6083b
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/luatools.1
@@ -0,0 +1,78 @@
+.TH "luatools" "1" "01-01-2013" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.SH NAME
+.B luatools
+.SH SYNOPSIS
+.B luatools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Management Tool (aka luatools)
+.SH OPTIONS
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --make
+[or --ini] make luatex format
+.TP
+.B --run
+[or --fmt] run luatex format
+.TP
+.B --compile
+assemble and compile lua inifile
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --all
+show all found files
+.TP
+.B --format=str
+filter cf format specification (default 'tex', use 'any' for any match)
+.TP
+.B --pattern=str
+filter variables
+.TP
+.B --trackers=list
+enable given trackers
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
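A short sketch of the database options listed above; the queried file name is only an example:

    luatools --generate
    luatools --find-file texmfcnf.lua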
diff --git a/Master/texmf-dist/doc/man/man1/luatools.man1.pdf b/Master/texmf-dist/doc/man/man1/luatools.man1.pdf
new file mode 100644
index 00000000000..52b46b6bd83
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/luatools.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-babel.1 b/Master/texmf-dist/doc/man/man1/mtx-babel.1
new file mode 100644
index 00000000000..cd0b007fb3d
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-babel.1
@@ -0,0 +1,33 @@
+.TH "mtx-babel" "1" "01-01-2013" "version 1.20" "Babel Input To UTF Conversion"
+.SH NAME
+.B mtx-babel
+.SH SYNOPSIS
+.B mtxrun --script babel [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Babel Input To UTF Conversion
+.SH OPTIONS
+.TP
+.B --language=string
+conversion language (e.g. greek)
+.TP
+.B --structure=string
+obey given structure (e.g. 'document', default: 'context')
+.TP
+.B --convert
+convert babel codes into utf
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
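A hedged example of the conversion described above, using the flags from this page; the input file and language are illustrative:

    mtxrun --script babel --language=greek --convert paper.tex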
diff --git a/Master/texmf-dist/doc/man/man1/mtx-babel.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-babel.man1.pdf
new file mode 100644
index 00000000000..aa026870301
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-babel.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-base.1 b/Master/texmf-dist/doc/man/man1/mtx-base.1
new file mode 100644
index 00000000000..6c72dcb07f1
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-base.1
@@ -0,0 +1,78 @@
+.TH "mtx-base" "1" "01-01-2013" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.SH NAME
+.B mtx-base
+.SH SYNOPSIS
+.B mtxrun --script base [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Management Tool (aka luatools)
+.SH OPTIONS
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --make
+[or --ini] make luatex format
+.TP
+.B --run
+[or --fmt] run luatex format
+.TP
+.B --compile
+assemble and compile lua inifile
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --all
+show all found files
+.TP
+.B --format=str
+filter cf format specification (default 'tex', use 'any' for any match)
+.TP
+.B --pattern=str
+filter variables
+.TP
+.B --trackers=list
+enable given trackers
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-base.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-base.man1.pdf
new file mode 100644
index 00000000000..447b9341262
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-base.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-cache.1 b/Master/texmf-dist/doc/man/man1/mtx-cache.1
new file mode 100644
index 00000000000..26f3793fc2b
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-cache.1
@@ -0,0 +1,36 @@
+.TH "mtx-cache" "1" "01-01-2013" "version 0.10" "ConTeXt &error; MetaTeX Cache Management"
+.SH NAME
+.B mtx-cache
+.SH SYNOPSIS
+.B mtxrun --script cache [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt & MetaTeX Cache Management
+.SH OPTIONS
+.TP
+.B --purge
+remove not used files
+.TP
+.B --erase
+completely remove cache
+.TP
+.B --list
+show cache
+.TP
+.B --all
+all (not yet implemented)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
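The cache operations above are typically used in this order, listing before erasing:

    mtxrun --script cache --list
    mtxrun --script cache --erase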
diff --git a/Master/texmf-dist/doc/man/man1/mtx-cache.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-cache.man1.pdf
new file mode 100644
index 00000000000..24ae98e4a16
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-cache.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-chars.1 b/Master/texmf-dist/doc/man/man1/mtx-chars.1
new file mode 100644
index 00000000000..5d3df23e6a4
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-chars.1
@@ -0,0 +1,33 @@
+.TH "mtx-chars" "1" "01-01-2013" "version 0.10" "MkII Character Table Generators"
+.SH NAME
+.B mtx-chars
+.SH SYNOPSIS
+.B mtxrun --script chars [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MkII Character Table Generators
+.SH OPTIONS
+.TP
+.B --xtx
+generate xetx-*.tex (used by xetex)
+.TP
+.B --pdf
+generate pdfr-def.tex (used by pdftex)
+.TP
+.B --entities
+generate entities table
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-chars.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-chars.man1.pdf
new file mode 100644
index 00000000000..ff2eca72000
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-chars.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-check.1 b/Master/texmf-dist/doc/man/man1/mtx-check.1
new file mode 100644
index 00000000000..72e33b088eb
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-check.1
@@ -0,0 +1,27 @@
+.TH "mtx-check" "1" "01-01-2013" "version 0.10" "Basic ConTeXt Syntax Checking"
+.SH NAME
+.B mtx-check
+.SH SYNOPSIS
+.B mtxrun --script check [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Basic ConTeXt Syntax Checking
+.SH OPTIONS
+.TP
+.B --convert
+check tex file for errors
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-check.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-check.man1.pdf
new file mode 100644
index 00000000000..f8266ee15e3
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-check.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-colors.1 b/Master/texmf-dist/doc/man/man1/mtx-colors.1
new file mode 100644
index 00000000000..d466b5ea30c
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-colors.1
@@ -0,0 +1,27 @@
+.TH "mtx-colors" "1" "01-01-2013" "version 0.10" "ConTeXt Color Management"
+.SH NAME
+.B mtx-colors
+.SH SYNOPSIS
+.B mtxrun --script colors [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Color Management
+.SH OPTIONS
+.TP
+.B --table
+show icc table
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-colors.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-colors.man1.pdf
new file mode 100644
index 00000000000..1c96774eee8
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-colors.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-context.1 b/Master/texmf-dist/doc/man/man1/mtx-context.1
new file mode 100644
index 00000000000..e1fb28a994e
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-context.1
@@ -0,0 +1,167 @@
+.TH "mtx-context" "1" "01-01-2013" "version 0.60" "ConTeXt Process Management"
+.SH NAME
+.B mtx-context
+.SH SYNOPSIS
+.B mtxrun --script context [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Process Management
+.SH OPTIONS: BASIC
+.TP
+.B --run
+process (one or more) files (default action)
+.TP
+.B --make
+create context formats
+.TP
+.B --ctx=name
+use ctx file (process management specification)
+.TP
+.B --interface
+use specified user interface (default: en)
+.TP
+.B --autopdf
+close pdf file in viewer and start pdf viewer afterwards
+.TP
+.B --purge
+purge files after a run (--pattern=...)
+.TP
+.B --purgeall
+purge all files after a run (--pattern=...)
+.TP
+.B --usemodule=list
+load the given module or style, normally part of the distribution
+.TP
+.B --environment=list
+load the given environment file first (document styles)
+.TP
+.B --mode=list
+enable the given modes (conditional processing in styles)
+.TP
+.B --path=list
+also consult the given paths when files are looked for
+.TP
+.B --arguments=list
+set variables that can be consulted during a run (key/value pairs)
+.TP
+.B --randomseed=number
+set the randomseed
+.TP
+.B --result=name
+rename the resulting output to the given name
+.TP
+.B --trackers=list
+set tracker variables (show list with --showtrackers)
+.TP
+.B --directives=list
+set directive variables (show list with --showdirectives)
+.TP
+.B --silent=list
+disable log categories (show list with --showlogcategories)
+.TP
+.B --noconsole
+disable logging to the console (logfile only)
+.TP
+.B --purgeresult
+purge result file before run
+.TP
+.B --forcexml
+force xml stub
+.TP
+.B --forcecld
+force cld (context lua document) stub
+.TP
+.B --forcelua
+force lua stub (like texlua)
+.TP
+.B --forcemp
+force mp stub
+.TP
+.B --arrange
+run extra imposition pass, given that the style sets up imposition
+.TP
+.B --noarrange
+ignore imposition specifications in the style
+.TP
+.B --jit
+use luajittex with jit turned off (only use the faster virtual machine)
+.TP
+.B --jiton
+use luajittex with jit turned on (in most cases not faster, even slower)
+.TP
+.B --once
+only run once (no multipass data file is produced)
+.TP
+.B --batchmode
+run without stopping and do not show messages on the console
+.TP
+.B --nonstopmode
+run without stopping
+.TP
+.B --synctex
+run with synctex enabled (optional value: zipped, unzipped, 1, -1)
+.TP
+.B --generate
+generate file database etc. (as luatools does)
+.TP
+.B --paranoid
+do not descend to .. and ../..
+.TP
+.B --version
+report installed context version
+.TP
+.B --global
+assume the given file is present elsewhere
+.TP
+.B --nofile
+use dummy file as jobname
+.SH OPTIONS: EXPERT
+.TP
+.B --touch
+update context version number (remake needed afterwards, also provide --expert)
+.TP
+.B --nostatistics
+omit runtime statistics at the end of the run
+.TP
+.B --update
+update context from website (not to be confused with contextgarden)
+.TP
+.B --profile
+profile job (use: mtxrun --script profile --analyze)
+.TP
+.B --timing
+generate timing and statistics overview
+.TP
+.B --extra=name
+process extra (mtx-context-... in distribution)
+.TP
+.B --extras
+show extras
+.SH OPTIONS: SPECIAL
+.TP
+.B --pdftex
+process file with texexec using pdftex
+.TP
+.B --xetex
+process file with texexec using xetex
+.TP
+.B --mkii
+process file with texexec
+.TP
+.B --pipe
+do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
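+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; the input file name is only an example.
+.SH EXAMPLE
+.nf
+mtxrun --script context --make
+mtxrun --script context --run document.tex
+.fi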
diff --git a/Master/texmf-dist/doc/man/man1/mtx-context.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-context.man1.pdf
new file mode 100644
index 00000000000..1fe6753ac13
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-context.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-epub.1 b/Master/texmf-dist/doc/man/man1/mtx-epub.1
new file mode 100644
index 00000000000..518435d1e2b
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-epub.1
@@ -0,0 +1,27 @@
+.TH "mtx-epub" "1" "01-01-2013" "version 0.12" "ConTeXt EPUB Helpers"
+.SH NAME
+.B mtx-epub
+.SH SYNOPSIS
+.B mtxrun --script epub [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt EPUB Helpers
+.SH OPTIONS
+.TP
+.B --make
+create epub zip file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-epub.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-epub.man1.pdf
new file mode 100644
index 00000000000..f444aadf814
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-epub.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-fcd.1 b/Master/texmf-dist/doc/man/man1/mtx-fcd.1
new file mode 100644
index 00000000000..43de2a3a4de
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-fcd.1
@@ -0,0 +1,54 @@
+.TH "mtx-fcd" "1" "01-01-2013" "version 1.00" "Fast Directory Change"
+.SH NAME
+.B mtx-fcd
+.SH SYNOPSIS
+.B mtxrun --script fcd [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Fast Directory Change
+.SH OPTIONS
+.TP
+.B --clear
+clear the cache
+.TP
+.B --clear
+--history [entry] clear the history
+.TP
+.B --scan
+clear the cache and add given path(s)
+.TP
+.B --add
+add given path(s)
+.TP
+.B --find
+find given path (can be substring)
+.TP
+.B --find
+--nohistory find given path (can be substring) but don't use history
+.TP
+.B --stub
+print platform stub file
+.TP
+.B --list
+show roots of cached dirs
+.TP
+.B --list
+--history show history of chosen dirs
+.TP
+.B --help
+show this help
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-fcd.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-fcd.man1.pdf
new file mode 100644
index 00000000000..2809a922631
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-fcd.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-flac.1 b/Master/texmf-dist/doc/man/man1/mtx-flac.1
new file mode 100644
index 00000000000..ef914f2acff
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-flac.1
@@ -0,0 +1,27 @@
+.TH "mtx-flac" "1" "01-01-2013" "version 0.10" "ConTeXt Flac Helpers"
+.SH NAME
+.B mtx-flac
+.SH SYNOPSIS
+.B mtxrun --script flac [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Flac Helpers
+.SH OPTIONS
+.TP
+.B --collect
+collect albums in xml file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-flac.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-flac.man1.pdf
new file mode 100644
index 00000000000..1d1cb8b58cb
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-flac.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-fonts.1 b/Master/texmf-dist/doc/man/man1/mtx-fonts.1
new file mode 100644
index 00000000000..b576b9de35d
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-fonts.1
@@ -0,0 +1,63 @@
+.TH "mtx-fonts" "1" "01-01-2013" "version 0.21" "ConTeXt Font Database Management"
+.SH NAME
+.B mtx-fonts
+.SH SYNOPSIS
+.B mtxrun --script fonts [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Font Database Management
+.SH OPTIONS
+.TP
+.B --save
+save open type font in raw table
+.TP
+.B --unpack
+save a tma file in a more readable format
+.TP
+.B --reload
+generate new font database (use --force when in doubt)
+.TP
+.B --reload
+--simple: generate luatex-fonts-names.lua (not for context!)
+.TP
+.B --list
+--name: list installed fonts, filter by name [--pattern]
+.TP
+.B --list
+--spec: list installed fonts, filter by spec [--filter]
+.TP
+.B --list
+--file: list installed fonts, filter by file [--pattern]
+.TP
+.B --pattern=str
+filter files using pattern
+.TP
+.B --filter=list
+key-value pairs
+.TP
+.B --all
+show all found instances (combined with other flags)
+.TP
+.B --info
+give more details
+.TP
+.B --track=list
+enable trackers
+.TP
+.B --statistics
+some info about the database
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
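+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; the pattern value is only an example.
+.SH EXAMPLE
+.nf
+mtxrun --script fonts --reload
+mtxrun --script fonts --list --name --pattern=pagella
+.fi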
diff --git a/Master/texmf-dist/doc/man/man1/mtx-fonts.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-fonts.man1.pdf
new file mode 100644
index 00000000000..b5b1319c13d
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-fonts.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-grep.1 b/Master/texmf-dist/doc/man/man1/mtx-grep.1
new file mode 100644
index 00000000000..6b86ec75591
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-grep.1
@@ -0,0 +1,36 @@
+.TH "mtx-grep" "1" "01-01-2013" "version 0.10" "Simple Grepper"
+.SH NAME
+.B mtx-grep
+.SH SYNOPSIS
+.B mtxrun --script grep [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Grepper
+.SH OPTIONS
+.TP
+.B --pattern
+search for pattern (optional)
+.TP
+.B --count
+count matches only
+.TP
+.B --nocomment
+skip lines that start with %% or #
+.TP
+.B --xml
+pattern is lpath expression
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
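+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; the pattern and file names are only examples.
+.SH EXAMPLE
+.nf
+mtxrun --script grep --count --pattern=startchapter *.tex
+.fi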
diff --git a/Master/texmf-dist/doc/man/man1/mtx-grep.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-grep.man1.pdf
new file mode 100644
index 00000000000..db1dd5bb518
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-grep.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-interface.1 b/Master/texmf-dist/doc/man/man1/mtx-interface.1
new file mode 100644
index 00000000000..232451621a6
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-interface.1
@@ -0,0 +1,69 @@
+.TH "mtx-interface" "1" "01-01-2013" "version 0.13" "ConTeXt Interface Related Goodies"
+.SH NAME
+.B mtx-interface
+.SH SYNOPSIS
+.B mtxrun --script interface [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Interface Related Goodies
+.SH OPTIONS
+.TP
+.B --interfaces
+generate context interface files
+.TP
+.B --messages
+generate context message files
+.TP
+.B --labels
+generate context label files
+.TP
+.B --context
+equals --interfaces --messages --languages
+.TP
+.B --scite
+generate scite interface
+.TP
+.B --bbedit
+generate bbedit interface files
+.TP
+.B --jedit
+generate jedit interface files
+.TP
+.B --textpad
+generate textpad interface files
+.TP
+.B --text
+create text files for commands and environments
+.TP
+.B --raw
+report commands to the console
+.TP
+.B --check
+generate check file
+.TP
+.B --toutf
+replace named characters by utf
+.TP
+.B --preprocess
+preprocess mkvi files to tex files [force,suffix]
+.TP
+.B --suffix
+use given suffix for output files
+.TP
+.B --force
+force action even when in doubt
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
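+.\" Editorial addition: a hypothetical usage sketch, not part of the generated help text.
+.SH EXAMPLE
+.nf
+mtxrun --script interface --interfaces
+mtxrun --script interface --scite
+.fi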
diff --git a/Master/texmf-dist/doc/man/man1/mtx-interface.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-interface.man1.pdf
new file mode 100644
index 00000000000..f77c0d123c9
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-interface.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-metapost.1 b/Master/texmf-dist/doc/man/man1/mtx-metapost.1
new file mode 100644
index 00000000000..9ccaddc9f2d
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-metapost.1
@@ -0,0 +1,39 @@
+.TH "mtx-metapost" "1" "01-01-2013" "version 0.10" "MetaPost to PDF processor"
+.SH NAME
+.B mtx-metapost
+.SH SYNOPSIS
+.B mtxrun --script metapost [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MetaPost to PDF processor
+.SH OPTIONS
+.TP
+.B --rawmp
+raw metapost run
+.TP
+.B --metafun
+use metafun instead of plain
+.TP
+.B --latex
+force --tex=latex
+.TP
+.B --texexec
+force texexec usage (mkii)
+.TP
+.B --split
+split single result file into pages
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
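+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; the figure file name is only an example.
+.SH EXAMPLE
+.nf
+mtxrun --script metapost --metafun figure.mp
+.fi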
diff --git a/Master/texmf-dist/doc/man/man1/mtx-metapost.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-metapost.man1.pdf
new file mode 100644
index 00000000000..e8a62b94201
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-metapost.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-metatex.1 b/Master/texmf-dist/doc/man/man1/mtx-metatex.1
new file mode 100644
index 00000000000..bd67ff4d321
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-metatex.1
@@ -0,0 +1,30 @@
+.TH "mtx-metatex" "1" "01-01-2013" "version 0.10" "MetaTeX Process Management"
+.SH NAME
+.B mtx-metatex
+.SH SYNOPSIS
+.B mtxrun --script metatex [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B MetaTeX Process Management
+.SH OPTIONS
+.TP
+.B --run
+process (one or more) files (default action)
+.TP
+.B --make
+create metatex format(s)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-metatex.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-metatex.man1.pdf
new file mode 100644
index 00000000000..b62f414af46
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-metatex.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-modules.1 b/Master/texmf-dist/doc/man/man1/mtx-modules.1
new file mode 100644
index 00000000000..a719c16a077
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-modules.1
@@ -0,0 +1,33 @@
+.TH "mtx-modules" "1" "01-01-2013" "version 1.00" "ConTeXt Module Documentation Generators"
+.SH NAME
+.B mtx-modules
+.SH SYNOPSIS
+.B mtxrun --script modules [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Module Documentation Generators
+.SH OPTIONS
+.TP
+.B --convert
+convert source files (tex, mkii, mkiv, mp) to 'ted' files
+.TP
+.B --process
+process source files (tex, mkii, mkiv, mp) to 'pdf' files
+.TP
+.B --prep
+use original name with suffix 'prep' appended
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-modules.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-modules.man1.pdf
new file mode 100644
index 00000000000..19ef7232d0f
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-modules.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-package.1 b/Master/texmf-dist/doc/man/man1/mtx-package.1
new file mode 100644
index 00000000000..af11e6aec01
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-package.1
@@ -0,0 +1,27 @@
+.TH "mtx-package" "1" "01-01-2013" "version 0.10" "Distribution Related Goodies"
+.SH NAME
+.B mtx-package
+.SH SYNOPSIS
+.B mtxrun --script package [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Distribution Related Goodies
+.SH OPTIONS
+.TP
+.B --merge
+merge 'loadmodule' into merge file
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-package.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-package.man1.pdf
new file mode 100644
index 00000000000..722ca53bc61
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-package.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-patterns.1 b/Master/texmf-dist/doc/man/man1/mtx-patterns.1
new file mode 100644
index 00000000000..96e3c4d4879
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-patterns.1
@@ -0,0 +1,39 @@
+.TH "mtx-patterns" "1" "01-01-2013" "version 0.20" "ConTeXt Pattern File Management"
+.SH NAME
+.B mtx-patterns
+.SH SYNOPSIS
+.B mtxrun --script patterns [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Pattern File Management
+.SH OPTIONS
+.TP
+.B --convert
+generate context language files (mnemonic driven, if not given then all)
+.TP
+.B --check
+check pattern file (or those used by context when no file given)
+.TP
+.B --path
+source path where hyph-foo.tex files are stored
+.TP
+.B --destination
+destination path
+.TP
+.B --specification
+additional patterns, e.g. =cy,hyph-cy,welsh
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
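+.\" Editorial addition: a hypothetical usage sketch, not part of the generated help text.
+.SH EXAMPLE
+.nf
+mtxrun --script patterns --check
+.fi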
diff --git a/Master/texmf-dist/doc/man/man1/mtx-patterns.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-patterns.man1.pdf
new file mode 100644
index 00000000000..20964c47f9e
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-patterns.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-pdf.1 b/Master/texmf-dist/doc/man/man1/mtx-pdf.1
new file mode 100644
index 00000000000..7ac50df0d63
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-pdf.1
@@ -0,0 +1,33 @@
+.TH "mtx-pdf" "1" "01-01-2013" "version 0.10" "ConTeXt PDF Helpers"
+.SH NAME
+.B mtx-pdf
+.SH SYNOPSIS
+.B mtxrun --script pdf [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt PDF Helpers
+.SH OPTIONS
+.TP
+.B --info
+show some info about the given file
+.TP
+.B --metadata
+show metadata xml blob
+.TP
+.B --fonts
+show used fonts (--detail)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
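+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; the file name is only an example.
+.SH EXAMPLE
+.nf
+mtxrun --script pdf --info document.pdf
+mtxrun --script pdf --fonts --detail document.pdf
+.fi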
diff --git a/Master/texmf-dist/doc/man/man1/mtx-pdf.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-pdf.man1.pdf
new file mode 100644
index 00000000000..f5662e8ae4a
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-pdf.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-profile.1 b/Master/texmf-dist/doc/man/man1/mtx-profile.1
new file mode 100644
index 00000000000..5eba4861aa4
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-profile.1
@@ -0,0 +1,30 @@
+.TH "mtx-profile" "1" "01-01-2013" "version 1.00" "ConTeXt MkIV LuaTeX Profiler"
+.SH NAME
+.B mtx-profile
+.SH SYNOPSIS
+.B mtxrun --script profile [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt MkIV LuaTeX Profiler
+.SH OPTIONS
+.TP
+.B --analyze
+analyze lua calls
+.TP
+.B --trace
+analyze tex calls
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-profile.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-profile.man1.pdf
new file mode 100644
index 00000000000..74f56d7b253
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-profile.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-rsync.1 b/Master/texmf-dist/doc/man/man1/mtx-rsync.1
new file mode 100644
index 00000000000..5f3bc1fea00
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-rsync.1
@@ -0,0 +1,33 @@
+.TH "mtx-rsync" "1" "01-01-2013" "version 0.10" "Rsync Helpers"
+.SH NAME
+.B mtx-rsync
+.SH SYNOPSIS
+.B mtxrun --script rsync [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Rsync Helpers
+.SH OPTIONS
+.TP
+.B --job
+use given file as specification
+.TP
+.B --dryrun
+show what would happen
+.TP
+.B --force
+force run
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-rsync.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-rsync.man1.pdf
new file mode 100644
index 00000000000..d9c41f3c033
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-rsync.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-scite.1 b/Master/texmf-dist/doc/man/man1/mtx-scite.1
new file mode 100644
index 00000000000..118179e9184
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-scite.1
@@ -0,0 +1,27 @@
+.TH "mtx-scite" "1" "01-01-2013" "version 1.00" "Scite Helper Script"
+.SH NAME
+.B mtx-scite
+.SH SYNOPSIS
+.B mtxrun --script scite [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Scite Helper Script
+.SH OPTIONS
+.TP
+.B --words
+convert spell-*.txt into spell-*.lua
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-scite.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-scite.man1.pdf
new file mode 100644
index 00000000000..28b5b948d02
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-scite.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-server.1 b/Master/texmf-dist/doc/man/man1/mtx-server.1
new file mode 100644
index 00000000000..18373ad4b1c
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-server.1
@@ -0,0 +1,42 @@
+.TH "mtx-server" "1" "01-01-2013" "version 0.10" "Simple Webserver For Helpers"
+.SH NAME
+.B mtx-server
+.SH SYNOPSIS
+.B mtxrun --script server [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Webserver For Helpers
+.SH OPTIONS
+.TP
+.B --start
+start server
+.TP
+.B --port
+port to listen to
+.TP
+.B --root
+server root
+.TP
+.B --scripts
+scripts sub path
+.TP
+.B --index
+index file
+.TP
+.B --auto
+start on own path
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
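+.\" Editorial addition: a hypothetical usage sketch, not part of the generated help text.
+.SH EXAMPLE
+.nf
+mtxrun --script server --start --auto
+.fi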
diff --git a/Master/texmf-dist/doc/man/man1/mtx-server.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-server.man1.pdf
new file mode 100644
index 00000000000..f6bf45b2fac
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-server.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-texworks.1 b/Master/texmf-dist/doc/man/man1/mtx-texworks.1
new file mode 100644
index 00000000000..df2d4b84367
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-texworks.1
@@ -0,0 +1,30 @@
+.TH "mtx-texworks" "1" "01-01-2013" "version 1.00" "TeXworks Startup Script"
+.SH NAME
+.B mtx-texworks
+.SH SYNOPSIS
+.B mtxrun --script texworks [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXworks Startup Script
+.SH OPTIONS
+.TP
+.B --start
+[--verbose] start texworks
+.TP
+.B --test
+report what will happen
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-texworks.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-texworks.man1.pdf
new file mode 100644
index 00000000000..360fd07af93
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-texworks.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-timing.1 b/Master/texmf-dist/doc/man/man1/mtx-timing.1
new file mode 100644
index 00000000000..831ce60eda4
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-timing.1
@@ -0,0 +1,33 @@
+.TH "mtx-timing" "1" "01-01-2013" "version 0.10" "ConTeXt Timing Tools"
+.SH NAME
+.B mtx-timing
+.SH SYNOPSIS
+.B mtxrun --script timing [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Timing Tools
+.SH OPTIONS
+.TP
+.B --xhtml
+make xhtml file
+.TP
+.B --launch
+launch after conversion
+.TP
+.B --remove
+remove after launching
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-timing.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-timing.man1.pdf
new file mode 100644
index 00000000000..679155e28be
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-timing.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-tools.1 b/Master/texmf-dist/doc/man/man1/mtx-tools.1
new file mode 100644
index 00000000000..f56b27bde55
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-tools.1
@@ -0,0 +1,63 @@
+.TH "mtx-tools" "1" "01-01-2013" "version 1.01" "Some File Related Goodies"
+.SH NAME
+.B mtx-tools
+.SH SYNOPSIS
+.B mtxrun --script tools [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Some File Related Goodies
+.SH OPTIONS
+.TP
+.B --disarmutfbomb
+remove the utf byte order mark (bom) if present
+.TP
+.B --force
+remove indeed
+.TP
+.B --dirtoxml
+glob directory into xml
+.TP
+.B --pattern
+glob pattern (default: *)
+.TP
+.B --url
+url attribute (no processing)
+.TP
+.B --root
+the root of the globbed path (default: .)
+.TP
+.B --output
+output filename (console by default)
+.TP
+.B --recurse
+recurse into subdirectories
+.TP
+.B --stripname
+take pathpart of given pattern
+.TP
+.B --longname
+set name attributes to full path name
+.TP
+.B --pattern
+glob pattern (default: *)
+.TP
+.B --recurse
+recurse into subdirectories
+.TP
+.B --force
+downcase indeed
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-tools.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-tools.man1.pdf
new file mode 100644
index 00000000000..715534fecd1
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-tools.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-unzip.1 b/Master/texmf-dist/doc/man/man1/mtx-unzip.1
new file mode 100644
index 00000000000..5e1a369b2f7
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-unzip.1
@@ -0,0 +1,33 @@
+.TH "mtx-unzip" "1" "01-01-2013" "version 0.10" "Simple Unzipper"
+.SH NAME
+.B mtx-unzip
+.SH SYNOPSIS
+.B mtxrun --script unzip [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B Simple Unzipper
+.SH OPTIONS
+.TP
+.B --list
+list files in archive
+.TP
+.B --junk
+flatten unzipped directory structure
+.TP
+.B --extract
+extract files
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-unzip.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-unzip.man1.pdf
new file mode 100644
index 00000000000..551f2e05952
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-unzip.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-update.1 b/Master/texmf-dist/doc/man/man1/mtx-update.1
new file mode 100644
index 00000000000..72e7ce1b5ae
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-update.1
@@ -0,0 +1,78 @@
+.TH "mtx-update" "1" "01-01-2013" "version 0.31" "ConTeXt Minimals Updater"
+.SH NAME
+.B mtx-update
+.SH SYNOPSIS
+.B mtxrun --script update [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Minimals Updater
+.SH OPTIONS
+.TP
+.B --platform=string
+platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)
+.TP
+.B --server=string
+repository url (rsync://contextgarden.net)
+.TP
+.B --module=string
+repository url (minimals)
+.TP
+.B --repository=string
+specify version (current, experimental)
+.TP
+.B --context=string
+specify version (current, latest, beta, yyyy.mm.dd)
+.TP
+.B --rsync=string
+rsync binary (rsync)
+.TP
+.B --texroot=string
+installation directory (not guessed for the moment)
+.TP
+.B --engine=string
+tex engine (luatex, pdftex, xetex)
+.TP
+.B --modules=string
+extra modules (can be list or 'all')
+.TP
+.B --fonts=string
+additional fonts (can be list or 'all')
+.TP
+.B --goodies=string
+extra binaries (like scite and texworks)
+.TP
+.B --force
+instead of a dryrun, do the real thing
+.TP
+.B --update
+update minimal tree
+.TP
+.B --make
+also make formats and generate file databases
+.TP
+.B --keep
+don't delete unused or obsolete files
+.TP
+.B --state
+update tree using saved state
+.TP
+.B --cygwin
+adapt drive specs to cygwin
+.TP
+.B --mingw
+assume mingw binaries are being used
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
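+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; platform, version and installation root are only examples.
+.SH EXAMPLE
+.nf
+mtxrun --script update --platform=linux-64 --context=beta --texroot=/opt/context --force --make
+.fi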
diff --git a/Master/texmf-dist/doc/man/man1/mtx-update.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-update.man1.pdf
new file mode 100644
index 00000000000..a21271c505f
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-update.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtx-watch.1 b/Master/texmf-dist/doc/man/man1/mtx-watch.1
new file mode 100644
index 00000000000..9f4a0d3b9d7
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-watch.1
@@ -0,0 +1,48 @@
+.TH "mtx-watch" "1" "01-01-2013" "version 1.00" "ConTeXt Request Watchdog"
+.SH NAME
+.B mtx-watch
+.SH SYNOPSIS
+.B mtxrun --script watch [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt Request Watchdog
+.SH OPTIONS
+.TP
+.B --logpath
+optional path for log files
+.TP
+.B --watch
+watch given path [--delay]
+.TP
+.B --pipe
+use pipe instead of execute
+.TP
+.B --delay
+delay between sweeps
+.TP
+.B --automachine
+replace /machine/ in the path by /servername/
+.TP
+.B --collect
+condense log files
+.TP
+.B --cleanup=delay
+remove files in given path [--force]
+.TP
+.B --showlog
+show log data
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/mtx-watch.man1.pdf b/Master/texmf-dist/doc/man/man1/mtx-watch.man1.pdf
new file mode 100644
index 00000000000..fe08191c9ff
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtx-watch.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/mtxrun.1 b/Master/texmf-dist/doc/man/man1/mtxrun.1
new file mode 100644
index 00000000000..212097ff153
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtxrun.1
@@ -0,0 +1,147 @@
+.TH "mtxrun" "1" "01-01-2013" "version 1.31" "ConTeXt TDS Runner Tool"
+.SH NAME
+.B mtxrun
+.SH SYNOPSIS
+.B mtxrun [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Runner Tool
+.SH OPTIONS
+.TP
+.B --script
+run an mtx script (lua preferred method) (--noquotes); no script gives a list
+.TP
+.B --execute
+run a script or program (texmfstart method) (--noquotes)
+.TP
+.B --resolve
+resolve prefixed arguments
+.TP
+.B --ctxlua
+run internally (using preloaded libs)
+.TP
+.B --internal
+run script using built in libraries (same as --ctxlua)
+.TP
+.B --locate
+locate given filename in database (default) or system (--first --all --detail)
+.TP
+.B --autotree
+use texmf tree cf. env texmfstart_tree or texmfstarttree
+.TP
+.B --tree=pathtotree
+use given texmf tree (default file: setuptex.tmf)
+.TP
+.B --environment=name
+use given (tmf) environment file
+.TP
+.B --path=runpath
+go to given path before execution
+.TP
+.B --ifchanged=filename
+only execute when given file has changed (md5 checksum)
+.TP
+.B --iftouched=old,new
+only execute when given file has changed (time stamp)
+.TP
+.B --makestubs
+create stubs for (context related) scripts
+.TP
+.B --removestubs
+remove stubs (context related) scripts
+.TP
+.B --stubpath=binpath
+paths where stubs will be written
+.TP
+.B --windows
+create windows (mswin) stubs
+.TP
+.B --unix
+create unix (linux) stubs
+.TP
+.B --verbose
+give a bit more info
+.TP
+.B --trackers=list
+enable given trackers
+.TP
+.B --progname=str
+format or backend
+.TP
+.B --edit
+launch editor with found file
+.TP
+.B --launch
+launch files like manuals, assumes os support (--all)
+.TP
+.B --timedrun
+run a script and time its run
+.TP
+.B --autogenerate
+regenerate databases if needed (handy when used to run context in an editor)
+.TP
+.B --usekpse
+use kpse as fallback (when no mkiv and cache installed, often slower)
+.TP
+.B --forcekpse
+force using kpse (handy when no mkiv and cache installed but less functionality)
+.TP
+.B --prefixes
+show supported prefixes
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --directives
+show (known) directives
+.TP
+.B --trackers
+show (known) trackers
+.TP
+.B --experiments
+show (known) experiments
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --pattern=string
+filter variables
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
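+.\" Editorial addition: a hypothetical usage sketch, not part of the generated
+.\" help text; script and file names are only examples.
+.SH EXAMPLE
+.nf
+mtxrun --generate
+mtxrun --script fonts --reload
+mtxrun --find-file context.mkiv
+.fi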
diff --git a/Master/texmf-dist/doc/man/man1/mtxrun.man1.pdf b/Master/texmf-dist/doc/man/man1/mtxrun.man1.pdf
new file mode 100644
index 00000000000..5ed7141e9f4
--- /dev/null
+++ b/Master/texmf-dist/doc/man/man1/mtxrun.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/pstopdf.1 b/Master/texmf-dist/doc/man/man1/pstopdf.1
index 972f364cb19..71786ce8b79 100644
--- a/Master/texmf-dist/doc/man/man1/pstopdf.1
+++ b/Master/texmf-dist/doc/man/man1/pstopdf.1
@@ -1,75 +1,30 @@
-.TH "PSTOPDF" "1" "December 2006" "pstopdf 2.0" "ConTeXt"
-.de URL
-\\$2 \(laURL: \\$1 \(ra\\$3
-..
-.if \n[.g] .mso www.tmac
-.de EX
-.in +3
-.nf
-.ft CW
-..
-.de EE
-.in -3
-.ft R
-.fi
-..
-
-.SH "NAME"
-pstopdf \- convert EPS to PDF
-
-.SH "SYNOPSIS"
-\fBpstopdf\fP [ \fIOPTION\fP ... ] \fIFILE\fP [ ... ]
-
-.SH "DESCRIPTION"
-
-\fBpstopdf\fP converts EPS (encapsulated PostScript) to PDF.
-
+.TH "pstopdf" "1" "01-01-2013" "version 2.0.1" "PStoPDF"
+.SH NAME
+.B pstopdf
+.SH SYNOPSIS
+.B pstopdf [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B PStoPDF
.SH OPTIONS
.TP
-.B --help
-Print usage and version information.
-.TP
-.B --convert
-Convert EPS to PDF. This option is assumed if none is given.
-.TP
-.BI --method=\fIstring\fP
-What method to use for bounding boxes when converting to PDF.
-Specify one of \fBcrop\fP, \fBbounded\fP (the default), or \fBraw\fP.
-Older versions of ConTeXt used numerical methods (1, 2, or 3).
-.TP
-.BI --resolution=\fIstring\fP
-The resolution to use when when converting to pdf.
-Specify one of \fBlow\fP, \fBnormal\fP, \fBmedium\fP, \fBhigh\fP,
-\fBprinter\fP, \fBprint\fP, \fBscreen\fP, \fBebook\fP, or
-\fBdefault\fP.
-.TP
.B --request
-Handle exa request file.
+handles exa request file
.TP
.B --watch
-Watch folders for conversions (\fIuntested\fP).
-
-.SH EXAMPLES
+watch folders for conversions (untested)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
-To make \fIcow.pdf\fP:
-.EX
-pstopdf cow.eps
-.EE
-or (one of several equivalents):
-.EX
-pstopdf --convert cow.eps cow.pdf
-.EE
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-.SH "SEE ALSO"
-.PP
-.BR epstopdf (1),
-.BR mptopdf (1),
-.BR ps2pdf (1),
-.BR texexec (1).
-.PP
-.URL "http://www.contextgarden.net" "ConTeXt wiki" .
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
-.SH "AUTHOR"
-This (incomplete) manual page was written by Sanjoy Mahajan
-<sanjoy@mit.edu>. It is in the public domain.
+.B "wiki:"
+http://contextgarden.net
diff --git a/Master/texmf-dist/doc/man/man1/texexec.1 b/Master/texmf-dist/doc/man/man1/texexec.1
index 19f56dab96b..54213a30776 100644
--- a/Master/texmf-dist/doc/man/man1/texexec.1
+++ b/Master/texmf-dist/doc/man/man1/texexec.1
@@ -1,602 +1,72 @@
-.TH "TEXEXEC" "1" "December 2006" "texexec 6.2" "ConTeXt"
-.de URL
-\\$2 \(laURL: \\$1 \(ra\\$3
-..
-.if \n[.g] .mso www.tmac
-.de EX
-.in +3
-.nf
-.ft CW
-..
-.de EE
-.in -3
-.ft R
-.fi
-..
-
-.SH "NAME"
-texexec \- front end to the ConTeXt typesetting system
-
-.SH "SYNOPSIS"
-\fBtexexec\fP [ \fIOPTION\fP ... ] \fIFILE\fP [ ... ]
-
-.SH "DESCRIPTION"
-
-\fBtexexec\fP, a \fBruby\fP(1) script,
-is the command-line front end to the ConTeXt typesetting
-system, an extensive macro package
-built on the \fBtex\fP(1) family of typesetting programs.
-\fBtexexec\fP provides several facilties:
-.IP \(bu
-Process a ConTeXt source file,
-performing as many runs as
-necessary of \fBtex\fP(1), \fBtexutil\fP(1), and
-MetaPost (\fBmpost\fP(1)). Options control the output
-format, the pages to process, paper size, and so forth.
-.IP \(bu
-Create new ConTeXt formats, useful during installation.
-.IP \(bu
-Post-process existing PDF files, including merging multiple
-files, and extracting and rearranging pages within a file,
-similar to the facilities provided for PostScript files by
-\fBpsnup\fP(1) or for PDF files by \fBpdftk\fP(1).
-.IP \(bu
-Extract or strip documentation from ConTeXt source files
-using \fBtexutil\fP(1).
-.IP \(bu
-Run MetaPost (\fBmpost\fP(1)) to generate figures from MetaPost source.
-.IP \(bu
-Produce proof sheets of figures used in a ConTeXt source file.
-.IP
-.SH "OPTIONS"
-
-All switches are specified in full here but can be abbreviated to the
-shortest unique prefix. Thus, \fB--env\fP works the same as
-\fB--environment\fP. With no options, \fBtexexec\fP assumes that
-the file on the command line is a ConTeXt source file, i.e. a TeX file
-in the ConTeXt dialect, and produces a PDF file using the source file.
-
-.SS "General Options"
-
-.IP "\fB--alone\fP"
-Avoid calling other programs when possible. For
-example, \fB--alone\fP will prevent \fBtexexec\fP from
-using \fBfmtutil\fP(1) to generate formats (this prevention can
-be handy when tracing installation problems).
-.IP "\fB--environments\fP\fI=\fP\fIfile[,file[,...]]\fP"
-Specify ConTeXt environment file(s), separated by commas, to use when
-processing the source file. This option is useful when converting
-from non-ConTeXt file formats without environment or layout settings.
-.IP "\fB--help\fP"
-Produce a summary of switches
-and arguments. A more detailed help is produced by including
-\fB--all\fP.
-.IP "\fB--interface=\fP\fIlanguage\fP"
-Specify the language
-ConTeXt should use to communicate with you.
-Options are
-.IP
-.RS
-.IP "\fBen\fP"
-US English
-.IP "\fBnl\fP"
-Dutch
-.IP "\fBde\fP"
-German
-.IP "\fBuk\fP"
-British English
-.IP "\fBcz\fP"
-Czech
-.IP "\fBit\fP"
-Italian
-.RE
-.IP "\fB--keep\fP"
-Preserve a few of the temporary files generated while processing by
-appending \fI.keep\fP to their name. For example, after
-.EX
-texexec --keep document.tex
-.EE
-you will find (besides \fIdocument.pdf\fP) \fIdocument.log.keep\fP
-and \fIdocument.top.keep\fP. The \fIdocument.top\fP file is where
-\fBtexexec\fP wraps \fIdocument.tex\fP with the proper ConTeXt macro
-commands to set the output format, etc.
-.IP "\fB--once\fP"
-Process a file exactly once. (By default,
-\fBtexexec\fP processes the file as many times as
-necessary to sort out all references, typeset
-\fBMetaPost\fP code, and so forth.)
-.IP "\fB--purge\fP"
-Get rid of most temporary files generated while processing the source
-file. For example,
-.EX
-texexec --purge somefile.tex
-.EE
-will generate \fIsomefile.pdf\fP, cleaning up after itself and leaving
-only one extra file, \fIsomefile.tuo\fP. See also the \fB--purge\fP
-option of
-.BR ctxtools (1)
-.IP "\fB--purgeall\fP"
-Get rid of all temporary files generated while processing the source
-file, including the \fIfilename.tuo\fP file. See also the \fB--purge
---all\fP option combination of
-.BR ctxtools (1)
-.IP "\fB--randomseed=\fP\fINNNN\fP"
-Set the random seed.
-.IP "\fB--result=\fP\fIFILENAME\fP"
-Allows you to change the
-basename of the output file. See
-\fB--mode\fP for an example.
-.IP "\fB--runs=\fP\fINUMBER\fP"
-Specify the number of runs to
-perform on a file. Overrides
-\fBtexexec\fP's calculations.
-.IP "\fB--separation\fP"
-Perform color separations.
-.IP "\fB--silent\fP"
-Suppress a few diagnostic and progress messages.
-.IP "\fB--timeout=\fP\fINNN\fP"
-Abort the run if a subprocess waits for more than \fINNN\fP seconds;
-e.g. while waiting for user input when \fBtex\fP reports an undefined
-control sequence. Useful for automated testing scripts, to make sure
-the runs finish.
-.IP "\fB--usemodules=\fP\fImodule1[,module2,[...]]\fP
-Use the comma-separated list of modules. For example, to typeset
-\fIdocument.tex\fP using the \fIbib\fP and \fIunits\fP modules:
-.EX
-texexec --usemodules=bib,units document.tex
-.EE
-.IP "\fB--verbose\fP"
-Output extra diagnostic information.
-.IP "\fB--version\fP"
-Print the version number.
-.IP
-
-.SS "Processing ConTeXt Source Files"
-
-Including specifying paper sizes, formats, and so forth.
-
-.IP "\fB--arrange\fP"
-Perform page rearrangements, which are used to produce booklets. This
-option tells ConTeXt to the first \fIn\fP-1 runs without taking
-account of arrangements, then on the last run to pay attention to
-the arrangement commands in the source file.
-.IP "\fB--batchmode\fP"
-Process the file in batchmode, which means
-to typeset the whole document even if there are errors. More
-imformation about \f(CWbatchmode\fP can be found in Donald
-E. Knuth's \fITeXbook\fP.
-.IP "\fB--nonstopmode\fP"
-Process the file in nonstopmode, which means
-to typeset the document and report errors, but not to stop at any
-error. It is similar to batchmode but more verbose. More
-imformation about \f(CWnonstopmode\fP can be found in Donald
-E. Knuth's \fITeXbook\fP.
-.IP "\fB--bodyfont=\fP\fIfont\fP"
-The name of a font to preload for use in
-setting the body of the text (OBSOLETE).
-.IP "\fB--centerpage\fP"
-Center the document on the page.
-.IP "\fB--color\fP"
-Turn on color mode. Color mode can also be set by
-commands embedded in the document. These commands
-override the \fB--color\fP option.
-.IP "\fB--convert=\fP\fIFORMAT\fP"
-Convert the input file to
-ConTeXt format from \fIFORMAT\fP before
-processing. In most cases, this conversion will result
-in a TeX file. Currently supported input
-\fIFORMAT\fPs are \fBxml\fP and
-\fBsgml\fP.
-.IP "\fB--dvipdfmx, --dvipdfm, --dpx, --dpm\fP"
-Use the TeX engine (e.g. \fBpdftex\fP or \fBpdfetex\fP)
-to make a DVI file and \fBdvipdfmx\fP(1) to turn it
-into PDF.
-.IP "\fB--dvi, --ps, --dvips\fP"
-Use the TeX engine (e.g. \fBpdftex\fP or \fBpdfetex\fP)
-to make a DVI file and \fBdvips\fP(1) to turn it
-into PostScript. It's counterintuitive that \fB--dvi\fP produces
-a PostScript file in addition to the DVI file. But that's because
-\fB--dvi\fP is shorthand for \fB--dvips\fP; adding the \fB--nobackend\fP
-option prevents \fBtexexec\fP's running \fBdvips\fP(1). See also the
-\fB--engine\fP option.
-.IP "\fB--fast\fP"
-Typeset the document(s) as fast as possible without
-causing problems.
-.IP "\fB--final\fP"
-Perform a final run without skipping anything.
-This option is typically used with \fB--fast\fP.
-.IP "\fB--language=\fP\fILANGUAGE\fP"
-Set the language for
-hyphenation. Can be specified in your source file.
-Options are the same as those for \fB--interface\fP.
-.IP "\fB--mode=\fP\fIMODELIST\fP, \fB--modes=\fP\fIMODELIST\fP"
-Allows you to change the mode used while typesetting the
-source file. The \fIMODELIST\fP is a comma separated list of modes.
-Modes are a conditional-compilation facility
-like \f(CW#ifdef\fP in C. So one source file can be used to produce
-several typeset documents: one for A4 paper, one for
-screen display in full color, one for letter paper, etc. For
-example:
-.IP
-.EX
-\f(CWtexexec --pdf --mode=A4 --result=manual-a manual-t.tex\fP
-\f(CWtexexec --pdf --mode=letter --result=manual-l manual-t.tex\fP
-\f(CWtexexec --pdf --mode=screen --result=manual-s manual-t.tex\fP
-.EE
-.IP
-Here the \fB--mode\fP tells ConTeXt which
-mode directives to use when typesetting the source file. The
-\fB--result\fP option tells ConTeXt
-where to put the output file.
-.IP "\fB--modefile=\fP\fIfile\fP"
-Load this file before most of the usual processing; usually used for
-mode-related material.
-.IP "\fB--noarrange\fP"
-Ignore arrangement commands in the source file.
-.IP "\fB--nobackend\fP"
-Do not run the backend, e.g. \fBdvips\fP(1) or \fBdvipdfmx\fP(1). See
-the \fB--dvips\fP or \fB--dvipdfmx\fP options. Why would you give one
-of those options to choose a backend, yet tell \fBtexexec\fP not to
-run the backend? Because each backend has its own syntax for
-\f(CW\\special\fP calls. Specifying the backend allows the ConTeXt
-macros to use the correct syntax so that when you later run the
-backend to produce PostScript or PDF, the specials will be interpreted
-correctly.
-.IP "\fB--pages=\fP\fIPAGENUMBERLIST\fP"
-Specify the pages or page
-range to appear in the output file.
-\fIPAGENUMBERLIST\fP may be the keyword \fBodd\fP
-or \fBeven\fP; or one or more pages or page ranges separated by commas.
-For example,
-.EX
-\f(CWtexexec --pages=1,7,8-11,14 somefile.tex\fP
-.EE
-.IP "\fB--paperformat=\fP\fIKEY\fP"
-For typesetting multiple pages on a
-single piece of paper. \fIKEY\fP has the form \fBa4a3\fP
-(for printing A4 pages on A3 paper), \fBa5a4\fP
-(for printing A5 pages on A4 paper), or in general \fBaMaN\fP.
-The actual layout of the pages is specified with the
-\fB--printformat\fP option.
-.IP "\fB--pdf, --pdftex\fP"
-Use \fBpdftex\fP(1) to produce a pdf document (the default).
-.IP "\fB--printformat=\fP\fIKEY\fP"
-Specify the layout of the final
-output. \fIKEY\fP can be \fBup\fP, resulting in 2
-pages per sheet, double sided; or \fBdown\fP, resulting
-in 2 rotated pages per sheet, double sided. Use the
-\fB--paperformat\fP option to specify the original page
-and sheet size.
-.IP "\fB--utfbom\fP"
-Turn on UTF-8 encoding.
-.IP "\fB--xetex, --xtx\fP"
-Use \fBxetex\fP(1) to produce a pdf document.
-.IP
-
-.SS "Creating ConTeXt Format Files"
-
-.IP "\fB--make\fP"
-Generate a ConTeXt format file. For example, to make
-\fIcont-en.fmt\fP and have it placed in a default format directory:
-.EX
-texexec --make de
-.EE
-The most common invocation, which is used by scripts that install a new version
-of ConTeXt (see \fBctxtools\fP(1)), uses \fB--all\fP
-so that \fBtexexec\fP makes the usual formats:
-.EX
-texexec --make --all
-.EE
-.IP "\fB--local\fP"
-When searching for TeX or MetaPost formats, look in the current directory
-rather than in the location set by the kpse library. See
-.BR kpathsea (1)
-for more information on path searching.
-.IP "\fB--check\fP"
-Check and report information about the ConTeXt version, the
-distribution, the TeX engine, and the language interfaces/formats.
-
-.SS "Expert options"
-
-You should know what you're doing if you use these options!
-.IP "\fB--alpha"
-Use the TEXMFALPHA environment variable to find and
-run an alpha release of ConTeXt.
-.IP "\fB--beta"
-Use the TEXMFBETA environment variable to find and
-run a beta release of ConTeXt.
-.IP "\fB--distribution\fP\fI=dist\fP"
-Usually one of \fBstandard\fP, \fBweb2c\fP, or \fBmiktex\fP.
-\fBtexexec\fP should figure it out automatically, and you shouldn't
-need to use this option.
-.IP "\fB--engine\fP\fI=texengine\fP"
-Specify the program to do the hard work of typesetting. Currently
-either \fBpdftex\fP (the default), \fBxetex\fP, or \fBaleph\fP.
-The \fBluatex\fP value is experimental. The \fB--engine\fP
-option is not usually needed. Instead, let
-\fBtexexec\fP figure out the setting based on other command-line
-information. See for example the \fB--xetex\fP or \fB--pdf\fP
-switches.
-
-.SS "Postprocess PDF Files"
-
-.IP "\fB--combination=\fP\fIROWS\fP\fB*\fP\fICOLS\fP"
-Specify the number of pages to show on a single page. Use with
-\fB--pdfcombine\fP.
-.IP "\fB--pdfarrange\fP"
-For rearranging pages in PDF files.
-.EX
-\f(CWtexexec --pdfarrange --paperformat=a5a4 --printformat=up foo.pdf\fP
-.EE
-This command creates an A5 booklet from a PDF file
-\fIfoo.pdf\fP. \fB--pdfarrange\fP is used in
-conjunction with the following options.
-.IP "\fB--pdfcopy\fP"
-Copy and perhaps process pages from the pdf file.
-The resulting file is \fItexexec.pdf\fP by default, but you can change
-that using \fB--result\fP. Use the \fB--scale\fP option to magnify or
-demagnify the original pages and the \fB--pages\fP option to select
-the pages to copy. Here is an example using all these options:
-.EX
-texexec --pages=4-7 --pdfcopy --scale=750 --result=one images.pdf
-.EE
-It takes pages 4-7 from \fIimages.pdf\fP, scales them by 75%,
-and copies them to \fIone.pdf\fP.
-.IP "\fB--scale=\fP\fIinteger\fP"
-If the integer is less than 10, then it is taken as an (integer)
-magnification factor. Otherwise, it is taken as a magnification
-factor in TeX terms, i.e. with 1000 meaning full scale.
-.IP "\fB--paperoffset=\fP\fIdimen\fP"
-Specify the space between the
-edge of the pages and the beginning of the text block.
-.IP "\fB--backspace=\fP\fIdimen\fP"
-Specify the inside (gutter) margins.
-.IP "\fB--topspace=\fP\fIdimen\fP"
-Specify the top and bottom margin.
-.IP "\fB--markings\fP"
-Add crop marks.
-.IP "\fB--addempty=\fP\fIPAGES\fP"
-Add empty pages after the pages specified in \fIPAGES\fP. (Useful
-for, among other things, adding blank pages after a table of
-contents.)
-.IP "\fB--textwidth=\fP\fIWIDTH\fP"
-Set the width of
-the original text. Specifying this parameter with a
-single-sided original will allow ConTeXt to adjust
-the page layout for double-sided output, producing much
-more attractive results.
-
-With the \fB--pdfarrange\fP flag, specifying more
-than one file will result in all of the files being
-combined in the final result, allowing you to add title
-pages, decorated part separators, and so forth.
-
-You can also do more complex manipulations, such as
-adding additional text to the page by setting up a
-small file with layout definitions and a simple figure
-insertion loop.
-.IP "\fB--pdfcombine\fP"
-Combine multiple pages. Requires the \fB--combination\fP option.
-.IP "\fB--pdfselect\fP"
-Extract pages from a file. Use in combination with the
-\fB--selection\fP switch, as in
-.EX
-\f(CWtexexec --pdfselect --paperformat=S6
---selection=1,9,14 file-1\fP
-.EE
-which extracts pages 1, 9, and 14 from
-\fIfile-1.pdf\fP, and places them in
-\fItexexec.pdf\fP (the default output filename if
-an output file isn't specified).
-
-See \fB--pdfarrange\fP for other
-options.
-.IP "\fB--selection=\fP\fIPAGES\fP"
-Specify pages to be affected by
-another option. See \fB--pdfarrange\fP and
-\fB--pdfselect\fP for examples.
-
-.SS "XML handling"
-.IP "\fB--filters=\fP\fIfilter1[,filter2[,...]]\fP
-Specify XML filters to use.
-
-.SS "Extract or Strip Out Documentation"
-
-.IP "\fB--listing\fP"
-Produce a typeset version of the source code in
-\fIFILE\fP. You can specify the format of the output
-file. For example, use
-.EX
-\f(CWtexexec --ps --listing readme.now\fP
-.EE
-to produce a PostScript file called
-\fItexexec.ps\fP.
-
-See also \fB--backspace\fP, \fB--topspace\fP, and \fB--result\fP.
-.IP "\fB--module\fP"
-Create documentation for ConTeXt,
-MetaPost (see \fBmpost\fP(1)),
-.BR perl (1),
-and
-.BR ruby (1)
-modules.
-Converts the documentation to ConTeXt format and
-then typesets a documentated version of the source file.
-
-Documentation lines in ConTeXt source files are
-specified by beginning lines with these strings:
-
-\f(CW%C\fP : Copyright information
-
-\f(CW%D\fP : Documentation lines
-
-\f(CW%I\fP : TeXEdit information lines (mostly in Dutch)
-
-\f(CW%M\fP : Macro code needed to processs the documentation
-
-\f(CW%S\fP : Suppressed lines
-
-The same forms can be used for Perl or ruby scripts, except that the \f(CW%\fP
-character (the TeX comment character) is replaced by \f(CW#\fP (the
-Perl comment character).
-
-See also the \fB--documentation\fP option to
-.BR ctxtools (1).
-
-.SS "Process MetaPost Figures"
-
-.IP "\fB--mpsformats=\fP\fIname\fP"
-The name of a MetaPost format file, e.g. \fBmetafun\fP (the default).
-.IP "\fB--mptex\fP"
-Strips out and typesets TeX code embedded in a
-MetaPost file.
-.IP "\fB--nomp\fP"
-Do not run \fBmpost\fP(1), even if needed.
-.IP "\fB--nomprun\fP"
-Do not run \fBmpost\fP(1) on embedded
-MetaPost code.
-
-.SS "Producing Proof Sheets of Figures"
-
-Generate information and proof sheets of one or more (non-EPS)
-graphics files. For example,
-.EX
-texexec --figures *.png *.jpg
-.EE
-scans the current directory for PNG and JPG files
-and extracts useful information about their sizes and types. By
-default, this information is stored in
-.IR rlxtools.rli .
-Then the given figures are made into a proof sheet (by default
-\fItexexec.pdf\fP) according to the method specified by the
-\fB--method\fP option. Note that newer versions of
-.BR pdftex (1)
-do not support TIFF inclusion.
-.IP "\fB--method=\fP\fIALTERNATIVE\fP"
-Specify one of three options to produce the document containing the images
-used in the source file:
-
-\fBa\fP : A proof sheet with additional
-information provided for each figure (the default)
-
-\fBb\fP : A proof sheet with the graphics only
-
-\fBc\fP : One figure per page, with the page
-clipped to the bounding box of the
-figure
-
-See also \fB--paperoffset\fP,
-which allows you to specify an offset to be added to
-the page, as in
-.EX
-texexec --figures --method=c --paperoffset=.5cm *.pdf *.png *.jpg
-.EE
-
-.SH "USAGE"
-
-Each ConTeXt user interface (language) has its own format. The
-following command generates two formats, one using the English
-interface for typesetting in English, and one for Dutch:
-.EX
-\f(CWtexexec --make en nl\fP
-.EE
-
-By default, the language used for typesetting matches the
-user-interface language (set with
-\fB--interface\fP. It is possible to use one
-language for typesetting and another for messages by changing the
-relevant settings in \fIcont-usr.tex\fP. These languages can
-also be changed on the command line with a command such as
-.IP
-.RS
-\f(CWtexexec --make --language=pl,cz,sk en\fP
-.RE
-.IP
-That command generates a ConTeXt format file with an English user
-interface, and the main language set to Polish (\fBpl\fP). Czech
-and Slovak hyphenation patterns are also loaded
-so that Czech and Slovak text included in a source file will be
-typeset properly (\fBcz\fP and \fBsk\fP).
-.IP o
-When the appropriate formats are present, a file can be typeset
-by typing
-.EX
-\f(CWtexexec test\fP
-.EE
-.IP
-\fBtexexec\fP tries to determine what interface it should use to
-typeset \fItest.tex\fP by looking for a line such as
-.IP
-.EX
-\f(CW% interface=en tex=pdftex output=pdftex\fP
-.EE
-.IP
-at the top of the file (i.e., on the very first line). This line is
-equivalent to \fBTeX\fP's format line, ``&\fIFORMAT\fP'').
-.IP
-By default, \fBtexexec\fP will produce a PDF file using \fBpdftex\fP(1). The
-\fB--dvips\fP flag tells \fBtexexec\fP to produce a PostScript
-file instead.
-.IP
-After an error-free run, \fBtexexec\fP will run \fBtexutil\fP(1) to
-determine whether additional runs of \fBtex\fP(1) (or
-\fBpdftex\fP(1)) or any utility programs (e.g., \fBbibtex\fP(1),
-\fBmakeindex\fP(1)) are necessary. You can suppress these
-additional runs by specifying the \fB--once\fP or
-\fB--runs\fP flags:
-.IP
-.EX
-\f(CWtexexec --once test\fP
-\f(CWtexexec --runs=2 test\fP
-.EE
-.IP
-
-.SH EXAMPLES
+.TH "texexec" "1" "01-01-2013" "version 6.2.1" "TeXExec"
+.SH NAME
+.B texexec
+.SH SYNOPSIS
+.B texexec [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B TeXExec
+.SH OPTIONS
+.TP
+.B --make
+make formats
+.TP
+.B --check
+check versions
+.TP
+.B --process
+process file
+.TP
+.B --mptex
+process mp file
+.TP
+.B --mpxtex
+process mpx file
+.TP
+.B --mpgraphic
+process mp file to stand-alone graphics
+.TP
+.B --mpstatic
+process mp/ctx file to stand-alone graphics
+.TP
+.B --listing
+list of file content
+.TP
+.B --figures
+generate overview of figures
+.TP
+.B --modules
+generate module documentation
+.TP
+.B --pdfarrange
+impose pages (booklets)
.TP
-Produce PDF from ConTeXt source (the .tex extension is optional):
-\f(CWtexexec file.tex\fP
+.B --pdfselect
+select pages from file(s)
.TP
-Same as the above but without rerunning for crossreferences, etc.:
-\f(CWtexexec --once file.tex\fP
+.B --pdfcopy
+copy pages from file(s)
.TP
-Produce PostScript from ConTeXt source:
-\f(CWtexexec --ps file.tex\fP
+.B --pdftrim
+trim pages from file(s)
.TP
-Produce file-a4.pdf using conditional compilation (modes):
-\f(CWtexexec --mode=a4 --result=file-a4 file.tex\fP
+.B --pdfcombine
+combine multiple pages
.TP
-Generate format (.fmt) files used by ConTeXt (used during installation):
-\f(CWtexexec --make --all\fP
+.B --pdfsplit
+split file into pages
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
-.SH "INITIALIZATION"
-.IP
-\fBtexexec\fP requires ruby. On Unix and Unix-like systems, no special
-steps have to be taken to get \fBtexexec\fP to work beyond installing
-ruby and having the \fBruby\fP(1) binary in your path.
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-.SH "SEE ALSO"
-.PP
-\fBbibtex\fP(1), \fBctxtools\fP(1), \fBdvipdfmx\fP(1), \fBdvips\fP(1),
-\fBfmtutil\fP(1), \fBmakeindex\fP(1), \fBmpost\fP(1),
-\fBmptopdf\fP(1), \fBpdftex\fP(1), \fBpdftk\fP(1), \fBruby\fP(1),
-\fBpsnup\fP(1), \fBtex\fP(1), \fBtexfont\fP(1), \fBtexmfstart\fP(1),
-\fBtexshow\fP(1),
-\fBtexutil\fP(1), \fBxetex\fP(1).
-.PP
-The texexec manual
-.IR mtexexec.pdf ,
-available from
-.URL "http://www.pragma-ade.com/dir/general/manuals/" "PRAGMA ADE" .
-.PP
-Donald E. Knuth's \fIThe TeXbook\fP.
-.PP
-.URL "http://www.contextgarden.net" "ConTeXt wiki" .
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
-.SH "AUTHOR"
-This manpage was written by Tobias Burnus
-<burnus@gmx.de> and C.M. Connelly
-<c@eskimo.com> and updated by Sanjoy Mahajan <sanjoy@mit.edu>.
-It is based on the
-.I mtexexec.pdf
-manual written by Hans Hagen <pragma@wxs.nl>.
+.B "wiki:"
+http://contextgarden.net
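
The worked examples removed from the old texexec page give a feel for options that the terse list above only names; the supporting switches (--selection, --pages, --scale, --result, --once) are described only in the removed text. Restating three of those examples:

    texexec file.tex
    texexec --pdfselect --selection=1,9,14 file-1
    texexec --pages=4-7 --pdfcopy --scale=750 --result=one images.pdf

According to the removed descriptions, the first produces file.pdf, rerunning TeX as often as needed; the second extracts pages 1, 9 and 14 from file-1.pdf into texexec.pdf; the third copies pages 4-7 of images.pdf at 75% scale into one.pdf.
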
diff --git a/Master/texmf-dist/doc/man/man1/texmfstart.1 b/Master/texmf-dist/doc/man/man1/texmfstart.1
index e99ab230bd1..212097ff153 100644
--- a/Master/texmf-dist/doc/man/man1/texmfstart.1
+++ b/Master/texmf-dist/doc/man/man1/texmfstart.1
@@ -1,362 +1,147 @@
-.TH "TEXMFSTART" "1" "December 2006" "texmfstart 2.0" "ConTeXt"
-.de URL
-\\$2 \(laURL: \\$1 \(ra\\$3
-..
-.if \n[.g] .mso www.tmac
-.de EX
-.in +3
-.nf
-.ft CW
-..
-.de EE
-.in -3
-.ft R
-.fi
-..
-
-.SH "NAME"
-texmfstart \- run ConTeXt scripts, make ConTeXt wrapper scripts, view documents
-
-.SH "SYNOPSIS"
-\fB texmfstart\fP [ \fIoptions ...\fP ] [ \fIfilename\fP ] [ \fIarguments ...\fP ]
-
-.SH "DESCRIPTION"
-ConTeXt is a typesetting system based on the
-.BR tex (1)
-family of programs.
-.B texmfstart
-is a ConTeXt meta-script that can run subordinate
-scripts (subscripts?) such as
-.BR texexec (1),
-.BR ctxtools (1),
-or
-.BR pdftools (1).
-It can also generate short wrapper scripts to simplify common uses.
-These scripts are often called
-.I stubs
-in the ConTeXt documentation. The options control
-.BR texmfstart 's
-behaviour, and the arguments are passed
-to the program identified by
-.IR filename .
-
-If the filename is a document, then
-.B texmfstart
-will start a viewer for that document. For example:
-.EX
-texmfstart showcase.pdf
-.EE
-will start an appropriate viewer for
-.IR showcase.pdf .
-See the
-.B EXAMPLES
-section.
-
+.TH "mtxrun" "1" "01-01-2013" "version 1.31" "ConTeXt TDS Runner Tool"
+.SH NAME
+.B mtxrun
+.SH SYNOPSIS
+.B mtxrun [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt TDS Runner Tool
.SH OPTIONS
-
-.SS General:
.TP
-.B --help, --version
-print version information, usage, and examples.
+.B --script
+run an mtx script (lua preferred method) (--noquotes), no script gives list
.TP
-.B --verbose
-print status and progress information, for example what commands are
-being executed.
+.B --execute
+run a script or program (texmfstart method) (--noquotes)
.TP
-.B --clear
-don't pass info about locations to child processes.
-
-.SS Running a program:
+.B --resolve
+resolve prefixed arguments
.TP
-.B --arguments=\fIstr\fP
-an alternative for providing the arguments to be passed. For example,
-.EX
-texmfstart --arguments=b.tex texexec
-.EE
-will pass
-.I b.tex
-as the argument to
-.BR texexec .
+.B --ctxlua
+run internally (using preloaded libs)
.TP
-.B --report
-dry run: report what command would be run, but do not run it
+.B --internal
+run script using built in libraries (same as --ctxlua)
.TP
.B --locate
-dry run: like \f(CW--report\fP but doesn't print the trailing newline.
+locate given filename in database (default) or system (--first --all --detail)
.TP
-.B --browser
-view the document in a web browser (for Windows).
+.B --autotree
+use texmf tree cf. env texmfstart_tree or texmfstarttree
.TP
-.B --file=\fIfilename\fP
-an alternative way to specify the file (the program to run or document
-to open).
+.B --tree=pathtotree
+use given texmf tree (default file: setuptex.tmf)
.TP
-.B --direct
-run a program without searching for its location (assumes that the
-program is on the PATH).
+.B --environment=name
+use given (tmf) environment file
.TP
-.B --execute
-use the
-.BR ruby (1)
-.I exec
-function instead of its
-.I system
-function.
-.TP
-.B --program=\fIstr\fP
-the program space where
-.BR kpsewhich (1)
-will search (default: context). This information is given to
-.B kpsewhich
-as its \f(CW-progname\fP option. Usually you don't need this option.
-
-.SS Creating startup scripts:
-.TP
-.B --make
-create a wrapper script or batch file to run the given file.
-The wrapper scripts are put in the current path, which usually means
-the current directory. If
-\fIall\fP is given as the file, e.g.
-.EX
-texmfstart --make all
-.EE
-then make all the ConTeXt wrapper scripts (the stubs).
+.B --path=runpath
+go to given path before execution
.TP
-.B --windows
-when making a wrapper script (stub), create a Windows batch (.bat) file.
-Usually you
-do not need to specify this option, as
-.B texmfstart
-will figure out what operating system you are using.
-.TP
-.B --linux
-when making a wrapper script, create a Unix shell script. Usually you
-do not need to specify this option, as
-.B texmfstart
-will figure out what operating system you are using.
-.TP
-.B --stubpath=\fIpath\fP
-specify where to put the wrapper scripts (stubs).
-.TP
-.B --indirect
-always use
-.I texmfstart
-in the wrapper script (stub).
-
-.SS Document viewing:
+.B --ifchanged=filename
+only execute when given file has changed (md5 checksum)
.TP
-.B --page=\fInumber\fP
-open the document at this page.
-
-.SS Environments and paths:
+.B --iftouched=old,new
+only execute when given file has changed (time stamp)
.TP
-.B --path=\fIstr\fP
-change to the specified path.
+.B --makestubs
+create stubs for (context related) scripts
.TP
-.B --tree=\fIstr\fP
-use the given TEXMF tree.
+.B --removestubs
+remove stubs for (context related) scripts
.TP
-.B --autotree
-automatically determine the TEXMF tree to use (the default).
+.B --stubpath=binpath
+paths where stubs will be written
.TP
-.B --environment=\fIstr\fP
-use the given environment file. Its syntax is given in the
-.I mtexmfstart.pdf
-manual.
+.B --windows
+create windows (mswin) stubs
.TP
-.B --showenv
-print the environment variables known at runtime
-
-.SS Conditional execution:
+.B --unix
+create unix (linux) stubs
.TP
-.B --iftouched=\fIfile1,file2\fP
-run only when the given files have different timestamps.
+.B --verbose
+give a bit more info
.TP
-.B --ifchanged=\fIstr\fP
-run only when the given file has changed (based on its last-computed MD5
-checksum).
-
-.SS Special features:
+.B --trackers=list
+enable given trackers
+.TP
+.B --progname=str
+format or backend
.TP
.B --edit
-open the given file in an editor.
-
-.SH "FILENAME PREFIXES"
-.SS
-Optional prefixes determine the method used to search for the specified file:
+launch editor with found file
.TP
-.I bin:filename
-expanded name based on the PATH environment variable
+.B --launch
+launch files like manuals, assumes os support (--all)
.TP
-.I kpse:filename
-expanded name based on
-.BR kpsewhich (1)
-result
+.B --timedrun
+run a script and time its run
.TP
-.I rel:filename
-expanded name relative to the current directory
+.B --autogenerate
+regenerate databases if needed (handy when used to run context in an editor)
.TP
-.I env:name
-expanded pathname based on environment variable
-.I name
+.B --usekpse
+use kpse as fallback (when no mkiv and cache installed, often slower)
.TP
-.I path:filename
-path part of filename as located by
-.BR kpsewhich (1)
-
-.SH EXAMPLES
-.TP
-\f(CWtexmfstart texexec.rb file.tex\fP
-Locate the
-.I texexec.rb
-script and run it with
-.I file.tex
-as its argument. In other words, make
-.I file.pdf
-from
-.IR file.tex .
-If
-.BR texexec (1)
-is properly installed on your system,
-this common invocation can be shortened to
-.EX
-\f(CWtexexec file.tex\fP
-.EE
-.TP
-\f(CWtexmfstart texexec file.tex\fP
-Locate the
-.BR texexec (1)
-program (currently a
-.BR ruby (1)
-script,
-.IR texexec.rb )
-and run it with
-.I file.tex
-as its argument, producing
-.I file.pdf.
-This invocation can be shortened to
-.EX
-texexec file.tex
-.EE
-.TP
-\f(CWtexmfstart ctxtools --updatecontext\fP
-Run the
-.BR ctxtools (1)
-script, updating the ConTeXt installation. This
-invocation is equivalent to
-.EX
-ctxtools --updatecontext
-.EE
-.TP
-\f(CWtexmfstart pstopdf --method=3 cow.eps\fP
-Convert
-.I cow.eps
-to PDF using method 3 of
-.BR pstopdf (1).
-This invocation is equivalent to
-.EX
-pstopdf --method=3 cow.eps
-.EE
-.TP
-\f(CWtexmfstart --make --stubpath=/usr/local/bin texexec\fP
-Make a wrapper script (stub), either a shell script
-.I /usr/local/bin/texexec
-on Unix, or a batch file \fI\\usr\\local\\bin\\texexec.bat\fP
-on Windows. On Unix (and maybe on Windows?), you need to make
-the script executable; see
-.BR chmod (1).
-.TP
-\f(CWtexmfstart --edit kpse:cont-sys.tex\fP
-Locate and edit the \fIcont-sys.tex\fP configuration file.
-.TP
-\f(CWtexmfstart --ifchanged=whatever.mp texexec --mpgraphic whatever.mp\fP
-Rerun
-.B texexec
-if
-.I whatever.mp
-has changed since the last use of
-.IR --ifchanged .
-.TP
-\f(CWtexmfstart --ifchanged=whatever.mp bin:echo rerun MetaPost\fP
-If the
-.I whatever.mp
-source file has changed since the last use of
-.IR --ifchanged ,
-then use the
-.BR echo (1)
-command to tell the user to rerun MetaPost
-.RB "(see " mpost (1)).
-This example shows that
-.B texmfstart
-can be used to run any script, not just ConTeXt scripts. The
-\f(CWbin:\fP prefix tells
-.B texmfstart
-not to search for
-.I echo
-in the TEXMF tree(s), but to assume that it's an executable somewhere
-on the PATH.
-.TP
-\f(CWtexmfstart --ifchanged=whatever.mp --direct echo rerun MetaPost\fP
-This invocation has the same effect as the preceding example, but
-using \f(CW--direct\fP instead of the \f(CWbin:\fP prefix, again to tell
-.B texmfstart
-not to search for the
-.I echo
-command.
-
-.SH FILES
+.B --forcekpse
+force using kpse (handy when no mkiv and cache installed but less functionality)
.TP
-.I file.md5
-MD5 checksum file used for the \f(CW--ifchanged\fP option.
-
-.SH ENVIRONMENT
-.TP
-PATH
-For expanding filenames given with a
-.I bin
-prefix.
-.TP
-TEXMFSTART_EDITOR, EDITOR, editor
-Editor to use with
-.IR --edit .
-The environment variables are looked up in that order, with the first
-setting found taking priority.
+.B --prefixes
+show supported prefixes
+.TP
+.B --generate
+generate file database
+.TP
+.B --variables
+show configuration variables
+.TP
+.B --configurations
+show configuration order
+.TP
+.B --directives
+show (known) directives
+.TP
+.B --trackers
+show (known) trackers
+.TP
+.B --experiments
+show (known) experiments
+.TP
+.B --expand-braces
+expand complex variable
+.TP
+.B --expand-path
+expand variable (resolve paths)
+.TP
+.B --expand-var
+expand variable (resolve references)
+.TP
+.B --show-path
+show path expansion of ...
+.TP
+.B --var-value
+report value of variable
+.TP
+.B --find-file
+report file location
+.TP
+.B --find-path
+report path of file
+.TP
+.B --pattern=string
+filter variables
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
-.SH "SEE ALSO"
-.PP
-.BR ctxtools (1),
-.BR kpsewhich (1),
-.BR makempy (1),
-.BR mpost (1),
-.BR pdftools (1),
-.BR pstopdf (1),
-.BR texexec (1),
-.BR texfont (1),
-.BR texutil (1).
-.PP
-.URL "http://www.contextgarden.net" "ConTeXt wiki" .
-.PP
-The
-.B texmfstart
-manual,
-.IR mtexmfstart.pdf ,
-available from
-.URL "http://www.pragma-ade.com/dir/general/manuals/" "PRAGMA ADE" .
-.SH BUGS
-On Unix, opening a PDF document first tries
-.B pdfopen
-then
-.BR acroread ,
-neither of which may be present on your system.
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-The \f(CW--report\fP option doesn't work if you specify a filename
-prefix (i.e. it does a real run instead of a dry run).
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
-.SH "AUTHOR"
-ConTeXt is written and maintained by
-.URL "http://www.pragma-ade.com" "Hans Hagen" .
-This man page, which is in the public domain, was written by Sanjoy
-Mahajan <sanjoy@mit.edu> based on the
-.I mtexmfstart.pdf
-manual.
+.B "wiki:"
+http://contextgarden.net
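
The texmfstart page content is replaced above by that of mtxrun, the ConTeXt TDS Runner Tool. A few simple invocations using only options from the list above (the file name and variable here are illustrative placeholders, not taken from this page):

    mtxrun --generate
    mtxrun --find-file somefile.tex
    mtxrun --expand-var TEXMF

The first rebuilds the file database, the second reports where a file of that name is located, and the third resolves a configuration variable.
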
diff --git a/Master/texmf-dist/fonts/map/pdftex/context/koeieletters.map b/Master/texmf-dist/fonts/map/pdftex/context/koeieletters.map
deleted file mode 100644
index c3e177c176e..00000000000
--- a/Master/texmf-dist/fonts/map/pdftex/context/koeieletters.map
+++ /dev/null
@@ -1,8 +0,0 @@
-koeieletters koeieletters " koeielettersEnc ReEncodeFont " <koeieletters.enc <koeieletters.pfb
-koeieletters-contour koeieletters-contour " koeielettersEnc ReEncodeFont " <koeieletters.enc <koeieletters-contour.pfb
-koeien-ex koeieletters " koeielettersEnc-ex ReEncodeFont " <koeieletters-ex.enc <koeieletters.pfb
-koeien-sy koeieletters " koeielettersEnc-sy ReEncodeFont " <koeieletters-sy.enc <koeieletters.pfb
-koeien-mi koeieletters " koeielettersEnc-mi ReEncodeFont " <koeieletters-mi.enc <koeieletters.pfb
-koeielogos koeielogos " koeielogosEnc ReEncodeFont " <koeielogos.enc <koeielogos.pfb
-koeielogos-contour koeielogos-contour " koeielogosEnc ReEncodeFont " <koeielogos.enc <koeielogos-contour.pfb
-
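
The deleted map entries follow the standard pdftex map-file syntax: TFM name, PostScript font name, an optional quoted PostScript snippet (here a reencoding instruction), then the encoding vector and Type1 file to embed. The first entry, for instance,

    koeieletters koeieletters " koeielettersEnc ReEncodeFont " <koeieletters.enc <koeieletters.pfb

maps the TFM koeieletters onto the Type1 font of the same name, reencoded through koeieletters.enc and embedded from koeieletters.pfb.
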
diff --git a/Master/texmf-dist/fonts/opentype/context/tests/texmfhome.otf b/Master/texmf-dist/fonts/opentype/context/tests/texmfhome.otf
deleted file mode 100644
index d0af1152ff4..00000000000
--- a/Master/texmf-dist/fonts/opentype/context/tests/texmfhome.otf
+++ /dev/null
Binary files differ
diff --git a/Master/texmf-dist/fonts/pfm/hoekwater/context/contnav.pfm b/Master/texmf-dist/fonts/pfm/hoekwater/context/contnav.pfm
new file mode 100644
index 00000000000..2e6e2eebb6c
--- /dev/null
+++ b/Master/texmf-dist/fonts/pfm/hoekwater/context/contnav.pfm
Binary files differ
diff --git a/Master/texmf-dist/metapost/context/base/metafun.mpiv b/Master/texmf-dist/metapost/context/base/metafun.mpiv
index d600764a3d0..8247c121f2f 100644
--- a/Master/texmf-dist/metapost/context/base/metafun.mpiv
+++ b/Master/texmf-dist/metapost/context/base/metafun.mpiv
@@ -11,10 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D First we input John Hobby's metapost plain file. However,
-%D because we want to prevent dependency problems and in the
-%D end even may use a patched version, we prefer to use a
-%D copy.
+%D First we input John Hobby's metapost plain file. However, because we want to
+%D prevent dependency problems and in the end even may use a patched version,
+%D we prefer to use a copy.
input "mp-base.mpiv" ;
input "mp-tool.mpiv" ;
diff --git a/Master/texmf-dist/metapost/context/base/mp-abck.mpiv b/Master/texmf-dist/metapost/context/base/mp-abck.mpiv
index 02fab033787..abd7d8848ec 100644
--- a/Master/texmf-dist/metapost/context/base/mp-abck.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-abck.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-apos.mpiv b/Master/texmf-dist/metapost/context/base/mp-apos.mpiv
index f92efc5fda8..7b77377545a 100644
--- a/Master/texmf-dist/metapost/context/base/mp-apos.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-apos.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-asnc.mpiv b/Master/texmf-dist/metapost/context/base/mp-asnc.mpiv
index dfd88317c0c..2626e4d584e 100644
--- a/Master/texmf-dist/metapost/context/base/mp-asnc.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-asnc.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-butt.mpiv b/Master/texmf-dist/metapost/context/base/mp-butt.mpiv
index df7e9e4fdab..6f5b90a7e66 100644
--- a/Master/texmf-dist/metapost/context/base/mp-butt.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-butt.mpiv
@@ -5,7 +5,7 @@
%D subtitle=buttons,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-char.mpiv b/Master/texmf-dist/metapost/context/base/mp-char.mpiv
index c293b728468..f604accd8e4 100644
--- a/Master/texmf-dist/metapost/context/base/mp-char.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-char.mpiv
@@ -5,7 +5,7 @@
%D subtitle=charts,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-chem.mpiv b/Master/texmf-dist/metapost/context/base/mp-chem.mpiv
index cb595f2c661..2addb0a73ea 100644
--- a/Master/texmf-dist/metapost/context/base/mp-chem.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-chem.mpiv
@@ -3,15 +3,15 @@
%D version=2009.05.13,
%D title=\CONTEXT\ \METAPOST\ graphics,
%D subtitle=chemicals,
-%D author=Hans Hagen,
+%D author=Hans Hagen \& Alan Braslau,
%D date=\currentdate,
-%D copyright=\PRAGMA]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
%C details.
-%D This module in incomplete and experimental.
+%D This module is incomplete and experimental.
% either consistent setting or not
@@ -20,821 +20,1671 @@ if known context_chem : endinput ; fi ;
boolean context_chem ; context_chem := true ;
numeric
- chem_width, chem_radical_min, chem_radical_max, chem_text_max, chem_circle_radius,
- chem_rotation, chem_adjacent, chem_stack, chem_substituent, chem_direction, chem_setting_scale,
- chem_setting_offset, chem_text_offset, chem_picture_offset, chem_center_offset, chem_substituent_offset,
- chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b ;
+ chem_num[], % scratch
+ chem_text_min, chem_text_max,
+ chem_rotation, chem_adjacent, chem_stack_n,
+ chem_substituent, chem_substituent.lft, chem_substituent.rt,
+ chem_setting_offset, chem_text_offset, chem_picture_offset,
+ chem_center_offset, chem_dbl_offset,
+ chem_bb_angle, chem_axis_rulethickness,
+ chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b,
+ chem_setting_rotation, chem_emwidth, chem_b_length,
+ chem_front_b[] ;
boolean
chem_setting_axis,
- chem_setting_fixedwidth, chem_setting_fixedheight,
- chem_doing_pb, chem_text_trace ;
+ chem_doing_pb, chem_bd_wedge,
+ chem_star[], chem_front[], chem_stacked[], chem_tetra[] ;
+
+string
+ chem_previous ;
path
- chem_setting_bbox ;
+ chem_path[], % scratch
+ chem_b_path[], chem_c_path[],
+ chem_r_path[], chem_r_path.lft[], chem_r_path.rt[] ;
pair
- chem_shift,
- chem_adjacent_p, chem_substituent_p, chem_direction_p, chem_move_p ;
+ chem_origin, chem_mirror,
+ chem_pair[], % scratch
+ chem_sb_pair, chem_sb_pair.m, chem_sb_pair.p, chem_sb_pair.b ;
+
+picture
+ chem_pic, % scratch
+ % The use of dashpattern is found to dot the starting point with chem_sb_dash.m...
+ %chem_sb_dash, chem_sb_dash.m, chem_sb_dash.p, chem_sb_dash.b,
+ chem_axis_color ;
+
+transform
+ chem_t ; % scratch
+
+color lightblue ; lightblue := (173/255,216/255,230/255) ;
+
+% debugging
+
+boolean chem_trace_nesting ; chem_trace_nesting := false ;
+boolean chem_trace_text ; chem_trace_text := false ;
+boolean chem_trace_boundingbox ; chem_trace_boundingbox := false ;
+
+chem_axis_color := image(draw origin withcolor lightblue) ;
+chem_setting_axis := false ;
+chem_axis_rulethickness := 1pt ;
+chem_emwidth := 10pt ; % EmWidth or \the\emwidth does not work...
+chem_b_length := 3 chem_emwidth ;
+chem_text_offset := -.3chem_emwidth ; % -.71chem_emwidth ; % 1/sqrt(2)
+chem_center_offset := .5 chem_emwidth ;
+chem_picture_offset := chem_emwidth ;
+chem_dbl_offset := .05 ;
+chem_bb_angle := angle(1,2chem_dbl_offset) ;
+chem_text_min := 0.75 ;
+chem_text_max := 1.25 ;
+chem_dot_factor := 4 ; % *linewidth
+chem_sb_pair := (0.25,0.75) ; %chem_sb_dash := dashpattern(off 0.25 on 0.5 off 0.25) ;
+chem_sb_pair.m := (0.25,1 ) ; %chem_sb_dash.m := dashpattern(off 0.25 on 0.75) ;
+chem_sb_pair.p := (0 ,0.75) ; %chem_sb_dash.p := dashpattern(on 0.75 off 0.25) ;
+chem_sb_pair.b := (0, 1 ) ; %chem_sb_dash.b := dashpattern(on 1) ;
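+% Presumably each pair gives the fractional start/end of the drawn stroke along a bond,
+% matching the dashpatterns kept alongside: (0.25,0.75) draws only the middle half.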
+
+chem_bd_wedge := false ; % true is incorrect, but quite common...
+
+def chem_reset =
+ chem_rotation := 0 ;
+ chem_mirror := origin ;
+ chem_adjacent := 0 ;
+ chem_substituent := 0 ;
+ chem_substituent.lft := 0 ;
+ chem_substituent.rt := 0 ;
+ chem_stack_n := 0 ;
+ chem_doing_pb := false ;
+ chem_origin := origin ;
+ chem_previous := "one" ;
+ pair chem_mark_pair[] ;
+enddef ;
+
+chem_reset ;
+
+newinternal numeric
+ one, carbon, alkyl, newmanstagger, newmaneclipsed,
+ three, four, five, six, seven, eight, nine,
+ fivefront, sixfront, chair, boat ;
+
+vardef chem_init_some (suffix $) (expr e) =
+ if not known chem_star[$] : chem_star[$] := false ; fi
+ if not known chem_front[$] : chem_front[$] := false ; fi
+ if not known chem_stacked[$] : chem_stacked[$] := false ; fi
+ if not known chem_tetra[$] : chem_tetra[$] := false ; fi
+
+ % We define all paths as closed, so that they may be indexed mod length.
+ if path(e) :
+ chem_b_path[$] := e if not cycle(e) : -- cycle fi ;
+ chem_num0 := length(chem_b_path[$]) ;
+ else : % polygon
+ chem_num0 := e ;
+ chem_num1 := 360/chem_num0 ;
+ chem_b_path[$] :=
+ (
+ for i=0 upto chem_num0-1 :
+ dir(if chem_star[$] : -i else : (.5-i) fi *chem_num1) --
+ endfor
+ cycle
+ )
+ if chem_front[$] :
+ rotated (chem_num1-90)
+ fi
+ if not chem_star[$] :
+ scaled (.5/(sind .5chem_num1))
+ fi ;
+ fi ;
-numeric
- chem_width[], chem_angle[], chem_start[], chem_initialrot[], chem_initialmov[] ;
+ if chem_front[$] and (not known chem_front_b[$]) :
+ chem_front_b[$] := floor(.5(length chem_b_path[$])) + 1 ;
+ fi
-pair
- chem_stack_d[],
- chem_b_zero[], chem_n_zero[],
- chem_r_max[], chem_r_min[],
- chem_r_zero[], chem_mr_zero[], chem_pr_zero[], chem_crz_zero[],
- chem_rt_zero[], chem_rtt_zero[], chem_rbt_zero[],
- chem_mid_zero[] ;
+ chem_num2 := 0 ;
+ chem_c_path[$] :=
+ reverse(fullcircle) rotated angle(point 0 of chem_b_path[$])
+ if not chem_star[$] :
+ hide (for i=0 upto chem_num0-1:
+ if abs(point i+.5 of chem_b_path[$]) <
+ abs(point chem_num2+.5 of chem_b_path[$]) :
+ chem_num2 := i ;
+ fi
+ endfor)
+ scaled (2*(abs(point chem_num2+.5 of chem_b_path[$]) - chem_dbl_offset))
+ fi ;
-path
- chem_b_path[], chem_bx_path[], chem_eb_path[], chem_sr_path[], chem_br_path[],
- chem_sb_path[], chem_msb_path[], chem_psb_path[],
- chem_s_path[], chem_ss_path[], chem_mss_path[], chem_pss_path[],
- chem_e_path[], chem_sd_path[], chem_bb_path[], chem_oe_path[],
- chem_bd_path[], chem_bw_path[],
- chem_ddt_path[], chem_ddb_path[], chem_ldt_path[], chem_ldb_path[], chem_rdt_path[], chem_rdb_path[],
- chem_dbl_path[], chem_dbr_path[],
- chem_ad_path[], chem_au_path[],
- chem_r_path[], chem_rl_path[], chem_rr_path[],
- chem_rb_path[], chem_prb_path[], chem_mrb_path[],
- chem_srl_path[], chem_srr_path[],
- chem_msr_path[], chem_psr_path[],
- chem_mr_path[], chem_pr_path[],
- chem_c_path[], chem_cc_path[],
- chem_midt_path[], chem_midb_path[], chem_midst_path[], chem_midsb_path[] ;
-
-chem_setting_scale := 1 ;
-chem_base_width := 40pt ;
-chem_text_offset := 3pt ;
-chem_center_offset := 6pt ;
-chem_picture_offset := 10pt ;
-chem_substituent_offset := 10pt ;
-chem_radical_min := 1.25 ;
-chem_radical_max := 1.50 ;
-chem_text_min := 0.75 ;
-chem_text_max := 1.75 ;
-chem_circle_radius := 0.80 ;
-chem_circle_radius := 1.10 ;
-chem_rotation := 1 ;
-chem_adjacent := 0 ;
-chem_substituent := 0 ;
-chem_direction := 0 ;
-chem_stack_n := 0 ;
-chem_doing_pb := false ;
-chem_shift := origin ;
-chem_dot_factor := 4 ;
-chem_text_trace := false ;
-chem_bd_n := 4 ;
-chem_bw_n := 4 ;
-chem_bd_angle := 4 ;
-chem_bb_angle := 4 ;
-
-vardef chem_start_structure(expr n, l, r, t, b, scale, axis, fixedwidth, fixedheight, offset) =
- chem_setting_axis := axis ;
- chem_setting_l := l * scale ;
- chem_setting_r := r * scale ;
- chem_setting_t := t * scale ;
- chem_setting_b := b * scale ;
- chem_setting_fixedwidth := fixedwidth ;
- chem_setting_fixedheight := fixedheight ;
- chem_setting_offset := offset ;
- if scale <> chem_setting_scale :
- chem_setting_scale := scale ;
- chem_init_all ;
- fi ;
- chem_rotation := 1 ;
- chem_adjacent := 0 ;
- chem_substituent := 0 ;
- chem_direction := 0 ;
- chem_stack_n := 0 ;
- chem_doing_pb := false ;
- chem_shift := origin ;
-enddef ;
-
-def chem_stop_structure =
- currentpicture := currentpicture shifted - chem_shift ;
- % axis here
- if chem_setting_fixedwidth :
- chem_setting_l := - xpart llcorner currentpicture ;
- chem_setting_r := xpart urcorner currentpicture ;
+ chem_r_path[$] :=
+ if chem_star[$] :
+ chem_b_path[$]
+ else :
+ (
+ for i=0 upto chem_num0-1 :
+ (unitvector point i of chem_b_path[$])
+ shifted point i of chem_b_path[$] --
+ endfor
+ cycle
+ )
+ fi ;
+
+ chem_r_path.lft[$] :=
+ (
+ for i=0 upto chem_num0-1 :
+ if chem_front[$] :
+ up
+ scaled .5
+ shifted point i of chem_b_path[$]
+ elseif chem_star[$] :
+ point i of chem_b_path[$]
+ else :
+ point i+1 of chem_b_path[$]
+ rotatedabout(point i of chem_b_path[$],180)
+ fi --
+ endfor
+ cycle
+ ) ;
+ chem_r_path.rt[$] :=
+ (
+ for i=0 upto chem_num0-1 :
+ if chem_front[$] :
+ down
+ scaled .5
+ shifted point i of chem_b_path[$]
+ elseif chem_star[$] :
+ point i+2 of chem_b_path[$]
+ else :
+ point i-1 of chem_b_path[$]
+ rotatedabout(point i of chem_b_path[$],180)
+ fi --
+ endfor
+ cycle
+ ) ;
+
+enddef ;
+
+% The following is used only once:
+def chem_init_all =
+begingroup
+ save a, b, c, d, e ; numeric a, b, c, d, e ;
+ save lft, rt ; path lft, rt ;
+
+ % tetrahedrial angle
+ a := 2angle(1,sqrt 2) ;
+
+ % solve for chair
+ 2b = 180 - .5a ;
+ 4c = 180 - .5a ;
+ d + e = 360 - 2a ;
+ d = 5e ; % this is the one tunable parameter which fixes the perspective.
+ z2 = z1 shifted dir(90+a+d) ;
+ z3 = z2 shifted dir(270-a) ;
+ z4 = z3 shifted dir(90+a) ;
+ z6 = z1 shifted dir(90+a) ;
+ z5 = z6 shifted dir(270-a) ;
+ z4 = z1 xyscaled (-1,-1) ;
+ z5 = z2 xyscaled (-1,-1) ;
+
+ save indx ; numeric indx ; indx = 2 ; % starting value doesn't matter, really.
+ % polygons
+ three := incr indx ; % 3 (these numbers don't matter - they are just indices)
+ four := incr indx ; % 4
+ five := incr indx ; % 5
+ six := incr indx ; % 6
+ seven := incr indx ; % 7
+ eight := incr indx ; % 8
+ nine := incr indx ; % 9
+
+ chem_init_some(three,3) ;
+ chem_init_some(four, 4) ;
+ chem_init_some(five, 5) ;
+ chem_init_some(six, 6) ;
+ chem_init_some(seven,7) ;
+ chem_init_some(eight,8) ;
+ chem_init_some(nine, 9) ;
+
+ % star-form
+ one := incr indx ; % 10
+ carbon := incr indx ; % 11
+ alkyl := incr indx ; % 12
+ newmanstagger := incr indx ; % 13
+ newmaneclipsed := incr indx ; % 14
+
+ chem_star[one] := true ;
+ chem_star[carbon] := true ; chem_tetra[carbon] := true ;
+ chem_star[alkyl] := true ; chem_tetra[alkyl] := true ;
+ chem_star[newmanstagger] := true ; chem_tetra[newmanstagger] := true ;
+ chem_star[newmaneclipsed] := true ; chem_tetra[newmaneclipsed] := true ;
+ chem_stacked[newmanstagger] := true ;
+ chem_stacked[newmaneclipsed] := true ;
+ chem_init_some(one, 8) ;
+ chem_init_some(carbon, dir(0)--dir(360-a)--dir(180-.5a+b)--dir(180-.5a)) ;
+ chem_init_some(alkyl, dir(0)--dir(360-a)--dir(360-a-90)--dir(90)) ;
+ chem_init_some(newmanstagger, dir(30)--dir(270)--dir(150)--dir(330)--dir(210)--dir(90)) ;
+ chem_init_some(newmaneclipsed, dir(30)--dir(270)--dir(150)--dir(0)--dir(240)--dir(120)) ;
+
+ % front views
+ fivefront := incr indx ; % 15
+ sixfront := incr indx ; % 16
+ chair := incr indx ; % 17
+ boat := incr indx ; % 18
+
+ chem_front[fivefront] := true ; chem_front_b[fivefront] := 3 ;
+ chem_front[sixfront] := true ; chem_front_b[sixfront] := 3 ;
+ chem_init_some(fivefront,5) ;
+ chem_init_some(sixfront, 6) ;
+ % chair
+ chem_front[chair] := true ; chem_front_b[chair] := 4 ;
+ chem_init_some(chair, z1--z2--z3--z4--z5--z6) ;
+ lft := dir(90-a)--down--dir(90+a+d)--down--dir(90+a)--down ;
+ rt := up--dir(270+a)--up--dir(270-a)--up--dir(90+e) ;
+ chem_r_path.lft[chair] :=
+ for i=0 upto 5 : point i of lft shifted point i of chem_b_path[chair] -- endfor
+ cycle ;
+ chem_r_path.rt[chair] :=
+ for i=0 upto 5 : point i of rt shifted point i of chem_b_path[chair] -- endfor
+ cycle ;
+ % boat
+ chem_front[boat] := true ; chem_front_b[boat] := 4 ;
+ chem_init_some(boat,
+ for i=1 upto 4 : point i-1 of chem_b_path[sixfront] -- endfor
+ point 2 of chem_b_path[sixfront] yscaled .5 --
+ point 1 of chem_b_path[sixfront] yscaled .5
+ ) ;
+ lft := dir(30+.5a)--dir(330+.5a)--dir(210-.5a)--dir(150-.5a)--dir(120)--dir(60) ;
+ rt := dir(30-.5a)--dir(330-.5a)--dir(210+.5a)--dir(150+.5a)--dir(120+a)--dir(60-a) ;
+ chem_r_path.lft[boat] :=
+ for i=0 upto 5 : point i of lft shifted point i of chem_b_path[boat] -- endfor
+ cycle ;
+ chem_r_path.rt[boat] :=
+ for i=0 upto 5 : point i of rt shifted point i of chem_b_path[boat] -- endfor
+ cycle ;
+endgroup
+enddef ;
+
+chem_init_all ; % WHY does this not work unless defined and then called?
+
+
+% Like most often in ConTeXt, we will trap but then silently ignore mistaken use,
+% unless of course the error be too harmful...
+
+% \startchemical
+
+def chem_start_structure(expr i, l, r, t, b, rotation, unit, factor, offset, axis, rulethickness, axiscolor) =
+ save chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b ;
+
+ chem_emwidth := unit ;
+ chem_b_length := factor * unit ;
+ if numeric l :
+ chem_setting_l := -l ;
+ fi
+ if numeric r :
+ chem_setting_r := r ;
+ fi
+ if numeric t :
+ chem_setting_t := t ;
+ fi
+ if numeric b :
+ chem_setting_b := -b ;
+ fi
+ chem_setting_rotation := rotation ;
+ chem_setting_offset := offset ;
+ chem_setting_axis := if boolean axis : axis else : (axis<>0) fi ;
+ chem_axis_rulethickness := .75*(rulethickness) ; % axis 50% thinner than frame and bonds.
+ chem_axis_color := image(draw origin withcolor axiscolor) ; % so we handle all color models
+
+ chem_reset ;
+enddef ;
+
+% \stopchemical
+
+vardef chem_stop_structure =
+ % Make sure that all of the saved stack has been restored... (this was a gotcha!)
+ forever :
+ exitif chem_stack_n=0 ;
+ chem_restore ;
+ endfor
+
+ currentpicture := (currentpicture shifted -chem_origin) rotated chem_setting_rotation ;
+
+ save l, r, b, t ;
+ l := min(xpart llcorner currentpicture, xpart lrcorner currentpicture) ;
+ r := max(xpart llcorner currentpicture, xpart lrcorner currentpicture) ;
+ b := min(ypart llcorner currentpicture, ypart ulcorner currentpicture) ;
+ t := max(ypart llcorner currentpicture, ypart ulcorner currentpicture) ;
+
+ if unknown chem_setting_l : chem_setting_l := l ; fi
+ if unknown chem_setting_r : chem_setting_r := r ; fi
+ if unknown chem_setting_b : chem_setting_b := b ; fi
+ if unknown chem_setting_t : chem_setting_t := t ; fi
+
+ if chem_setting_axis : % put it behind the picture
+ chem_pic := currentpicture ; currentpicture := nullpicture ;
+ chem_num0 := .5chem_b_length ;
+ chem_num1 := .2chem_num0 ;
+ % draw the axes to the bounding box of the entire structure,
+ % not necessarily the bounding box of the final figure
+ draw (l,0) -- (r,0)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ draw (0,b) -- (0,t)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ for i = 0 step chem_num0 until r :
+ draw (i,-chem_num1) -- (i,chem_num1)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ endfor
+ for i = 0 step -chem_num0 until l :
+ draw (i,-chem_num1) -- (i,chem_num1)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ endfor
+ for i = 0 step chem_num0 until t :
+ draw (-chem_num1,i) -- (chem_num1,i)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ endfor
+ for i = 0 step -chem_num0 until b :
+ draw (-chem_num1,i) -- (chem_num1,i)
+ withpen pencircle scaled chem_axis_rulethickness withcolor colorpart(chem_axis_color) ;
+ endfor
+ addto currentpicture also chem_pic ;
fi ;
- if chem_setting_fixedheight :
- chem_setting_t := ypart urcorner currentpicture ;
- chem_setting_b := - ypart llcorner currentpicture ;
+ if chem_trace_boundingbox :
+ fill boundingbox currentpicture withcolor blue withtransparency(1,.25) ;
fi ;
- chem_setting_bbox :=
- (-chem_setting_l,-chem_setting_b) -- ( chem_setting_r,-chem_setting_b) --
- ( chem_setting_r, chem_setting_t) -- (-chem_setting_l, chem_setting_t) -- cycle ;
- % maybe put it behind the picture
- if chem_setting_axis :
- save stp ; stp := chem_base_width/ 2 * chem_setting_scale ;
- save siz ; siz := chem_base_width/10 * chem_setting_scale ;
- draw (-chem_setting_l,0) -- (chem_setting_r,0) withcolor blue ;
- draw (0,-chem_setting_b) -- (0,chem_setting_t) withcolor blue ;
- for i = 0 step stp until chem_setting_r : draw (i,-siz) -- (i,siz) withcolor blue ; endfor ;
- for i = 0 step -stp until -chem_setting_l : draw (i,-siz) -- (i,siz) withcolor blue ; endfor ;
- for i = 0 step stp until chem_setting_t : draw (-siz,i) -- (siz,i) withcolor blue ; endfor ;
- for i = 0 step -stp until -chem_setting_b : draw (-siz,i) -- (siz,i) withcolor blue ; endfor ;
- draw chem_setting_bbox withcolor blue ;
+ setbounds currentpicture to
+ ((chem_setting_l,chem_setting_b) -- (chem_setting_r,chem_setting_b) --
+ (chem_setting_r,chem_setting_t) -- (chem_setting_l,chem_setting_t) -- cycle) ;
+ if chem_trace_boundingbox :
+ fill boundingbox currentpicture withcolor red withtransparency(1,.25) ;
fi ;
- setbounds currentpicture to chem_setting_bbox ;
enddef ;
-def chem_start_component = enddef ;
-def chem_stop_component = enddef ;
+% \chemical
+
+vardef chem_start_component = enddef ;
+vardef chem_stop_component = enddef ;
-def chem_pb =
-% draw boundingbox currentpicture withpen pencircle scaled 1mm withcolor blue ;
-% draw origin withpen pencircle scaled 2mm withcolor blue ;
+vardef chem_pb = % PB :
+ if chem_trace_nesting :
+ draw boundingbox currentpicture
+ withpen pencircle scaled 1mm withcolor colorpart(chem_axis_color) ;
+ draw origin withpen pencircle scaled 2mm withcolor colorpart(chem_axis_color) ;
+ fi ;
chem_doing_pb := true ;
enddef ;
-def chem_pe =
-% draw boundingbox currentpicture withpen pencircle scaled .5mm withcolor red ;
-% draw origin withpen pencircle scaled 1mm withcolor red ;
- currentpicture := currentpicture shifted - chem_shift ;
-% draw origin withpen pencircle scaled .5mm withcolor green ;
- chem_shift := origin ;
+vardef chem_pe = % PE
+ if chem_trace_nesting :
+ draw boundingbox currentpicture withpen pencircle scaled .5mm withcolor red ;
+ draw origin withpen pencircle scaled 1mm withcolor red ;
+ fi ;
+ currentpicture := currentpicture shifted -chem_origin ;
+ if chem_trace_nesting :
+ draw origin withpen pencircle scaled .5mm withcolor green ;
+ fi ;
+ chem_origin := origin ;
chem_doing_pb := false ;
enddef ;
-vardef chem_do (expr p) =
- if chem_doing_pb :
+vardef chem_do (expr pos) =
+ if (unknown chem_doing_pb) or (not chem_doing_pb) :
+ pos
+ else :
chem_doing_pb := false ;
-% save pp ; pair pp ; pp := point 1 of ((origin -- p) enlonged chem_picture_offset) ;
-% currentpicture := currentpicture shifted - pp ;
-% chem_shift := chem_shift - center pp ;
- currentpicture := currentpicture shifted - p ;
- chem_shift := chem_shift - p ;
+ currentpicture := currentpicture shifted -pos ;
+ chem_origin := chem_origin shifted -pos ;
origin % nullpicture
- else :
- p
fi
enddef ;
-vardef chem_b (expr n, f, t, r, c) =
- chem_draw (n, chem_b_path[n], f, t, r, c) ;
-enddef ;
-vardef chem_sb (expr n, f, t, r, c) =
- chem_draw (n, chem_sb_path[n], f, t, r, c) ;
+picture chem_stack_p[] ;
+pair chem_stack_origin[], chem_stack_mirror[] ;
+numeric chem_stack_rotation[] ;
+string chem_stack_previous[] ;
+
+vardef chem_save = % SAVE
+ chem_stack_p [incr chem_stack_n] := currentpicture ;
+ chem_stack_origin [ chem_stack_n] := chem_origin ; chem_origin := origin ;
+ chem_stack_rotation[ chem_stack_n] := chem_rotation ;
+ chem_stack_mirror [ chem_stack_n] := chem_mirror ;
+ chem_stack_previous[ chem_stack_n] := chem_previous ;
+ currentpicture := nullpicture ;
enddef ;
-vardef chem_s (expr n, f, t, r, c) =
- chem_draw (n, chem_s_path[n], f, t, r, c) ;
+vardef chem_restore = % RESTORE
+ if chem_stack_n>0 :
+ currentpicture := currentpicture shifted -chem_origin ;
+ addto chem_stack_p [chem_stack_n] also currentpicture ;
+ currentpicture := chem_stack_p [chem_stack_n] ;
+ chem_stack_p[chem_stack_n] := nullpicture ;
+ chem_origin := chem_stack_origin [chem_stack_n] ;
+ chem_rotation := chem_stack_rotation[chem_stack_n] ;
+ chem_mirror := chem_stack_mirror [chem_stack_n] ;
+ chem_previous := chem_stack_previous[chem_stack_n] ;
+ chem_stack_n := chem_stack_n - 1 ;
+ fi ;
enddef ;
-vardef chem_ss (expr n, f, t, r, c) =
- chem_draw (n, chem_ss_path[n], f, t, r, c) ;
-enddef ;
+% chem_adj and chem_sub are to be followed by chem_set(n) which does all the work...
-vardef chem_mid (expr n, r, c) =
- chem_draw_fixed (n, chem_midt_path[n], r, c) ;
- chem_draw_fixed (n, chem_midb_path[n], r, c) ;
+vardef chem_adj (suffix $) (expr d, s) = % ADJ
+ % scale s is ignored (for now?)
+ if not chem_front[$] :
+ chem_substituent := 0 ;
+ chem_substituent.lft := 0 ;
+ chem_substituent.rt := 0 ;
+ chem_adjacent := d ;
+ fi
enddef ;
-vardef chem_mids (expr n, r, c) =
- chem_draw_fixed (n, chem_midst_path[n], r, c) ;
- chem_draw_fixed (n, chem_midsb_path[n], r, c) ;
+vardef chem_lsub (suffix $) (expr d, s) = % LSUB
+ chem_sub.lft($,d,s) ;
enddef ;
-vardef chem_mss (expr n, f, t, r, c) =
- chem_draw (n, chem_mss_path[n], f, t, r, c) ;
+vardef chem_rsub (suffix $) (expr d, s) = % RSUB
+ chem_sub.rt ($,d,s) ;
enddef ;
-vardef chem_pss (expr n, f, t, r, c) =
- chem_draw (n, chem_pss_path[n], f, t, r, c) ;
+vardef chem_sub@# (suffix $) (expr d, s) = % SUB
+ % scale s is ignored (for now?)
+ chem_adjacent := 0 ;
+ chem_substituent := 0 ;
+ chem_substituent.lft := 0 ;
+ chem_substituent.rt := 0 ;
+ % then :
+ chem_substituent@# := d ;
enddef ;
-vardef chem_msb (expr n, f, t, r, c) =
- chem_draw (n, chem_msb_path[n], f, t, r, c) ;
+def chem_transformed (suffix $) = % not vardef!
+ scaled chem_b_length
+ if not chem_front[$] :
+ if chem_mirror<>origin : reflectedabout(origin,chem_mirror) fi
+ rotated chem_rotation
+ fi
enddef ;
-vardef chem_psb (expr n, f, t, r, c) =
- chem_draw (n, chem_psb_path[n], f, t, r, c) ;
+vardef chem_draw (expr what, r, c) (text extra) =
+ draw what
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
+ extra ;
enddef ;
-vardef chem_eb (expr n, f, t, r, c) =
- chem_draw (n, chem_eb_path[n], f, t, r, c) ;
+vardef chem_fill (expr what, r, c) (text extra) =
+ fill what
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
+ extra ;
enddef ;
-vardef chem_db (expr n, f, t, r, c) =
- if n = 1 :
- chem_draw (n, chem_msb_path [n], f, t, r, c) ;
- chem_draw (n, chem_psb_path [n], f, t, r, c) ;
- else :
- chem_draw (n, chem_dbl_path [n], f, t, r, c) ;
- chem_draw (n, chem_dbr_path [n], f, t, r, c) ;
- fi ;
+vardef chem_drawarrow (expr what, r, c) (text extra) =
+ drawarrow what
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
+ extra ;
enddef ;
-vardef chem_er (expr n, f, t, r, c) =
- chem_draw (n, chem_rl_path[n], f, t, r, c) ;
- chem_draw (n, chem_rr_path[n], f, t, r, c) ;
-enddef ;
+vardef chem_set (suffix $) =
+ forsuffixes P = scantokens chem_previous :
-vardef chem_dr (expr n, f, t, r, c) =
- chem_draw (n, chem_srl_path[n], f, t, r, c) ;
- chem_draw (n, chem_srr_path[n], f, t, r, c) ;
-enddef ;
+ % This is a fairly complicated optimization and adjustment. It took some
+ % thinking to get right, so beware!
-vardef chem_ad (expr n, f, t, r, c) =
- chem_draw_arrow(n, chem_ad_path[n], f, t, r, c) ;
-enddef ;
+ if (chem_adjacent<>0) and chem_star[P] and chem_star[$] :
+ % nop
+ chem_adjacent := 0 ;
+ elseif (chem_adjacent<>0) and (chem_front[P] or chem_front[$]) :
+ % not allowed for FRONT
+ chem_adjacent := 0 ;
+ elseif chem_adjacent<>0 :
+ chem_substituent := 0 ;
+ chem_substituent.lft := 0 ;
+ chem_substituent.rt := 0 ;
+ % move to the bond midpoint of the first structure
+ chem_pair0 := center (
+ if chem_star[P] :
+ origin -- point (chem_adjacent-1)
+ else :
+ subpath (chem_adjacent-1,chem_adjacent)
+ fi
+ of chem_b_path[P]
+ ) chem_transformed(P) ;
+ % find the closest opposite bond of the second structure
+ chem_pair1 := chem_pair0 rotated if chem_star[P] : 90 else : 180 fi ;
+ chem_num0 := abs(chem_pair1) ;
+ chem_num1 := if chem_tetra[$] : 1 else : length chem_b_path[$] fi ;
+ % only consider even indices (cardinal points) for ONE
+ chem_num2 := if chem_star[$] and not chem_tetra[$] : 2 else : 1 fi ;
+ for i=0 step chem_num2 until chem_num1 :
+ chem_pair2 := (
+ (
+ unitvector
+ center (
+ if chem_star[$] :
+ origin -- point i
+ else :
+ subpath (i,i+1)
+ fi
+ of chem_b_path[$])
+ )
+ scaled chem_num0
+ ) chem_transformed($) ;
+ if i=0 :
+ chem_pair3 := chem_pair2 ;
+ chem_num3 := 0 ;
+ elseif (abs(chem_pair1 shifted -chem_pair2)) < (abs(chem_pair1 shifted -chem_pair3)) :
+ chem_pair3 := chem_pair2 ;
+ chem_num3 := i ;
+ fi
+ endfor
+ if chem_star[$] :
+ chem_pair4 := chem_pair0 shifted
+ -((point (chem_adjacent-1) of chem_b_path[P]) chem_transformed(P)) ;
+ fi
+ % adjust the bond angles
+ chem_rotation := (chem_rotation + angle(chem_pair1)-angle(chem_pair3)) mod 360 ;
+ if not chem_star[$] :
+ chem_pair4 :=
+ if chem_star[P] :
+ (point chem_num3
+ else :
+ center(subpath (chem_num3,chem_num3+1)
+ fi
+ of chem_b_path[$])
+ chem_transformed($) ;
+ fi
+ if not chem_star[P] :
+ chem_pair4 := chem_pair4 shifted -chem_pair0 ;
+ fi
+ currentpicture := currentpicture shifted chem_pair4 ;
+ chem_origin := chem_origin shifted chem_pair4 ;
+ chem_adjacent := 0 ;
+ fi ;
-vardef chem_au (expr n, f, t, r, c) =
- chem_draw_arrow(n, chem_au_path[n], f, t, r, c)
+ % Ensure that at most one of them is nonzero
+ if ((chem_substituent <> 0) and (chem_substituent.lft <> 0)) or
+ ((chem_substituent <> 0) and (chem_substituent.rt <> 0)) or
+ ((chem_substituent.lft <> 0) and (chem_substituent.rt <> 0)) :
+ chem_substituent := 0 ;
+ chem_substituent.lft := 0 ;
+ chem_substituent.rt := 0 ;
+ fi
+ if (chem_substituent <> 0) or (chem_substituent.lft <> 0) or (chem_substituent.rt <> 0) :
+ % move origin to radical endpoint of the first structure
+ if chem_substituent.lft > 0 :
+ chem_pair0 := point chem_substituent.lft-1 of chem_r_path.lft[P] ;
+ chem_substituent := chem_substituent.lft ;
+ chem_substituent.lft := 0 ;
+ elseif chem_substituent.rt > 0 :
+ chem_pair0 := point chem_substituent.rt-1 of chem_r_path.rt[P] ;
+ chem_substituent := chem_substituent.rt ;
+ chem_substituent.rt := 0 ;
+ else :
+ chem_pair0 := point chem_substituent-1 of chem_r_path[P] ;
+ fi
+ chem_pair1 := chem_pair0 if not chem_star[P] :
+ shifted -(point chem_substituent-1 of chem_b_path[P]) fi ;
+ chem_t := identity chem_transformed(P) ;
+ chem_pair0 := chem_pair0 transformed chem_t ; % radical
+ chem_pair1 := chem_pair1 transformed chem_t ; % recentered (see below)
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
+ if (not (chem_star[P] and chem_star[$])) or chem_tetra[P] or chem_tetra[$] :
+ if chem_tetra[P] and chem_tetra[$] and ((chem_substituent=1) or (chem_substituent=2)):
+ chem_rotation := (chem_rotation + 180) mod 360 ; % trans-alkane
+ chem_pair2 := (point .5 of chem_b_path[$]) ; % bisector, not chem_transformed
+ if chem_mirror=origin :
+ chem_mirror := chem_pair2 ;
+ else :
+ chem_num0 := angle(chem_mirror)-angle(chem_pair2) ;
+ if (chem_num0>0) and (chem_num0> 180) :
+ chem_num0 := 360 - chem_num0 ;
+ elseif (chem_num0<0) and (chem_num0<-180) :
+ chem_num0 := -360 - chem_num0 ;
+ fi
+ chem_rotation := (chem_rotation + 2chem_num0) mod 360 ;
+ chem_mirror := origin ;
+ fi
+ fi
+ chem_t := identity chem_transformed($) ;
+ chem_pair1 := chem_pair1 rotated 180 ; % opposite direction of radical bond
+ % find the closest node
+ chem_num0 := abs(chem_pair1) ; % distance
+ % search for the nearest node of $; only consider 1 and 2 for CARBON,ALKYL
+ chem_num1 := if chem_tetra[$] : 1 else : length chem_b_path[$] fi ;
+ % only consider even indices (cardinal points) for ONE
+ chem_num2 := if chem_star[$] and not chem_tetra[$] : 2 else : 1 fi ;
+ for i=0 step chem_num2 until chem_num1 :
+ chem_pair2 := (unitvector(point i of chem_b_path[$]) scaled chem_num0)
+ transformed chem_t ;
+ if i=0 :
+ chem_pair3 := chem_pair2 ;
+ chem_num3 := 0 ;
+ elseif (abs(chem_pair1 shifted -chem_pair2)) <
+ (abs(chem_pair1 shifted -chem_pair3)) :
+ chem_pair3 := chem_pair2 ;
+ chem_num3 := i ;
+ fi
+ endfor
+ if not chem_front[$] : % adjust rotation
+ chem_rotation := (chem_rotation + angle(chem_pair1)-angle(chem_pair3)) mod 360 ;
+ fi ;
+ chem_t := identity chem_transformed($) ;
+ chem_pair4 := (point chem_num3 of chem_b_path[$]) transformed chem_t ;
+ if not chem_star[$] :
+ currentpicture := currentpicture shifted chem_pair4 ;
+ chem_origin := chem_origin shifted chem_pair4 ;
+ fi
+ fi
+ chem_substituent := 0 ;
+ fi ;
+ endfor
+ chem_previous := str $ ;
enddef ;
-vardef chem_r (expr n, f, t, r, c) =
- if n < 0 :
- chem_draw_vertical (n, chem_r_path[n], f, t, r, c) ;
+% line (f_rom, t_o, r_ule, c_olor)
+
+vardef chem_b@# (suffix $) (expr f, t, r, c) = % B
+ if chem_star[$] :
+ chem_r@#($,f,t,r,c) ;
+ elseif length(str @#)>0 :
+ chem_sb@#($,f,t,r,c) ;
else :
- chem_draw (n, chem_r_path[n], f, t, r, c) ;
- fi ;
+ chem_draw(
+ (subpath (f-1,t) of chem_b_path[$]) chem_transformed($),
+ r,c,) ;
+ fi
enddef ;
-vardef chem_rd (expr n, f, t, r, c) =
- chem_dashed_normal (n, chem_r_path[n], f, t, r, c)
+vardef chem_sb@# (suffix $) (expr f, t, r, c) = % SB
+ if chem_star[$] :
+ chem_sr@#($,f,t,r,c) ;
+ else :
+ %chem_draw(
+ % (subpath (f-1,t) of chem_b_path[$]) chem_transformed($),
+ % r,c,dashed chem_sb_dash@# scaled chem_b_length) ;
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_mrd (expr n, f, t, r, c) =
- chem_dashed_normal (n, chem_mr_path[n], f, t, r, c)
+vardef chem_sd@# (suffix $) (expr f, t, r, c) = % SD
+ if chem_star[$] :
+ chem_rd@#($,f,t,r,c) ;
+ else :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_prd (expr n, f, t, r, c) =
- chem_dashed_normal (n, chem_pr_path[n], f, t, r, c)
+vardef chem_r_fragment@# (suffix $) (expr i) =
+ (
+ if chem_star[$] :
+ origin
+ else :
+ point i-1 of chem_b_path[$]
+ fi --
+ point i-1 of chem_r_path@#[$]
+ ) % no ;
enddef ;
-vardef chem_br (expr n, f, t, r, c) =
- chem_fill (n, chem_br_path[n], f, t, r, c )
+vardef chem_r@# (suffix $) (expr f, t, r, c) = % R
+ if length(str @#)>0 :
+ chem_sr@#($,f,t,r,c) ;
+ else :
+ chem_sr.b($,f,t,r,c) ;
+ fi
enddef ;
-vardef chem_rb (expr n, f, t, r, c) =
- chem_fill (n, chem_rb_path[n], f, t, r, c)
+vardef chem_er@# (suffix $) (expr f, t, r, c) = % ER
+ if length(str @#)>0:
+ chem_dr@#($,f,t,r,c) ;
+ else :
+ chem_dr.b($,f,t,r,c) ;
+ fi
enddef ;
-vardef chem_mrb (expr n, f, t, r, c) =
- chem_fill (n, chem_mrb_path[n], f, t, r, c)
+vardef chem_dr@# (suffix $) (expr f, t, r, c) = % DR
+ if not chem_front[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := (subpath chem_sb_pair@# of chem_r_fragment($,i)) ;
+ chem_draw(
+ (chem_path0 paralleled chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled -chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_prb (expr n, f, t, r, c) =
- chem_fill (n, chem_prb_path[n], f, t, r, c)
+vardef chem_lr@# (suffix $) (expr f, t, r, c) = % LR
+ if length(str @#)>0 :
+ chem_lsr@#($,f,t,r,c) ;
+ else :
+ chem_lsr.b($,f,t,r,c) ;
+ fi
enddef ;
-vardef chem_mr (expr n, f, t, r, c) =
- if n < 0 :
- chem_draw_vertical(n, chem_mr_path[n], f, t, r, c)
+vardef chem_rr@# (suffix $) (expr f, t, r, c) = % RR
+ if length(str @#)>0 :
+ chem_rsr@#($,f,t,r,c) ;
else :
- chem_draw (n, chem_mr_path[n], f, t, r, c)
+ chem_rsr.b($,f,t,r,c) ;
fi
enddef ;
-vardef chem_pr (expr n, f, t, r, c) =
- if n < 0 :
- chem_draw_vertical(n, chem_pr_path[n], f, t, r, c)
- else :
- chem_draw (n, chem_pr_path[n], f, t, r, c)
+vardef chem_eb@# (suffix $) (expr f, t, r, c) = % EB
+ if not chem_star[$] :
+ %chem_draw(
+ % ((subpath (f-1,t) of chem_b_path[$]) paralleled -2chem_dbl_offset)
+ % chem_transformed($),
+ % r,c,dashed chem_sb_dash scaled chem_b_length) ;
+ for i=f upto t :
+ chem_t := identity chem_transformed($) ;
+ chem_draw(
+ ((subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ paralleled -2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
fi
enddef ;
-vardef chem_sr (expr n, f, t, r, c) =
- chem_draw (n, chem_sr_path[n], f, t, r, c)
+vardef chem_ad@# (suffix $) (expr f, t, r, c) = % AD
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_drawarrow(
+ ((subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ paralleled 2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_msr (expr n, f, t, r, c) =
- chem_draw (n, chem_msr_path[n], f, t, r, c)
+vardef chem_au@# (suffix $) (expr f, t, r, c) = % AU
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_drawarrow(
+ reverse(
+ (subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ paralleled 2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_psr (expr n, f, t, r, c) =
- chem_draw (n, chem_psr_path[n], f, t, r, c)
+vardef chem_es@# (suffix $) (expr f, t, r, c) = % ES
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ ((point i-1 of chem_r_path[$]) scaled (xpart chem_sb_pair)) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_c (expr n, f, t, r, c) =
- chem_draw (n, chem_c_path[n], f, t, r, c)
+vardef chem_ed@# (suffix $) (expr f, t, r, c) = % ED
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ if chem_star[$] :
+ chem_path0 := subpath chem_sb_pair of chem_r_fragment($,i) ;
+ chem_draw(
+ (point 0 of (chem_path0 paralleled -chem_dbl_offset)) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ chem_draw(
+ (point 0 of (chem_path0 paralleled chem_dbl_offset)) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ else :
+ chem_draw(
+ ((subpath (chem_sb_pair shifted (i-1,i-1)) of chem_b_path[$])
+ paralleled -2chem_dbl_offset) transformed chem_t,
+ r,c,dashed evenly) ;
+ fi
+ endfor
+enddef ;
+
+vardef chem_ep@# (suffix $) (expr f, t, r, c) = % EP
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair of chem_r_fragment($,i) ;
+ chem_draw(
+ (point 0 of (chem_path0 paralleled -chem_dbl_offset) --
+ point 0 of (chem_path0 paralleled chem_dbl_offset)) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_cc (expr n, f, t, r, c) =
- chem_draw (n, chem_cc_path[n], f, f, r, c)
+vardef chem_et@# (suffix $) (expr f, t, r, c) = % ET
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair of chem_r_fragment($,i) ;
+ chem_draw(
+ (point 0 of (chem_path0 paralleled -2chem_dbl_offset)) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ chem_draw(
+ (point 0 of chem_path0) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ chem_draw(
+ (point 0 of (chem_path0 paralleled 2chem_dbl_offset)) transformed chem_t,
+ chem_dot_factor*r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_cd (expr n, f, t, r, c) =
- chem_dashed_connected (n, chem_c_path[n], f, t, r, c)
+vardef chem_db@# (suffix $) (expr f, t, r, c) = % DB
+ if chem_star[$] :
+ chem_dr@#($,f,t,r,c) ;
+ elseif not chem_front[$] :
+ chem_t := identity chem_transformed($) ;
+ %chem_draw(
+ % ((subpath (f-1,t) of chem_b_path[$]) paralleled -chem_dbl_offset)
+ % transformed chem_t,
+ % r,c,dashed chem_sb_dash@# scaled chem_b_length) ;
+ %chem_draw(
+ % ((subpath (f-1,t) of chem_b_path[$]) paralleled chem_dbl_offset)
+ % transformed chem_t,
+ % r,c,dashed chem_sb_dash@# scaled chem_b_length) ;
+ for i=f upto t :
+ chem_path0 := subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$] ;
+ chem_draw(
+ (chem_path0 paralleled -chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ % todo : this should be cut off where it overlaps a neighboring standard bond.
+ endfor
+ fi
enddef ;
-vardef chem_ccd (expr n, f, t, r, c) =
- chem_dashed_normal (n, chem_cc_path[n], f, f, r, c)
+vardef chem_tb@# (suffix $) (expr f, t, r, c) = % TB
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_draw(
+ (chem_path0 paralleled -2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ chem_path0 transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled 2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_rn (expr n, i, t) =
- chem_rt (n,i,t) ;
+vardef chem_sr@# (suffix $) (expr f, t, r, c) = % SR
+ chem_t := identity chem_transformed($) ;
+ if chem_stacked[$] :
+ chem_num0 := length chem_b_path[$] ; chem_num1 := floor(.5chem_num0) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath (if i>chem_num1: .5,ypart fi chem_sb_pair@#) of chem_r_fragment($,i))
+ transformed chem_t,
+ r,c,) ;
+ endfor
+ else :
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment($,i))
+ transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_rtn (expr n, i, t) =
- chem_rtt(n,i,t) ;
+vardef chem_rd@# (suffix $) (expr f, t, r, c) = % RD
+ chem_t := identity chem_transformed($) ;
+ if chem_stacked[$] :
+ chem_num0 := length chem_b_path[$] ; chem_num1 := floor(.5chem_num0) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath (if i>chem_num1: .5,ypart fi chem_sb_pair@#) of chem_r_fragment($,i))
+ transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ else :
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment($,i))
+ transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_rbn (expr n, i, t) =
- chem_rbt(n,i,t) ;
+vardef chem_rh@# (suffix $) (expr f, t, r, c) = % RH
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment($,i))
+ transformed chem_t,
+ chem_dot_factor*r,c,dashed withdots scaled ((.5chem_b_length/3)/5bp)) ;
+ % not symmetric - needs to be tweaked...
+ endfor
+enddef ;
+
+vardef chem_lrh@# (suffix $) (expr f, t, r, c) = % LRH
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.lft($,i))
+ transformed chem_t,
+ chem_dot_factor*r,c,dashed withdots scaled ((.5chem_b_length/3)/5bp)) ;
+ % not symmetric - needs to be tweaked...
+ endfor
+enddef ;
+
+vardef chem_rrh@# (suffix $) (expr f, t, r, c) = % RRH
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.rt($,i))
+ transformed chem_t,
+ chem_dot_factor*r,c,dashed withdots scaled ((.5chem_b_length/3)/5bp)) ;
+ % not symmetric - needs to be tweaked...
+ endfor
+enddef ;
+
+vardef chem_hb@# (suffix $) (expr f, t, r, c) = % HB
+ if chem_star[$] :
+ chem_rh@#($,f,t,r,c)
+ fi
enddef ;
-vardef chem_tb (expr n, f, t, r, c) = % one
- chem_draw (n, chem_msb_path[n], f, t, r, c) ;
- chem_draw (n, chem_sb_path [n], f, t, r, c) ;
- chem_draw (n, chem_psb_path[n], f, t, r, c) ;
+vardef chem_bb@# (suffix $) (expr f, t, r, c) = % BB
+ if chem_star[$] :
+ chem_rb@#($,f,t,r,c) ;
+ elseif chem_front[$] :
+ chem_t := identity chem_transformed($) ;
+ chem_draw(
+ (subpath (f-1,t) of chem_b_path[$]) transformed chem_t,
+ r,c,) ;
+ chem_num0 := length chem_b_path[$] ; % total number of bonds
+ chem_num1 := chem_front_b[$] ; % number of bonds to be made bold
+ % bold bonds within f and t
+ chem_num2 := if f<0 :((f+1) mod chem_num0) + chem_num0 else : ((f-1) mod chem_num0) + 1 fi ;
+ chem_num3 := if t<0 :((t+1) mod chem_num0) + chem_num0 else : ((t-1) mod chem_num0) + 1 fi ;
+ if chem_num3<chem_num2 :
+ chem_num4 := chem_num3 ;
+ chem_num3 := chem_num2 ;
+ chem_num2 := chem_num4 ;
+ fi
+ if chem_num2<chem_num1 : % Are there any bonds to be made bold?
+ if chem_num2=1 : % Skip the first bold bond.
+ chem_fill(
+ (point chem_num2-1 of chem_b_path[$] --
+ point chem_num2 of chem_b_path[$] shifted (0,-chem_dbl_offset) --
+ point chem_num2 of chem_b_path[$] shifted (0, chem_dbl_offset) --
+ cycle) transformed chem_t,
+ r,c,) ;
+ fi
+ if (chem_num2<=chem_num1-1) and (chem_num3>1) :
+ chem_path0 := subpath (if chem_num2>2 : chem_num2-1 else : 1 fi,
+ if chem_num3<chem_num1 : chem_num3 else : chem_num1-1 fi)
+ of chem_b_path[$] ;
+ chem_fill(
+ (chem_path0 paralleled -chem_dbl_offset --
+ reverse(chem_path0) paralleled -chem_dbl_offset --
+ cycle) transformed chem_t,
+ r,c,) ;
+ fi
+ if chem_num3>=chem_num1 :
+ chem_fill(
+ (point chem_num1 of chem_b_path[$] --
+ point chem_num1-1 of chem_b_path[$] shifted (0,-chem_dbl_offset) --
+ point chem_num1-1 of chem_b_path[$] shifted (0, chem_dbl_offset) --
+ cycle) transformed chem_t,
+ r,c,) ;
+ fi
+ fi
+ fi
enddef ;
-vardef chem_ep (expr n, f, t, r, c) = % one
- chem_draw (n, chem_e_path[n], f, t, r, c) ;
+vardef chem_rb@# (suffix $) (expr f, t, r, c) = % RB
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_fill(
+ (point 0 of chem_path0 --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, -chem_bb_angle) --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, chem_bb_angle) --
+ cycle) transformed chem_t,
+ r,c,) ;
+ endfor
+enddef ;
+
+vardef chem_lrb@# (suffix $) (expr f, t, r, c) = % LRB
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment.lft($,i) ;
+ chem_fill(
+ (point 0 of chem_path0 --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, -chem_bb_angle) --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, chem_bb_angle) --
+ cycle) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_es (expr n, f, t, r, c) = % one
- chem_draw_dot (n, center chem_e_path[n], f, t, r, c) ;
+vardef chem_rrb@# (suffix $) (expr f, t, r, c) = % RRB
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment.rt($,i) ;
+ chem_fill(
+ (point 0 of chem_path0 --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, -chem_bb_angle) --
+ point 1 of chem_path0
+ rotatedaround(point 0 of chem_path0, chem_bb_angle) --
+ cycle) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_ed (expr n, f, t, r, c) = % one
- chem_draw_dot (n, point 0 of chem_e_path[n], f, t, r, c) ;
- chem_draw_dot (n, point 1 of chem_e_path[n], f, t, r, c) ;
+vardef chem_lsr@# (suffix $) (expr f, t, r, c) = % LSR
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.lft($,i)) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_et (expr n, f, t, r, c) = % one
- chem_draw_dot (n, point 0 of chem_e_path[n], f, t, r, c) ;
- chem_draw_dot (n, center chem_e_path[n], f, t, r, c) ;
- chem_draw_dot (n, point 1 of chem_e_path[n], f, t, r, c) ;
+vardef chem_rsr@# (suffix $) (expr f, t, r, c) = % RSR
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.rt($,i)) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_sd (expr n, f, t, r, c) = % one
- chem_draw (n, chem_ddt_path[n], f, t, r, c) ;
- chem_draw (n, chem_ddb_path[n], f, t, r, c) ;
+vardef chem_lrd@# (suffix $) (expr f, t, r, c) = % LRD
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.lft($,i)) transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_rdd (expr n, f, t, r, c) = % one
- chem_draw (n, chem_ldt_path[n], f, t, r, c) ;
- chem_draw (n, chem_ldb_path[n], f, t, r, c) ;
- chem_draw (n, chem_psb_path[n], f, t, r, c) ;
+vardef chem_rrd@# (suffix $) (expr f, t, r, c) = % RRD
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of chem_r_fragment.rt($,i)) transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_ldd (expr n, f, t, r, c) = % one
- chem_draw (n, chem_msb_path[n], f, t, r, c) ;
- chem_draw (n, chem_rdt_path[n], f, t, r, c) ;
- chem_draw (n, chem_rdb_path[n], f, t, r, c) ;
+vardef chem_s@# (suffix $) (expr f, t, r, c) = % S
+ if length(str @#)>0 :
+ chem_ss@#($,f,t,r,c) ;
+ else :
+ chem_ss.b($,f,t,r,c) ;
+ fi
enddef ;
-vardef chem_hb (expr n, f, t, r, c) = % one
- chem_draw_dot (n, point 0 of chem_sb_path[n], f, t, r, c) ;
- chem_draw_dot (n, center chem_sb_path[n], f, t, r, c) ;
- chem_draw_dot (n, point 1 of chem_sb_path[n], f, t, r, c) ;
+vardef chem_ss@# (suffix $) (expr f, t, r, c) = % SS
+ if not (chem_star[$] or chem_front[$]) :
+ chem_draw(
+ subpath chem_sb_pair@# of (point f-2 of chem_b_path[$] -- point t of chem_b_path[$])
+ chem_transformed($),
+ r,c,) ;
+ fi
enddef ;
-vardef chem_bb (expr n, f, t, r, c) = % one
- if n < 0 :
- chem_fill (n, chem_bb_path[n], 1, 1, r, c) ;
- chem_b (n, f, t, r, c) ;
+vardef chem_mid@# (suffix $) (expr f, t, r, c) = % MID
+ if length(str @#)>0 :
+ chem_mids@#($,f,t,r,c) ;
else :
- chem_fill (n, chem_bb_path[n], f, t, r, c) ;
- fi ;
+ chem_mids.b($,f,t,r,c) ;
+ fi
enddef ;
-vardef chem_oe (expr n, f, t, r, c) = % one
- chem_draw (n, chem_oe_path[n], f, t, r, c) ;
+vardef chem_mids@# (suffix $) (expr f, t, r, c) = % MIDS
+ if not (chem_star[$] or chem_front[$]) :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath chem_sb_pair@# of (origin -- point i-1 of chem_b_path[$]))
+ transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_bd (expr n, f, t, r, c) = % one
- for i=0 upto 5 :
- chem_draw (n, subpath (2i,2i+1) of chem_bd_path[n], f, t, r, c) ;
- endfor ;
+vardef chem_cd (suffix $) (expr r, c) = % CD
+ chem_draw(
+ chem_c_path[$] chem_transformed($),
+ r,c,dashed evenly) ;
enddef ;
-vardef chem_bw (expr n, f, t, r, c) = % one
- chem_draw (n, chem_bw_path[n], f, t, r, c) ;
+vardef chem_c (suffix $) (expr r, c) = % C
+ chem_draw(
+ chem_c_path[$] chem_transformed($),
+ r,c,) ;
enddef ;
-vardef chem_z_zero@#(text t) =
- chem_text@#(t, chem_do(origin)) ;
+vardef chem_ccd (suffix $) (expr f, t, r, c) = % CCD
+ chem_num0 := ypart((origin--center(subpath (f-2,f-1) of chem_b_path[$]))
+ intersectiontimes chem_c_path[$]) ;
+ chem_num1 := ypart((origin--center(subpath (t-1,t) of chem_b_path[$]))
+ intersectiontimes chem_c_path[$]) ;
+ if chem_num1>chem_num0 :
+ chem_num0 := chem_num0 + length chem_c_path[$] ;
+ fi
+ chem_draw(
+ subpath (chem_num1,chem_num0) of chem_c_path[$] chem_transformed($),
+ r,c,dashed evenly) ;
+enddef ;
+
+vardef chem_cc (suffix $) (expr f, t, r, c) = % CC
+ chem_num0 := ypart((origin--center(subpath (f-2,f-1) of chem_b_path[$]))
+ intersectiontimes chem_c_path[$]) ;
+ chem_num1 := ypart((origin--center(subpath (t-1,t) of chem_b_path[$]))
+ intersectiontimes chem_c_path[$]) ;
+ if chem_num1>chem_num0 :
+ chem_num0 := chem_num0 + length chem_c_path[$] ;
+ fi
+ chem_draw(
+ subpath (chem_num1,chem_num0) of chem_c_path[$] chem_transformed($),
+ r,c,) ;
+enddef ;
+
+vardef chem_ldb@# (suffix $) (expr f, t, r, c) = % LDB
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_draw(
+ chem_path0 transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled 2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_cz_zero@#(text t) =
- chem_text@#(t, chem_do(origin)) ;
+vardef chem_rdb@# (suffix $) (expr f, t, r, c) = % RDB
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_draw(
+ chem_path0 transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled -2chem_dbl_offset) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_z@#(expr n, p) (text t) =
- if p = 0 :
- chem_text@#(t, chem_do(origin)) ;
- else :
- chem_text@#(t, chem_do(chem_b_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_ldd@# (suffix $) (expr f, t, r, c) = % LDD
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_draw(
+ chem_path0 transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled 2chem_dbl_offset) transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_cz@#(expr n, p) (text t) =
- if n = 1 :
- chem_c_text(t, chem_do(chem_crz_zero[n] rotated chem_ang(n,p))) ;
- else :
- chem_text@#(t, chem_do(chem_b_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_rdd@# (suffix $) (expr f, t, r, c) = % RDD
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_draw(
+ chem_path0 transformed chem_t,
+ r,c,) ;
+ chem_draw(
+ (chem_path0 paralleled -2chem_dbl_offset) transformed chem_t,
+ r,c,dashed evenly) ;
+ endfor
+ fi
enddef ;
-vardef chem_midz@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_mid_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_oe@# (suffix $) (expr f, t, r, c) = % OE
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_path1 := chem_path0 paralleled -.5chem_dbl_offset ;
+ chem_path2 := chem_path0 paralleled .5chem_dbl_offset ;
+ chem_draw(
+ ( point 0 of chem_path0 --
+ .2[point 0 of chem_path0, point infinity of chem_path0]..
+ .3[point 0 of chem_path1, point infinity of chem_path1]..
+ .4[point 0 of chem_path0, point infinity of chem_path0]..
+ .5[point 0 of chem_path2, point infinity of chem_path2]..
+ .6[point 0 of chem_path0, point infinity of chem_path0]..
+ .7[point 0 of chem_path1, point infinity of chem_path1]..
+ .8[point 0 of chem_path0, point infinity of chem_path0]--
+ point infinity of chem_path0) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_rz@#(expr n, p) (text t) =
- if n < 0 :
- % quite special
- chem_text@#(t, chem_do(chem_r_zero[n] shifted (chem_b_zero[n] rotated chem_ang(n,p)))) ;
- else :
- chem_text@#(t, chem_do(chem_r_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_bw@# (suffix $) (expr f, t, r, c) = % BW
+ if chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ chem_path1 := chem_path0 paralleled -.5chem_dbl_offset ;
+ chem_path2 := chem_path0 paralleled .5chem_dbl_offset ;
+ chem_draw(
+ ( point 0 of chem_path0..
+ .1[point 0 of chem_path1, point infinity of chem_path1]..
+ .2[point 0 of chem_path0, point infinity of chem_path0]..
+ .3[point 0 of chem_path2, point infinity of chem_path2]..
+ .4[point 0 of chem_path0, point infinity of chem_path0]..
+ .5[point 0 of chem_path1, point infinity of chem_path1]..
+ .6[point 0 of chem_path0, point infinity of chem_path0]..
+ .7[point 0 of chem_path2, point infinity of chem_path2]..
+ .8[point 0 of chem_path0, point infinity of chem_path0]..
+ .9[point 0 of chem_path1, point infinity of chem_path1]..
+ point infinity of chem_path0) transformed chem_t,
+ r,c,) ;
+ endfor
+ fi
enddef ;
-vardef chem_crz@#(expr n, p) (text tx) =
- chem_text(tx, chem_do(chem_crz_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_bd@# (suffix $) (expr f, t, r, c) = % BD
+ if chem_star[$] : chem_rbd@#($,f,t,r,c) ; fi
enddef ;
-vardef chem_mrz@#(expr n, p) (text t) =
- if n < 0 :
- % quite special
- chem_text@#(t, chem_do(chem_mr_zero[n] shifted (chem_b_zero[n] rotated chem_ang(n,p)))) ;
- else :
- chem_text@#(t, chem_do(chem_mr_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_rbd@# (suffix $) (expr f, t, r, c) = % RBD
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment($,i) ;
+ if chem_bd_wedge :
+ chem_path1 := chem_path0 rotated -chem_bb_angle ;
+ chem_path2 := chem_path0 rotated chem_bb_angle ;
+ else :
+ chem_path1 := chem_path0 paralleled -chem_dbl_offset ;
+ chem_path2 := chem_path0 paralleled chem_dbl_offset ;
+ fi
+ for j=0 upto 3 :
+ chem_draw(
+ (point (j/3) of chem_path1 -- point (j/3) of chem_path2) transformed chem_t,
+ 2r,c,) ;
+ endfor
+ endfor
+enddef ;
+
+vardef chem_lrbd@# (suffix $) (expr f, t, r, c) = % LRBD
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment.lft($,i) ;
+ if chem_bd_wedge :
+ chem_path1 := chem_path0 rotated -chem_bb_angle ;
+ chem_path2 := chem_path0 rotated chem_bb_angle ;
+ else :
+ chem_path1 := chem_path0 paralleled -.5chem_dbl_offset ;
+ chem_path2 := chem_path0 paralleled .5chem_dbl_offset ;
+ fi
+ for j=0 upto 3 :
+ chem_draw(
+ (point (j/3) of chem_path1 -- point (j/3) of chem_path2) transformed chem_t,
+ 2r,c,) ;
+ endfor
+ endfor
+ fi
enddef ;
-vardef chem_prz@#(expr n, p) (text t) =
- if n < 0 :
- % quite special
- chem_text@#(t, chem_do(chem_pr_zero[n] shifted (chem_b_zero[n] rotated chem_ang(n,p)))) ;
- else :
- chem_text@#(t, chem_do(chem_pr_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_rrbd@# (suffix $) (expr f, t, r, c) = % RRBD
+ if not chem_star[$] :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_path0 := subpath chem_sb_pair@# of chem_r_fragment.rt($,i) ;
+ if chem_bd_wedge :
+ chem_path1 := chem_path0 rotated -chem_bb_angle ;
+ chem_path2 := chem_path0 rotated chem_bb_angle ;
+ else :
+ chem_path1 := chem_path0 paralleled -.5chem_dbl_offset ;
+ chem_path2 := chem_path0 paralleled .5chem_dbl_offset ;
+ fi
+ for j=0 upto 3 :
+ chem_draw(
+ (point (j/3) of chem_path1 -- point (j/3) of chem_path2) transformed chem_t,
+ 2r,c,) ;
+ endfor
+ endfor
+ fi
enddef ;
-vardef chem_rt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rt_zero[n] rotated chem_ang(n,p))) ;
+% text, number (no alignment on number);
+
+vardef chem_z@#(suffix $) (expr p) (text t) = % Z
+ draw chem_text@#
+ (t,chem_do(
+ if p=0 :
+ origin
+ else :
+ (point p-1 of chem_b_path[$]) chem_transformed($)
+ fi
+ )) ;
enddef ;
-vardef chem_rtt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rtt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_cz@#(suffix $) (expr p) (text t) = chem_z@#($,p,t) ; enddef ; % CZ ?
+
+vardef chem_midz@#(suffix $) (expr p) (text t) = % MIDZ
+ if not (chem_star[$] or chem_front[$]) :
+ draw chem_text@#
+ (t,chem_do(
+ (xpart chem_sb_pair, 0) scaled (xpart point 0 of chem_b_path[$])
+ chem_transformed($)
+ )) ;
+ fi
enddef ;
-vardef chem_rbt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rbt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_rz@#(suffix $) (expr p) (text t) = % RZ
+ draw chem_text@#
+ (t, chem_do((point p-1 of chem_r_path[$]) chem_transformed($))) ;
enddef ;
-vardef chem_zt@#(expr n, p) (text t) =
- if n = 1 :
- chem_text@#(t, chem_do(chem_rt_zero[n] rotated chem_ang(n,p))) ;
- else :
- chem_text@#(t, chem_do(chem_n_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_lrz@#(suffix $) (expr p) (text t) = % LRZ
+ if not chem_star[$] :
+ draw chem_text@#
+ (t,
+ chem_do((point p-1 of chem_r_path.lft[$]) chem_transformed($))) ;
+ fi
enddef ;
-vardef chem_zn@#(expr n, p) (text t) =
- if n = 1 :
- chem_text@#(t, chem_do(chem_rt_zero[n] rotated chem_ang(n,p))) ;
- else :
- chem_text@#(t, chem_do(chem_n_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+vardef chem_rrz@#(suffix $) (expr p) (text t) = % RRZ
+ if not chem_star[$] :
+ draw chem_text@#
+ (t, chem_do((point p-1 of chem_r_path.rt[$]) chem_transformed($))) ;
+ fi
enddef ;
-vardef chem_zbt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rtt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_zn@#(suffix $) (expr p) (text t) = % ZN
+ chem_zt($,p,t) ;
enddef ;
-vardef chem_zbn@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rtt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_zt@#(suffix $) (expr p) (text t) = % ZT
+ draw chem_text@#(t,chem_do ((point p-1 of chem_b_path[$]) chem_transformed($)
+ scaled chem_text_min)) ;
enddef ;
-vardef chem_ztt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rbt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_zln@#(suffix $) (expr p) (text t) = % ZLN
+ chem_zlt($,p,t) ;
enddef ;
-vardef chem_ztn@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rbt_zero[n] rotated chem_ang(n,p))) ;
+vardef chem_zlt@#(suffix $) (expr p) (text t) = % ZLT
+ draw chem_text@#(t, chem_do((point p-1.5 of chem_b_path[$]) chem_transformed($)
+ scaled chem_text_min)) ;
enddef ;
-vardef chem_symbol(expr t) =
- draw textext(t) ;
+vardef chem_zrn@#(suffix $) (expr p) (text t) = % ZRN
+ chem_zrt($,p,t) ;
enddef ;
-vardef chem_text@#(expr txt, z) = % adapted copy of thelabel@
- save p ; picture p ;
- p := textext(txt) ;
- p := p
- if (mfun_labtype@# >= 10) : shifted (0,ypart center p) fi
- shifted (z + chem_text_offset*mfun_laboff@# - (mfun_labxf@#*lrcorner p + mfun_labyf@#*ulcorner p + (1-mfun_labxf@#-mfun_labyf@#)*llcorner p)) ;
- if chem_text_trace :
- draw z withpen pencircle scaled 2pt withcolor red ;
- draw boundingbox p withpen pencircle scaled 1pt withcolor red ;
- fi ;
- draw p
+vardef chem_zrt@#(suffix $) (expr p) (text t) = % ZRT
+ draw chem_text@#(t, chem_do((point p-0.5 of chem_b_path[$]) chem_transformed($)
+ scaled chem_text_min)) ;
enddef ;
-vardef chem_c_text(expr txt, z) = % adapted copy of thelabel@
- save p ; picture p ; p := textext(txt) ;
- save b ; path b ; b := (boundingbox p) shifted z ;
- save a ; pair a ; a := (origin--z) intersection_point b ;
- if intersection_found :
- draw p shifted (z enlonged arclength(a -- center b)) ;
- else :
- draw p shifted z ;
+vardef chem_crz@#(suffix $) (expr p) (text t) = % CRZ ????
+ if chem_star[$] :
+ draw chem_text@#(t, chem_do((point p-1 of chem_b_path[$] enlonged chem_center_offset)
+ chem_transformed($))) ;
fi
-% draw b withcolor green ;
-% draw a withcolor red ;
enddef ;
-vardef chem_ang (expr n, d) =
- ((-1 * (d-1) * chem_angle[n]) + (-chem_rotation+1) * 90 + chem_start[n]) % no ;
+vardef chem_rn@#(suffix $) (expr i, t) = % RN
+ chem_rt($,i,t) ;
enddef ;
-vardef chem_rot (expr n, d) =
- chem_rotation := d ;
+vardef chem_rt@#(suffix $) (expr p) (text t) = % RT
+ draw chem_text@#(t, chem_do((center chem_r_fragment($,p)) chem_transformed($))) ;
enddef ;
-vardef chem_adj (expr n, d) =
- chem_adjacent := d ;
+vardef chem_lrn@#(suffix $) (expr i, t) = % LRN
+ chem_lrt($,i,t) ;
enddef ;
-vardef chem_sub (expr n, d) =
- chem_substituent := d ;
+vardef chem_lrt@#(suffix $) (expr p) (text t) = % LRT
+ draw chem_text@#(t, chem_do((center chem_r_fragment.lft($,p)) chem_transformed($))) ;
enddef ;
-vardef chem_dir (expr n, d) =
- if n = 1 :
- chem_direction_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d+1))) ;
- currentpicture := currentpicture shifted chem_direction_p ;
- chem_shift := chem_shift + chem_direction_p ;
- fi ;
+vardef chem_rrn@# (suffix $) (expr i, t) = % RRN
+ chem_rrt($,i,t) ;
enddef ;
-vardef chem_mov (expr n, d) =
- if d = 0 :
- currentpicture := currentpicture shifted - chem_shift ;
- chem_shift := origin ;
- else :
- chem_move_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d+chem_initialmov[n]))) ;
- currentpicture := currentpicture shifted chem_move_p ;
- chem_shift := chem_shift + chem_move_p ;
- fi ;
+vardef chem_rrt@#(suffix $) (expr p) (text t) = % RRT
+ draw chem_text@#(t, chem_do((center chem_r_fragment.rt($,p)) chem_transformed($))) ;
enddef ;
-vardef chem_off (expr n, d) =
- if (d = 1) or (d = 2) or (d = 8) : % positive
- currentpicture := currentpicture shifted (-chem_setting_offset,0) ;
- chem_shift := chem_shift + (-chem_setting_offset,0)
- elseif (d = 4) or (d = 5) or (d = 6) : % negative
- currentpicture := currentpicture shifted ( chem_setting_offset,0) ;
- chem_shift := chem_shift + ( chem_setting_offset,0)
- fi ;
+vardef chem_symbol(expr t) = draw textext(t) ; enddef ;
+
+vardef chem_align@#(expr pic) =
+ pic
+ if (mfun_labtype@# >= 10) :
+ shifted (0,ypart center pic)
+ fi
+ shifted (-(mfun_labxf@#*lrcorner pic + mfun_labyf@#*ulcorner pic + (1-mfun_labxf@#-mfun_labyf@#)*llcorner pic))
enddef ;
-vardef chem_set(expr n, m) =
- if chem_adjacent > 0 :
- chem_adjacent_d := xpart chem_b_zero[n] + xpart chem_b_zero[m] ;
- if chem_adjacent = 1 : chem_adjacent_p := (-chem_adjacent_d, 0) ;
- elseif chem_adjacent = 2 : chem_adjacent_p := (0, -chem_adjacent_d) ;
- elseif chem_adjacent = 3 : chem_adjacent_p := ( chem_adjacent_d, 0) ;
- elseif chem_adjacent = 4 : chem_adjacent_p := (0, chem_adjacent_d) ;
- else : chem_adjacent_p := origin ;
- fi ;
- currentpicture := currentpicture shifted chem_adjacent_p ;
- chem_shift := chem_shift + chem_adjacent_p ;
- chem_adjacent := 0 ;
- fi ;
- if chem_substituent > 0 :
- if m = 1 :
- chem_substituent_d := xpart chem_crz_zero[n] + chem_substituent_offset ;
+vardef chem_text@#(expr txt, z) =
+ chem_pic := textext(txt) ;
+ if length(str @#)=0 :
+ chem_pic := chem_align(chem_pic) ;
+ elseif (str @#) = "auto" :
+ if z<>origin :
+ chem_num0 := abs(angle(z rotated chem_setting_rotation)) ;
+ if chem_num0<=60 :
+ chem_pic := chem_align.rt (chem_pic) xshifted chem_text_offset ;
+ elseif chem_num0>=120 :
+ chem_pic := chem_align.lft(chem_pic) xshifted -chem_text_offset ;
+ else :
+ chem_pic := chem_align (chem_pic) ;
+ fi
+ else :
+ chem_pic := chem_align (chem_pic) ;
+ fi
else :
- chem_substituent_d := xpart chem_crz_zero[n] + xpart chem_b_zero[m] ;
- fi ;
- if chem_substituent = 1 : chem_substituent_p := (-chem_substituent_d, 0) ; % - ?
- elseif chem_substituent = 2 : chem_substituent_p := (0, chem_substituent_d) ;
- elseif chem_substituent = 3 : chem_substituent_p := ( chem_substituent_d, 0) ;
- elseif chem_substituent = 4 : chem_substituent_p := (0, -chem_substituent_d) ;
- else : chem_substituent_p := origin ;
- fi ;
- currentpicture := currentpicture shifted chem_substituent_p ;
- chem_shift := chem_shift + chem_substituent_p ;
- chem_substituent := 0 ;
- fi ;
- chem_rotation := chem_initialrot[m] ;
+ chem_pic := chem_align@#(chem_pic) shifted (chem_text_offset*mfun_laboff@#) ;
+ fi
+ chem_pic := (chem_pic rotated -chem_setting_rotation) shifted z ;
+
+ if chem_trace_text :
+ draw z withpen pencircle scaled 2pt withcolor red ;
+ draw boundingbox chem_pic withpen pencircle scaled 1pt withcolor red ;
+ fi
+
+ chem_pic
enddef ;
-vardef chem_draw (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- for i:=from_point upto to_point:
- draw (path_fragment rotated chem_ang(n,i)) withpen pencircle scaled linewidth withcolor linecolor ;
- endfor ;
+% transform
+
+% rotations and reflections
+
+vardef chem_rot (suffix $) (expr d, s) = % ROT
+ if not chem_front[$] :
+ if d=0 :
+ chem_rotation := 0
+ else :
+ chem_num0 := if chem_stacked[$] : 3 else : 0 fi ;
+ chem_num1 := .5(angle(point d+chem_num0 of chem_b_path[$]) -
+ angle(point d+chem_num0-1 of chem_b_path[$])) ;
+ chem_rotation := (chem_rotation + s*chem_num1) mod 360 ;
+ fi
+ fi
enddef ;
-vardef chem_fill (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- for i:=from_point upto to_point:
- fill (path_fragment rotated chem_ang(n,i)) withpen pencircle scaled 0 withcolor linecolor ;
- endfor ;
+vardef chem_mir (suffix $) (expr d, s) = % MIR
+ % We take the scale factor s to multiply the rotation, but only ONCE.
+ % For example: CARBON,.5MIR12 will give a rotation by 104°
+ if not chem_front[$] :
+ if d=0 : % inversion
+ if chem_mirror=origin :
+ chem_rotation := (chem_rotation + 180*s) mod 360 ;
+ else :
+ chem_mirror := chem_mirror rotated 90 ;
+ fi
+ else :
+ chem_pair0 := (point d-1 of chem_b_path[$]) scaled s ; % not chem_transformed
+ if chem_mirror=origin :
+ chem_mirror := chem_pair0 ;
+ else :
+ chem_num0 := angle(chem_mirror)-angle(chem_pair0) ;
+ if (chem_num0>0) and (chem_num0> 180) :
+ chem_num0 := 360 - chem_num0 ;
+ elseif (chem_num0<0) and (chem_num0<-180) :
+ chem_num0 := -360 - chem_num0 ;
+ fi
+ chem_num0 := chem_num0 * s ;
+ chem_rotation := (chem_rotation + 2chem_num0) mod 360 ;
+ chem_mirror := origin ;
+ fi
+ fi
+ fi
enddef ;
-vardef chem_dashed_normal (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- for i:=from_point upto to_point:
- draw (path_fragment rotated chem_ang(n,i)) withpen pencircle scaled linewidth withcolor linecolor dashed evenly ;
- endfor ;
+% translations
+
+vardef chem_dir (suffix $) (expr d, s) = % DIR (same as MOV(d-1)MOV(d+1))
+ if not chem_front[$] :
+ if d=0 :
+ currentpicture := currentpicture shifted -chem_origin ;
+ chem_origin := origin ;
+ else :
+ chem_pair0 :=
+ (((point d-2 of chem_b_path[$]) shifted (point d of chem_b_path[$])) scaled s)
+ chem_transformed($) ;
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
+ fi
+ fi
+enddef ;
+
+vardef chem_mov (suffix $) (expr d, s) = % MOV
+ if d=0 :
+ currentpicture := currentpicture shifted -chem_origin ;
+ chem_origin := origin ;
+ else :
+ chem_pair0 := ((point d-1 of chem_b_path[$]) scaled s) chem_transformed($) ;
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
+ fi ;
enddef ;
-vardef chem_dashed_connected (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- draw for i:=from_point upto to_point:
- (path_fragment rotated chem_ang(n,i)) if i < to_point : -- fi
- endfor withpen pencircle scaled linewidth withcolor linecolor dashed evenly ;
+vardef chem_mark (suffix $) (expr d, s) = % MARK
+ % scale s is ignored
+ if d<>0 :
+ chem_mark_pair[d] := -chem_origin ;
+ fi
enddef ;
-vardef chem_draw_dot (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- for i:=from_point upto to_point:
- draw (path_fragment rotated chem_ang(n,i)) withpen pencircle scaled (chem_dot_factor*linewidth) withcolor linecolor ;
- endfor ;
+vardef chem_marked (expr d) =
+ if d=0 :
+ chem_origin
+ elseif known chem_mark_pair[d] :
+ chem_mark_pair[d] shifted chem_origin
+ else :
+ origin
+ fi
enddef ;
-vardef chem_draw_fixed (expr n, path_fragment, linewidth, linecolor) =
- draw (path_fragment rotated chem_ang(n,1)) withpen pencircle scaled linewidth withcolor linecolor ;
+vardef chem_number@#(suffix $) (expr p) (text t) = chem_label@#($,p,t) enddef ; % NUMBER
+vardef chem_label@# (suffix $) (expr p) (text t) = % LABEL
+ draw chem_text@#(t,chem_do(chem_marked(p))) ;
enddef ;
-vardef chem_draw_arrow (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- for i:=from_point upto to_point:
- drawarrow (path_fragment rotated chem_ang(n,i)) withpen pencircle scaled linewidth withcolor linecolor ;
- endfor ;
+vardef chem_move (suffix $) (expr d, s) = % MOVE
+ chem_pair0 := chem_marked(d) scaled s ;
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
enddef ;
-vardef chem_draw_vertical (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
- % quite special
- for i:=from_point upto to_point:
- draw (path_fragment shifted (chem_b_zero[n] rotated chem_ang(n,i))) withpen pencircle scaled linewidth withcolor linecolor ;
- endfor ;
+vardef chem_diff (suffix $) (expr d, s) = % DIFF
+ chem_pair0 := (chem_marked(d) shifted -chem_origin) scaled s ;
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
enddef ;
-picture chem_stack_p[] ;
-pair chem_stack_shift[] ;
-
-vardef chem_save =
- chem_stack_n := chem_stack_n + 1 ;
- chem_stack_p[chem_stack_n] := currentpicture ;
- chem_stack_shift[chem_stack_n] := chem_shift ;
- chem_shift := origin ;
-% chem_adjacent := 0 ;
-% chem_substituent := 0 ;
-% chem_rotation := 1 ;
- currentpicture := nullpicture ;
+vardef chem_line (suffix $) (expr f, t, r, c) = % LINE
+ draw if f=t : origin else : chem_marked(f) fi -- chem_marked(t)
+ % no chem_transformed
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
enddef ;
-vardef chem_restore =
- if chem_stack_n > 0 :
- currentpicture := currentpicture shifted - chem_shift ;
- addto chem_stack_p[chem_stack_n] also currentpicture ;
- currentpicture := chem_stack_p[chem_stack_n] ;
- chem_stack_p[chem_stack_n] := nullpicture ;
- chem_shift := chem_stack_shift[chem_stack_n] ;
- chem_stack_n := chem_stack_n - 1 ;
- fi ;
+vardef chem_dash (suffix $) (expr f, t, r, c) = % DASH
+ draw if f=t : origin else : chem_marked(f) fi -- chem_marked(t)
+ % no chem_transformed
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
+ dashed evenly ;
enddef ;
-def chem_init_some(expr n, ratio, start, initialrot, initialmov) =
- chem_width [n] := ratio * chem_base_width * chem_setting_scale ;
- chem_angle [n] := 360/abs(n) ;
- chem_start [n] := start ;
- chem_initialrot[n] := initialrot ;
- chem_initialmov[n] := initialmov ;
- chem_b_zero [n] := (chem_width[n],0) rotated (chem_angle[n]/2) ;
- chem_n_zero [n] := (chem_text_min*chem_width[n],0) rotated (chem_angle[n]/2) ;
- chem_r_max [n] := chem_radical_max*chem_b_zero[n] ;
- chem_r_path [n] := chem_b_zero[n] -- chem_r_max[n] ;
- chem_mr_path [n] := chem_r_path [n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
- chem_pr_path [n] := chem_r_path [n] rotatedaround(chem_b_zero[n],-(180-chem_angle[n])/2) ;
- chem_r_zero [n] := point 1 of chem_r_path [n] ;
- chem_mr_zero [n] := point 1 of chem_mr_path[n] ;
- chem_pr_zero [n] := point 1 of chem_pr_path[n] ;
- chem_crz_zero [n] := point 1 of (chem_r_path[n] enlonged chem_center_offset) ;
- chem_au_path [n] := subpath (0.2,0.8) of (chem_r_max[n] -- (chem_r_max[n] rotated chem_angle[n])) ;
- chem_ad_path [n] := reverse(chem_au_path[n]) ;
- chem_rt_zero [n] := (((chem_radical_max+chem_radical_min)/2)*chem_width[n],0) rotated (chem_angle[n]/2) ;
- chem_rtt_zero [n] := chem_rt_zero[n] rotated + 10 ;
- chem_rbt_zero [n] := chem_rt_zero[n] rotated - 10 ;
- chem_b_path [n] := reverse(chem_b_zero[n] -- (chem_b_zero[n] rotated -chem_angle[n])) ;
- chem_bx_path [n] := reverse(chem_b_zero[n] -- (chem_b_zero[n] rotated -chem_angle[n])) ; % ?
- chem_sb_path [n] := subpath (0.25,0.75) of chem_b_path[n] ;
- chem_s_path [n] := point 0 of chem_b_path[n] -- point 0 of (chem_b_path[n] rotated (2chem_angle[n])) ;
- chem_ss_path [n] := subpath (0.25,0.75) of (chem_s_path[n]) ;
- chem_pss_path [n] := subpath (0.00,0.75) of (chem_s_path[n]) ;
- chem_mss_path [n] := subpath (0.25,1.00) of (chem_s_path[n]) ;
- chem_mid_zero [n] := origin shifted (-.25chem_width[n],0) ;
- chem_midst_path[n] := chem_mid_zero[n] -- (chem_width[n],0) rotated ( chem_angle[n] + chem_angle[n]/2) ;
- chem_midsb_path[n] := chem_mid_zero[n] -- (chem_width[n],0) rotated (-chem_angle[n] - chem_angle[n]/2) ;
- chem_midt_path [n] := subpath (0.25,1.00) of chem_midst_path [n] ;
- chem_midb_path [n] := subpath (0.25,1.00) of chem_midsb_path [n] ;
- chem_msb_path [n] := subpath (0.00,0.75) of chem_b_path[n] ;
- chem_psb_path [n] := subpath (0.25,1.00) of chem_b_path[n] ;
- chem_dbl_path [n] := chem_sb_path[n] shifted - (0.05[origin,center chem_sb_path[n]]) ; % parallel
- chem_dbr_path [n] := chem_sb_path[n] shifted + (0.05[origin,center chem_sb_path[n]]) ;
- chem_eb_path [n] := chem_sb_path[n] shifted - (0.25[origin,center chem_sb_path[n]]) ;
- chem_sr_path [n] := chem_radical_min*chem_b_zero[n] -- chem_r_max[n] ;
- chem_rl_path [n] := chem_r_path[n] paralleled (chem_base_width/20) ;
- chem_rr_path [n] := chem_r_path[n] paralleled -(chem_base_width/20) ;
- chem_srl_path [n] := chem_sr_path[n] paralleled (chem_base_width/20) ;
- chem_srr_path [n] := chem_sr_path[n] paralleled -(chem_base_width/20) ;
- chem_br_path [n] := point 1 of chem_sb_path[n] --
- point 0 of chem_sb_path[n] rotatedaround(point 1 of chem_sb_path[n], -4) --
- point 0 of chem_sb_path[n] rotatedaround(point 1 of chem_sb_path[n], 4) -- cycle ;
- chem_rb_path [n] := chem_b_zero[n] -- chem_r_max[n] rotated -2 -- chem_r_max[n] -- chem_r_max[n] rotated 2 -- cycle ;
- chem_mrb_path [n] := chem_rb_path[n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
- chem_prb_path [n] := chem_rb_path[n] rotatedaround(chem_b_zero[n],-(180-chem_angle[n])/2) ;
- chem_msr_path [n] := chem_sr_path[n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
- chem_psr_path [n] := chem_sr_path[n] rotatedaround(chem_b_zero[n],-(180-chem_angle[n])/2) ;
- % not yet ok:
-% chem_c_path [n] := subpath (30/45, -30/45) of (fullcircle scaled (1.25*chem_circle_radius*chem_width[n]));
-% chem_cc_path [n] := subpath (30/45,8-30/45) of (fullcircle rotated 90 scaled (1.25*chem_circle_radius*chem_width[n]));
- chem_c_path [n] := subpath (30/45, -30/45) of (fullcircle scaled (chem_width[n]));
- chem_cc_path [n] := subpath (30/45,8-30/45) of (fullcircle rotated 90 scaled (chem_width[n]));
-enddef ;
-
-def chem_init_three = chem_init_some(3,30/52 ,-60,1,2) ; enddef ; % 60
-def chem_init_four = chem_init_some(4,30/42.5, 0,1,0) ; enddef ; % 45
-def chem_init_five = chem_init_some(5,30/35 , 0,1,0) ; enddef ; % 36
-def chem_init_six = chem_init_some(6, 1 , 0,1,0) ; enddef ; % 30
-def chem_init_eight = chem_init_some(8,30/22.5, 0,1,0) ; enddef ; % 22.5
-
-% bb R -R R Z -RZ +RZ
-
-def chem_init_some_front(expr n, ratio, start, initialrot, initialmov) =
- chem_init_some(n, ratio, start, initialrot, initialmov) ;
- chem_bb_path [n] := chem_b_path[n] rotated -chem_angle[n] -- chem_b_path[n] -- chem_b_path[n] rotated chem_angle[n] --
- (reverse(chem_b_path[n] shortened (chem_base_width/20))) paralleled (chem_base_width/20) --
- cycle ;
- chem_r_max [n] := chem_radical_max*chem_b_zero[n] ;
- chem_mr_path [n] := origin -- origin shifted (0,-.25chem_base_width) ;
- chem_pr_path [n] := origin -- origin shifted (0, .25*chem_base_width) ;
- chem_r_path [n] := point 1 of chem_mr_path[n] -- point 1 of chem_pr_path[n] ;
- chem_mr_zero [n] := point 1 of chem_mr_path[n] ;
- chem_pr_zero [n] := point 1 of chem_pr_path[n] ;
-enddef ;
-
-def chem_init_five_front = chem_init_some_front(-5,30/35,0,2,0) ; enddef ; % 36
-def chem_init_six_front = chem_init_some_front(-6, 1 ,0,2,0) ; enddef ; % 30
-
-vardef chem_init_one =
- chem_width [1] := .75 * chem_base_width * chem_setting_scale ;
- chem_angle [1] := 360/8 ;
- chem_start [1] := 0 ;
- chem_initialrot[1] := 1 ;
- chem_initialmov[1] := 1 ;
- chem_b_zero [1] := (1.75*chem_width[1],0) ;
- chem_r_min [1] := chem_radical_min*chem_b_zero[1] ;
- chem_r_max [1] := chem_radical_max*chem_b_zero[1] ;
- chem_r_path [1] := (.5*chem_width[1],0) -- (1.25*chem_width[1],0) ;
- chem_r_zero [1] := point 1 of chem_r_path [1] ;
- chem_b_path [1] := chem_r_path[1] rotated + (chem_angle[1]) ; % used for move here
- chem_b_zero [1] := chem_r_zero[1] ;
- chem_crz_zero [1] := chem_r_zero[1] enlonged chem_center_offset ;
- chem_e_path [1] := (.5*chem_width[1],-.25*chem_width[1]) -- (.5*chem_width[1],.25*chem_width[1]) ;
- chem_sb_path [1] := chem_r_path [1] ;
- chem_msb_path [1] := chem_r_path [1] shifted (0,-.1chem_width[1]) ;
- chem_psb_path [1] := chem_r_path [1] shifted (0, .1chem_width[1]) ;
- chem_ddt_path [1] := subpath(0,.4) of chem_r_path [1] ;
- chem_ddb_path [1] := subpath(.6,1) of chem_r_path [1] ;
- chem_ldt_path [1] := chem_ddt_path [1] shifted (0,-.1chem_width[1]) ; % parallel
- chem_ldb_path [1] := chem_ddb_path [1] shifted (0,-.1chem_width[1]) ;
- chem_rdt_path [1] := chem_ddt_path [1] shifted (0, .1chem_width[1]) ;
- chem_rdb_path [1] := chem_ddb_path [1] shifted (0, .1chem_width[1]) ;
- save pr ; pair pr[] ;
- pr0 := point 0 of chem_r_path[1] ;
- pr1 := point 1 of chem_r_path[1] ;
- chem_bb_path [1] := pr0 -- (pr1 rotatedaround(pr0,-chem_bb_angle)) -- (pr1 rotatedaround(pr0,chem_bb_angle)) -- cycle ;
- chem_oe_path [1] := ((-20,0)--(10,0){up}..(20,10)..(30,0)..(40,-10)..(50.0,0)..(60,10)..(70,0)..(80,-10)..{up}(90,0)--(120,0))
- xsized (.75*chem_width[1]) shifted pr0 ;
- chem_rt_zero [1] := point .5 of chem_r_path[1] ;
- chem_rtt_zero [1] := chem_rt_zero[1] rotated + (chem_angle[1]/2) ;
- chem_rbt_zero [1] := chem_rt_zero[1] rotated - (chem_angle[1]/2) ;
- % added by Alan Braslau (adapted to use shared variables):
- save p ; pair p[] ;
- p0 := pr1 rotatedaround(pr0, -chem_bd_angle) ;
- p1 := pr1 rotatedaround(pr0, +chem_bd_angle) ;
- p2 := p0 shifted - pr1 ;
- p3 := p1 shifted - pr1 ;
- chem_bd_path [1] :=
- p0 -- p1 for i=chem_bd_n downto 0 :
- -- p2 shifted (i/chem_bd_n)[pr1,pr0]
- -- p3 shifted (i/chem_bd_n)[pr1,pr0]
- endfor ;
- chem_bw_path [1] :=
- for i=0 upto chem_bw_n - 1 :
- ((i) /chem_bw_n)[pr0,pr1] .. ((i+.25)/chem_bw_n)[pr0,pr1] shifted p2 ..
- ((i+.50)/chem_bw_n)[pr0,pr1] .. ((i+.75)/chem_bw_n)[pr0,pr1] shifted -p2 ..
- endfor pr1 ;
+vardef chem_arrow (suffix $) (expr f, t, r, c) = % ARROW
+ drawarrow if f=t : origin else : chem_marked(f) fi -- chem_marked(t)
+ % no chem_transformed
+ withpen pencircle scaled r
+ withcolor c %\MPcolor{c}
enddef ;
-def chem_init_all =
- chem_init_one ;
- chem_init_three ;
- chem_init_four ;
- chem_init_five ;
- chem_init_six ;
- chem_init_eight ;
- chem_init_five_front ;
- chem_init_six_front ;
+
+vardef chem_rm (suffix $) (expr d, s) = % RM
+ if (not chem_front[$]) and (d<>0) :
+ chem_pair0 := ((point d-1 of chem_r_path[$]) scaled s) chem_transformed($) ;
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
+ fi ;
enddef ;
-chem_init_all ;
+vardef chem_off (suffix $) (expr d, s) = % OFF
+ if d=0 :
+ currentpicture := currentpicture shifted -chem_origin ;
+ chem_origin := origin ;
+ else :
+ chem_pair0 := (unitvector(point d-1 of chem_b_path[one])) scaled chem_setting_offset*s ;
+ % not chem_transformed
+ currentpicture := currentpicture shifted -chem_pair0 ;
+ chem_origin := chem_origin shifted -chem_pair0 ;
+ fi ;
+enddef ;
diff --git a/Master/texmf-dist/metapost/context/base/mp-core.mpiv b/Master/texmf-dist/metapost/context/base/mp-core.mpiv
index 1934e304024..3dba4a004e7 100644
--- a/Master/texmf-dist/metapost/context/base/mp-core.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-core.mpiv
@@ -5,7 +5,7 @@
%D subtitle=background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-crop.mpiv b/Master/texmf-dist/metapost/context/base/mp-crop.mpiv
index 6360757e5b0..00bcdcb4454 100644
--- a/Master/texmf-dist/metapost/context/base/mp-crop.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-crop.mpiv
@@ -5,7 +5,7 @@
%D subtitle=Cropmarks,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
@@ -15,20 +15,20 @@ if known context_crop : endinput ; fi ;
boolean context_crop ; context_crop := true ;
-vardef crop_marks_lines (expr box, length, offset, nx, ny) =
- save p ; picture p ; save w, h, x, y ; numeric w, h, x, y ;
+vardef crop_marks_lines (expr box, len, offset, nx, ny) =
+ save p ; picture p ; save w, h, x, y ; numeric w, h, x, y ;
p := image (
x := if nx = 0 : 1 else : nx - 1 fi ;
y := if ny = 0 : 1 else : ny - 1 fi ;
w := bbwidth (box) / x ;
h := bbheight(box) / y ;
for i=0 upto y :
- draw ((llcorner box) -- (llcorner box) shifted (-length,0)) shifted (-offset,i*h) ;
- draw ((lrcorner box) -- (lrcorner box) shifted ( length,0)) shifted ( offset,i*h) ;
+ draw ((llcorner box) -- (llcorner box) shifted (-len,0)) shifted (-offset,i*h) ;
+ draw ((lrcorner box) -- (lrcorner box) shifted ( len,0)) shifted ( offset,i*h) ;
endfor ;
for i=0 upto x :
- draw ((llcorner box) -- (llcorner box) shifted (0,-length)) shifted (i*w,-offset) ;
- draw ((ulcorner box) -- (ulcorner box) shifted (0, length)) shifted (i*w, offset) ;
+ draw ((llcorner box) -- (llcorner box) shifted (0,-len)) shifted (i*w,-offset) ;
+ draw ((ulcorner box) -- (ulcorner box) shifted (0, len)) shifted (i*w, offset) ;
endfor ;
) ;
setbounds p to box ;
diff --git a/Master/texmf-dist/metapost/context/base/mp-figs.mpiv b/Master/texmf-dist/metapost/context/base/mp-figs.mpiv
index c65808f6d05..aac7c5ad219 100644
--- a/Master/texmf-dist/metapost/context/base/mp-figs.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-figs.mpiv
@@ -5,7 +5,7 @@
%D subtitle=figures,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-form.mpiv b/Master/texmf-dist/metapost/context/base/mp-form.mpiv
index d0519b4f24c..b58792e1a01 100644
--- a/Master/texmf-dist/metapost/context/base/mp-form.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-form.mpiv
@@ -5,7 +5,7 @@
%D subtitle=form support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-func.mpiv b/Master/texmf-dist/metapost/context/base/mp-func.mpiv
index 1d289b606b1..58df711f2f3 100644
--- a/Master/texmf-dist/metapost/context/base/mp-func.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-func.mpiv
@@ -5,7 +5,7 @@
%D subtitle=function hacks,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-grap.mpiv b/Master/texmf-dist/metapost/context/base/mp-grap.mpiv
new file mode 100644
index 00000000000..6b1f2311f88
--- /dev/null
+++ b/Master/texmf-dist/metapost/context/base/mp-grap.mpiv
@@ -0,0 +1,526 @@
+%D \module
+%D [ file=mp-grap.mpiv,
+%D version=2012.10.16, % 2008.09.08 and earlier,
+%D title=\CONTEXT\ \METAPOST\ graphics,
+%D subtitle=graph package support,
+%D author=Hans Hagen \& Alan Braslau,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
+%C details.
+
+if known context_grap : endinput ; fi
+
+boolean context_grap ; context_grap := true ;
+
+% Instead we could include graph here and then clean it up as well as use private
+% variables in the grap_ namespace. After all, graph is frozen.
+
+input graph.mp ;
+
+vardef roundd(expr x, d) =
+ if abs d > 4 :
+ if d > 0 :
+ x
+ else :
+ 0
+ fi
+ elseif d > 0 :
+ save i ; i = floor x ;
+ i + round(Ten_to[d]*(x-i))/Ten_to[d]
+ else :
+ round(x/Ten_to[-d])*Ten_to[-d]
+ fi
+enddef ;
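+
+% for instance : roundd(3.14159,2) yields 3.14 and roundd(1234,-2) yields 1200 ;
+% when abs d > 4 the value is passed through unrounded (d>0) or becomes 0 (d<0)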
+
+Ten_to0 = 1 ;
+Ten_to1 = 10 ;
+Ten_to2 = 100 ;
+Ten_to3 = 1000 ;
+Ten_to4 = 10000 ;
+
+def sFe_base = enddef ;
+
+if unknown Fe_plus :
+ picture Fe_plus ; Fe_plus := textext("+") ; % btex + etex ;
+fi ;
+
+vardef format (expr f,x) = dofmt_.Feform_(f,x) enddef ;
+vardef Mformat (expr f,x) = dofmt_.Meform (f,x) enddef ;
+vardef formatstr (expr f,x) = dofmt_.Feform_(f,x) enddef ;
+vardef Mformatstr(expr f,x) = dofmt_.Meform(f,x) enddef ;
+
+vardef escaped_format(expr s) =
+ "" for n=1 upto length(s) : &
+ if ASCII substring (n,n+1) of s = 37 :
+ "@"
+ else :
+ substring (n,n+1) of s
+ fi
+ endfor
+enddef ;
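+
+% so, for example, escaped_format("%.2f") gives "@.2f" : percent signs (ascii 37)
+% are replaced by "@", matching the "@" style format strings used further on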
+
+vardef dofmt_@#(expr f, x) =
+ textext("\MPgraphformat{" & escaped_format(f) & "}{" & (if string x : x else: decimal x fi) & "}")
+ % textext(mfun_format_number(escaped_format(f),x))
+enddef ;
+
+% note that suffix @# is ignored above...
+
+vardef strfmt(expr f, x) =
+ "\MPgraphformat{" & escaped_format(f) & "}{" & (if string x : x else: decimal x fi) & "}"
+enddef ;
+
+% We redefine autogrid from graph.mp adding the possibility of differing X and Y
+% formats. Autoform is defined in graph.mp (by default "%g").
+
+% graph.mp: string Autoform; Autoform = "%g";
+% graph.mp:
+% graph.mp: vardef autogrid(suffix tx, ty) text w =
+% graph.mp: Gneedgr_:=false;
+% graph.mp: if str tx<>"": for x=auto.x: tx(Autoform,x) w; endfor fi
+% graph.mp: if str ty<>"": for y=auto.y: ty(Autoform,y) w; endfor fi
+% graph.mp: enddef;
+
+% string Autoform_X ; Autoform_X := "@.0e" ;
+% string Autoform_Y ; Autoform_Y := "@.0e" ;
+
+vardef autogrid(suffix tx, ty) text w =
+ Gneedgr_ := false ;
+ if str tx <> "" :
+ for x=auto.x :
+ tx (
+ if string Autoform_X :
+ if Autoform_X <> "" :
+ Autoform_X
+ else :
+ Autoform
+ fi
+ else :
+ Autoform
+ fi,
+ x
+ ) w ;
+ endfor
+ fi
+ if str ty <> "" :
+ for y=auto.y :
+ ty (
+ if string Autoform_Y :
+ if Autoform_Y <> "" :
+ Autoform_Y
+ else :
+ Autoform
+ fi
+ else :
+ Autoform
+ fi,
+ y
+ ) w ;
+ endfor
+ fi
+enddef ;
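+
+% typical use, as a sketch (inside a graph.mp begingraph ... endgraph figure) :
+%
+% string Autoform_X ; Autoform_X := "@.0f" ;
+% string Autoform_Y ; Autoform_Y := "@.1e" ;
+% autogrid(otick.bot,otick.lft) ;
+%
+% an unknown or empty Autoform_X/Autoform_Y falls back on Autoform ("%g")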
+
+% A couple of extensions:
+
+% Define a function plotsymbol() returning a picture: 10 different shapes,
+% unfilled outline, interior filled with different shades of the background.
+% This allows overlapping points on a plot to be more distinguishable.
+
+% grap_symsize := fontsize defaultfont ; % can be redefined
+%
+% dynamic version:
+
+vardef grap_symsize =
+ % fontsize defaultfont
+ % .8ExHeight
+ .35BodyFontSize
+enddef ;
+
+path grap_sym[] ; % (internal) symbol path
+
+grap_sym[0] := (0,0) ; % point
+grap_sym[1] := fullcircle ; % circle
+grap_sym[2] := (up -- down) scaled .5 ; % vertical bar
+
+for i = 3 upto 9 : % polygons
+ grap_sym[i] :=
+ for j = 0 upto i-1 :
+ (up scaled .5) rotated (360j/i) --
+ endfor cycle ;
+endfor
+
+grap_sym[12] := grap_sym[2] rotated +90 ; % horizontal line
+grap_sym[22] := grap_sym[2] rotated +45 ; % backslash
+grap_sym[32] := grap_sym[2] rotated -45 ; % slash
+grap_sym[13] := grap_sym[3] rotated 180 ; % down triangle
+grap_sym[23] := grap_sym[3] rotated -90 ; % right triangle
+grap_sym[33] := grap_sym[3] rotated +90 ; % left triangle
+grap_sym[14] := grap_sym[4] rotated +45 ; % square
+grap_sym[15] := grap_sym[5] rotated 180 ; % down pentagon
+grap_sym[16] := grap_sym[6] rotated +90 ; % turned hexagon
+grap_sym[17] := grap_sym[7] rotated 180 ;
+grap_sym[18] := grap_sym[8] rotated +22.5 ;
+
+numeric l ;
+
+for j = 5 upto 9 :
+ l := length(grap_sym[j]) ;
+ pair p[] ;
+ for i = 0 upto l :
+ p[i] = whatever [point i of grap_sym[j],
+ point (i+2 mod l) of grap_sym[j]] ;
+ p[i] = whatever [point (i+1 mod l) of grap_sym[j],
+ point (i+l-1 mod l) of grap_sym[j]] ;
+ endfor
+ grap_sym[20+j] := for i = 0 upto l : point i of grap_sym[j]--p[i]--endfor cycle ;
+endfor
+
+path s ; s := grap_sym[4] ;
+path q ; q := s scaled .25 ;
+numeric l ; l := length(s) ;
+
+pair p[] ;
+
+grap_sym[24] := for i = 0 upto l-1 :
+ hide(
+ p[i] = whatever [point i of s, point (i+1 mod l) of s] ;
+ p[i] = whatever [point i of q, point (i-1+l mod l) of q] ;
+ p[i+l] = whatever [point i of s, point (i+1 mod l) of s] ;
+ p[i+l] = whatever [point i+1 of q, point (i+2 mod l) of q] ;
+ )
+ point i of q -- p[i] -- p[i+l] --
+endfor cycle ;
+
+grap_sym[34] := grap_sym[24] rotated 45 ;
+
+% usage: gdraw p plot plotsymbol(1,red,1) ; % a filled red circle
+% usage: gdraw p plot plotsymbol(4,blue,0) ; % a blue square
+% usage: gdraw p plot plotsymbol(14,green,0.5) ; % a 50% filled green diamond
+
+def plotsymbol(expr n,c,f) = % (number,color,color|number)
+ if known grap_sym[n] :
+ image(
+ path p ; p := grap_sym[n] scaled grap_symsize ;
+ undraw p withpen currentpen scaled 2 ;
+ if cycle p : fill p withcolor
+ if color f and known f :
+ f
+ elseif numeric f and known f and color c and known c :
+ f[background,c]
+ elseif numeric f and known f :
+ f[background,black]
+ else :
+ background
+ fi ;
+ fi
+ draw p if color c and known c : withcolor c fi ;
+ )
+ else :
+ nullpicture
+ fi
+enddef ;
+
+% The following extensions are not specific to graph and could be moved to metafun...
+
+% convert a polygon path to a smooth path (useful, e.g. as a guide to the eye)
+
+def smoothpath (suffix $) =
+ if path $ :
+ (for i=0 upto length $ :
+ if i>0 : .. fi
+ (point i of $)
+ endfor )
+ fi
+enddef ;
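+
+% for instance (a sketch) :
+%
+% path q ; q := (0,0)--(1,2)--(3,1)--(4,3) ;
+% draw smoothpath(q) scaled 1cm ; % same points, joined smoothly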
+
+% return a path of a function func(x) with the abscissa running from f to t over n intervals
+
+def makefunctionpath (expr f, t, n) (text func) =
+ (for x=f step ((t-f)/n) until t :
+ if x<>f : .. fi
+ (x, func)
+ endfor )
+enddef ;
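+
+% for instance : a parabola sampled at 20 intervals between 0 and 4
+%
+% path p ; p := makefunctionpath(0,4,20,x*x) ;
+% gdraw p ;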
+
+% shift a path, point by point
+%
+% example:
+%
+% p1 := addnoisetopath(p0,(.1normaldeviate,.1normaldeviate)) ;
+
+vardef addnoisetopath (suffix p) (text t) =
+ if path p :
+ hide(pair p_i)
+ (for i=0 upto length p :
+ if i>0 : -- fi
+ hide(p_i := point i of p; x := xpart p_i; y := ypart p_i)z shifted t
+ endfor)
+ fi
+enddef ;
+
+% return a new path of a function func(x) using the same abscissae as an existing path
+
+vardef functionpath (suffix p) (text t) =
+ (for i=0 upto length p :
+ if i>0 : .. fi
+ (hide(x := xpart(point i of p))x,t)
+ endfor )
+enddef ;
+
+% least-squares "fit" to a polynomial
+%
+% example:
+%
+% path p[] ;
+% numeric a[] ; a0 := 1 ; a1 := .1 ; a2 := .01 ; a3 := .001 ; a4 := 0.0001 ;
+% p0 := makefunctionpath(0,5,10,polynomial_function(a,4,x)) ;
+% p1 := addnoisetopath(p0,(0,.001normaldeviate)) ;
+% gdraw p0 ;
+% gdraw p1 plot plotsymbol(1,black,.5) ;
+%
+% numeric b[] ;
+% polynomial_fit(p1, b, 4, 1) ;
+% gdraw functionpath(p1,polynomial_function(b,4,x)) ;
+%
+% numeric c[] ;
+% linear_fit(p1, c, 1) ;
+% gdraw functionpath(p1,linear_function(c,x)) dashed evenly ;
+
+% a polynomial function:
+%
+% y = a0 + a1 * x + a2 * x^2 + ... + a[n] * x^n
+
+vardef polynomial_function (suffix $) (expr n, x) =
+ (for j=0 upto n : + $[j]*(x**j) endfor) % no ;
+enddef ;
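+
+% e.g. with a0=1, a1=2 and a2=3 : polynomial_function(a,2,2) expands to
+% 1 + 2*2 + 3*(2**2) = 17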
+
+% find the determinant of a (n+1)*(n+1) matrix; indices run from 0 to n
+
+vardef det (suffix $) (expr n) =
+ hide(
+ numeric determinant ; determinant := 1 ;
+ save jj ; numeric jj ;
+ for k=0 upto n :
+ if $[k][k]=0 :
+ jj := -1 ;
+ for j=0 upto n :
+ if $[k][j]<>0 :
+ jj := j ;
+ exitif true ;
+ fi
+ endfor
+ if jj<0 :
+ determinant := 0 ;
+ exitif true ;
+ fi
+ for j=k upto n : % interchange the columns
+ temp := $[j][jj] ;
+ $[j][jj] := $[j][k] ;
+ $[j][k] := temp ;
+ endfor
+ determinant := -determinant ;
+ fi
+ exitif determinant=0 ;
+ determinant := determinant * $[k][k] ;
+ if k<n : % subtract row k from lower rows to get a diagonal matrix
+ for j=k+1 upto n:
+ for i=k+1 upto n:
+ $[j][i] := $[j][i]-$[j][k]*$[k][i]/$[k][k] ;
+ endfor
+ endfor
+ fi
+ endfor ;
+ )
+ determinant % no ;
+enddef ;
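+
+% for instance : for a 2 by 2 matrix m with m[0][0]=1, m[0][1]=2, m[1][0]=3
+% and m[1][1]=4, det(m,1) gives 1*4 - 2*3 = -2 ; beware, the matrix is
+% destroyed in the process (see the remark where it is used below)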
+
+numeric fit_chi_squared ;
+
+% least-squares fit of a polynomial $ of order n to a path p (unweighted)
+%
+% reference: P. R. Bevington, "Data Reduction and Error Analysis for the Physical
+% Sciences", McGraw-Hill, New York 1969.
+
+vardef polynomial_fit (suffix p, $) (expr n) (text t) =
+ if not path p :
+ Gerr(p, "Cannot fit--not a path") ;
+ elseif length p < n :
+ Gerr(p, "Cannot fit--not enough points") ;
+ else :
+ fit_chi_squared := 0 ;
+ % calculate sums of the data
+ save sumx, sumy ; numeric sumx[], sumy[] ;
+ save w ; numeric w ;
+ for i=0 upto 2n :
+ sumx[i] := 0 ;
+ endfor
+ for i=0 upto n :
+ sumy[i] := 0 ;
+ endfor
+ for i=0 upto length p :
+ clearxy; z = point i of p ;
+ w := if length(t) > 0 : t else : 1 fi ; % weight
+ x1 := w ;
+ for j=0 upto 2n :
+ sumx[j] := sumx[j] + x1 ;
+ x1 := x1 * x ;
+ endfor
+ y1 := y * w ;
+ for j=0 upto n :
+ sumy[j] := sumy[j] + y1 ;
+ y1 := y1 * x ;
+ endfor
+ fit_chi_squared := fit_chi_squared + y*y*w ;
+ endfor
+ % construct matrices and calculate the polynomial coefficients
+ save m ; numeric m[][] ;
+ for j=0 upto n :
+ for k=0 upto n :
+ m[j][k] := sumx[j+k] ;
+ endfor
+ endfor
+ save delta ; numeric delta ;
+ delta := det(m,n) ; % this destroys the matrix m[][], which is OK
+ if delta = 0 :
+ fit_chi_squared := 0 ;
+ for j=0 upto n :
+ $[j] := 0 ;
+ endfor
+ else :
+ for i=0 upto n :
+ for j=0 upto n :
+ for k=0 upto n :
+ m[j][k] := sumx[j+k] ;
+ endfor
+ m[j][i] := sumy[j] ;
+ endfor
+ $[i] := det(m,n) / delta ; % matrix m[][] gets destroyed...
+ endfor
+ for j=0 upto n :
+ fit_chi_squared := fit_chi_squared - 2sumy[j]*$[j] ;
+ for k=0 upto n :
+ fit_chi_squared := fit_chi_squared + $[j]*$[k]*sumx[j+k] ;
+ endfor
+ endfor
+ % normalize by the number of degrees of freedom
+ fit_chi_squared := fit_chi_squared / (length(p) - n) ;
+ fi
+ fi
+enddef ;
+
+% y = a0 + a1 * x
+%
+% of course a line is just a polynomial of order 1
+
+vardef linear_function (suffix $) (expr x) = polynomial_function($,1,x) enddef ;
+vardef linear_fit (suffix p, $) (text t) = polynomial_fit(p, $, 1, t) ; enddef ;
+
+% and a constant is polynomial of order 0
+
+vardef constant_function (suffix $) (expr x) = polynomial_function($,0,x) enddef ;
+vardef constant_fit (suffix p, $) (text t) = polynomial_fit(p, $, 0, t) ; enddef ;
+
+% y = a1 * exp(a0*x)
+%
+% exp and ln defined in metafun
+
+vardef exponential_function (suffix $) (expr x) = $1*exp($0*x) enddef ;
+
+% since we take a log, this only works for positive ordinates
+
+vardef exponential_fit (suffix p, $) (text t) =
+ save a ; numeric a[] ;
+ save q ; path q ; % fit to the log of the ordinate
+ for i=0 upto length p :
+ if ypart(point i of p)>0 :
+ augment.q(xpart(point i of p),ln(ypart(point i of p))) ;
+ fi
+ endfor
+ linear_fit(q,a,t) ;
+ $0 := a1 ;
+ $1 := exp(a0) ;
+enddef ;
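+
+% the idea : ln(y) = ln($1) + $0*x, a straight line in (x, ln y), so the linear
+% fit returns $0 as its slope and $1 as the exponential of its intercept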
+
+% y = a1 * x**a0
+
+vardef power_law_function (suffix $) (expr x) = $1*(x**$0) enddef ;
+
+% since we take logs, this only works for positive abscissae and ordinates
+
+vardef power_law_fit (suffix p, $) (text t) =
+ save a ; numeric a[] ;
+ save q ; path q ; % fit to the logs of the abscissae and ordinates
+ for i=0 upto length p :
+ if (xpart(point i of p)>0) and (ypart(point i of p)>0) :
+ augment.q(ln(xpart(point i of p)),ln(ypart(point i of p))) ;
+ fi
+ endfor
+ linear_fit(q,a,t) ;
+ $0 := a1 ;
+ $1 := exp(a0) ;
+enddef ;
+
+% gaussian: y = a2 * exp(-ln(2)*((x-a0)/a1)^2)
+%
+% a1 is the hwhm; sigma := a1/sqrt(2ln(2)) or a1/1.17741
+
+numeric lntwo ; lntwo := ln(2) ; % brrr, why not inline it
+
+vardef gaussian_function (suffix $) (expr x) =
+ if $1 = 0 :
+ if x = $0 : $2 else : 0 fi
+ else :
+ $2 * exp(-lntwo*(((x-$0)/$1)**2))
+ fi
+ if known $3 :
+ + $3
+ fi
+enddef ;
+
+% since we take a log, this only works for positive ordinates
+
+vardef gaussian_fit (suffix p, $) (text t) =
+ save a ; numeric a[] ;
+ save q ; path q ; % fit to the log of the ordinate
+ for i=0 upto length p :
+ if ypart(point i of p)>0 :
+ augment.q(xpart(point i of p), ln(ypart(point i of p))) ;
+ fi
+ endfor
+ polynomial_fit(q,a,2,if t > 0 : ln(t) else : 0 fi) ;
+ $1 := sqrt(-lntwo/a2) ;
+ $0 := -.5a1/a2 ;
+ $2 := exp(a0-.25*a1*a1/a2) ;
+ $3 := 0 ; % polynomial_fit will NOT work with a non-zero background!
+enddef ;
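+
+% the idea : ln(y) = ln($2) - ln(2)*((x-$0)/$1)^2 is quadratic in x, so a second
+% order polynomial_fit of (x, ln y) gives a0 + a1*x + a2*x^2 with a2 = -ln(2)/$1^2,
+% a1 = 2*ln(2)*$0/$1^2 and a0 = ln($2) - ln(2)*$0^2/$1^2, which is inverted above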
+
+% lorentzian: y = a2 / (1 + ((x - a0)/a1)^2)
+
+vardef lorentzian_function (suffix $) (expr x) =
+ if $1 = 0 :
+ if x = $0 : $2 else : 0 fi
+ else :
+ $2 / (1 + ((x - $0)/$1)**2)
+ fi
+ if known $3 :
+ + $3
+ fi
+enddef ;
+
+vardef lorentzian_fit (suffix p, $) (text t) =
+ save a ; numeric a[] ;
+ save q ; path q ; % fit to the inverse of the ordinate
+ for i=0 upto length p :
+ if ypart(point i of p)<>0 :
+ augment.q(xpart(point i of p), 1/ypart(point i of p)) ;
+ fi
+ endfor
+ polynomial_fit(q,a,2,if t <> 0 : 1/(t) else : 0 fi) ;
+ $0 := -.5a1/a2 ;
+ $2 := 1/(a0-.25a1*a1/a2) ;
+ $1 := sqrt((a0-.25a1*a1/a2)/a2) ;
+ $3 := 0 ; % polynomial_fit will NOT work with a non-zero background!
+enddef ;
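+
+% analogously : 1/y = (1 + ((x-$0)/$1)^2)/$2 is quadratic in x, so a second order
+% polynomial_fit of (x, 1/y) is inverted above to recover $0, $2 and $1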
diff --git a/Master/texmf-dist/metapost/context/base/mp-grid.mpiv b/Master/texmf-dist/metapost/context/base/mp-grid.mpiv
index cc5c2b76eb2..b9243b1b96e 100644
--- a/Master/texmf-dist/metapost/context/base/mp-grid.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-grid.mpiv
@@ -5,7 +5,7 @@
%D subtitle=grid support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-grph.mpiv b/Master/texmf-dist/metapost/context/base/mp-grph.mpiv
index a8868033bbe..30c49e6e04a 100644
--- a/Master/texmf-dist/metapost/context/base/mp-grph.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-grph.mpiv
@@ -5,7 +5,7 @@
%D subtitle=graphic text support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-mlib.mpiv b/Master/texmf-dist/metapost/context/base/mp-mlib.mpiv
index 71985cef836..9a20429241e 100644
--- a/Master/texmf-dist/metapost/context/base/mp-mlib.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-mlib.mpiv
@@ -5,7 +5,7 @@
%D subtitle=plugins,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
@@ -99,7 +99,20 @@ newinternal textextoffset ; textextoffset := 0 ;
numeric mfun_tt_w[], mfun_tt_h[], mfun_tt_d[] ; % we can consider using colors (less hash space)
numeric mfun_tt_n ; mfun_tt_n := 0 ;
picture mfun_tt_p ; mfun_tt_p := nullpicture ;
-boolean mfun_trial_run ; mfun_trial_run := false ;
+
+if unknown mfun_trial_run :
+ boolean mfun_trial_run ;
+ mfun_trial_run := false ;
+else :
+ % already defined before the format is loaded
+fi ;
+
+if unknown mfun_first_run :
+ boolean mfun_first_run ;
+ mfun_first_run := true ;
+else :
+ % already defined before the format is loaded
+fi ;
def mfun_reset_tex_texts =
mfun_tt_n := 0 ;
@@ -110,8 +123,8 @@ def mfun_flush_tex_texts =
addto currentpicture also mfun_tt_p
enddef ;
-extra_endfig := "mfun_flush_tex_texts;" & extra_endfig;
-extra_beginfig := extra_beginfig & "mfun_reset_tex_texts;";
+extra_endfig := "mfun_flush_tex_texts ;" & extra_endfig ;
+extra_beginfig := extra_beginfig & "mfun_reset_tex_texts ;" ;
% We collect and flush them all, as we can also have temporary textexts
% that never really get flushed but are used for calculations. So, we
@@ -255,7 +268,7 @@ vardef thetextext@#(expr p,z) =
fi
enddef ;
-vardef textext@#(expr p) = % no draw hers
+vardef textext@#(expr p) = % no draw here
thetextext@#(p,origin)
enddef ;
@@ -565,7 +578,10 @@ def bitmapimage(expr xresolution, yresolution, data) =
)
enddef ;
-% TODO:
+% Experimental:
+%
+% property p ; p = properties(withcolor (1,1,0,0)) ;
+% fill fullcircle scaled 20cm withproperties p ;
let property = picture ;
@@ -573,24 +589,33 @@ vardef properties(text t) =
image(draw unitcircle t)
enddef ;
-def withproperties expr p =
- if colormodel p = 3 :
- withcolor greypart p
- elseif colormodel p = 5 :
- withcolor (redpart p,greenpart p,bluepart p)
- elseif colormodel p = 7 :
- withcolor (cyanpart p,magentapart p,yellowpart p,blackpart p)
- fi
- % todo, when available in mp:
- %
- % withprescript prescript p
- % withpostscript postscript p
- %
- % todo, penpart
-enddef ;
+if metapostversion < 1.770 :
-% property p ; p = properties(withcolor (1,1,0,1)) ;
-% fill fullcircle scaled 20cm withproperties p ;
+ def withproperties expr p =
+ if colormodel p = 3 :
+ withcolor greypart p
+ elseif colormodel p = 5 :
+ withcolor (redpart p,greenpart p,bluepart p)
+ elseif colormodel p = 7 :
+ withcolor (cyanpart p,magentapart p,yellowpart p,blackpart p)
+ fi
+ enddef ;
+
+else :
+
+ def withproperties expr p =
+ if colormodel p = 3 :
+ withcolor greypart p
+ elseif colormodel p = 5 :
+ withcolor (redpart p,greenpart p,bluepart p)
+ elseif colormodel p = 7 :
+ withcolor (cyanpart p,magentapart p,yellowpart p,blackpart p)
+ fi
+ withprescript prescript p
+ withpostscript postscript p
+ enddef ;
+
+fi ;
% Experimental:
@@ -614,3 +639,26 @@ primarydef t asgroup s = % s = isolated|knockout
wrappedpicture
endgroup
enddef ;
+
+% Also experimental
+
+string mfun_auto_align[] ;
+
+mfun_auto_align[0] := "rt" ;
+mfun_auto_align[1] := "urt" ;
+mfun_auto_align[2] := "top" ;
+mfun_auto_align[3] := "ulft" ;
+mfun_auto_align[4] := "lft" ;
+mfun_auto_align[5] := "llft" ;
+mfun_auto_align[6] := "bot" ;
+mfun_auto_align[7] := "lrt" ;
+mfun_auto_align[8] := "rt" ;
+
+def autoalign(expr n) =
+ scantokens mfun_auto_align[round((n mod 360)/45)]
+enddef ;
+
+% draw textext.autoalign(60) ("\strut oeps 1") ;
+% draw textext.autoalign(160)("\strut oeps 2") ;
+% draw textext.autoalign(260)("\strut oeps 3") ;
+% draw textext.autoalign(360)("\strut oeps 4") ;
diff --git a/Master/texmf-dist/metapost/context/base/mp-page.mpiv b/Master/texmf-dist/metapost/context/base/mp-page.mpiv
index 96f617257f5..9c538d42a65 100644
--- a/Master/texmf-dist/metapost/context/base/mp-page.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-page.mpiv
@@ -5,7 +5,7 @@
%D subtitle=page enhancements,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
@@ -106,20 +106,30 @@ OuterEdgeWidth := 0pt ;
InnerEdgeDistance := 0pt ;
OuterEdgeDistance := 0pt ;
-path Area [][] ; pair Location [][] ; path Field [][] ; path Page ;
-numeric HorPos ; numeric Hstep [] ; numeric Hsize [] ;
-numeric VerPos ; numeric Vstep [] ; numeric Vsize [] ;
-
-for VerPos=Top step 10 until Bottom:
- for HorPos=LeftEdge step 1 until RightEdge:
- Area[HorPos][VerPos] := origin--cycle ;
- Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
- Location[HorPos][VerPos] := origin ;
- Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
- Field[HorPos][VerPos] := origin--cycle ;
- Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
- endfor ;
-endfor ;
+% path Area[][] ;
+% pair Location[][] ;
+% path Field[][] ;
+
+% numeric Hstep[] ;
+% numeric Hsize[] ;
+% numeric Vstep[] ;
+% numeric Vsize[] ;
+
+path Page ;
+
+numeric HorPos ;
+numeric VerPos ;
+
+% for VerPos=Top step 10 until Bottom:
+% for HorPos=LeftEdge step 1 until RightEdge:
+% Area[HorPos][VerPos] := origin--cycle ;
+% Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
+% Location[HorPos][VerPos] := origin ;
+% Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
+% Field[HorPos][VerPos] := origin--cycle ;
+% Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
+% endfor ;
+% endfor ;
% def LoadPageState =
% scantokens "input mp-state.tmp" ;
@@ -160,10 +170,94 @@ def SwapPageState =
fi ;
enddef ;
-def SetPageAreas =
+% def SetPageAreas =
+%
+% numeric Vsize[], Hsize[], Vstep[], Hstep[] ;
+%
+% Vsize[Top] = TopHeight ;
+% Vsize[TopSeparator] = TopDistance ;
+% Vsize[Header] = HeaderHeight ;
+% Vsize[HeaderSeparator] = HeaderDistance ;
+% Vsize[Text] = TextHeight ;
+% Vsize[FooterSeparator] = FooterDistance ;
+% Vsize[Footer] = FooterHeight ;
+% Vsize[BottomSeparator] = BottomDistance ;
+% Vsize[Bottom] = BottomHeight ;
+%
+% Vstep[Top] = Vstep[TopSeparator] +Vsize[TopSeparator] ;
+% Vstep[TopSeparator] = PaperHeight-TopSpace ;
+% Vstep[Header] = Vstep[TopSeparator] -Vsize[Header] ;
+% Vstep[HeaderSeparator] = Vstep[Header] -Vsize[HeaderSeparator] ;
+% Vstep[Text] = Vstep[HeaderSeparator]-Vsize[Text] ;
+% Vstep[FooterSeparator] = Vstep[Text] -Vsize[FooterSeparator] ;
+% Vstep[Footer] = Vstep[FooterSeparator]-Vsize[Footer] ;
+% Vstep[BottomSeparator] = Vstep[Footer] -Vsize[BottomSeparator] ;
+% Vstep[Bottom] = Vstep[BottomSeparator]-Vsize[Bottom] ;
+%
+% Hsize[LeftEdge] = LeftEdgeWidth ;
+% Hsize[LeftEdgeSeparator] = LeftEdgeDistance ;
+% Hsize[LeftMargin] = LeftMarginWidth ;
+% Hsize[LeftMarginSeparator] = LeftMarginDistance ;
+% Hsize[Text] = MakeupWidth ;
+% Hsize[RightMarginSeparator] = RightMarginDistance ;
+% Hsize[RightMargin] = RightMarginWidth ;
+% Hsize[RightEdgeSeparator] = RightEdgeDistance ;
+% Hsize[RightEdge] = RightEdgeWidth ;
+%
+% Hstep[LeftEdge] = Hstep[LeftEdgeSeparator] -Hsize[LeftEdge] ;
+% Hstep[LeftEdgeSeparator] = Hstep[LeftMargin] -Hsize[LeftEdgeSeparator] ;
+% Hstep[LeftMargin] = Hstep[LeftMarginSeparator] -Hsize[LeftMargin] ;
+% Hstep[LeftMarginSeparator] = Hstep[Text] -Hsize[LeftMarginSeparator] ;
+% Hstep[Text] = BackSpace ;
+% Hstep[RightMarginSeparator] = Hstep[Text] +Hsize[Text] ;
+% Hstep[RightMargin] = Hstep[RightMarginSeparator]+Hsize[RightMarginSeparator] ;
+% Hstep[RightEdgeSeparator] = Hstep[RightMargin] +Hsize[RightMargin] ;
+% Hstep[RightEdge] = Hstep[RightEdgeSeparator] +Hsize[RightEdgeSeparator] ;
+%
+% for VerPos=Top step 10 until Bottom:
+% for HorPos=LeftEdge step 1 until RightEdge:
+% Area[HorPos][VerPos] := unitsquare xscaled Hsize[HorPos] yscaled Vsize[VerPos] ;
+% Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
+% Location[HorPos][VerPos] := (Hstep[HorPos],Vstep[VerPos]) ;
+% Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
+% Field[HorPos][VerPos] := Area[HorPos][VerPos] shifted Location[HorPos][VerPos] ;
+% Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
+% endfor ;
+% endfor ;
+%
+% Page := unitsquare xscaled PaperWidth yscaled PaperHeight ;
+%
+% enddef ;
+%
+% def BoundPageAreas =
+% % pickup pencircle scaled 0pt ;
+% bboxmargin := 0 ; setbounds currentpicture to Page ;
+% enddef ;
+%
+% def StartPage =
+% begingroup ;
+% if PageStateAvailable :
+% LoadPageState ;
+% SwapPageState ;
+% fi ;
+% SetPageAreas ;
+% BoundPageAreas ;
+% enddef ;
+%
+% def StopPage =
+% BoundPageAreas ;
+% endgroup ;
+% enddef ;
- numeric Vsize[], Hsize[], Vstep[], Hstep[] ;
+% Because metapost > 1.50 has dynamic memory management and is less
+% efficient than before, we now delay calculations ... (on a document
+% with 150 pages the time spent in mp was close to 5 seconds, which was
+% only due to initialising the page related areas, something that was
+% hardly noticeable before. At least now we're back to half a second
+% for such a case.)
+def SetPageVsize =
+ numeric Vsize[] ;
Vsize[Top] = TopHeight ;
Vsize[TopSeparator] = TopDistance ;
Vsize[Header] = HeaderHeight ;
@@ -173,17 +267,10 @@ def SetPageAreas =
Vsize[Footer] = FooterHeight ;
Vsize[BottomSeparator] = BottomDistance ;
Vsize[Bottom] = BottomHeight ;
+enddef ;
- Vstep[Top] = Vstep[TopSeparator] +Vsize[TopSeparator] ;
- Vstep[TopSeparator] = PaperHeight-TopSpace ;
- Vstep[Header] = Vstep[TopSeparator] -Vsize[Header] ;
- Vstep[HeaderSeparator] = Vstep[Header] -Vsize[HeaderSeparator] ;
- Vstep[Text] = Vstep[HeaderSeparator]-Vsize[Text] ;
- Vstep[FooterSeparator] = Vstep[Text] -Vsize[FooterSeparator] ;
- Vstep[Footer] = Vstep[FooterSeparator]-Vsize[Footer] ;
- Vstep[BottomSeparator] = Vstep[Footer] -Vsize[BottomSeparator] ;
- Vstep[Bottom] = Vstep[BottomSeparator]-Vsize[Bottom] ;
-
+def SetPageHsize =
+ numeric Hsize[] ;
Hsize[LeftEdge] = LeftEdgeWidth ;
Hsize[LeftEdgeSeparator] = LeftEdgeDistance ;
Hsize[LeftMargin] = LeftMarginWidth ;
@@ -193,7 +280,23 @@ def SetPageAreas =
Hsize[RightMargin] = RightMarginWidth ;
Hsize[RightEdgeSeparator] = RightEdgeDistance ;
Hsize[RightEdge] = RightEdgeWidth ;
+enddef ;
+
+def SetPageVstep =
+ numeric Vstep[] ;
+ Vstep[Top] = Vstep[TopSeparator] +Vsize[TopSeparator] ;
+ Vstep[TopSeparator] = PaperHeight-TopSpace ;
+ Vstep[Header] = Vstep[TopSeparator] -Vsize[Header] ;
+ Vstep[HeaderSeparator] = Vstep[Header] -Vsize[HeaderSeparator] ;
+ Vstep[Text] = Vstep[HeaderSeparator]-Vsize[Text] ;
+ Vstep[FooterSeparator] = Vstep[Text] -Vsize[FooterSeparator] ;
+ Vstep[Footer] = Vstep[FooterSeparator]-Vsize[Footer] ;
+ Vstep[BottomSeparator] = Vstep[Footer] -Vsize[BottomSeparator] ;
+ Vstep[Bottom] = Vstep[BottomSeparator]-Vsize[Bottom] ;
+enddef ;
+def SetPageHstep =
+ numeric Hstep[] ;
Hstep[LeftEdge] = Hstep[LeftEdgeSeparator] -Hsize[LeftEdge] ;
Hstep[LeftEdgeSeparator] = Hstep[LeftMargin] -Hsize[LeftEdgeSeparator] ;
Hstep[LeftMargin] = Hstep[LeftMarginSeparator] -Hsize[LeftMargin] ;
@@ -203,22 +306,67 @@ def SetPageAreas =
Hstep[RightMargin] = Hstep[RightMarginSeparator]+Hsize[RightMarginSeparator] ;
Hstep[RightEdgeSeparator] = Hstep[RightMargin] +Hsize[RightMargin] ;
Hstep[RightEdge] = Hstep[RightEdgeSeparator] +Hsize[RightEdgeSeparator] ;
+enddef ;
+def SetPageArea =
+ path Area[][] ;
for VerPos=Top step 10 until Bottom:
for HorPos=LeftEdge step 1 until RightEdge:
- Area[HorPos][VerPos] := unitsquare xscaled Hsize[HorPos] yscaled Vsize[VerPos] ;
- Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
- Location[HorPos][VerPos] := (Hstep[HorPos],Vstep[VerPos]) ;
- Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
- Field[HorPos][VerPos] := Area[HorPos][VerPos] shifted Location[HorPos][VerPos] ;
- Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
+ Area[HorPos][VerPos] := unitsquare xscaled Hsize[HorPos] yscaled Vsize[VerPos] ;
+ Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
endfor ;
endfor ;
+enddef ;
+def SetPageLocation =
+ pair Location[][] ;
+ for VerPos=Top step 10 until Bottom:
+ for HorPos=LeftEdge step 1 until RightEdge:
+ Location[HorPos][VerPos] := (Hstep[HorPos],Vstep[VerPos]) ;
+ Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
+ endfor ;
+ endfor ;
+enddef ;
+
+def SetPageField =
+ path Field[][] ;
+ for VerPos=Top step 10 until Bottom:
+ for HorPos=LeftEdge step 1 until RightEdge:
+ Field[HorPos][VerPos] := unitsquare xscaled Hsize[HorPos] yscaled Vsize[VerPos] shifted (Hstep[HorPos],Vstep[VerPos]) ;
+ Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
+ endfor ;
+ endfor ;
+enddef ;
+
+def SetPagePage =
+ path Page ;
Page := unitsquare xscaled PaperWidth yscaled PaperHeight ;
+enddef ;
+def mfun_page_Area = hide(SetPageArea ;) Area enddef ;
+def mfun_page_Location = hide(SetPageLocation ;) Location enddef ;
+def mfun_page_Field = hide(SetPageField ;) Field enddef ;
+def mfun_page_Vsize = hide(SetPageVsize ;) Vsize enddef ;
+def mfun_page_Hsize = hide(SetPageHsize ;) Hsize enddef ;
+def mfun_page_Vstep = hide(SetPageVstep ;) Vstep enddef ;
+def mfun_page_Hstep = hide(SetPageHstep ;) Hstep enddef ;
+def mfun_page_Page = hide(SetPagePage ;) Page enddef ;
+
+def SetPageVariables =
+ let Area = mfun_page_Area ;
+ let Location = mfun_page_Location ;
+ let Field = mfun_page_Field ;
+ let Vsize = mfun_page_Vsize ;
+ let Hsize = mfun_page_Hsize ;
+ let Vstep = mfun_page_Vstep ;
+ let Hstep = mfun_page_Hstep ;
+ let Page = mfun_page_Page ;
enddef ;
+SetPageVariables ;
+
+let SetPageAreas = SetPageVariables ; % compatibility
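+
+% so a later reference to Area, Location, Field, Vsize, Hsize, Vstep, Hstep or
+% Page first runs the corresponding SetPage... macro (through the hide) and then
+% expands to the variable that macro just declared and filled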
+
def BoundPageAreas =
% pickup pencircle scaled 0pt ;
bboxmargin := 0 ; setbounds currentpicture to Page ;
@@ -226,11 +374,13 @@ enddef ;
def StartPage =
begingroup ;
- if PageStateAvailable :
- LoadPageState ;
- SwapPageState ;
+ if mfun_first_run :
+ if PageStateAvailable :
+ LoadPageState ;
+ SwapPageState ;
+ fi ;
+ SetPageVariables ;
fi ;
- SetPageAreas ;
BoundPageAreas ;
enddef ;
@@ -272,15 +422,27 @@ def SetCoverAreas =
enddef ;
+% def StartCover =
+% begingroup ;
+% if PageStateAvailable :
+% LoadPageState ;
+% % SwapPageState ;
+% fi ;
+% SetPageAreas ;
+% SetCoverAreas ;
+% BoundCoverAreas ;
+% enddef ;
def StartCover =
begingroup ;
- if PageStateAvailable :
- LoadPageState ;
- % SwapPageState ;
+ if mfun_first_run :
+ if PageStateAvailable :
+ LoadPageState ;
+ % SwapPageState ;
+ fi ;
+ SetPageVariables ; % was SetPageAreas ;
+ SetCoverAreas ;
fi ;
- SetPageAreas ;
- SetCoverAreas ;
BoundCoverAreas ;
enddef ;
diff --git a/Master/texmf-dist/metapost/context/base/mp-shap.mpiv b/Master/texmf-dist/metapost/context/base/mp-shap.mpiv
index b62e636d56e..7136565108d 100644
--- a/Master/texmf-dist/metapost/context/base/mp-shap.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-shap.mpiv
@@ -5,7 +5,7 @@
%D subtitle=shapes,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-step.mpiv b/Master/texmf-dist/metapost/context/base/mp-step.mpiv
index 654ef443d5a..f7a7ba5debb 100644
--- a/Master/texmf-dist/metapost/context/base/mp-step.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-step.mpiv
@@ -5,7 +5,7 @@
%D subtitle=steps,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-text.mpiv b/Master/texmf-dist/metapost/context/base/mp-text.mpiv
index d5630f68ef2..b68e8412aef 100644
--- a/Master/texmf-dist/metapost/context/base/mp-text.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-text.mpiv
@@ -5,7 +5,7 @@
%D subtitle=text support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/Master/texmf-dist/metapost/context/base/mp-tool.mpiv b/Master/texmf-dist/metapost/context/base/mp-tool.mpiv
index 764863b65ce..5b53dcdef91 100644
--- a/Master/texmf-dist/metapost/context/base/mp-tool.mpiv
+++ b/Master/texmf-dist/metapost/context/base/mp-tool.mpiv
@@ -5,12 +5,14 @@
%D subtitle=auxiliary macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% def loadfile(expr name) = scantokens("input " & name & ";") enddef ;
+
if known context_tool : endinput ; fi ;
boolean context_tool ; context_tool := true ;
@@ -26,33 +28,7 @@ let @## = @# ;
if not known mpversion : string mpversion ; mpversion := "0.641" ; fi ;
-% vardef mpversiongt(expr s) =
-% scantokens (mpversion & " > " & if numeric s : decimal s else : s fi)
-% enddef ;
-% vardef mpversionlt(expr s) =
-% scantokens (mpversion & " < " & if numeric s : decimal s else : s fi)
-% enddef ;
-% vardef mpversioneq(expr s) =
-% scantokens (mpversion & " = " & if numeric s : decimal s else : s fi)
-% enddef ;
-
-%D More interesting:
-%D
-%D \starttyping
-%D fill fullcircle scaled 4cm withcolor if mpversiongt("0.6") : red else : green fi ;
-%D fill fullcircle scaled 2cm withcolor if mpversionlt(0.6) : blue else : white fi ;
-%D fill fullcircle scaled 1cm withcolor if mpversioncmp(0.6,">=") : yellow else : black fi ;
-%D \stoptyping
-
-% no longer needed as we load runtime
-
-vardef mpversioncmp(expr s, c) =
- scantokens (mpversion & c & if numeric s : decimal s else : s fi)
-enddef ;
-
-vardef mpversionlt (expr s) = mpversioncmp(s, "<") enddef ;
-vardef mpversioneq (expr s) = mpversioncmp(s, "=") enddef ;
-vardef mpversiongt (expr s) = mpversioncmp(s, ">") enddef ;
+newinternal metapostversion ; metapostversion := scantokens(mpversion) ;
%D We always want \EPS\ conforming output, so we say:
@@ -2181,3 +2157,168 @@ vardef mcomponent expr p = if cmykcolor p : magentapart p elseif rgbcolor p : 1
vardef ycomponent expr p = if cmykcolor p : yellowpart p elseif rgbcolor p : 1 - bluepart p else : p fi enddef ;
vardef bcomponent expr p = if cmykcolor p : blackpart p elseif rgbcolor p : 0 else : p fi enddef ;
+% draw image (...) ... ; % prescripts prepended to first, postscripts appended to last
+% draw decorated (...) ... ; % prescripts prepended to each, postscripts appended to each
+% draw redecorated (...) ... ; % prescripts assigned to each, postscripts assigned to each
+% draw undecorated (...) ... ; % following properties are ignored, existing properties are kept
+%
+% draw decorated (
+% draw fullcircle scaled 20cm withpen pencircle scaled 20mm withcolor red withtransparency (1,.40) ;
+% draw fullcircle scaled 15cm withpen pencircle scaled 15mm withcolor green withtransparency (1,.30) ;
+% draw fullcircle scaled 10cm withpen pencircle scaled 10mm withcolor blue withtransparency (1,.20) ;
+% )
+% withcolor blue
+% withtransparency (1,.125) % selectively applied
+% withpen pencircle scaled 10mm
+% ;
+
+% vardef image (text imagedata) = % already defined
+% save currentpicture ;
+% picture currentpicture ;
+% currentpicture := nullpicture ;
+% imagedata ;
+% currentpicture
+% enddef ;
+
+vardef undecorated (text imagedata) text decoration =
+ save currentpicture ;
+ picture currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ currentpicture
+enddef ;
+
+
+if metapostversion < 1.770 :
+
+ vardef decorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ withcolor colorpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ withcolor colorpart i
+ decoration
+ elseif textual i :
+ also i
+ withcolor colorpart i
+ decoration
+ else :
+ also i
+ fi
+ ;
+ endfor ;
+ currentpicture
+ enddef ;
+
+else:
+
+ vardef decorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ elseif textual i :
+ also i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ else :
+ also i
+ fi
+ ;
+ endfor ;
+ currentpicture
+ enddef ;
+
+fi ;
+
+vardef redecorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ decoration
+ elseif textual i :
+ also i
+ decoration
+ else :
+ also i
+ fi
+ ;
+ endfor ;
+ currentpicture
+enddef ;
+
+% path mfun_bleed_box ;
+
+% primarydef p bleeded d =
+% image (
+% mfun_bleed_box := boundingbox p ;
+% if pair d :
+% draw p xysized (bbwidth(p)+2*xpart d,bbheight(p)+2*ypart d) shifted -d ;
+% else :
+% draw p xysized (bbwidth(p)+2d,bbheight(p)+2d) shifted (-d,-d) ;
+% fi ;
+% setbounds currentpicture to mfun_bleed_box ;
+% )
+% enddef ;
+
+%D Dimensions have never been an issue as traditional MP can't make such large
+%D pictures, but with double mode we need a catch:
+
+newinternal maxdimensions ; maxdimensions := 14000 ;
+
+def mfun_apply_max_dimensions = % not a generic helper, we want to protect this one
+ if bbwidth currentpicture > maxdimensions :
+ currentpicture := currentpicture if bbheight currentpicture > bbwidth currentpicture : ysized else : xsized fi maxdimensions ;
+ elseif bbheight currentpicture > maxdimensions :
+ currentpicture := currentpicture ysized maxdimensions ;
+ fi ;
+enddef;
+
+extra_endfig := extra_endfig & "mfun_apply_max_dimensions ;" ;
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-babel.lua b/Master/texmf-dist/scripts/context/lua/mtx-babel.lua
index 120e490929e..8c11465dea4 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-babel.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-babel.lua
@@ -9,9 +9,23 @@ if not modules then modules = { } end modules ['mtx-babel'] = {
-- data tables by Thomas A. Schmitz
local helpinfo = [[
---language=string conversion language (e.g. greek)
---structure=string obey given structure (e.g. 'document', default: 'context')
---convert convert babel codes into utf
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-babel</entry>
+ <entry name="detail">Babel Input To UTF Conversion</entry>
+ <entry name="version">1.20</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="language" value="string"><short>conversion language (e.g. greek)</short></flag>
+ <flag name="structure" value="string"><short>obey given structure (e.g. 'document', default: 'context')</short></flag>
+ <flag name="convert"><short>convert babel codes into utf</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -431,6 +445,8 @@ end
if environment.argument("convert") then
scripts.babel.convert(environment.files[1] or "")
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-base.lua b/Master/texmf-dist/scripts/context/lua/mtx-base.lua
index ab5bfe18a9b..bd674971793 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-base.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-base.lua
@@ -7,29 +7,43 @@ if not modules then modules = { } end modules ['mtx-base'] = {
}
local helpinfo = [[
---generate generate file database
---variables show configuration variables
---configurations show configuration order
---expand-braces expand complex variable
---expand-path expand variable (resolve paths)
---expand-var expand variable (resolve references)
---show-path show path expansion of ...
---var-value report value of variable
---find-file report file location
---find-path report path of file
---make or --ini make luatex format
---run or --fmt= run luatex format
---compile assemble and compile lua inifile
---verbose give a bit more info
---all show all found files
---format=str filter cf format specification (default 'tex', use 'any' for any match)
---pattern=str filter variables
---trackers=list enable given trackers
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-base</entry>
+ <entry name="detail">ConTeXt TDS Management Tool (aka luatools)</entry>
+ <entry name="version">1.35</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ <flag name="make"><short>[or <ref name="ini"/>] make luatex format</short></flag>
+ <flag name="run"><short>[or <ref name="fmt"/>] run luatex format</short></flag>
+ <flag name="compile"><short>assemble and compile lua inifile</short></flag>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="all"><short>show all found files</short></flag>
+ <flag name="format" value="str"><short>filter cf format specification (default 'tex', use 'any' for any match)</short></flag>
+ <flag name="pattern" value="str"><short>filter variables</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
name = "mtx-base",
- banner = "ConTeXt TDS Management Tool 1.35 (aka luatools)",
+ banner = "ConTeXt TDS Management Tool (aka luatools) 1.35",
helpinfo = helpinfo,
}
@@ -107,6 +121,8 @@ elseif environment.arguments["variables"] or environment.arguments["show-variabl
elseif environment.arguments["configurations"] or environment.arguments["show-configurations"] then
resolvers.load("nofiles")
resolvers.listers.configurations()
+elseif environment.arguments["exporthelp"] then
+ application.export(environment.arguments["exporthelp"],environment.files[1])
elseif environment.arguments["help"] or (environment.files[1]=='help') or (#environment.files==0) then
application.help()
elseif environment.files[1] == 'texmfcnf.lua' then
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-cache.lua b/Master/texmf-dist/scripts/context/lua/mtx-cache.lua
index 08202bbf816..cd5512618d5 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-cache.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-cache.lua
@@ -7,11 +7,26 @@ if not modules then modules = { } end modules ['mtx-cache'] = {
}
local helpinfo = [[
---purge remove not used files
---erase completely remove cache
---list show cache
-
---all all (not yet implemented)
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-cache</entry>
+ <entry name="detail">ConTeXt &amp; MetaTeX Cache Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="purge"><short>remove not used files</short></flag>
+ <flag name="erase"><short>completely remove cache</short></flag>
+ <flag name="list"><short>show cache</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="all"><short>all (not yet implemented)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -114,6 +129,8 @@ elseif environment.argument("erase") then
scripts.cache.erase()
elseif environment.argument("list") then
scripts.cache.list()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-chars.lua b/Master/texmf-dist/scripts/context/lua/mtx-chars.lua
index ad2c499bef4..9f6852da245 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-chars.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-chars.lua
@@ -9,9 +9,23 @@ if not modules then modules = { } end modules ['mtx-chars'] = {
-- obsolete: --stix convert stix table to math table
local helpinfo = [[
---xtx generate xetx-*.tex (used by xetex)
---pdf generate pdfr-def.tex (used by pdftex)
---entities generate entities table
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-chars</entry>
+ <entry name="detail">MkII Character Table Generators</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="xtx"><short>generate xetx-*.tex (used by xetex)</short></flag>
+ <flag name="pdf"><short>generate pdfr-def.tex (used by pdftex)</short></flag>
+ <flag name="entities"><short>generate entities table</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -387,6 +401,8 @@ elseif environment.argument("xtx") then
scripts.chars.makeencoutf()
elseif environment.argument("pdf") then
scripts.chars.makepdfr()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-check.lua b/Master/texmf-dist/scripts/context/lua/mtx-check.lua
index 8307a02c6b2..9f52509ec7e 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-check.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-check.lua
@@ -11,7 +11,21 @@ local gsub, sub, format = string.gsub, string.sub, string.format
local insert, remove = table.insert, table.remove
local helpinfo = [[
---convert check tex file for errors
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-check</entry>
+ <entry name="detail">Basic ConTeXt Syntax Checking</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>check tex file for errors</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -229,6 +243,8 @@ if environment.argument("check") then
scripts.checker.check(environment.files[1])
elseif environment.argument("help") then
application.help()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
elseif environment.files[1] then
scripts.checker.check(environment.files[1])
else
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-colors.lua b/Master/texmf-dist/scripts/context/lua/mtx-colors.lua
index 2a51d50990e..7dd1b4ac4b0 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-colors.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-colors.lua
@@ -9,11 +9,29 @@ if not modules then modules = { } end modules ['mtx-colors'] = {
-- todo: fc-cache -v en check dirs, or better is: fc-cat -v | grep Directory
local helpinfo = [[
---table show icc table
-
-example:
-
-mtxrun --script color --table somename
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-colors</entry>
+ <entry name="detail">ConTeXt Color Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="table"><short>show icc table</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script color --table somename</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
]]
local application = logs.application {
@@ -52,6 +70,8 @@ end
if environment.argument("table") then
scripts.colors.table()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-context.lua b/Master/texmf-dist/scripts/context/lua/mtx-context.lua
index e07ecdfc7fe..add21fc90ff 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-context.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-context.lua
@@ -6,247 +6,157 @@ if not modules then modules = { } end modules ['mtx-context'] = {
license = "see context related readme files"
}
+-- todo: more local functions
+-- todo: pass jobticket/ctxdata table around
+
+local type, next, tostring, tonumber = type, next, tostring, tonumber
local format, gmatch, match, gsub, find = string.format, string.gmatch, string.match, string.gsub, string.find
-local quote = string.quote
+local quote, validstring = string.quote, string.valid
local concat = table.concat
+local settings_to_array = utilities.parsers.settings_to_array
+local appendtable = table.append
+local lpegpatterns, lpegmatch, Cs, P = lpeg.patterns, lpeg.match, lpeg.Cs, lpeg.P
-local getargument = environment.argument
-
-local basicinfo = [[
---run process (one or more) files (default action)
---make create context formats
-
---ctx=name use ctx file (process management specification)
---interface use specified user interface (default: en)
-
---autopdf close pdf file in viewer and start pdf viewer afterwards
---purge(all) purge files either or not after a run (--pattern=...)
-
---usemodule=list load the given module or style, normally part o fthe distribution
---environment=list load the given environment file first (document styles)
---mode=list enable given the modes (conditional processing in styles)
---path=list also consult the given paths when files are looked for
---arguments=list set variables that can be consulted during a run (key/value pairs)
---randomseed=number set the randomseed
---result=name rename the resulting output to the given name
---trackers=list set tracker variables (show list with --showtrackers)
---directives=list set directive variables (show list with --showdirectives)
---silent=list disable logcatgories (show list with --showlogcategories)
---noconsole disable logging to the console (logfile only)
---purgeresult purge result file before run
-
---forcexml force xml stub (optional flag: --mkii)
---forcecld force cld (context lua document) stub
-
---arrange run extra imposition pass, given that the style sets up imposition
---noarrange ignore imposition specifications in the style
-
---once only run once (no multipass data file is produced)
---batchmode run without stopping and don't show messages on the console
---nonstopmode run without stopping
-
---generate generate file database etc. (as luatools does)
---paranoid don't descend to .. and ../..
---version report installed context version
-
---global assume given file present elsewhere
-
---expert expert options
-]]
-
--- filter=list is kind of obsolete
--- color is obsolete for mkiv, always on
--- separation is obsolete for mkiv, no longer available
--- output is currently obsolete for mkiv
--- setuppath=list must check
--- modefile=name must check
--- input=name load the given inputfile (must check)
-
-local expertinfo = [[
-expert options:
-
---touch update context version number (remake needed afterwards, also provide --expert)
---nostats omit runtime statistics at the end of the run
---update update context from website (not to be confused with contextgarden)
---profile profile job (use: mtxrun --script profile --analyze)
---timing generate timing and statistics overview
-
---extra=name process extra (mtx-context-<name> in distribution)
---extras show extras
-]]
-
-local specialinfo = [[
-special options:
-
---pdftex process file with texexec using pdftex
---xetex process file with texexec using xetex
-
---pipe don't check for file and enter scroll mode (--dummyfile=whatever.tmp)
-]]
+local getargument = environment.getargument or environment.argument
+local setargument = environment.setargument
local application = logs.application {
name = "mtx-context",
- banner = "ConTeXt Process Management 0.52",
- helpinfo = {
- basic = basicinfo,
- extra = extrainfo,
- expert = expertinfo,
- }
+ banner = "ConTeXt Process Management 0.60",
+ -- helpinfo = helpinfo, -- table with { category_a = text_1, category_b = text_2 } or helpstring or xml_blob
+ helpinfo = "mtx-context.xml",
}
+-- local luatexflags = {
+-- ["8bit"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["default-translate-file"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["translate-file"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["etex"] = true, -- ignored, the etex extensions are always active
+--
+-- ["credits"] = true, -- display credits and exit
+-- ["debug-format"] = true, -- enable format debugging
+-- ["disable-write18"] = true, -- disable \write18{SHELL COMMAND}
+-- ["draftmode"] = true, -- switch on draft mode (generates no output PDF)
+-- ["enable-write18"] = true, -- enable \write18{SHELL COMMAND}
+-- ["file-line-error"] = true, -- enable file:line:error style messages
+-- ["file-line-error-style"] = true, -- aliases of --file-line-error
+-- ["no-file-line-error"] = true, -- disable file:line:error style messages
+-- ["no-file-line-error-style"] = true, -- aliases of --no-file-line-error
+-- ["fmt"] = true, -- load the format file FORMAT
+-- ["halt-on-error"] = true, -- stop processing at the first error
+-- ["help"] = true, -- display help and exit
+-- ["ini"] = true, -- be iniluatex, for dumping formats
+-- ["interaction"] = true, -- set interaction mode (STRING=batchmode/nonstopmode/scrollmode/errorstopmode)
+-- ["jobname"] = true, -- set the job name to STRING
+-- ["kpathsea-debug"] = true, -- set path searching debugging flags according to the bits of NUMBER
+-- ["lua"] = true, -- load and execute a lua initialization script
+-- ["mktex"] = true, -- enable mktexFMT generation (FMT=tex/tfm)
+-- ["no-mktex"] = true, -- disable mktexFMT generation (FMT=tex/tfm)
+-- ["nosocket"] = true, -- disable the lua socket library
+-- ["output-comment"] = true, -- use STRING for DVI file comment instead of date (no effect for PDF)
+-- ["output-directory"] = true, -- use existing DIR as the directory to write files in
+-- ["output-format"] = true, -- use FORMAT for job output; FORMAT is 'dvi' or 'pdf'
+-- ["parse-first-line"] = true, -- enable parsing of the first line of the input file
+-- ["no-parse-first-line"] = true, -- disable parsing of the first line of the input file
+-- ["progname"] = true, -- set the program name to STRING
+-- ["recorder"] = true, -- enable filename recorder
+-- ["safer"] = true, -- disable easily exploitable lua commands
+-- ["shell-escape"] = true, -- enable \write18{SHELL COMMAND}
+-- ["no-shell-escape"] = true, -- disable \write18{SHELL COMMAND}
+-- ["shell-restricted"] = true, -- restrict \write18 to a list of commands given in texmf.cnf
+-- ["synctex"] = true, -- enable synctex
+-- ["version"] = true, -- display version and exit
+-- ["luaonly"] = true, -- run a lua file, then exit
+-- ["luaconly"] = true, -- byte-compile a lua file, then exit
+-- ["jiton"] = false,
+-- }
+
local report = application.report
scripts = scripts or { }
scripts.context = scripts.context or { }
--- a demo cld file:
---
--- context.starttext()
--- context.chapter("Hello There")
--- context.readfile("tufte","","not found")
--- context.stoptext()
+-- for the moment here
--- l-file / todo
-
-function file.needsupdate(oldfile,newfile)
- return true
-end
-function file.syncmtimes(oldfile,newfile)
+if getargument("jit") or getargument("jiton") then
+ -- bonus shortcut, we assume that --jit also indicates the engine
+ -- although --jit and --engine=luajittex are independent
+ setargument("engine","luajittex")
end
--- l-io
+local engine_new = getargument("engine") or directives.value("system.engine")
+local engine_old = environment.ownbin
-function io.copydata(fromfile,tofile)
- io.savedata(tofile,io.loaddata(fromfile) or "")
+local function restart(engine_old,engine_new)
+ local command = format("%s --luaonly %q %s --redirected",engine_new,environment.ownname,environment.reconstructcommandline())
+ report(format("redirect %s -> %s: %s",engine_old,engine_new,command))
+ local result = os.execute(command)
+ os.exit(result)
end
--- ctx (will become util-ctx)
-
-local ctxrunner = { }
-
-function ctxrunner.filtered(str,method)
- str = tostring(str)
- if method == 'name' then str = file.removesuffix(file.basename(str))
- elseif method == 'path' then str = file.dirname(str)
- elseif method == 'suffix' then str = file.extname(str)
- elseif method == 'nosuffix' then str = file.removesuffix(str)
- elseif method == 'nopath' then str = file.basename(str)
- elseif method == 'base' then str = file.basename(str)
--- elseif method == 'full' then
--- elseif method == 'complete' then
--- elseif method == 'expand' then -- str = file.expandpath(str)
- end
- return str:gsub("\\","/")
+if getargument("redirected") then
+ setargument("engine",engine_old) -- later on we need this
+elseif engine_new == engine_old then
+ setargument("engine",engine_new) -- later on we need this
+elseif environment.validengines[engine_new] and engine_new ~= environment.basicengines[engine_old] then
+ restart(engine_old,engine_new)
+else
+ setargument("engine",engine_new) -- later on we need this
end
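
The block above decides whether the current engine can handle the job or whether mtx-context has to restart itself under another engine (typically luajittex when --jit is given). Below is a rough, standalone illustration of the command that restart() assembles; the script name and argument string are made-up example values standing in for environment.ownname and environment.reconstructcommandline():

local engine_new = "luajittex"                    -- e.g. requested via --jit
local ownname    = "mtxrun.lua"                   -- example value
local arguments  = "--script context myfile.tex"  -- example reconstructed command line
local command = string.format("%s --luaonly %q %s --redirected",
    engine_new, ownname, arguments)
print(command)
--> luajittex --luaonly "mtxrun.lua" --script context myfile.tex --redirected
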
-function ctxrunner.substitute(e,str)
- local attributes = e.at
- if str and attributes then
- if attributes['method'] then
- str = ctxrunner.filtered(str,attributes['method'])
- end
- if str == "" and attributes['default'] then
- str = attributes['default']
- end
- end
- return str
-end
+-- so far
-function ctxrunner.reflag(flags)
- local t = { }
- for _, flag in next, flags do
- local key, value = match(flag,"^(.-)=(.+)$")
- if key and value then
- t[key] = value
- else
- t[flag] = true
- end
- end
- return t
-end
+-- constants
-function ctxrunner.substitute(str)
- return str
-end
+local usedfiles = {
+ nop = "cont-nop.mkiv",
+ yes = "cont-yes.mkiv",
+}
-function ctxrunner.justtext(str)
- str = xml.unescaped(tostring(str))
- str = xml.cleansed(str)
- str = str:gsub("\\+",'/')
- str = str:gsub("%s+",' ')
- return str
-end
+local usedsuffixes = {
+ before = {
+ "tuc"
+ },
+ after = {
+ "pdf", "tuc", "log"
+ },
+ keep = {
+ "log"
+ },
+}
-function ctxrunner.new()
- return {
- ctxname = "",
- jobname = "",
- xmldata = nil,
- suffix = "prep",
- locations = { '..', '../..' },
- variables = { },
- messages = { },
- environments = { },
- modules = { },
- filters = { },
- flags = { },
- modes = { },
- prepfiles = { },
- paths = { },
- }
-end
+local formatofinterface = {
+ en = "cont-en",
+ uk = "cont-uk",
+ de = "cont-de",
+ fr = "cont-fr",
+ nl = "cont-nl",
+ cs = "cont-cs",
+ it = "cont-it",
+ ro = "cont-ro",
+ pe = "cont-pe",
+}
-function ctxrunner.savelog(ctxdata,ctlname)
- local function yn(b)
- if b then return 'yes' else return 'no' end
- end
- if not ctlname or ctlname == "" or ctlname == ctxdata.jobname then
- if ctxdata.jobname then
- ctlname = file.replacesuffix(ctxdata.jobname,'ctl')
- elseif ctxdata.ctxname then
- ctlname = file.replacesuffix(ctxdata.ctxname,'ctl')
- else
- report("invalid ctl name: %s",ctlname or "?")
- return
- end
- end
- local prepfiles = ctxdata.prepfiles
- if prepfiles and next(prepfiles) then
- report("saving logdata in: %s",ctlname)
- f = io.open(ctlname,'w')
- if f then
- f:write("<?xml version='1.0' standalone='yes'?>\n\n")
- f:write(format("<ctx:preplist local='%s'>\n",yn(ctxdata.runlocal)))
- local sorted = table.sortedkeys(prepfiles)
- for i=1,#sorted do
- local name = sorted[i]
- f:write(format("\t<ctx:prepfile done='%s'>%s</ctx:prepfile>\n",yn(prepfiles[name]),name))
- end
- f:write("</ctx:preplist>\n")
- f:close()
- end
- else
- report("nothing prepared, no ctl file saved")
- os.remove(ctlname)
- end
-end
+local defaultformats = {
+ "cont-en",
+ "cont-nl",
+}
-function ctxrunner.register_path(ctxdata,path)
- -- test if exists
- ctxdata.paths[ctxdata.paths+1] = path
-end
+-- process information
-function ctxrunner.trace(ctxdata)
- print(table.serialize(ctxdata.messages))
- print(table.serialize(ctxdata.flags))
- print(table.serialize(ctxdata.environments))
- print(table.serialize(ctxdata.modules))
- print(table.serialize(ctxdata.filters))
- print(table.serialize(ctxdata.modes))
- print(xml.tostring(ctxdata.xmldata))
+local ctxrunner = { } -- namespace will go
+
+local ctx_locations = { '..', '../..' }
+
+function ctxrunner.new()
+ return {
+ ctxname = "",
+ jobname = "",
+ flags = { },
+ }
end
-function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
+function ctxrunner.checkfile(ctxdata,ctxname,defaultname)
if not ctxdata.jobname or ctxdata.jobname == "" then
return
@@ -269,13 +179,14 @@ function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
local usedname = ctxdata.ctxname
local found = lfs.isfile(usedname)
- -- no futher test if qualified path
+ -- no further test if qualified path
if not found then
- for _, path in next, ctxdata.locations do
+ for _, path in next, ctx_locations do
local fullname = file.join(path,ctxdata.ctxname)
if lfs.isfile(fullname) then
- usedname, found = fullname, true
+ usedname = fullname
+ found = true
break
end
end
@@ -283,194 +194,70 @@ function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
if not found then
usedname = resolvers.findfile(ctxdata.ctxname,"tex")
- found = usedname ~= ""
+ found = usedname ~= ""
end
if not found and defaultname and defaultname ~= "" and lfs.isfile(defaultname) then
- usedname, found = defaultname, true
+ usedname = defaultname
+ found = true
end
if not found then
return
end
- ctxdata.xmldata = xml.load(usedname)
+ local xmldata = xml.load(usedname)
- if not ctxdata.xmldata then
+ if not xmldata then
return
else
-- test for valid, can be text file
end
- xml.include(ctxdata.xmldata,'ctx:include','name', table.append({'.', file.dirname(ctxdata.ctxname)},ctxdata.locations))
-
- ctxdata.variables['job'] = ctxdata.jobname
-
- ctxdata.flags = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:flags/ctx:flag",true)
- ctxdata.environments = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:environment",true)
- ctxdata.modules = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:module",true)
- ctxdata.filters = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:filter",true)
- ctxdata.modes = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:mode",true)
- ctxdata.messages = xml.collect_texts(ctxdata.xmldata,"ctx:message",true)
-
- ctxdata.flags = ctxrunner.reflag(ctxdata.flags)
-
- local messages = ctxdata.messages
- for i=1,#messages do
- report("ctx comment: %s", xml.tostring(messages[i]))
- end
-
- for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value[@name='job']") do
- d[k] = ctxdata.variables['job'] or ""
- end
-
- local commands = { }
- for e in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do
- commands[e.at and e.at['name'] or "unknown"] = e
- end
-
- local suffix = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/attribute('suffix')") or ctxdata.suffix
- local runlocal = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/attribute('local')")
+ local ctxpaths = table.append({'.', file.dirname(ctxdata.ctxname)}, ctx_locations)
- runlocal = toboolean(runlocal)
+ xml.include(xmldata,'ctx:include','name', ctxpaths)
- for files in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:files") do
- for pattern in xml.collected(files,"ctx:file") do
+ local flags = ctxdata.flags
- preprocessor = pattern.at['processor'] or ""
-
- if preprocessor ~= "" then
-
- ctxdata.variables['old'] = ctxdata.jobname
- for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value") do
- local ek = d[k]
- local ekat = ek.at['name']
- if ekat == 'old' then
- d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "")
- end
- end
-
- pattern = ctxrunner.justtext(xml.tostring(pattern))
-
- local oldfiles = dir.glob(pattern)
-
- local pluspath = false
- if #oldfiles == 0 then
- -- message: no files match pattern
- local paths = ctxdata.paths
- for i=1,#paths do
- local p = paths[i]
- local oldfiles = dir.glob(path.join(p,pattern))
- if #oldfiles > 0 then
- pluspath = true
- break
- end
- end
- end
- if #oldfiles == 0 then
- -- message: no old files
- else
- for i=1,#oldfiles do
- local oldfile = oldfiles[i]
- local newfile = oldfile .. "." .. suffix -- addsuffix will add one only
- if ctxdata.runlocal then
- newfile = file.basename(newfile)
- end
- if oldfile ~= newfile and file.needsupdate(oldfile,newfile) then
- -- message: oldfile needs preprocessing
- -- os.remove(newfile)
- local splitted = preprocessor:split(',')
- for i=1,#splitted do
- local pp = splitted[i]
- local command = commands[pp]
- if command then
- command = xml.copy(command)
- local suf = (command.at and command.at['suffix']) or ctxdata.suffix
- if suf then
- newfile = oldfile .. "." .. suf
- end
- if ctxdata.runlocal then
- newfile = file.basename(newfile)
- end
- for r, d, k in xml.elements(command,"ctx:old") do
- d[k] = ctxrunner.substitute(oldfile)
- end
- for r, d, k in xml.elements(command,"ctx:new") do
- d[k] = ctxrunner.substitute(newfile)
- end
- ctxdata.variables['old'] = oldfile
- ctxdata.variables['new'] = newfile
- for r, d, k in xml.elements(command,"ctx:value") do
- local ek = d[k]
- local ekat = ek.at and ek.at['name']
- if ekat then
- d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "")
- end
- end
- -- potential optimization: when mtxrun run internal
- command = xml.content(command)
- command = ctxrunner.justtext(command)
- report("command: %s",command)
- local result = os.spawn(command) or 0
- -- somehow we get the wrong return value
- if result > 0 then
- report("error, return code: %s",result)
- end
- if ctxdata.runlocal then
- oldfile = file.basename(oldfile)
- end
- end
- end
- if lfs.isfile(newfile) then
- file.syncmtimes(oldfile,newfile)
- ctxdata.prepfiles[oldfile] = true
- else
- report("error, check target location of new file: %s", newfile)
- ctxdata.prepfiles[oldfile] = false
- end
- else
- report("old file needs no preprocessing")
- ctxdata.prepfiles[oldfile] = lfs.isfile(newfile)
- end
- end
- end
- end
+ for e in xml.collected(xmldata,"/ctx:job/ctx:flags/ctx:flag") do
+ local flag = xml.text(e) or ""
+ local key, value = match(flag,"^(.-)=(.+)$")
+ if key and value then
+ flags[key] = value
+ else
+ flags[flag] = true
end
end
- ctxrunner.savelog(ctxdata)
-
end
-function ctxrunner.preppedfile(ctxdata,filename)
- if ctxdata.prepfiles[file.basename(filename)] then
- return filename .. ".prep"
- else
- return filename
+function ctxrunner.checkflags(ctxdata)
+ if ctxdata then
+ for k,v in next, ctxdata.flags do
+ if getargument(k) == nil then
+ setargument(k,v)
+ end
+ end
end
end
--- rest
+-- multipass control
-scripts.context.multipass = {
--- suffixes = { ".tuo", ".tuc" },
- suffixes = { ".tuc" },
- nofruns = 8,
--- nofruns = 7, -- test oscillation
-}
+local multipass_suffixes = { ".tuc" }
+local multipass_nofruns = 8 -- or 7 to test oscillation
-function scripts.context.multipass.hashfiles(jobname)
+local function multipass_hashfiles(jobname)
local hash = { }
- local suffixes = scripts.context.multipass.suffixes
- for i=1,#suffixes do
- local suffix = suffixes[i]
+ for i=1,#multipass_suffixes do
+ local suffix = multipass_suffixes[i]
local full = jobname .. suffix
hash[full] = md5.hex(io.loaddata(full) or "unknown")
end
return hash
end
-function scripts.context.multipass.changed(oldhash, newhash)
+local function multipass_changed(oldhash, newhash)
for k,v in next, oldhash do
if v ~= newhash[k] then
return true
@@ -479,126 +266,7 @@ function scripts.context.multipass.changed(oldhash, newhash)
return false
end
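
These multipass helpers drive the run loop further down: a job is rerun as long as the checksum of the multipass data file (.tuc) keeps changing, with multipass_nofruns as the safety limit. A minimal sketch of that idea, assuming the md5.hex and io.loaddata helpers from the ConTeXt Lua libraries used above (not the actual run loop):

local function tuc_checksum(jobname)
    return md5.hex(io.loaddata(jobname .. ".tuc") or "unknown")
end

local function run_until_stable(jobname, run_once)   -- run_once performs one luatex pass
    local old = tuc_checksum(jobname)
    for run = 1, 8 do                                 -- 8 == multipass_nofruns
        run_once(run)
        local new = tuc_checksum(jobname)
        if new == old then
            break                                     -- cross references etc. have stabilized
        end
        old = new
    end
end
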
-function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,currentrun,finalrun,once)
- -- take jobname from ctx
- jobname = file.removesuffix(jobname)
- local f = io.open(jobname..".top","w")
- if f then
- local function someflag(flag)
- return (ctxdata and ctxdata.flags[flag]) or getargument(flag)
- end
- local function setvalue(flag,template,hash,default)
- local a = someflag(flag) or default
- if a and a ~= "" then
- if hash then
- if hash[a] then
- f:write(format(template,a),"\n")
- end
- else
- f:write(format(template,a),"\n")
- end
- end
- end
- local function setvalues(flag,template,plural)
- if type(flag) == "table" then
- for k, v in next, flag do
- f:write(format(template,v),"\n")
- end
- else
- local a = someflag(flag) or (plural and someflag(flag.."s"))
- if a and a ~= "" then
- for v in gmatch(a,"%s*([^,]+)") do
- f:write(format(template,v),"\n")
- end
- end
- end
- end
- local function setfixed(flag,template,...)
- if someflag(flag) then
- f:write(format(template,...),"\n")
- end
- end
- local function setalways(template,...)
- f:write(format(template,...),"\n")
- end
- --
- -- This might change ... we can just pass the relevant flags directly.
- --
- setalways("%% runtime options files (command line driven)")
- --
- setalways("\\unprotect")
- --
- setalways("%% feedback and basic job control")
- --
- -- Option file, we can pass more on the commandline some day soon. Actually we
- -- should use directives and trackers.
- --
- setfixed ("timing" , "\\usemodule[timing]")
- setfixed ("batchmode" , "\\batchmode")
- setfixed ("batch" , "\\batchmode")
- setfixed ("nonstopmode" , "\\nonstopmode")
- setfixed ("nonstop" , "\\nonstopmode")
- -- setfixed ("tracefiles" , "\\tracefilestrue")
- setfixed ("nostats" , "\\nomkivstatistics")
- setfixed ("paranoid" , "\\def\\maxreadlevel{1}")
- --
- setalways("%% handy for special styles")
- --
- setalways("\\startluacode")
- setalways("document = document or { }")
- setalways(table.serialize(environment.arguments, "document.arguments"))
- setalways(table.serialize(environment.files, "document.files"))
- setalways("\\stopluacode")
- --
- setalways("%% process info")
- --
- setalways( "\\setupsystem[inputfile=%s]",getargument("input") or environment.files[1] or "\\jobname")
- setvalue ("result" , "\\setupsystem[file=%s]")
- setalways( "\\setupsystem[\\c!n=%s,\\c!m=%s]", kindofrun or 0, currentrun or 0)
- setvalues("path" , "\\usepath[%s]")
- setvalue ("setuppath" , "\\setupsystem[\\c!directory={%s}]")
- setvalue ("randomseed" , "\\setupsystem[\\c!random=%s]")
- setvalue ("arguments" , "\\setupenv[%s]")
- if once then
- setalways("\\enabledirectives[system.runonce]")
- end
- setalways("%% modes")
- setvalues("modefile" , "\\readlocfile{%s}{}{}")
- setvalues("mode" , "\\enablemode[%s]", true)
- if ctxdata then
- setvalues(ctxdata.modes, "\\enablemode[%s]")
- end
- --
- setalways("%% options (not that important)")
- --
- setalways("\\startsetups *runtime:options")
- setfixed ("color" , "\\setupcolors[\\c!state=\\v!start]")
- setvalue ("separation" , "\\setupcolors[\\c!split=%s]")
- setfixed ("noarrange" , "\\setuparranging[\\v!disable]")
- if getargument('arrange') and not finalrun then
- setalways( "\\setuparranging[\\v!disable]")
- end
- setalways("\\stopsetups")
- --
- setalways("%% styles and modules")
- --
- setalways("\\startsetups *runtime:modules")
- setvalues("usemodule" , "\\usemodule[%s]", true)
- setvalues("environment" , "\\environment %s ", true)
- if ctxdata then
- setvalues(ctxdata.modules, "\\usemodule[%s]")
- setvalues(ctxdata.environments, "\\environment %s ")
- end
- setalways("\\stopsetups")
- --
- setalways("%% done")
- --
- setalways("\\protect \\endinput")
- f:close()
- end
-end
-
-function scripts.context.multipass.copyluafile(jobname) -- obsolete
+local function multipass_copyluafile(jobname)
local tuaname, tucname = jobname..".tua", jobname..".tuc"
if lfs.isfile(tuaname) then
os.remove(tucname)
@@ -606,120 +274,59 @@ function scripts.context.multipass.copyluafile(jobname) -- obsolete
end
end
-scripts.context.cldsuffixes = table.tohash {
- "cld",
-}
-
-scripts.context.xmlsuffixes = table.tohash {
- "xml",
-}
-
-scripts.context.luasuffixes = table.tohash {
- "lua",
-}
-
-scripts.context.beforesuffixes = {
- "tuo", "tuc"
-}
-scripts.context.aftersuffixes = {
- "pdf", "tuo", "tuc", "log"
-}
-
-scripts.context.errorsuffixes = {
- "log"
-}
-
-scripts.context.interfaces = {
- en = "cont-en",
- uk = "cont-uk",
- de = "cont-de",
- fr = "cont-fr",
- nl = "cont-nl",
- cs = "cont-cs",
- it = "cont-it",
- ro = "cont-ro",
- pe = "cont-pe",
-}
-
-scripts.context.defaultformats = {
- "cont-en",
- "cont-nl",
--- "mptopdf", -- todo: mak emkiv variant
--- "metatex", -- will show up soon
--- "metafun", -- todo: mp formats
--- "plain"
-}
-
-local lpegpatterns, Cs, P = lpeg.patterns, lpeg.Cs, lpeg.P
+--
local pattern = lpegpatterns.utfbom^-1 * (P("%% ") + P("% ")) * Cs((1-lpegpatterns.newline)^1)
-local function analyze(filename) -- only files on current path
- local f = io.open(file.addsuffix(filename,"tex"))
- if f then
- local t = { }
- local line = f:read("*line") or ""
- local preamble = lpeg.match(pattern,line)
+local function preamble_analyze(filename) -- only files on current path
+ local t = { }
+ local line = io.loadlines(file.addsuffix(filename,"tex"))
+ if line then
+ local preamble = lpegmatch(pattern,line)
if preamble then
for key, value in gmatch(preamble,"(%S+)%s*=%s*(%S+)") do
t[key] = value
end
t.type = "tex"
- elseif line:find("^<?xml ") then
+ elseif find(line,"^<?xml ") then
t.type = "xml"
end
if t.nofruns then
- scripts.context.multipass.nofruns = t.nofruns
+ multipass_nofruns = t.nofruns
end
if not t.engine then
- t.engine = 'luatex'
- end
- f:close()
- return t
- end
-end
-
-local function makestub(wrap,template,filename,prepname)
- local stubname = file.replacesuffix(file.basename(filename),'run')
- local f = io.open(stubname,'w')
- if f then
- if wrap then
- f:write("\\starttext\n")
+ t.engine = environment.basicengines[engine_old] --'luatex'
end
- f:write(format(template,prepname or filename),"\n")
- if wrap then
- f:write("\\stoptext\n")
+ if t.engine ~= engine_old then -- hack
+ if environment.validengines[t.engine] and t.engine ~= environment.basicengines[engine_old] then
+ restart(engine_old,t.engine)
+ end
end
- f:close()
- filename = stubname
end
- return filename
+ return t
end
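
preamble_analyze() reads only the first line of the input file and turns a comment such as "% engine=luatex interface=en nofruns=3" into a key/value table; the values here are made up, while engine, interface, nofruns and jiton are the keys consulted elsewhere in this script. A standalone sketch using plain string matching instead of the lpeg pattern above:

local line = "% engine=luatex interface=en nofruns=3"   -- example first line
local t = {}
local preamble = line:match("^%%%s*(.-)%s*$")           -- simplified: the real pattern also accepts a BOM and "%% "
if preamble then
    for key, value in preamble:gmatch("(%S+)%s*=%s*(%S+)") do
        t[key] = value
    end
    t.type = "tex"
end
print(t.engine, t.interface, t.nofruns)                 --> luatex  en  3
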
---~ function scripts.context.openpdf(name)
---~ os.spawn(format('pdfopen --file "%s" 2>&1', file.replacesuffix(name,"pdf")))
---~ end
---~ function scripts.context.closepdf(name)
---~ os.spawn(format('pdfclose --file "%s" 2>&1', file.replacesuffix(name,"pdf")))
---~ end
+-- automatically opening and closing pdf files
-local pdfview -- delayed loading
+local pdfview -- delayed
-function scripts.context.openpdf(name,method)
+local function pdf_open(name,method)
pdfview = pdfview or dofile(resolvers.findfile("l-pdfview.lua","tex"))
pdfview.setmethod(method)
report(pdfview.status())
pdfview.open(file.replacesuffix(name,"pdf"))
end
-function scripts.context.closepdf(name,method)
+local function pdf_close(name,method)
pdfview = pdfview or dofile(resolvers.findfile("l-pdfview.lua","tex"))
pdfview.setmethod(method)
pdfview.close(file.replacesuffix(name,"pdf"))
end
-local function push_result_purge(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+-- result file handling
+
+local function result_push_purge(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname)
@@ -727,8 +334,8 @@ local function push_result_purge(oldbase,newbase)
end
end
-local function push_result_keep(oldbase,newbase)
- for _, suffix in next, scripts.context.beforesuffixes do
+local function result_push_keep(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.before do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
@@ -739,8 +346,8 @@ local function push_result_keep(oldbase,newbase)
end
end
-local function save_result_error(oldbase,newbase)
- for _, suffix in next, scripts.context.errorsuffixes do
+local function result_save_error(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.keep do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname) -- to be sure
@@ -748,8 +355,8 @@ local function save_result_error(oldbase,newbase)
end
end
-local function save_result_purge(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+local function result_save_purge(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname) -- to be sure
@@ -757,8 +364,8 @@ local function save_result_purge(oldbase,newbase)
end
end
-local function save_result_keep(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+local function result_save_keep(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
@@ -768,313 +375,375 @@ local function save_result_keep(oldbase,newbase)
end
end
-function scripts.context.run(ctxdata,filename)
- -- filename overloads environment.files
- local files = (filename and { filename }) or environment.files
- if ctxdata then
- -- todo: interface
- for k,v in next, ctxdata.flags do
- environment.setargument(k,v)
+-- executing luatex
+
+local function flags_to_string(flags,prefix) -- context flags get prepended by c:
+ local t = { }
+ for k, v in table.sortedhash(flags) do
+ if prefix then
+ k = format("c:%s",k)
+ end
+ if not v or v == "" or v == '""' then
+ -- no need to flag false
+ elseif v == true then
+ t[#t+1] = format('--%s',k)
+ elseif type(v) == "string" then
+ t[#t+1] = format('--%s=%s',k,quote(v))
+ else
+ t[#t+1] = format('--%s=%s',k,tostring(v))
+ end
+ end
+ return concat(t," ")
+end
+
+local function luatex_command(l_flags,c_flags,filename,engine)
+ return format('%s %s %s "%s"',
+ engine or "luatex",
+ flags_to_string(l_flags),
+ flags_to_string(c_flags,true),
+ filename
+ )
+end
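
flags_to_string() and luatex_command() replace the old option file: everything ends up on the luatex command line, with ConTeXt-specific flags prefixed by "c:" as noted in the comment above. A simplified, standalone sketch of the result (the flag values are made-up examples; the real code sorts the keys and uses string.quote):

local function to_flags(flags, prefix)
    local t = {}
    for k, v in pairs(flags) do
        if prefix then k = "c:" .. k end
        if v == true then
            t[#t+1] = "--" .. k
        elseif v and v ~= "" then
            t[#t+1] = string.format("--%s=%q", k, tostring(v))
        end
    end
    return table.concat(t, " ")
end

local l_flags = { fmt = "cont-en.fmt", interaction = "batchmode" }
local c_flags = { currentrun = 1, fulljobname = "demo.tex" }
print(string.format('%s %s %s "%s"', "luatex",
    to_flags(l_flags), to_flags(c_flags, true), "cont-yes.mkiv"))
-- something like (key order may differ):
-- luatex --fmt="cont-en.fmt" --interaction="batchmode" --c:currentrun="1" --c:fulljobname="demo.tex" "cont-yes.mkiv"
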
+
+local function run_texexec(filename,a_purge,a_purgeall)
+ if false then
+ -- we would also need to write a top file and run mp etc, so it's not worth the
+ -- trouble; it will take a while before this alternative is finished
+ --
+ -- context --extra=texutil --convert myfile
+ else
+ local texexec = resolvers.findfile("texexec.rb") or ""
+ if texexec ~= "" then
+ os.setenv("RUBYOPT","")
+ local options = environment.reconstructcommandline(environment.arguments_after)
+ options = gsub(options,"--purge","")
+ options = gsub(options,"--purgeall","")
+ local command = format("ruby %s %s",texexec,options)
+ report("running command: %s\n\n",command)
+ if a_purge then
+ os.execute(command)
+ scripts.context.purge_job(filename,false,true)
+ elseif a_purgeall then
+ os.execute(command)
+ scripts.context.purge_job(filename,true,true)
+ else
+ os.execute(command) -- we can use os.exec but that doesn't give back timing
+ end
end
end
- if #files > 0 then
+end
+
+--
+
+function scripts.context.run(ctxdata,filename)
+ --
+ local a_nofile = getargument("nofile")
+ local a_engine = getargument("engine")
+ --
+ local files = environment.files or { }
+ --
+ local filelist, mainfile
+ --
+ if filename then
+ -- the given forced name is processed, the filelist is passed to context
+ mainfile = filename
+ filelist = { filename }
+ -- files = files
+ elseif a_nofile then
+ -- the list of given files is processed using the dummy file
+ mainfile = usedfiles.nop
+ filelist = { usedfiles.nop }
+ -- files = { }
+ elseif #files > 0 then
+ -- the list of given files is processed using the stub file
+ mainfile = usedfiles.yes
+ filelist = files
+ files = { }
+ else
+ return
+ end
+ --
+ local interface = validstring(getargument("interface")) or "en"
+ local formatname = formatofinterface[interface] or "cont-en"
+ local formatfile, scriptfile = resolvers.locateformat(formatname) -- regular engine !
+ if not formatfile or not scriptfile then
+ report("warning: no format found, forcing remake (commandline driven)")
+ scripts.context.make(formatname)
+ formatfile, scriptfile = resolvers.locateformat(formatname) -- variant
+ end
+ if formatfile and scriptfile then
+ -- okay
+ elseif formatname then
+ report("error, no format found with name: %s, aborting",formatname)
+ return
+ else
+ report("error, no format found (provide formatname or interface)")
+ return
+ end
+ --
+ local a_mkii = getargument("mkii") or getargument("pdftex") or getargument("xetex")
+ local a_purge = getargument("purge")
+ local a_purgeall = getargument("purgeall")
+ local a_purgeresult = getargument("purgeresult")
+ local a_global = getargument("global")
+ local a_timing = getargument("timing")
+ local a_profile = getargument("profile")
+ local a_batchmode = getargument("batchmode")
+ local a_nonstopmode = getargument("nonstopmode")
+ local a_once = getargument("once")
+ local a_synctex = getargument("synctex")
+ local a_backend = getargument("backend")
+ local a_arrange = getargument("arrange")
+ local a_noarrange = getargument("noarrange")
+ local a_jiton = getargument("jiton")
+ --
+ a_batchmode = (a_batchmode and "batchmode") or (a_nonstopmode and "nonstopmode") or nil
+ a_synctex = tonumber(a_synctex) or (toboolean(a_synctex,true) and 1) or (a_synctex == "zipped" and 1) or (a_synctex == "unzipped" and -1) or nil
+ --
+ for i=1,#filelist do
--
- local interface = getargument("interface")
- -- todo: getargument("interface","en")
- interface = (type(interface) == "string" and interface) or "en"
+ local filename = filelist[i]
+ local basename = file.basename(filename)
+ local pathname = file.dirname(filename)
+ local jobname = file.removesuffix(basename)
+ local ctxname = ctxdata and ctxdata.ctxname
--
- local formatname = scripts.context.interfaces[interface] or "cont-en"
- local formatfile, scriptfile = resolvers.locateformat(formatname)
- -- this catches the command line
- if not formatfile or not scriptfile then
- report("warning: no format found, forcing remake (commandline driven)")
- scripts.context.make(formatname)
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ if pathname == "" and not a_global and filename ~= usedfiles.nop then
+ filename = "./" .. filename
end
--
- if formatfile and scriptfile then
- for i=1,#files do
- local filename = files[i]
- local basename, pathname = file.basename(filename), file.dirname(filename)
- local jobname = file.removesuffix(basename)
- if pathname == "" and not getargument("global") then
- filename = "./" .. filename
+ local analysis = preamble_analyze(filename)
+ --
+ if a_mkii or analysis.engine == 'pdftex' or analysis.engine == 'xetex' then
+ run_texexec(filename,a_purge,a_purgeall)
+ else
+ if analysis.interface and analysis.interface ~= interface then
+ formatname = formatofinterface[analysis.interface] or formatname
+ formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ --
+ a_jiton = (a_jiton or toboolean(analysis.jiton,true)) and true or nil
+ --
+ if not formatfile or not scriptfile then
+ report("warning: no format found, forcing remake (source driven)")
+ scripts.context.make(formatname,a_engine)
+ formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ if formatfile and scriptfile then
+ local suffix = validstring(getargument("suffix"))
+ local resultname = validstring(getargument("result"))
+ if suffix then
+ resultname = file.removesuffix(jobname) .. suffix
end
- -- look at the first line
- local a = analyze(filename)
- if a and (a.engine == 'pdftex' or a.engine == 'xetex' or getargument("pdftex") or getargument("xetex")) then
- if false then
- -- we need to write a top etc too and run mp etc so it's not worth the
- -- trouble, so it will take a while before the next is finished
- --
- -- context --extra=texutil --convert myfile
- else
- local texexec = resolvers.findfile("texexec.rb") or ""
- if texexec ~= "" then
- os.setenv("RUBYOPT","")
- local options = environment.reconstructcommandline(environment.arguments_after)
- options = gsub(options,"--purge","")
- options = gsub(options,"--purgeall","")
- local command = format("ruby %s %s",texexec,options)
- if getargument("purge") then
- os.execute(command)
- scripts.context.purge_job(filename,false,true)
- elseif getargument("purgeall") then
- os.execute(command)
- scripts.context.purge_job(filename,true,true)
- else
- os.exec(command)
- end
+ local oldbase = ""
+ local newbase = ""
+ if resultname then
+ oldbase = file.removesuffix(jobname)
+ newbase = file.removesuffix(resultname)
+ if oldbase ~= newbase then
+ if a_purgeresult then
+ result_push_purge(oldbase,newbase)
+ else
+ result_push_keep(oldbase,newbase)
end
+ else
+ resultname = nil
end
- else
- if a and a.interface and a.interface ~= interface then
- formatname = scripts.context.interfaces[a.interface] or formatname
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ --
+ local pdfview = getargument("autopdf") or getargument("closepdf")
+ if pdfview then
+ pdf_close(filename,pdfview)
+ if resultname then
+ pdf_close(resultname,pdfview)
end
- -- this catches the command line
- if not formatfile or not scriptfile then
- report("warning: no format found, forcing remake (source driven)")
- scripts.context.make(formatname)
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ --
+ -- we could do this when locating the format and exit from luatex when
+ -- there is a version mismatch .. that way we can use stock luatex
+ -- plus mtxrun to run luajittex instead .. this saves a restart but is
+ -- also cleaner as then mtxrun only has to check for a special return
+ -- code (signaling a make + rerun) .. maybe some day
+ --
+ local okay = statistics.checkfmtstatus(formatfile,a_engine)
+ if okay ~= true then
+ report("warning: %s, forcing remake",tostring(okay))
+ scripts.context.make(formatname)
+ end
+ --
+ local oldhash = multipass_hashfiles(jobname)
+ local newhash = { }
+ local maxnofruns = a_once and 1 or multipass_nofruns
+ --
+ local c_flags = {
+ directives = validstring(environment.directives), -- gets passed via mtxrun
+ trackers = validstring(environment.trackers), -- gets passed via mtxrun
+ experiments = validstring(environment.experiments), -- gets passed via mtxrun
+ --
+ result = validstring(resultname),
+ input = validstring(getargument("input") or filename), -- alternative input
+ fulljobname = validstring(filename),
+ files = concat(files,","),
+ ctx = validstring(ctxname),
+ }
+ --
+ for k, v in next, environment.arguments do
+ -- the raw arguments
+ if c_flags[k] == nil then
+ c_flags[k] = v
end
- if formatfile and scriptfile then
- -- we default to mkiv xml !
- -- the --prep argument might become automatic (and noprep)
- local suffix = file.extname(filename) or "?"
- if scripts.context.xmlsuffixes[suffix] or getargument("forcexml") then
- if getargument("mkii") then
- filename = makestub(true,"\\processXMLfilegrouped{%s}",filename)
- else
- filename = makestub(true,"\\xmlprocess{\\xmldocument}{%s}{}",filename)
- end
- elseif scripts.context.cldsuffixes[suffix] or getargument("forcecld") then
- -- self contained cld files need to have a starttext/stoptext (less fontloading)
- filename = makestub(false,"\\ctxlua{context.runfile('%s')}",filename)
- elseif scripts.context.luasuffixes[suffix] or getargument("forcelua") then
- filename = makestub(true,"\\ctxlua{dofile('%s')}",filename)
- elseif getargument("prep") then
- -- we need to keep the original jobname
- filename = makestub(true,"\\readfile{%s}{}{}",filename,ctxrunner.preppedfile(ctxdata,filename))
- end
- --
- -- todo: also other stubs
- --
- local suffix, resultname = getargument("suffix"), getargument("result")
- if type(suffix) == "string" then
- resultname = file.removesuffix(jobname) .. suffix
- end
- local oldbase, newbase = "", ""
- if type(resultname) == "string" then
- oldbase = file.removesuffix(jobname)
- newbase = file.removesuffix(resultname)
- if oldbase ~= newbase then
- if getargument("purgeresult") then
- push_result_purge(oldbase,newbase)
- else
- push_result_keep(oldbase,newbase)
- end
- else
- resultname = nil
- end
- else
- resultname = nil
- end
- --
- local pdfview = getargument("autopdf") or getargument("closepdf")
- if pdfview then
- scripts.context.closepdf(filename,pdfview)
- if resultname then
- scripts.context.closepdf(resultname,pdfview)
- end
- end
- --
- local okay = statistics.checkfmtstatus(formatfile)
- if okay ~= true then
- report("warning: %s, forcing remake",tostring(okay))
- scripts.context.make(formatname)
- end
- --
- local flags = { }
- if getargument("batchmode") or getargument("batch") then
- flags[#flags+1] = "--interaction=batchmode"
- end
- if getargument("synctex") then
- -- this should become a directive
- report("warning: synctex is enabled") -- can add upto 5% runtime
- flags[#flags+1] = "--synctex=1"
- end
- flags[#flags+1] = "--fmt=" .. quote(formatfile)
- flags[#flags+1] = "--lua=" .. quote(scriptfile)
- --
- -- We pass these directly.
- --
-
---~ local silent = getargument("silent")
---~ local noconsole = getargument("noconsole")
---~ local directives = getargument("directives")
---~ local trackers = getargument("trackers")
---~ if silent == true then
---~ silent = "*"
---~ end
---~ if type(silent) == "string" then
---~ if type(directives) == "string" then
---~ directives = format("%s,logs.blocked={%s}",directives,silent)
---~ else
---~ directives = format("logs.blocked={%s}",silent)
---~ end
---~ end
---~ if noconsole then
---~ if type(directives) == "string" then
---~ directives = format("%s,logs.target=file",directives)
---~ else
---~ directives = format("logs.target=file")
---~ end
---~ end
-
- local directives = environment.directives
- local trackers = environment.trackers
- local experiments = environment.experiments
-
- --
- if type(directives) == "string" then
- flags[#flags+1] = format('--directives="%s"',directives)
- end
- if type(trackers) == "string" then
- flags[#flags+1] = format('--trackers="%s"',trackers)
- end
- --
- local backend = getargument("backend")
- if type(backend) ~= "string" then
- backend = "pdf"
- end
- flags[#flags+1] = format('--backend="%s"',backend)
- --
- local command = format("luatex %s %s \\stoptext", concat(flags," "), quote(filename))
- local oldhash, newhash = scripts.context.multipass.hashfiles(jobname), { }
- local once = getargument("once")
- local maxnofruns = (once and 1) or scripts.context.multipass.nofruns
- local arrange = getargument("arrange")
- for i=1,maxnofruns do
- -- 1:first run, 2:successive run, 3:once, 4:last of maxruns
- local kindofrun = (once and 3) or (i==1 and 1) or (i==maxnofruns and 4) or 2
- scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,i,false,once) -- kindofrun, currentrun, final
- report("run %s: %s",i,command)
---~ print("\n") -- cleaner, else continuation on same line
- print("") -- cleaner, else continuation on same line
- local returncode, errorstring = os.spawn(command)
- --~ if returncode == 3 then
- --~ scripts.context.make(formatname)
- --~ returncode, errorstring = os.spawn(command)
- --~ if returncode == 3 then
- --~ report("ks: return code 3, message: %s",errorstring or "?")
- --~ os.exit(1)
- --~ end
- --~ end
- if not returncode then
- report("fatal error: no return code, message: %s",errorstring or "?")
- if resultname then
- save_result_error(oldbase,newbase)
- end
- os.exit(1)
- break
- elseif returncode > 0 then
- report("fatal error: return code: %s",returncode or "?")
- if resultname then
- save_result_error(oldbase,newbase)
- end
- os.exit(returncode)
- break
- else
- scripts.context.multipass.copyluafile(jobname)
- -- scripts.context.multipass.copytuifile(jobname)
- newhash = scripts.context.multipass.hashfiles(jobname)
- if scripts.context.multipass.changed(oldhash,newhash) then
- oldhash = newhash
- else
- break
- end
- end
- end
- --
- if arrange then
- local kindofrun = 3
- scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,i,true) -- kindofrun, currentrun, final
- report("arrange run: %s",command)
- local returncode, errorstring = os.spawn(command)
- if not returncode then
- report("fatal error: no return code, message: %s",errorstring or "?")
- os.exit(1)
- elseif returncode > 0 then
- report("fatal error: return code: %s",returncode or "?")
- os.exit(returncode)
- end
- end
- --
- if getargument("purge") then
- scripts.context.purge_job(jobname)
- elseif getargument("purgeall") then
- scripts.context.purge_job(jobname,true)
- end
- --
- os.remove(jobname..".top")
- --
+ end
+ --
+ --
+ local l_flags = {
+ ["interaction"] = a_batchmode,
+ ["synctex"] = a_synctex,
+ ["no-parse-first-line"] = true,
+ -- ["no-mktex"] = true,
+ -- ["file-line-error-style"] = true,
+ ["fmt"] = formatfile,
+ ["lua"] = scriptfile,
+ ["jobname"] = jobname,
+ ["jiton"] = a_jiton,
+ }
+ --
+ if a_synctex then
+ report("warning: synctex is enabled") -- can add upto 5% runtime
+ end
+ --
+ if not a_timing then
+ -- okay
+ elseif c_flags.usemodule then
+ c_flags.usemodule = format("timing,%s",c_flags.usemodule)
+ else
+ c_flags.usemodule = "timing"
+ end
+ --
+ if not a_profile then
+ -- okay
+ elseif c_flags.directives then
+ c_flags.directives = format("system.profile,%s",c_flags.directives)
+ else
+ c_flags.directives = "system.profile"
+ end
+ --
+ -- kindofrun: 1:first run, 2:successive run, 3:once, 4:last of maxruns
+ --
+ for currentrun=1,maxnofruns do
+ --
+ c_flags.final = false
+ c_flags.kindofrun = (a_once and 3) or (currentrun==1 and 1) or (currentrun==maxnofruns and 4) or 2
+ c_flags.maxnofruns = maxnofruns
+ c_flags.currentrun = currentrun
+ c_flags.noarrange = a_noarrange or a_arrange or nil
+ --
+ local command = luatex_command(l_flags,c_flags,mainfile,a_engine)
+ --
+ report("run %s: %s",i,command)
+ print("") -- cleaner, else continuation on same line
+ local returncode, errorstring = os.spawn(command)
+ if not returncode then
+ report("fatal error: no return code, message: %s",errorstring or "?")
if resultname then
- if getargument("purgeresult") then
- -- so, if there is no result then we don't get the old one, but
- -- related files (log etc) are still there for tracing purposes
- save_result_purge(oldbase,newbase)
- else
- save_result_keep(oldbase,newbase)
- end
- report("result renamed to: %s",newbase)
- end
- --
- if getargument("purge") then
- scripts.context.purge_job(resultname)
- elseif getargument("purgeall") then
- scripts.context.purge_job(resultname,true)
+ result_save_error(oldbase,newbase)
end
- --
- local pdfview = getargument("autopdf")
- if pdfview then
- scripts.context.openpdf(resultname or filename,pdfview)
- end
- --
- if getargument("timing") then
- report()
- report("you can process (timing) statistics with:",jobname)
- report()
- report("context --extra=timing '%s'",jobname)
- report("mtxrun --script timing --xhtml [--launch --remove] '%s'",jobname)
- report()
+ os.exit(1)
+ break
+ elseif returncode == 0 then
+ multipass_copyluafile(jobname)
+ newhash = multipass_hashfiles(jobname)
+ if multipass_changed(oldhash,newhash) then
+ oldhash = newhash
+ else
+ break
end
else
- if formatname then
- report("error, no format found with name: %s, skipping",formatname)
- else
- report("error, no format found (provide formatname or interface)")
+ report("fatal error: return code: %s",returncode or "?")
+ if resultname then
+ result_save_error(oldbase,newbase)
end
+ os.exit(1) -- (returncode)
break
end
+ --
+ end
+ --
+ if a_arrange then
+ --
+ c_flags.final = true
+ c_flags.kindofrun = 3
+ c_flags.currentrun = c_flags.currentrun + 1
+ c_flags.noarrange = nil
+ --
+ local command = luatex_command(l_flags,c_flags,mainfile,a_engine)
+ --
+ report("arrange run: %s",command)
+ local returncode, errorstring = os.spawn(command)
+ if not returncode then
+ report("fatal error: no return code, message: %s",errorstring or "?")
+ os.exit(1)
+ elseif returncode > 0 then
+ report("fatal error: return code: %s",returncode or "?")
+ os.exit(returncode)
+ end
+ --
+ end
+ --
+ if a_purge then
+ scripts.context.purge_job(jobname)
+ elseif a_purgeall then
+ scripts.context.purge_job(jobname,true)
+ end
+ --
+ if resultname then
+ if a_purgeresult then
+ -- so, if there is no result then we don't get the old one, but
+ -- related files (log etc) are still there for tracing purposes
+ result_save_purge(oldbase,newbase)
+ else
+ result_save_keep(oldbase,newbase)
+ end
+ report("result renamed to: %s",newbase)
+ end
+ --
+ if a_purge then
+ scripts.context.purge_job(resultname)
+ elseif a_purgeall then
+ scripts.context.purge_job(resultname,true)
+ end
+ --
+ local pdfview = getargument("autopdf")
+ if pdfview then
+ pdf_open(resultname or jobname,pdfview)
+ end
+ --
+ if a_timing then
+ report()
+ report("you can process (timing) statistics with:",jobname)
+ report()
+ report("context --extra=timing '%s'",jobname)
+ report("mtxrun --script timing --xhtml [--launch --remove] '%s'",jobname)
+ report()
end
- end
- else
- if formatname then
- report("error, no format found with name: %s, aborting",formatname)
else
- report("error, no format found (provide formatname or interface)")
+ if formatname then
+ report("error, no format found with name: %s, skipping",formatname)
+ else
+ report("error, no format found (provide formatname or interface)")
+ end
+ break
end
end
end
+ --
end
-function scripts.context.pipe()
+function scripts.context.pipe() -- still used?
-- context --pipe
-- context --pipe --purge --dummyfile=whatever.tmp
local interface = getargument("interface")
interface = (type(interface) == "string" and interface) or "en"
- local formatname = scripts.context.interfaces[interface] or "cont-en"
+ local formatname = formatofinterface[interface] or "cont-en"
local formatfile, scriptfile = resolvers.locateformat(formatname)
if not formatfile or not scriptfile then
report("warning: no format found, forcing remake (commandline driven)")
@@ -1087,11 +756,16 @@ function scripts.context.pipe()
report("warning: %s, forcing remake",tostring(okay))
scripts.context.make(formatname)
end
- local flags = {
- "--interaction=scrollmode",
- "--fmt=" .. quote(formatfile),
- "--lua=" .. quote(scriptfile),
- "--backend=pdf",
+ local l_flags = {
+ interaction = "scrollmode",
+ fmt = formatfile,
+ lua = scriptfile,
+ }
+ local c_flags = {
+ backend = "pdf",
+ final = false,
+ kindofrun = 3,
+ currentrun = 1,
}
local filename = getargument("dummyfile") or ""
if filename == "" then
@@ -1100,10 +774,9 @@ function scripts.context.pipe()
else
filename = file.addsuffix(filename,"tmp")
io.savedata(filename,"\\relax")
- scripts.context.multipass.makeoptionfile(filename,{ flags = flags },3,1,false) -- kindofrun, currentrun, final
report("entering scrollmode using '%s' with optionfile, end job with \\end",filename)
end
- local command = format("luatex %s %s", concat(flags," "), quote(filename))
+ local command = luatex_command(l_flags,c_flags,filename)
os.spawn(command)
if getargument("purge") then
scripts.context.purge_job(filename)
@@ -1120,14 +793,14 @@ function scripts.context.pipe()
end
end
-local make_mkiv_format = environment.make_format
+local function make_mkiv_format(name,engine)
+ environment.make_format(name) -- jit is picked up later
+end
local function make_mkii_format(name,engine)
- if getargument(engine) then
- local command = format("mtxrun texexec.rb --make --%s %s",name,engine)
- report("running command: %s",command)
- os.spawn(command)
- end
+ local command = format("mtxrun texexec.rb --make --%s %s",name,engine)
+ report("running command: %s",command)
+ os.spawn(command)
end
function scripts.context.generate()
@@ -1140,14 +813,20 @@ function scripts.context.make(name)
if not getargument("fast") then -- as in texexec
scripts.context.generate()
end
- local list = (name and { name }) or (environment.files[1] and environment.files) or scripts.context.defaultformats
+ local list = (name and { name }) or (environment.files[1] and environment.files) or defaultformats
+ local engine = getargument("engine") or "luatex"
+ if getargument("jit") or getargument("jiton") then
+ engine = "luajittex"
+ end
for i=1,#list do
local name = list[i]
- name = scripts.context.interfaces[name] or name or ""
- if name ~= "" then
- make_mkiv_format(name)
- make_mkii_format(name,"pdftex")
- make_mkii_format(name,"xetex")
+ name = formatofinterface[name] or name or ""
+ if name == "" then
+ -- nothing
+ elseif engine == "luatex" or engine == "luajittex" then
+ make_mkiv_format(name,engine)
+ elseif engine == "pdftex" or engine == "xetex" then
+ make_mkii_format(name,engine)
end
end
end
@@ -1155,68 +834,77 @@ end
function scripts.context.ctx()
local ctxdata = ctxrunner.new()
ctxdata.jobname = environment.files[1]
- ctxrunner.manipulate(ctxdata,getargument("ctx"))
+ ctxrunner.checkfile(ctxdata,getargument("ctx"))
+ ctxrunner.checkflags(ctxdata)
scripts.context.run(ctxdata)
end
function scripts.context.autoctx()
local ctxdata = nil
- local files = (filename and { filename }) or environment.files
+ local files = environment.files
local firstfile = #files > 0 and files[1]
- if firstfile and file.extname(firstfile) == "xml" then
- local f = io.open(firstfile)
- if f then
- local chunk = f:read(512) or ""
- f:close()
- local ctxname = match(chunk,"<%?context%-directive%s+job%s+ctxfile%s+([^ ]-)%s*?>")
- if ctxname then
- ctxdata = ctxrunner.new()
- ctxdata.jobname = firstfile
- ctxrunner.manipulate(ctxdata,ctxname)
+ if firstfile then
+ local suffix = file.suffix(firstfile)
+ if suffix == "xml" then
+ local chunk = io.loadchunk(firstfile) -- 1024
+ if chunk then
+ local ctxname = match(chunk,"<%?context%-directive%s+job%s+ctxfile%s+([^ ]-)%s*?>")
+ if ctxname then
+ ctxdata = ctxrunner.new()
+ ctxdata.jobname = firstfile
+ ctxrunner.checkfile(ctxdata,ctxname)
+ ctxrunner.checkflags(ctxdata)
+ end
end
+ elseif suffix == "tex" then
+ -- maybe but we scan the preamble later too
end
end
scripts.context.run(ctxdata)
end
-local template = [[
-\starttext
- \directMPgraphic{%s}{input "%s"}
-\stoptext
-]]
-
-local loaded = false
-
-function scripts.context.metapost()
- local filename = environment.files[1] or ""
- if not loaded then
- dofile(resolvers.findfile("mlib-run.lua"))
- loaded = true
- commands = commands or { }
- commands.writestatus = report -- no longer needed
- end
- local formatname = getargument("format") or "metafun"
- if formatname == "" or type(formatname) == "boolean" then
- formatname = "metafun"
- end
- if getargument("pdf") then
- local basename = file.removesuffix(filename)
- local resultname = getargument("result") or basename
- local jobname = "mtx-context-metapost"
- local tempname = file.addsuffix(jobname,"tex")
- io.savedata(tempname,format(template,"metafun",filename))
- environment.files[1] = tempname
- environment.setargument("result",resultname)
- environment.setargument("once",true)
- scripts.context.run()
- scripts.context.purge_job(jobname,true)
- scripts.context.purge_job(resultname,true)
- elseif getargument("svg") then
- metapost.directrun(formatname,filename,"svg")
- else
- metapost.directrun(formatname,filename,"mps")
- end
-end
+-- no longer ok as mlib-run misses something:
+
+-- local template = [[
+-- \starttext
+-- \directMPgraphic{%s}{input "%s"}
+-- \stoptext
+-- ]]
+--
+-- local loaded = false
+--
+-- function scripts.context.metapost()
+-- local filename = environment.files[1] or ""
+-- if not loaded then
+-- dofile(resolvers.findfile("mlib-run.lua"))
+-- loaded = true
+-- commands = commands or { }
+-- commands.writestatus = report -- no longer needed
+-- end
+-- local formatname = getargument("format") or "metafun"
+-- if formatname == "" or type(formatname) == "boolean" then
+-- formatname = "metafun"
+-- end
+-- if getargument("pdf") then
+-- local basename = file.removesuffix(filename)
+-- local resultname = getargument("result") or basename
+-- local jobname = "mtx-context-metapost"
+-- local tempname = file.addsuffix(jobname,"tex")
+-- io.savedata(tempname,format(template,"metafun",filename))
+-- environment.files[1] = tempname
+-- setargument("result",resultname)
+-- setargument("once",true)
+-- scripts.context.run()
+-- scripts.context.purge_job(jobname,true)
+-- scripts.context.purge_job(resultname,true)
+-- elseif getargument("svg") then
+-- metapost.directrun(formatname,filename,"svg")
+-- else
+-- metapost.directrun(formatname,filename,"mps")
+-- end
+-- end
+
+-- --
function scripts.context.version()
local name = resolvers.findfile("context.mkiv")
@@ -1238,6 +926,8 @@ function scripts.context.version()
end
end
+-- purging files
+
local generic_files = {
"texexec.tex", "texexec.tui", "texexec.tuo",
"texexec.tuc", "texexec.tua",
@@ -1253,7 +943,7 @@ local temporary_runfiles = {
"tui", "tua", "tup", "ted", "tes", "top",
"log", "tmp", "run", "bck", "rlg",
"mpt", "mpx", "mpd", "mpo", "mpb", "ctl",
- "synctex.gz", "pgf",
+ "synctex", "synctex.gz", "pgf",
"prep",
}
@@ -1262,7 +952,6 @@ local persistent_runfiles = {
}
local special_runfiles = {
---~ "-mpgraph*", "-mprun*", "-temp-*" -- hm, wasn't this escaped?
"-mpgraph", "-mprun", "-temp-"
}
@@ -1278,9 +967,6 @@ local function purge_file(dfile,cfile)
end
end
-local function remove_special_files(pattern)
-end
-
function scripts.context.purge_job(jobname,all,mkiitoo)
if jobname and jobname ~= "" then
jobname = file.basename(jobname)
@@ -1318,7 +1004,7 @@ function scripts.context.purge(all,pattern,mkiitoo)
local deleted = { }
for i=1,#files do
local name = files[i]
- local suffix = file.extname(name)
+ local suffix = file.suffix(name)
local basename = file.basename(name)
if obsolete[suffix] or temporary[suffix] or persistent[suffix] or generic[basename] then
deleted[#deleted+1] = purge_file(name)
@@ -1335,64 +1021,86 @@ function scripts.context.purge(all,pattern,mkiitoo)
end
end
-local function touch(name,pattern)
+-- touching files (signals regeneration of formats)
+
+local function touch(name,versionpattern,kind,kindpattern)
local name = resolvers.findfile(name)
local olddata = io.loaddata(name)
if olddata then
+ local oldkind, newkind = "", kind or ""
local oldversion, newversion = "", os.date("%Y.%m.%d %H:%M")
- local newdata, ok = olddata:gsub(pattern,function(pre,mid,post)
- oldversion = mid
- return pre .. newversion .. post
- end)
- if ok > 0 then
+ local newdata
+ if versionpattern then
+ newdata = gsub(olddata,versionpattern,function(pre,mid,post)
+ oldversion = mid
+ return pre .. newversion .. post
+ end) or olddata
+ end
+ if kind and kindpattern then
+ newdata = gsub(newdata,kindpattern,function(pre,mid,post)
+ oldkind = mid
+ return pre .. newkind .. post
+ end) or newdata
+ end
+ if newdata ~= "" and (oldversion ~= newversion or oldkind ~= newkind or newdata ~= olddata) then
local backup = file.replacesuffix(name,"tmp")
os.remove(backup)
os.rename(name,backup)
io.savedata(name,newdata)
- return true, oldversion, newversion, name
- else
- return false
+ return name, oldversion, newversion, oldkind, newkind
end
end
end
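
The rewritten touch() swaps the version (and optionally the \contextkind) inside the source files by means of a three-capture gsub: the first and last captures keep the surrounding text, only the middle one is replaced. A standalone example of that replacement using the p_contextversion pattern defined just below (the old version string is a made-up example):

local olddata    = [[\edef\contextversion{2013.03.01 10:00}]]   -- example content
local pattern    = "(\\edef\\contextversion%s*{)(.-)(})"
local newversion = os.date("%Y.%m.%d %H:%M")
local oldversion
local newdata = olddata:gsub(pattern, function(pre, mid, post)
    oldversion = mid
    return pre .. newversion .. post
end)
print(oldversion)   --> 2013.03.01 10:00
print(newdata)      --> \edef\contextversion{<current date and time>}
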
-local function touchfiles(suffix)
- local done, oldversion, newversion, foundname = touch(file.addsuffix("context",suffix),"(\\edef\\contextversion{)(.-)(})")
- if done then
- report("old version : %s", oldversion)
- report("new version : %s", newversion)
- report("touched file: %s", foundname)
- local ok, _, _, foundname = touch(file.addsuffix("cont-new",suffix), "(\\newcontextversion{)(.-)(})")
- if ok then
- report("touched file: %s", foundname)
+local p_contextkind = "(\\edef\\contextkind%s*{)(.-)(})"
+local p_contextversion = "(\\edef\\contextversion%s*{)(.-)(})"
+local p_newcontextversion = "(\\newcontextversion%s*{)(.-)(})"
+
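+-- A rough illustration of what touching does: with the version pattern above, a line
+-- in context.mkiv such as
+--
+--   \edef\contextversion{2013.03.29 01:00}
+--
+-- gets its braced content replaced by the current os.date("%Y.%m.%d %H:%M") stamp,
+-- after the original file has been renamed to a .tmp backup (the date shown here is
+-- only an example).
+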
+local function touchfiles(suffix,kind)
+ local foundname, oldversion, newversion, oldkind, newkind = touch(file.addsuffix("context",suffix),p_contextversion,kind,p_contextkind)
+ if foundname then
+ report("old version : %s (%s)",oldversion,oldkind)
+ report("new version : %s (%s)",newversion,newkind)
+ report("touched file : %s",foundname)
+ local foundname = touch(file.addsuffix("cont-new",suffix),p_newcontextversion)
+ if foundname then
+ report("touched file : %s", foundname)
end
end
end
function scripts.context.touch()
if getargument("expert") then
- touchfiles("mkii")
- touchfiles("mkiv")
- touchfiles("mkvi")
+ local touch = getargument("touch")
+ local kind = getargument("kind")
+ if touch == "mkii" or touch == "mkiv" or touch == "mkvi" then -- mkix mkxi
+ touchfiles(touch,kind)
+ else
+ touchfiles("mkii",kind)
+ touchfiles("mkiv",kind)
+ touchfiles("mkvi",kind)
+ end
+ else
+ report("touching needs --expert")
end
end
-- modules
local labels = { "title", "comment", "status" }
-local cards = { "*.mkvi", "*.mkiv", "*.tex" }
+local cards = { "*.mkvi", "*.mkiv", "*.mkxi", "*.mkix", "*.tex" }
function scripts.context.modules(pattern)
local list = { }
local found = resolvers.findfile("context.mkiv")
if not pattern or pattern == "" then
-- official files in the tree
- for _, card in ipairs(cards) do
- resolvers.findwildcardfiles(card,list)
+ for i=1,#cards do
+ resolvers.findwildcardfiles(cards[i],list)
end
-- my dev path
- for _, card in ipairs(cards) do
- dir.glob(file.join(file.dirname(found),card),list)
+ for i=1,#cards do
+ dir.glob(file.join(file.dirname(found),cards[i]),list)
end
else
resolvers.findwildcardfiles(pattern,list)
@@ -1405,7 +1113,7 @@ function scripts.context.modules(pattern)
if not done[base] then
done[base] = true
local suffix = file.suffix(base)
- if suffix == "tex" or suffix == "mkiv" or suffix == "mkvi" then
+ if suffix == "tex" or suffix == "mkiv" or suffix == "mkvi" or suffix == "mkix" or suffix == "mkxi" then
local prefix = match(base,"^([xmst])%-")
if prefix then
v = resolvers.findfile(base) -- so that files on my dev path are seen
@@ -1462,30 +1170,28 @@ end
function scripts.context.extra()
local extra = getargument("extra")
- if type(extra) == "string" then
- if getargument("help") then
- scripts.context.extras(extra)
+ if type(extra) ~= "string" then
+ scripts.context.extras()
+ elseif getargument("help") then
+ scripts.context.extras(extra)
+ else
+ local fullextra = extra
+ if not find(fullextra,"mtx%-context%-") then
+ fullextra = "mtx-context-" .. extra
+ end
+ local foundextra = resolvers.findfile(fullextra)
+ if foundextra == "" then
+ scripts.context.extras()
+ return
else
- local fullextra = extra
- if not find(fullextra,"mtx%-context%-") then
- fullextra = "mtx-context-" .. extra
- end
- local foundextra = resolvers.findfile(fullextra)
- if foundextra == "" then
- scripts.context.extras()
- return
- else
- report("processing extra: %s", foundextra)
- end
- environment.setargument("purgeall",true)
- local result = environment.setargument("result") or ""
- if result == "" then
- environment.setargument("result","context-extra")
- end
- scripts.context.run(nil,foundextra)
+ report("processing extra: %s", foundextra)
end
- else
- scripts.context.extras()
+ setargument("purgeall",true)
+ local result = getargument("result") or ""
+ if result == "" then
+ setargument("result","context-extra")
+ end
+ scripts.context.run(nil,foundextra)
end
end
@@ -1493,25 +1199,27 @@ end
function scripts.context.trackers()
environment.files = { resolvers.findfile("m-trackers.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
function scripts.context.directives()
environment.files = { resolvers.findfile("m-directives.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
function scripts.context.logcategories()
environment.files = { resolvers.findfile("m-logcategories.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
+-- updating (often one will use mtx-update instead)
+
function scripts.context.timed(action)
statistics.timed(action)
end
@@ -1548,7 +1256,7 @@ function scripts.context.update()
local function is_okay(basetree)
for _, tree in next, validtrees do
local pattern = gsub(tree,"%-","%%-")
- if basetree:find(pattern) then
+ if find(basetree,pattern) then
return tree
end
end
@@ -1614,7 +1322,7 @@ function scripts.context.update()
end
for k in zipfile:files() do
local filename = k.filename
- if filename:find("/$") then
+ if find(filename,"/$") then
lfs.mkdir(filename)
else
local data = zip.loaddata(zipfile,filename)
@@ -1652,6 +1360,23 @@ function scripts.context.update()
end
end
+-- getting it done
+
+if getargument("nostats") then
+ setargument("nostatistics",true)
+ setargument("nostat",nil)
+end
+
+if getargument("batch") then
+ setargument("batchmode",true)
+ setargument("batch",nil)
+end
+
+if getargument("nonstop") then
+ setargument("nonstopmode",true)
+ setargument("nonstop",nil)
+end
+
do
local silent = getargument("silent")
@@ -1664,17 +1389,12 @@ do
end
if getargument("once") then
- scripts.context.multipass.nofruns = 1
+ multipass_nofruns = 1
elseif getargument("runs") then
- scripts.context.multipass.nofruns = tonumber(getargument("runs")) or nil
-end
-
-if getargument("profile") then
- os.setenv("MTX_PROFILE_RUN","YES")
+ multipass_nofruns = tonumber(getargument("runs")) or nil
end
if getargument("run") then
--- scripts.context.timed(scripts.context.run)
scripts.context.timed(scripts.context.autoctx)
elseif getargument("make") then
scripts.context.timed(function() scripts.context.make() end)
@@ -1682,8 +1402,8 @@ elseif getargument("generate") then
scripts.context.timed(function() scripts.context.generate() end)
elseif getargument("ctx") then
scripts.context.timed(scripts.context.ctx)
-elseif getargument("mp") or getargument("metapost") then
- scripts.context.timed(scripts.context.metapost)
+-- elseif getargument("mp") or getargument("metapost") then
+-- scripts.context.timed(scripts.context.metapost)
elseif getargument("version") then
application.identify()
scripts.context.version()
@@ -1699,6 +1419,9 @@ elseif getargument("extras") then
scripts.context.extras(environment.files[1] or getargument("extras"))
elseif getargument("extra") then
scripts.context.extra()
+elseif getargument("exporthelp") then
+ -- application.export(getargument("exporthelp"),environment.files[1])
+ application.export()
elseif getargument("help") then
if environment.files[1] == "extras" then
scripts.context.extras()
@@ -1711,10 +1434,7 @@ elseif getargument("showdirectives") or getargument("directives") == true then
scripts.context.directives()
elseif getargument("showlogcategories") then
scripts.context.logcategories()
-elseif getargument("track") and type(getargument("track")) == "boolean" then -- for old times sake, will go
- scripts.context.trackers()
-elseif environment.files[1] then
--- scripts.context.timed(scripts.context.run)
+elseif environment.files[1] or getargument("nofile") then
scripts.context.timed(scripts.context.autoctx)
elseif getargument("pipe") then
scripts.context.timed(scripts.context.pipe)
@@ -1727,7 +1447,3 @@ elseif getargument("purgeall") then
else
application.help("basic")
end
-
-if getargument("profile") then
- os.setenv("MTX_PROFILE_RUN","NO")
-end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-context.xml b/Master/texmf-dist/scripts/context/lua/mtx-context.xml
new file mode 100644
index 00000000000..6eb8afeb105
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/lua/mtx-context.xml
@@ -0,0 +1,187 @@
+<?xml version="1.0" ?>
+
+<application>
+ <metadata>
+ <entry name="name">mtx-context</entry>
+ <entry name="detail">ConTeXt Process Management</entry>
+ <entry name="version">0.60</entry>
+ <entry name="comment">external helpinfo file</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="run">
+ <short>process (one or more) files (default action)</short>
+ </flag>
+ <flag name="make">
+ <short>create context formats</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="ctx=name">
+ <short>use ctx file (process management specification)</short>
+ </flag>
+ <flag name="interface">
+ <short>use specified user interface (default: en)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autopdf">
+ <short>close pdf file in viewer and start pdf viewer afterwards</short>
+ </flag>
+ <flag name="purge">
+ <short>purge files, whether or not after a run (<ref name="pattern"/>=...)</short>
+ </flag>
+ <flag name="purgeall">
+ <short>purge all files, whether or not after a run (<ref name="pattern"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usemodule" value="list">
+ <short>load the given module or style, normally part of the distribution</short>
+ </flag>
+ <flag name="environment" value="list">
+ <short>load the given environment file first (document styles)</short>
+ </flag>
+ <flag name="mode" value="list">
+ <short>enable the given modes (conditional processing in styles)</short>
+ </flag>
+ <flag name="path" value="list">
+ <short>also consult the given paths when files are looked for</short>
+ </flag>
+ <flag name="arguments" value="list">
+ <short>set variables that can be consulted during a run (key/value pairs)</short>
+ </flag>
+ <flag name="randomseed" value="number">
+ <short>set the randomseed</short>
+ </flag>
+ <flag name="result" value="name">
+ <short>rename the resulting output to the given name</short>
+ </flag>
+ <flag name="trackers" value="list">
+ <short>set tracker variables (show list with <ref name="showtrackers"/>)</short>
+ </flag>
+ <flag name="directives" value="list">
+ <short>set directive variables (show list with <ref name="showdirectives"/>)</short>
+ </flag>
+ <flag name="silent" value="list">
+ <short>disable logcategories (show list with <ref name="showlogcategories"/>)</short>
+ </flag>
+ <flag name="noconsole">
+ <short>disable logging to the console (logfile only)</short>
+ </flag>
+ <flag name="purgeresult">
+ <short>purge result file before run</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="forcexml">
+ <short>force xml stub</short>
+ </flag>
+ <flag name="forcecld">
+ <short>force cld (context lua document) stub</short>
+ </flag>
+ <flag name="forcelua">
+ <short>force lua stub (like texlua)</short>
+ </flag>
+ <flag name="forcemp">
+ <short>force mp stub</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="arrange">
+ <short>run extra imposition pass, given that the style sets up imposition</short>
+ </flag>
+ <flag name="noarrange">
+ <short>ignore imposition specifications in the style</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="jit">
+ <short>use luajittex with jit turned off (only use the faster virtual machine)</short>
+ </flag>
+ <flag name="jiton">
+ <short>use luajittex with jit turned on (in most cases not faster, even slower)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="once">
+ <short>only run once (no multipass data file is produced)</short>
+ </flag>
+ <flag name="batchmode">
+ <short>run without stopping and do not show messages on the console</short>
+ </flag>
+ <flag name="nonstopmode">
+ <short>run without stopping</short>
+ </flag>
+ <flag name="synctex">
+ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate">
+ <short>generate file database etc. (as luatools does)</short>
+ </flag>
+ <flag name="paranoid">
+ <short>do not descend to .. and ../..</short>
+ </flag>
+ <flag name="version">
+ <short>report installed context version</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="global">
+ <short>assume given file present elsewhere</short>
+ </flag>
+ <flag name="nofile">
+ <short>use dummy file as jobname</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="expert">
+ <subcategory>
+ <flag name="touch">
+ <short>update context version number (remake needed afterwards, also provide <ref name="expert"/>)</short>
+ </flag>
+ <flag name="nostatistics">
+ <short>omit runtime statistics at the end of the run</short>
+ </flag>
+ <flag name="update">
+ <short>update context from website (not to be confused with contextgarden)</short>
+ </flag>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ </flag>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="extra=name">
+ <short>process extra (mtx-context-... in distribution)</short>
+ </flag>
+ <flag name="extras">
+ <short>show extras</short>
+ </flag>
+ </subcategory>
+ </category>
+ <category name="special">
+ <subcategory>
+ <flag name="pdftex">
+ <short>process file with texexec using pdftex</short>
+ </flag>
+ <flag name="xetex">
+ <short>process file with texexec using xetex</short>
+ </flag>
+ <flag name="mkii">
+ <short>process file with texexec</short>
+ </flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pipe">
+ <short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
+ </flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-convert.lua b/Master/texmf-dist/scripts/context/lua/mtx-convert.lua
index b4e6e010bf8..b76b3baaf62 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-convert.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-convert.lua
@@ -9,12 +9,26 @@ if not modules then modules = { } end modules ['mtx-convert'] = {
-- todo: eps and svg
local helpinfo = [[
---convertall convert all graphics on path
---inputpath=string original graphics path
---outputpath=string converted graphics path
---watch watch folders
---force force conversion (even if older)
---delay time between sweeps
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-convert</entry>
+ <entry name="detail">ConTeXT Graphic Conversion Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convertall"><short>convert all graphics on path</short></flag>
+ <flag name="inputpath" value="string"><short>original graphics path</short></flag>
+ <flag name="outputpath" value="string"><short>converted graphics path</short></flag>
+ <flag name="watch"><short>watch folders</short></flag>
+ <flag name="force"><short>force conversion (even if older)</short></flag>
+ <flag name="delay"><short>time between sweeps</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -34,7 +48,7 @@ local convert = scripts.convert
convert.converters = convert.converters or { }
local converters = convert.converters
-local gsprogram = (os.type == "windows" and "gswin32c") or "gs"
+local gsprogram = os.type == "windows" and "gswin32c" or "gs"
local gstemplate_eps = "%s -q -sDEVICE=pdfwrite -dPDFSETTINGS=/prepress -dEPSCrop -dNOPAUSE -dSAFER -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit"
local gstemplate_ps = "%s -q -sDEVICE=pdfwrite -dPDFSETTINGS=/prepress -dNOPAUSE -dSAFER -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit"
@@ -83,7 +97,7 @@ function converters.convertpath(inputpath,outputpath)
inputpath = inputpath or "."
outputpath = outputpath or "."
for name in lfs.dir(inputpath) do
- local suffix = file.extname(name)
+ local suffix = file.suffix(name)
if find(name,"%.$") then
-- skip . and ..
elseif converters[suffix] then
@@ -102,7 +116,7 @@ function converters.convertpath(inputpath,outputpath)
end
function converters.convertfile(oldname)
- local suffix = file.extname(oldname)
+ local suffix = file.suffix(oldname)
if converters[suffix] then
local newname = file.replacesuffix(oldname,"pdf")
if oldname == newname then
@@ -154,6 +168,8 @@ if environment.arguments.convertall then
convert.convertall()
elseif environment.files[1] then
convert.convertgiven()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-epub.lua b/Master/texmf-dist/scripts/context/lua/mtx-epub.lua
index 7d1c157749f..11f0a202453 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-epub.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-epub.lua
@@ -11,8 +11,8 @@ if not modules then modules = { } end modules ['mtx-epub'] = {
-- really an id but has some special property). Then there is this ncx suffix
-- thing. Somehow it gives the impression of a reverse-engineered application
-- format so it will probably take a few cycles to let it become a real
--- clean standard. Thanks to Adam Reviczky for helping to figure out all these
--- puzzling details.
+-- clean standard. Thanks to Adam Reviczky, Luigi Scarso and Andy Thomas for
+-- helping to figure out all the puzzling details.
-- This is preliminary code. At some point we will deal with images as well but
-- first we need a decent strategy to export them. More information will be
@@ -22,16 +22,34 @@ local format, gsub = string.format, string.gsub
local concat = table.concat
local helpinfo = [[
---make create epub zip file
-
-example:
-
-mtxrun --script epub --make mydocument
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-epub</entry>
+ <entry name="detail">ConTeXt EPUB Helpers</entry>
+ <entry name="version">0.12</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="make"><short>create epub zip file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script epub --make mydocument</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
]]
local application = logs.application {
name = "mtx-epub",
- banner = "ConTeXt EPUB Helpers 0.11",
+ banner = "ConTeXt EPUB Helpers 0.12",
helpinfo = helpinfo,
}
@@ -43,26 +61,27 @@ scripts.epub = scripts.epub or { }
local mimetype = "application/epub+zip"
local container = [[
-<?xml version="1.0" encoding="UTF-8" ?>
+<?xml version="1.0" encoding="UTF-8"?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
<rootfiles>
- <rootfile full-path="OPS/%s" media-type="application/oebps-package+xml"/>
+ <rootfile full-path="OEBPS/%s" media-type="application/oebps-package+xml"/>
</rootfiles>
</container>
]]
local package = [[
-<?xml version="1.0"?>
+<?xml version="1.0" encoding="UTF-8"?>
<package version="2.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="%s">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf">
- <dc:title>My Title</dc:title>
- <dc:language>en</dc:language>
- <dc:identifier id="%s" >urn:uuid:%s</dc:identifier>
- <dc:creator opf:file-as="Self, My" opf:role="aut">MySelf</dc:creator>
+ <dc:title>%s</dc:title>
+ <dc:language>%s</dc:language>
+ <dc:identifier id="%s" opf:scheme="UUID">urn:uuid:%s</dc:identifier>
+ <dc:creator>%s</dc:creator>
<dc:date>%s</dc:date>
+ <meta name="cover" content="%s" />
</metadata>
<manifest>
@@ -70,13 +89,14 @@ local package = [[
</manifest>
<spine toc="ncx">
+ <itemref idref="cover-xhtml" />
<itemref idref="%s" />
</spine>
</package>
]]
-local item = [[ <item id='%s' href='%s' media-type='%s'/>]]
+local item = [[ <item id="%s" href="%s" media-type="%s"/>]]
local toc = [[
<?xml version="1.0"?>
@@ -108,6 +128,23 @@ local toc = [[
</ncx>
]]
+local coverxhtml = [[
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <title>cover.xhtml</title>
+ </head>
+ <body>
+ <div>
+ <img src="%s" alt="The cover image" style="max-width: 100%%;" />
+ </div>
+ </body>
+</html>
+]]
+
-- We need to figure out what is permitted. Numbers only seem to give
-- problems in some applications as do names with dashes. Also the
-- optional toc is supposed to be there and although id's are by
@@ -117,7 +154,7 @@ local toc = [[
local function dumbid(filename)
-- return (string.gsub(os.uuid(),"%-%","")) -- to be tested
- return file.nameonly(filename) .. "-" .. file.extname(filename)
+ return file.nameonly(filename) .. "-" .. file.suffix(filename)
end
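+
+-- a rough example, assuming the default id maker falls back to dumbid: for the cover
+-- page dumbid("cover.xhtml") gives "cover-xhtml", which matches the idref that the
+-- spine in the package template hardcodes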
local mimetypes = {
@@ -128,6 +165,7 @@ local mimetypes = {
png = "image/png",
jpg = "image/jpeg",
ncx = "application/x-dtbncx+xml",
+ gif = "image/gif",
-- default = "text/plain",
}
@@ -194,9 +232,21 @@ function scripts.epub.make()
local files = specification.files or { file.addsuffix(filename,"xhtml") }
local images = specification.images or { }
local root = specification.root or files[1]
+ local language = specification.language or "en"
+ local creator = specification.author or "My Self"
+ local title = specification.title or "My Title"
+ local firstpage = specification.firstpage or ""
+ local lastpage = specification.lastpage or ""
-- identifier = gsub(identifier,"[^a-zA-z0-9]","")
+ if firstpage ~= "" then
+ images[firstpage] = firstpage
+ end
+ if lastpage ~= "" then
+ images[lastpage] = lastpage
+ end
+
identifier = "BookId" -- weird requirement
local epubname = name
@@ -204,11 +254,12 @@ function scripts.epub.make()
local epubfile = file.replacesuffix(name,"epub")
local epubroot = file.replacesuffix(name,"opf")
local epubtoc = "toc.ncx"
+ local epubcover = "cover.xhtml"
application.report("creating paths in tree %s",epubpath)
lfs.mkdir(epubpath)
lfs.mkdir(file.join(epubpath,"META-INF"))
- lfs.mkdir(file.join(epubpath,"OPS"))
+ lfs.mkdir(file.join(epubpath,"OEBPS"))
local used = { }
@@ -217,13 +268,14 @@ function scripts.epub.make()
local mime = mimetypes[suffix]
if mime then
local idmaker = idmakers[suffix] or idmakers.default
- local target = file.join(epubpath,"OPS",filename)
+ local target = file.join(epubpath,"OEBPS",filename)
file.copy(filename,target)
application.report("copying %s to %s",filename,target)
used[#used+1] = format(item,idmaker(filename),filename,mime)
end
end
+ copyone("cover.xhtml")
copyone("toc.ncx")
local function copythem(files)
@@ -241,7 +293,7 @@ function scripts.epub.make()
for k, v in table.sortedpairs(images) do
theimages[#theimages+1] = k
- if not lfs.isfile(k) and file.extname(k) == "svg" and file.extname(v) == "pdf" then
+ if not lfs.isfile(k) and file.suffix(k) == "svg" and file.suffix(v) == "pdf" then
local command = format("inkscape --export-plain-svg=%s %s",k,v)
application.report("running command '%s'\n\n",command)
os.execute(command)
@@ -250,33 +302,52 @@ function scripts.epub.make()
copythem(theimages)
- local idmaker = idmakers[file.extname(root)] or idmakers.default
-
- container = format(container,epubroot)
- package = format(package,identifier,identifier,os.uuid(),os.date("!%Y-%m-%dT%H:%M:%SZ"),concat(used,"\n"),idmaker(root))
- toc = format(toc,identifier,"title",root)
+ local idmaker = idmakers[file.suffix(root)] or idmakers.default
+
+ container = format(container,
+ epubroot
+ )
+ package = format(package,
+ identifier,
+ title,
+ language,
+ identifier,
+ os.uuid(),
+ creator,
+ os.date("!%Y-%m-%dT%H:%M:%SZ"),
+ idmaker(firstpage),
+ concat(used,"\n"),
+ idmaker(root)
+ )
+ toc = format(toc,
+ identifier,
+ title,
+ root
+ )
+ coverxhtml = format(coverxhtml,
+ firstpage
+ )
io.savedata(file.join(epubpath,"mimetype"),mimetype)
io.savedata(file.join(epubpath,"META-INF","container.xml"),container)
- io.savedata(file.join(epubpath,"OPS",epubroot),package)
- io.savedata(file.join(epubpath,"OPS",epubtoc),toc)
+ io.savedata(file.join(epubpath,"OEBPS",epubroot),package)
+ io.savedata(file.join(epubpath,"OEBPS",epubtoc),toc)
+ io.savedata(file.join(epubpath,"OEBPS",epubcover),coverxhtml)
application.report("creating archive\n\n")
- local done = false
- local list = { }
-
lfs.chdir(epubpath)
os.remove(epubfile)
+ local done = false
+
for i=1,#zippers do
local zipper = zippers[i]
if os.execute(format(zipper.uncompressed,epubfile,"mimetype")) then
os.execute(format(zipper.compressed,epubfile,"META-INF"))
- os.execute(format(zipper.compressed,epubfile,"OPS"))
+ os.execute(format(zipper.compressed,epubfile,"OEBPS"))
done = zipper.name
- else
- list[#list+1] = zipper.name
+ break
end
end
@@ -285,6 +356,10 @@ function scripts.epub.make()
if done then
application.report("epub archive made using %s: %s",done,file.join(epubpath,epubfile))
else
+ local list = { }
+ for i=1,#zippers do
+ list[#list+1] = zippers[i].name
+ end
application.report("no epub archive made, install one of: %s",concat(list," "))
end
@@ -296,6 +371,8 @@ end
if environment.argument("make") then
scripts.epub.make()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-fcd.lua b/Master/texmf-dist/scripts/context/lua/mtx-fcd.lua
new file mode 100644
index 00000000000..ba9299020ba
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/lua/mtx-fcd.lua
@@ -0,0 +1,386 @@
+if not modules then modules = { } end modules ['mtx-fcd'] = {
+ version = 1.002,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ comment = "based on the ruby version from 2005",
+}
+
+-- This is a kind of variant of the good old ncd (norton change directory) program. This
+-- script uses the same indirect cmd trick as Erwin Waterlander's wcd program.
+--
+-- The program is called via the stubs fcd.cmd or fcd.sh. On unix one should probably source
+-- the file: ". fcd args" in order to make the chdir persistent.
+--
+-- You need to create a stub with:
+--
+-- mtxrun --script fcd --stub > fcd.cmd
+-- mtxrun --script fcd --stub > fcd.sh
+--
+-- The stub starts this script and afterwards runs the created directory change script as
+-- part of the same run, so that the directory change actually takes effect.
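+--
+-- As a rough sketch, after a successful match the generated goto file (mtx-fcd-goto.cmd
+-- on windows) contains a single line such as
+--
+--   cd /d "c:\data\projects\whatever"
+--
+-- with a made-up path here; when nothing is chosen it only holds the dummy line
+-- ("rem no dir to change to" on windows, "# no dir to change to" elsewhere).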
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-fcd</entry>
+ <entry name="detail">Fast Directory Change</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="clear"><short>clear the cache</short></flag>
+ <flag name="clear"><short><ref name="history"/> [entry] clear the history</short></flag>
+ <flag name="scan"><short>clear the cache and add given path(s)</short></flag>
+ <flag name="add"><short>add given path(s)</short></flag>
+ <flag name="find"><short>find given path (can be substring)</short></flag>
+ <flag name="find"><short><ref name="nohistory"/> find given path (can be substring) but don't use history</short></flag>
+ <flag name="stub"><short>print platform stub file</short></flag>
+ <flag name="list"><short>show roots of cached dirs</short></flag>
+ <flag name="list"><short><ref name="history"/> show history of chosen dirs</short></flag>
+ <flag name="help"><short>show this help</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>fcd --scan t:\</command></example>
+ <example><command>fcd --add f:\project</command></example>
+ <example><command>fcd [--find] whatever</command></example>
+ <example><command>fcd --list</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
+]]
+
+local application = logs.application {
+ name = "mtx-fcd",
+ banner = "Fast Directory Change 1.00",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+local writeln = print -- texio.write_nl
+
+local find, char, byte, lower, gsub, format = string.find, string.char, string.byte, string.lower, string.gsub, string.format
+
+local mswinstub = [[@echo off
+
+rem this is: fcd.cmd
+
+@echo off
+
+if not exist "%HOME%" goto homepath
+
+:home
+
+mtxrun --script mtx-fcd.lua %1 %2 %3 %4 %5 %6 %7 %8 %9
+
+if exist "%HOME%\mtx-fcd-goto.cmd" call "%HOME%\mtx-fcd-goto.cmd"
+
+goto end
+
+:homepath
+
+if not exist "%HOMEDRIVE%\%HOMEPATH%" goto end
+
+mtxrun --script mtx-fcd.lua %1 %2 %3 %4 %5 %6 %7 %8 %9
+
+if exist "%HOMEDRIVE%\%HOMEPATH%\mtx-fcd-goto.cmd" call "%HOMEDRIVE%\%HOMEPATH%\mtx-fcd-goto.cmd"
+
+goto end
+
+:end
+]]
+
+local unixstub = [[#!/usr/bin/env sh
+
+# this is: fcd.sh
+
+# mv fcd.sh fcd
+# chmod 755 fcd
+# . fcd [args]
+
+ruby -S fcd_start.rb $1 $2 $3 $4 $5 $6 $7 $8 $9
+
+if test -f "$HOME/fcd_stage.sh" ; then
+ . $HOME/fcd_stage.sh ;
+fi;
+
+]]
+
+local gotofile
+local datafile
+local stubfile
+local stubdata
+local stubdummy
+local stubchdir
+
+if os.platform == 'mswin' then
+ gotofile = 'mtx-fcd-goto.cmd'
+ datafile = 'mtx-fcd-data.lua'
+ stubfile = 'fcd.cmd'
+ stubdata = mswinstub
+ stubdummy = 'rem no dir to change to'
+ stubchdir = 'cd /d "%s"'
+else
+ gotofile = 'mtx-fcd-goto.sh'
+ datafile = 'mtx-fcd-data.lua'
+ stubfile = 'fcd.sh'
+ stubdata = unixstub
+ stubdummy = '# no dir to change to'
+ stubchdir = '# cd "%s"'
+end
+
+local homedir = os.env["HOME"] or "" -- no longer TMP etc
+
+if homedir == "" then
+ homedir = format("%s/%s",os.env["HOMEDRIVE"] or "",os.env["HOMEPATH"] or "")
+end
+
+if homedir == "/" or not lfs.isdir(homedir) then
+ os.exit()
+end
+
+local datafile = file.join(homedir,datafile)
+local gotofile = file.join(homedir,gotofile)
+local hash = nil
+local found = { }
+local pattern = ""
+local version = modules['mtx-fcd'].version
+
+io.savedata(gotofile,stubdummy)
+
+if not lfs.isfile(gotofile) then
+ -- write error
+ os.exit()
+end
+
+local function fcd_clear(onlyhistory,what)
+ if onlyhistory and hash and hash.history then
+ if what and what ~= "" then
+ hash.history[what] = nil
+ else
+ hash.history = { }
+ end
+ else
+ hash = {
+ name = "fcd cache",
+ comment = "generated by mtx-fcd.lua",
+ created = os.date(),
+ version = version,
+ paths = { },
+ history = { },
+ }
+ end
+end
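+
+-- a small sketch of the cache that ends up serialized in mtx-fcd-data.lua after a scan
+-- and a few lookups; the paths are made up:
+--
+--   paths = {
+--       { "f:/project", { "f:/project/one", "f:/project/one/fig" } },
+--   },
+--   history = {
+--       ["fig"] = "f:/project/one/fig",
+--   },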
+
+local function fcd_changeto(dir)
+ if dir and dir ~= "" then
+ io.savedata(gotofile,format(stubchdir,dir))
+ end
+end
+
+local function fcd_load(forcecreate)
+ if lfs.isfile(datafile) then
+ hash = dofile(datafile)
+ end
+ if not hash or hash.version ~= version then
+ if forcecreate then
+ fcd_clear()
+ else
+ writeln("empty dir cache")
+ fcd_clear()
+ os.exit()
+ end
+ end
+end
+
+local function fcd_save()
+ if hash then
+ io.savedata(datafile,table.serialize(hash,true))
+ end
+end
+
+local function fcd_list(onlyhistory)
+ if hash then
+ writeln("")
+ if onlyhistory then
+ if next(hash.history) then
+ for k, v in table.sortedhash(hash.history) do
+ writeln(format("%s => %s",k,v))
+ end
+ else
+ writeln("no history")
+ end
+ else
+ local paths = hash.paths
+ if #paths > 0 then
+ for i=1,#paths do
+ local path = paths[i]
+ writeln(format("%4i %s",#path[2],path[1]))
+ end
+ else
+ writeln("empty cache")
+ end
+ end
+ end
+end
+
+local function fcd_find()
+ found = { }
+ pattern = environment.files[1] or ""
+ if pattern ~= "" then
+ pattern = string.escapedpattern(pattern)
+ local paths = hash.paths
+ for i=1,#paths do
+ local paths = paths[i][2]
+ for i=1,#paths do
+ local path = paths[i]
+ if find(path,pattern) then
+ found[#found+1] = path
+ end
+ end
+ end
+ end
+end
+
+local function fcd_choose(new)
+ if pattern == "" then
+ writeln(format("staying in dir %q",(gsub(lfs.currentdir(),"\\","/"))))
+ return
+ end
+ if #found == 0 then
+ writeln(format("dir %q not found",pattern))
+ return
+ end
+ local okay = #found == 1 and found[1] or (not new and hash.history[pattern])
+ if okay then
+ writeln(format("changing to %q",okay))
+ fcd_changeto(okay)
+ return
+ end
+ local offset = 0
+ while true do
+ if not found[offset] then
+ offset = 0
+ end
+ io.write("\n")
+ for i=1,26 do
+ local v = found[i+offset]
+ if v then
+ writeln(format("%s %3i %s",char(i+96),offset+i,v))
+ else
+ break
+ end
+ end
+ offset = offset + 26
+ if found[offset+1] then
+ io.write("\n[press enter for more or select letter]\n\n>> ")
+ else
+ io.write("\n[select letter]\n\n>> ")
+ end
+ local answer = lower(io.read() or "")
+ if not answer or answer == 'quit' then
+ break
+ elseif #answer > 0 then
+ local choice = tonumber(answer)
+ if not choice then
+ if answer >= "a" and answer <= "z" then
+ choice = byte(answer) - 96 + offset - 26
+ end
+ end
+ local newdir = found[choice]
+ if newdir then
+ hash.history[pattern] = newdir
+ writeln(format("changing to %q",newdir))
+ fcd_changeto(newdir)
+ fcd_save()
+ return
+ end
+ else
+ -- try again
+ end
+ end
+end
+
+local function globdirs(path,dirs)
+ local dirs = dirs or { }
+ for name in lfs.dir(path) do
+ if not find(name,"%.$") then
+ local fullname = path .. "/" .. name
+ if lfs.isdir(fullname) and not find(fullname,"/%.") then
+ dirs[#dirs+1] = fullname
+ globdirs(fullname,dirs)
+ end
+ end
+ end
+ return dirs
+end
+
+local function fcd_scan()
+ if hash then
+ local paths = hash.paths
+ for i=1,#environment.files do
+ local name = environment.files[i]
+ local name = gsub(name,"\\","/")
+ local name = gsub(name,"/$","")
+ local list = globdirs(name)
+ local done = false
+ for i=1,#paths do
+ if paths[i][1] == name then
+ paths[i][2] = list
+ done = true
+ break
+ end
+ end
+ if not done then
+ paths[#paths+1] = { name, list }
+ end
+ end
+ end
+end
+
+local argument = environment.argument
+
+if argument("clear") then
+ if argument("history") then
+ fcd_load()
+ fcd_clear(true)
+ else
+ fcd_clear()
+ end
+ fcd_save()
+elseif argument("scan") then
+ fcd_clear()
+ fcd_scan()
+ fcd_save()
+elseif argument("add") then
+ fcd_load(true)
+ fcd_scan()
+ fcd_save()
+elseif argument("stub") then
+ writeln(stubdata)
+elseif argument("list") then
+ fcd_load()
+ if argument("history") then
+ fcd_list(true)
+ else
+ fcd_list()
+ end
+elseif argument("help") then
+ application.help()
+elseif argument("exporthelp") then
+ application.export(argument("exporthelp"),environment.files[1])
+else -- also argument("find")
+ fcd_load()
+ fcd_find()
+ fcd_choose(argument("nohistory"))
+end
+
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-flac.lua b/Master/texmf-dist/scripts/context/lua/mtx-flac.lua
index 37f985654a3..2155b24becb 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-flac.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-flac.lua
@@ -6,17 +6,15 @@ if not modules then modules = { } end modules ['mtx-flac'] = {
license = "see context related readme files"
}
--- Written with Within Temptation's "The Unforgiven" in loopmode on
--- the speakers. The following code is also used for my occasional music
--- repository cleanup session using the code below.
-
--- this can become l-flac.lua
-
local sub, match, byte, lower = string.sub, string.match, string.byte, string.lower
local readstring, readnumber = io.readstring, io.readnumber
-local concat = table.concat
+local concat, sortedpairs = table.concat, table.sortedpairs
local tonumber = tonumber
local tobitstring = number.tobitstring
+local lpegmatch = lpeg.match
+local p_escaped = lpeg.patterns.xml.escaped
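+-- p_escaped turns xml-special characters such as & and < in artist, album and track
+-- names into entities before they are written to the collection file below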
+
+-- rather silly: pack info in bits while a flac file is large anyway
flac = flac or { }
@@ -25,7 +23,7 @@ flac.report = string.format
local splitter = lpeg.splitat("=")
local readers = { }
-readers[0] = function(f,size,target) -- not yet ok
+readers[0] = function(f,size,target) -- not yet ok .. todo: use bit32 lib
local info = { }
target.info = info
info.minimum_block_size = readnumber(f,-2)
@@ -78,6 +76,7 @@ function flac.getmetadata(filename)
local reader = readers[flag] or readers.default
reader(f,size,data,banner)
if last then
+ f:close()
return data
end
end
@@ -127,28 +126,29 @@ function flac.savecollection(pattern,filename)
local nofartists, nofalbums, noftracks, noferrors = 0, 0, 0, 0
local f = io.open(filename,"wb")
if f then
+ flac.report("saving data in file %q",filename)
f:write("<?xml version='1.0' standalone='yes'?>\n\n")
f:write("<collection>\n")
- for artist, albums in table.sortedpairs(music) do
+ for artist, albums in sortedpairs(music) do
nofartists = nofartists + 1
f:write("\t<artist>\n")
- f:write("\t\t<name>" .. artist .. "</name>\n")
+ f:write("\t\t<name>",lpegmatch(p_escaped,artist),"</name>\n")
f:write("\t\t<albums>\n")
- for album, data in table.sortedpairs(albums) do
+ for album, data in sortedpairs(albums) do
nofalbums = nofalbums + 1
- f:write("\t\t\t<album year='" .. (data.year or 0) .. "'>\n")
- f:write("\t\t\t\t<name>" .. album .. "</name>\n")
+ f:write("\t\t\t<album year='",data.year or 0,"'>\n")
+ f:write("\t\t\t\t<name>",lpegmatch(p_escaped,album),"</name>\n")
f:write("\t\t\t\t<tracks>\n")
local tracks = data.tracks
for i=1,#tracks do
local track = tracks[i]
if track then
noftracks = noftracks + 1
- f:write("\t\t\t\t\t<track length='" .. track.length .. "'>" .. track.title .. "</track>\n")
+ f:write("\t\t\t\t\t<track length='",track.length,"'>",lpegmatch(p_escaped,track.title),"</track>\n")
else
noferrors = noferrors + 1
- flac.report("error in album: %q of artist",album,artist)
- f:write("\t\t\t\t\t<error track='" .. i .. "'/>\n")
+ flac.report("error in album: %q of %q, no track %s",album,artist,i)
+ f:write("\t\t\t\t\t<error track='",i,"'/>\n")
end
end
f:write("\t\t\t\t</tracks>\n")
@@ -158,20 +158,40 @@ function flac.savecollection(pattern,filename)
f:write("\t</artist>\n")
end
f:write("</collection>\n")
+ f:close()
+ flac.report("%s tracks of %s albums of %s artists saved in %q (%s errors)",noftracks,nofalbums,nofartists,filename,noferrors)
+ else
+ flac.report("unable to save data in file %q",filename)
end
- f:close()
- flac.report("%s tracks of %s albums of %s artists saved in %q (%s errors)",noftracks,nofalbums,nofartists,filename,noferrors)
end
--
local helpinfo = [[
---collect collect albums in xml file
-
-example:
-
-mtxrun --script flac --collect somename.flac
-mtxrun --script flac --collect --pattern="m:/music/**")
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-flac</entry>
+ <entry name="detail">ConTeXt Flac Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="collect"><short>collect albums in xml file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script flac --collect somename.flac</command></example>
+ <example><command>mtxrun --script flac --collect --pattern="m:/music/**"</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
]]
local application = logs.application {
@@ -211,6 +231,8 @@ end
if environment.argument("collect") then
scripts.flac.collect()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua b/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua
index c5b458c1422..d427f8b2fc3 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua
@@ -6,40 +6,70 @@ if not modules then modules = { } end modules ['mtx-fonts'] = {
license = "see context related readme files"
}
-local helpinfo = [[
---save save open type font in raw table
---unpack save a tma file in a more readale format
-
---reload generate new font database
---reload --simple generate 'luatex-fonts-names.lua' (not for context!)
-
---list --name list installed fonts, filter by name [--pattern]
---list --spec list installed fonts, filter by spec [--filter]
---list --file list installed fonts, filter by file [--pattern]
-
---pattern=str filter files using pattern
---filter=list key-value pairs
---all show all found instances
---info give more details
---track=list enable trackers
---statistics some info about the database
-
-examples of searches:
+local getargument = environment.getargument
+local setargument = environment.setargument
+local givenfiles = environment.files
-mtxrun --script font --list somename (== --pattern=*somename*)
-
-mtxrun --script font --list --name somename
-mtxrun --script font --list --name --pattern=*somename*
-
-mtxrun --script font --list --spec somename
-mtxrun --script font --list --spec somename-bold-italic
-mtxrun --script font --list --spec --pattern=*somename*
-mtxrun --script font --list --spec --filter="fontname=somename"
-mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed"
-mtxrun --script font --list --spec --filter="familyname=crap*,weight=bold,style=italic"
-
-mtxrun --script font --list --file somename
-mtxrun --script font --list --file --pattern=*somename*
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-fonts</entry>
+ <entry name="detail">ConTeXt Font Database Management</entry>
+ <entry name="version">0.21</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="save"><short>save open type font in raw table</short></flag>
+ <flag name="unpack"><short>save a tma file in a more readale format</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="reload"><short>generate new font database (use <ref name="force"/> when in doubt)</short></flag>
+ <flag name="reload"><short><ref name="simple"/>:generate luatex-fonts-names.lua (not for context!)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="list"><short><ref name="name"/>: list installed fonts, filter by name [<ref name="pattern"/>]</short></flag>
+ <flag name="list"><short><ref name="spec"/>: list installed fonts, filter by spec [<ref name="filter"/>]</short></flag>
+ <flag name="list"><short><ref name="file"/>: list installed fonts, filter by file [<ref name="pattern"/>]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="str"><short>filter files using pattern</short></flag>
+ <flag name="filter" value="list"><short>key-value pairs</short></flag>
+ <flag name="all"><short>show all found instances (combined with other flags)</short></flag>
+ <flag name="info"><short>give more details</short></flag>
+ <flag name="track" value="list"><short>enable trackers</short></flag>
+ <flag name="statistics"><short>some info about the database</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script font --list somename (== --pattern=*somename*)</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --name somename</command></example>
+ <example><command>mtxrun --script font --list --name --pattern=*somename*</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --spec somename</command></example>
+ <example><command>mtxrun --script font --list --spec somename-bold-italic</command></example>
+ <example><command>mtxrun --script font --list --spec --pattern=*somename*</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="fontname=somename"</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed"</command></example>
+ <example><command>mtxrun --script font --list --spec --filter="familyname=crap*,weight=bold,style=italic"</command></example>
+ </subcategory>
+ <subcategory>
+ <example><command>mtxrun --script font --list --all</command></example>
+ <example><command>mtxrun --script font --list --file somename</command></example>
+ <example><command>mtxrun --script font --list --file --all somename</command></example>
+ <example><command>mtxrun --script font --list --file --pattern=*somename*</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
]]
local application = logs.application {
@@ -126,7 +156,7 @@ function fonts.names.simple()
end
report("saving names in '%s'",name)
io.savedata(name,table.serialize(simplified,true))
- local data = io.loaddata(resolvers.findfile("font-dum.lua","tex"))
+ local data = io.loaddata(resolvers.findfile("luatex-fonts-syn.lua","tex")) or ""
local dummy = string.match(data,"fonts%.names%.version%s*=%s*([%d%.]+)")
if tonumber(dummy) ~= simpleversion then
report("warning: version number %s in 'font-dum' does not match database version number %s",dummy or "?",simpleversion)
@@ -137,10 +167,10 @@ function fonts.names.simple()
end
function scripts.fonts.reload()
- if environment.argument("simple") then
+ if getargument("simple") then
fonts.names.simple()
else
- fonts.names.load(true)
+ fonts.names.load(true,getargument("force"))
end
end
@@ -208,7 +238,7 @@ end
local function reloadbase(reload)
if reload then
report("fontnames, reloading font database")
- names.load(true)
+ names.load(true,getargument("force"))
report("fontnames, done\n\n")
end
end
@@ -274,16 +304,16 @@ end
function scripts.fonts.list()
- local all = environment.argument("all")
- local info = environment.argument("info")
- local reload = environment.argument("reload")
- local pattern = environment.argument("pattern")
- local filter = environment.argument("filter")
- local given = environment.files[1]
+ local all = getargument("all")
+ local info = getargument("info")
+ local reload = getargument("reload")
+ local pattern = getargument("pattern")
+ local filter = getargument("filter")
+ local given = givenfiles[1]
reloadbase(reload)
- if environment.argument("name") then
+ if getargument("name") then
if pattern then
--~ mtxrun --script font --list --name --pattern=*somename*
list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info)
@@ -295,7 +325,7 @@ function scripts.fonts.list()
else
report("not supported: --list --name <no specification>",name)
end
- elseif environment.argument("spec") then
+ elseif getargument("spec") then
if pattern then
--~ mtxrun --script font --list --spec --pattern=*somename*
report("not supported: --list --spec --pattern",name)
@@ -308,7 +338,7 @@ function scripts.fonts.list()
else
report("not supported: --list --spec <no specification>",name)
end
- elseif environment.argument("file") then
+ elseif getargument("file") then
if pattern then
--~ mtxrun --script font --list --file --pattern=*somename*
list_specifications(fonts.names.collectfiles(string.topattern(pattern,true),reload,all),info)
@@ -336,7 +366,7 @@ function scripts.fonts.list()
end
function scripts.fonts.unpack()
- local name = file.removesuffix(file.basename(environment.files[1] or ""))
+ local name = file.removesuffix(file.basename(givenfiles[1] or ""))
if name and name ~= "" then
local cache = containers.define("fonts", "otf", 2.730, true)
local cleanname = containers.cleanname(name)
@@ -353,8 +383,8 @@ function scripts.fonts.unpack()
end
function scripts.fonts.save()
- local name = environment.files[1] or ""
- local sub = environment.files[2] or ""
+ local name = givenfiles[1] or ""
+ local sub = givenfiles[2] or ""
local function save(savename,fontblob)
if fontblob then
savename = savename:lower() .. ".lua"
@@ -367,7 +397,7 @@ function scripts.fonts.save()
if name and name ~= "" then
local filename = resolvers.findfile(name) -- maybe also search for opentype
if filename and filename ~= "" then
- local suffix = string.lower(file.extname(filename))
+ local suffix = string.lower(file.suffix(filename))
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then
local fontinfo = fontloader.info(filename)
if fontinfo then
@@ -394,21 +424,23 @@ function scripts.fonts.save()
end
end
-if environment.argument("names") then
- environment.setargument("reload",true)
- environment.setargument("simple",true)
+if getargument("names") then
+ setargument("reload",true)
+ setargument("simple",true)
end
-if environment.argument("list") then
+if getargument("list") then
scripts.fonts.list()
-elseif environment.argument("reload") then
+elseif getargument("reload") then
scripts.fonts.reload()
-elseif environment.argument("save") then
+elseif getargument("save") then
scripts.fonts.save()
-elseif environment.argument("unpack") then
+elseif getargument("unpack") then
scripts.fonts.unpack()
-elseif environment.argument("statistics") then
+elseif getargument("statistics") then
fonts.names.statistics()
+elseif getargument("exporthelp") then
+ application.export(getargument("exporthelp"),givenfiles[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-grep.lua b/Master/texmf-dist/scripts/context/lua/mtx-grep.lua
index 3cbc1421ad2..dbcce67f6a2 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-grep.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-grep.lua
@@ -7,12 +7,27 @@ if not modules then modules = { } end modules ['mtx-babel'] = {
}
local helpinfo = [[
---pattern search for pattern (optional)
---count count matches only
---nocomment skip lines that start with %% or #
---xml pattern is lpath expression
-
-patterns are lua patterns and need to be escaped accordingly
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-grep</entry>
+ <entry name="detail">Simple Grepper</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="pattern"><short>search for pattern (optional)</short></flag>
+ <flag name="count"><short>count matches only</short></flag>
+ <flag name="nocomment"><short>skip lines that start with %% or #</short></flag>
+ <flag name="xml"><short>pattern is lpath expression</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <comments>
+ <comment>patterns are lua patterns and need to be escaped accordingly</comment>
+ </comments>
+</application>
]]
local application = logs.application {
@@ -60,7 +75,7 @@ function scripts.grep.find(pattern, files, offset)
if m > 0 then
nofmatches = nofmatches + m
nofmatchedfiles = nofmatchedfiles + 1
- write_nl(format("%s: %s",name,m))
+ write_nl(format("%5i %s",m,name))
io.flush()
end
else
@@ -127,7 +142,7 @@ function scripts.grep.find(pattern, files, offset)
if count and m > 0 then
nofmatches = nofmatches + m
nofmatchedfiles = nofmatchedfiles + 1
- write_nl(format("%s: %s",name,m))
+ write_nl(format("%5i %s",m,name))
io.flush()
end
end
@@ -144,7 +159,9 @@ end
local pattern = environment.argument("pattern")
local files = environment.files and #environment.files > 0 and environment.files
-if pattern and files then
+if environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),files[1])
+elseif pattern and files then
scripts.grep.find(pattern, files)
elseif files then
scripts.grep.find(files[1], files, 2)
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-interface.lua b/Master/texmf-dist/scripts/context/lua/mtx-interface.lua
index f52fde345d5..82cefd63817 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-interface.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-interface.lua
@@ -11,25 +11,43 @@ local gsub, format, gmatch, find = string.gsub, string.format, string.gmatch, st
local utfchar, utfgsub = utf.char, utf.gsub
local helpinfo = [[
---interfaces generate context interface files
---messages generate context message files
---labels generate context label files
-
---context equals --interfaces --messages --languages
-
---scite generate scite interface
---bbedit generate bbedit interface files
---jedit generate jedit interface files
---textpad generate textpad interface files
---text create text files for commands and environments
---raw report commands to the console
---check generate check file
-
---toutf replace named characters by utf
---preprocess preprocess mkvi files to tex files [force,suffix]
-
---suffix use given suffix for output files
---force force action even when in doubt
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-interface</entry>
+ <entry name="detail">ConTeXt Interface Related Goodies</entry>
+ <entry name="version">0.13</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="interfaces"><short>generate context interface files</short></flag>
+ <flag name="messages"><short>generate context message files</short></flag>
+ <flag name="labels"><short>generate context label files</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="context"><short>equals <ref name="interfaces"/> <ref name="messages"/> <ref name="languages"/></short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="scite"><short>generate scite interface</short></flag>
+ <flag name="bbedit"><short>generate bbedit interface files</short></flag>
+ <flag name="jedit"><short>generate jedit interface files</short></flag>
+ <flag name="textpad"><short>generate textpad interface files</short></flag>
+ <flag name="text"><short>create text files for commands and environments</short></flag>
+ <flag name="raw"><short>report commands to the console</short></flag>
+ <flag name="check"><short>generate check file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="toutf"><short>replace named characters by utf</short></flag>
+ <flag name="preprocess"><short>preprocess mkvi files to tex files [force,suffix]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="suffix"><short>use given suffix for output files</short></flag>
+ <flag name="force"><short>force action even when in doubt</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -50,7 +68,7 @@ local messageinterfaces = { 'en','cs','de','it','nl','ro','fr','pe','no' }
local function collect(filename,class,data)
if data then
local result = { }
- for name, list in next, data do
+ for name, list in table.sortedhash(data) do
result[#result+1] = format("keywordclass.%s.%s=\\\n",class,name)
for i=1,#list do
if i%5 == 0 then
@@ -596,6 +614,8 @@ elseif ea("scite") or ea("bbedit") or ea("jedit") or ea("textpad") or ea("text")
if ea("raw") then
scripts.interface.editor("raw")
end
+elseif ea("exporthelp") then
+ application.export(ea("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-metapost.lua b/Master/texmf-dist/scripts/context/lua/mtx-metapost.lua
index 3b9ed6ff17e..08daec978dc 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-metapost.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-metapost.lua
@@ -7,19 +7,38 @@ if not modules then modules = { } end modules ['mtx-metapost'] = { -- this was m
}
local helpinfo = [[
---rawmp raw metapost run
---metafun use metafun instead of plain
---latex force --tex=latex
---texexec force texexec usage (mkii)
---split split single result file into pages
-
-intended usage:
-
-mtxrun --script metapost yourfile.mp
-mtxrun --script metapost --split yourfile.mp
-mtxrun --script metapost yourfile.123 myfile.mps
-
-other usage resembles mptopdf.pl
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-metapost</entry>
+ <entry name="detail">MetaPost to PDF processor</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="rawmp"><short>raw metapost run</short></flag>
+ <flag name="metafun"><short>use metafun instead of plain</short></flag>
+ <flag name="latex"><short>force <ref name="tex=latex"/></short></flag>
+ <flag name="texexec"><short>force texexec usage (mkii)</short></flag>
+ <flag name="split"><short>split single result file into pages</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script metapost yourfile.mp</command></example>
+ <example><command>mtxrun --script metapost --split yourfile.mp</command></example>
+ <example><command>mtxrun --script metapost yourfile.123 myfile.mps</command></example>
+ </subcategory>
+ </category>
+ </examples>
+ <comments>
+ <comment>other usage resembles mptopdf.pl</comment>
+ </comments>
+</application>
]]
local application = logs.application {
@@ -49,7 +68,7 @@ local tempname = "mptopdf-temp.tex"
local function do_convert(filename)
if find(filename,".%d+$") or find(filename,"%.mps$") then
io.savedata(tempname,format(template,filename))
- local resultname = format("%s-%s.pdf",file.nameonly(filename),file.extname(filename))
+ local resultname = format("%s-%s.pdf",file.nameonly(filename),file.suffix(filename))
local result = os.execute(format([[context --once --batch --purge --result=%s "%s"]],resultname,tempname))
return lfs.isfile(resultname) and resultname
end
@@ -166,10 +185,12 @@ function scripts.mptopdf.convertall()
end
end
-if environment.files[1] then
+if environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
+elseif environment.files[1] then
scripts.mptopdf.convertall()
else
- if not environment.arguments.help then
+ if not environment.argument("help") then
report("provide MP output file (or pattern)")
report()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua b/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua
index 0b66d59ef3a..455ecbd5281 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua
@@ -9,8 +9,22 @@ if not modules then modules = { } end modules ['mtx-metatex'] = {
-- future versions will deal with specific variants of metatex
local helpinfo = [[
---run process (one or more) files (default action)
---make create metatex format(s)
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-metatex</entry>
+ <entry name="detail">MetaTeX Process Management</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="run"><short>process (one or more) files (default action)</short></flag>
+ <flag name="make"><short>create metatex format(s)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -57,6 +71,8 @@ elseif environment.argument("make") then
scripts.metatex.timed(scripts.metatex.make)
elseif environment.argument("help") then
logs.help(messages.help,false)
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
elseif environment.files[1] then
scripts.metatex.timed(scripts.metatex.run)
else
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-mk-help.lua b/Master/texmf-dist/scripts/context/lua/mtx-mk-help.lua
new file mode 100644
index 00000000000..794bbca3799
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/lua/mtx-mk-help.lua
@@ -0,0 +1,473 @@
+if not modules then modules = { } end modules ['mtx-mk-help'] = {
+ version = 1.001,
+ comment = "a script for making help files",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[
+
+mtxrun --exporthelp=all %targetpath%\mkiv\mtxrun.tmp
+context --exporthelp=all %targetpath%\mkiv\context.tmp
+mtxrun --script context --exporthelp=all %targetpath%\mkiv\mtx-context.tmp
+
+mtxrun --script babel --exporthelp=all %targetpath%\mkiv\mtx-babel.tmp
+mtxrun --script base --exporthelp=all %targetpath%\mkiv\mtx-base.tmp
+mtxrun --script cache --exporthelp=all %targetpath%\mkiv\mtx-cache.tmp
+mtxrun --script chars --exporthelp=all %targetpath%\mkiv\mtx-chars.tmp
+mtxrun --script check --exporthelp=all %targetpath%\mkiv\mtx-check.tmp
+mtxrun --script colors --exporthelp=all %targetpath%\mkiv\mtx-colors.tmp
+mtxrun --script convert --exporthelp=all %targetpath%\mkiv\mtx-convert.tmp
+mtxrun --script epub --exporthelp=all %targetpath%\mkiv\mtx-epub.tmp
+mtxrun --script fcd --exporthelp=all %targetpath%\mkiv\mtx-fcd.tmp
+mtxrun --script flac --exporthelp=all %targetpath%\mkiv\mtx-flac.tmp
+mtxrun --script fonts --exporthelp=all %targetpath%\mkiv\mtx-fonts.tmp
+mtxrun --script grep --exporthelp=all %targetpath%\mkiv\mtx-grep.tmp
+mtxrun --script interface --exporthelp=all %targetpath%\mkiv\mtx-interface.tmp
+mtxrun --script metapost --exporthelp=all %targetpath%\mkiv\mtx-metapost.tmp
+mtxrun --script metatex --exporthelp=all %targetpath%\mkiv\mtx-metatex.tmp
+mtxrun --script modules --exporthelp=all %targetpath%\mkiv\mtx-modules.tmp
+mtxrun --script mtxworks --exporthelp=all %targetpath%\mkiv\mtx-mtxworks.tmp
+mtxrun --script package --exporthelp=all %targetpath%\mkiv\mtx-package.tmp
+mtxrun --script patterns --exporthelp=all %targetpath%\mkiv\mtx-patterns.tmp
+mtxrun --script pdf --exporthelp=all %targetpath%\mkiv\mtx-pdf.tmp
+mtxrun --script profile --exporthelp=all %targetpath%\mkiv\mtx-profile.tmp
+mtxrun --script rsync --exporthelp=all %targetpath%\mkiv\mtx-rsync.tmp
+mtxrun --script scite --exporthelp=all %targetpath%\mkiv\mtx-scite.tmp
+mtxrun --script server --exporthelp=all %targetpath%\mkiv\mtx-server.tmp
+mtxrun --script texworks --exporthelp=all %targetpath%\mkiv\mtx-texworks.tmp
+mtxrun --script timing --exporthelp=all %targetpath%\mkiv\mtx-timing.tmp
+mtxrun --script tools --exporthelp=all %targetpath%\mkiv\mtx-tools.tmp
+mtxrun --script unzip --exporthelp=all %targetpath%\mkiv\mtx-unzip.tmp
+mtxrun --script update --exporthelp=all %targetpath%\mkiv\mtx-update.tmp
+mtxrun --script watch --exporthelp=all %targetpath%\mkiv\mtx-watch.tmp
+
+mtxrun --script mk-help luatools --exporthelp=all %targetpath%\mkiv\luatools.tmp
+
+mtxrun --script mk-help texmfstart --exporthelp=all %targetpath%\mkii\texmfstart.tmp
+mtxrun --script mk-help texexec --exporthelp=all %targetpath%\mkii\texexec.tmp
+mtxrun --script mk-help texutil --exporthelp=all %targetpath%\mkii\texutil.tmp
+mtxrun --script mk-help ctxtools --exporthelp=all %targetpath%\mkii\ctxtools.tmp
+mtxrun --script mk-help textools --exporthelp=all %targetpath%\mkii\textools.tmp
+mtxrun --script mk-help pdftools --exporthelp=all %targetpath%\mkii\pdftools.tmp
+mtxrun --script mk-help tmftools --exporthelp=all %targetpath%\mkii\tmftools.tmp
+mtxrun --script mk-help xmltools --exporthelp=all %targetpath%\mkii\xmltools.tmp
+mtxrun --script mk-help pstopdf --exporthelp=all %targetpath%\mkii\pstopdf.tmp
+mtxrun --script mk-help rlxtools --exporthelp=all %targetpath%\mkii\rlxtools.tmp
+mtxrun --script mk-help imgtopdf --exporthelp=all %targetpath%\mkii\imgtopdf.tmp
+
+]]--
+
+local helpinfo = os.resultof("mtxrun --exporthelp") or ""
+local helpinfo = string.match(helpinfo,[[^.-(<application>.-</application>)]]) or [[<application></application>]]
+
+local texmfstart = logs.application {
+ name = "texmfstart",
+ banner = "texmfstart 7.0.0",
+ helpinfo = [[<?xml version="1.0"?>]] .. helpinfo,
+}
+
+-- let's also put luatools here:
+
+local helpinfo = os.resultof("luatools --exporthelp") or ""
+local helpinfo = string.match(helpinfo,[[^.-(<application>.-</application>)]]) or [[<application></application>]]
+local helpinfo = string.gsub(helpinfo,"mtx%-base","luatools")
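+-- the two blocks above capture the output of "mtxrun --exporthelp" and "luatools --exporthelp",
+-- keep only the <application>...</application> fragment (falling back to an empty element when
+-- the call fails), and rename the internal mtx-base name so luatools reports its own name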
+
+local luatools = logs.application {
+ name = "luatools",
+ banner = "luatools 1.35",
+ helpinfo = [[<?xml version="1.0"?>]] .. helpinfo,
+}
+
+--
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">texexec</entry>
+ <entry name="detail">TeXExec</entry>
+ <entry name="version">6.2.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="make"><short>make formats</short></flag>
+ <flag name="check"><short>check versions</short></flag>
+ <flag name="process"><short>process file</short></flag>
+ <flag name="mptex"><short>process mp file</short></flag>
+ <flag name="mpxtex"><short>process mpx file</short></flag>
+ <flag name="mpgraphic"><short>process mp file to stand-alone graphics</short></flag>
+ <flag name="mpstatic"><short>process mp/ctx file to stand-alone graphics</short></flag>
+ <flag name="listing"><short>list of file content</short></flag>
+ <flag name="figures"><short>generate overview of figures</short></flag>
+ <flag name="modules"><short>generate module documentation</short></flag>
+ <flag name="pdfarrange"><short>impose pages (booklets)</short></flag>
+ <flag name="pdfselect"><short>select pages from file(s)</short></flag>
+ <flag name="pdfcopy"><short>copy pages from file(s)</short></flag>
+ <flag name="pdftrim"><short>trim pages from file(s)</short></flag>
+ <flag name="pdfcombine"><short>combine multiple pages</short></flag>
+ <flag name="pdfsplit"><short>split file in pages</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local texexec = logs.application {
+ name = "texexec",
+ banner = "TeXExec 6.2.1",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">texutil</entry>
+ <entry name="detail">TeXUtil</entry>
+ <entry name="version">9.1.0</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="references"><short>convert tui file into tuo file</short></flag>
+ <flag name="figures"><short>generate figure dimensions file</short></flag>
+ <flag name="logfile"><short>filter essential log messages</short></flag>
+ <flag name="purgefiles"><short>remove most temporary files</short></flag>
+ <flag name="purgeallfiles"><short>remove all temporary files</short></flag>
+ <flag name="documentation"><short>generate documentation file from source</short></flag>
+ <flag name="analyzefile"><short>analyze pdf file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>]]
+
+local texutil = logs.application {
+ name = "texutil",
+ banner = "TeXUtil 9.1.0",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">ctxtools</entry>
+ <entry name="detail">CtxTools</entry>
+ <entry name="version">1.3.5</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="touchcontextfile"><short>update context version</short></flag>
+ <flag name="contextversion"><short>report context version</short></flag>
+ <flag name="jeditinterface"><short>generate jedit syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="bbeditinterface"><short>generate bbedit syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="sciteinterface"><short>generate scite syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="rawinterface"><short>generate raw syntax files [<ref name="pipe]"/></short></flag>
+ <flag name="translateinterface"><short>generate interface files (xml) [nl de ..]</short></flag>
+ <flag name="purgefiles"><short>remove temporary files [<ref name="all"/> <ref name="recurse]"/> [basename]</short></flag>
+ <flag name="documentation generate documentation [--type" value="]"><short>[filename]</short></flag>
+ <flag name="filterpages'"><short>) # no help, hidden temporary feature</short></flag>
+ <flag name="dpxmapfiles"><short>convert pdftex mapfiles to dvipdfmx [<ref name="force]"/> [texmfroot]</short></flag>
+ <flag name="listentities"><short>create doctype entity definition from enco-uc.tex</short></flag>
+ <flag name="brandfiles"><short>add context copyright notice [<ref name="force]"/></short></flag>
+ <flag name="platformize"><short>replace line-endings [<ref name="recurse"/> <ref name="force]"/> [pattern]</short></flag>
+      <flag name="dependencies analyze dependencies within context [--save --compact --filter" value="[macros|filenames] ]"><short>[filename]</short></flag>
+ <flag name="updatecontext"><short>download latest version and remake formats [<ref name="proxy]"/></short></flag>
+ <flag name="disarmutfbom"><short>remove utf bom [<ref name="force]"/></short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local ctxtools = logs.application {
+ name = "ctxtools",
+ banner = "CtxTools 1.3.5",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">textools</entry>
+ <entry name="detail">TeXTools</entry>
+ <entry name="version">1.3.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="removemapnames"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="restoremapnames"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="hidemapnames"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="videmapnames"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="findfile"><short>filename [<ref name="recurse]"/></short></flag>
+ <flag name="unzipfiles"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="fixafmfiles"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="mactodos"><short>[pattern] [<ref name="recurse]"/></short></flag>
+ <flag name="fixtexmftrees"><short>[texmfroot] [<ref name="force]"/></short></flag>
+ <flag name="replacefile"><short>filename [<ref name="force]"/></short></flag>
+ <flag name="updatetree"><short>fromroot toroot [<ref name="force"/> <ref name="nocheck"/> <ref name="merge"/> <ref name="delete]"/></short></flag>
+ <flag name="downcasefilenames"><short>[<ref name="recurse]"/> [<ref name="force]"/></short></flag>
+ <flag name="stripformfeeds"><short>[<ref name="recurse]"/> [<ref name="force]"/></short></flag>
+ <flag name="showfont"><short>filename</short></flag>
+ <flag name="encmake"><short>afmfile encodingname</short></flag>
+ <flag name="tpmmake"><short>tpm file (run in texmf root)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local textools = logs.application {
+ name = "textools",
+ banner = "TeXTools 1.3.1",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">pdftools</entry>
+ <entry name="detail">PDFTools</entry>
+ <entry name="version">1.2.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="spotimage filename --colorspec" value=""><short><ref name="colorname="/> [<ref name="retain"/> <ref name="invert"/> <ref name="subpath=]"/></short></flag>
+ <flag name="colorimage filename --colorspec" value=""><short>[<ref name="retain"/> <ref name="invert"/> <ref name="colorname="/> ]</short></flag>
+ <flag name="convertimage"><short>filename [<ref name="retain"/> <ref name="subpath]"/></short></flag>
+ <flag name="downsampleimage"><short>filename [<ref name="retain"/> <ref name="subpath"/> <ref name="lowres"/> <ref name="normal]"/></short></flag>
+ <flag name="info"><short>filename</short></flag>
+ <flag name="countpages"><short>[<ref name="pattern"/> <ref name="threshold]"/></short></flag>
+ <flag name="checkembedded"><short>[<ref name="pattern]"/></short></flag>
+ <flag name="analyzefile"><short>filename</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local pdftools = logs.application {
+ name = "pdftools",
+ banner = "PDFTools 1.2.1",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">tmftools</entry>
+ <entry name="detail">TMFTools</entry>
+ <entry name="version">1.1.0</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="analyze"><short>[<ref name="strict"/> <ref name="sort"/> <ref name="rootpath"/> <ref name="treepath"/> <ref name="delete"/> <ref name="force"/>] [pattern]</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="serve"><short>act as kpse server</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local tmftools = logs.application {
+ name = "tmftools",
+    banner   = "TMFTools 1.1.0",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">xmltools</entry>
+ <entry name="detail">XMLTools</entry>
+ <entry name="version">1.2.2</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="dir"><short>generate directory listing</short></flag>
+ <flag name="mmlpages"><short>generate graphic from mathml</short></flag>
+ <flag name="analyze"><short>report entities and elements [<ref name="utf"/> <ref name="process"/>]</short></flag>
+ <flag name="cleanup"><short>cleanup xml file [<ref name="force"/>]</short></flag>
+ <flag name="enhance"><short>enhance xml file (partial)</short></flag>
+ <flag name="filter"><short>filter elements from xml file [<ref name="element"/>]</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local xmltools = logs.application {
+ name = "xmltools",
+    banner   = "XMLTools 1.2.2",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">pstopdf</entry>
+ <entry name="detail">PStoPDF</entry>
+ <entry name="version">2.0.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="request"><short>handles exa request file</short></flag>
+ <flag name="watch"><short>watch folders for conversions (untested)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local pstopdf = logs.application {
+ name = "pstopdf",
+ banner = "PStoPDF 2.0.1",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">rlxtools</entry>
+ <entry name="detail">RlxTools</entry>
+ <entry name="version">1.0.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+      <flag name="manipulate"><short>[<ref name="test]"/> manipulatorfile resourcelog</short></flag>
+ <flag name="identify"><short>[<ref name="collect]"/> filename</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local rlxtools = logs.application {
+ name = "rlxtools",
+ banner = "RlxTools 1.0.1",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">imgtopdf</entry>
+ <entry name="detail">ImgToPdf</entry>
+ <entry name="version">1.1.2</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>convert image into pdf</short></flag>
+ <flag name="compression"><short>level of compression in percent</short></flag>
+ <flag name="depth"><short>image depth in bits</short></flag>
+      <flag name="colorspace"><short>colorspace (rgb,cmyk,gray)</short></flag>
+ <flag name="quality"><short>quality in percent</short></flag>
+ <flag name="inputpath"><short>path where files are looked for</short></flag>
+ <flag name="outputpath"><short>path where files end up</short></flag>
+ <flag name="auto"><short>determine settings automatically</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local imgtopdf = logs.application {
+ name = "imgtopdf",
+ banner = "ImgToPdf 1.1.2",
+ helpinfo = helpinfo,
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mptopdf</entry>
+ <entry name="detail">convert MetaPost to PDF</entry>
+ <entry name="version">1.4.1</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="metafun"><short>use the metafun format to process the file (default is mpost)</short></flag>
+ <flag name="texexec"><short>use texexec (context) to process text snippets</short></flag>
+ <flag name="latex"><short>use latex to process text snippets</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local mptopdf = logs.application {
+ name = "mptopdf",
+ banner = "MPtoPDF 1.4.1",
+ helpinfo = helpinfo,
+}
+
+-- texmfstart.rb is normally replaced by mtxrun
+-- runtools.rb is run from within context
+-- concheck.rb is run from within editors
+-- texsync.rb is no longer in the zip
+-- mpstools.rb is no longer in the zip
+-- rscortool.rb is only run indirectly
+-- rsfiltool.rb is only run indirectly
+-- rslibtool.rb is only run indirectly
+
+local application = logs.application {
+ name = "mk-help",
+ banner = "Mk Help generator 1.00",
+}
+
+local filename = environment.files[1]
+
+if not filename then
+ application.report("no mk script given")
+ return
+end
+
+local mkapplication
+
+if filename == "texmfstart" then mkapplication = texmfstart
+elseif filename == "luatools" then mkapplication = luatools
+elseif filename == "texexec" then mkapplication = texexec
+elseif filename == "texutil" then mkapplication = texutil
+elseif filename == "ctxtools" then mkapplication = ctxtools
+elseif filename == "textools" then mkapplication = textools
+elseif filename == "pdftools" then mkapplication = pdftools
+elseif filename == "tmftools" then mkapplication = tmftools
+elseif filename == "xmltools" then mkapplication = xmltools
+elseif filename == "pstopdf" then mkapplication = pstopdf
+elseif filename == "rlxtools" then mkapplication = rlxtools
+elseif filename == "imgtopdf" then mkapplication = imgtopdf
+elseif filename == "mptopdf" then mkapplication = mptopdf end
+
+if not mkapplication then
+ application.report("no valid mk script given")
+ return
+end
+
+if environment.argument("exporthelp") then
+ mkapplication.export(environment.argument("exporthelp"),environment.files[2])
+else
+ mkapplication.help()
+end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-modules.lua b/Master/texmf-dist/scripts/context/lua/mtx-modules.lua
index 902b12ac303..ef38a715673 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-modules.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-modules.lua
@@ -10,9 +10,23 @@ scripts = scripts or { }
scripts.modules = scripts.modules or { }
local helpinfo = [[
---convert convert source files (tex, mkii, mkiv, mp) to 'ted' files
---process process source files (tex, mkii, mkiv, mp) to 'pdf' files
---prep use original name with suffix 'prep' appended
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-modules</entry>
+ <entry name="detail">ConTeXt Module Documentation Generators</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>convert source files (tex, mkii, mkiv, mp) to 'ted' files</short></flag>
+ <flag name="process"><short>process source files (tex, mkii, mkiv, mp) to 'pdf' files</short></flag>
+ <flag name="prep"><short>use original name with suffix 'prep' appended</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -181,6 +195,8 @@ if environment.argument("process") then
scripts.modules.process(true)
elseif environment.argument("convert") then
scripts.modules.process(false)
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-package.lua b/Master/texmf-dist/scripts/context/lua/mtx-package.lua
index 294ef4f285a..8c9e6b9fc85 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-package.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-package.lua
@@ -9,7 +9,21 @@ if not modules then modules = { } end modules ['mtx-package'] = {
local format, gsub, gmatch = string.format, string.gsub, string.gmatch
local helpinfo = [[
---merge merge 'loadmodule' into merge file
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-package</entry>
+ <entry name="detail">Distribution Related Goodies</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="merge"><short>merge 'loadmodule' into merge file</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -24,18 +38,18 @@ scripts = scripts or { }
messages = messages or { }
scripts.package = scripts.package or { }
-function scripts.package.merge_luatex_files(name,strip)
+function scripts.package.merge_luatex_files(name)
local oldname = resolvers.findfile(name) or ""
oldname = file.replacesuffix(oldname,"lua")
if oldname == "" then
- report("missing '%s'",name)
+ report("missing %q",name)
else
local newname = file.removesuffix(oldname) .. "-merged.lua"
local data = io.loaddata(oldname) or ""
if data == "" then
- report("missing '%s'",newname)
+ report("missing %q",newname)
else
- report("loading '%s'",oldname)
+ report("loading %q",oldname)
local collected = { }
collected[#collected+1] = format("-- merged file : %s\n",newname)
collected[#collected+1] = format("-- parent file : %s\n",oldname)
@@ -45,23 +59,17 @@ function scripts.package.merge_luatex_files(name,strip)
if file.basename(lib) ~= file.basename(newname) then
local fullname = resolvers.findfile(lib) or ""
if fullname == "" then
- report("missing '%s'",lib)
+ report("missing %q",lib)
else
- report("fetching '%s'",fullname)
+ report("fetching %q",fullname)
local data = io.loaddata(fullname)
- if strip then
- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-%ldx%]%]%-%-[\n\r]*","")
- data = gsub(data,"%-%-%~[^\n\r]*[\n\r]*","\n")
- data = gsub(data,"%s+%-%-[^\n\r]*[\n\r]*","\n")
- data = gsub(data,"[\n\r]+","\n")
- end
collected[#collected+1] = "\ndo -- begin closure to overcome local limits and interference\n\n"
- collected[#collected+1] = data
+ collected[#collected+1] = utilities.merger.compact(data)
collected[#collected+1] = "\nend -- closure\n"
end
end
end
- report("saving '%s'",newname)
+ report("saving %q",newname)
io.savedata(newname,table.concat(collected))
end
end
@@ -69,6 +77,8 @@ end
if environment.argument("merge") then
scripts.package.merge_luatex_files(environment.files[1] or "")
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua b/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua
index fbcb251b1f2..7144da9b090 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua
@@ -12,17 +12,36 @@ local addsuffix = file.addsuffix
local lpegmatch, validutf8 = lpeg.match, lpeg.patterns.validutf8
local helpinfo = [[
---convert generate context language files (mnemonic driven, if not given then all)
---check check pattern file (or those used by context when no file given)
---path source path where hyph-foo.tex files are stored
---destination destination path
-
-examples of usage:
-
-mtxrun --script pattern --check hyph-*.tex
-mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns
-mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns
-mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-patterns</entry>
+ <entry name="detail">ConTeXt Pattern File Management</entry>
+ <entry name="version">0.20</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="convert"><short>generate context language files (mnemonic driven, if not given then all)</short></flag>
+ <flag name="check"><short>check pattern file (or those used by context when no file given)</short></flag>
+ <flag name="path"><short>source path where hyph-foo.tex files are stored</short></flag>
+ <flag name="destination"><short>destination path</short></flag>
+ <flag name="specification"><short>additional patterns: e.g.: =cy,hyph-cy,welsh</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Examples</title>
+ <subcategory>
+ <example><command>mtxrun --script pattern --check hyph-*.tex</command></example>
+ <example><command>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</command></example>
+ <example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</command></example>
+ <example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
]]
local application = logs.application {
@@ -454,7 +473,19 @@ function scripts.patterns.save(destination,mnemonic,name,patternsnew,hyphenation
end
function scripts.patterns.prepare()
+ --
dofile(resolvers.findfile("char-def.lua"))
+ --
+ local specification = environment.argument("specification")
+ if specification then
+ local components = utilities.parsers.settings_to_array(specification)
+ if #components == 3 then
+ table.insert(scripts.patterns.list,1,components)
+ report("specification added: %s %s %s",table.unpack(components))
+ else
+ report('invalid specification: %q, "xx,lang-yy,zzzz" expected',specification)
+ end
+ end
end
function scripts.patterns.check()
@@ -484,7 +515,7 @@ function scripts.patterns.convert()
else
local destination = environment.argument("destination") or "."
if path == destination then
- resport("source path and destination path should differ (use --path and/or --destination)")
+ report("source path and destination path should differ (use --path and/or --destination)")
else
local files = environment.files
local only = false
@@ -516,6 +547,8 @@ if environment.argument("check") then
elseif environment.argument("convert") then
scripts.patterns.prepare()
scripts.patterns.convert()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-pdf.lua b/Master/texmf-dist/scripts/context/lua/mtx-pdf.lua
index 5654b8bc487..3e4130344ee 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-pdf.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-pdf.lua
@@ -6,14 +6,35 @@ if not modules then modules = { } end modules ['mtx-pdf'] = {
license = "see context related readme files"
}
+local tonumber = tonumber
+local format, gmatch = string.format, string.gmatch
+local utfchar = utf.char
+local concat = table.concat
+local setmetatableindex, sortedhash, sortedkeys = table.setmetatableindex, table.sortedhash, table.sortedkeys
+
local helpinfo = [[
---info show some info about the given file
---metadata show metadata xml blob
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-pdf</entry>
+ <entry name="detail">ConTeXt PDF Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="info"><short>show some info about the given file</short></flag>
+ <flag name="metadata"><short>show metadata xml blob</short></flag>
+ <flag name="fonts"><short>show used fonts (<ref name="detail)"/></short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
name = "mtx-pdf",
- banner = "ConTeXt PDF Helpers 0.01",
+ banner = "ConTeXt PDF Helpers 0.10",
helpinfo = helpinfo,
}
@@ -39,9 +60,8 @@ local function loadpdffile(filename)
end
end
-function scripts.pdf.info()
- local filename = environment.files[1]
- local pdffile = loadpdffile(filename)
+function scripts.pdf.info(filename)
+ local pdffile = loadpdffile(filename)
if pdffile then
local catalog = pdffile.Catalog
local info = pdffile.Info
@@ -73,9 +93,8 @@ function scripts.pdf.info()
end
end
-function scripts.pdf.metadata()
- local filename = environment.files[1]
- local pdffile = loadpdffile(filename)
+function scripts.pdf.metadata(filename)
+ local pdffile = loadpdffile(filename)
if pdffile then
local catalog = pdffile.Catalog
local metadata = catalog.Metadata
@@ -87,10 +106,129 @@ function scripts.pdf.metadata()
end
end
-if environment.argument("info") then
- scripts.pdf.info()
+local function getfonts(pdffile)
+ local usedfonts = { }
+ for i=1,pdffile.pages.n do
+ local page = pdffile.pages[i]
+ local fontlist = page.Resources.Font
+ for k, v in next, lpdf.epdf.expand(fontlist) do
+ usedfonts[k] = lpdf.epdf.expand(v)
+ end
+ end
+ return usedfonts
+end
+
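+-- getunicodes reads the font's ToUnicode stream (when present), decodes the hexadecimal
+-- beginbfrange/beginbfchar entries, and returns a table counting how often each unicode
+-- code point is mapped; scripts.pdf.fonts turns these counts into its character report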
+local function getunicodes(font)
+ local cid = font.ToUnicode
+ if cid then
+ cid = cid()
+ local counts = { }
+ -- for s in gmatch(cid,"begincodespacerange%s*(.-)%s*endcodespacerange") do
+ -- for a, b in gmatch(s,"<([^>]+)>%s+<([^>]+)>") do
+ -- print(a,b)
+ -- end
+ -- end
+ setmetatableindex(counts, function(t,k) t[k] = 0 return 0 end)
+ for s in gmatch(cid,"beginbfrange%s*(.-)%s*endbfrange") do
+ for first, last, offset in gmatch(s,"<([^>]+)>%s+<([^>]+)>%s+<([^>]+)>") do
+ first = tonumber(first,16)
+ last = tonumber(last,16)
+ offset = tonumber(offset,16)
+ offset = offset - first
+ for i=first,last do
+ local c = i + offset
+ counts[c] = counts[c] + 1
+ end
+ end
+ end
+ for s in gmatch(cid,"beginbfchar%s*(.-)%s*endbfchar") do
+ for old, new in gmatch(s,"<([^>]+)>%s+<([^>]+)>") do
+ for n in gmatch(new,"....") do
+ local c = tonumber(n,16)
+ counts[c] = counts[c] + 1
+ end
+ end
+ end
+ return counts
+ end
+end
+
+function scripts.pdf.fonts(filename)
+ local pdffile = loadpdffile(filename)
+ if pdffile then
+ local usedfonts = getfonts(pdffile)
+ local found = { }
+ for k, v in table.sortedhash(usedfonts) do
+ local counts = getunicodes(v)
+ local codes = { }
+ local chars = { }
+ local freqs = { }
+ if counts then
+ codes = sortedkeys(counts)
+ for i=1,#codes do
+ local k = codes[i]
+ local c = utfchar(k)
+ chars[i] = c
+ freqs[i] = format("U+%05X %s %s",k,counts[k] > 1 and "+" or " ", c)
+ end
+ for i=1,#codes do
+ codes[i] = format("U+%05X",codes[i])
+ end
+ end
+ found[k] = {
+ basefont = v.BaseFont or "no basefont",
+ encoding = v.Encoding or "no encoding",
+ subtype = v.Subtype or "no subtype",
+ unicode = v.ToUnicode and "unicode" or "no unicode",
+ chars = chars,
+ codes = codes,
+ freqs = freqs,
+ }
+ end
+
+ if environment.argument("detail") then
+ for k, v in sortedhash(found) do
+ report("id : %s",k)
+ report("basefont : %s",v.basefont)
+ report("encoding : %s",v.encoding)
+ report("subtype : %s",v.subtype)
+ report("unicode : %s",v.unicode)
+ report("characters : %s", concat(v.chars," "))
+ report("codepoints : %s", concat(v.codes," "))
+ report("")
+ end
+ else
+ local results = { { "id", "basefont", "encoding", "subtype", "unicode", "characters" } }
+ for k, v in sortedhash(found) do
+ results[#results+1] = { k, v.basefont, v.encoding, v.subtype, v.unicode, concat(v.chars," ") }
+ end
+ utilities.formatters.formatcolumns(results)
+ report(results[1])
+ report("")
+ for i=2,#results do
+ report(results[i])
+ end
+ report("")
+ end
+ end
+end
+
+-- scripts.pdf.info("e:/tmp/oeps.pdf")
+-- scripts.pdf.metadata("e:/tmp/oeps.pdf")
+-- scripts.pdf.fonts("e:/tmp/oeps.pdf")
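+--
+-- the same functionality is normally reached through the mtxrun dispatcher, for instance
+-- (hypothetical file name): mtxrun --script pdf --fonts --detail somefile.pdf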
+
+local filename = environment.files[1] or ""
+
+if filename == "" then
+ application.help()
+elseif environment.argument("info") then
+ scripts.pdf.info(filename)
elseif environment.argument("metadata") then
- scripts.pdf.metadata()
+ scripts.pdf.metadata(filename)
+elseif environment.argument("fonts") then
+ scripts.pdf.fonts(filename)
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-profile.lua b/Master/texmf-dist/scripts/context/lua/mtx-profile.lua
index 74e1d153845..3550474f38c 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-profile.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-profile.lua
@@ -12,8 +12,22 @@ if not modules then modules = { } end modules ['mtx-profile'] = {
local match, format, find = string.match, string.format, string.find
local helpinfo = [[
---analyze analyze lua calls
---trace analyze tex calls
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-profile</entry>
+ <entry name="detail">ConTeXt MkIV LuaTeX Profiler</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="analyze"><short>analyze lua calls</short></flag>
+ <flag name="trace"><short>analyze tex calls</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -41,33 +55,28 @@ function scripts.profiler.analyze(filename)
if f then
local times, counts, calls = { }, { }, { }
local totalruntime, totalcount, totalcalls = 0, 0, 0
- while true do
- local line = f:read()
- if line then
- local stacklevel, filename, functionname, linenumber, currentline, localtime, totaltime = line:match("^(%d+)\t(.-)\t(.-)\t(.-)\t(.-)\t(.-)\t(.-)")
- if not filename then
- -- next
- elseif filename == "=[C]" then
- if not functionname:find("^%(") then
- calls[functionname] = (calls[functionname] or 0) + 1
- end
- else
- local filename = filename:match("^@(.*)$")
- if filename then
- local fi = times[filename]
- if not fi then fi = { } times[filename] = fi end
- fi[functionname] = (fi[functionname] or 0) + tonumber(localtime)
- counts[functionname] = (counts[functionname] or 0) + 1
- end
+ for line in f:lines() do
+ local stacklevel, filename, functionname, linenumber, currentline, localtime, totaltime = line:match("^(%d+)\t(.-)\t(.-)\t(.-)\t(.-)\t(.-)\t(.-)")
+ if not filename then
+ -- next
+ elseif filename == "=[C]" then
+ if not functionname:find("^%(") then
+ calls[functionname] = (calls[functionname] or 0) + 1
end
else
- break
+ local filename = filename:match("^@(.*)$")
+ if filename then
+ local fi = times[filename]
+ if not fi then fi = { } times[filename] = fi end
+ fi[functionname] = (fi[functionname] or 0) + tonumber(localtime)
+ counts[functionname] = (counts[functionname] or 0) + 1
+ end
end
end
f:close()
print("")
local loaded = { }
- sortedtable.sortedkeys(times)
+ local sorted = table.sortedkeys(times)
for i=1,#sorted do
local filename = sorted[i]
local functions = times[filename]
@@ -171,6 +180,8 @@ if environment.argument("analyze") then
scripts.profiler.analyze(environment.files[1] or "luatex-profile.log")
elseif environment.argument("trace") then
scripts.profiler.analyze(environment.files[1] or "temp.log")
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-rsync.lua b/Master/texmf-dist/scripts/context/lua/mtx-rsync.lua
index b549d5bcb62..65f795ee557 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-rsync.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-rsync.lua
@@ -46,9 +46,23 @@ if not modules then modules = { } end modules ['mtx-rsync'] = {
-- }
local helpinfo = [[
---job use given file as specification
---dryrun show what would happen
---force force run
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-rsync</entry>
+ <entry name="detail">Rsync Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="job"><short>use given file as specification</short></flag>
+ <flag name="dryrun"><short>show what would happen</short></flag>
+ <flag name="force"><short>force run</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -79,7 +93,7 @@ else
end
function rsynccommand(dryrun,recurse,origin,target)
- local command = "rsync -ptlv "
+ local command = "rsync -ptlva "
if dryrun then
command = command .. "-n "
end
@@ -104,6 +118,11 @@ function rsync.run(origin,target,message,recurse)
end
origin = cleanup(origin)
target = cleanup(target)
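+    -- map a cygwin style /cygdrive/x prefix back to a drive letter so that the directory
+    -- test and dir.makedirs below also work on windows before rsync is invoked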
+ local path = gsub(target,"^/cygdrive/(.)","%1:")
+ if not lfs.isdir(path) then
+ report_message("creating target dir %s",path)
+ dir.makedirs(path) -- as rsync only creates them when --recursive
+ end
if message then
report_message(message)
end
@@ -159,7 +178,9 @@ elseif arguments.force then
rsync.mode = "force"
end
-if arguments.job then
+if arguments.exporthelp then
+ application.export(arguments.exporthelp,environment.files[1])
+elseif arguments.job then
rsync.job(files[1])
elseif files[1] and files[2] then
rsync.run(files[1],files[2])
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-scite.lua b/Master/texmf-dist/scripts/context/lua/mtx-scite.lua
index 116555e79b0..972edbfe6a6 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-scite.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-scite.lua
@@ -10,13 +10,22 @@ local P, R, S, C, Ct, Cf, Cc, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg
local lpegmatch = lpeg.match
local format, lower, gmatch = string.format, string.lower, string.gmatch
--- local helpinfo = [[
--- --start [--verbose] start scite
--- --test report what will happen
--- ]]
-
local helpinfo = [[
---words convert spell-*.txt into spell-*.lua
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-scite</entry>
+ <entry name="detail">Scite Helper Script</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="words"><short>convert spell-*.txt into spell-*.lua</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -199,14 +208,31 @@ function scripts.scite.words()
if lfs.isfile(txtname) then
report("loading %s",txtname)
local olddata = io.loaddata(txtname) or ""
+ local words = splitwords(olddata)
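+            -- record the shortest and longest word length and the total number of words;
+            -- they are stored below as the min, max and n fields of the saved lua table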
+ local min, max, n = 100, 1, 0
+ for k, v in next, words do
+ local l = #k
+ if l < min then
+ min = l
+ end
+ if l > max then
+ max = l
+ end
+ n = n + 1
+ end
+ if min > max then
+ min = max
+ end
local newdata = {
- words = splitwords(olddata),
- -- words = olddata,
+ words = words,
source = oldname,
+ min = min,
+ max = max,
+ n = n,
}
- report("saving %s",luaname)
+ report("saving %q, %s words, %s shortest, %s longest",luaname,n,min,max)
io.savedata(luaname,table.serialize(newdata,true))
- report("compiling %s",lucname)
+ report("compiling %q",lucname)
os.execute(format("luac -s -o %s %s",lucname,luaname))
else
report("no data file %s",txtname)
@@ -225,6 +251,8 @@ end
if environment.argument("words") then
scripts.scite.words()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua
index b30cf017525..a8d7edf4106 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua
@@ -6,12 +6,17 @@ if not modules then modules = { } end modules ['mtx-server-ctx-fonttest'] = {
license = "see context related readme files"
}
---~ dofile(resolvers.findfile("l-aux.lua","tex"))
+-- probably too much but who cares
+
dofile(resolvers.findfile("trac-lmx.lua","tex"))
+dofile(resolvers.findfile("font-ini.lua","tex"))
+dofile(resolvers.findfile("font-con.lua","tex"))
+dofile(resolvers.findfile("font-oti.lua","tex"))
+dofile(resolvers.findfile("font-otf.lua","tex"))
+dofile(resolvers.findfile("font-otp.lua","tex"))
dofile(resolvers.findfile("font-ott.lua","tex"))
dofile(resolvers.findfile("font-syn.lua","tex"))
dofile(resolvers.findfile("font-mis.lua","tex"))
---~ dofile(resolvers.findfile("font-otp.lua","tex"))
local format, gsub, concat, match, find = string.format, string.gsub, table.concat, string.match, string.find
@@ -34,7 +39,7 @@ local process_templates = { }
process_templates.default = [[
\starttext
- \setcharactermirroring[1]
+ \setupdirections[bidi=global]
\definefontfeature[sample][analyze=yes,%s]
\definedfont[name:%s*sample]
\startTEXpage[offset=3pt]
@@ -59,7 +64,7 @@ process_templates.trace = [[
\setupcolors[state=start]
-\setcharactermirroring[1]
+\setupdirections[bidi=global]
\setvariables
[otftracker]
@@ -280,9 +285,11 @@ local edit_template = [[
<br/> <br/>options:&nbsp;%s
]]
+-- <embed src="%s#toolbar=0&amp;navpanes=0&amp;scrollbar=0" width="100%%"/>
+
local result_template = [[
<br/> <br/>
- <embed src="%s#toolbar=0&amp;navpanes=0&amp;scrollbar=0" width="100%%"/>
+ <embed src="%s#view=Fit&amp;toolbar=0&amp;navpanes=0&amp;scrollbar=0" width="100%%"/>
<br/> <br/> results:
<a href='%s' target="source">tex file</a>
<a href='%s' target="result">pdf file</a>
@@ -377,8 +384,14 @@ local function process_font(currentfont,detail) -- maybe just fontname
local sample = string.strip(detail.sampletext or "")
if sample == "" then sample = sample_line end
report("sample text: %s",sample)
- io.savedata(file.join(temppath,file.addsuffix(tempname,"tex")),format(variant,concat(features,","),currentfont,sample))
- os.execute(format("mtxrun --path=%s --script context --once --batchmode %s",temppath,tempname))
+ dir.mkdirs(temppath)
+ local fullname = file.join(temppath,file.addsuffix(tempname,"tex"))
+ local data = format(variant,concat(features,","),currentfont,sample)
+ local command = format("mtxrun --path=%q --script context --once --batchmode %q",temppath,tempname)
+ report("filename: %s",fullname)
+ report("command: %s",command)
+ io.savedata(fullname,data)
+ os.execute(command)
return edit_font(currentfont,detail,tempname)
end
@@ -390,7 +403,8 @@ local tex_template = [[
local function show_source(currentfont,detail)
if tempname and tempname ~= "" then
- return format(tex_template,io.loaddata(file.join(temppath,file.addsuffix(tempname,"tex"))) or "no source yet")
+ local data = io.loaddata(file.join(temppath,file.addsuffix(tempname,"tex"))) or "no source yet"
+ return format(tex_template,data)
else
return "no source file"
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua
index a212e1369d0..b8dc0dfb278 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua
@@ -9,13 +9,11 @@ if not modules then modules = { } end modules ['mtx-server-ctx-help'] = {
-- todo in lua interface: noargument, oneargument, twoarguments, threearguments
-- todo: pickup translations from mult file
---~ dofile(resolvers.findfile("l-aux.lua","tex"))
---~ dofile(resolvers.findfile("l-url.lua","tex"))
dofile(resolvers.findfile("trac-lmx.lua","tex"))
-- problem ... serialize parent stack
-local format = string.format
+local format, match, gsub, find = string.format, string.match, string.gsub, string.find
local concat = table.concat
local report = logs.reporter("ctx-help")
@@ -282,18 +280,48 @@ document.setups.translations = document.setups.translations or {
}
document.setups.formats = {
- open_command = { [[\%s]], [[context.%s (]] },
- close_command = { [[]], [[ )]] },
- connector = { [[]], [[, ]] },
- href_in_list = { [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>%s</a>]], [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>%s</a>]] },
- href_as_command = { [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>\%s</a>]], [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>context.%s</a>]] },
+ open_command = {
+ tex = [[\%s]],
+ lua = [[context.%s (]],
+ },
+ close_command = {
+ tex = [[]],
+ lua = [[ )]],
+ },
+ connector = {
+ tex = [[]],
+ lua = [[, ]],
+ },
+ href_in_list = {
+ tex = [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>%s</a>]],
+ lua = [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>%s</a>]],
+ },
+ href_as_command = {
+ tex = [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>\%s</a>]],
+ lua = [[<a href='mtx-server-ctx-help.lua?command=%s&mode=%s'>context.%s</a>]],
+ },
+ modes = {
+ tex = [[<a href='mtx-server-ctx-help.lua?mode=lua'>lua mode</a>]],
+ lua = [[<a href='mtx-server-ctx-help.lua?mode=tex'>tex mode</a>]],
+ },
+ optional_single = {
+ tex = "[optional string %s]",
+ lua = "{optional string %s}",
+ },
+ optional_list = {
+ tex = "[optional list %s]",
+ lua = "{optional table %s}" ,
+ } ,
+ mandate_single = {
+ tex = "[mandate string %s]",
+ lua = "{mandate string %s}",
+ },
+ mandate_list = {
+ tex = "[mandate list %s]",
+ lua = "{mandate list %s}",
+ },
interface = [[<a href='mtx-server-ctx-help.lua?interface=%s&mode=%s'>%s</a>]],
source = [[<a href='mtx-server-ctx-help.lua?source=%s&mode=%s'>%s</a>]],
- modes = { [[<a href='mtx-server-ctx-help.lua?mode=2'>lua mode</a>]], [[<a href='mtx-server-ctx-help.lua?mode=1'>tex mode</a>]] },
- optional_single = { "[optional string %s]", "{optional string %s}" },
- optional_list = { "[optional list %s]", "{optional table %s}" } ,
- mandate_single = { "[mandate string %s]", "{mandate string %s}" },
- mandate_list = { "[mandate list %s]", "{mandate list %s}" },
parameter = [[<tr><td width='15%%'>%s</td><td width='15%%'>%s</td><td width='70%%'>%s</td></tr>]],
parameters = [[<table width='100%%'>%s</table>]],
listing = [[<pre><t>%s</t></listing>]],
@@ -315,7 +343,7 @@ end
local function translated(e,int)
local attributes = e.at
local s = attributes.type or "?"
- local tag = s:match("^cd:(.*)$")
+ local tag = match(s,"^cd:(.*)$")
if attributes.default == "yes" then
return format(document.setups.formats.default,tag or "?")
elseif tag then
@@ -329,7 +357,7 @@ document.setups.loaded = document.setups.loaded or { }
document.setups.current = { }
document.setups.showsources = true
-document.setups.mode = 1
+document.setups.mode = "tex"
function document.setups.load(filename)
filename = resolvers.findfile(filename) or ""
@@ -402,7 +430,7 @@ end
function document.setups.show(name)
local current = document.setups.current
if current.root then
- local name = name:gsub("[<>]","")
+ local name = gsub(name,"[<>]","")
local setup = xml.first(current.root,"cd:command[@name='" .. name .. "']")
current.used[#current.used+1] = setup
xml.sprint(setup)
@@ -452,12 +480,12 @@ function document.setups.collect(name,int,lastmode)
category = attributes.category or "",
}
if document.setups.showsources then
- data.source = (attributes.file and formats.source:format(attributes.file,lastmode,attributes.file)) or ""
+ data.source = (attributes.file and format(formats.source,attributes.file,lastmode,attributes.file)) or ""
else
data.source = attributes.file or ""
end
local n, sequence, tags = 0, { }, { }
- sequence[#sequence+1] = formats.open_command[lastmode]:format(document.setups.csname(command,int))
+ sequence[#sequence+1] = format(formats.open_command[lastmode],document.setups.csname(command,int))
local arguments, tag = { }, ""
for r, d, k in xml.elements(command,"(cd:keywords|cd:assignments)") do
n = n + 1
@@ -470,15 +498,15 @@ function document.setups.collect(name,int,lastmode)
end
if attributes.optional == 'yes' then
if attributes.list == 'yes' then
- tag = formats.optional_list[lastmode]:format(n)
+ tag = format(formats.optional_list[lastmode],n)
else
- tag = formats.optional_single[lastmode]:format(n)
+ tag = format(formats.optional_single[lastmode],n)
end
else
if attributes.list == 'yes' then
- tag = formats.mandate_list[lastmode]:format(n)
+ tag = format(formats.mandate_list[lastmode],n)
else
- tag = formats.mandate_single[lastmode]:format(n)
+ tag = format(formats.mandate_single[lastmode],n)
end
end
sequence[#sequence+1] = tag
@@ -506,7 +534,7 @@ function document.setups.collect(name,int,lastmode)
right[#right+1] = translated(d[k],int)
end
end
- parameters[#parameters+1] = formats.parameter:format(left,"",concat(right, ", "))
+ parameters[#parameters+1] = format(formats.parameter,left,"",concat(right, ", "))
else
local what = tags[n]
for r, d, k in xml.elements(d[k],"(cd:parameter|cd:inherit)") do
@@ -514,11 +542,11 @@ function document.setups.collect(name,int,lastmode)
local left, right = d[k].at.name or "?", { }
if tag == "inherit" then
local name = d[k].at.name or "?"
- local goto = document.setups.formats.href_as_command[lastmode]:format(name,lastmode,name)
- if #parameters > 0 and not parameters[#parameters]:find("<br/>") then
- parameters[#parameters+1] = formats.parameter:format("<br/>","","")
+ local goto = format(document.setups.formats.href_as_command[lastmode],name,lastmode,name)
+ if #parameters > 0 and not find(parameters[#parameters],"<br/>") then
+ parameters[#parameters+1] = format(formats.parameter,"<br/>","","")
end
- parameters[#parameters+1] = formats.parameter:format(what,formats.special:format(translate("inherits",int)),goto)
+ parameters[#parameters+1] = format(formats.parameter,what,format(formats.special,translate("inherits",int)),goto)
else
for r, d, k in xml.elements(d[k],"(cd:constant|cd:resolve)") do
local tag = d[k].tg
@@ -534,15 +562,15 @@ function document.setups.collect(name,int,lastmode)
right[#right+1] = translated(d[k],int)
end
end
- parameters[#parameters+1] = formats.parameter:format(what,left,concat(right, ", "))
+ parameters[#parameters+1] = format(formats.parameter,what,left,concat(right, ", "))
end
what = ""
end
end
- parameters[#parameters+1] = formats.parameter:format("<br/>","","")
+ parameters[#parameters+1] = format(formats.parameter,"<br/>","","")
end
data.parameters = parameters or { }
- data.mode = formats.modes[lastmode or 1]
+ data.mode = formats.modes[lastmode or "tex"]
return data
else
return nil
@@ -566,7 +594,7 @@ local interfaces = {
romanian = 'ro',
}
-local lastinterface, lastcommand, lastsource, lastmode = "en", "", "", 1
+local lastinterface, lastcommand, lastsource, lastmode = "en", "", "", "tex"
local variables = {
['color-background-main-left'] = '#3F3F3F',
@@ -584,78 +612,87 @@ local function doit(configuration,filename,hashed)
local formats = document.setups.formats
- local start = os.clock()
+ local start = os.clock()
+ local detail = hashed.queries or { }
- local detail = url.query(hashed.query or "")
+ if detail then
- lastinterface = detail.interface or lastinterface
- lastcommand = detail.command or lastcommand
- lastsource = detail.source or lastsource
- lastmode = tonumber(detail.mode or lastmode) or 1
+ lastinterface = detail.interface or lastinterface
+ lastcommand = detail.command or lastcommand
+ lastsource = detail.source or lastsource
+ lastmode = detail.mode or lastmode or "tex"
- if lastinterface then
- report("checking interface: %s",lastinterface)
- document.setups.load(format("cont-%s.xml",lastinterface))
- end
+ lastcommand = gsub(lastcommand,"%s*^\\*(.+)%s*","%1")
- local div = document.setups.div[lastinterface]
- local span = document.setups.span[lastinterface]
+ if lastinterface then
+ report("checking interface: %s",lastinterface)
+ document.setups.load(format("cont-%s.xml",lastinterface))
+ end
- local result = { content = "error" }
+ local div = document.setups.div [lastinterface]
+ local span = document.setups.span[lastinterface]
- local names, refs, ints = document.setups.names(lastinterface), { }, { }
- for k=1,#names do
- local v = names[k]
- refs[k] = formats.href_in_list[lastmode]:format(v[1],lastmode,v[2])
- end
- if lastmode ~= 2 then
- local sorted = table.sortedkeys(interfaces)
- for k=1,#sorted do
- local v = sorted[k]
- ints[k] = formats.interface:format(interfaces[v],lastmode,v)
+ local names, refs, ints = document.setups.names(lastinterface), { }, { }
+ for k=1,#names do
+ local v = names[k]
+ refs[k] = format(formats.href_in_list[lastmode],v[1],lastmode,v[2])
+ end
+ if lastmode ~= "lua" then
+ local sorted = table.sortedkeys(interfaces)
+ for k=1,#sorted do
+ local v = sorted[k]
+ ints[k] = format(formats.interface,interfaces[v],lastmode,v)
+ end
end
- end
- local n = concat(refs,"<br/>")
- local i = concat(ints,"<br/><br/>")
+ local n = concat(refs,"<br/>")
+ local i = concat(ints,"<br/><br/>")
- if div then
- variables.names = div:format(n)
- variables.interfaces = div:format(i)
- else
- variables.names = n
- variables.interfaces = i
- end
+ if div then
+ variables.names = format(div,n)
+ variables.interfaces = format(div,i)
+ else
+ variables.names = n
+ variables.interfaces = i
+ end
- -- first we need to add information about mkii/mkiv
-
- variables.maintitle = "no definition"
- variables.maintext = ""
- variables.extra = ""
-
- if document.setups.showsources and lastsource and lastsource ~= "" then
- -- todo: mkii, mkiv, tex (can be different)
- local data = io.loaddata(resolvers.findfile(lastsource))
- variables.maintitle = lastsource
- variables.maintext = formats.listing:format(data)
- lastsource = ""
- elseif lastcommand and lastcommand ~= "" then
- local data = document.setups.collect(lastcommand,lastinterface,lastmode)
- if data then
- local what, extra = { "environment", "category", "source", "mode" }, { }
- for k=1,#what do
- local v = what[k]
- if data[v] and data[v] ~= "" then
- lmx.set(v, data[v])
- extra[#extra+1] = v .. ": " .. data[v]
+ -- first we need to add information about mkii/mkiv
+
+ variables.maintitle = "no definition"
+ variables.maintext = ""
+ variables.extra = ""
+
+ if document.setups.showsources and lastsource and lastsource ~= "" then
+ -- todo: mkii, mkiv, tex (can be different)
+ local data = io.loaddata(resolvers.findfile(lastsource))
+ variables.maintitle = lastsource
+ variables.maintext = format(formats.listing,data)
+ lastsource = ""
+ elseif lastcommand and lastcommand ~= "" then
+ local data = document.setups.collect(lastcommand,lastinterface,lastmode)
+ if data then
+ local what, extra = { "environment", "category", "source", "mode" }, { }
+ for k=1,#what do
+ local v = what[k]
+ if data[v] and data[v] ~= "" then
+ lmx.set(v, data[v])
+ extra[#extra+1] = v .. ": " .. data[v]
+ end
end
+ variables.maintitle = data.sequence
+ variables.maintext = format(formats.parameters,concat(data.parameters))
+ variables.extra = concat(extra,"&nbsp;&nbsp;&nbsp;")
+ else
+ variables.maintext = "select command"
end
- variables.maintitle = data.sequence
- variables.maintext = formats.parameters:format(concat(data.parameters))
- variables.extra = concat(extra,"&nbsp;&nbsp;&nbsp;")
- else
- variables.maintext = "select command"
end
+
+ else
+
+ variables.maintitle = "no definition"
+ variables.maintext = "some error"
+ variables.extra = ""
+
end
local content = lmx.convert('context-help.lmx',false,variables)
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server.lua b/Master/texmf-dist/scripts/context/lua/mtx-server.lua
index 068d5111130..5ec15de708f 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-server.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-server.lua
@@ -7,12 +7,26 @@ if not modules then modules = { } end modules ['mtx-server'] = {
}
local helpinfo = [[
---start start server
---port port to listen to
---root server root
---scripts scripts sub path
---index index file
---auto start on own path
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-server</entry>
+ <entry name="detail">Simple Webserver For Helpers</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="start"><short>start server</short></flag>
+ <flag name="port"><short>port to listen to</short></flag>
+ <flag name="root"><short>server root</short></flag>
+ <flag name="scripts"><short>scripts sub path</short></flag>
+ <flag name="index"><short>index file</short></flag>
+ <flag name="auto"><short>start on own path</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -26,11 +40,10 @@ local report = application.report
scripts = scripts or { }
scripts.webserver = scripts.webserver or { }
-dofile(resolvers.findfile("l-url.lua","tex"))
dofile(resolvers.findfile("luat-soc.lua","tex"))
local socket = socket or require("socket")
-local http = socket or require("socket.http")
+local http = http or require("socket.http") -- not needed
local format = string.format
-- The following two lists are taken from webrick (ruby) and
@@ -192,13 +205,13 @@ function handlers.generic(client,configuration,data,suffix,iscontent)
end
end
---~ return os.date()
+-- return os.date()
---~ return { content = "crap" }
+-- return { content = "crap" }
---~ return function(configuration,filename)
---~ return { content = filename }
---~ end
+-- return function(configuration,filename)
+-- return { content = filename }
+-- end
local loaded = { }
@@ -226,11 +239,13 @@ function handlers.lua(client,configuration,filename,suffix,iscontent,hashed) --
end
end
else
+ report("problematic script: %s",filename)
errormessage(client,configuration,404)
end
end
if result then
if type(result) == "function" then
+ report("running script: %s",filename)
result = result(configuration,filename,hashed) -- second argument will become query
end
if result and type(result) == "string" then
@@ -242,7 +257,7 @@ function handlers.lua(client,configuration,filename,suffix,iscontent,hashed) --
local action = handlers[suffix] or handlers.generic
action(client,configuration,result.content,suffix,true) -- content
elseif result.filename then
- local suffix = file.extname(result.filename) or "text/html"
+ local suffix = file.suffix(result.filename) or "text/html"
local action = handlers[suffix] or handlers.generic
action(client,configuration,result.filename,suffix,false) -- filename
else
@@ -301,40 +316,50 @@ function scripts.webserver.run(configuration)
report("scripts subpath: %s",configuration.scripts)
report("context services: http://localhost:%s/mtx-server-ctx-startup.lua",configuration.port)
local server = assert(socket.bind("*", configuration.port))
--- local reading = { server }
- while true do -- no multiple clients
+ local script = configuration.script
+ while true do -- blocking
local start = os.clock()
--- local input = socket.select(reading)
--- local client = input:accept()
local client = server:accept()
client:settimeout(configuration.timeout or 60)
local request, e = client:receive()
--- local request, e = client:receive("*a") -- doesn't work well (so no post)
if e then
errormessage(client,configuration,404)
else
local from = client:getpeername()
report("request from: %s",tostring(from))
- local fullurl = request:match("GET (.+) HTTP/.*$") or "" -- todo: more clever / post
+ report("request data: %s",tostring(request))
+ local fullurl = string.match(request,"GET (.+) HTTP/.*$") or "" -- todo: more clever / post
if fullurl == "" then
+ report("no url")
errormessage(client,configuration,404)
else
- fullurl = socket.url.unescape(fullurl)
+ report("requested url: %s",fullurl)
+ fullurl = socket.url.unescape(fullurl) -- still needed?
local hashed = url.hashed(fullurl)
local query = url.query(hashed.query)
- local filename = hashed.path
--- table.print(hashed)
- if filename then
+ local filename = hashed.path -- hm, not query?
+ if script then
+ filename = script
+ report("forced script: %s",filename)
+ local suffix = file.suffix(filename)
+ local action = handlers[suffix] or handlers.generic
+ if action then
+ report("performing action: %s",filename)
+ action(client,configuration,filename,suffix,false,hashed) -- filename and no content
+ else
+ errormessage(client,configuration,404)
+ end
+ elseif filename then
filename = socket.url.unescape(filename)
report("requested action: %s",filename)
- if filename:find("%.%.") then
+ if string.find(filename,"%.%.") then
filename = nil -- invalid path
end
if filename == nil or filename == "" or filename == "/" then
filename = configuration.index
report("invalid filename, forcing: %s",filename)
end
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
local action = handlers[suffix] or handlers.generic
if action then
report("performing action: %s",filename)
@@ -358,6 +383,7 @@ if environment.argument("auto") then
port = environment.argument("port"),
root = environment.argument("root") or file.dirname(path) or ".",
scripts = environment.argument("scripts") or file.dirname(path) or ".",
+ script = environment.argument("script"),
}
elseif environment.argument("start") then
scripts.webserver.run {
@@ -365,10 +391,12 @@ elseif environment.argument("start") then
root = environment.argument("root") or ".", -- "e:/websites/www.pragma-ade.com",
index = environment.argument("index"),
scripts = environment.argument("scripts"),
+ script = environment.argument("script"),
}
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
-
-- mtxrun --script server --start => http://localhost:31415/mtx-server-ctx-startup.lua
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua b/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua
index 33e56df468a..ae5f2afa419 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua
@@ -7,8 +7,22 @@ if not modules then modules = { } end modules ['mtx-texworks'] = {
}
local helpinfo = [[
---start [--verbose] start texworks
---test report what will happen
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-texworks</entry>
+ <entry name="detail">TeXworks Startup Script</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="start"><short>[<ref name="verbose]"/> start texworks</short></flag>
+ <flag name="test"><short>report what will happen</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -100,6 +114,8 @@ if environment.argument("start") then
scripts.texworks.start(true)
elseif environment.argument("test") then
scripts.texworks.start()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-timing.lua b/Master/texmf-dist/scripts/context/lua/mtx-timing.lua
index 03100e991af..5ba361e5f1a 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-timing.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-timing.lua
@@ -9,9 +9,23 @@ if not modules then modules = { } end modules ['mtx-timing'] = {
local format, gsub, concat = string.format, string.gsub, table.concat
local helpinfo = [[
---xhtml make xhtml file
---launch launch after conversion
---remove remove after launching
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-timing</entry>
+ <entry name="detail">ConTeXt Timing Tools</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="xhtml"><short>make xhtml file</short></flag>
+ <flag name="launch"><short>launch after conversion</short></flag>
+ <flag name="remove"><short>remove after launching</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -22,6 +36,7 @@ local application = logs.application {
local report = application.report
+dofile(resolvers.findfile("node-snp.lua","tex"))
dofile(resolvers.findfile("trac-tim.lua","tex"))
dofile(resolvers.findfile("trac-lmx.lua","tex"))
@@ -71,7 +86,7 @@ local directrun = true
local what = { "parameters", "nodes" }
-plugins = plugins or { } -- brrr, will become moduledata as well
+plugins = plugins or { progress = { } } -- brrr, will become moduledata as well
function plugins.progress.make_svg(filename,other)
local metadata, menudata, c = { }, { }, 0
@@ -196,6 +211,8 @@ end
if environment.argument("xhtml") then
scripts.timings.xhtml(environment.files[1] or "")
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-tools.lua b/Master/texmf-dist/scripts/context/lua/mtx-tools.lua
index 45961a639a0..19b7458a19a 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-tools.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-tools.lua
@@ -9,22 +9,37 @@ if not modules then modules = { } end modules ['mtx-tools'] = {
local find, format, sub, rep, gsub, lower = string.find, string.format, string.sub, string.rep, string.gsub, string.lower
local helpinfo = [[
---disarmutfbomb remove utf bomb if present
- --force remove indeed
-
---dirtoxml glob directory into xml
- --pattern glob pattern (default: *)
- --url url attribute (no processing)
- --root the root of the globbed path (default: .)
- --output output filename (console by default)
- --recurse recurse into subdirecories
- --stripname take pathpart of given pattern
- --longname set name attributes to full path name
-
---downcase
- --pattern glob pattern (default: *)
- --recurse recurse into subdirecories
- --force downcase indeed
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-tools</entry>
+ <entry name="detail">Some File Related Goodies</entry>
+ <entry name="version">1.01</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="disarmutfbomb"><short>remove utf bomb if present</short></flag>
+ <flag name="force"><short>remove indeed</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="dirtoxml"><short>glob directory into xml</short></flag>
+ <flag name="pattern"><short>glob pattern (default: *)</short></flag>
+ <flag name="url"><short>url attribute (no processing)</short></flag>
+ <flag name="root"><short>the root of the globbed path (default: .)</short></flag>
+ <flag name="output"><short>output filename (console by default)</short></flag>
+    <flag name="recurse"><short>recurse into subdirectories</short></flag>
+ <flag name="stripname"><short>take pathpart of given pattern</short></flag>
+ <flag name="longname"><short>set name attributes to full path name</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern"><short>glob pattern (default: *)</short></flag>
+    <flag name="recurse"><short>recurse into subdirectories</short></flag>
+ <flag name="force"><short>downcase indeed</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -102,7 +117,7 @@ end
function scripts.tools.dirtoxml()
- local join, removesuffix, extname, date = file.join, file.removesuffix, file.extname, os.date
+ local join, removesuffix, suffixonly, date = file.join, file.removesuffix, file.suffixonly, os.date
local xmlns = "http://www.pragma-ade.com/rlg/xmldir.rng"
local timestamp = "%Y-%m-%d %H:%M"
@@ -124,7 +139,7 @@ function scripts.tools.dirtoxml()
if mode == "file" then
result[#result+1] = format("%s<file name='%s'>",d,(longname and path and join(path,name)) or name)
result[#result+1] = format("%s <base>%s</base>",d,removesuffix(name))
- result[#result+1] = format("%s <type>%s</type>",d,extname(name))
+ result[#result+1] = format("%s <type>%s</type>",d,suffixonly(name))
result[#result+1] = format("%s <size>%s</size>",d,attr.size)
result[#result+1] = format("%s <permissions>%s</permissions>",d,sub(attr.permissions,7,9))
result[#result+1] = format("%s <date>%s</date>",d,date(timestamp,attr.modification))
@@ -177,6 +192,8 @@ elseif environment.argument("dirtoxml") then
scripts.tools.dirtoxml()
elseif environment.argument("downcase") then
scripts.tools.downcase()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-unzip.lua b/Master/texmf-dist/scripts/context/lua/mtx-unzip.lua
index 645863426db..02d9676bcaa 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-unzip.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-unzip.lua
@@ -11,9 +11,23 @@ if not modules then modules = { } end modules ['mtx-unzip'] = {
local format = string.format
local helpinfo = [[
---list list files in archive
---junk flatten unzipped directory structure
---extract extract files
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-unzip</entry>
+ <entry name="detail">Simple Unzipper</entry>
+ <entry name="version">0.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="list"><short>list files in archive</short></flag>
+ <flag name="junk"><short>flatten unzipped directory structure</short></flag>
+ <flag name="extract"><short>extract files</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -109,6 +123,8 @@ if environment.arguments["h"] or environment.arguments["help"] then
application.help()
elseif environment.arguments["l"] or environment.arguments["list"] then
scripts.unzipper.list(zipfile)
+elseif environment.arguments["exporthelp"] then
+ application.export(environment.arguments["exporthelp"],environment.files[1])
elseif environment.files[1] then -- implicit --extract
scripts.unzipper.extract(zipfile)
else
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-update.lua b/Master/texmf-dist/scripts/context/lua/mtx-update.lua
index 037de8650ff..64203d3e3e2 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-update.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-update.lua
@@ -12,27 +12,43 @@ if not modules then modules = { } end modules ['mtx-update'] = {
-- platforms that matter.
local helpinfo = [[
---platform=string platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)
---server=string repository url (rsync://contextgarden.net)
---module=string repository url (minimals)
---repository=string specify version (current, experimental)
---context=string specify version (current, latest, beta, yyyy.mm.dd)
---rsync=string rsync binary (rsync)
---texroot=string installation directory (not guessed for the moment)
---engine=string tex engine (luatex, pdftex, xetex)
---modules=string extra modules (can be list or 'all')
---fonts=string additional fonts (can be list or 'all')
---goodies=string extra binaries (like scite and texworks)
---force instead of a dryrun, do the real thing
---update update minimal tree
---make also make formats and generate file databases
---keep don't delete unused or obsolete files
---state update tree using saved state
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-update</entry>
+ <entry name="detail">ConTeXt Minimals Updater</entry>
+ <entry name="version">0.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="platform" value="string"><short>platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)</short></flag>
+ <flag name="server" value="string"><short>repository url (rsync://contextgarden.net)</short></flag>
+ <flag name="module" value="string"><short>repository url (minimals)</short></flag>
+ <flag name="repository" value="string"><short>specify version (current, experimental)</short></flag>
+ <flag name="context" value="string"><short>specify version (current, latest, beta, yyyy.mm.dd)</short></flag>
+ <flag name="rsync" value="string"><short>rsync binary (rsync)</short></flag>
+ <flag name="texroot" value="string"><short>installation directory (not guessed for the moment)</short></flag>
+ <flag name="engine" value="string"><short>tex engine (luatex, pdftex, xetex)</short></flag>
+ <flag name="modules" value="string"><short>extra modules (can be list or 'all')</short></flag>
+ <flag name="fonts" value="string"><short>additional fonts (can be list or 'all')</short></flag>
+ <flag name="goodies" value="string"><short>extra binaries (like scite and texworks)</short></flag>
+ <flag name="force"><short>instead of a dryrun, do the real thing</short></flag>
+ <flag name="update"><short>update minimal tree</short></flag>
+ <flag name="make"><short>also make formats and generate file databases</short></flag>
+ <flag name="keep"><short>don't delete unused or obsolete files</short></flag>
+ <flag name="state"><short>update tree using saved state</short></flag>
+ <flag name="cygwin"><short>adapt drive specs to cygwin</short></flag>
+ <flag name="mingw"><short>assume mingw binaries being used</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
name = "mtx-update",
- banner = "ConTeXt Minimals Updater 0.30",
+ banner = "ConTeXt Minimals Updater 0.31",
helpinfo = helpinfo,
}
@@ -108,6 +124,7 @@ scripts.update.engines = {
["luatex"] = {
{ "fonts/new/", "texmf" },
{ "bin/luatex/<platform>/", "texmf-<platform>" },
+ { "bin/luajittex/<platform>/","texmf-<platform>" },
},
["xetex"] = {
{ "base/xetex/", "texmf" },
@@ -125,6 +142,7 @@ scripts.update.engines = {
{ "fonts/old/", "texmf" },
{ "base/xetex/", "texmf" },
{ "bin/luatex/<platform>/", "texmf-<platform>" },
+ { "bin/luajittex/<platform>/","texmf-<platform>" },
{ "bin/xetex/<platform>/", "texmf-<platform>" },
{ "bin/pdftex/<platform>/", "texmf-<platform>" },
},
@@ -132,10 +150,10 @@ scripts.update.engines = {
scripts.update.goodies = {
["scite"] = {
- { "bin/<platform>/scite/", "texmf-<platform>" },
+ { "bin/<platform>/scite/", "texmf-<platform>" },
},
["texworks"] = {
- { "bin/<platform>/texworks/", "texmf-<platform>" },
+ { "bin/<platform>/texworks/", "texmf-<platform>" },
},
}
@@ -203,6 +221,17 @@ function scripts.update.fullpath(path)
end
end
+local rsync_variant = "cygwin" -- will be come mingw
+
+local function drive(d)
+ if rsync_variant == "cygwin" then
+ d = gsub(d,[[([a-zA-Z]):/]], "/cygdrive/%1/")
+ else
+ d = gsub(d,[[([a-zA-Z]):/]], "/%1/")
+ end
+ return d
+end
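+-- For example (illustration only, not part of the original code):
+-- drive("c:/data/tex/") gives "/cygdrive/c/data/tex/" for the cygwin variant
+-- and "/c/data/tex/" for the mingw variant.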
+
function scripts.update.synchronize()
report("update, start")
@@ -344,7 +373,7 @@ function scripts.update.synchronize()
destination = gsub(destination,"\\","/")
archive = gsub(archive,"<version>",version)
if osplatform == "windows" or osplatform == "mswin" then
- destination = gsub(destination,"([a-zA-Z]):/", "/cygdrive/%1/") -- ^
+ destination = drive(destination)
end
individual[#individual+1] = { archive, destination }
end
@@ -419,11 +448,11 @@ function scripts.update.synchronize()
if platform then
local command
if platform == 'mswin' then
- bin = gsub(bin,"([a-zA-Z]):/", "/cygdrive/%1/")
- texroot = gsub(texroot,"([a-zA-Z]):/", "/cygdrive/%1/")
- command = format("%s -t %s/texmf-context/scripts/context/lua/%s.lua %s/texmf-mswin/bin/", bin, texroot, script, texroot)
+ bin = drive(bin)
+ texroot = drive(texroot)
+ command = format([[%s -t "%s/texmf-context/scripts/context/lua/%s.lua" "%s/texmf-mswin/bin/"]], bin, texroot, script, texroot)
else
- command = format("%s -tgo --chmod=a+x %s/texmf-context/scripts/context/lua/%s.lua %s/texmf-%s/bin/%s", bin, texroot, script, texroot, platform, script)
+ command = format([[%s -tgo --chmod=a+x '%s/texmf-context/scripts/context/lua/%s.lua' '%s/texmf-%s/bin/%s']], bin, texroot, script, texroot, platform, script)
end
report("updating %s for %s: %s", script, platform, command)
scripts.update.run(command)
@@ -496,6 +525,8 @@ function scripts.update.make()
for engine in next, engines do
if engine == "luatex" then
scripts.update.run(format('mtxrun --tree="%s" --script context --autogenerate --make',texroot))
+ elseif engine == "luajittex" then
+ scripts.update.run(format('mtxrun --tree="%s" --script context --autogenerate --make --engine=luajittex',texroot))
else
scripts.update.run(format('mtxrun --tree="%s" --script texexec --make --all --%s %s',texroot,engine,formatlist))
end
@@ -616,6 +647,12 @@ if environment.argument("state") then
environment.setargument("make",true)
end
+if environment.argument("mingw") then
+ rsync_variant = "mingw"
+elseif environment.argument("cygwin") then
+ rsync_variant = "cygwin"
+end
+
if environment.argument("update") then
scripts.update.synchronize()
if environment.argument("make") then
@@ -623,6 +660,8 @@ if environment.argument("update") then
end
elseif environment.argument("make") then
scripts.update.make()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtx-watch.lua b/Master/texmf-dist/scripts/context/lua/mtx-watch.lua
index 36a3176c41a..95323f571cf 100644
--- a/Master/texmf-dist/scripts/context/lua/mtx-watch.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtx-watch.lua
@@ -7,14 +7,28 @@ if not modules then modules = { } end modules ['mtx-watch'] = {
}
local helpinfo = [[
---logpath optional path for log files
---watch watch given path [--delay]
---pipe use pipe instead of execute
---delay delay between sweeps
---automachine replace /machine/ in path /<servername>/
---collect condense log files
---cleanup=delay remove files in given path [--force]
---showlog show log data
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-watch</entry>
+ <entry name="detail">ConTeXt Request Watchdog</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="logpath"><short>optional path for log files</short></flag>
+ <flag name="watch"><short>watch given path [<ref name="delay]"/></short></flag>
+ <flag name="pipe"><short>use pipe instead of execute</short></flag>
+ <flag name="delay"><short>delay between sweeps</short></flag>
+ <flag name="automachine"><short>replace /machine/ in path /servername/</short></flag>
+ <flag name="collect"><short>condense log files</short></flag>
+ <flag name="cleanup" value="delay"><short>remove files in given path [<ref name="force]"/></short></flag>
+ <flag name="showlog"><short>show log data</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -97,7 +111,7 @@ local function glob(files,path) -- some day: sort by name (order prefix) and ati
end
end
-local clock = os.gettimeofday or os.time -- we cannot trust os.clock on linux
+local clock = os.gettimeofday or (socket and socket.gettime) or os.time -- we cannot trust os.clock on linux
-- local function filenamesort(a,b)
-- local fa, da = a[1], a[2]
@@ -227,17 +241,6 @@ function scripts.watch.watch()
end
end
local n, start = 0, time()
---~ local function wait()
---~ io.flush()
---~ if not done then
---~ n = n + 1
---~ if n >= 10 then
---~ report("run time: %i seconds, memory usage: %0.3g MB", difftime(time(),start), (status.luastate_bytes/1024)/1000)
---~ n = 0
---~ end
---~ os.sleep(delay)
---~ end
---~ end
local wtime = 0
local function wait()
io.flush()
@@ -421,6 +424,8 @@ elseif environment.argument("cleanup") then
scripts.watch.save_logs(scripts.watch.cleanup_stale_files())
elseif environment.argument("showlog") then
scripts.watch.show_logs()
+elseif environment.argument("exporthelp") then
+ application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
diff --git a/Master/texmf-dist/scripts/context/lua/mtxlibs.lua b/Master/texmf-dist/scripts/context/lua/mtxlibs.lua
new file mode 100644
index 00000000000..60889acde48
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/lua/mtxlibs.lua
@@ -0,0 +1,240 @@
+if not modules then modules = { } end modules ['mtxlibs'] = {
+ version = 1.001,
+ comment = "a reasonable subset of mtxrun preloaded libraries",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This file can be used to load the (relevant) helper libraries that are also used
+-- in ConTeXt. You can use it as:
+--
+-- -- if needed (outside texlua):
+--
+-- -- require("lpeg") -- mandate
+-- -- require("md5") -- handy
+-- -- require("lfs") -- recommended
+-- -- require("slunicode") -- sort of obsolete
+--
+-- -- the library:
+--
+-- require("mtxlibs")
+--
+-- An alternative is to merge all libraries into this one so that you don't have to
+-- distribute them.
+--
+-- mtxlibs --selfmerge
+--
+-- If you need additional libraries, you can do something like this:
+--
+-- lua mtxlibs.lua --selfmerge my-web-project.lua trac-lmx util-jsn
+-- lua mtxlibs.lua --selfmerge my-sql-project.lua util-sql util-sql-imp-library util-sql-imp-client
+--
+-- That way you only need to update one file in a project and are not dependent on changes
+-- in the core ConTeXt libraries. The libraries are maintained as part of ConTeXt and used
+-- in projects, so they are relatively stable. The code works in Lua 5.1 as well as in 5.2.
+-- Not all functionality makes sense for users who are not familiar with ConTeXt, but for
+-- instance trackers and loggers are included because that way we can provide users with a
+-- consistent ecosystem.
+--
+-- Much of the provided functionality is described in cld-mkiv.pdf and related manuals, on
+-- contextgarden.net, as well as in articles. The XML subsystem is described in its own manual.
+-- Templates and SQL (not preloaded) also have their own manuals.
+--
+-- The next section contains the merged code, with each block ending up in its own
+-- closure. The code gets somewhat compacted to save space and speed up loading.
+--
+-- There are some dependencies between the modules. Also, quite a few functions are added
+-- to the regular Lua namespaces. In due time I'll isolate them in their own namespaces, with
+-- a handy option (for ConTeXt) to expose them in the normal ones as well. I might reduce the
+-- dependencies, but it probably makes no sense to waste time on that.
+
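+-- A minimal usage sketch (an illustration, not part of the library itself; the
+-- helpers shown come from l-string and l-table, assuming those files are found on
+-- one of the search paths listed further down):
+--
+--   require("mtxlibs")
+--
+--   print(string.strip("  some text  "))   -- "some text" (l-string)
+--   print(#table.sortedkeys(_G))           -- number of global names (l-table)
+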
+xpcall(function() local _, t = require("lpeg") return end,function() end) if t then lpeg = t end
+xpcall(function() local _, t = require("md5") return end,function() end) if t then md5 = t end
+xpcall(function() local _, t = require("lfs") return end,function() end) if t then lfs = t end
+xpcall(function() local _, t = require("slunicode") return end,function() end) if t then unicode = t end
+
+-- begin library merge
+
+-- end library merge
+
+local gsub, gmatch, match, find = string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+
+local ownname = arg and arg[0] or 'mtxlibs.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = ownpath
+
+local ownlibs = {
+
+ "l-lua.lua",
+ "l-lpeg.lua",
+ "l-function.lua",
+ "l-string.lua",
+ "l-table.lua",
+ "l-io.lua",
+ "l-number.lua",
+ "l-set.lua",
+ "l-os.lua",
+ "l-file.lua", -- limited functionality when no lfs
+ "l-md5.lua", -- not loaded when no md5 library
+ "l-url.lua",
+ "l-dir.lua", -- limited functionality when no lfs
+ "l-boolean.lua",
+ "l-unicode.lua", -- nowadays independent of slunicode
+ "l-math.lua",
+
+ "util-str.lua",
+ "util-tab.lua",
+ "util-sto.lua",
+ -- "util-lua.lua", -- no need for compiling
+ "util-prs.lua",
+ -- "util-fmt.lua", -- no need for table formatters
+ -- "util-deb.lua", -- no need for debugging (and tracing)
+
+ "trac-set.lua",
+ "trac-log.lua",
+ -- "trac-pro.lua", -- not relevant outside context
+ "trac-inf.lua",
+
+ "util-mrg.lua",
+ "util-tpl.lua",
+
+ "util-env.lua",
+ -- "luat-env.lua", -- not relevant outside context
+
+ "lxml-tab.lua",
+ "lxml-lpt.lua",
+ "lxml-mis.lua",
+ "lxml-aux.lua",
+ "lxml-xml.lua",
+
+}
+
+package.path = "t:/sources/?.lua;t:/sources/?;" .. package.path
+
+local ownlist = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
+
+local function locate_libs()
+ local name = ownlibs[1]
+ local done = false
+ for i=1,#ownlist do
+ local path = ownlist[i]
+ local filename = path .. "/" .. name
+ local f = io.open(filename)
+ if f then
+ f:close()
+ package.path = package.path .. ";" .. path .. "/?.lua" -- in case l-* does a require
+ done = path
+ break
+ end
+ end
+ locate_libs = function() return done end
+ return done
+end
+
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for i=1,#ownlibs do
+ local basename = ownlibs[i]
+ local filename = found .. "/" .. basename
+ local codeblob = loadfile(filename)
+ if codeblob then
+ package.preload[basename] = codeblob() or true
+ end
+ end
+ end
+end
+
+if not unicode then
+ load_libs()
+end
+
+local merger = utilities and utilities.merger
+
+if not merger then
+ return
+end
+
+local arguments = environment.arguments
+local files = environment.files
+
+if environment.ownname ~= "mtxlibs.lua" then
+ return
+end
+
+local helpinfo = [[
+usage: mtxlibs [options]
+
+--selfmerge
+--selfmerge targetfile extralibs
+--selfclean
+
+and in a lua file:
+
+require("mtxlibs")
+]]
+
+local application = logs.application {
+ name = "mtxlibs",
+ banner = "ConTeXt Basic Lua Libraries 1.00",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+if arguments.selfmerge then
+
+ report("merging libraries")
+ local found = locate_libs()
+ if found then
+ local target = files[1]
+ if target == ownname then
+ report("target cannot be this file")
+ return
+ elseif target then
+ report("target: %s",target)
+ for i=1,#files do
+ ownlibs[#ownlibs+1] = file.addsuffix(files[i],"lua")
+ end
+ end
+ merger.selfmerge(ownname,ownlibs,{ found },target)
+ report("done")
+ else
+ report("no libraries found")
+ end
+
+elseif arguments.selfclean then
+
+ report("cleaning libraries")
+ merger.selfclean(ownname)
+ report("done")
+
+elseif arguments.help or files[1] == "help" then
+
+ application.help()
+
+end
diff --git a/Master/texmf-dist/scripts/context/lua/mtxrun.lua b/Master/texmf-dist/scripts/context/lua/mtxrun.lua
index 108f2a8a112..00f63a5791d 100755
--- a/Master/texmf-dist/scripts/context/lua/mtxrun.lua
+++ b/Master/texmf-dist/scripts/context/lua/mtxrun.lua
@@ -1,5 +1,16 @@
#!/usr/bin/env texlua
+-- for k, v in next, _G.string do
+-- local tv = type(v)
+-- if tv == "table" then
+-- for kk, vv in next, v do
+-- print(k,kk,vv)
+-- end
+-- else
+-- print(tv,k,v)
+-- end
+-- end
+
if not modules then modules = { } end modules ['mtxrun'] = {
version = 1.001,
comment = "runner, lua replacement for texmfstart.rb",
@@ -43,3016 +54,3010 @@ if not modules then modules = { } end modules ['mtxrun'] = {
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+package.loaded["l-lua"] = package.loaded["l-lua"] or true
+
+-- original size: 10048, stripped down to: 5684
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+local type=type
+local gsub,format=string.gsub,string.format
+local package=package
+local searchers=package.searchers or package.loaders
+local libpaths=nil
+local clibpaths=nil
+local libhash={}
+local clibhash={}
+local libextras={}
+local clibextras={}
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local helpers=package.helpers or {
+ libpaths=function() return {} end,
+ clibpaths=function() return {} end,
+ cleanpath=cleanpath,
+ trace=false,
+ report=function(...) print(format(...)) end,
+}
+package.helpers=helpers
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+package.libpaths=getlibpaths
+package.clibpaths=getclibpaths
+local function addpath(what,paths,extras,hash,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths+1]=path
+ extras[#extras+1]=path
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths,extras
+end
+function package.extralibpath(...)
+ libpaths,libextras=addpath("lua",getlibpaths(),libextras,libhash,...)
+end
+function package.extraclibpath(...)
+ clibpaths,clibextras=addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
+if not searchers[-2] then
+ searchers[-2]=searchers[2]
+end
+searchers[2]=function(name)
+ return helpers.loaded(name)
+end
+searchers[3]=nil
+local function loadedaslib(resolved,rawname)
+ local init="luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true,searchers[-2](name)
+end
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+helpers.loadedaslib=loadedaslib
+helpers.loadedbylua=loadedbylua
+helpers.loadedbypath=loadedbypath
+helpers.notloaded=notloaded
+function helpers.loaded(name)
+ local thename=gsub(name,"%.","/")
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix or "so")
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
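+-- Illustration only: a require("util-jsn") that reaches this searcher first looks
+-- for "util-jsn.lua" on the lua paths, then on the clib paths, then for
+-- "util-jsn.so" (or whatever os.libsuffix says) on the clib paths, and finally
+-- falls back to the regular Lua loader.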
-local string = string
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
--- some functions may disappear as they are not used anywhere
+end -- of closure
-if not string.split then
+do -- create closure to overcome 200 locals limit
- -- this will be overloaded by a faster lpeg variant
+package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
- function string.split(str,pattern)
- local t = { }
- if #str > 0 then
- local n = 1
- for s in gmatch(str..pattern,"(.-)"..pattern) do
- t[n] = s
- n = n + 1
- end
- end
- return t
- end
+-- original size: 26252, stripped down to: 14371
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
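+-- Illustration only: local haspercent = lpeg.instringchecker(lpeg.P("%")) builds a
+-- boolean checker, so haspercent("100% done") is true and haspercent("nothing") is false.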
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
end
-
-function string.unquoted(str)
- return (gsub(str,"^([\"\'])(.*)%1$","%2"))
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
-
-
-function string.quoted(str)
- return format("%q",str) -- always "
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
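+-- Illustration only: string.split("a,b,,c",",") gives { "a", "b", "", "c" } and
+-- keeps empty entries, string.checkedsplit("a,b,,c",",") gives { "a", "b", "c" },
+-- and string.split("abc") without a separator gives { "abc" }.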
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
end
- return n
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
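+-- Illustration only: lpeg.replacer("\t","  ") returns a pattern for use with
+-- lpegmatch, while lpeg.replacer({ ["<"] = "&lt;", [">"] = "&gt;" },nil,true)
+-- returns a function that applies the replacements directly to a string.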
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
else
- return str
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
end
-
-local space = S(" \t\v\n")
-local nospace = 1 - space
-local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
-
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
end
-
-function string.is_empty(str)
- return not find(str,"%S")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+    new[i]=old[i]
+ end
+ return new
end
-
-local patterns_escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-function string.escapedpattern(str,simple)
- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
end
-
-function string.topattern(str,lowercase,strict)
- if str == "" then
- return ".*"
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
else
- str = gsub(str,".",simple_escapes)
- if lowercase then
- str = lower(str)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
end
- if strict then
- return "^" .. str .. "$"
+ else
+ if k==v then
+ p=P(k)
else
- return str
+ p=P(k)/v
end
+ end
end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-function"] = package.loaded["l-function"] or true
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-
--- Starting with version 5.2 Lua no longer provide ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashion we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
+-- original size: 361, stripped down to: 322
-if not ipairs then
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
+end -- of closure
- function ipairs(a)
- return iterate, a, 0
- end
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["l-string"] = package.loaded["l-string"] or true
-if not pairs then
+-- original size: 5513, stripped down to: 2708
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
- function pairs(t)
- return next, t -- , nil
- end
-end
+end -- of closure
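A few illustrative calls to the reworked string helpers defined in this closure (example values only):

print(string.unquoted([["hello"]]))              -- hello
print(string.limit("a rather long line", 9))     -- a rath...
print(string.count("banana", "an"))              -- 2
print(string.topattern("foo.bar", false, true))  -- ^foo%.bar$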
--- Also, unpack has been moved to the table table, and for compatibility
--- reasons we provide both now.
+do -- create closure to overcome 200 locals limit
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
-end
+package.loaded["l-table"] = package.loaded["l-table"] or true
--- extra functions, some might go (when not used)
+-- original size: 44643, stripped down to: 19717
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
end
- return lst
+ end
+ return lst
end
-
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
end
return keys
+ else
+ return {}
+ end
end
-
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
end
-
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
+ category=3
end
+ end
end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category==0 or category==3 then
+ sort(srt,compare)
else
- sort(srt)
+ sort(srt)
end
return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
end
-
-local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
end
- sort(srt)
- return srt
+ end
+ return sortedkeys(keys)
end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
local function nothing() end
-
-local function sortedhash(t)
- if t then
- local n, s = 0, sortedkeys(t) -- the robust one
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash
-
-function table.append(t, list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
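The sortedhash/sortedpairs iterator now accepts an optional comparator that also receives the table itself, so hashes can be traversed in key or value order; a sketch:

local scores = { beta = 2, alpha = 1, gamma = 3 }
for k, v in table.sortedhash(scores) do
  print(k, v)  -- alpha 1, beta 2, gamma 3 (sorted by key)
end
for k, v in table.sortedhash(scores, function(t,a,b) return t[a] > t[b] end) do
  print(k, v)  -- gamma 3, beta 2, alpha 1 (sorted by value)
end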
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- local lst = { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- t[k] = v
- end
- end
- return t
-end
-
function table.merged(...)
- local tmp, lst = { }, { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
- end
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
end
- return tmp
+ end
+ return t
end
-
-function table.imerge(t, ...)
- local lst, nt = { ... }, #t
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
end
- return t
+ end
+ return t
end
-
function table.imerged(...)
- local tmp, ntmp, lst = { }, 0, {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k,v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
else
- return { }
- end
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
end
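The two copiers above differ in how they treat shared structure: fastcopy recurses blindly (fast, no cycle handling) while copy tracks visited tables so cycles and shared subtables survive. A sketch:

local original = { data = { 1, 2, 3 } }
original.self = original              -- a cycle
local safe = table.copy(original)     -- the cycle is preserved, not re-expanded
print(safe.self == safe)              -- true
local flat = table.fastcopy { data = { 1, 2, 3 } }
print(flat.data[2])                   -- 2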
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent)
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
end
- return h
+ end
+ return h
end
-
function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
end
- return hsh
+ end
+ return hsh
end
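tohash and fromhash convert between list and set representations; an illustrative round trip:

local set  = table.tohash { "tex", "mp", "lua" }  -- { tex=true, mp=true, lua=true }
print(set.lua, set.xml)                           -- true   nil
local list = table.fromhash(set)                  -- back to an (unordered) list
print(#list)                                      -- 3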
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
}
-
local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
end
+ end
+ return tt
end
- return nil
+ end
+ return nil
end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there is no good number_to_string converter with the best resolution
-
+local propername=patterns.propername
local function dummy() end
-
local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
- else
- local tn = type(name)
- if tn == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
- end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- -- circular
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%s]=loadstring(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%q]=loadstring(%q),",depth,k,f))
- end
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
+ local tn=type(name)
+ if tn=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("%s[0x%04X]={",depth,name))
else
- handle("[" .. name .. "]={")
+ handle(format("%s[%s]={",depth,name))
end
- elseif tname == "boolean" then
- if name then
- handle("return {")
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
else
- handle("{")
- end
- else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
- end
- handle("}")
-end
-
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-
-table.tohandle = serialize
-
--- sometimes tables are really huge (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
- end
-end
-
-local function flattened(t,f,depth)
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume then only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
else
- f[k] = v
- end
- end
- end
- local n = #f
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- n = #f
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
else
- n = n + 1
- f[n] = v
- end
- end
- return f
-end
-
-table.flattened = flattened
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { }
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
else
- f[#f+1] = v
- end
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
else
- f[#f+1] = v
- end
- end
- return f
-end
-
-function table.unnest(t) -- bad name
- return unnest(t)
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
else
- return false
- end
- end
- return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
-
--- maybe also make a combined one
-
-function table.compact(t)
- if t then
- for k,v in next, t do
- if not next(v) then
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
- return false
-end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
- end
- return n
-end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
- for k, v in next, s do
- n[k] = v
- end
- end
- for k, v in next, t do
- n[v] = k
- end
- return n
-end
-
-function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
- end
-end
-
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
end
- end
- return concat(s, sep or " | ")
-end
-
-function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- table.tohandle(print,t,...)
- end
-end
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t)
- return not t or not next(t)
-end
-
-function table.has_one_entry(t)
- return t and not next(t,next(t))
-end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
-end
-
--- new, might move (maybe duplicate)
-
-function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
- end
- return new
-end
-
--- function table.sorted(t,...)
--- table.sort(t,...)
--- return t -- still sorts in-place
--- end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
-local lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
-local report = texio and texio.write_nl or print
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-
--- problem: separator can be an lpeg pattern and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
-function string.splitup(str,separator)
- if not separator then
- separator = ","
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
end
- return match(splitters_m[separator] or splitat(separator),str)
-end
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * tsplitat(newline)
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ if next(root) then
+ do_serialize(root,name,"",0)
end
- return match(c,str)
+ end
+ handle("}")
end
-
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
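An illustrative call to the serializer as exposed by table.serialize (output traced from the definitions above; option names are those of the specification table):

local s = table.serialize({ x = 1, words = { "a", "b" } }, "demo", { compact = true })
print(s)
-- demo={
--  ["words"]={ "a", "b" },
--  ["x"]=1,
-- }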
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
else
- return Cs(((str^1)/"" + 1)^0)
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
end
+ f:close()
+ io.flush()
+ end
end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
else
- return Cs((((1-str)^1)/"" + 1)^0)
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
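table.flattened merges nested hash entries and appends nested indexed entries into one table; a numeric second argument acts as a depth limit. A sketch:

local t    = { 1, { 2, 3 }, extra = { deep = true } }
local flat = table.flattened(t)
print(flat[1], flat[2], flat[3], flat.deep)  -- 1  2  3  true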
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return false
end
+ end
+ return true
end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
end
- return splitter
+ end
end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
end
- return splitter
+ end
+ return false
end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
end
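swapped inverts a hash (optionally merging a second one first) and the new mirrored keeps both directions; for example:

local codes = { lua = 1, mp = 2 }
local names = table.swapped(codes)   -- { [1]="lua", [2]="mp" }
local both  = table.mirrored(codes)  -- adds the reverse entries as well
print(names[2], both.mp, both[1])    -- mp   2   lua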
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
end
-
+ return t
+ end
end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
end
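The reworked sequenced handles indexed tables directly and falls back to sorted key=value pairs for hashes; illustrative calls:

print(table.sequenced({ "a", "b", "c" }, ", "))       -- a, b, c
print(table.sequenced({ mode = "draft", page = 2 }))  -- mode=draft | page=2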
-
--- utf extensions
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
+function table.is_empty(t)
+ return not t or not next(t)
end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
end
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
--- loop back from the end, i.e. prepend.
-
-local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
-
-function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = fastcopy(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
- if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
- else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
- else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
- end
- end
- return p
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
--- for k, v in next, t do
- for k, v in table.sortedhash(t) do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
end
- return p
+ end
+ return new
end
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
+function table.sorted(t,...)
+ sort(t,...)
+ return t
end
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-io"] = package.loaded["l-io"] or true
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local type = type
+-- original size: 8799, stripped down to: 6325
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
+ io.fileseparator,io.pathseparator="\\",";"
else
- io.fileseparator, io.pathseparator = "/" , ":"
+ io.fileseparator,io.pathseparator="/",":"
end
-
-function io.loaddata(filename,textmode)
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
- local data = f:read('*all')
- f:close()
- return data
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
else
- return nil
+ step=floor(size/(1024*1024))*1024*1024/8
end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
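The readall helper added above avoids one huge read("*all") for large files and instead reads in size-dependent chunks that are concatenated at the end. A minimal standalone sketch of the same idea, with a fixed 1 MB step as an example rather than the size-based tuning used in the stripped code:

    local function readall(name, step)
      step = step or 1024 * 1024          -- example chunk size: 1 MB
      local f = io.open(name, "rb")
      if not f then
        return nil
      end
      local data = { }
      while true do
        local r = f:read(step)
        if not r then                     -- nil signals end of file
          f:close()
          return table.concat(data)
        end
        data[#data+1] = r
      end
    end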
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
else
- return false
+ f:write(data or "")
end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- assert(f:close())
- return true
- end
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
end
-
function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- assert(f:close())
- return s
- end
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
end
-
function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(filename)
- local n = f and io.noflines(f) or 0
- assert(f:close())
- return n
+ if type(f)=="string" then
+ local f=io.open(filename)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
+ return 0
end
-end
-
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
}
-
function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
+ if f then
+ return nextchar[n or 1],f
+ end
end
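A hypothetical usage sketch for io.characters as defined above: the returned pair (iterator function, file handle) plugs straight into a generic for loop, here reading two bytes per step. It assumes the helper is in scope (for instance inside an mtxrun session); "somefile.bin" is only an example name:

    local f = io.open("somefile.bin", "rb")
    if f then
      for a, b in io.characters(f, 2) do
        -- a and b are one-byte strings; b can be nil on an odd-sized tail
        print(string.byte(a), b and string.byte(b))
      end
      f:close()
    end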
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
end
+ end
}
-
function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
end
-
function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
end
+ end
end
+ end
end
-
local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
end
-
-io.readnumber = readnumber
-
+io.readnumber=readnumber
function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"%z","")
- return str
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
end
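A hedged usage sketch for the readnumber helper above: positive counts read big endian, negative counts little endian, and a third argument first seeks to the given offset. It assumes the helper is loaded and the example file holds at least four bytes:

    local f = io.open("somefile.bin", "rb")
    if f then
      local be = io.readnumber(f, 4)     -- first 4 bytes as a big endian number
      local le = io.readnumber(f, 0, -4) -- the same 4 bytes, little endian
      print(be, le)
      f:close()
    end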
-
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-number'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-number"] = package.loaded["l-number"] or true
--- this module will be replaced when we have the bit library
+-- original size: 4939, stripped down to: 2830
-local tostring = tostring
-local format, floor, match, rep = string.format, math.floor, string.match, string.rep
-local concat, insert = table.concat, table.insert
-local lpegmatch = lpeg.match
-
-number = number or { }
-local number = number
-
--- a,b,c,d,e,f = number.toset(100101)
-
-function number.toset(n)
- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
-end
-
-function number.toevenhex(n)
- local s = format("%X",n)
- if #s % 2 == 0 then
- return s
+if not modules then modules={} end modules ['l-number']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tostring,tonumber=tostring,tonumber
+local format,floor,match,rep=string.format,math.floor,string.match,string.rep
+local concat,insert=table.concat,table.insert
+local lpegmatch=lpeg.match
+number=number or {}
+local number=number
+if bit32 then
+ local btest,bor=bit32.btest,bit32.bor
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ number.hasbit=btest
+ number.setbit=bor
+ function number.setbit(x,p)
+ return btest(x,p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return btest(x,p) and x-p or x
+ end
+else
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ function number.hasbit(x,p)
+ return x%(p+p)>=p
+ end
+ function number.setbit(x,p)
+ return (x%(p+p)>=p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return (x%(p+p)>=p) and x-p or x
+ end
+end
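The pure-Lua fallback above tests a bit with plain arithmetic: for a power of two p, the expression x % (p+p) >= p is true exactly when that bit is set in x. A quick worked check of the idea, outside the module:

    local function bit(p)      return 2^(p-1)        end
    local function hasbit(x,p) return x % (p+p) >= p end

    print(hasbit(12, bit(3))) -- true:  12 = 8+4, so bit 3 (value 4) is set
    print(hasbit(12, bit(2))) -- false: bit 2 (value 2) is not set in 12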
+if bit32 then
+ local bextract=bit32.extract
+ local t={
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ }
+ function number.tobitstring(b,m)
+ local n=32
+ for i=0,31 do
+ local v=bextract(b,i)
+ local k=32-i
+ if v==1 then
+ n=k
+ t[k]="1"
+ else
+ t[k]="0"
+ end
+ end
+ if m then
+ m=33-m*8
+ if m<1 then
+ m=1
+ end
+ return concat(t,"",m)
+ elseif n<8 then
+ return concat(t)
+ elseif n<16 then
+ return concat(t,"",9)
+ elseif n<24 then
+ return concat(t,"",17)
else
- return "0" .. s
+ return concat(t,"",25)
end
-end
-
--- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- on
---
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- end
---
--- of course dedicated "(.)(.)(.)(.)" matches are even faster
-
-local one = lpeg.C(1-lpeg.S(''))^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
-function number.bits(n,zero)
- local t, i = { }, (zero and 0) or 1
- while n > 0 do
- local m = n % 2
- if m > 0 then
- insert(t,1,i)
- end
- n = floor(n/2)
- i = i + 1
+ end
+else
+ function number.tobitstring(n,m)
+ if n>0 then
+ local t={}
+ while n>0 do
+ insert(t,1,n%2>0 and 1 or 0)
+ n=floor(n/2)
+ end
+ local nn=8-#t%8
+ if nn>0 and nn<8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m=m*8-#t
+ if m>0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+ rep("00000000",m)
+ else
+ return "00000000"
end
- return t
+ end
end
-
-
-function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
-end
-
-function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
-end
-
-function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+function number.valid(str,default)
+ return tonumber(str) or default or nil
end
-
-function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+function number.toevenhex(n)
+ local s=format("%X",n)
+ if #s%2==0 then
+ return s
+ else
+ return "0"..s
+ end
end
-
-
-function number.tobitstring(n,m)
- if n == 0 then
- if m then
- rep("00000000",m)
- else
- return "00000000"
- end
+local one=lpeg.C(1-lpeg.S('')/tonumber)^1
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+local function bits(n,i,...)
+ if n>0 then
+ local m=n%2
+ local n=floor(n/2)
+ if m>0 then
+ return bits(n,i+1,i,...)
else
- local t = { }
- while n > 0 do
- insert(t,1,n % 2 > 0 and 1 or 0)
- n = floor(n/2)
- end
- local nn = 8 - #t % 8
- if nn > 0 and nn < 8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m = m * 8 - #t
- if m > 0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
+ return bits(n,i+1,...)
end
+ else
+ return...
+ end
+end
+function number.bits(n)
+ return { bits(n,1) }
end
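For reference, the recursive bits helper above collects the 1-based positions of the set bits and returns the highest position first: 12 = 8 + 4, so number.bits(12) yields { 4, 3 }.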
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-set'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-set"] = package.loaded["l-set"] or true
--- This will become obsolete when we have the bitset library embedded.
-
-set = set or { }
-
-local nums = { }
-local tabs = { }
-local concat = table.concat
-local next, type = next, type
-
-set.create = table.tohash
+-- original size: 1923, stripped down to: 1133
+if not modules then modules={} end modules ['l-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+set=set or {}
+local nums={}
+local tabs={}
+local concat=table.concat
+local next,type=next,type
+set.create=table.tohash
function set.tonumber(t)
- if next(t) then
- local s = ""
- -- we could save mem by sorting, but it slows down
- for k, v in next, t do
- if v then
- -- why bother about the leading space
- s = s .. " " .. k
- end
- end
- local n = nums[s]
- if not n then
- n = #tabs + 1
- tabs[n] = t
- nums[s] = n
- end
- return n
- else
- return 0
+ if next(t) then
+ local s=""
+ for k,v in next,t do
+ if v then
+ s=s.." "..k
+ end
+ end
+ local n=nums[s]
+ if not n then
+ n=#tabs+1
+ tabs[n]=t
+ nums[s]=n
end
+ return n
+ else
+ return 0
+ end
end
-
function set.totable(n)
- if n == 0 then
- return { }
- else
- return tabs[n] or { }
- end
+ if n==0 then
+ return {}
+ else
+ return tabs[n] or {}
+ end
end
-
function set.tolist(n)
- if n == 0 or not tabs[n] then
- return ""
- else
- local t, n = { }, 0
- for k, v in next, tabs[n] do
- if v then
- n = n + 1
- t[n] = k
- end
- end
- return concat(t," ")
+ if n==0 or not tabs[n] then
+ return ""
+ else
+ local t,n={},0
+ for k,v in next,tabs[n] do
+ if v then
+ n=n+1
+ t[n]=k
+ end
end
+ return concat(t," ")
+ end
end
-
function set.contains(n,s)
- if type(n) == "table" then
- return n[s]
- elseif n == 0 then
- return false
- else
- local t = tabs[n]
- return t and t[s]
- end
+ if type(n)=="table" then
+ return n[s]
+ elseif n==0 then
+ return false
+ else
+ local t=tabs[n]
+ return t and t[s]
+ end
end
-
-
end -- of closure
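The set.tonumber/set.totable pair in the l-set module above interns a set of flags as a small integer by building a key string from the enabled keys; totable then maps that number back to the original table. A minimal standalone sketch of the interning idea (the keys are sorted here for a stable key, whereas the stripped code skips the sort for speed):

    local nums, tabs = { }, { }

    local function tonumberset(t)
      local s = { }
      for k, v in pairs(t) do
        if v then s[#s+1] = k end
      end
      table.sort(s)              -- sorted here for a stable key
      s = table.concat(s, " ")
      local n = nums[s]
      if not n then
        n = #tabs + 1
        tabs[n] = t
        nums[s] = n
      end
      return n
    end

    print(tonumberset { a = true, b = true }) -- 1
    print(tonumberset { b = true, a = true }) -- 1 again, same canonical key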
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-os'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This file deals with some operating system issues. Please don't bother me
--- with the pros and cons of operating systems as they all have their flaws
--- and benefits. Bashing one of them won't help solve problems or fix
--- bugs faster and is a waste of time and energy.
---
--- path separators: / or \ ... we can use / everywhere
--- suffixes : dll so exe <none> ... no big deal
--- quotes : we can use "" in most cases
--- expansion : unless "" are used * might give side effects
--- piping/threads : somewhat different for each os
--- locations : specific user file locations and settings can change over time
---
--- os.type : windows | unix (new, we already guessed os.platform)
--- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
--- os.platform : extended os.name with architecture
-
--- maybe build io.flush in os.execute
-
-local os = os
-local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
-local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+package.loaded["l-os"] = package.loaded["l-os"] or true
--- The following code permits traversing the environment table, at least
--- in luatex. Internally all environment names are uppercase.
+-- original size: 13692, stripped down to: 8406
+if not modules then modules={} end modules ['l-os']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local os=os
+local date,time=os.date,os.time
+local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
+local concat=table.concat
+local random,ceil,randomseed=math.random,math.ceil,math.randomseed
+local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
+math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+randomseed(math.initialseed)
if not os.__getenv__ then
-
- os.__getenv__ = os.getenv
- os.__setenv__ = os.setenv
-
- if os.env then
-
- local osgetenv = os.getenv
- local ossetenv = os.setenv
- local osenv = os.env local _ = osenv.PATH -- initialize the table
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- if type(v) == "table" then
- v = concat(v,";") -- path
- end
- ossetenv(K,v)
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- else
-
- local ossetenv = os.setenv
- local osgetenv = os.getenv
- local osenv = { }
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- local function __index(t,k)
- return os.getenv(k)
- end
- local function __newindex(t,k,v)
- os.setenv(k,v)
- end
-
- os.env = { }
-
- setmetatable(os.env, { __index = __index, __newindex = __newindex } )
-
+ os.__getenv__=os.getenv
+ os.__setenv__=os.setenv
+ if os.env then
+ local osgetenv=os.getenv
+ local ossetenv=os.setenv
+ local osenv=os.env local _=osenv.PATH
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ if type(v)=="table" then
+ v=concat(v,";")
+ end
+ ossetenv(K,v)
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ else
+ local ossetenv=os.setenv
+ local osgetenv=os.getenv
+ local osenv={}
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
end
-
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+ os.env={}
+ setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
+ end
end
-
--- end of environment hack
-
-local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
-
+local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
-
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- local handle = io.popen(command,"r")
- return handle and handle:read("*all") or ""
+ local handle=io.popen(command,"r")
+ return handle and handle:read("*all") or ""
end
-
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
- else
- io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
- end
-end
-
-os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
-os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
-
-if os.type == "windows" then
- os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
+ if find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
+ else
+ io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
+ end
+end
+os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
+os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
+if os.type=="windows" then
+ os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
else
- os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
+ os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
end
-
+local launchers={
+ windows="start %s",
+ macosx="open %s",
+ unix="$BROWSER %s &> /dev/null &",
+}
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
-
if not os.times then
- -- utime = user time
- -- stime = system time
- -- cutime = children user time
- -- cstime = children system time
- function os.times()
- return {
- utime = os.gettimeofday(), -- user
- stime = 0, -- system
- cutime = 0, -- children user
- cstime = 0, -- children system
- }
- end
+ function os.times()
+ return {
+ utime=os.gettimeofday(),
+ stime=0,
+ cutime=0,
+ cstime=0,
+ }
+ end
end
-
-os.gettimeofday = os.gettimeofday or os.clock
-
-local startuptime = os.gettimeofday()
-
+os.gettimeofday=os.gettimeofday or os.clock
+local startuptime=os.gettimeofday()
function os.runtime()
- return os.gettimeofday() - startuptime
-end
-
-
--- no need for function anymore as we have more clever code and helpers now
--- this metatable trickery might as well disappear
-
-os.resolvers = os.resolvers or { } -- will become private
-
-local resolvers = os.resolvers
-
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
-local osix = osmt.__index
-
-osmt.__index = function(t,k)
- return (resolvers[k] or osix)(t,k)
-end
-
-setmetatable(os,osmt)
-
--- we can use HOSTTYPE on some platforms
-
-local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
-
+ return os.gettimeofday()-startuptime
+end
+os.resolvers=os.resolvers or {}
+local resolvers=os.resolvers
+setmetatable(os,{ __index=function(t,k)
+ local r=resolvers[k]
+ return r and r(t,k) or nil
+end })
+local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
local function guess()
- local architecture = os.resultof("uname -m") or ""
- if architecture ~= "" then
- return architecture
- end
- architecture = os.getenv("HOSTTYPE") or ""
- if architecture ~= "" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-
-if platform ~= "" then
-
- os.platform = platform
-
-elseif os.type == "windows" then
-
- -- we could set the variable directly, no function needed here
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "linux" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "macosx" then
-
- --[[
- Identifying the architecture of OSX is quite a mess and this
- is the best we can come up with. For some reason $HOSTTYPE is
- a kind of pseudo environment variable, not known to the current
- environment. And yes, uname cannot be trusted either, so there
- is a change that you end up with a 32 bit run on a 64 bit system.
- Also, some proper 64 bit intel macs are too cheap (low-end) and
- therefore not permitted to run the 64 bit kernel.
- ]]--
-
- function os.resolvers.platform(t,k)
- -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
- -- if architecture == "" then
- -- architecture = os.resultof("echo $HOSTTYPE") or ""
- -- end
- local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
- if architecture == "" then
- -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
- platform = "osx-intel"
- elseif find(architecture,"i386") then
- platform = "osx-intel"
- elseif find(architecture,"x86_64") then
- platform = "osx-64"
- else
- platform = "osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "sunos" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "freebsd" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "kfreebsd" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "kfreebsd-amd64"
- else
- platform = "kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
+ local architecture=os.resultof("uname -m") or ""
+ if architecture~="" then
+ return architecture
+ end
+ architecture=os.getenv("HOSTTYPE") or ""
+ if architecture~="" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+if platform~="" then
+ os.platform=platform
+elseif os.type=="windows" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform="mswin-64"
+ else
+ platform="mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="linux" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="linux-64"
+ elseif find(architecture,"ppc") then
+ platform="linux-ppc"
+ else
+ platform="linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="macosx" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
+ if architecture=="" then
+ platform="osx-intel"
+ elseif find(architecture,"i386") then
+ platform="osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform="osx-64"
+ else
+ platform="osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="sunos" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform="solaris-sparc"
+ else
+ platform="solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="freebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform="freebsd-amd64"
+ else
+ platform="freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="kfreebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="kfreebsd-amd64"
+ else
+ platform="kfreebsd-i386"
end
-
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
else
-
- -- platform = "linux"
- -- os.setenv("MTX_PLATFORM",platform)
- -- os.platform = platform
-
- function os.resolvers.platform(t,k)
- local platform = "linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-end
-
--- beware, we set the randomseed
-
--- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
--- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
--- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
--- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
---
--- as we don't call this function too often there is not so much risk on repetition
-
-local t = { 8, 9, "a", "b" }
-
+ function os.resolvers.platform(t,k)
+ local platform="linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+end
+local t={ 8,9,"a","b" }
function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
end
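os.uuid above generates a version 4 (random) UUID: the third group starts with the literal 4 and the fourth group starts with 8, 9, a or b, as in f47ac10b-58cc-4372-a567-0e02b2c3d479. A small shape check; the fallback literal is only there so the snippet also runs where os.uuid is not defined:

    local u = os.uuid and os.uuid() or "f47ac10b-58cc-4372-a567-0e02b2c3d479"
    print(u:match("^%x+%-%x+%-4%x+%-[89ab]%x+%-%x+$") ~= nil) -- true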
-
local d
-
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
- if delta then
- if d > 0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
+ d=d or tonumber(tonumber(date("%H")-date("!%H")))
+ if delta then
+ if d>0 then
+ return format("+%02i:00",d)
else
- return 1
- end
-end
-
-local memory = { }
-
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+local timeformat=format("%%s%s",os.timezone(true))
+local dateformat="!%Y-%m-%d %H:%M:%S"
+function os.fulltime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+local dateformat="%Y-%m-%d %H:%M:%S"
+function os.localtime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return date(dateformat,t)
+end
+function os.converttime(t,default)
+ local t=tonumber(t)
+ if t and t>0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+local memory={}
local function which(filename)
- local fullname = memory[filename]
- if fullname == nil then
- local suffix = file.suffix(filename)
- local suffixes = suffix == "" and os.binsuffixes or { suffix }
- for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
- local df = file.join(directory,filename)
- for i=1,#suffixes do
- local dfs = file.addsuffix(df,suffixes[i])
- if io.exists(dfs) then
- fullname = dfs
- break
- end
- end
- end
- if not fullname then
- fullname = false
+ local fullname=memory[filename]
+ if fullname==nil then
+ local suffix=file.suffix(filename)
+ local suffixes=suffix=="" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ local df=file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs=file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname=dfs
+ break
end
- memory[filename] = fullname
+ end
end
- return fullname
+ if not fullname then
+ fullname=false
+ end
+ memory[filename]=fullname
+ end
+ return fullname
+end
+os.which=which
+os.where=which
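os.which above memoizes a scan over $PATH, trying each directory (and each platform binary suffix) until io.exists succeeds. A simplified standalone sketch without the memoization and suffix handling; the separator heuristic mirrors the PATH test used earlier in this file:

    local function which(name)
      local path = os.getenv("PATH") or ""
      local sep  = path:find(";") and ";" or ":"
      for dir in path:gmatch("[^" .. sep .. "]+") do
        local full = dir .. "/" .. name
        local f = io.open(full, "rb")
        if f then
          f:close()
          return full
        end
      end
      return false
    end

    print(which("lua")) -- full path when found, otherwise false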
+function os.today()
+ return date("!*t")
+end
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S")
+end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
end
-
-os.which = which
-os.where = which
-
--- print(os.which("inkscape.exe"))
--- print(os.which("inkscape"))
--- print(os.which("gs.exe"))
--- print(os.which("ps2pdf"))
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
+package.loaded["l-file"] = package.loaded["l-file"] or true
-file = file or { }
-local file = file
-
-local insert, concat = table.insert, table.concat
-local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-
-local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
-
-local function dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
-end
+-- original size: 16648, stripped down to: 9051
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
local function basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+ return name and lpegmatch(pattern,name) or name
end
-
--- local function nameonly(name)
--- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
--- end
-
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
local function nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
-end
-
-local function extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
-end
-
-local function splitname(name)
- local n, s = match(name,"^(.+)%.([^/\\]-)$")
- return n or name, s or ""
-end
-
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
-
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
-end
-
-function file.addsuffix(filename, suffix, criterium)
- if not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = splitname(filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
return filename
+ end
end
- else
- local n, s = splitname(filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
end
- return n .. "." .. suffix
+ end
end
+ return (n or filename).."."..suffix
+ end
end
-
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
end
-
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...) -- rather dirty
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if not a or a == "" then -- not a added
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
end
-
-
--- We should be able to use:
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(dirname(name,"."))
--- return a and sub(a.permissions,2,2) == "w"
--- end
---
--- But after some testing Taco and I came up with:
-
function file.is_writable(name)
- if lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
- return false
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
end
-
+local readable=P("r")*Cc(true)
function file.is_readable(name)
- local a = attributes(name)
- return a and sub(a.permissions,1,1) == "r"
-end
-
-file.isreadable = file.is_readable -- deprecated
-file.iswritable = file.is_writable -- deprecated
-
--- todo: lpeg \\ / .. does not save much
-
-local checkedsplit = string.checkedsplit
-
-function file.splitpath(str,separator) -- string
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return concat(tab,separator or io.pathseparator) -- can have trailing //
-end
-
--- we can hash them weakly
-
-
-function file.collapsepath(str,anchor)
- if anchor and not find(str,"^/") and not find(str,"^%a:") then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif find(str,"^%.%.") then
- str = gsub(str,"\\","/")
- return str
- elseif not find(str,"%.") then
- str = gsub(str,"\\","/")
- return str
- end
- str = gsub(str,"\\","/")
- local starter, rest = match(str,"^(%a+:/*)(.-)$")
- if starter then
- str = rest
- end
- local oldelements = checkedsplit(str,"/")
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif find(str,"^/") then
- return "/" .. concat(newelements,'/')
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
else
- return concat(newelements, '/')
- end
-end
-
-
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
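The rewritten file.join above special-cases network-like prefixes (a scheme: or //host start) and root-based first arguments before collapsing duplicate slashes. Some illustrative calls with their expected results, worked out from the patterns above rather than taken from a test suite:

    -- file.join("a","b","c")          --> "a/b/c"
    -- file.join("/","a","b")          --> "/a/b"
    -- file.join("http://x","y","z")   --> "http://x/y/z"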
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
function file.robustname(str,strict)
- str = gsub(str,"[^%a%d%/%-%.\\]+","-")
+ if str then
+ str=lpegmatch(pattern_a,str) or str
if strict then
- return lower(gsub(str,"^%-*(.-)%-*$","%1"))
+ return lpegmatch(pattern_b,str) or str
else
- return str
+ return str
end
+ end
end
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
+file.readdata=io.loaddata
+file.savedata=io.savedata
function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
-end
-
--- lpeg variants, slightly faster, not always
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
-local rootbased = P("/") + letter*P(":")
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename)~=nil
end
-
function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
+ return filename and lpegmatch(rootbased,filename)~=nil
end
-
--- actually these are schemes
-
-local slash = S("\\/")
-local period = P(".")
-local drive = C(R("az","AZ")) * P(":")
-local path = C(((1-slash)^0 * slash)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
-
-function file.splitname(str,splitdrive)
- if splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
-end
-
-function file.nametotable(str,splitdrive) -- returns table
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
-
--- for myself:
-
function file.strip(name,dir)
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
end
@@ -3060,64 +3065,81 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-md5'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This also provides file checksums and checkers.
+package.loaded["l-md5"] = package.loaded["l-md5"] or true
-local md5, file = md5, file
-local gsub, format, byte = string.gsub, string.format, string.byte
+-- original size: 3760, stripped down to: 2088
+if not modules then modules={} end modules ['l-md5']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not md5 then
+ md5=optionalrequire("md5")
+end
+if not md5 then
+ md5={
+ sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+local md5,file=md5,file
+local gsub,format,byte=string.gsub,string.format,string.byte
+local md5sum=md5.sum
local function convert(str,fmt)
- return (gsub(md5.sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
end
-
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
-
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime=lfs.attributes(newname,"modification")
+ if not newtime then
+ return true
+ elseif newtime>=oldtime then
+ return false
+ elseif oldtime-newtime<(threshold or 1) then
+ return false
else
- return true
+ return true
end
+ else
+ return false
+ end
+end
+file.needs_updating=file.needsupdating
+function file.syncmtimes(oldname,newname)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
end
-
function file.checksum(name)
- if md5 then
- local data = io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
+ if md5 then
+ local data=io.loaddata(name)
+ if data then
+ return md5.HEX(data)
end
- return nil
+ end
+ return nil
end
-
function file.loadchecksum(name)
- if md5 then
- local data = io.loaddata(name .. ".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
+ if md5 then
+ local data=io.loaddata(name..".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
end
-
-function file.savechecksum(name, checksum)
- if not checksum then checksum = file.checksum(name) end
- if checksum then
- io.savedata(name .. ".md5",checksum)
- return checksum
- end
- return nil
+function file.savechecksum(name,checksum)
+ if not checksum then checksum=file.checksum(name) end
+ if checksum then
+ io.savedata(name..".md5",checksum)
+ return checksum
+ end
+ return nil
end
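A hedged usage sketch for the checksum helpers above, assuming this module is loaded and using "somefile.tex" purely as an example name: compare the stored .md5 sidecar with a freshly computed checksum and refresh it when they differ.

    local name = "somefile.tex"
    local old  = file.loadchecksum(name) -- contents of name .. ".md5", if present
    local new  = file.checksum(name)     -- md5.HEX of the file data, or nil
    if new and new ~= old then
      file.savechecksum(name, new)
    end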
@@ -3125,594 +3147,546 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-url'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
-local concat = table.concat
-local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
-local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
+package.loaded["l-url"] = package.loaded["l-url"] or true
--- from wikipedia:
---
--- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
--- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
--- | | | | | | | |
--- | userinfo hostname port | | query fragment
--- | \________________________________/\_____________|____|/
--- scheme | | | |
--- | authority path | |
--- | | |
--- | path interpretable as filename
--- | ___________|____________ |
--- / \ / \ |
--- urn:example:animal:ferret:nose interpretable as extension
-
-url = url or { }
-local url = url
-
-local tochar = function(s) return char(tonumber(s,16)) end
-
-local colon = P(":")
-local qmark = P("?")
-local hash = P("#")
-local slash = P("/")
-local percent = P("%")
-local endofstring = P(-1)
-
-local hexdigit = R("09","AF","af")
-local plus = P("+")
-local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
--- we also assume that when we have a scheme, we also have an authority
-
-local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
-local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
-local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
-local fragmentstr = Cs((escaped+(1- endofstring))^0)
-
-local scheme = schemestr * colon + nothing
-local authority = slash * slash * authoritystr + nothing
-local path = slash * pathstr + nothing
-local query = qmark * querystr + nothing
-local fragment = hash * fragmentstr + nothing
-
-local validurl = scheme * authority * path * query * fragment
-local parser = Ct(validurl)
-
-lpegpatterns.url = validurl
-lpegpatterns.urlsplitter = parser
-
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-
-lpegpatterns.urlescaper = escaper
-
--- todo: reconsider Ct as we can as well have five return values (saves a table)
--- so we can have two parsers, one with and one without
+-- original size: 11806, stripped down to: 5417
+if not modules then modules={} end modules ['l-url']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local char,format,byte=string.char,string.format,string.byte
+local concat=table.concat
+local tonumber,type=tonumber,type
+local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
+local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
+url=url or {}
+local url=url
+local tochar=function(s) return char(tonumber(s,16)) end
+local colon=P(":")
+local qmark=P("?")
+local hash=P("#")
+local slash=P("/")
+local percent=P("%")
+local endofstring=P(-1)
+local hexdigit=R("09","AF","af")
+local plus=P("+")
+local nothing=Cc("")
+local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
+local escaped=(plus/" ")+escapedchar
+local noslash=P("/")/""
+local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr=Cs((escaped+(1- qmark-hash))^0)
+local querystr=Cs(((1- hash))^0)
+local fragmentstr=Cs((escaped+(1- endofstring))^0)
+local scheme=schemestr*colon+nothing
+local authority=slash*slash*authoritystr+nothing
+local path=slash*pathstr+nothing
+local query=qmark*querystr+nothing
+local fragment=hash*fragmentstr+nothing
+local validurl=scheme*authority*path*query*fragment
+local parser=Ct(validurl)
+lpegpatterns.url=validurl
+lpegpatterns.urlsplitter=parser
+local escapes={}
+setmetatable(escapes,{ __index=function(t,k)
+ local v=format("%%%02X",byte(k))
+ t[k]=v
+ return v
+end })
+local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
+local unescaper=Cs((escapedchar+1)^0)
+lpegpatterns.urlunescaped=escapedchar
+lpegpatterns.urlescaper=escaper
+lpegpatterns.urlunescaper=unescaper
local function split(str)
- return (type(str) == "string" and lpegmatch(parser,str)) or str
+ return (type(str)=="string" and lpegmatch(parser,str)) or str
end
-
-local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
-
+local isscheme=schemestr*colon*slash*slash
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
-end
-
-
--- todo: cache them
-
-local rootletter = R("az","AZ")
- + S("_-+")
-local separator = P("://")
-local qualified = P(".")^0 * P("/")
- + rootletter * P(":")
- + rootletter^1 * separator
- + rootletter^1 * P("/")
-local rootbased = P("/")
- + rootletter * P(":")
-
-local barswapper = replacer("|",":")
-local backslashswapper = replacer("\\","/")
-
-local function hashed(str) -- not yet ok (/test?test)
- local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
- if not somescheme and not somequery then
- s = {
- scheme = "file",
- authority = "",
- path = str,
- query = "",
- fragment = "",
- original = str,
- noscheme = true,
- filename = str,
- }
- else -- not always a filename but handy anyway
- local authority, path, filename = s[2], s[3]
- if authority == "" then
- filename = path
- else
- filename = authority .. "/" .. path
- end
- s = {
- scheme = s[1],
- authority = authority,
- path = path,
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = false,
- filename = filename,
- }
- end
- return s
-end
-
--- Here we assume:
---
--- files: /// = relative
--- files: //// = absolute (!)
-
-
-
-url.split = split
-url.hasscheme = hasscheme
-url.hashed = hashed
-
-function url.addscheme(str,scheme) -- no authority
- if hasscheme(str) then
- return str
- elseif not scheme then
- return "file:///" .. str
+ if str then
+ local scheme=lpegmatch(isscheme,str)
+ return scheme~="" and scheme or false
+ else
+ return false
+ end
+end
+local rootletter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
+local rootbased=P("/")+rootletter*P(":")
+local barswapper=replacer("|",":")
+local backslashswapper=replacer("\\","/")
+local equal=P("=")
+local amp=P("&")
+local key=Cs(((escapedchar+1)-equal )^0)
+local value=Cs(((escapedchar+1)-amp -endofstring)^0)
+local splitquery=Cf (Ct("")*P { "sequence",
+ sequence=V("pair")*(amp*V("pair"))^0,
+ pair=Cg(key*equal*value),
+},rawset)
+local function hashed(str)
+ if str=="" then
+ return {
+ scheme="invalid",
+ original=str,
+ }
+ end
+ local s=split(str)
+ local rawscheme=s[1]
+ local rawquery=s[4]
+ local somescheme=rawscheme~=""
+ local somequery=rawquery~=""
+ if not somescheme and not somequery then
+ s={
+ scheme="file",
+ authority="",
+ path=str,
+ query="",
+ fragment="",
+ original=str,
+ noscheme=true,
+ filename=str,
+ }
+ else
+ local authority,path,filename=s[2],s[3]
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
else
- return scheme .. ":///" .. str
- end
-end
-
-function url.construct(hash) -- dodo: we need to escape !
- local fullurl, f = { }, 0
- local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
- if scheme and scheme ~= "" then
- f = f + 1 ; fullurl[f] = scheme .. "://"
- end
- if authority and authority ~= "" then
- f = f + 1 ; fullurl[f] = authority
- end
- if path and path ~= "" then
- f = f + 1 ; fullurl[f] = "/" .. path
- end
- if query and query ~= "" then
- f = f + 1 ; fullurl[f] = "?".. query
- end
- if fragment and fragment ~= "" then
- f = f + 1 ; fullurl[f] = "#".. fragment
- end
- return lpegmatch(escaper,concat(fullurl))
-end
-
+ filename=authority.."/"..path
+ end
+ s={
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=s[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
+ end
+ return s
+end
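-- Sketch of the table returned by the rewritten hashed() (editor's illustration,
-- not part of this changeset); the URL is made up.
local u = url.hashed("http://example.com:8042/over/there?type=animal&name=narwhal#nose")
-- u.scheme    == "http"
-- u.authority == "example.com:8042"
-- u.path      == "over/there"
-- u.queries   == { type = "animal", name = "narwhal" }   -- new: split via splitquery
-- u.fragment  == "nose"
-- plain names stay usable: url.hashed("foo/bar.tex").scheme == "file"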
+url.split=split
+url.hasscheme=hasscheme
+url.hashed=hashed
+function url.addscheme(str,scheme)
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///"..str
+ else
+ return scheme..":///"..str
+ end
+end
+function url.construct(hash)
+ local fullurl,f={},0
+ local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
+ if scheme and scheme~="" then
+ f=f+1;fullurl[f]=scheme.."://"
+ end
+ if authority and authority~="" then
+ f=f+1;fullurl[f]=authority
+ end
+ if path and path~="" then
+ f=f+1;fullurl[f]="/"..path
+ end
+ if query and query~="" then
+ f=f+1;fullurl[f]="?"..query
+ end
+ if fragment and fragment~="" then
+ f=f+1;fullurl[f]="#"..fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
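-- Editor's sketch (not part of this changeset): construct() concatenates the
-- fields of a hashed-style table and pushes the result through the escaper, so
-- anything outside [0-9A-Za-z -./_] comes back percent-encoded.
local s = url.construct {
  scheme    = "http",
  authority = "example.com",
  path      = "some path/file.pdf", -- the space comes back as %20
}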
+local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
function url.filename(filename)
- local t = hashed(filename)
- return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
+ local spec=hashed(filename)
+ local path=spec.path
+ return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
end
-
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+url.escape=escapestring
function url.query(str)
- if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
- else
- return str
- end
+ if type(str)=="string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
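-- Quick sketch (editor's illustration, not part of this changeset) of the
-- lpeg-based query splitter that replaces the old gmatch loop.
local q = url.query("type=animal&name=narwhal")
-- q.type == "animal", q.name == "narwhal"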
+function url.toquery(data)
+ local td=type(data)
+ if td=="string" then
+ return #data and escapestring(data) or nil
+ elseif td=="table" then
+ if next(data) then
+ local t={}
+ for k,v in next,data do
+ t[#t+1]=format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ end
+end
+local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
+function url.barepath(path)
+ if not path or path=="" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
end
-
-
-
-
-
-
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-dir'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- dir.expandname will be merged with cleanpath and collapsepath
-
-local type = type
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
-local concat, insert, remove = table.concat, table.insert, table.remove
-local lpegmatch = lpeg.match
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
-
-dir = dir or { }
-local dir = dir
-local lfs = lfs
+package.loaded["l-dir"] = package.loaded["l-dir"] or true
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-local isdir = lfs.isdir
-local isfile = lfs.isfile
-local currentdir = lfs.currentdir
-
--- handy
+-- original size: 13139, stripped down to: 8196
+if not modules then modules={} end modules ['l-dir']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,select=type,select
+local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local lpegmatch=lpeg.match
+local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
+dir=dir or {}
+local dir=dir
+local lfs=lfs
+local attributes=lfs.attributes
+local walkdir=lfs.dir
+local isdir=lfs.isdir
+local isfile=lfs.isfile
+local currentdir=lfs.currentdir
+local chdir=lfs.chdir
+if not isdir then
+ function isdir(name)
+ local a=attributes(name)
+ return a and a.mode=="directory"
+ end
+ lfs.isdir=isdir
+end
+if not isfile then
+ function isfile(name)
+ local a=attributes(name)
+ return a and a.mode=="file"
+ end
+ lfs.isfile=isfile
+end
function dir.current()
- return (gsub(currentdir(),"\\","/"))
+ return (gsub(currentdir(),"\\","/"))
end
-
--- optimizing for no find (*) does not save time
-
-
-local lfsisdir = isdir
-
+local lfsisdir=isdir
local function isdir(path)
- path = gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+ path=gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
end
-
-lfs.isdir = isdir
-
+lfs.isdir=isdir
local function globpattern(path,patt,recurse,action)
- if path == "/" then
- path = path .. "."
- elseif not find(path,"/$") then
- path = path .. '/'
- end
- if isdir(path) then -- lfs.isdir does not like trailing /
- for name in walkdir(path) do -- lfs.dir accepts trailing /
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
- end
- end
- end
-end
-
-dir.globpattern = globpattern
-
+ if path=="/" then
+ path=path.."."
+ elseif not find(path,"/$") then
+ path=path..'/'
+ end
+ if isdir(path) then
+ for name in walkdir(path) do
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+dir.globpattern=globpattern
local function collectpattern(path,patt,recurse,result)
- local ok, scanner
- result = result or { }
- if path == "/" then
- ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner, first do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
- result[name] = attr
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collectpattern(full,patt,recurse)
- result[name] = attr
- end
- end
- end
- return result
-end
-
-dir.collectpattern = collectpattern
-
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
+ local ok,scanner
+ result=result or {}
+ if path=="/" then
+ ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
+ else
+ ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
+ end
+ if ok and type(scanner)=="function" then
+ if not find(path,"/$") then path=path..'/' end
+ for name in scanner,first do
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ attr.list=collectpattern(full,patt,recurse)
+ result[name]=attr
+ end
+ end
+ end
+ return result
+end
+dir.collectpattern=collectpattern
+local pattern=Ct {
+ [1]=(C(P(".")+P("/")^1)+C(R("az","AZ")*P(":")*P("/")^0)+Cc("./"))*V(2)*V(3),
+ [2]=C(((1-S("*?/"))^0*P("/"))^0),
+ [3]=C(P(1)^0)
}
-
-local filter = Cs ( (
- P("**") / ".*" +
- P("*") / "[^/]*" +
- P("?") / "[^/]" +
- P(".") / "%%." +
- P("+") / "%%+" +
- P("-") / "%%-" +
- P(1)
+local filter=Cs ((
+ P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
)^0 )
-
local function glob(str,t)
- if type(t) == "function" then
- if type(str) == "table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif isfile(str) then
- t(str)
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,t)
- end
- end
+ if type(t)=="function" then
+ if type(str)=="table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
else
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif isfile(str) then
- if t then
- t[#t+1] = str
- return t
- else
- return { str }
- end
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,action)
- return t
- else
- return { }
- end
+ local split=lpegmatch(pattern,str)
+ if split then
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str)=="table" then
+ local t=t or {}
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1]=str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split=lpegmatch(pattern,str)
+ if split then
+ local t=t or {}
+ local action=action or function(name) t[#t+1]=name end
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return {}
+ end
+ end
+ end
+end
+dir.glob=glob
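-- Usage sketch (editor's illustration, not part of this changeset); the paths
-- are hypothetical. "**" recurses into subdirectories, "*" and "?" match within
-- one level only.
local found = dir.glob("texmf/fonts/**/*.lua")   -- collect matches in a table
dir.glob("texmf/web2c/*.cnf",function(name)      -- or stream them to a callback
  print(name)
end)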
+local function globfiles(path,recurse,func,files)
+ if type(func)=="string" then
+ local s=func
+ func=function(name) return find(name,s) end
+ end
+ files=files or {}
+ local noffiles=#files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ else
+ local mode=attributes(name,'mode')
+ if mode=="directory" then
+ if recurse then
+ globfiles(path.."/"..name,recurse,func,files)
end
- end
-end
-
-dir.glob = glob
-
-
-local function globfiles(path,recurse,func,files) -- func == pattern or function
- if type(func) == "string" then
- local s = func
- func = function(name) return find(name,s) end
- end
- files = files or { }
- local noffiles = #files
- for name in walkdir(path) do
- if find(name,"^%.") then
- --- skip
- else
- local mode = attributes(name,'mode')
- if mode == "directory" then
- if recurse then
- globfiles(path .. "/" .. name,recurse,func,files)
- end
- elseif mode == "file" then
- if not func or func(name) then
- noffiles = noffiles + 1
- files[noffiles] = path .. "/" .. name
- end
- end
+ elseif mode=="file" then
+ if not func or func(name) then
+ noffiles=noffiles+1
+ files[noffiles]=path.."/"..name
end
+ end
end
- return files
+ end
+ return files
end
-
-dir.globfiles = globfiles
-
--- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
--- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
--- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
--- t = dir.glob("f:/minimal/tex/**/*")
--- print(dir.ls("f:/minimal/tex/**/*"))
--- print(dir.ls("*.tex"))
-
+dir.globfiles=globfiles
function dir.ls(pattern)
- return concat(glob(pattern),"\n")
+ return concat(glob(pattern),"\n")
end
-
-
-local make_indeed = true -- false
-
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
-
+local make_indeed=true
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if onwindows then
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
+ end
+ local first,middle,last
+ local drive=false
+ first,middle,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ else
+ first,last=match(str,"^(//)/*(.-)$")
+ if first then
+ middle,last=match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth="//"..middle
+ else
+ pth="//"..last
+ last=""
end
- local first, middle, last
- local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
+ else
+ first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
if first then
- -- empty network path == local path
+ pth,drive=first..middle,true
else
- first, last = match(str,"^(//)/*(.-)$")
- if first then
- middle, last = match(str,"([^/]+)/+(.-)$")
- if middle then
- pth = "//" .. middle
- else
- pth = "//" .. last
- last = ""
- end
- else
- first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth, drive = first .. middle, true
- else
- middle, last = match(str,"^(/*)(.-)$")
- if not middle then
- last = str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth == "" then
- pth = s
- elseif drive then
- pth, drive = pth .. s, false
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (isdir(pth) == true)
- end
-
-
+ middle,last=match(str,"^(/*)(.-)$")
+ if not middle then
+ last=str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth=="" then
+ pth=s
+ elseif drive then
+ pth,drive=pth..s,false
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
else
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- str = gsub(str,"/+","/")
- if find(str,"^/") then
- pth = "/"
- for s in gmatch(str,"[^/]+") do
- local first = (pth == "/")
- if first then
- pth = pth .. s
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
else
- pth = "."
- for s in gmatch(str,"[^/]+") do
- pth = pth .. "/" .. s
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ str=s
end
- return pth, (isdir(pth) == true)
+ end
end
-
-
-end
-
-dir.makedirs = dir.mkdirs
-
--- we can only define it here as it uses dir.current
-
-if onwindows then
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ str=gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth="/"
+ for s in gmatch(str,"[^/]+") do
+ local first=(pth=="/")
if first then
- first = dir.current() .. "/"
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = currentdir()
- if lfs.chdir(first) then
- first = dir.current()
- end
- lfs.chdir(d)
- end
+ pth=pth..s
+ else
+ pth=pth.."/"..s
end
- if not first then
- first, last = dir.current(), str
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" or last == "." then
- return first
- else
- return first .. "/" .. last
+ end
+ else
+ pth="."
+ for s in gmatch(str,"[^/]+") do
+ pth=pth.."/"..s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
end
+ end
end
-
+ return pth,(isdir(pth)==true)
+ end
+end
+dir.makedirs=dir.mkdirs
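-- Editor's sketch (not part of this changeset): components are joined with "/"
-- and every missing intermediate directory is created along the way.
local pth, ok = dir.mkdirs("build","cache","fonts")
-- pth == "build/cache/fonts", ok tells whether the directory exists afterwards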
+if onwindows then
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=dir.current().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first,last=dir.current(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
+ end
+ end
else
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = currentdir() .. "/" .. str
- end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- str = gsub(str,"(.)/%.$","%1")
- return str
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
end
-
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
+ end
end
-
-file.expandname = dir.expandname -- for convenience
-
-local stack = { }
-
+file.expandname=dir.expandname
+local stack={}
function dir.push(newdir)
- insert(stack,lfs.currentdir())
+ insert(stack,currentdir())
+ if newdir and newdir~="" then
+ chdir(newdir)
+ end
end
-
function dir.pop()
- local d = remove(stack)
- if d then
- lfs.chdir(d)
- end
- return d
+ local d=remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
end
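-- Editor's sketch (not part of this changeset): push() now also changes into the
-- given directory; pop() restores the previous one from the stack.
dir.push("build")   -- remember where we are and chdir into build/
-- ... work relative to build/ ...
dir.pop()           -- back to the remembered directory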
@@ -3720,55 +3694,71 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
+-- original size: 1781, stripped down to: 1503
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
+ if b then return 1 else return 0 end
end
-
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- else
- return str
- end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-string.toboolean = toboolean
-
function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
end
- return default
+ end
+ return default
end
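-- Behaviour sketch of the rewritten boolean helpers (editor's illustration, not
-- part of this changeset).
toboolean("true")          -- true
toboolean("yes")           -- false: "yes"/"on"/"t" only count in tolerant mode
toboolean("yes",true)      -- true
string.booleanstring("1")  -- true
string.is_boolean("off")   -- false
string.is_boolean("maybe") -- nil: unknown strings fall back to the default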
@@ -3776,360 +3766,536 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-unicode'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not unicode then
-
- unicode = { utf8 = { } }
-
- local floor, char = math.floor, string.char
-
- function unicode.utf8.utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
- end
-
-end
-
-local unicode = unicode
-
-utf = utf or unicode.utf8
-
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
-
-local utfsplitlines = string.utfsplitlines
+package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- 0 EF BB BF UTF-8
--- 1 FF FE UTF-16-little-endian
--- 2 FE FF UTF-16-big-endian
--- 3 FF FE 00 00 UTF-32-little-endian
--- 4 00 00 FE FF UTF-32-big-endian
+-- original size: 26810, stripped down to: 11943
-unicode.utfname = {
- [0] = 'utf-8',
- [1] = 'utf-16-le',
- [2] = 'utf-16-be',
- [3] = 'utf-32-le',
- [4] = 'utf-32-be'
+if not modules then modules={} end modules ['l-unicode']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated
-
-function unicode.utftype(f)
- local str = f:read(4)
- if not str then
- f:seek('set')
- return 0
- -- elseif find(str,"^%z%z\254\255") then -- depricated
- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
- return 4
- -- elseif find(str,"^\255\254%z%z") then -- depricated
- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
- return 3
- elseif find(str,"^\254\255") then
- f:seek('set',2)
- return 2
- elseif find(str,"^\255\254") then
- f:seek('set',2)
- return 1
- elseif find(str,"^\239\187\191") then
- f:seek('set',3)
- return 0
+utf=utf or (unicode and unicode.utf8) or {}
+utf.characters=utf.characters or string.utfcharacters
+utf.values=utf.values or string.utfvalues
+local type=type
+local char,byte,format,sub=string.char,string.byte,string.format,string.sub
+local concat=table.concat
+local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local bytepairs=string.bytepairs
+local finder=lpeg.finder
+local replacer=lpeg.replacer
+local utfvalues=utf.values
+local utfgmatch=utf.gmatch
+local p_utftype=patterns.utftype
+local p_utfoffset=patterns.utfoffset
+local p_utf8char=patterns.utf8char
+local p_utf8byte=patterns.utf8byte
+local p_utfbom=patterns.utfbom
+local p_newline=patterns.newline
+local p_whitespace=patterns.whitespace
+if not unicode then
+ unicode={ utf=utf }
+end
+if not utf.char then
+ local floor,char=math.floor,string.char
+ function utf.char(n)
+ if n<0x80 then
+ return char(n)
+ elseif n<0x800 then
+ return char(
+ 0xC0+floor(n/0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x10000 then
+ return char(
+ 0xE0+floor(n/0x1000),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x200000 then
+ return char(
+ 0xF0+floor(n/0x40000),
+ 0x80+(floor(n/0x1000)%0x40),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
else
- f:seek('set')
- return 0
+ return ""
+ end
+ end
+end
+if not utf.byte then
+ local utf8byte=patterns.utf8byte
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+end
+local utfchar,utfbyte=utf.char,utf.byte
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+local toentities=Cs (
+ (
+ patterns.utf8one+(
+ patterns.utf8two+patterns.utf8three+patterns.utf8four
+ )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+patterns.toentities=toentities
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+local one=P(1)
+local two=C(1)*C(1)
+local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
+local pattern=P("\254\255")*Cs((
+ four/function(a,b,c,d)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(a,b)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )+P("\255\254")*Cs((
+ four/function(b,a,d,c)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(b,a)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s
+end
+local validatedutf=Cs (
+ (
+ patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
+ )^0
+)
+patterns.validatedutf=validatedutf
+function utf.is_valid(str)
+ return type(str)=="string" and lpegmatch(validatedutf,str) or false
+end
+if not utf.len then
+ local n,f=0,1
+ local utfcharcounter=patterns.utfbom^-1*Cmt (
+ Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
+ function(_,t,d)
+ n=n+(t-f)/d
+ f=t
+ return true
+ end
+ )^0
+ function utf.len(str)
+ n,f=0,1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+end
+utf.length=utf.len
+if not utf.sub then
+ local utflength=utf.length
+ local b,e,n,first,last=0,0,0,0,0
+ local function slide_zero(s,p)
+ n=n+1
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_one(s,p)
+ n=n+1
+ if n==first then
+ b=p
end
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_two(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ else
+ return true
+ end
+ end
+ local pattern_zero=Cmt(p_utf8char,slide_zero)^0
+ local pattern_one=Cmt(p_utf8char,slide_one )^0
+ local pattern_two=Cmt(p_utf8char,slide_two )^0
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start==0 then
+ start=1
+ end
+ if not stop then
+ if start<0 then
+ local l=utflength(str)
+ start=l+start
+ else
+ start=start-1
+ end
+ b,n,first=0,0,start
+ lpegmatch(pattern_two,str)
+ if n>=first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start<0 or stop<0 then
+ local l=utf.length(str)
+ if start<0 then
+ start=l+start
+ if start<=0 then
+ start=1
+ else
+ start=start+1
+ end
+ end
+ if stop<0 then
+ stop=l+stop
+ if stop==0 then
+ stop=1
+ else
+ stop=stop+1
+ end
+ end
+ end
+ if start>stop then
+ return ""
+ elseif start>1 then
+ b,e,n,first,last=0,0,0,start-1,stop
+ lpegmatch(pattern_one,str)
+ if n>=first and e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ else
+ b,e,n,last=1,0,0,stop
+ lpegmatch(pattern_zero,str)
+ if e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ end
+ end
+end
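-- Editor's sketch (not part of this changeset): these helpers count characters,
-- not bytes, so multibyte strings are sliced safely.
utf.len("åäö")     -- 3
utf.sub("åäö",2)   -- "äö"
utf.sub("åäö",1,2) -- "åä"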
+function utf.remapper(mapping)
+ local pattern=Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+end
+function utf.replacer(t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+function utf.subtituter(t)
+ local f=finder (t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ local i=lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i>#str then
+ return str
+ else
+ return lpegmatch(r,str)
+ end
+ end
+end
+local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
+local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
+patterns.utflinesplitter=utflinesplitter
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+function utf.split(str,ignorewhitespace)
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+function utf.totable(str)
+ return lpegmatch(utfcharsplitter_raw,str)
+end
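-- Sketch of the new lpeg splitters (editor's illustration, not part of this
-- changeset).
utf.splitlines("foo\nbar") -- { "foo", "bar" }
utf.split("a b",true)      -- { "a", "b" }, whitespace ignored
utf.totable("åäö")         -- { "å", "ä", "ö" }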
+function utf.magic(f)
+ local str=f:read(4) or ""
+ local off=lpegmatch(p_utfoffset,str)
+ if off<4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
end
-
-
-
local function utf16_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*left+right
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf16_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*right+left
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
- else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*256*256*a+256*256*b
+ else
+ r=r+1
+ result[r]=utfchar(more+256*a+b)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
- r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*b+a
+ else
+ r=r+1
+ result[r]=utfchar(more+256*256*256*b+256*256*a)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
-unicode.utf32_to_utf8_be = utf32_to_utf8_be
-unicode.utf32_to_utf8_le = utf32_to_utf8_le
-unicode.utf16_to_utf8_be = utf16_to_utf8_be
-unicode.utf16_to_utf8_le = utf16_to_utf8_le
-
-function unicode.utf8_to_utf8(t)
- return type(t) == "string" and utfsplitlines(t) or t
+utf.utf32_to_utf8_be=utf32_to_utf8_be
+utf.utf32_to_utf8_le=utf32_to_utf8_le
+utf.utf16_to_utf8_be=utf16_to_utf8_be
+utf.utf16_to_utf8_le=utf16_to_utf8_le
+function utf.utf8_to_utf8(t)
+ return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-
-function unicode.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
end
-
-function unicode.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
end
-
local function little(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b%256,b/256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b%256,b/256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
end
-
local function big(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b/256,b%256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b/256,b%256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+local _,l_remap=utf.remapper(little)
+local _,b_remap=utf.remapper(big)
+function utf.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254)..lpegmatch(l_remap,str)
+ else
+ return char(254,255)..lpegmatch(b_remap,str)
+ end
+end
+local pattern=Cs (
+ (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
end
-
-function unicode.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+function utf.ustring(s)
+ return format("U+%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.xstring(s)
+ return format("0x%05X",type(s)=="number" and s or utfbyte(s))
+end
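-- Sketch of the codepoint formatters (editor's illustration, not part of this
-- changeset).
utf.tocodes("åäö")     -- "0x00E5 0x00E4 0x00F6"
utf.tocodes("abc","-") -- "0x0061-0x0062-0x0063"
utf.ustring("å")       -- "U+000E5"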
+local p_nany=p_utf8char/""
+if utfgmatch then
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local n=0
+ for _ in utfgmatch(str,what) do
+ n=n+1
+ end
+ return n
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+else
+ local cache={}
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local p=cache[what]
+ if not p then
+ p=Cs((P(what)/" "+p_nany)^0)
+ cache[what]=p
+ end
+ return #lpegmatch(p,str)
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+end
+if not utf.characters then
+ function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+ string.utfcharacters=utf.characters
+end
+if not utf.values then
+ local find=string.find
+ local dummy=function()
+ end
+ function utf.values(str)
+ local n=#str
+ if n==0 then
+ return dummy
+ elseif n==1 then
+ return function() return utfbyte(str) end
else
- return char(254,255) .. utfgsub(str,".",big)
+ local p=1
+ return function()
+ local b,e=find(str,".[\128-\191]*",p)
+ if b then
+ p=e+1
+ return utfbyte(sub(str,b,e))
+ end
+ end
end
+ end
+ string.utfvalues=utf.values
end
-function unicode.utfcodes(str)
- local t, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- t[n] = format("0x%04X",u)
- end
- return concat(t,separator or " ")
-end
-function unicode.ustring(s)
- return format("U+%05X",type(s) == "number" and s or utfbyte(s))
-end
+end -- of closure
-function unicode.xstring(s)
- return format("0x%05X",type(s) == "number" and s or utfbyte(s))
-end
+do -- create closure to overcome 200 locals limit
+package.loaded["l-math"] = package.loaded["l-math"] or true
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
+-- original size: 915, stripped down to: 836
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
+if not math.mod then
+ function math.mod(n,m) return n%m end
end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
end
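-- Editor's sketch (not part of this changeset); these definitions only kick in
-- when the running Lua does not already provide them.
math.round(2.5) -- 3
math.div(7,2)   -- 3
math.odd(5)     -- true
math.sind(90)   -- 1, degree-based wrapper around math.sin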
@@ -4137,213 +4303,852 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-str"] = package.loaded["util-str"] or true
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
-end
+-- original size: 22834, stripped down to: 12570
-if not math.div then
- function math.div(n,m) return floor(n/m) end
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
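-- Editor's sketch (not part of this changeset): scaled points (65536 sp = 1 pt)
-- rendered as TeX dimensions, with trailing zeros stripped.
number.points(65536)     -- "1pt"
number.basepoints(65536) -- "0.99626bp" (1 pt expressed in big points)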
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4,0
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
+local function use(t,fmt,...)
+ return t[fmt](...)
end
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-tab'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-tab"] = package.loaded["util-tab"] or true
-utilities = utilities or {}
-utilities.tables = utilities.tables or { }
-local tables = utilities.tables
-
-local format, gmatch, rep = string.format, string.gmatch, string.rep
-local concat, insert, remove = table.concat, table.insert, table.remove
-local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-
-function tables.definetable(target) -- defines undefined tables
- local composed, t, n = nil, { }, 0
- for name in gmatch(target,"([^%.]+)") do
- n = n + 1
- if composed then
- composed = composed .. "." .. name
- else
- composed = name
- end
- t[n] = format("%s = %s or { }",composed,composed)
- end
- return concat(t,"\n")
-end
+-- original size: 14491, stripped down to: 8512
+if not modules then modules={} end modules ['util-tab']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.tables=utilities.tables or {}
+local tables=utilities.tables
+local format,gmatch,gsub=string.format,string.gmatch,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
+local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
+local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
+local serialize,sortedkeys,sortedpairs=table.serialize,table.sortedkeys,table.sortedpairs
+local formatters=string.formatters
+local splitter=lpeg.tsplitat(".")
+function tables.definetable(target,nofirst,nolast)
+ local composed,shortcut,t=nil,nil,{}
+ local snippets=lpegmatch(splitter,target)
+ for i=1,#snippets-(nolast and 1 or 0) do
+ local name=snippets[i]
+ if composed then
+ composed=shortcut.."."..name
+ shortcut=shortcut.."_"..name
+ t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ else
+ composed=name
+ shortcut=name
+ if not nofirst then
+ t[#t+1]=formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed=shortcut.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+end
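+-- usage sketch (illustrative, hypothetical name): tables.definetable("a.b.c")
+-- returns a code string that defines the chain step by step:
+--   a = a or { }
+--   local a_b = a.b if not a_b then a_b = { } a.b = a_b end
+--   local a_b_c = a_b.c if not a_b_c then a_b_c = { } a_b.c = a_b_c end
+-- plus "a_b.c" as the composed accessor for the last component.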
+function tables.definedtable(...)
+ local t=_G
+ for i=1,select("#",...) do
+ local li=select(i,...)
+ local tl=t[li]
+ if not tl then
+ tl={}
+ t[li]=tl
+ end
+ t=tl
+ end
+ return t
+end
function tables.accesstable(target,root)
- local t = root or _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t=t[name]
+ if not t then
+ return
end
- return t
+ end
+ return t
end
-
function tables.migratetable(target,v,root)
- local t = root or _G
- local names = string.split(target,".")
- for i=1,#names-1 do
- local name = names[i]
- t[name] = t[name] or { }
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ local names=string.split(target,".")
+ for i=1,#names-1 do
+ local name=names[i]
+ t[name]=t[name] or {}
+ t=t[name]
+ if not t then
+ return
end
- t[names[#names]] = v
+ end
+ t[names[#names]]=v
end
-
-function tables.removevalue(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
+function tables.removevalue(t,value)
+ if value then
+ for i=1,#t do
+ if t[i]==value then
+ remove(t,i)
+ end
end
+ end
end
-
function tables.insertbeforevalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i,extra)
+ return
end
- insert(t,1,extra)
+ end
+ insert(t,1,extra)
end
-
function tables.insertaftervalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
+local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
+function table.tocsv(t,specification)
+ if t and #t>0 then
+ local result={}
+ local r={}
+ specification=specification or {}
+ local fields=specification.fields
+ if type(fields)~="string" then
+ fields=sortedkeys(t[1])
+ end
+ local separator=specification.separator or ","
+ if specification.preamble==true then
+ for f=1,#fields do
+ r[f]=lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1]=concat(r,separator)
end
for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-
--- experimental
-
-local function toxml(t,d,result,step)
- for k, v in table.sortedpairs(t) do
- if type(v) == "table" then
- if type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</entry>",d,k)
- else
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</%s>",d,k)
- end
- elseif type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
+ local ti=t[i]
+ for f=1,#fields do
+ local field=ti[fields[f]]
+ if type(field)=="string" then
+ r[f]=lpegmatch(escape,field)
else
- result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
+ r[f]=tostring(field)
end
+ end
+ result[#result+1]=concat(r,separator)
end
+ return concat(result,"\n")
+ else
+ return ""
+ end
end
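+-- usage sketch (illustrative, hypothetical data):
+--   local t = { { name = "foo", n = 1 }, { name = 'say "hi"', n = 2 } }
+--   table.tocsv(t,{ preamble = true, separator = ";" })
+-- builds a header from the sorted keys of t[1] and one record per entry;
+-- string fields are quoted and embedded quotes are doubled ("").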
-
-function table.toxml(t,name,nobanner,indent,spaces)
- local noroot = name == false
- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
- local indent = rep(" ",indent or 0)
- local spaces = rep(" ",spaces or 1)
- if noroot then
- toxml( t, inndent, result, spaces)
+local nspaces=utilities.strings.newrepeater(" ")
+local function toxml(t,d,result,step)
+ for k,v in sortedpairs(t) do
+ local s=nspaces[d]
+ local tk=type(k)
+ local tv=type(v)
+ if tv=="table" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</entry>"](s,k)
+ else
+ result[#result+1]=formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</%s>"](s,k)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
else
- toxml( { [name or "root"] = t }, indent, result, spaces)
- end
- return concat(result,"\n")
+ result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+function table.toxml(t,specification)
+ specification=specification or {}
+ local name=specification.name
+ local noroot=name==false
+ local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent=specification.indent or 0
+ local spaces=specification.spaces or 1
+ if noroot then
+ toxml(t,indent,result,spaces)
+ else
+ toxml({ [name or "data"]=t },indent,result,spaces)
+ end
+ return concat(result,"\n")
end
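+-- usage sketch (illustrative, hypothetical data):
+--   table.toxml({ title = "demo", list = { "a", "b" } },{ name = "root" })
+-- emits the xml banner plus nested <root>...</root> elements; numeric keys
+-- become <entry n='...'> nodes and string values go through the %!xml! escape.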
-
--- also experimental
-
--- encapsulate(table,utilities.tables)
--- encapsulate(table,utilities.tables,true)
--- encapsulate(table,true)
-
function tables.encapsulate(core,capsule,protect)
- if type(capsule) ~= "table" then
- protect = true
- capsule = { }
+ if type(capsule)~="table" then
+ protect=true
+ capsule={}
+ end
+ for key,value in next,core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key]=value
+ end
+ end
+ if protect then
+ for key,value in next,core do
+ core[key]=nil
end
- for key, value in next, core do
+ setmetatable(core,{
+ __index=capsule,
+ __newindex=function(t,key,value)
if capsule[key] then
- print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
- os.exit()
+ print(formatters["\ninvalid %s %a' in %a"]("overload",key,core))
+ os.exit()
else
- capsule[key] = value
- end
- end
- if protect then
- for key, value in next, core do
- core[key] = nil
- end
- setmetatable(core, {
- __index = capsule,
- __newindex = function(t,key,value)
- if capsule[key] then
- print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
- os.exit()
- else
- rawset(t,key,value)
- end
- end
- } )
- end
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+local function fastserialize(t,r,outer)
+ r[#r+1]="{"
+ local n=#t
+ if n>0 then
+ for i=1,n do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["%q,"](v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["%s,"](v)
+ elseif tv=="table" then
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["%S,"](v)
+ end
+ end
+ else
+ for k,v in next,t do
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["[%q]=%q,"](k,v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["[%q]=%s,"](k,v)
+ elseif tv=="table" then
+ r[#r+1]=formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1]="}"
+ else
+ r[#r+1]="},"
+ end
+ return r
+end
+function table.fastserialize(t,prefix)
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+function table.deserialize(str)
+ if not str or str=="" then
+ return
+ end
+ local code=load(str)
+ if not code then
+ return
+ end
+ code=code()
+ if not code then
+ return
+ end
+ return code
+end
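+-- usage sketch (illustrative, hypothetical data): a quick round trip for
+-- plain data tables (array part or hash part, not mixed):
+--   local s = table.fastserialize({ "a", "b", 3 })  -- loadable "return{...}" string
+--   local t = table.deserialize(s)                  -- { "a", "b", 3 } again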
+function table.load(filename)
+ if filename then
+ local t=io.loaddata(filename)
+ if t and t~="" then
+ t=load(t)
+ if type(t)=="function" then
+ t=t()
+ if type(t)=="table" then
+ return t
+ end
+ end
+ end
+ end
+end
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n==nil and true or n,...))
+end
+local function slowdrop(t)
+ local r={}
+ local l={}
+ for i=1,#t do
+ local ti=t[i]
+ local j=0
+ for k,v in next,ti do
+ j=j+1
+ l[j]=formatters["%s=%q"](k,v)
+ end
+ r[i]=formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+local function fastdrop(t)
+ local r={ "return {\n" }
+ for i=1,#t do
+ local ti=t[i]
+ r[#r+1]=" {"
+ for k,v in next,ti do
+ r[#r+1]=formatters["%s=%q"](k,v)
+ end
+ r[#r+1]="},\n"
+ end
+ r[#r+1]="}"
+ return concat(r)
+end
+function table.drop(t,slow)
+ if #t==0 then
+ return "return { }"
+ elseif slow==true then
+ return slowdrop(t)
+ else
+ return fastdrop(t)
+ end
+end
+function table.autokey(t,k)
+ local v={}
+ t[k]=v
+ return v
+end
+local selfmapper={ __index=function(t,k) t[k]=k return k end }
+function table.twowaymapper(t)
+ if not t then
+ t={}
+ else
+ for i=0,#t do
+ local ti=t[i]
+ if ti then
+ local i=tostring(i)
+ t[i]=ti
+ t[ti]=i
+ end
+ end
+ t[""]=t[0] or ""
+ end
+ setmetatable(t,selfmapper)
+ return t
end
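+-- usage sketch (illustrative, hypothetical data): twowaymapper indexes a list
+-- in both directions (with string keys) and echoes unknown keys back:
+--   local m = table.twowaymapper { "january", "february" }
+--   -- m["1"] == "january", m["january"] == "1", m["x"] == "x"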
@@ -4351,297 +5156,155 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-sto'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatable, getmetatable = setmetatable, getmetatable
+package.loaded["util-sto"] = package.loaded["util-sto"] or true
-utilities = utilities or { }
-utilities.storage = utilities.storage or { }
-local storage = utilities.storage
+-- original size: 4432, stripped down to: 3123
+if not modules then modules={} end modules ['util-sto']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local setmetatable,getmetatable,type=setmetatable,getmetatable,type
+utilities=utilities or {}
+utilities.storage=utilities.storage or {}
+local storage=utilities.storage
function storage.mark(t)
- if not t then
- texio.write_nl("fatal error: storage cannot be marked")
- return -- os.exit()
- end
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.allocate(t)
- t = t or { }
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ t=t or {}
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.marked(t)
- local m = getmetatable(t)
- return m and m.__storage__
+ local m=getmetatable(t)
+ return m and m.__storage__
end
-
function storage.checked(t)
- if not t then
- texio.write_nl("fatal error: storage has not been allocated")
- return -- os.exit()
- end
- return t
+ if not t then
+ report("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
end
-
-
function storage.setinitializer(data,initialize)
- local m = getmetatable(data) or { }
- m.__index = function(data,k)
- m.__index = nil -- so that we can access the entries during initializing
- initialize()
- return data[k]
- end
- setmetatable(data, m)
-end
-
-local keyisvalue = { __index = function(t,k)
- t[k] = k
- return k
+ local m=getmetatable(data) or {}
+ m.__index=function(data,k)
+ m.__index=nil
+ initialize()
+ return data[k]
+ end
+ setmetatable(data,m)
+end
+local keyisvalue={ __index=function(t,k)
+ t[k]=k
+ return k
end }
-
function storage.sparse(t)
- t = t or { }
- setmetatable(t,keyisvalue)
- return t
-end
-
--- table namespace ?
-
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
-
-local t_empty = { __index = f_empty }
-local t_self = { __index = f_self }
-local t_ignore = { __newindex = f_ignore }
-
+ t=t or {}
+ setmetatable(t,keyisvalue)
+ return t
+end
+local function f_empty () return "" end
+local function f_self (t,k) t[k]=k return k end
+local function f_table (t,k) local v={} t[k]=v return v end
+local function f_ignore() end
+local t_empty={ __index=f_empty }
+local t_self={ __index=f_self }
+local t_table={ __index=f_table }
+local t_ignore={ __newindex=f_ignore }
function table.setmetatableindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "empty" then
- m.__index = f_empty
- elseif f == "key" then
- m.__index = f_self
- else
- m.__index = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="empty" then
+ m.__index=f_empty
+ elseif f=="key" then
+ m.__index=f_self
+ elseif f=="table" then
+ m.__index=f_table
else
- if f == "empty" then
- setmetatable(t, t_empty)
- elseif f == "key" then
- setmetatable(t, t_self)
- else
- setmetatable(t,{ __index = f })
- end
+ m.__index=f
+ end
+ else
+ if f=="empty" then
+ setmetatable(t,t_empty)
+ elseif f=="key" then
+ setmetatable(t,t_self)
+ elseif f=="table" then
+ setmetatable(t,t_table)
+ else
+ setmetatable(t,{ __index=f })
end
- return t
+ end
+ return t
end
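+-- usage sketch (illustrative, hypothetical tables): the string modes install
+-- predefined index handlers:
+--   local cache = table.setmetatableindex({},"table")
+--   cache.fonts.serif = true          -- missing subtables appear on demand
+--   local keys = table.setmetatableindex({},"key")
+--   print(keys.whatever)              -- "whatever": a key maps to itself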
-
function table.setmetatablenewindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "ignore" then
- m.__newindex = f_ignore
- else
- m.__newindex = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="ignore" then
+ m.__newindex=f_ignore
else
- if f == "ignore" then
- setmetatable(t, t_ignore)
- else
- setmetatable(t,{ __newindex = f })
- end
+ m.__newindex=f
end
- return t
-end
-
-function table.setmetatablecall(t,f)
- local m = getmetatable(t)
- if m then
- m.__call = f
+ else
+ if f=="ignore" then
+ setmetatable(t,t_ignore)
else
- setmetatable(t,{ __call = f })
+ setmetatable(t,{ __newindex=f })
end
- return t
+ end
+ return t
+end
+function table.setmetatablecall(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__call=f
+ else
+ setmetatable(t,{ __call=f })
+ end
+ return t
end
-
function table.setmetatablekey(t,key,value)
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m[key] = value
- return t
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m[key]=value
+ return t
end
-
function table.getmetatablekey(t,key,value)
- local m = getmetatable(t)
- return m and m[key]
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-mrg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- hm, quite unreadable
-
-local gsub, format = string.gsub, string.format
-local concat = table.concat
-local type, next = type, next
-
-utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
-utilities.report = logs and logs.reporter("system") or print
-
-local merger = utilities.merger
-
-merger.strip_comment = true
-
-local m_begin_merge = "begin library merge"
-local m_end_merge = "end library merge"
-local m_begin_closure = "do -- create closure to overcome 200 locals limit"
-local m_end_closure = "end -- of closure"
-
-local m_pattern =
- "%c+" ..
- "%-%-%s+" .. m_begin_merge ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. m_end_merge ..
- "%c+"
-
-local m_format =
- "\n\n-- " .. m_begin_merge ..
- "\n%s\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local m_faked =
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. m_begin_merge .. "\n\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local function self_fake()
- return m_faked
-end
-
-local function self_nothing()
- return ""
-end
-
-local function self_load(name)
- local data = io.loaddata(name) or ""
- if data == "" then
- utilities.report("merge: unknown file %s",name)
- else
- utilities.report("merge: inserting %s",name)
- end
- return data or ""
-end
-
-local function self_save(name, data)
- if data ~= "" then
- if merger.strip_comment then
- -- saves some 20K
- local n = #data
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
- utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
- end
- io.savedata(name,data)
- utilities.report("merge: saving %s",name)
- end
-end
-
-local function self_swap(data,code)
- return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
-end
-
-local function self_libs(libs,list)
- local result, f, frozen, foundpath = { }, nil, false, nil
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- utilities.report("merge: checking library path %s",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- utilities.report("merge: using library path %s",foundpath)
- local right, wrong = { }, { }
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- utilities.report("merge: using library %s",fullname)
- right[#right+1] = lib
- result[#result+1] = m_begin_closure
- result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = m_end_closure
- else
- utilities.report("merge: skipping library %s",fullname)
- wrong[#wrong+1] = lib
- end
- end
- if #right > 0 then
- utilities.report("merge: used libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utilities.report("merge: skipped libraries: %s",concat(wrong," "))
- end
- else
- utilities.report("merge: no valid library path found")
- end
- return concat(result, "\n\n")
-end
-
-function merger.selfcreate(libs,list,target)
- if target then
- self_save(target,self_swap(self_fake(),self_libs(libs,list)))
- end
-end
-
-function merger.selfmerge(name,libs,list,target)
- self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
-end
-
-function merger.selfclean(name)
- self_save(name,self_swap(self_load(name),self_nothing()))
+ local m=getmetatable(t)
+ return m and m[key]
end
@@ -4649,529 +5312,418 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
-
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
- end
- end
-end
-
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- utilities.report("lua: compiling %s into %s",luafile,lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
- if strip ~= false then
- command = "-s " .. command
- end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
- if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
- end
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- utilities.report("lua: removing %s",luafile)
- os.remove(luafile)
- end
- return done
-end
-
-
-
-
-
-
+package.loaded["util-prs"] = package.loaded["util-prs"] or true
+-- original size: 16976, stripped down to: 12143
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-prs'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['util-prs']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
-local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
-local tostring, type, next = tostring, type, next
-
-utilities = utilities or {}
-utilities.parsers = utilities.parsers or { }
-local parsers = utilities.parsers
-parsers.patterns = parsers.patterns or { }
-
-local setmetatableindex = table.setmetatableindex
-local sortedhash = table.sortedhash
-
--- we could use a Cf Cg construct
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-lpeg.patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
+local lpeg,table,string=lpeg,table,string
+local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local concat,format,gmatch,find=table.concat,string.format,string.gmatch,string.find
+local tostring,type,next,rawset=tostring,type,next,rawset
+utilities=utilities or {}
+local parsers=utilities.parsers or {}
+utilities.parsers=parsers
+local patterns=parsers.patterns or {}
+parsers.patterns=patterns
+local setmetatableindex=table.setmetatableindex
+local sortedhash=table.sortedhash
+local digit=R("09")
+local space=P(' ')
+local equal=P("=")
+local comma=P(",")
+local lbrace=P("{")
+local rbrace=P("}")
+local lparent=P("(")
+local rparent=P(")")
+local period=S(".")
+local punctuation=S(".,:;")
+local spacer=lpegpatterns.spacer
+local whitespace=lpegpatterns.whitespace
+local newline=lpegpatterns.newline
+local anything=lpegpatterns.anything
+local endofstring=lpegpatterns.endofstring
+local nobrace=1-(lbrace+rbrace )
+local noparent=1-(lparent+rparent)
+local escape,left,right=P("\\"),P('{'),P('}')
+lpegpatterns.balanced=P {
+ [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
+ [2]=left*V(1)*right
}
-
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
-local spaces = space^0
-local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
-
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
-
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-
-local key = C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-
-local key = C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
-
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-
-local hash = { }
-
-local function set(key,value)
- hash[key] = value
-end
-
+local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
+local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
+local spaces=space^0
+local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
+local content=(1-endofstring)^0
+lpegpatterns.nestedbraces=nestedbraces
+lpegpatterns.nestedparents=nestedparents
+lpegpatterns.nested=nestedbraces
+lpegpatterns.argument=argument
+lpegpatterns.content=content
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local key=C((1-equal-comma)^1)
+local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
+local pattern_c=(space+comma)^0*(key*equal*value)
+local key=C((1-space-equal-comma)^1)
+local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
+local hash={}
local function set(key,value)
- hash[key] = value
-end
-
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
-
-parsers.patterns.settings_to_hash_a = pattern_a_s
-parsers.patterns.settings_to_hash_b = pattern_b_s
-parsers.patterns.settings_to_hash_c = pattern_c_s
-
+ hash[key]=value
+end
+local pattern_a_s=(pattern_a/set)^1
+local pattern_b_s=(pattern_b/set)^1
+local pattern_c_s=(pattern_c/set)^1
+patterns.settings_to_hash_a=pattern_a_s
+patterns.settings_to_hash_b=pattern_b_s
+patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
+ if how=="strict" then
+ return (pattern_c/set)^1
+ elseif how=="tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
end
-
function parsers.settings_to_hash(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_a_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return {}
+ end
end
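+-- usage sketch (illustrative, hypothetical input): the key=value parsers
+-- accept optional braces around values and skip leading spaces and commas:
+--   parsers.settings_to_hash("width=10pt, style={bold}, color=")
+--   --> { width = "10pt", style = "bold", color = "" }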
-
function parsers.settings_to_hash_tolerant(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return {}
+ end
end
-
function parsers.settings_to_hash_strict(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
end
-
-local separator = comma * space^0
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-parsers.patterns.settings_to_array = pattern
-
--- we could use a weak table as cache
-
+local separator=comma*space^0
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local pattern=spaces*Ct(value*(separator*value)^0)
+patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if not str or str == "" then
- return { }
- elseif strict then
- if find(str,"{") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
+ if not str or str=="" then
+ return {}
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
else
- return lpegmatch(pattern,str)
+ return { str }
end
+ else
+ return lpegmatch(pattern,str)
+ end
end
-
local function set(t,v)
- t[#t+1] = v
+ t[#t+1]=v
end
-
-local value = P(Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * Carg(1)
-
+local value=P(Carg(1)*value)/set
+local pattern=value*(separator*value)^0*Carg(1)
function parsers.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
+ return lpegmatch(pattern,str,nil,t)
end
-
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- tn = tn + 1
- t[tn] = key .. '=' .. yes
- elseif not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. no
- end
- elseif value or not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. tostring(value)
- end
- else
- tn = tn + 1
- t[tn] = key .. '=' .. value
- end
- end
+ if h then
+ local t,tn,s={},0,table.sortedkeys(h)
+ omit=omit and table.tohash(omit)
+ for i=1,#s do
+ local key=s[i]
+ if not omit or not omit[key] then
+ local value=h[key]
+ if type(value)=="boolean" then
+ if yes and no then
+ if value then
+ tn=tn+1
+ t[tn]=key..'='..yes
+ elseif not strict then
+ tn=tn+1
+ t[tn]=key..'='..no
+ end
+ elseif value or not strict then
+ tn=tn+1
+ t[tn]=key..'='..tostring(value)
+ end
+ else
+ tn=tn+1
+ t[tn]=key..'='..value
end
- return concat(t,separator or ",")
- else
- return ""
+ end
end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
end
-
function parsers.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
-end
-
-function parsers.simple_hash_to_string(h, separator)
- local t, tn = { }, 0
- for k, v in sortedhash(h) do
- if v then
- tn = tn + 1
- t[tn] = k
- end
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.settings_to_set(str,t)
+ t=t or {}
+ for s in gmatch(str,"[^, ]+") do
+ t[s]=true
+ end
+ return t
+end
+function parsers.simple_hash_to_string(h,separator)
+ local t,tn={},0
+ for k,v in sortedhash(h) do
+ if v then
+ tn=tn+1
+ t[tn]=k
end
- return concat(t,separator or ",")
+ end
+ return concat(t,separator or ",")
end
-
-local value = lbrace * C((nobrace + nested)^0) * rbrace
-local pattern = Ct((space + value)^0)
-
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
+local pattern_a=spaces*Ct(value*(separator*value)^0)
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s=lpegmatch(pattern_a,str)
+ if n==1 then
+ return unpack(s)
+ else
+ local t,tn={},0
+ for i=1,n do
+ for j=1,#s do
+ tn=tn+1
+ t[tn]=s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
+local pattern_b=spaces*Ct(value*(separator*value)^0)
+function parsers.settings_to_array_with_repeat(str,expand)
+ if expand then
+ return lpegmatch(pattern_b,str) or {}
+ else
+ return lpegmatch(pattern_a,str) or {}
+ end
+end
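+-- usage sketch (illustrative, hypothetical input; relies on a global unpack):
+-- with expand set, a count in front of a parenthesized group repeats it:
+--   parsers.settings_to_array_with_repeat("a,3(b,c)",true)
+--   --> { "a", "b", "c", "b", "c", "b", "c" }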
+local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
+local pattern=Ct((space+value)^0)
function parsers.arguments_to_table(str)
- return lpegmatch(pattern,str)
+ return lpegmatch(pattern,str)
end
-
--- temporary here (unoptimized)
-
function parsers.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = { }
- self[class] = sc
- if parentclass then
- local sp = self[parentclass]
- if not sp then
- sp = { }
- self[parentclass] = sp
- end
- setmetatableindex(sc,sp)
- end
- end
- parsers.settings_to_hash(settings,sc)
+ local sc=self[class]
+ if not sc then
+ sc={}
+ self[class]=sc
+ if parentclass then
+ local sp=self[parentclass]
+ if not sp then
+ sp={}
+ self[parentclass]=sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
end
-
function parsers.listitem(str)
- return gmatch(str,"[^, ]+")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-fmt'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ return gmatch(str,"[^, ]+")
+end
+local pattern=Cs { "start",
+ start=V("one")+V("two")+V("three"),
+ rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
+ thousand=digit*digit*digit,
+ one=digit*V("rest"),
+ two=digit*digit*V("rest"),
+ three=V("thousand")*V("rest"),
}
-
-utilities = utilities or { }
-utilities.formatters = utilities.formatters or { }
-local formatters = utilities.formatters
-
-local concat, format = table.concat, string.format
-local tostring, type = tostring, type
-local strip = string.strip
-
-local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
-local lpegmatch = lpeg.match
-
--- temporary here
-
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = Cs((number + 1)^0)
-
-
-lpeg.patterns.stripzeros = stripper
-
-function formatters.stripzeros(str)
- return lpegmatch(stripper,str)
-end
-
-function formatters.formatcolumns(result,between)
- if result and #result > 0 then
- between = between or " "
- local widths, numbers = { }, { }
- local first = result[1]
- local n = #first
- for i=1,n do
- widths[i] = 0
- end
- for i=1,#result do
- local r = result[i]
- for j=1,n do
- local rj = r[j]
- local tj = type(rj)
- if tj == "number" then
- numbers[j] = true
- end
- if tj ~= "string" then
- rj = tostring(rj)
- r[j] = rj
- end
- local w = #rj
- if w > widths[j] then
- widths[j] = w
- end
- end
- end
- for i=1,n do
- local w = widths[i]
- if numbers[i] then
- if w > 80 then
- widths[i] = "%s" .. between
- else
- widths[i] = "%0" .. w .. "i" .. between
- end
- else
- if w > 80 then
- widths[i] = "%s" .. between
- elseif w > 0 then
- widths[i] = "%-" .. w .. "s" .. between
- else
- widths[i] = "%s"
- end
- end
- end
- local template = strip(concat(widths))
- for i=1,#result do
- local str = format(template,unpack(result[i]))
- result[i] = strip(str)
- end
- end
- return result
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util.deb'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
-
-local getinfo = debug.getinfo
-local type, next, tostring = type, next, tostring
-local format, find = string.format, string.find
-local is_boolean = string.is_boolean
-
-utilities = utilities or { }
-utilities.debugger = utilities.debugger or { }
-local debugger = utilities.debugger
-
-local counters = { }
-local names = { }
-
--- one
-
-local function hook()
- local f = getinfo(2) -- "nS"
- if f then
- local n = "unknown"
- if f.what == "C" then
- n = f.name or '<anonymous>'
- if not names[n] then
- names[n] = format("%42s",n)
- end
- else
- -- source short_src linedefined what name namewhat nups func
- n = f.name or f.namewhat or f.what
- if not n or n == "" then
- n = "?"
- end
- if not names[n] then
- names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
- end
- end
- counters[n] = (counters[n] or 0) + 1
- end
-end
-
-function debugger.showstats(printer,threshold) -- hm, something has changed, rubish now
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- local dataset = { }
- for name, count in next, counters do
- dataset[#dataset+1] = { name, count }
- end
- table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
- for i=1,#dataset do
- local d = dataset[i]
- local name = d[1]
- local count = d[2]
- if count > threshold and not find(name,"for generator") then -- move up
- printer(format("%8i %s\n", count, names[name]))
- total = total + count
+lpegpatterns.splitthousands=pattern
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+local optionalwhitespace=whitespace^0
+lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
+lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
+lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
+local dquote=P('"')
+local equal=P('=')
+local escape=P('\\')
+local separator=S(' ,')
+local key=C((1-equal)^1)
+local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
+local pattern=Cf(Ct("")*Cg(key*equal*value)*separator^0,rawset)^0*P(-1)
+patterns.keq_to_hash_c=pattern
+function parsers.keq_to_hash(str)
+ if str and str~="" then
+ return lpegmatch(pattern,str)
+ else
+ return {}
+ end
+end
+local defaultspecification={ separator=",",quote='"' }
+function parsers.csvsplitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local whatever=C((1-separator-newline)^0)
+ if quotechar and quotechar~="" then
+ local quotedata=nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar=P(chr)
+ local quoteword=quotechar*C((1-quotechar)^0)*quotechar
+ if quotedata then
+ quotedata=quotedata+quoteword
+ else
+ quotedata=quoteword
+ end
+ end
+ whatever=quotedata+whatever
+ end
+ local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
+function parsers.rfc4180splitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=P(specification.quote)
+ local dquotechar=quotechar*quotechar/specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
+ local non_escaped=C((1-quotechar-newline-separator)^1)
+ local field=escaped+non_escaped
+ local record=Ct((field*separator^-1)^1)
+ local headerline=record*Cp()
+ local wholeblob=Ct((newline^-1*record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header,position=lpegmatch(headerline,data)
+ local data=lpegmatch(wholeblob,data,position)
+ return data,header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
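+-- usage sketch (illustrative, hypothetical data): the returned parser handles
+-- quoted fields with doubled quote characters:
+--   local split = parsers.rfc4180splitter { separator = ",", quote = '"' }
+--   local records, header = split('a,b\n1,"x""y"\n',true)
+--   -- header = { "a", "b" }, records = { { "1", 'x"y' } }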
+local function ranger(first,last,n,action)
+ if not first then
+ elseif last==true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+local cardinal=lpegpatterns.cardinal/tonumber
+local spacers=lpegpatterns.spacer^0
+local endofstring=lpegpatterns.endofstring
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
+function parsers.stepper(str,n,action)
+ if type(n)=="function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+patterns.unittotex=pattern
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+local cache={}
+local spaces=lpeg.patterns.space^0
+local dummy=function() end
+table.setmetatableindex(cache,function(t,k)
+ local separator=P(k)
+ local value=(1-separator)^0
+ local pattern=spaces*C(value)*separator^0*Cp()
+ t[k]=pattern
+ return pattern
+end)
+local commalistiterator=cache[","]
+function utilities.parsers.iterator(str,separator)
+ local n=#str
+ if n==0 then
+ return dummy
+ else
+ local pattern=separator and cache[separator] or commalistiterator
+ local p=1
+ return function()
+ if p<=n then
+ local s,e=lpegmatch(pattern,str,p)
+ if e then
+ p=e
+ return s
end
- grandtotal = grandtotal + count
- functions = functions + 1
+ end
end
- printer("\n")
- printer(format("functions : % 10i\n", functions))
- printer(format("total : % 10i\n", total))
- printer(format("grand total: % 10i\n", grandtotal))
- printer(format("threshold : % 10i\n", threshold))
+ end
end
-
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function initialize(t,name)
+ local source=t[name]
+ if source then
+ local result={}
+ for k,v in next,t[name] do
+ result[k]=v
end
+ return result
+ else
+ return {}
+ end
end
-
-function debugger.enable()
- debug.sethook(hook,"c")
+local function fetch(t,name)
+ return t[name] or {}
end
-
-function debugger.disable()
- debug.sethook()
+function process(result,more)
+ for k,v in next,more do
+ result[k]=v
+ end
+ return result
end
-
-
-
-
-
-local is_node = node and node.is_node
-local is_lpeg = lpeg and lpeg.type
-
-function inspect(i) -- global function
- local ti = type(i)
- if ti == "table" then
- table.print(i,"table")
- elseif is_node and is_node(i) then
- table.print(nodes.astable(i),tostring(i))
- elseif is_lpeg and is_lpeg(i) then
- lpeg.print(i)
- else
- print(tostring(i))
- end
-end
-
--- from the lua book:
-
-function traceback()
- local level = 1
- while true do
- local info = debug.getinfo(level, "Sl")
- if not info then
- break
- elseif info.what == "C" then
- print(format("%3i : C function",level))
- else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
- end
- level = level + 1
- end
+local name=C((1-S(", "))^1)
+local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
+local merge=Cf(parser,process)
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
end
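+-- usage sketch (illustrative, hypothetical data): mergehashes copies the named
+-- sub-hashes into one fresh table, later names overriding earlier ones:
+--   utilities.parsers.mergehashes({ a = { x = 1 }, b = { x = 2, y = 3 } },"a, b")
+--   --> { x = 2, y = 3 }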
@@ -5179,199 +5731,80 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- As we want to protect the global tables, we no longer store the timing
--- in the tables themselves but in a hidden timers table so that we don't
--- get warnings about assignments. This is more efficient than using rawset
--- and rawget.
-
-local format, lower = string.format, string.lower
-local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
-
-statistics = statistics or { }
-local statistics = statistics
-
-statistics.enable = true
-statistics.threshold = 0.05
-
-local statusinfo, n, registered, timers = { }, 0, { }, { }
+package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
-local function hastiming(instance)
- return instance and timers[instance]
-end
-
-local function resettiming(instance)
- timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
-end
-
-local function starttiming(instance)
- local timer = timers[instance or "notimer"]
- if not timer then
- timer = { }
- timers[instance or "notimer"] = timer
- end
- local it = timer.timing
- if not it then
- it = 0
- end
- if it == 0 then
- timer.starttime = clock()
- if not timer.loadtime then
- timer.loadtime = 0
- end
- end
- timer.timing = it + 1
-end
-
-local function stoptiming(instance, report)
- local timer = timers[instance or "notimer"]
- local it = timer.timing
- if it > 1 then
- timer.timing = it - 1
- else
- local starttime = timer.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- timer.stoptime = stoptime
- timer.loadtime = timer.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- timer.timing = 0
- return loadtime
- end
- end
- return 0
-end
-
-local function elapsedtime(instance)
- local timer = timers[instance or "notimer"]
- return format("%0.3f",timer and timer.loadtime or 0)
-end
-
-local function elapsedindeed(instance)
- local timer = timers[instance or "notimer"]
- return (timer and timer.loadtime or 0) > statistics.threshold
-end
-
-local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if elapsedindeed(instance) then
- return format("%s seconds %s", elapsedtime(instance),rest or "")
- end
-end
+-- original size: 2274, stripped down to: 1781
-statistics.hastiming = hastiming
-statistics.resettiming = resettiming
-statistics.starttiming = starttiming
-statistics.stoptiming = stoptiming
-statistics.elapsedtime = elapsedtime
-statistics.elapsedindeed = elapsedindeed
-statistics.elapsedseconds = elapsedseconds
-
--- general function
-
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+if not modules then modules={} end modules ['util-fmt']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.formatters=utilities.formatters or {}
+local formatters=utilities.formatters
+local concat,format=table.concat,string.format
+local tostring,type=tostring,type
+local strip=string.strip
+local lpegmatch=lpeg.match
+local stripper=lpeg.patterns.stripzeros
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
end
-
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
- end)
- collectgarbage("collect")
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
- end
+function formatters.formatcolumns(result,between)
+ if result and #result>0 then
+ between=between or " "
+ local widths,numbers={},{}
+ local first=result[1]
+ local n=#first
+ for i=1,n do
+ widths[i]=0
+ end
+ for i=1,#result do
+ local r=result[i]
+ for j=1,n do
+ local rj=r[j]
+ local tj=type(rj)
+ if tj=="number" then
+ numbers[j]=true
+ end
+ if tj~="string" then
+ rj=tostring(rj)
+ r[j]=rj
+ end
+ local w=#rj
+ if w>widths[j] then
+ widths[j]=w
+ end
+ end
+ end
+ for i=1,n do
+ local w=widths[i]
+ if numbers[i] then
+ if w>80 then
+ widths[i]="%s"..between
+ else
+ widths[i]="%0"..w.."i"..between
+ end
+ else
+ if w>80 then
+ widths[i]="%s"..between
+ elseif w>0 then
+ widths[i]="%-"..w.."s"..between
+ else
+ widths[i]="%s"
end
- write_nl("") -- final newline
- statistics.enable = false
+ end
end
-end
-
-local template, report_statistics, nn = nil, nil, 0 -- we only calcute it once
-
-function statistics.showjobstat(tag,data,n)
- if not logs then
- -- sorry
- elseif type(data) == "table" then
- for i=1,#data do
- statistics.showjobstat(tag,data[i],n)
- end
- else
- if not template or n > nn then
- template, n = format("%%-%ss - %%s",n), nn
- report_statistics = logs.reporter("mkiv lua stats")
- end
- report_statistics(format(template,tag,data))
+ local template=strip(concat(widths))
+ for i=1,#result do
+ local str=format(template,unpack(result[i]))
+ result[i]=strip(str)
end
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
-
-starttiming(statistics)
-
-function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
- return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
-end
-
-function statistics.runtime()
- stoptiming(statistics)
- return statistics.formatruntime(elapsedtime(statistics))
-end
-
-function statistics.timed(action,report)
- report = report or logs.reporter("system")
- starttiming("run")
- action()
- stoptiming("run")
- report("total runtime: %s",elapsedtime("run"))
-end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
+ end
+ return result
end
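+-- usage sketch (illustrative, hypothetical data): each column is padded to its
+-- widest cell so the rows line up:
+--   utilities.formatters.formatcolumns({ { "one", "alpha" }, { "twelve", "b" } })
+--   --> { "one    alpha", "twelve b" }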
@@ -5379,341 +5812,311 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tostring = type, next, tostring
-local concat = table.concat
-local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
-local is_boolean = string.is_boolean
-local settings_to_hash = utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
-
-utilities = utilities or { }
-local utilities = utilities
-utilities.setters = utilities.setters or { }
-local setters = utilities.setters
-
-local data = { } -- maybe just local
-
--- We can initialize from the cnf file. This is sort of tricky as
--- later defined setters also need to be initialized then. If set
--- this way, we need to ensure that they are not reset later on.
+package.loaded["trac-set"] = package.loaded["trac-set"] or true
-local trace_initialize = false -- only for testing during development
+-- original size: 12365, stripped down to: 8799
-function setters.initialize(filename,name,values) -- filename only for diagnostics
- local setter = data[name]
- if setter then
- local data = setter.data
- if data then
- for key, value in next, values do
- -- key = gsub(key,"_",".")
- value = is_boolean(value,value)
- local functions = data[key]
- if functions then
- if #functions > 0 and not functions.value then
- if trace_initialize then
- setter.report("executing %s (%s -> %s)",key,filename,tostring(value))
- end
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- else
- if trace_initialize then
- setter.report("skipping %s (%s -> %s)",key,filename,tostring(value))
- end
- end
- else
- -- we do a simple preregistration i.e. not in the
- -- list as it might be an obsolete entry
- functions = { default = value }
- data[key] = functions
- if trace_initialize then
- setter.report("storing %s (%s -> %s)",key,filename,tostring(value))
- end
- end
+if not modules then modules={} end modules ['trac-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring=type,next,tostring
+local concat=table.concat
+local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
+local is_boolean=string.is_boolean
+local settings_to_hash=utilities.parsers.settings_to_hash
+local allocate=utilities.storage.allocate
+utilities=utilities or {}
+local utilities=utilities
+local setters=utilities.setters or {}
+utilities.setters=setters
+local data={}
+local trace_initialize=false
+function setters.initialize(filename,name,values)
+ local setter=data[name]
+ if setter then
+ frozen=true
+ local data=setter.data
+ if data then
+ for key,newvalue in next,values do
+ local newvalue=is_boolean(newvalue,newvalue)
+ local functions=data[key]
+ if functions then
+ local oldvalue=functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
end
- return true
+ elseif #functions>0 and not oldvalue then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value=newvalue
+ functions.frozen=functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ functions={ default=newvalue,frozen=frozen }
+ data[key]=functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
end
+ end
+ return true
end
+ end
end
-
--- user interface code
-
local function set(t,what,newvalue)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = settings_to_hash(what) -- inefficient but ok
- end
- if type(what) ~= "table" then
- return
- end
- if not done then -- catch ... why not set?
- done = { }
- t.done = done
- end
- for w, value in next, what do
- if value == "" then
- value = newvalue
- elseif not value then
- value = false -- catch nil
- else
- value = is_boolean(value,value)
- end
- w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
- for name, functions in next, data do
- if done[name] then
- -- prevent recursion due to wildcards
- elseif find(name,w) then
- done[name] = true
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- end
- end
- end
+ local data=t.data
+ if not data.frozen then
+ local done=t.done
+ if type(what)=="string" then
+ what=settings_to_hash(what)
+ end
+ if type(what)~="table" then
+ return
+ end
+ if not done then
+ done={}
+ t.done=done
+ end
+ for w,value in next,what do
+ if value=="" then
+ value=newvalue
+ elseif not value then
+ value=false
+ else
+ value=is_boolean(value,value)
+ end
+ w=topattern(w,true,true)
+ for name,functions in next,data do
+ if done[name] then
+ elseif find(name,w) then
+ done[name]=true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value=value
+ end
+ end
+ end
+ end
end
-
local function reset(t)
- for name, functions in next, t.data do
- for i=1,#functions do
- functions[i](false)
- end
- functions.value = false
+ local data=t.data
+ if not data.frozen then
+ for name,functions in next,data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value=false
end
+ end
end
-
local function enable(t,what)
- set(t,what,true)
+ set(t,what,true)
end
-
local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
+ local data=t.data
+ if not what or what=="" then
+ t.done={}
+ reset(t)
+ else
+ set(t,what,false)
+ end
end
-
function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local functions = data[what]
- if not functions then
- functions = { }
- data[what] = functions
- if trace_initialize then
- t.report("defining %s",what)
- end
- end
- local default = functions.default -- can be set from cnf file
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "string" then
- if trace_initialize then
- t.report("coupling %s to %s",what,fnc)
- end
- local s = fnc -- else wrong reference
- fnc = function(value) set(t,s,value) end
- elseif typ ~= "function" then
- fnc = nil
- end
- if fnc then
- functions[#functions+1] = fnc
- -- default: set at command line or in cnf file
- -- value : set in tex run (needed when loading runtime)
- local value = functions.value or default
- if value ~= nil then
- fnc(value)
- functions.value = value
- end
- end
- end
- return false -- so we can use it in an assignment
+ local data=t.data
+ what=lower(what)
+ local functions=data[what]
+ if not functions then
+ functions={}
+ data[what]=functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default=functions.default
+ for i=1,select("#",...) do
+ local fnc=select(i,...)
+ local typ=type(fnc)
+ if typ=="string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s=fnc
+ fnc=function(value) set(t,s,value) end
+ elseif typ~="function" then
+ fnc=nil
+ end
+ if fnc then
+ functions[#functions+1]=fnc
+ local value=functions.value or default
+ if value~=nil then
+ fnc(value)
+ functions.value=value
+ end
+ end
+ end
+ return false
end
-
function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,what)
- t.enable, t.done = e, { }
+ local e=t.enable
+ t.enable,t.done=enable,{}
+ enable(t,what)
+ t.enable,t.done=e,{}
end
-
function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,what)
- t.disable, t.done = e, { }
+ local e=t.disable
+ t.disable,t.done=disable,{}
+ disable(t,what)
+ t.disable,t.done=e,{}
end
-
function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
+ t.done={}
+ reset(t)
+end
+function setters.list(t)
+ local list=table.sortedkeys(t.data)
+ local user,system={},{}
+ for l=1,#list do
+ local what=list[l]
+ if find(what,"^%*") then
+ system[#system+1]=what
+ else
+ user[#user+1]=what
end
- return user, system
+ end
+ return user,system
end
-
function setters.show(t)
- local category = t.name
- local list = setters.list(t)
- t.report()
- for k=1,#list do
- local name = list[k]
- local functions = t.data[name]
- if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
- end
- end
- t.report()
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module but we also want the rest available
-
-local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
-
-local function report(setter,...)
- local report = logs and logs.report
- if report then
- report(setter.name,...)
- else -- fallback, as this module is loaded before the logger
- write_nl(format("%-15s : %s\n",setter.name,format(...)))
- end
-end
-
-function setters.new(name)
- local setter -- we need to access it in setter itself
- setter = {
- data = allocate(), -- indexed, but also default and value fields
- name = name,
- report = function(...) report (setter,...) end,
- enable = function(...) enable (setter,...) end,
- disable = function(...) disable (setter,...) end,
- register = function(...) register(setter,...) end,
- list = function(...) list (setter,...) end,
- show = function(...) show (setter,...) end,
- }
- data[name] = setter
- return setter
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
-
-local t_enable, t_disable, t_report = trackers .enable, trackers .disable, trackers .report
-local d_enable, d_disable, d_report = directives .enable, directives .disable, directives .report
-local e_enable, e_disable, e_report = experiments.enable, experiments.disable, experiments.report
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
+ local category=t.name
+ local list=setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name=list[k]
+ local functions=t.data[name]
+ if functions then
+ local value,default,modules=functions.value,functions.default,#functions
+ value=value==nil and "unset" or tostring(value)
+ default=default==nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+local function default(setter,name)
+ local d=setter.data[name]
+ return d and d.default
+end
+local function value(setter,name)
+ local d=setter.data[name]
+ return d and (d.value or d.default)
+end
+function setters.new(name)
+ local setter
+ setter={
+ data=allocate(),
+ name=name,
+ report=function(...) setters.report (setter,...) end,
+ enable=function(...) enable (setter,...) end,
+ disable=function(...) disable (setter,...) end,
+ register=function(...) register(setter,...) end,
+ list=function(...) list (setter,...) end,
+ show=function(...) show (setter,...) end,
+ default=function(...) return default (setter,...) end,
+ value=function(...) return value (setter,...) end,
+ }
+ data[name]=setter
+ return setter
+end
+trackers=setters.new("trackers")
+directives=setters.new("directives")
+experiments=setters.new("experiments")
+local t_enable,t_disable=trackers .enable,trackers .disable
+local d_enable,d_disable=directives .enable,directives .disable
+local e_enable,e_disable=experiments.enable,experiments.disable
+local trace_directives=false local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
+local trace_experiments=false local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
function directives.enable(...)
- if trace_directives then
- d_report("enabling: %s",concat({...}," "))
- end
- d_enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
end
-
function directives.disable(...)
- if trace_directives then
- d_report("disabling: %s",concat({...}," "))
- end
- d_disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
end
-
function experiments.enable(...)
- if trace_experiments then
- e_report("enabling: %s",concat({...}," "))
- end
- e_enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
end
-
function experiments.disable(...)
- if trace_experiments then
- e_report("disabling: %s",concat({...}," "))
- end
- e_disable(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+directives.register("system.nostatistics",function(v)
+ if statistics then
+ statistics.enable=not v
+ else
+ end
end)
-
-directives.register("system.nolibraries", function(v)
- libraries = nil -- we discard this tracing for security
+directives.register("system.nolibraries",function(v)
+ if libraries then
+ libraries=nil
+ else
+ end
end)
-
--- experiment
-
-local flags = environment and environment.engineflags
-
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
+if environment then
+ local engineflags=environment.engineflags
+ if engineflags then
+ local list=engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ local list=engineflags["c:directives"] or engineflags["directives"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","directives",settings_to_hash(list))
end
+ end
end
-
--- here
-
if texconfig then
-
- -- this happens too late in ini mode but that is no problem
-
- local function set(k,v)
- v = tonumber(v)
- if v then
- texconfig[k] = v
- end
+ local function set(k,v)
+ v=tonumber(v)
+ if v then
+ texconfig[k]=v
end
-
- directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
- directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
- directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
- directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
- directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
- directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
- directives.register("luatex.paramsize", function(v) set("param_size",v) end)
- directives.register("luatex.savesize", function(v) set("save_size",v) end)
- directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
-
+ end
+ directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize",function(v) set("param_size",v) end)
+ directives.register("luatex.savesize",function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
end
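
A minimal sketch of the setters interface rebuilt above, assuming trac-set has been loaded; the tracker key mymodule.details is hypothetical:

    -- register a tracker and toggle it; enabling runs the coupled function with true
    local trace_details = false
    trackers.register("mymodule.details", function(v) trace_details = v end)

    trackers.enable ("mymodule.details")  -- trace_details is now true
    trackers.disable("mymodule.details")  -- trace_details is now false

The same register/enable/disable pattern applies to the directives and experiments setters created alongside it.
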
@@ -5721,643 +6124,559 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: fewer categories, more subcategories (e.g. nodes)
-
-
-local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
-local format, gmatch, find = string.format, string.gmatch, string.find
-local concat, insert, remove = table.concat, table.insert, table.remove
-local escapedpattern = string.escapedpattern
-local texcount = tex and tex.count
-local next, type = next, type
-
-local setmetatableindex = table.setmetatableindex
-
---[[ldx--
-<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging as parsing is relatively easy anyway.</p>
---ldx]]--
+package.loaded["trac-log"] = package.loaded["trac-log"] or true
-logs = logs or { }
-local logs = logs
+-- original size: 21795, stripped down to: 14194
-local moreinfo = [[
+if not modules then modules={} end modules ['trac-log']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
+local format,gmatch,find=string.format,string.gmatch,string.find
+local concat,insert,remove=table.concat,table.insert,table.remove
+local topattern=string.topattern
+local texcount=tex and tex.count
+local next,type,select=next,type,select
+local utfchar=utf.char
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+logs=logs or {}
+local logs=logs
+local moreinfo=[[
More information about ConTeXt and the tools that come with it can be found at:
-
+]].."\n"..[[
maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-
--- basic loggers
-
+utilities.strings.formatters.add (
+ formatters,"unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+utilities.strings.formatters.add (
+ formatters,"chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
local function ignore() end
-
-setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
-
-local report, subreport, status, settarget, setformats, settranslations
-
-local direct, subdirect, writer, pushtarget, poptarget
-
+setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
+local report,subreport,status,settarget,setformats,settranslations
+local direct,subdirect,writer,pushtarget,poptarget
if tex and (tex.jobname or tex.formatname) then
-
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
-
- local target = "term and log"
-
- logs.flush = io.flush
-
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
-
- writer = function(...)
- write_nl(target,...)
- end
-
- newline = function()
- write_nl(target,"\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s >\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- direct = function(a,b,c,...)
- if c then
- return format("%-15s > %s",translations[a],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s",translations[a],formats[b])
- elseif a then
- return format("%-15s >", translations[a])
- else
- return ""
- end
- end
-
- subreport = function(a,s,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],formats[b]))
- elseif a then
- write_nl(target,format("%-15s > %s >\n", translations[a],translations[s]))
- else
- write_nl(target,"\n")
- end
- end
-
- subdirect = function(a,s,b,c,...)
- if c then
- return format("%-15s > %s > %s",translations[a],translations[s],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s > %s",translations[a],translations[s],formats[b])
- elseif a then
- return format("%-15s > %s >", translations[a],translations[s])
- else
- return ""
- end
- end
-
- status = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s : %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s : %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s :\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
- if target == "term" or target == "term and log" then
- logs.flush = io.flush
- else
- logs.flush = ignore
- end
- end
-
- local stack = { }
-
- pushtarget = function(newtarget)
- insert(stack,target)
- settarget(newtarget)
- end
-
- poptarget = function()
- if #stack > 0 then
- settarget(remove(stack))
- end
- end
-
- setformats = function(f)
- formats = f
- end
-
- settranslations = function(t)
- translations = t
- end
-
+ local valueiskey={ __index=function(t,k) t[k]=k return k end }
+ local target="term and log"
+ logs.flush=io.flush
+ local formats={} setmetatable(formats,valueiskey)
+ local translations={} setmetatable(translations,valueiskey)
+ writer=function(...)
+ write_nl(target,...)
+ end
+ newline=function()
+ write_nl(target,"\n")
+ end
+ local f_one=formatters["%-15s > %s\n"]
+ local f_two=formatters["%-15s >\n"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s"]
+ local f_two=formatters["%-15s >"]
+ direct=function(a,b,c,...)
+ if c then
+ return f_one(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],formats[b])
+ elseif a then
+ return f_two(translations[a])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s\n"]
+ local f_two=formatters["%-15s > %s >\n"]
+ subreport=function(a,s,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s"]
+ local f_two=formatters["%-15s > %s >"]
+ subdirect=function(a,s,b,c,...)
+ if c then
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],translations[s],formats[b])
+ elseif a then
+ return f_two(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ }
+ settarget=function(whereto)
+ target=targets[whereto or "both"] or targets.both
+ if target=="term" or target=="term and log" then
+ logs.flush=io.flush
+ else
+ logs.flush=ignore
+ end
+ end
+ local stack={}
+ pushtarget=function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+ poptarget=function()
+ if #stack>0 then
+ settarget(remove(stack))
+ end
+ end
+ setformats=function(f)
+ formats=f
+ end
+ settranslations=function(t)
+ translations=t
+ end
else
-
- logs.flush = ignore
-
- writer = write_nl
-
- newline = function()
- write_nl("\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(format("%-15s | %s",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s",a,b))
- elseif a then
- write_nl(format("%-15s |", a))
- else
- write_nl("")
- end
- end
-
- subreport = function(a,sub,b,c,...)
- if c then
- write_nl(format("%-15s | %s | %s",a,sub,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s | %s",a,sub,b))
- elseif a then
- write_nl(format("%-15s | %s |", a,sub))
- else
- write_nl("")
- end
- end
-
- status = function(a,b,c,...) -- not to be used in lua anyway
- if c then
- write_nl(format("%-15s : %s\n",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s : %s\n",a,b)) -- b can have %'s
- elseif a then
- write_nl(format("%-15s :\n", a))
- else
- write_nl("\n")
- end
- end
-
- direct = ignore
- subdirect = ignore
-
- settarget = ignore
- pushtarget = ignore
- poptarget = ignore
- setformats = ignore
- settranslations = ignore
-
-end
-
-logs.report = report
-logs.subreport = subreport
-logs.status = status
-logs.settarget = settarget
-logs.pushtarget = pushtarget
-logs.poptarget = poptarget
-logs.setformats = setformats
-logs.settranslations = settranslations
-
-logs.direct = direct
-logs.subdirect = subdirect
-logs.writer = writer
-logs.newline = newline
-
--- installer
-
--- todo: renew (un) locks when a new one is added and wildcard
-
-local data, states = { }, nil
-
+ logs.flush=ignore
+ writer=write_nl
+ newline=function()
+ write_nl("\n")
+ end
+ local f_one=formatters["%-15s | %s"]
+ local f_two=formatters["%-15s |"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s | %s | %s"]
+ local f_two=formatters["%-15s | %s |"]
+ subreport=function(a,sub,b,c,...)
+ if c then
+ write_nl(f_one(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,sub,b))
+ elseif a then
+ write_nl(f_two(a,sub))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("\n")
+ end
+ end
+ direct=ignore
+ subdirect=ignore
+ settarget=ignore
+ pushtarget=ignore
+ poptarget=ignore
+ setformats=ignore
+ settranslations=ignore
+end
+logs.report=report
+logs.subreport=subreport
+logs.status=status
+logs.settarget=settarget
+logs.pushtarget=pushtarget
+logs.poptarget=poptarget
+logs.setformats=setformats
+logs.settranslations=settranslations
+logs.direct=direct
+logs.subdirect=subdirect
+logs.writer=writer
+logs.newline=newline
+local data,states={},nil
function logs.reporter(category,subcategory)
- local logger = data[category]
- if not logger then
- local state = false
- if states == true then
- state = true
- elseif type(states) == "table" then
- for c, _ in next, states do
- if find(category,c) then
- state = true
- break
- end
- end
+ local logger=data[category]
+ if not logger then
+ local state=false
+ if states==true then
+ state=true
+ elseif type(states)=="table" then
+ for c,_ in next,states do
+ if find(category,c) then
+ state=true
+ break
+ end
+ end
+ end
+ logger={
+ reporters={},
+ state=state,
+ }
+ data[category]=logger
+ end
+ local reporter=logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter=function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
end
- logger = {
- reporters = { },
- state = state,
- }
- data[category] = logger
- end
- local reporter = logger.reporters[subcategory or "default"]
- if not reporter then
- if subcategory then
- reporter = function(...)
- if not logger.state then
- subreport(category,subcategory,...)
- end
- end
- logger.reporters[subcategory] = reporter
- else
- local tag = category
- reporter = function(...)
- if not logger.state then
- report(category,...)
- end
- end
- logger.reporters.default = reporter
+ end
+ logger.reporters[subcategory]=reporter
+ else
+ local tag=category
+ reporter=function(...)
+ if not logger.state then
+ report(category,...)
end
+ end
+ logger.reporters.default=reporter
end
- return reporter
+ end
+ return reporter
end
-
-logs.new = logs.reporter -- for old times sake
-
--- context specific: this ends up in the macro stream
-
-local ctxreport = logs.writer
-
+logs.new=logs.reporter
+local ctxreport=logs.writer
function logs.setmessenger(m)
- ctxreport = m
+ ctxreport=m
end
-
function logs.messenger(category,subcategory)
- -- we need to avoid catcode mess (todo: fast context)
- if subcategory then
- return function(...)
- ctxreport(subdirect(category,subcategory,...))
- end
- else
- return function(...)
- ctxreport(direct(category,...))
- end
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
end
+ end
end
-
--- so far
-
local function setblocked(category,value)
- if category == true then
- -- lock all
- category, value = "*", true
- elseif category == false then
- -- unlock all
- category, value = "*", false
- elseif value == nil then
- -- lock selective
- value = true
- end
- if category == "*" then
- states = value
- for k, v in next, data do
- v.state = value
- end
- else
- states = utilities.parsers.settings_to_hash(category)
- for c, _ in next, states do
- if data[c] then
- v.state = value
- else
- c = escapedpattern(c,true)
- for k, v in next, data do
- if find(k,c) then
- v.state = value
- end
- end
- end
- end
- end
+ if category==true then
+ category,value="*",true
+ elseif category==false then
+ category,value="*",false
+ elseif value==nil then
+ value=true
+ end
+ if category=="*" then
+ states=value
+ for k,v in next,data do
+ v.state=value
+ end
+ else
+ states=utilities.parsers.settings_to_hash(category)
+ for c,_ in next,states do
+ if data[c] then
+ v.state=value
+ else
+ c=topattern(c,true,true)
+ for k,v in next,data do
+ if find(k,c) then
+ v.state=value
+ end
+ end
+ end
+ end
+ end
end
-
function logs.disable(category,value)
- setblocked(category,value == nil and true or value)
+ setblocked(category,value==nil and true or value)
end
-
function logs.enable(category)
- setblocked(category,false)
+ setblocked(category,false)
end
-
function logs.categories()
- return table.sortedkeys(data)
+ return table.sortedkeys(data)
end
-
function logs.show()
- local n, c, s, max = 0, 0, 0, 0
- for category, v in table.sortedpairs(data) do
- n = n + 1
- local state = v.state
- local reporters = v.reporters
- local nc = #category
- if nc > c then
- c = nc
- end
- for subcategory, _ in next, reporters do
- local ns = #subcategory
- if ns > c then
- s = ns
- end
- local m = nc + ns
- if m > max then
- max = m
- end
- end
- local subcategories = concat(table.sortedkeys(reporters),", ")
- if state == true then
- state = "disabled"
- elseif state == false then
- state = "enabled"
- else
- state = "unknown"
- end
- -- no new here
- report("logging","category: '%s', subcategories: '%s', state: '%s'",category,subcategories,state)
+ local n,c,s,max=0,0,0,0
+ for category,v in table.sortedpairs(data) do
+ n=n+1
+ local state=v.state
+ local reporters=v.reporters
+ local nc=#category
+ if nc>c then
+ c=nc
+ end
+ for subcategory,_ in next,reporters do
+ local ns=#subcategory
+ if ns>c then
+ s=ns
+ end
+ local m=nc+ns
+ if m>max then
+ max=m
+ end
+ end
+ local subcategories=concat(table.sortedkeys(reporters),", ")
+ if state==true then
+ state="disabled"
+ elseif state==false then
+ state="enabled"
+ else
+ state="unknown"
end
- report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
end
-
-directives.register("logs.blocked", function(v)
- setblocked(v,true)
+local delayed_reporters={}
+setmetatableindex(delayed_reporters,function(t,k)
+ local v=logs.reporter(k.name)
+ t[k]=v
+ return v
end)
-
-directives.register("logs.target", function(v)
- settarget(v)
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+directives.register("logs.blocked",function(v)
+ setblocked(v,true)
end)
-
--- tex specific loggers (might move elsewhere)
-
-local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
-
-local real, user, sub
-
+directives.register("logs.target",function(v)
+ settarget(v)
+end)
+local report_pages=logs.reporter("pages")
+local real,user,sub
function logs.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
--- real, user, sub = 0, 0, 0
-end
-
-local timing = false
-local starttime = nil
-local lasttime = nil
-
-trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
- starttime = os.clock()
- timing = true
+ real,user,sub=texcount.realpageno,texcount.userpageno,texcount.subpageno
+end
+local timing=false
+local starttime=nil
+local lasttime=nil
+trackers.register("pages.timing",function(v)
+ starttime=os.clock()
+ timing=true
end)
-
-function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
- if timing then
- local elapsed, average
- local stoptime = os.clock()
- if not lasttime or real < 2 then
- elapsed = stoptime
- average = stoptime
- starttime = stoptime
- else
- elapsed = stoptime - lasttime
- average = (stoptime - starttime) / (real - 1)
- end
- lasttime = stoptime
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- end
- else
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- end
- else
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- end
+function logs.stop_page_number()
+ if timing then
+ local elapsed,average
+ local stoptime=os.clock()
+ if not lasttime or real<2 then
+ elapsed=stoptime
+ average=stoptime
+ starttime=stoptime
else
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- report_pages("flushing realpage %s, userpage %s",real,user)
- end
- else
- report_pages("flushing realpage %s",real)
- end
- else
- report_pages("flushing page")
- end
+ elapsed=stoptime-lasttime
+ average=(stoptime-starttime)/(real-1)
+ end
+ lasttime=stoptime
+ if real<=0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user<=0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real<=0 then
+ report_pages("flushing page")
+ elseif user<=0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
end
- logs.flush()
+ end
+ logs.flush()
end
-
-logs.report_job_stat = statistics and statistics.showjobstat
-
-local report_files = logs.reporter("files")
-
-local nesting = 0
-local verbose = false
-local hasscheme = url.hasscheme
-
--- we don't have show_open and show_close callbacks yet
-
+local report_files=logs.reporter("files")
+local nesting=0
+local verbose=false
+local hasscheme=url.hasscheme
function logs.show_open(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- nesting = nesting + 1
- -- report_files("level %s, opening %s",nesting,name)
- -- else
- -- write(format("(%s",name)) -- tex adds a space
- -- end
- -- end
end
-
function logs.show_close(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, closing %s",nesting,name)
- -- nesting = nesting - 1
- -- else
- -- write(")") -- tex adds a space
- -- end
- -- end
end
-
function logs.show_load(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, loading %s",nesting+1,name)
- -- else
- -- write(format("(%s)",name))
- -- end
- -- end
end
-
--- there may be scripts out there using this:
-
-local simple = logs.reporter("comment")
-
-logs.simple = simple
-logs.simpleline = simple
-
--- obsolete
-
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
-
--- applications
-
+local simple=logs.reporter("comment")
+logs.simple=simple
+logs.simpleline=simple
+function logs.setprogram () end
+function logs.extendbanner() end
+function logs.reportlines () end
+function logs.reportbanner() end
+function logs.reportline () end
+function logs.simplelines () end
+function logs.help () end
+local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
+local p_newline=lpeg.patterns.newline
+local linewise=(
+ Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
+)^1
local function reportlines(t,str)
- if str then
- for line in gmatch(str,"(.-)[\n\r]") do
- t.report(line)
- end
- end
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
end
-
local function reportbanner(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- t.report()
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
end
-
local function reportversion(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ end
end
-
local function reporthelp(t,...)
- local helpinfo = t.helpinfo
- if type(helpinfo) == "string" then
- reportlines(t,helpinfo)
- elseif type(helpinfo) == "table" then
- local tags = { ... }
- for i=1,#tags do
- reportlines(t,t.helpinfo[tags[i]])
- if i < #tags then
- t.report()
- end
- end
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo)=="table" then
+ for i=1,select("#",...) do
+ reportlines(t,t.helpinfo[select(i,...)])
+ if i<n then
+ t.report()
+ end
end
+ end
end
-
local function reportinfo(t)
- t.report()
- reportlines(t,moreinfo)
-end
-
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+local reporters={
+ lines=reportlines,
+ banner=reportbanner,
+ version=reportversion,
+ help=reporthelp,
+ info=reportinfo,
+ export=reportexport,
+}
+local exporters={
+}
+logs.reporters=reporters
+logs.exporters=exporters
function logs.application(t)
- t.name = t.name or "unknown"
- t.banner = t.banner
- t.report = logs.reporter(t.name)
- t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
- t.identify = function() reportbanner(t) end
- t.version = function() reportversion(t) end
- return t
+ t.name=t.name or "unknown"
+ t.banner=t.banner
+ t.moreinfo=moreinfo
+ t.report=logs.reporter(t.name)
+ t.help=function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export=function(...)
+ reporters.export(t,...)
+ end
+ t.identify=function()
+ reporters.banner(t)
+ end
+ t.version=function()
+ reporters.version(t)
+ end
+ return t
end
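
A small sketch of the application wrapper defined above, with an invented tool name and help text, assuming the logging module is loaded:

    local application = logs.application {
        name     = "mytool",
        banner   = "mytool 1.0 (illustrative)",
        helpinfo = [[
    --run        process the given file
    --verbose    print extra diagnostics
    ]],
    }

    application.identify() -- prints the banner
    application.help()     -- banner, the help lines, then the general info block
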
-
--- somewhat special
-
--- logging to a file
-
-
function logs.system(whereto,process,jobname,category,...)
- local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
- for i=1,10 do
-        local f = io.open(whereto,"a") -- we can consider keeping the file open
- if f then
- f:write(message)
- f:close()
- break
- else
- sleep(0.1)
- end
+ local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f=io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
end
+ end
end
-
-local report_system = logs.reporter("system","logs")
-
+local report_system=logs.reporter("system","logs")
function logs.obsolete(old,new)
- local o = loadstring("return " .. new)()
- if type(o) == "function" then
- return function(...)
- report_system("function %s is obsolete, use %s",old,new)
- loadstring(old .. "=" .. new .. " return ".. old)()(...)
- end
- elseif type(o) == "table" then
- local t, m = { }, { }
- m.__index = function(t,k)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- return o[k]
- end
- m.__newindex = function(t,k,v)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- o[k] = v
- end
- if libraries then
- libraries.obsolete[old] = t -- true
- end
- setmetatable(t,m)
- return t
- end
+ local o=loadstring("return "..new)()
+ if type(o)=="function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old.."="..new.." return "..old)()(...)
+ end
+ elseif type(o)=="table" then
+ local t,m={},{}
+ m.__index=function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ return o[k]
+ end
+ m.__newindex=function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ o[k]=v
+ end
+ if libraries then
+ libraries.obsolete[old]=t
+ end
+ setmetatable(t,m)
+ return t
+ end
end
-
if utilities then
- utilities.report = report_system
+ utilities.report=report_system
end
-
if tex and tex.error then
- function logs.texerrormessage(...) -- for the moment we put this function here
- tex.error(format(...), { })
- end
+ function logs.texerrormessage(...)
+ tex.error(format(...),{})
+ end
else
- function logs.texerrormessage(...)
- print(format(...))
- end
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
end
-
--- do we still need io.flush then?
-
io.stdout:setvbuf('no')
io.stderr:setvbuf('no')
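
A short sketch of the reporter interface from the logging code above; the category names are illustrative:

    local report_demo = logs.reporter("demo")            -- category only
    local report_sub  = logs.reporter("demo","startup")  -- category plus subcategory

    report_demo("plain message")
    report_demo("value of %s is %s","foo",123)           -- format string expanded per call
    report_sub ("initialized in %s seconds","0.042")
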
@@ -6366,487 +6685,1330 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-pro'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
-local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+-- original size: 5791, stripped down to: 4540
--- The protection implemented here is probably not that tight but good enough to catch
--- problems due to naive usage.
---
--- There's a more extensive version (trac-xxx.lua) that supports nesting.
---
--- This will change when we have _ENV in lua 5.2+
+if not modules then modules={} end modules ['trac-inf']={
+ version=1.001,
+ comment="companion to trac-inf.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+local format,lower=string.format,string.lower
+local concat=table.concat
+local clock=os.gettimeofday or os.clock
+statistics=statistics or {}
+local statistics=statistics
+statistics.enable=true
+statistics.threshold=0.01
+local statusinfo,n,registered,timers={},0,{},{}
+table.setmetatableindex(timers,function(t,k)
+ local v={ timing=0,loadtime=0 }
+ t[k]=v
+ return v
+end)
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+local function resettiming(instance)
+ timers[instance or "notimer"]={ timing=0,loadtime=0 }
+end
+local function starttiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing or 0
+ if it==0 then
+ timer.starttime=clock()
+ if not timer.loadtime then
+ timer.loadtime=0
+ end
+ end
+ timer.timing=it+1
+end
+local function stoptiming(instance,report)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing
+ if it>1 then
+ timer.timing=it-1
+ else
+ local starttime=timer.starttime
+ if starttime then
+ local stoptime=clock()
+ local loadtime=stoptime-starttime
+ timer.stoptime=stoptime
+ timer.loadtime=timer.loadtime+loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
+ end
+ timer.timing=0
+ return loadtime
+ end
+ end
+ return 0
+end
+local function elapsed(instance)
+ if type(instance)=="number" then
+ return instance or 0
+ else
+ local timer=timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+local function elapsedindeed(instance)
+ return elapsed(instance)>statistics.threshold
+end
+local function elapsedseconds(instance,rest)
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s",elapsed(instance),rest or "")
+ end
+end
+statistics.hastiming=hastiming
+statistics.resettiming=resettiming
+statistics.starttiming=starttiming
+statistics.stoptiming=stoptiming
+statistics.elapsed=elapsed
+statistics.elapsedtime=elapsedtime
+statistics.elapsedindeed=elapsedindeed
+statistics.elapsedseconds=elapsedseconds
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc)=="function" then
+ local rt=registered[tag] or (#statusinfo+1)
+ statusinfo[rt]={ tag,fnc }
+ registered[tag]=rt
+ if #tag>n then n=#tag end
+ end
+end
+local report=logs.reporter("mkiv lua stats")
+function statistics.show()
+ if statistics.enable then
+ local register=statistics.register
+ register("luatex banner",function()
+ return lower(status.banner)
+ end)
+ register("control sequences",function()
+ return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
+ end)
+ register("callbacks",function()
+ local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
+ end)
+ if jit then
+ local status={ jit.status() }
+ if status[1] then
+ register("luajit status",function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ register("current memory usage",statistics.memused)
+ register("runtime",statistics.runtime)
+ logs.newline()
+ for i=1,#statusinfo do
+ local s=statusinfo[i]
+ local r=s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ statistics.enable=false
+ end
+end
+function statistics.memused()
+ local round=math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
+end
+starttiming(statistics)
+function statistics.formatruntime(runtime)
+ return format("%s seconds",runtime)
+end
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+local report=logs.reporter("system")
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+commands=commands or {}
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ context(elapsedtime(name or "whatever"))
+end
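
A minimal sketch of the timing helpers in the trac-inf code above; the timer name myfeature is arbitrary:

    -- time a piece of work and register it for the final statistics overview
    statistics.starttiming("myfeature")
    -- ... the actual work goes here ...
    statistics.stoptiming("myfeature")

    statistics.register("myfeature loading", function()
        return statistics.elapsedseconds("myfeature") -- nil when below statistics.threshold
    end)

    statistics.show() -- reports all registered entries once, then disables itself
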
-local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
-local report_system = logs.reporter("system","protection")
+end -- of closure
-namespaces = namespaces or { }
-local namespaces = namespaces
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
-local registered = { }
+-- original size: 5773, stripped down to: 3453
+if not modules then modules={} end modules ['trac-pro']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
+local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
+local report_system=logs.reporter("system","protection")
+namespaces=namespaces or {}
+local namespaces=namespaces
+local registered={}
local function report_index(k,name)
- if trace_namespaces then
- report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("reference to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
end
-
local function report_newindex(k,name)
- if trace_namespaces then
- report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("assignment to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
end
-
local function register(name)
- local data = name == "global" and _G or _G[name]
- if not data then
- return -- error
- end
- registered[name] = data
- local m = getmetatable(data)
- if not m then
- m = { }
- setmetatable(data,m)
- end
- local index, newindex = { }, { }
- m.__saved__index = m.__index
- m.__no__index = function(t,k)
- if not index[k] then
- index[k] = true
- report_index(k,name)
- end
- return nil
- end
- m.__saved__newindex = m.__newindex
- m.__no__newindex = function(t,k,v)
- if not newindex[k] then
- newindex[k] = true
- report_newindex(k,name)
- end
- rawset(t,k,v)
+ local data=name=="global" and _G or _G[name]
+ if not data then
+ return
+ end
+ registered[name]=data
+ local m=getmetatable(data)
+ if not m then
+ m={}
+ setmetatable(data,m)
+ end
+ local index,newindex={},{}
+ m.__saved__index=m.__index
+ m.__no__index=function(t,k)
+ if not index[k] then
+ index[k]=true
+ report_index(k,name)
end
- m.__protection__depth = 0
-end
-
-local function private(name) -- maybe save name
- local data = registered[name]
+ return nil
+ end
+ m.__saved__newindex=m.__newindex
+ m.__no__newindex=function(t,k,v)
+ if not newindex[k] then
+ newindex[k]=true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth=0
+end
+local function private(name)
+ local data=registered[name]
+ if not data then
+ data=_G[name]
if not data then
- data = _G[name]
- if not data then
- data = { }
- _G[name] = data
- end
- register(name)
+ data={}
+ _G[name]=data
end
- return data
+ register(name)
+ end
+ return data
end
-
local function protect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 0 then
- m.__protection__depth = pd + 1
- else
- m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex
- m.__index, m.__newindex = m.__no__index, m.__no__newindex
- m.__protection__depth = 1
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>0 then
+ m.__protection__depth=pd+1
+ else
+ m.__save_d_index,m.__saved__newindex=m.__index,m.__newindex
+ m.__index,m.__newindex=m.__no__index,m.__no__newindex
+ m.__protection__depth=1
+ end
end
-
local function unprotect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 1 then
- m.__protection__depth = pd - 1
- else
- m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
- m.__protection__depth = 0
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>1 then
+ m.__protection__depth=pd-1
+ else
+ m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
+ m.__protection__depth=0
+ end
end
-
local function protectall()
- for name, _ in next, registered do
- if name ~= "global" then
- protect(name)
- end
+ for name,_ in next,registered do
+ if name~="global" then
+ protect(name)
end
+ end
end
-
local function unprotectall()
- for name, _ in next, registered do
- if name ~= "global" then
- unprotect(name)
- end
- end
-end
+ for name,_ in next,registered do
+ if name~="global" then
+ unprotect(name)
+ end
+ end
+end
+namespaces.register=register
+namespaces.private=private
+namespaces.protect=protect
+namespaces.unprotect=unprotect
+namespaces.protectall=protectall
+namespaces.unprotectall=unprotectall
+namespaces.private("namespaces") registered={} register("global")
+directives.register("system.protect",function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+directives.register("system.checkglobals",function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
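
A brief sketch of the namespace guard defined above; the mylib table is hypothetical:

    -- allocate a registered table and protect it against stray lookups and assignments
    local mylib = namespaces.private("mylib") -- creates and registers _G.mylib if needed
    mylib.answer = 42

    namespaces.protect("mylib")
    local x = mylib.unknown    -- reported once as a reference in a protected namespace
    mylib.extra = true         -- reported once, but the assignment itself still happens
    namespaces.unprotect("mylib")
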
-namespaces.register = register -- register when defined
-namespaces.private = private -- allocate and register if needed
-namespaces.protect = protect
-namespaces.unprotect = unprotect
-namespaces.protectall = protectall
-namespaces.unprotectall = unprotectall
-namespaces.private("namespaces") registered = { } register("global") -- unreachable
+end -- of closure
-directives.register("system.protect", function(v)
- if v then
- protectall()
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lua"] = package.loaded["util-lua"] or true
+
+-- original size: 12575, stripped down to: 8700
+
+if not modules then modules={} end modules ['util-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment="the strip code is written by Peter Cawley",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
+local load,loadfile,type=load,loadfile,type
+utilities=utilities or {}
+utilities.lua=utilities.lua or {}
+local luautilities=utilities.lua
+local report_lua=logs.reporter("system","lua")
+local tracestripping=false
+local forcestupidcompile=true
+luautilities.stripcode=true
+luautilities.alwaysstripcode=false
+luautilities.nofstrippedchunks=0
+luautilities.nofstrippedbytes=0
+local strippedchunks={}
+luautilities.strippedchunks=strippedchunks
+luautilities.suffixes={
+ tma="tma",
+ tmc=jit and "tmb" or "tmc",
+ lua="lua",
+ luc=jit and "lub" or "luc",
+ lui="lui",
+ luv="luv",
+ luj="luj",
+ tua="tua",
+ tuc="tuc",
+}
+if jit or status.luatex_version>=74 then
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ if code and code~="" then
+ code=load(code)
+ if code then
+ code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code~="" then
+ register(name)
+ io.savedata(lucfile,code)
+ return true,0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false,0
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ name=name or fullname
+ local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code=dump(code,true)
+ end
+ return load(code),0
+ end
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=stupidcompile(luafile,lucfile,strip~=false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+ function luautilities.loadstripped(...)
+ local l=load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
+else
+ local function register(name,before,after)
+ local delta=before-after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ luautilities.nofstrippedbytes=luautilities.nofstrippedbytes+delta
+ return delta
+ end
+ local strip_code_pc
+ if _MAJORVERSION==5 and _MINORVERSION==1 then
+ strip_code_pc=function(dump,name)
+ local before=#dump
+ local version,format,endian,int,size,ins,num=byte(dump,5,11)
+ local subint
+ if endian==1 then
+ subint=function(dump,i,l)
+ local val=0
+ for n=l,1,-1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ else
+ subint=function(dump,i,l)
+ local val=0
+ for n=1,l,1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ end
+ local strip_function
+ strip_function=function(dump)
+ local count,offset=subint(dump,1,size)
+ local stripped,dirty=rep("\0",size),offset+count
+ offset=offset+count+int*2+4
+ offset=offset+int+subint(dump,offset,int)*ins
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ local t
+ t,offset=subint(dump,offset,1)
+ if t==1 then
+ offset=offset+1
+ elseif t==4 then
+ offset=offset+size+subint(dump,offset,size)
+ elseif t==3 then
+ offset=offset+num
+ end
+ end
+ count,offset=subint(dump,offset,int)
+ stripped=stripped..sub(dump,dirty,offset-1)
+ for n=1,count do
+ local proto,off=strip_function(sub(dump,offset,-1))
+ stripped,offset=stripped..proto,offset+off-1
+ end
+ offset=offset+subint(dump,offset,int)*int+int
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size+int*2
+ end
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size
+ end
+ stripped=stripped..rep("\0",int*3)
+ return stripped,offset
+ end
+ dump=sub(dump,1,12)..strip_function(sub(dump,13,-1))
+ local after=#dump
+ local delta=register(name,before,after)
+ return dump,delta
+ end
+ else
+ strip_code_pc=function(dump,name)
+ return dump,0
+ end
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+  local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip then
+ local code,n=strip_code_pc(dump(code),name)
+ return load(code),n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
else
- unprotectall()
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ local n=0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code,n=strip_code_pc(dump(code),name)
+ end
+ return load(code),n
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ local n=0
+ if code and code~="" then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code=dump(code)
+ if strip then
+ code,n=strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile)
+ end
+ if code and code~="" then
+ io.savedata(lucfile,code)
+ end
end
-end)
-
-directives.register("system.checkglobals", function(v)
- if v then
- report_system("enabling global namespace guard")
- protect("global")
+ return n
+ end
+ local luac_normal="texluac -o %q %q"
+ local luac_strip="texluac -s -o %q %q"
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=false
+ if strip~=false then
+ strip=true
+ end
+ if forcestupidcompile then
+ fallback=true
+ elseif strip then
+ done=os.spawn(format(luac_strip,lucfile,luafile))==0
else
- report_system("disabling global namespace guard")
- unprotect("global")
+ done=os.spawn(format(luac_normal,lucfile,luafile))==0
end
-end)
+ if not done and fallback then
+ local n=stupidcompile(luafile,lucfile,strip)
+ if n>0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup=false
+ done=true
+ end
+ if done and cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
+ luautilities.loadstripped=loadstring
+end
--- dummy section (will go to luat-dum.lua)
+end -- of closure
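+-- A rough usage sketch for the util-lua helpers above (file names are
+-- hypothetical): on LuaJIT or LuaTeX >= 0.74 compilation goes through
+-- load()/dump(), otherwise texluac is spawned with stupidcompile() as a
+-- fallback.
+--
+--   local luautilities = utilities.lua
+--   -- compile to bytecode, strip debug info, remove the source on success:
+--   luautilities.compile("char-def.lua","char-def.luc",true,true)
+--   -- load an already located file, optionally stripping it:
+--   local chunk = luautilities.loadedluacode("char-def.lua",true,"char-def")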
+do -- create closure to overcome 200 locals limit
+package.loaded["util-deb"] = package.loaded["util-deb"] or true
+-- original size: 3708, stripped down to: 2568
+if not modules then modules={} end modules ['util-deb']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local debug=require "debug"
+local getinfo=debug.getinfo
+local type,next,tostring=type,next,tostring
+local format,find=string.format,string.find
+local is_boolean=string.is_boolean
+utilities=utilities or {}
+local debugger=utilities.debugger or {}
+utilities.debugger=debugger
+local counters={}
+local names={}
+local report=logs.reporter("debugger")
+local function hook()
+ local f=getinfo(2)
+ if f then
+ local n="unknown"
+ if f.what=="C" then
+ n=f.name or '<anonymous>'
+ if not names[n] then
+ names[n]=format("%42s",n)
+ end
+ else
+ n=f.name or f.namewhat or f.what
+ if not n or n=="" then
+ n="?"
+ end
+ if not names[n] then
+ names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n]=(counters[n] or 0)+1
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer=printer or report
+ threshold=threshold or 0
+ local total,grandtotal,functions=0,0,0
+ local dataset={}
+ for name,count in next,counters do
+ dataset[#dataset+1]={ name,count }
+ end
+ table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
+ for i=1,#dataset do
+ local d=dataset[i]
+ local name=d[1]
+ local count=d[2]
+ if count>threshold and not find(name,"for generator") then
+ printer(format("%8i %s\n",count,names[name]))
+ total=total+count
+ end
+ grandtotal=grandtotal+count
+ functions=functions+1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n",functions))
+ printer(format("total : % 10i\n",total))
+ printer(format("grand total: % 10i\n",grandtotal))
+ printer(format("threshold : % 10i\n",threshold))
+end
+function debugger.savestats(filename,threshold)
+ local f=io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+function debugger.disable()
+ debug.sethook()
+end
+function traceback()
+ local level=1
+ while true do
+ local info=debug.getinfo(level,"Sl")
+ if not info then
+ break
+ elseif info.what=="C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level=level+1
+ end
+end
end -- of closure
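+-- A small sketch of the call-counting debugger defined above (threshold and
+-- file name are made up):
+--
+--   local debugger = utilities.debugger
+--   debugger.enable()                     -- install the call hook
+--   -- ... run the code to be profiled ...
+--   debugger.disable()
+--   debugger.showstats(print,100)         -- entries called more than 100 times
+--   debugger.savestats("calls.log",100)   -- or write the report to a file
+--
+-- showstats() sorts by call count; entries named "for generator" are skipped.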
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
-local report_lua = logs.reporter("resolvers","lua")
+-- original size: 7294, stripped down to: 5798
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+if not modules then modules={} end modules ['util-mrg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gsub,format=string.gsub,string.format
+local concat=table.concat
+local type,next=type,next
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+utilities=utilities or {}
+local merger=utilities.merger or {}
+utilities.merger=merger
+merger.strip_comment=true
+local report=logs.reporter("system","merge")
+utilities.report=report
+local m_begin_merge="begin library merge"
+local m_end_merge="end library merge"
+local m_begin_closure="do -- create closure to overcome 200 locals limit"
+local m_end_closure="end -- of closure"
+local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
+local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
+local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
+local m_report=[[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
+local function self_fake()
+ return m_faked
+end
+local function self_nothing()
+ return ""
+end
+local function self_load(name)
+ local data=io.loaddata(name) or ""
+ if data=="" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+local space=patterns.space
+local eol=patterns.newline
+local equals=P("=")^0
+local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
+local close=P("]")*C(equals)*P("]")
+local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
+local longstring=open*(1-closeeq)^0*close
+local quoted=patterns.quoted
+local emptyline=space^0*eol
+local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
+local operator2=S("*+/")
+local operator3=S("-")
+local separator=S(",;")
+local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
+local strings=quoted
+local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
+local longstr=longstring
+local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
+local pack=((eol+space)^0/"")*operator1*((eol+space)^0/"")+((eol+space)^0/"")*operator2*((space)^0/"")+((eol+space)^1/"")*operator3*((space)^1/"")+((space)^0/"")*separator*((space)^0/"")
+local lines=emptyline^2/"\n"
+local spaces=(space*space)/" "
+local compact=Cs ((
+ ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
+)^1 )
+local strip=Cs((emptyline^2/"\n"+1)^0)
+local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+local function self_compact(data)
+ local delta=0
+ if merger.strip_comment then
+ local before=#data
+ data=lpegmatch(compact,data)
+ data=lpegmatch(strip,data)
+ local after=#data
+ delta=before-after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data,delta
+end
+local function self_save(name,data)
+ if data~="" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+local function self_swap(data,code)
+ return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
+end
+local function self_libs(libs,list)
+ local result,f,frozen,foundpath={},nil,false,nil
+ result[#result+1]="\n"
+ if type(libs)=='string' then libs={ libs } end
+ if type(list)=='string' then list={ list } end
+ for i=1,#libs do
+ local lib=libs[i]
+ for j=1,#list do
+ local pth=gsub(list[j],"\\","/")
+ report("checking library path %a",pth)
+ local name=pth.."/"..lib
+ if lfs.isfile(name) then
+ foundpath=pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right,wrong,original,stripped={},{},0,0
+ for i=1,#libs do
+ local lib=libs[i]
+ local fullname=foundpath.."/"..lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded=file.nameonly(lib)
+ local data=io.loaddata(fullname,true)
+ original=original+#data
+ local data,delta=self_compact(data)
+ right[#right+1]=lib
+ result[#result+1]=m_begin_closure
+ result[#result+1]=format(m_preloaded,preloaded,preloaded)
+ result[#result+1]=data
+ result[#result+1]=m_end_closure
+ stripped=stripped+delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1]=lib
+ end
+ end
+ right=#right>0 and concat(right," ") or "-"
+ wrong=#wrong>0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1]=format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result,"\n\n")
+end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
--- precautions
+end -- of closure
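+-- A hedged sketch of how the merger above is driven when (re)building a merged
+-- script like this one (library names and paths are hypothetical):
+--
+--   local merger = utilities.merger
+--   merger.selfmerge("mtxrun.lua",
+--     { "util-lua.lua","util-deb.lua" },        -- libraries to embed
+--     { "tex/texmf-dist/tex/context/base" },    -- search paths
+--     "mtxrun-merged.lua")                      -- optional target file
+--   merger.selfclean("mtxrun-merged.lua")       -- strip the embedded section
+--
+-- Each library is compacted (comments and excess whitespace removed) and
+-- wrapped in the closure markers seen throughout this file.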
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+do -- create closure to overcome 200 locals limit
-function os.setlocale()
- -- no way you can mess with it
-end
+package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- dirty tricks
+-- original size: 5655, stripped down to: 3242
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
+if not modules then modules={} end modules ['util-tpl']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities.templates=utilities.templates or {}
+local templates=utilities.templates
+local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
+local report_template=logs.reporter("template")
+local tostring=tostring
+local format,sub=string.format,string.sub
+local P,C,Cs,Carg,lpegmatch=lpeg.P,lpeg.C,lpeg.Cs,lpeg.Carg,lpeg.match
+local replacer
+local function replacekey(k,t,how,recursive)
+ local v=t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v=tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+local sqlescape=lpeg.replacer {
+ { "'","''" },
+ { "\\","\\\\" },
+ { "\r\n","\\n" },
+ { "\r","\\n" },
+}
+local sqlquotedescape=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local escapers={
+ lua=function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+local quotedescapers={
+ lua=function(s)
+ return format("%q",s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
+lpeg.patterns.sqlescape=sqlescape
+lpeg.patterns.sqlquotedescape=sqlquotedescape
+local luaescaper=escapers.lua
+local quotedluaescaper=quotedescapers.lua
+local function replacekeyunquoted(s,t,how,recurse)
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local function replacekeyquoted(s,t,how,recurse)
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local single=P("%")
+local double=P("%%")
+local lquoted=P("%[")
+local rquoted=P("]%")
+local lquotedq=P("%(")
+local rquotedq=P(")%")
+local escape=double/'%%'
+local nosingle=single/''
+local nodouble=double/''
+local nolquoted=lquoted/''
+local norquoted=rquoted/''
+local nolquotedq=lquotedq/''
+local norquotedq=rquotedq/''
+local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local any=P(1)
+ replacer=Cs((unquoted+quoted+escape+key+any)^0)
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+templates.replace=replace
+function templates.load(filename,mapping,how,recurse)
+ local data=io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping=t
+ end
+ for k,v in next,t do
+ t[k]=replace(v,mapping,how,recurse)
+ end
+ return t
end
--- environment
-environment = environment or { }
-local environment = environment
+end -- of closure
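+-- A short sketch of the template replacer above: %key% inserts the raw value,
+-- %(key)% the quoted escaped value, %[key]% the unquoted escaped value, and
+-- %% is passed through untouched (mapping and strings are made up):
+--
+--   local templates = utilities.templates
+--   templates.replace("hello %name%",{ name = "world" })
+--   --> hello world
+--   templates.replace("select * from %[table]% where name = %(name)%",
+--     { table = "users", name = "o'hara" },"sql")
+--   --> select * from users where name = 'o''hara'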
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
+do -- create closure to overcome 200 locals limit
-local mt = {
- __index = function(_,k)
- if k == "version" then
- local version = tex.toks and tex.toks.contextversiontoks
- if version and version ~= "" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k == "jobname" or k == "formatname" then
- local name = tex and tex[k]
- if name or name== "" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k == "outputfilename" then
- local name = environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
+package.loaded["util-env"] = package.loaded["util-env"] or true
-setmetatable(environment,mt)
+-- original size: 7702, stripped down to: 4701
+if not modules then modules={} end modules ['util-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate,mark=utilities.storage.allocate,utilities.storage.mark
+local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
+local unquoted,quoted=string.unquoted,string.quoted
+local concat,insert,remove=table.concat,table.insert,table.remove
+environment=environment or {}
+local environment=environment
+os.setlocale(nil,nil)
+function os.setlocale()
+end
+local validengines=allocate {
+ ["luatex"]=true,
+ ["luajittex"]=true,
+}
+local basicengines=allocate {
+ ["luatex"]="luatex",
+ ["texlua"]="luatex",
+ ["texluac"]="luatex",
+ ["luajittex"]="luajittex",
+ ["texluajit"]="luajittex",
+}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
+environment.validengines=validengines
+environment.basicengines=basicengines
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
+ end
+ local originalzero=file.basename(arg[0])
+  local specialmapping={ luatools="base" }
+ if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
+ arg[0]=specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+end
+environment.arguments=allocate()
+environment.files=allocate()
+environment.sortedflags=nil
function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
+ local arguments,files={},{}
+ environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
+ for index=1,#arg do
+ local argument=arg[index]
+ if index>0 then
+ local flag,value=match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=unquoted(value or "")
+ else
+ flag=match(argument,"^%-+(.+)")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=true
+ else
+ files[#files+1]=argument
end
+ end
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+ end
+ environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
end
-
function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
+ environment.arguments[name]=value
+end
+function environment.getargument(name,partial)
+ local arguments,sortedflags=environment.arguments,environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags=allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k]="^"..sortedflags[k]
+ end
+ environment.sortedflags=sortedflags
+ end
+ for k=1,#sortedflags do
+ local v=sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+environment.argument=environment.getargument
+function environment.splitarguments(separator)
+ local done,before,after=false,{},{}
+ local originalarguments=environment.originalarguments
+ for k=1,#originalarguments do
+ local v=originalarguments[k]
+ if not done and v==separator then
+ done=true
+ elseif done then
+ after[#after+1]=v
+ else
+ before[#before+1]=v
end
- return before, after
+ end
+ return before,after
end
-
function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
+ arg=arg or environment.originalarguments
+ if noquote and #arg==1 then
+ local a=arg[1]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ return a
+ elseif #arg>0 then
+ local result={}
+ for i=1,#arg do
+ local a=arg[i]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ a=gsub(a,'"','\\"')
+ if find(a," ") then
+ result[#result+1]=quoted(a)
+ else
+ result[#result+1]=a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
end
-
-
if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
+ local newarg,instring={},false
+ for index=1,#arg do
+ local argument=arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1]=gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring=true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
+ instring=false
+ elseif instring then
+ newarg[#newarg]=newarg[#newarg].." "..argument
+ else
+ newarg[#newarg+1]=argument
end
+ end
+ for i=1,-5,-1 do
+ newarg[i]=arg[i]
+ end
+ environment.initializearguments(newarg)
+ environment.originalarguments=mark(newarg)
+ environment.rawarguments=mark(arg)
+ arg={}
+end
- environment.initializearguments(newarg)
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
+end -- of closure
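+-- A brief sketch of the argument handling above (flags are invented); values
+-- are unquoted, a leading "c:" is stripped from flag names, and partial
+-- lookups let abbreviated flags on the command line match longer queries:
+--
+--   -- given: mtxrun --pattern="lm*" --c:verb list
+--   environment.initializearguments(arg)
+--   environment.getargument("pattern")       --> "lm*"
+--   environment.getargument("verb")          --> true
+--   environment.getargument("verbose",true)  --> true  (matches --verb)
+--   environment.files                        --> { "list" }
+--
+-- environment.argument stays available as an alias for getargument.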
- arg = { } -- prevent duplicate handling
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- weird place ... depends on a not yet loaded module
+-- original size: 5874, stripped down to: 4184
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="kind" then
+ local kind=tex.toks and tex.toks.contextkindtoks
+ if kind and kind~="" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
- return resolvers.findfile(filename,'tex')
+ return resolvers.findfile(filename,'tex')
+end
+function environment.luafile(filename)
+ local resolved=resolvers.findfile(filename,'tex') or ""
+ if resolved~="" then
+ return resolved
+ end
+ resolved=resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved~="" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
+local function strippable(filename)
+ if stripindeed then
+ local modu=modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
end
-
-function environment.luafile(filename)
- local resolved = resolvers.findfile(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
+function environment.luafilechunk(filename,silent)
+ filename=file.replacesuffix(filename,"lua")
+ local fullname=environment.luafile(filename)
+ if fullname and fullname~="" then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if trace_locating then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
end
- resolved = resolvers.findfile(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %a",filename)
end
- return resolvers.findfile(filename,'luatexlibs') or ""
-end
-
-environment.loadedluacode = loadfile -- can be overloaded
-
-function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
- if trace_locating then
- report_lua("loading file %s%s", fullname, not data and " failed" or "")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
- end
- return data
- else
+ return nil
+ end
+end
+function environment.loadluafile(filename,version)
+ local lucname,luaname,chunk
+ local basename=file.removesuffix(filename)
+ if basename==filename then
+ luaname=file.addsuffix(basename,luasuffixes.lua)
+ lucname=file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname=basename
+ lucname=nil
+ end
+ local fullname=(lucname and environment.luafile(lucname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ local v=version
+ if modules and modules[filename] then
+ v=modules[filename].version
+ elseif versions and versions[filename] then
+ v=versions[filename]
+ end
+ if v==version then
+ return true
+ else
if trace_locating then
- report_lua("unknown file %s", filename)
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
end
- return nil
- end
-end
-
--- the next ones can use the previous ones / combine
-
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
+ environment.loadluafile(filename)
+ end
else
- lucname, luaname = nil, basename -- forced suffix
+ return true
end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- report_lua("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
+ end
+ fullname=(luaname and environment.luafile(luaname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- report_lua("unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
+ chunk=loadfile(fullname)
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
end
- return false
+ end
+ return false
end
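+-- A compact sketch of the loaders above (module name and version invented):
+-- luafile() searches the 'tex', 'texmfscripts' and 'luatexlibs' resolver paths
+-- in that order, and luafilechunk() loads through luautilities.loadedluacode()
+-- so bytecode stripping applies.
+--
+--   -- prefer char-def.luc, fall back to char-def.lua, check the version
+--   -- recorded in the modules table:
+--   environment.loadluafile("char-def",1.001)
+--   -- locate and load a lua file as a chunk:
+--   local chunk = environment.luafilechunk("char-def")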
@@ -6854,1370 +8016,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-tab'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
--- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
--- trouble
-
--- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
--- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
--- of work so we delay this till we cleanup
-
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
-
---[[ldx--
-<p>The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized to we
-went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.</p>
+package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
-<p>Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.</p>
-
-<p>I might even decide to reimplement the parser using the latest <l n='lpeg'/> trickery
-as the current variant was written when <l n='lpeg'/> showed up and it's easier now to
-build tables in one go.</p>
---ldx]]--
-
-xml = xml or { }
-local xml = xml
-
-
-local utf = unicode.utf8
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
-local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
-local lpegmatch = lpeg.match
-local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
+-- original size: 42495, stripped down to: 26647
---[[ldx--
-<p>First a hack to enable namespace resolving. A namespace is characterized by
-a <l n='url'/>. The following function associates a namespace prefix with a
-pattern. We use <l n='lpeg'/>, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.</p>
---ldx]]--
-
-xml.xmlns = xml.xmlns or { }
-
-local check = P(false)
-local parse = check
-
---[[ldx--
-<p>The next function associates a namespace prefix with an <l n='url'/>. This
-normally happens independent of parsing.</p>
-
-<typing>
-xml.registerns("mml","mathml")
-</typing>
---ldx]]--
-
-function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- check = check + C(P(lower(pattern))) / namespace
- parse = P { P(check) + 1 * V(1) }
+if not modules then modules={} end modules ['lxml-tab']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+xml=xml or {}
+local xml=xml
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local utfchar=utf.char
+local lpegmatch=lpeg.match
+local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
+local formatters=string.formatters
+xml.xmlns=xml.xmlns or {}
+local check=P(false)
+local parse=check
+function xml.registerns(namespace,pattern)
+ check=check+C(P(lower(pattern)))/namespace
+ parse=P { P(check)+1*V(1) }
end
-
---[[ldx--
-<p>The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-<l n='url'/>. This used for attributes like <t>xmlns:m</t>.</p>
-
-<typing>
-xml.checkns("m","http://www.w3.org/mathml")
-</typing>
---ldx]]--
-
function xml.checkns(namespace,url)
- local ns = lpegmatch(parse,lower(url))
- if ns and namespace ~= ns then
- xml.xmlns[namespace] = ns
- end
+ local ns=lpegmatch(parse,lower(url))
+ if ns and namespace~=ns then
+ xml.xmlns[namespace]=ns
+ end
end
-
---[[ldx--
-<p>Next we provide a way to turn an <l n='url'/> into a registered
-namespace. This used for the <t>xmlns</t> attribute.</p>
-
-<typing>
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-</typing>
-
-This returns <t>mml</t>.
---ldx]]--
-
function xml.resolvens(url)
- return lpegmatch(parse,lower(url)) or ""
-end
-
---[[ldx--
-<p>A namespace in an element can be remapped onto the registered
-one efficiently by using the <t>xml.xmlns</t> table.</p>
---ldx]]--
-
---[[ldx--
-<p>This version uses <l n='lpeg'/>. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte <l n='xml'/> files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the <l n='lpeg'/> implementation we got that down to less 7.3 seconds. Loading the 14
-<l n='context'/> interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.</p>
-
-<p>Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprice that later on have to dedicate quite some
-<l n='lpeg'/> code to it.</p>
-
-<typing>
-<!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
-<!DOCTYPE Something PUBLIC "... ..." "..." >
-<!DOCTYPE Something SYSTEM "... ..." [ ... ] >
-<!DOCTYPE Something SYSTEM "... ..." >
-<!DOCTYPE Something [ ... ] >
-<!DOCTYPE Something >
-</typing>
-
-<p>The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:</p>
-
-<typing>
-local x = xml.convert(somestring)
-</typing>
-
-<p>An optional second boolean argument tells this function not to create a root
-element.</p>
-
-<p>Valid entities are:</p>
-
-<typing>
-<!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
-<!ENTITY xxxx PUBLIC "yyyy" >
-<!ENTITY xxxx "yyyy" >
-</typing>
---ldx]]--
-
--- not just one big nested table capture (lpeg overflow)
-
-local nsremap, resolvens = xml.xmlns, xml.resolvens
-
-local stack = { }
-local top = { }
-local dt = { }
-local at = { }
-local xmlns = { }
-local errorstr = nil
-local entities = { }
-local strip = false
-local cleanup = false
-local utfize = false
-local resolve_predefined = false
-local unify_predefined = false
-
-local dcache = { }
-local hcache = { }
-local acache = { }
-
-local mt = { }
-
+ return lpegmatch(parse,lower(url)) or ""
+end
+local nsremap,resolvens=xml.xmlns,xml.resolvens
+local stack={}
+local top={}
+local dt={}
+local at={}
+local xmlns={}
+local errorstr=nil
+local entities={}
+local strip=false
+local cleanup=false
+local utfize=false
+local resolve_predefined=false
+local unify_predefined=false
+local dcache={}
+local hcache={}
+local acache={}
+local mt={}
local function initialize_mt(root)
- mt = { __index = root } -- will be redefined later
+ mt={ __index=root }
end
-
function xml.setproperty(root,k,v)
- getmetatable(root).__index[k] = v
+ getmetatable(root).__index[k]=v
end
-
function xml.checkerror(top,toclose)
- return "" -- can be set
+ return ""
end
-
local function add_attribute(namespace,tag,value)
- if cleanup and #value > 0 then
- value = cleanup(value) -- new
- end
- if tag == "xmlns" then
- xmlns[#xmlns+1] = resolvens(value)
- at[tag] = value
- elseif namespace == "" then
- at[tag] = value
- elseif namespace == "xmlns" then
- xml.checkns(tag,value)
- at["xmlns:" .. tag] = value
- else
- -- for the moment this way:
- at[namespace .. ":" .. tag] = value
- end
+ if cleanup and #value>0 then
+ value=cleanup(value)
+ end
+ if tag=="xmlns" then
+ xmlns[#xmlns+1]=resolvens(value)
+ at[tag]=value
+ elseif namespace=="" then
+ at[tag]=value
+ elseif namespace=="xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:"..tag]=value
+ else
+ at[namespace..":"..tag]=value
+ end
+end
+local function add_empty(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top=stack[#stack]
+ dt=top.dt
+ local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
+ dt[#dt+1]=t
+ setmetatable(t,mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at={}
+end
+local function add_begin(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ setmetatable(top,mt)
+ dt=top.dt
+ stack[#stack+1]=top
+ at={}
+end
+local function add_end(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local toclose=remove(stack)
+ top=stack[#stack]
+ if #stack<1 then
+ errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ elseif toclose.tg~=tag then
+ errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ end
+ dt=top.dt
+ dt[#dt+1]=toclose
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
end
-
-local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = stack[#stack]
- dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
- setmetatable(t, mt)
- if at.xmlns then
- remove(xmlns)
- end
- at = { }
-end
-
-local function add_begin(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
- setmetatable(top, mt)
- dt = top.dt
- stack[#stack+1] = top
- at = { }
-end
-
-local function add_end(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local toclose = remove(stack)
- top = stack[#stack]
- if #stack < 1 then
- errorstr = format("nothing to close with %s %s", tag, xml.checkerror(top,toclose) or "")
- elseif toclose.tg ~= tag then -- no namespace check
- errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.checkerror(top,toclose) or "")
- end
- dt = top.dt
- dt[#dt+1] = toclose
- -- dt[0] = top -- nasty circular reference when serializing table
- if toclose.at.xmlns then
- remove(xmlns)
- end
-end
-
local function add_text(text)
- if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
- else
- dt[#dt+1] = text
- end
-end
-
-local function add_special(what, spacing, text)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- if strip and (what == "@cm@" or what == "@dt@") then
- -- forget it
- else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
- end
+ if cleanup and #text>0 then
+ dt[#dt+1]=cleanup(text)
+ else
+ dt[#dt+1]=text
+ end
+end
+local function add_special(what,spacing,text)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ if strip and (what=="@cm@" or what=="@dt@") then
+ else
+ dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ end
end
-
local function set_message(txt)
- errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
+ errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-
-local reported_attribute_errors = { }
-
+local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute value: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
end
-
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
-end
-
-xml.placeholders = {
- unknown_dec_entity = function(str) return (str == "" and "&error;") or format("&%s;",str) end,
- unknown_hex_entity = function(str) return format("&#x%s;",str) end,
- unknown_any_entity = function(str) return format("&#x%s;",str) end,
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
-
-local placeholders = xml.placeholders
-
+local placeholders=xml.placeholders
local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
-
local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
--- one level expansion (simple case), no checking done
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
--- parsing in the xml file
-
-local predefined_unified = {
- [38] = "&amp;",
- [42] = "&quot;",
- [47] = "&apos;",
- [74] = "&lt;",
- [76] = "&gt;",
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s),true
+ end
+end
+local rest=(1-P(";"))^0
+local many=P(1)^0
+local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
}
-
-local predefined_simplified = {
- [38] = "&", amp = "&",
- [42] = '"', quot = '"',
- [47] = "'", apos = "'",
- [74] = "<", lt = "<",
- [76] = ">", gt = ">",
-}
-
-local nofprivates = 0xF0000 -- shared but seldom used
-
-local privates_u = { -- unescaped
- [ [[&]] ] = "&amp;",
- [ [["]] ] = "&quot;",
- [ [[']] ] = "&apos;",
- [ [[<]] ] = "&lt;",
- [ [[>]] ] = "&gt;",
+local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
}
-
-local privates_p = {
+local nofprivates=0xF0000
+local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
}
-
-local privates_n = {
- -- keeps track of defined ones
+local privates_p={}
+local privates_n={
}
-
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
-
+local escaped=utf.remapper(privates_u)
local function unescaped(s)
- local p = privates_n[s]
- if not p then
- nofprivates = nofprivates + 1
- p = utfchar(nofprivates)
- privates_n[s] = p
- s = "&" .. s .. ";" -- todo: use char-ent to map to hex
- privates_u[p] = s
- privates_p[p] = s
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ end
+ return p
+end
+local unprivatized=utf.remapper(privates_p)
+xml.privatetoken=unescaped
+xml.unprivatized=unprivatized
+xml.privatecodes=privates_n
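+-- A tiny illustration of the private-entity round trip above (entity name is
+-- made up): unknown entities become characters in a private Unicode plane
+-- while parsing and are mapped back on serialization.
+--
+--   local p = xml.privatetoken("myentity")  -- e.g. U+F0001, remembered
+--   -- p can now travel through the tree as ordinary text ...
+--   xml.unprivatized("x" .. p .. "y")       --> "x&myentity;y"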
+local function handle_hex_entity(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+   h=(n and utfchar(n)) or placeholders.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- return p
+ hcache[str]=h
+ end
+ return h
end
-
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
+local function handle_dec_entity(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
else
- return (utfgsub(s,".",privates_p))
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
+ dcache[str]=d
+ end
+ return d
end
-
-xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
-xml.privatecodes = privates_n
-
-local function handle_hex_entity(str)
- local h = hcache[str]
- if not h then
- local n = tonumber(str,16)
- h = unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
- elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
+xml.parsedentitylpeg=parsedentity
+local function handle_any_entity(str)
+ if resolve then
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve)=="function" then
+ a=resolve(str) or entities[str]
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h = "&#x" .. str .. ";"
+ a=entities[str]
end
- hcache[str] = h
- end
- return h
-end
-
-local function handle_dec_entity(str)
- local d = dcache[str]
- if not d then
- local n = tonumber(str)
- d = unify_predefined and predefined_unified[n]
- if d then
+ if a then
+ if type(a)=="function" then
if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
- elseif utfize then
- d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
if trace_entities then
- report_xml("found entity &#%s;",str)
+ report_xml("resolving entity &%s; to external %s",str,a)
end
- d = "&#" .. str .. ";"
- end
- dcache[str] = d
- end
- return d
-end
-
-xml.parsedentitylpeg = parsedentity
-
-local function handle_any_entity(str)
- if resolve then
- local a = acache[str] -- per instance ! todo
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (predefined)",str,a)
- end
- else
- if type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
- if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
- end
- a = lpegmatch(parsedentity,a) or a -- for nested
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
- end
- else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
- end
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
- end
- else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
- end
- if str == "" then
- a = "&error;"
- else
- a = "&" .. str .. ";"
- end
- end
- end
- end
- acache[str] = a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; into %s",str,a)
- acache[str] = a
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
end
- end
- return a
- else
- local a = acache[str]
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- -- one of the predefined
- acache[str] = a
- if trace_entities then
- report_xml("entity &%s; becomes %s",str,tostring(a))
- end
- elseif str == "" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
- end
- a = "&error;"
- acache[str] = a
+ if str=="" then
+ a="&error;"
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
- end
- -- a = "&" .. str .. ";"
- a = unescaped(str)
- acache[str] = a
- end
- end
- return a
- end
+ a="&"..str..";"
+ end
+ end
+ end
+ end
+ acache[str]=a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str]=a
+ end
+ end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a="&error;"
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
+ end
end
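A minimal usage sketch of the entity hooks above (not part of the commit itself): handle_any_entity consults whatever _xmlconvert_ further down copies into the resolve and entities upvalues, so callers can pass a resolver function or a lookup table through the settings of xml.convert. The setting keys below are the ones read by _xmlconvert_; the entity names and values are invented for illustration.

    local data = [[<doc>&myent; and &copy; and &#169;</doc>]]
    local root = xml.convert(data, {
      resolve_entities = function(name)                 -- consulted by handle_any_entity
        if name == "myent" then return "my value" end   -- returning nil falls through to the entities table
      end,
      entities = { copy = "(c)" },                      -- plain lookup fallback
      -- utfize_entities and resolve_predefined_entities default to true (see _xmlconvert_ below)
    })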
-
local function handle_end_entity(chr)
- report_xml("error in entity, %q found instead of ';'",chr)
-end
-
-local space = S(' \r\n\t')
-local open = P('<')
-local close = P('>')
-local squote = S("'")
-local dquote = S('"')
-local equal = P('=')
-local slash = P('/')
-local colon = P(':')
-local semicolon = P(';')
-local ampersand = P('&')
-local valid = R('az', 'AZ', '09') + S('_-.')
-local name_yes = C(valid^1) * colon * C(valid^1)
-local name_nop = C(P(true)) * C(valid^1)
-local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
-local spacing = C(space^0)
-
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
-local hexentitycontent = R("AF","af","09")^0
-local decentitycontent = R("09")^0
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flacky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
-
-local function normalentity(k,v ) entities[k] = v end
-local function systementity(k,v,n) entities[k] = v end
-local function publicentity(k,v,n) entities[k] = v end
-
--- todo: separate dtd parser
-
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
-
-local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
-
-local normalentitytype = (doctypename * somespace * value)/normalentity
-local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
-local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-
--- we accept comments in doctypes
-
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
-local definitiondoctype= doctypename * somespace * doctypeset
-local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
-local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
-local simpledoctype = (1-close)^1 -- * balanced^0
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-
-local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
-local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
-local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
-local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
-
--- nicer but slower:
---
--- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
--- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
--- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
--- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
-
-local trailer = space^0 * (text_unparsed/set_message)^0
-
--- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
--- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
--- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
-local grammar_parsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ report_xml("error in entity, %a found instead of %a",chr,";")
+end
+local space=S(' \r\n\t')
+local open=P('<')
+local close=P('>')
+local squote=S("'")
+local dquote=S('"')
+local equal=P('=')
+local slash=P('/')
+local colon=P(':')
+local semicolon=P(';')
+local ampersand=P('&')
+local valid=R('az','AZ','09')+S('_-.')
+local name_yes=C(valid^1)*colon*C(valid^1)
+local name_nop=C(P(true))*C(valid^1)
+local name=name_yes+name_nop
+local utfbom=lpeg.patterns.utfbom
+local spacing=C(space^0)
+local anyentitycontent=(1-open-semicolon-space-close)^0
+local hexentitycontent=R("AF","af","09")^0
+local decentitycontent=R("09")^0
+local parsedentity=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity)
+local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local text_unparsed=C((1-open)^1)
+local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local somespace=space^1
+local optionalspace=space^0
+local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
+local endofattributes=slash*close+close
+local whatever=space*name*optionalspace*equal
+local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
+local attributevalue=value+wrongvalue
+local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
+local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
+local parsedtext=text_parsed/add_text
+local unparsedtext=text_unparsed/add_text
+local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
+local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
+local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
+local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
+local begincomment=open*P("!--")
+local endcomment=P("--")*close
+local begininstruction=open*P("?")
+local endinstruction=P("?")*close
+local begincdata=open*P("![CDATA[")
+local endcdata=P("]]")*close
+local someinstruction=C((1-endinstruction)^0)
+local somecomment=C((1-endcomment )^0)
+local somecdata=C((1-endcdata )^0)
+local function normalentity(k,v ) entities[k]=v end
+local function systementity(k,v,n) entities[k]=v end
+local function publicentity(k,v,n) entities[k]=v end
+local begindoctype=open*P("!DOCTYPE")
+local enddoctype=close
+local beginset=P("[")
+local endset=P("]")
+local doctypename=C((1-somespace-close)^0)
+local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
+local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local normalentitytype=(doctypename*somespace*value)/normalentity
+local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
+local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local definitiondoctype=doctypename*somespace*doctypeset
+local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
+local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
+local simpledoctype=(1-close)^1
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
+local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
+local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
+local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local trailer=space^0*(text_unparsed/set_message)^0
+local grammar_parsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
-
-local grammar_unparsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+local grammar_unparsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
-
--- maybe we will add settings to result as well
-
-local function _xmlconvert_(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- --
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
- resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- entities = settings.entities or { }
- --
- if utfize == nil then
- settings.utfize_entities = true
- utfize = true
- end
- if resolve_predefined == nil then
- settings.resolve_predefined_entities = true
- resolve_predefined = true
- end
- --
- --
- stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
- acache, hcache, dcache = { }, { }, { } -- not stored
- reported_attribute_errors = { }
- if settings.parent_root then
- mt = getmetatable(settings.parent_root)
+local function _xmlconvert_(data,settings)
+ settings=settings or {}
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ stack,top,at,xmlns,errorstr={},{},{},{},nil
+ acache,hcache,dcache={},{},{}
+ reported_attribute_errors={}
+ if settings.parent_root then
+ mt=getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1]=top
+ top.dt={}
+ dt=top.dt
+ if not data or data=="" then
+ errorstr="empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr=""
else
- initialize_mt(top)
- end
- stack[#stack+1] = top
- top.dt = { }
- dt = top.dt
- if not data or data == "" then
- errorstr = "empty xml file"
- elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - parsed text"
- end
- elseif type(data) == "string" then
- if lpegmatch(grammar_unparsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - unparsed text"
- end
+ errorstr="invalid xml file - parsed text"
+ end
+ elseif type(data)=="string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr=""
else
- errorstr = "invalid xml file - no text at all"
- end
- local result
- if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
- setmetatable(stack, mt)
- local errorhandler = settings.error_handler
- if errorhandler == false then
- -- no error message
- else
- errorhandler = errorhandler or xml.errorhandler
- if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
- end
- end
+ errorstr="invalid xml file - unparsed text"
+ end
+ else
+ errorstr="invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr~="" then
+ result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+ setmetatable(stack,mt)
+ local errorhandler=settings.error_handler
+ if errorhandler==false then
else
- result = stack[1]
- end
- if not settings.no_root then
- result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
- setmetatable(result, mt)
- local rdt = result.dt
- for k=1,#rdt do
- local v = rdt[k]
- if type(v) == "table" and not v.special then -- always table -)
- result.ri = k -- rootindex
- v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
- break
- end
- end
- end
- if errorstr and errorstr ~= "" then
- result.error = true
- end
- result.statistics = {
- entities = {
- decimals = dcache,
- hexadecimals = hcache,
- names = acache,
- }
+ errorhandler=errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource=settings.currentresource
+ if currentresource and currentresource~="" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result=stack[1]
+ end
+ if not settings.no_root then
+ result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
+ setmetatable(result,mt)
+ local rdt=result.dt
+ for k=1,#rdt do
+ local v=rdt[k]
+ if type(v)=="table" and not v.special then
+ result.ri=k
+ v.__p__=result
+ break
+ end
+ end
+ end
+ if errorstr and errorstr~="" then
+ result.error=true
+ end
+ result.statistics={
+ entities={
+ decimals=dcache,
+ hexadecimals=hcache,
+ names=acache,
}
- strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
- unify_predefined, cleanup, entities = nil, nil, nil
- stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
- acache, hcache, dcache = nil, nil, nil
- reported_attribute_errors, mt, errorhandler = nil, nil, nil
- return result
+ }
+ strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
+ unify_predefined,cleanup,entities=nil,nil,nil
+ stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
+ acache,hcache,dcache=nil,nil,nil
+ reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ return result
end
-
--- Because we can have a crash (stack issues) with faulty xml, we wrap this one
--- in a protector:
-
function xmlconvert(data,settings)
- local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
- if ok then
- return result
- else
- return _xmlconvert_("")
- end
-end
-
-xml.convert = xmlconvert
-
-function xml.inheritedconvert(data,xmldata) -- xmldata is parent
- local settings = xmldata.settings
- if settings then
- settings.parent_root = xmldata -- to be tested
- end
- -- settings.no_root = true
- local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
- -- xc.settings = nil
- -- xc.entities = nil
- -- xc.special = nil
- -- xc.ri = nil
- -- print(xc.tg)
- return xc
+ local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+xml.convert=xmlconvert
+function xml.inheritedconvert(data,xmldata)
+ local settings=xmldata.settings
+ if settings then
+ settings.parent_root=xmldata
+ end
+ local xc=xmlconvert(data,settings)
+ return xc
end
-
---[[ldx--
-<p>Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).</p>
---ldx]]--
-
function xml.is_valid(root)
- return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
end
-
function xml.package(tag,attributes,data)
- local ns, tg = match(tag,"^(.-):?([^:]+)$")
- local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
- setmetatable(t, mt)
- return t
+ local ns,tg=match(tag,"^(.-):?([^:]+)$")
+ local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
+ setmetatable(t,mt)
+ return t
end
-
function xml.is_valid(root)
- return root and not root.error
+ return root and not root.error
end
-
-xml.errorhandler = report_xml
-
---[[ldx--
-<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.</p>
---ldx]]--
-
+xml.errorhandler=report_xml
function xml.load(filename,settings)
- local data = ""
- if type(filename) == "string" then
- -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
- local f = io.open(filename,'r')
- if f then
- data = f:read("*all")
- f:close()
- end
- elseif filename then -- filehandle
- data = filename:read("*all")
- end
- return xmlconvert(data,settings)
+ local data=""
+ if type(filename)=="string" then
+ local f=io.open(filename,'r')
+ if f then
+ data=f:read("*all")
+ f:close()
+ end
+ elseif filename then
+ data=filename:read("*all")
+ end
+ if settings then
+ settings.currentresource=filename
+ local result=xmlconvert(data,settings)
+ settings.currentresource=nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource=filename })
+ end
end
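The currentresource bookkeeping above is only consulted in the error-reporting branch of _xmlconvert_; loading itself remains a thin wrapper around xml.convert. A small sketch of the intended call ("demo.xml" is a placeholder path, and xml.is_valid, xml.body and xml.name are the helpers defined later in this chunk):

    local root = xml.load("demo.xml", { resolve_entities = true })
    if xml.is_valid(root) then
      print(xml.name(xml.body(root)))   -- ns:tag of the document element
    end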
-
---[[ldx--
-<p>When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
function xml.toxml(data)
- if type(data) == "string" then
- local root = { xmlconvert(data,no_root) }
- return (#root > 1 and root) or root[1]
- else
- return data
- end
+ if type(data)=="string" then
+ local root={ xmlconvert(data,no_root) }
+ return (#root>1 and root) or root[1]
+ else
+ return data
+ end
end
-
---[[ldx--
-<p>For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!</p>
---ldx]]--
-
local function copy(old,tables)
- if old then
- tables = tables or { }
- local new = { }
- if not tables[old] then
- tables[old] = new
- end
- for k,v in next, old do
- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
- end
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return { }
+ if old then
+ tables=tables or {}
+ local new={}
+ if not tables[old] then
+ tables[old]=new
end
-end
-
-xml.copy = copy
-
---[[ldx--
-<p>In <l n='context'/> serializing the tree or parts of the tree is a major
-actitivity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatinating
-alternative.</p>
---ldx]]--
-
--- todo: add <?xml version='1.0' standalone='yes'?> when not present
-
-function xml.checkbom(root) -- can be made faster
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- return
- end
- end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
+ for k,v in next,old do
+ new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
end
-end
-
---[[ldx--
-<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.</p>
---ldx]]--
-
--- new experimental reorganized serialize
-
-local function verbose_element(e,handlers) -- options
- local handle = handlers.handle
- local serialize = handlers.serialize
- local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
- local ats = eat and next(eat) and { }
- if ats then
- for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,escaped(v))
- end
- end
- if ern and trace_entities and ern ~= ens then
- ens = ern
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
end
- if ens ~= "" then
- if edt and #edt > 0 then
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
- else
- handle("<",ens,":",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("</",ens,":",etg,">")
+ return new
+ else
+ return {}
+ end
+end
+xml.copy=copy
+function xml.checkbom(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
+ insert(dt,2,"\n" )
+ end
+end
+local function verbose_element(e,handlers)
+ local handle=handlers.handle
+ local serialize=handlers.serialize
+ local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
+ local ats=eat and next(eat) and {}
+ if ats then
+ for k,v in next,eat do
+ ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern~=ens then
+ ens=ern
+ end
+ if ens~="" then
+ if edt and #edt>0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
- else
- handle("<",ens,":",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",ens,":",etg,">")
else
- if edt and #edt > 0 then
- if ats then
- handle("<",etg," ",concat(ats," "),">")
- else
- handle("<",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e)) -- option: hexify escaped entities
- else
- serialize(e,handlers)
- end
- end
- handle("</",etg,">")
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt>0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",etg," ",concat(ats," "),"/>")
- else
- handle("<",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
end
+ end
end
-
local function verbose_pi(e,handlers)
- handlers.handle("<?",e.dt[1],"?>")
+ handlers.handle("<?",e.dt[1],"?>")
end
-
local function verbose_comment(e,handlers)
- handlers.handle("<!--",e.dt[1],"-->")
+ handlers.handle("<!--",e.dt[1],"-->")
end
-
local function verbose_cdata(e,handlers)
- handlers.handle("<![CDATA[", e.dt[1],"]]>")
+ handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
-
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
end
-
local function verbose_root(e,handlers)
- handlers.serialize(e.dt,handlers)
+ handlers.serialize(e.dt,handlers)
end
-
local function verbose_text(e,handlers)
- handlers.handle(escaped(e))
+ handlers.handle(escaped(e))
end
-
local function verbose_document(e,handlers)
- local serialize = handlers.serialize
- local functions = handlers.functions
- for i=1,#e do
- local ei = e[i]
- if type(ei) == "string" then
- functions["@tx@"](ei,handlers)
- else
- serialize(ei,handlers)
- end
+ local serialize=handlers.serialize
+ local functions=handlers.functions
+ for i=1,#e do
+ local ei=e[i]
+ if type(ei)=="string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
end
+ end
end
-
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
- end
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
-
local function xserialize(e,handlers)
- local functions = handlers.functions
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
-end
-
-local handlers = { }
-
+ local functions=handlers.functions
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+local handlers={}
local function newhandlers(settings)
- local t = table.copy(handlers.verbose or { }) -- merge
- if settings then
- for k,v in next, settings do
- if type(v) == "table" then
- local tk = t[k] if not tk then tk = { } t[k] = tk end
- for kk,vv in next, v do
- tk[kk] = vv
- end
- else
- t[k] = v
- end
- end
- if settings.name then
- handlers[settings.name] = t
- end
- end
- utilities.storage.mark(t)
- return t
-end
-
-local nofunction = function() end
-
+ local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
+ if settings then
+ for k,v in next,settings do
+ if type(v)=="table" then
+ local tk=t[k] if not tk then tk={} t[k]=tk end
+ for kk,vv in next,v do
+ tk[kk]=vv
+ end
+ else
+ t[k]=v
+ end
+ end
+ if settings.name then
+ handlers[settings.name]=t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+local nofunction=function() end
function xml.sethandlersfunction(handler,name,fnc)
- handler.functions[name] = fnc or nofunction
+ handler.functions[name]=fnc or nofunction
end
-
function xml.gethandlersfunction(handler,name)
- return handler.functions[name]
+ return handler.functions[name]
end
-
function xml.gethandlers(name)
- return handlers[name]
+ return handlers[name]
end
-
newhandlers {
- name = "verbose",
- initialize = false, -- faster than nil and mt lookup
- finalize = false, -- faster than nil and mt lookup
- serialize = xserialize,
- handle = print,
- functions = {
- ["@dc@"] = verbose_document,
- ["@dt@"] = verbose_doctype,
- ["@rt@"] = verbose_root,
- ["@el@"] = verbose_element,
- ["@pi@"] = verbose_pi,
- ["@cm@"] = verbose_comment,
- ["@cd@"] = verbose_cdata,
- ["@tx@"] = verbose_text,
- }
+ name="verbose",
+ initialize=false,
+ finalize=false,
+ serialize=xserialize,
+ handle=print,
+ functions={
+ ["@dc@"]=verbose_document,
+ ["@dt@"]=verbose_doctype,
+ ["@rt@"]=verbose_root,
+ ["@el@"]=verbose_element,
+ ["@pi@"]=verbose_pi,
+ ["@cm@"]=verbose_comment,
+ ["@cd@"]=verbose_cdata,
+ ["@tx@"]=verbose_text,
+ }
}
-
---[[ldx--
-<p>How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):</p>
-
-<lines>
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-</lines>
-
-<p>Beware, these were timing with the old routine but measurements will not be that
-much different I guess.</p>
---ldx]]--
-
--- maybe this will move to lxml-xml
-
local result
-
-local xmlfilehandler = newhandlers {
- name = "file",
- initialize = function(name)
- result = io.open(name,"wb")
- return result
- end,
- finalize = function()
- result:close()
- return true
- end,
- handle = function(...)
- result:write(...)
- end,
+local xmlfilehandler=newhandlers {
+ name="file",
+ initialize=function(name)
+ result=io.open(name,"wb")
+ return result
+ end,
+ finalize=function()
+ result:close()
+ return true
+ end,
+ handle=function(...)
+ result:write(...)
+ end,
}
-
--- no checking on writeability here but not faster either
---
--- local xmlfilehandler = newhandlers {
--- initialize = function(name)
--- io.output(name,"wb")
--- return true
--- end,
--- finalize = function()
--- io.close()
--- return true
--- end,
--- handle = io.write,
--- }
-
function xml.save(root,name)
- serialize(root,xmlfilehandler,name)
+ serialize(root,xmlfilehandler,name)
end
-
local result
-
-local xmlstringhandler = newhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
+local xmlstringhandler=newhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
}
-
-local function xmltostring(root) -- 25% overhead due to collecting
- if not root then
- return ""
- elseif type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+local function xmltostring(root)
+ if not root then
+ return ""
+ elseif type(root)=="string" then
+ return root
+ else
+ return serialize(root,xmlstringhandler) or ""
+ end
end
-
-local function __tostring(root) -- inline
- return (root and xmltostring(root)) or ""
+local function __tostring(root)
+ return (root and xmltostring(root)) or ""
end
-
-initialize_mt = function(root) -- redefinition
- mt = { __tostring = __tostring, __index = root }
+initialize_mt=function(root)
+ mt={ __tostring=__tostring,__index=root }
end
-
-xml.defaulthandlers = handlers
-xml.newhandlers = newhandlers
-xml.serialize = serialize
-xml.tostring = xmltostring
-
---[[ldx--
-<p>The next function operated on the content only and needs a handle function
-that accepts a string.</p>
---ldx]]--
-
+xml.defaulthandlers=handlers
+xml.newhandlers=newhandlers
+xml.serialize=serialize
+xml.tostring=xmltostring
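With the default handlers registered, serialization is driven entirely through these exported entry points. A brief sketch of the two common round trips, using only functions defined in this chunk ("copy.xml" is a placeholder path):

    local root = xml.convert("<a><b n='1'>x &amp; y</b></a>")
    local s    = xml.tostring(root)   -- serializes through the "string" handler above
    xml.save(root, "copy.xml")        -- serializes through the "file" handler above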
local function xmlstring(e,handle)
- if not handle or (e.special and e.tg ~= "@rt@") then
- -- nothing
- elseif e.tg then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- xmlstring(edt[i],handle)
- end
- end
- else
- handle(e)
+ if not handle or (e.special and e.tg~="@rt@") then
+ elseif e.tg then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
end
+ else
+ handle(e)
+ end
end
-
-xml.string = xmlstring
-
---[[ldx--
-<p>A few helpers:</p>
---ldx]]--
-
-
+xml.string=xmlstring
function xml.settings(e)
- while e do
- local s = e.settings
- if s then
- return s
- else
- e = e.__p__
- end
+ while e do
+ local s=e.settings
+ if s then
+ return s
+ else
+ e=e.__p__
end
- return nil
+ end
+ return nil
end
-
function xml.root(e)
- local r = e
- while e do
- e = e.__p__
- if e then
- r = e
- end
+ local r=e
+ while e do
+ e=e.__p__
+ if e then
+ r=e
end
- return r
+ end
+ return r
end
-
function xml.parent(root)
- return root.__p__
+ return root.__p__
end
-
function xml.body(root)
- return (root.ri and root.dt[root.ri]) or root -- not ok yet
+ return root.ri and root.dt[root.ri] or root
end
-
function xml.name(root)
- if not root then
- return ""
- elseif root.ns == "" then
- return root.tg
- else
- return root.ns .. ":" .. root.tg
- end
+ if not root then
+ return ""
+ end
+ local ns=root.ns
+ local tg=root.tg
+ if ns=="" then
+ return tg
+ else
+ return ns..":"..tg
+ end
end
-
---[[ldx--
-<p>The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:</p>
---ldx]]--
-
function xml.erase(dt,k)
- if dt then
- if k then
- dt[k] = ""
- else for k=1,#dt do
- dt[1] = { "" }
- end end
- end
+ if dt then
+ if k then
+ dt[k]=""
+ else for k=1,#dt do
+ dt[1]={ "" }
+ end end
+ end
end
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-
-<typing>
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-</typing>
---ldx]]--
-
function xml.assign(dt,k,root)
- if dt and k then
- dt[k] = (type(root) == "table" and xml.body(root)) or root
- return dt[k]
- else
- return xml.body(root)
- end
+ if dt and k then
+ dt[k]=type(root)=="table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+function xml.tocdata(e,wrapper)
+ local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
+ setmetatable(t,getmetatable(e))
+ e.dt={ t }
end
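A usage sketch mirroring the example in the ldx comment removed just below (xml.tocdata(e) and xml.tocdata(e,"error")); the document literal is invented:

    local e = xml.body(xml.convert("<doc><msg>hello</msg></doc>"))
    xml.tocdata(e, "error")   -- e.dt becomes one @cd@ node holding "<error><msg>hello</msg></error>"
    print(xml.tostring(e))    -- serializes the wrapper via the @cd@ handler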
-
--- the following helpers may move
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-<typing>
-xml.tocdata(e)
-xml.tocdata(e,"error")
-</typing>
---ldx]]--
-
-function xml.tocdata(e,wrapper) -- a few more in the aux module
- local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
- if wrapper then
- whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
- end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
- setmetatable(t,getmetatable(e))
- e.dt = { t }
-end
-
function xml.makestandalone(root)
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" then
- local txt = v.dt[1]
- if find(txt,"xml.*version=") then
- v.dt[1] = txt .. " standalone='yes'"
- break
- end
- end
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" then
+ local txt=v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1]=txt.." standalone='yes'"
+ break
end
+ end
end
- return root
+ end
+ return root
end
-
function xml.kind(e)
- local dt = e and e.dt
- if dt then
- local n = #dt
- if n == 1 then
- local d = dt[1]
- if d.special then
- local tg = d.tg
- if tg == "@cd@" then
- return "cdata"
- elseif tg == "@cm" then
- return "comment"
- elseif tg == "@pi@" then
- return "instruction"
- elseif tg == "@dt@" then
- return "declaration"
- end
- elseif type(d) == "string" then
- return "text"
- end
- return "element"
- elseif n > 0 then
- return "mixed"
- end
- end
- return "empty"
+ local dt=e and e.dt
+ if dt then
+ local n=#dt
+ if n==1 then
+ local d=dt[1]
+ if d.special then
+ local tg=d.tg
+ if tg=="@cd@" then
+ return "cdata"
+ elseif tg=="@cm" then
+ return "comment"
+ elseif tg=="@pi@" then
+ return "instruction"
+ elseif tg=="@dt@" then
+ return "declaration"
+ end
+ elseif type(d)=="string" then
+ return "text"
+ end
+ return "element"
+ elseif n>0 then
+ return "mixed"
+ end
+ end
+ return "empty"
end
@@ -8225,1294 +8998,1060 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- e.ni is only valid after a filter run
--- todo: B/C/[get first match]
-
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local setmetatableindex = table.setmetatableindex
-
--- beware, this is not xpath ... e.g. position is different (currently) and
--- we have reverse-sibling as reversed preceding sibling
-
---[[ldx--
-<p>This module can be used stand alone but also inside <l n='mkiv'/> in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.</p>
-<p>If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.</P>
---ldx]]--
-
---[[ldx--
-<p>Expecially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for <l n='context'/> we also need
-this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
-files.</p>
-
-<typing>
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-</typing>
---ldx]]--
-
-local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
-local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
-local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
-
-local report_lpath = logs.reporter("xml","lpath")
-
---[[ldx--
-<p>We've now arrived at an interesting part: accessing the tree using a subset
-of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
-will explain more about its usage in other documents.</p>
---ldx]]--
+package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
-local xml = xml
+-- original size: 48956, stripped down to: 30516
-local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
-local lpathcached = 0 function xml.lpathcached() return lpathcached end
-
-xml.functions = xml.functions or { } -- internal
-local functions = xml.functions
-
-xml.expressions = xml.expressions or { } -- in expressions
-local expressions = xml.expressions
-
-xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
-local finalizers = xml.finalizers
-
-xml.specialhandler = xml.specialhandler or { }
-local specialhandler = xml.specialhandler
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
-finalizers.xml = finalizers.xml or { }
-finalizers.tex = finalizers.tex or { }
-
-local function fallback (t, name)
- local fn = finalizers[name]
- if fn then
- t[name] = fn
- else
- report_lpath("unknown sub finalizer '%s'",tostring(name))
- fn = function() end
- end
- return fn
-end
-
-setmetatableindex(finalizers.xml, fallback)
-setmetatableindex(finalizers.tex, fallback)
-
-xml.defaultprotocol = "xml"
-
--- as xsl does not follow xpath completely here we will also
--- be more liberal especially with regards to the use of | and
--- the rootpath:
---
--- test : all 'test' under current
--- /test : 'test' relative to current
--- a|b|c : set of names
--- (a|b|c) : idem
--- ! : not
---
--- after all, we're not doing transformations but filtering. in
--- addition we provide filter functions (last bit)
---
--- todo: optimizer
---
--- .. : parent
--- * : all kids
--- / : anchor here
--- // : /**/
--- ** : all in between
---
--- so far we had (more practical as we don't transform)
---
--- {/test} : kids 'test' under current node
--- {test} : any kid with tag 'test'
--- {//test} : same as above
-
--- evaluator (needs to be redone, for the moment copied)
-
--- todo: apply_axis(list,notable) and collection vs single
-
-local apply_axis = { }
-
-apply_axis['root'] = function(list)
- local collected = { }
- for l=1,#list do
- local ll = list[l]
- local rt = ll
- while ll do
- ll = ll.__p__
- if ll then
- rt = ll
- end
- end
- collected[l] = rt
- end
- return collected
-end
-
-apply_axis['self'] = function(list)
- return list
-end
-
-apply_axis['child'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local dt = ll.dt
- if dt then -- weird that this is needed
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- end
- end
- ll.en = en
- end
- end
- return collected
+if not modules then modules={} end modules ['lxml-lpt']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
+local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
+local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
+local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
+local report_lpath=logs.reporter("xml","lpath")
+local xml=xml
+local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
+local lpathcached=0 function xml.lpathcached() return lpathcached end
+xml.functions=xml.functions or {}
+local functions=xml.functions
+xml.expressions=xml.expressions or {}
+local expressions=xml.expressions
+xml.finalizers=xml.finalizers or {}
+local finalizers=xml.finalizers
+xml.specialhandler=xml.specialhandler or {}
+local specialhandler=xml.specialhandler
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+finalizers.xml=finalizers.xml or {}
+finalizers.tex=finalizers.tex or {}
+local function fallback (t,name)
+ local fn=finalizers[name]
+ if fn then
+ t[name]=fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn=function() end
+ end
+ return fn
+end
+setmetatableindex(finalizers.xml,fallback)
+setmetatableindex(finalizers.tex,fallback)
+xml.defaultprotocol="xml"
+local apply_axis={}
+apply_axis['root']=function(list)
+ local collected={}
+ for l=1,#list do
+ local ll=list[l]
+ local rt=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ rt=ll
+ end
+ end
+ collected[l]=rt
+ end
+ return collected
+end
+apply_axis['self']=function(list)
+ return list
+end
+apply_axis['child']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local dt=ll.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ end
+ end
+ ll.en=en
+ end
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-
-apply_axis['descendant'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- c = collect(list[l],collected,c)
- end
- return collected
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ c=collect(list[l],collected,c)
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-apply_axis['descendant-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- if ll.special ~= true then -- catch double root
- c = c + 1
- collected[c] = ll
- end
- c = collect(ll,collected,c)
- end
- return collected
-end
-
-apply_axis['ancestor'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['ancestor-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- c = c + 1
- collected[c] = ll
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['parent'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local pl = list[l].__p__
- if pl then
- c = c + 1
- collected[c] = pl
- end
- end
- return collected
-end
-
-apply_axis['attribute'] = function(list)
- return { }
-end
-
-apply_axis['namespace'] = function(list)
- return { }
-end
-
-apply_axis['following'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['preceding'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['following-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni+1,#d do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['preceding-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=1,ll.ni-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['reverse-sibling'] = function(list) -- reverse preceding
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
-apply_axis['auto-descendant'] = apply_axis['descendant']
-apply_axis['auto-child'] = apply_axis['child']
-apply_axis['auto-self'] = apply_axis['self']
-apply_axis['initial-child'] = apply_axis['child']
-
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ if ll.special~=true then
+ c=c+1
+ collected[c]=ll
+ end
+ c=collect(ll,collected,c)
+ end
+ return collected
+end
+apply_axis['ancestor']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['ancestor-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ c=c+1
+ collected[c]=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['parent']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local pl=list[l].__p__
+ if pl then
+ c=c+1
+ collected[c]=pl
+ end
+ end
+ return collected
+end
+apply_axis['attribute']=function(list)
+ return {}
+end
+apply_axis['namespace']=function(list)
+ return {}
+end
+apply_axis['following']=function(list)
+ return {}
+end
+apply_axis['preceding']=function(list)
+ return {}
+end
+apply_axis['following-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni+1,#d do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['preceding-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=1,ll.ni-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['reverse-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
+apply_axis['auto-descendant']=apply_axis['descendant']
+apply_axis['auto-child']=apply_axis['child']
+apply_axis['auto-self']=apply_axis['self']
+apply_axis['initial-child']=apply_axis['child']
local function apply_nodes(list,directive,nodes)
- -- todo: nodes[1] etc ... negated node name in set ... when needed
- -- ... currently ignored
- local maxn = #nodes
- if maxn == 3 then --optimized loop
- local nns, ntg = nodes[2], nodes[3]
- if not nns and not ntg then -- wildcard
- if directive then
- return list
- else
- return { }
- end
- else
- local collected, c, m, p = { }, 0, 0, nil
- if not nns then -- only check tag
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- if directive then
- if ntg == ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif ntg ~= ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- elseif not ntg then -- only check namespace
- for l=1,#list do
- local ll = list[l]
- local lns = ll.rn or ll.ns
- if lns then
- if directive then
- if lns == nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif lns ~= nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- else -- check both
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = ltg == ntg and lns == nns
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- end
- return collected
- end
+ local maxn=#nodes
+ if maxn==3 then
+ local nns,ntg=nodes[2],nodes[3]
+ if not nns and not ntg then
+ if directive then
+ return list
+ else
+ return {}
+ end
else
- local collected, c, m, p = { }, 0, 0, nil
+ local collected,c,m,p={},0,0,nil
+ if not nns then
for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = false
- for n=1,maxn,3 do
- local nns, ntg = nodes[n+1], nodes[n+2]
- ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
- if ok then
- break
- end
- end
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- return collected
- end
-end
-
-local quit_expression = false
-
-local function apply_expression(list,expression,order)
- local collected, c = { }, 0
- quit_expression = false
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ if directive then
+ if ntg==ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif ntg~=ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ elseif not ntg then
+ for l=1,#list do
+ local ll=list[l]
+ local lns=ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns==nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif lns~=nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ else
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=ltg==ntg and lns==nns
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected,c,m,p={},0,0,nil
for l=1,#list do
- local ll = list[l]
- if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1
- c = c + 1
- collected[c] = ll
- end
- if quit_expression then
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=false
+ for n=1,maxn,3 do
+ local nns,ntg=nodes[n+1],nodes[n+2]
+ ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
+ if ok then
break
+ end
+ end
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
end
+ end
end
return collected
+ end
end
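All branches above share one flat layout: the nodes table is a list of (directive, ns, tg) triples, where false acts as a wildcard (this is the same layout register_all_nodes and nodesettostring use further down). A minimal standalone sketch of the simplest branch, a single triple with a tag test only, leaving out the per-parent match index that the real code stores in ll.mi:

local function match_tag_only(list,directive,ntg)
  local collected, c = { }, 0
  for l=1,#list do
    local ll  = list[l]
    local ltg = ll.tg
    if ltg then
      -- directive=true keeps matching tags, directive=false keeps everything else
      if (directive and ltg == ntg) or (not directive and ltg ~= ntg) then
        c = c + 1
        collected[c] = ll
      end
    end
  end
  return collected
end

local sample = { { tg = "b" }, { tg = "c" }, { tg = "b" } }
print(#match_tag_only(sample,true,"b"))   -- 2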
-
-local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
-
-local spaces = S(" \n\r\t\f")^0
-local lp_space = S(" \n\r\t\f")
-local lp_any = P(1)
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_or = P("|") / " or "
-local lp_and = P("&") / " and "
-
-local lp_builtin = P (
- P("text") / "(ll.dt[1] or '')" + -- fragile
- P("content") / "ll.dt" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
- P("position") / "l" + -- is element in finalizer
- P("firstindex") / "1" +
- P("lastindex") / "(#ll.__p__.dt or 1)" +
- P("firstelement") / "1" +
- P("lastelement") / "(ll.__p__.en or 1)" +
- P("first") / "1" +
- P("last") / "#list" +
- P("rootposition") / "order" +
- P("order") / "order" +
- P("element") / "(ll.ei or 1)" +
- P("index") / "(ll.ni or 1)" +
- P("match") / "(ll.mi or 1)" +
- -- P("namespace") / "ll.ns" +
- P("ns") / "ll.ns"
- ) * ((spaces * P("(") * spaces * P(")"))/"")
-
--- for the moment we keep namespaces with attributes
-
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
-local lp_fastpos = lp_fastpos_n + lp_fastpos_p
-local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
-
-local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
- if expressions[t] then
- return "expr." .. t .. "("
- else
- return "expr.error("
- end
-end
-
-local lparent = P("(")
-local rparent = P(")")
-local noparent = 1 - (lparent+rparent)
-local nested = P{lparent * (noparent + V(1))^0 * rparent}
-local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
-
-local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
-local lp_number = S("+-") * R("09")^1
-local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
-local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
-
+local quit_expression=false
+local function apply_expression(list,expression,order)
+ local collected,c={},0
+ quit_expression=false
+ for l=1,#list do
+ local ll=list[l]
+ if expression(list,ll,l,order) then
+ c=c+1
+ collected[c]=ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
+local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
+local spaces=S(" \n\r\t\f")^0
+local lp_space=S(" \n\r\t\f")
+local lp_any=P(1)
+local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
+local lp_doequal=P("=")/"=="
+local lp_or=P("|")/" or "
+local lp_and=P("&")/" and "
+local lp_builtin=P (
+ P("text")/"(ll.dt[1] or '')"+
+ P("content")/"ll.dt"+
+ P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
+ P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
+ P("ns")/"ll.ns"
+ )*((spaces*P("(")*spaces*P(")"))/"")
+local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
+lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos=lp_fastpos_n+lp_fastpos_p
+local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
+local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
+local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
+ if expressions[t] then
+ return "expr."..t.."("
+ else
+ return "expr.error("
+ end
+end
+local lparent=P("(")
+local rparent=P(")")
+local noparent=1-(lparent+rparent)
+local nested=P{lparent*(noparent+V(1))^0*rparent}
+local value=P(lparent*C((noparent+nested)^0)*rparent)
+local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
+local lp_number=S("+-")*R("09")^1
+local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
+local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
local cleaner
-
-local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
- if expressions[t] then
- s = s and s ~= "" and lpegmatch(cleaner,s)
- if s and s ~= "" then
- return "expr." .. t .. "(ll," .. s ..")"
- else
- return "expr." .. t .. "(ll)"
- end
+local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
+ if expressions[t] then
+ s=s and s~="" and lpegmatch(cleaner,s)
+ if s and s~="" then
+ return "expr."..t.."(ll,"..s..")"
else
- return "expr.error(" .. t .. ")"
+ return "expr."..t.."(ll)"
end
+ else
+ return "expr.error("..t..")"
+ end
end
-
-local content =
- lp_builtin +
- lp_attribute +
- lp_special +
- lp_noequal + lp_doequal +
- lp_or + lp_and +
- lp_reserved +
- lp_lua_function + lp_function +
- lp_content + -- too fragile
- lp_child +
- lp_any
-
-local converter = Cs (
- lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
+local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
+ lp_child+lp_any
+local converter=Cs (
+ lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
)
-
-cleaner = Cs ( (
- lp_reserved +
- lp_number +
- lp_string +
-1 )^1 )
-
-
-
-local template_e = [[
+cleaner=Cs ((
+ lp_reserved+lp_number+lp_string+1 )^1 )
+local template_e=[[
local expr = xml.expressions
return function(list,ll,l,order)
return %s
end
]]
-
-local template_f_y = [[
+local template_f_y=[[
local finalizer = xml.finalizers['%s']['%s']
return function(collection)
return finalizer(collection,%s)
end
]]
-
-local template_f_n = [[
+local template_f_n=[[
return xml.finalizers['%s']['%s']
]]
-
---
-
-local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
-local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
-local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
-local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
-local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
-local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
-local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
-local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
-local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
-local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
-local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
-local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
-local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
-local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
-local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
-
-local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
-local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
-local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
-local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
-
-local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
-
-local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
-
-local skip = { }
-
+local register_self={ kind="axis",axis="self" }
+local register_parent={ kind="axis",axis="parent" }
+local register_descendant={ kind="axis",axis="descendant" }
+local register_child={ kind="axis",axis="child" }
+local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
+local register_root={ kind="axis",axis="root" }
+local register_ancestor={ kind="axis",axis="ancestor" }
+local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
+local register_attribute={ kind="axis",axis="attribute" }
+local register_namespace={ kind="axis",axis="namespace" }
+local register_following={ kind="axis",axis="following" }
+local register_following_sibling={ kind="axis",axis="following-sibling" }
+local register_preceding={ kind="axis",axis="preceding" }
+local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
+local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
+local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
+local register_auto_descendant={ kind="axis",axis="auto-descendant" }
+local register_auto_self={ kind="axis",axis="auto-self" }
+local register_auto_child={ kind="axis",axis="auto-child" }
+local register_initial_child={ kind="axis",axis="initial-child" }
+local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
+local skip={}
local function errorrunner_e(str,cnv)
- if not skip[str] then
- report_lpath("error in expression: %s => %s",str,cnv)
- skip[str] = cnv or str
- end
- return false
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str]=cnv or str
+ end
+ return false
end
local function errorrunner_f(str,arg)
- report_lpath("error in finalizer: %s(%s)",str,arg or "")
- return false
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
end
-
local function register_nodes(nodetest,nodes)
- return { kind = "nodes", nodetest = nodetest, nodes = nodes }
+ return { kind="nodes",nodetest=nodetest,nodes=nodes }
end
-
local function register_expression(expression)
- local converted = lpegmatch(converter,expression)
- local runner = loadstring(format(template_e,converted))
- runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
- return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
+ local converted=lpegmatch(converter,expression)
+ local runner=load(format(template_e,converted))
+ runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind="expression",expression=expression,converted=converted,evaluator=runner }
end
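A minimal sketch of what register_expression ends up producing, assuming template_e as defined above and a converter output such as ll.tg=='b' (which is what the converter yields for a predicate like [tag()=='b']):

xml = xml or { expressions = { } }        -- stand-in so the generated chunk can run on its own
local converted = "ll.tg=='b'"            -- assumed converter output for [tag()=='b']
local chunk = string.format([[
local expr = xml.expressions
return function(list,ll,l,order)
 return %s
end
]],converted)
local evaluator = assert(load(chunk))()
print(evaluator({ },{ tg = "b" },1,1))    -- true
print(evaluator({ },{ tg = "x" },1,1))    -- false

Since the compiled chunk resolves expr to xml.expressions and lp_function only rewrites names that are present in that table, custom predicate helpers can in principle be added there (for example xml.expressions.even = function(n) return n % 2 == 0 end), as long as that happens before a pattern using them is parsed and cached.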
-
local function register_finalizer(protocol,name,arguments)
- local runner
- if arguments and arguments ~= "" then
- runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
- else
- runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name))
- end
- runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
- return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
-end
-
-local expression = P { "ex",
- ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
- sq = "'" * (1 - S("'"))^0 * "'",
- dq = '"' * (1 - S('"'))^0 * '"',
+ local runner
+ if arguments and arguments~="" then
+ runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
+end
+local expression=P { "ex",
+ ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
+ sq="'"*(1-S("'"))^0*"'",
+ dq='"'*(1-S('"'))^0*'"',
}
-
-local arguments = P { "ar",
- ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
- nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
- sq = P("'") * (1 - P("'"))^0 * P("'"),
- dq = P('"') * (1 - P('"'))^0 * P('"'),
+local arguments=P { "ar",
+ ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
+ nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
+ sq=P("'")*(1-P("'"))^0*P("'"),
+ dq=P('"')*(1-P('"'))^0*P('"'),
}
-
--- todo: better arg parser
-
local function register_error(str)
- return { kind = "error", error = format("unparsed: %s",str) }
-end
-
--- there is a difference in * and /*/ and so we need to catch a few special cases
-
-local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
-local special_2 = P("/") * Cc(register_auto_self)
-local special_3 = P("") * Cc(register_auto_self)
-
-local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
-local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
-
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
-
- patterns = spaces * V("protocol") * spaces * (
- ( V("special") * spaces * P(-1) ) +
- ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
- ),
-
- protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
-
- -- the / is needed for // as descendant or self is somewhat special
- -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
- step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
-
- axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
- V("descendant_or_self") + V("following_sibling") + V("following") +
- V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
- #(1-P(-1)) * Cc(register_auto_child),
-
- special = special_1 + special_2 + special_3,
-
- initial = (P("/") * spaces * Cc(register_initial_child))^-1,
-
- error = (P(1)^1) / register_error,
-
- shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
-
- shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
-
- s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * no_nextcolon * Cc(register_child ),
- s_parent = P("..") * Cc(register_parent ),
- s_self = P("." ) * Cc(register_self ),
- s_root = P("^^") * Cc(register_root ),
- s_ancestor = P("^") * Cc(register_ancestor ),
-
- descendant = P("descendant::") * Cc(register_descendant ),
- child = P("child::") * Cc(register_child ),
- parent = P("parent::") * Cc(register_parent ),
- self = P("self::") * Cc(register_self ),
- root = P('root::') * Cc(register_root ),
- ancestor = P('ancestor::') * Cc(register_ancestor ),
- descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
- ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
- -- attribute = P('attribute::') * Cc(register_attribute ),
- -- namespace = P('namespace::') * Cc(register_namespace ),
- following = P('following::') * Cc(register_following ),
- following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
- preceding = P('preceding::') * Cc(register_preceding ),
- preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
- reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
-
- nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
-
- expressions = expression / register_expression,
-
- letters = R("az")^1,
- name = (1-S("/[]()|:*!"))^1, -- make inline
- negate = P("!") * Cc(false),
-
- nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
- nodetest = V("negate") + Cc(true),
- nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
- nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
-
- finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
-
+ return { kind="error",error=format("unparsed: %s",str) }
+end
+local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
+local special_2=P("/")*Cc(register_auto_self)
+local special_3=P("")*Cc(register_auto_self)
+local no_nextcolon=P(-1)+#(1-P(":"))
+local no_nextlparent=P(-1)+#(1-P("("))
+local pathparser=Ct { "patterns",
+ patterns=spaces*V("protocol")*spaces*(
+ (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
+ ),
+ protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
+ step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
+ axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
+ special=special_1+special_2+special_3,
+ initial=(P("/")*spaces*Cc(register_initial_child))^-1,
+ error=(P(1)^1)/register_error,
+ shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
+ shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
+ s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
+ s_descendant=P("**")*Cc(register_descendant),
+ s_child=P("*")*no_nextcolon*Cc(register_child ),
+ s_parent=P("..")*Cc(register_parent ),
+ s_self=P("." )*Cc(register_self ),
+ s_root=P("^^")*Cc(register_root ),
+ s_ancestor=P("^")*Cc(register_ancestor ),
+ descendant=P("descendant::")*Cc(register_descendant ),
+ child=P("child::")*Cc(register_child ),
+ parent=P("parent::")*Cc(register_parent ),
+ self=P("self::")*Cc(register_self ),
+ root=P('root::')*Cc(register_root ),
+ ancestor=P('ancestor::')*Cc(register_ancestor ),
+ descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
+ ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
+ following=P('following::')*Cc(register_following ),
+ following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
+ preceding=P('preceding::')*Cc(register_preceding ),
+ preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
+ reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
+ nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
+ expressions=expression/register_expression,
+ letters=R("az")^1,
+ name=(1-S("/[]()|:*!"))^1,
+ negate=P("!")*Cc(false),
+ nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
+ nodetest=V("negate")+Cc(true),
+ nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
+ wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
+ nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
+ finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
}
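For orientation, a hedged sketch of the shape of a parsed pattern, as the grammar above produces it and as lpath further down caches it; the exact steps depend on the pattern, and the evaluator here is just a dummy stand-in:

local parsed = {
  pattern = "b[1]",                                   -- added by lpath, not by the grammar
  { kind = "axis",  axis = "auto-child" },            -- later rewritten by lpath
  { kind = "nodes", nodetest = true, nodes = { true, false, "b" } },
  { kind = "expression", expression = "1", converted = "l==1",
    evaluator = function(list,ll,l,order) return l == 1 end },
}
for i=1,#parsed do
  print(i,parsed[i].kind)
end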
-
-xmlpatterns.pathparser = pathparser
-
-local cache = { }
-
+xmlpatterns.pathparser=pathparser
+local cache={}
local function nodesettostring(set,nodetest)
- local t = { }
- for i=1,#set,3 do
- local directive, ns, tg = set[i], set[i+1], set[i+2]
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
- end
- if nodetest == false then
- return format("not(%s)",concat(t,"|"))
- else
- return concat(t,"|")
- end
+ local t={}
+ for i=1,#set,3 do
+ local directive,ns,tg=set[i],set[i+1],set[i+2]
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i]=(directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest==false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
end
-
local function tagstostring(list)
- if #list == 0 then
- return "no elements"
- else
- local t = { }
- for i=1, #list do
- local li = list[i]
- local ns, tg = li.ns, li.tg
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- end
- return concat(t," ")
- end
-end
-
-xml.nodesettostring = nodesettostring
-
-local lpath -- we have a harmless kind of circular reference
-
-local lshowoptions = { functions = false }
-
+ if #list==0 then
+ return "no elements"
+ else
+ local t={}
+ for i=1,#list do
+ local li=list[i]
+ local ns,tg=li.ns,li.tg
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+xml.nodesettostring=nodesettostring
+local lpath
+local lshowoptions={ functions=false }
local function lshow(parsed)
- if type(parsed) == "string" then
- parsed = lpath(parsed)
- end
- report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ if type(parsed)=="string" then
+ parsed=lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
end
-
-xml.lshow = lshow
-
+xml.lshow=lshow
local function add_comment(p,str)
- local pc = p.comment
- if not pc then
- p.comment = { str }
- else
- pc[#pc+1] = str
- end
-end
-
-lpath = function (pattern) -- the gain of caching is rather minimal
- lpathcalls = lpathcalls + 1
- if type(pattern) == "table" then
- return pattern
+ local pc=p.comment
+ if not pc then
+ p.comment={ str }
+ else
+ pc[#pc+1]=str
+ end
+end
+lpath=function (pattern)
+ lpathcalls=lpathcalls+1
+ if type(pattern)=="table" then
+ return pattern
+ else
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcached=lpathcached+1
else
- local parsed = cache[pattern]
- if parsed then
- lpathcached = lpathcached + 1
+ parsed=lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern=pattern
+ local np=#parsed
+ if np==0 then
+ parsed={ pattern=pattern,register_self,state="parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
else
- parsed = lpegmatch(pathparser,pattern)
- if parsed then
- parsed.pattern = pattern
- local np = #parsed
- if np == 0 then
- parsed = { pattern = pattern, register_self, state = "parsing error" }
- report_lpath("parsing error in '%s'",pattern)
- lshow(parsed)
- else
- -- we could have done this with a more complex parser but this
- -- is cleaner
- local pi = parsed[1]
- if pi.axis == "auto-child" then
- if false then
- add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
- parsed[1] = register_auto_descendant_or_self
- else
- add_comment(parsed, "auto-child replaced by auto-descendant")
- parsed[1] = register_auto_descendant
- end
- elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
-                        add_comment(parsed, "initial-child removed") -- we could also make it an auto-self
- remove(parsed,1)
- end
- local np = #parsed -- can have changed
- if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
- end
- end
+ local pi=parsed[1]
+ if pi.axis=="auto-child" then
+ if false then
+ add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
+ parsed[1]=register_auto_descendant_or_self
else
- parsed = { pattern = pattern }
- end
- cache[pattern] = parsed
- if trace_lparse and not trace_lprofile then
- lshow(parsed)
- end
- end
- return parsed
+ add_comment(parsed,"auto-child replaced by auto-descendant")
+ parsed[1]=register_auto_descendant
+ end
+ elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
+ add_comment(parsed,"initial-child removed")
+ remove(parsed,1)
+ end
+ local np=#parsed
+ if np>1 then
+ local pnp=parsed[np]
+ if pnp.kind=="nodes" and pnp.nodetest==true then
+ local nodes=pnp.nodes
+ if nodes[1]==true and nodes[2]==false and nodes[3]==false then
+ add_comment(parsed,"redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed={ pattern=pattern }
+ end
+ cache[pattern]=parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
end
+ return parsed
+ end
end
-
-xml.lpath = lpath
-
--- we can move all calls inline and then merge the trace back;
--- technically we can combine axis and the next nodes, which is
--- what we did before, but this is a bit cleaner (although slower too);
--- interestingly, it's not that much faster when we
--- go inline
---
--- beware: we need to return a collection even when we filter,
--- else the (simple) cache gets messed up
-
--- caching found lookups doesn't save that much (max .1 sec on an 8 sec run)
--- and it also messes up finalizers
-
--- watch out: when there is a finalizer, it's always called, as there
--- can be cases where a finalizer returns (or does) something even when
--- there is no match; an example of this is count()
-
-local profiled = { } xml.profiled = profiled
-
+xml.lpath=lpath
+local profiled={} xml.profiled=profiled
local function profiled_apply(list,parsed,nofparsed,order)
- local p = profiled[parsed.pattern]
- if p then
- p.tested = p.tested + 1
- else
- p = { tested = 1, matched = 0, finalized = 0 }
- profiled[parsed.pattern] = p
- end
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected) -- no check on # here
- p.matched = p.matched + 1
- p.finalized = p.finalized + 1
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- p.finalized = p.finalized + 1
- return collected
- end
- return nil
- end
- end
- if collected then
- p.matched = p.matched + 1
+ local p=profiled[parsed.pattern]
+ if p then
+ p.tested=p.tested+1
+ else
+ p={ tested=1,matched=0,finalized=0 }
+ profiled[parsed.pattern]=p
+ end
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ p.matched=p.matched+1
+ p.finalized=p.finalized+1
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ p.finalized=p.finalized+1
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ if collected then
+ p.matched=p.matched+1
+ end
+ return collected
end
-
local function traced_apply(list,parsed,nofparsed,order)
- if trace_lparse then
- lshow(parsed)
- end
- report_lpath("collecting: %s",parsed.pattern)
- report_lpath("root tags : %s",tagstostring(list))
- report_lpath("order : %s",order or "unset")
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
- return collected
- end
- return nil
- end
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ return collected
end
-
local function normal_apply(list,parsed,nofparsed,order)
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- local axis = pi.axis
- if axis ~= "self" then
- collected = apply_axis[axis](collected)
- end
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- return pi.finalizer(collected)
- end
- if not collected or #collected == 0 then
- local pf = i < nofparsed and parsed[nofparsed].finalizer
- if pf then
- return pf(collected) -- can be anything
- end
- return nil
- end
- end
- return collected
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ local axis=pi.axis
+ if axis~="self" then
+ collected=apply_axis[axis](collected)
+ end
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected==0 then
+ local pf=i<nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected)
+ end
+ return nil
+ end
+ end
+ return collected
end
-
-
local function applylpath(list,pattern)
- if not list then
- return
- end
- local parsed = cache[pattern]
- if parsed then
- lpathcalls = lpathcalls + 1
- lpathcached = lpathcached + 1
- elseif type(pattern) == "table" then
- lpathcalls = lpathcalls + 1
- parsed = pattern
- else
- parsed = lpath(pattern) or pattern
- end
- if not parsed then
- return
- end
- local nofparsed = #parsed
- if nofparsed == 0 then
- return -- something is wrong
- end
- if not trace_lpath then
- return normal_apply ({ list },parsed,nofparsed,list.mi)
- elseif trace_lprofile then
- return profiled_apply({ list },parsed,nofparsed,list.mi)
- else
- return traced_apply ({ list },parsed,nofparsed,list.mi)
- end
-end
-
-xml.applylpath = applylpath -- takes a table as first argument, which is what xml.filter will do
-
---[[ldx--
-<p>This is the main filter function. It returns whatever is asked for.</p>
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return applylpath(root,pattern)
-end
-
--- internal (parsed)
-
-expressions.child = function(e,pattern)
- return applylpath(e,pattern) -- todo: cache
-end
-expressions.count = function(e,pattern) -- what if pattern == empty or nil
- local collected = applylpath(e,pattern) -- todo: cache
- return pattern and (collected and #collected) or 0
-end
-
--- external
-
-expressions.oneof = function(s,...) -- slow
- local t = {...} for i=1,#t do if s == t[i] then return true end end return false
-end
-expressions.error = function(str)
- xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
- return false
-end
-expressions.undefined = function(s)
- return s == nil
-end
-
-expressions.quit = function(s)
- if s or s == nil then
- quit_expression = true
- end
- return true
-end
-
-expressions.print = function(...)
- print(...)
- return true
-end
-
-expressions.contains = find
-expressions.find = find
-expressions.upper = upper
-expressions.lower = lower
-expressions.number = tonumber
-expressions.boolean = toboolean
-
+ if not list then
+ return
+ end
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcalls=lpathcalls+1
+ lpathcached=lpathcached+1
+ elseif type(pattern)=="table" then
+ lpathcalls=lpathcalls+1
+ parsed=pattern
+ else
+ parsed=lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed=#parsed
+ if nofparsed==0 then
+ return
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+xml.applylpath=applylpath
+function xml.filter(root,pattern)
+ return applylpath(root,pattern)
+end
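A minimal usage sketch for the collector above, assuming xml.convert and xml.tostring from elsewhere in this file:

local root = xml.convert("<a><b>one</b><b>two</b></a>")
local collected = xml.filter(root,"a/b")   -- plain collection, no finalizer
if collected then
  for c=1,#collected do
    print(xml.tostring(collected[c]))
  end
end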
+expressions.child=function(e,pattern)
+ return applylpath(e,pattern)
+end
+expressions.count=function(e,pattern)
+ local collected=applylpath(e,pattern)
+ return pattern and (collected and #collected) or 0
+end
+expressions.oneof=function(s,...)
+ for i=1,select("#",...) do
+ if s==select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+expressions.error=function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+expressions.undefined=function(s)
+ return s==nil
+end
+expressions.quit=function(s)
+ if s or s==nil then
+ quit_expression=true
+ end
+ return true
+end
+expressions.print=function(...)
+ print(...)
+ return true
+end
+expressions.contains=find
+expressions.find=find
+expressions.upper=upper
+expressions.lower=lower
+expressions.number=tonumber
+expressions.boolean=toboolean
function expressions.contains(str,pattern)
- local t = type(str)
- if t == "string" then
- if find(str,pattern) then
- return true
- end
- elseif t == "table" then
- for i=1,#str do
- local d = str[i]
- if type(d) == "string" and find(d,pattern) then
- return true
- end
- end
+ local t=type(str)
+ if t=="string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t=="table" then
+ for i=1,#str do
+ local d=str[i]
+ if type(d)=="string" and find(d,pattern) then
+ return true
+ end
end
- return false
+ end
+ return false
end
-
--- user interface
-
local function traverse(root,pattern,handle)
- report_lpath("use 'xml.selection' instead for '%s'",pattern)
- local collected = applylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- handle(r,r.dt,e.ni)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local r=e.__p__
+ handle(r,r.dt,e.ni)
end
+ end
end
-
local function selection(root,pattern,handle)
- local collected = applylpath(root,pattern)
- if collected then
- if handle then
- for c=1,#collected do
- handle(collected[c])
- end
- else
- return collected
- end
- end
-end
-
-xml.traverse = traverse -- old method, r, d, k
-xml.selection = selection -- new method, simple handle
-
-
--- generic function finalizer (independent of namespace)
-
-local function dofunction(collected,fnc)
- if collected then
- local f = functions[fnc]
- if f then
- for c=1,#collected do
- f(collected[c])
- end
- else
- report_lpath("unknown function '%s'",fnc)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
end
+ end
end
-
-finalizers.xml["function"] = dofunction
-finalizers.tex["function"] = dofunction
-
--- functions
-
-expressions.text = function(e,n)
- local rdt = e.__p__.dt
- return (rdt and rdt[n]) or ""
-end
-
-expressions.name = function(e,n) -- ns + tg
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = type(e) == "table" and e
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
+xml.traverse=traverse
+xml.selection=selection
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f=functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- if found then
- local ns, tg = found.rn or found.ns or "", found.tg
- if ns ~= "" then
- return ns .. ":" .. tg
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+finalizers.xml["function"]=dofunction
+finalizers.tex["function"]=dofunction
+expressions.text=function(e,n)
+ local rdt=e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+expressions.name=function(e,n)
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=type(e)=="table" and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
else
- return tg
+ n=n-1
end
- else
- return ""
+ end
end
-end
-
-expressions.tag = function(e,n) -- only tg
- if not e then
- return ""
+ end
+ if found then
+ local ns,tg=found.rn or found.ns or "",found.tg
+ if ns~="" then
+ return ns..":"..tg
else
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = (type(e) == "table") and e -- seems to fail
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- return (found and found.tg) or ""
+ return tg
end
+ else
+ return ""
+ end
end
-
---[[ldx--
-<p>Often using an iterator looks nicer in the code than passing handler
-functions. The <l n='lua'/> book describes how to use coroutines for that
-purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
-code like:</p>
-
-<typing>
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-</typing>
---ldx]]--
-
-local wrap, yield = coroutine.wrap, coroutine.yield
-
-function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
- else
- return wrap(function() for c=1,#collected do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
+expressions.tag=function(e,n)
+ if not e then
+ return ""
+ else
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=(type(e)=="table") and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+local dummy=function() end
+function xml.elements(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ end
+end
+function xml.collected(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ return collected[c]
+ end
end
- return wrap(function() end)
-end
-
-function xml.collected(root,pattern,reverse) -- e
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
- else
- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
- end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ return collected[c]
+ end
end
- return wrap(function() end)
+ end
end
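Usage stays the same as in the documentation example that accompanied the coroutine-based version above: xml.elements yields (root, data, index) triples, while xml.collected yields the elements themselves (xml.load comes from elsewhere in the file):

for r,d,k in xml.elements(xml.load("text.xml"),"title") do
  print(d[k])     -- the element, addressed through its parent's data table
end
for e in xml.collected(xml.load("text.xml"),"title") do
  print(e)
end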
-
--- handy
-
function xml.inspect(collection,pattern)
- pattern = pattern or "."
- for e in xml.collected(collection,pattern or ".") do
- report_lpath("pattern %q\n\n%s\n",pattern,xml.tostring(e))
- end
+ pattern=pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+local function split(e)
+ local dt=e.dt
+ if dt then
+ for i=1,#dt do
+ local dti=dt[i]
+ if type(dti)=="string" then
+ dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti=gsub(dti,"[\n\r]+","\n\n")
+ dt[i]=dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
end
@@ -9520,102 +10059,68 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-mis'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local xml, lpeg, string = xml, lpeg, string
+package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
-local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub, match = string.format, string.gsub, string.match
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
---[[ldx--
-<p>The following helper functions best belong to the <t>lxml-ini</t>
-module. Some are here because we need them in the <t>mk</t>
-document and other manuals, others came up when playing with
-this module. Since this module is also used in <l n='mtxrun'/> we've
-put them here instead of loading more modules there when needed.</p>
---ldx]]--
-
-local function xmlgsub(t,old,new) -- will be replaced
- local dt = t.dt
- if dt then
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "string" then
- dt[k] = gsub(v,old,new)
- else
- xmlgsub(v,old,new)
- end
- end
- end
-end
+-- original size: 3684, stripped down to: 1957
-
-function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
- if d and k then
- local dkm = d[k-1]
- if dkm and type(dkm) == "string" then
- local s = match(dkm,"\n(%s+)")
- xmlgsub(dk,"\n"..rep(" ",#s),"\n")
- end
- end
-end
-
-
-
--- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
---
--- 1021:0335:0287:0247
-
--- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
---
--- 1559:0257:0288:0190 (last one suggested by roberto)
-
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
-local normal = (1 - S("<&>"))^0
-local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
-local escaped = Cs(normal * (special * normal)^0)
-
--- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
-
-local normal = (1 - S"&")^0
-local special = P("&lt;")/"<" + P("&gt;")/">" + P("&amp;")/"&"
-local unescaped = Cs(normal * (special * normal)^0)
-
--- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
-
-local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-
-xmlpatterns.escaped = escaped
-xmlpatterns.unescaped = unescaped
-xmlpatterns.cleansed = cleansed
-
-function xml.escaped (str) return lpegmatch(escaped,str) end
+if not modules then modules={} end modules ['lxml-mis']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local xml,lpeg,string=xml,lpeg,string
+local concat=table.concat
+local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
+local format,gsub,match=string.format,string.gsub,string.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+local function xmlgsub(t,old,new)
+ local dt=t.dt
+ if dt then
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="string" then
+ dt[k]=gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+function xml.stripleadingspaces(dk,d,k)
+ if d and k then
+ local dkm=d[k-1]
+ if dkm and type(dkm)=="string" then
+ local s=match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
+local normal=(1-S("<&>"))^0
+local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
+local escaped=Cs(normal*(special*normal)^0)
+local normal=(1-S"&")^0
+local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
+local unescaped=Cs(normal*(special*normal)^0)
+local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
+xmlpatterns.escaped=escaped
+xmlpatterns.unescaped=unescaped
+xmlpatterns.cleansed=cleansed
+function xml.escaped (str) return lpegmatch(escaped,str) end
function xml.unescaped(str) return lpegmatch(unescaped,str) end
-function xml.cleansed (str) return lpegmatch(cleansed,str) end
-
--- this might move
-
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
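Illustrative calls for the three helpers just defined (expected results in the comments):

print(xml.escaped("1 < 2 & 3 > 0"))        -- 1 &lt; 2 &amp; 3 &gt; 0
print(xml.unescaped("1 &lt; 2 &amp; 3"))   -- 1 < 2 & 3
print(xml.cleansed("<b>bold</b> text"))    -- bold text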
function xml.fillin(root,pattern,str,check)
- local e = xml.first(root,pattern)
- if e then
- local n = #e.dt
- if not check or n == 0 or (n == 1 and e.dt[1] == "") then
- e.dt = { str }
- end
+ local e=xml.first(root,pattern)
+ if e then
+ local n=#e.dt
+ if not check or n==0 or (n==1 and e.dt[1]=="") then
+ e.dt={ str }
end
+ end
end
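A small usage sketch for xml.fillin; the pattern and text are made up, and with check set the content is only filled in when the element is still empty:

local root = xml.convert("<metadata><title></title></metadata>")   -- xml.convert assumed from elsewhere
xml.fillin(root,"metadata/title","Untitled",true)   -- fills, because <title/> is still empty
xml.fillin(root,"metadata/title","Ignored",true)    -- leaves "Untitled" alone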
@@ -9623,765 +10128,692 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-aux'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- not all functions here make sense anymore but we keep them for
--- compatibility reasons
-
-local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
-local report_xml = logs.reporter("xml")
-
-local xml = xml
-
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
-local xmlinheritedconvert = xml.inheritedconvert
-local xmlapplylpath = xml.applylpath
-local xmlfilter = xml.filter
-
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
-local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
-local utfbyte = utf.byte
+-- original size: 23804, stripped down to: 16817
+if not modules then modules={} end modules ['lxml-aux']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local report_xml=logs.reporter("xml")
+local xml=xml
+local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlinheritedconvert=xml.inheritedconvert
+local xmlapplylpath=xml.applylpath
+local xmlfilter=xml.filter
+local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
+local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
+local utfbyte=utf.byte
local function report(what,pattern,c,e)
- report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
-
local function withelements(e,handle,depth)
- if e and handle then
- local edt = e.dt
- if edt then
- depth = depth or 0
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- handle(e,depth)
- withelements(e,handle,depth+1)
- end
- end
+ if e and handle then
+ local edt=e.dt
+ if edt then
+ depth=depth or 0
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
end
+ end
end
+ end
end
-
-xml.withelements = withelements
-
-function xml.withelement(e,n,handle) -- slow
- if e and n ~= 0 and handle then
- local edt = e.dt
- if edt then
- if n > 0 then
- for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == 1 then
- handle(ei)
- return
- else
- n = n - 1
- end
- end
- end
- elseif n < 0 then
- for i=#edt,1,-1 do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == -1 then
- handle(ei)
- return
- else
- n = n + 1
- end
- end
- end
+xml.withelements=withelements
+function xml.withelement(e,n,handle)
+ if e and n~=0 and handle then
+ local edt=e.dt
+ if edt then
+ if n>0 then
+ for i=1,#edt do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==1 then
+ handle(ei)
+ return
+ else
+ n=n-1
end
+ end
end
+ elseif n<0 then
+ for i=#edt,1,-1 do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==-1 then
+ handle(ei)
+ return
+ else
+ n=n+1
+ end
+ end
+ end
+ end
end
+ end
end
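A usage sketch for the two traversal helpers above: withelements walks all descendant elements depth-first and hands each one to the handler together with its depth, while withelement picks the n-th child element (negative n counts from the end); xml.convert is assumed from elsewhere:

local root = xml.convert("<a><b/><c><d/></c></a>")
xml.withelements(root,function(e,depth)
  print(depth,e.tg)             -- 0 a, 1 b, 1 c, 2 d (depth-first)
end)
xml.withelement(root,1,function(e)
  print(e.tg)                   -- a: the first child element of the root node
end)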
-
function xml.each(root,pattern,handle,reverse)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
- end
- end
- return collected
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
+ return collected
+ end
end
-
function xml.processattributes(root,pattern,handle)
- local collected = xmlapplylpath(root,pattern)
- if collected and handle then
- for c=1,#collected do
- handle(collected[c].at)
- end
+ local collected=xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
end
- return collected
+ end
+ return collected
end
-
---[[ldx--
-<p>The following functions collect elements and texts.</p>
---ldx]]--
-
--- are these still needed -> lxml-cmp.lua
-
-function xml.collect(root, pattern)
- return xmlapplylpath(root,pattern)
+function xml.collect(root,pattern)
+ return xmlapplylpath(root,pattern)
end
-
-function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlapplylpath(root,pattern)
- if collected and flatten then
- local xmltostring = xml.tostring
- for c=1,#collected do
- collected[c] = xmltostring(collected[c].dt)
- end
+function xml.collecttexts(root,pattern,flatten)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring=xml.tostring
+ for c=1,#collected do
+ collected[c]=xmltostring(collected[c].dt)
end
- return collected or { }
+ end
+ return collected or {}
end
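A short usage sketch: with flatten set, each matched element is replaced by the serialized form of its content (xml.convert assumed from elsewhere):

local root  = xml.convert("<doc><p>one</p><p>two</p></doc>")
local texts = xml.collecttexts(root,"doc/p",true)
-- texts should now be { "one", "two" }, each entry run through xml.tostring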
-
-function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace then
- t[n] = tg
- elseif ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
- end
- return t
+function xml.collect_tags(root,pattern,nonamespace)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ local t,n={},0
+ for c=1,#collected do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace then
+ t[n]=tg
+ elseif ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
end
+ return t
+ end
end
-
---[[ldx--
-<p>We've now arrived at the functions that manipulate the tree.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
local function redo_ni(d)
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "table" then
- dk.ni = k
- end
+ for k=1,#d do
+ local dk=d[k]
+ if type(dk)=="table" then
+ dk.ni=k
end
+ end
end
-
local function xmltoelement(whatever,root)
- if not whatever then
- return nil
- end
- local element
- if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root) -- beware, not really a root
- else
- element = whatever -- we assume a table
- end
- if element.error then
- return whatever -- string
- end
- if element then
- end
- return element
-end
-
-xml.toelement = xmltoelement
-
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever)=="string" then
+ element=xmlinheritedconvert(whatever,root)
+ else
+ element=whatever
+ end
+ if element.error then
+ return whatever
+ end
+ if element then
+ end
+ return element
+end
+xml.toelement=xmltoelement
local function copiedelement(element,newparent)
- if type(element) == "string" then
- return element
- else
- element = xmlcopy(element).dt
- if newparent and type(element) == "table" then
- element.__p__ = newparent
- end
- return element
+ if type(element)=="string" then
+ return element
+ else
+ element=xmlcopy(element).dt
+ if newparent and type(element)=="table" then
+ element.__p__=newparent
end
+ return element
+ end
end
-
function xml.delete(root,pattern)
- if not pattern or pattern == "" then
- local p = root.__p__
+ if not pattern or pattern=="" then
+ local p=root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d=p.dt
+ remove(d,root.ni)
+ redo_ni(d)
+ end
+ else
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
if p then
- if trace_manipulations then
- report('deleting',"--",c,root)
- end
- local d = p.dt
- remove(d,root.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- else
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
- end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- end
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d=p.dt
+ remove(d,e.ni)
+ redo_ni(d)
end
+ end
end
+ end
end
-
function xml.replace(root,pattern,whatever)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('replacing',pattern,c,e)
- end
- local d = p.dt
- d[e.ni] = copiedelement(element,p)
- redo_ni(d) -- probably not needed
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
end
+ local d=p.dt
+ d[e.ni]=copiedelement(element,p)
+ redo_ni(d)
+ end
end
+ end
end
-
local function wrap(e,wrapper)
- local t = {
- rn = e.rn,
- tg = e.tg,
- ns = e.ns,
- at = e.at,
- dt = e.dt,
- __p__ = e,
- }
- setmetatable(t,getmetatable(e))
- e.rn = wrapper.rn or e.rn or ""
- e.tg = wrapper.tg or e.tg or ""
- e.ns = wrapper.ns or e.ns or ""
- e.at = fastcopy(wrapper.at)
- e.dt = { t }
+ local t={
+ rn=e.rn,
+ tg=e.tg,
+ ns=e.ns,
+ at=e.at,
+ dt=e.dt,
+ __p__=e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn=wrapper.rn or e.rn or ""
+ e.tg=wrapper.tg or e.tg or ""
+ e.ns=wrapper.ns or e.ns or ""
+ e.at=fastcopy(wrapper.at)
+ e.dt={ t }
end
-
function xml.wrap(root,pattern,whatever)
- if whatever then
- local wrapper = xmltoelement(whatever,root)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if trace_manipulations then
- report('wrapping',pattern,c,e)
- end
- wrap(e,wrapper)
- end
+ if whatever then
+ local wrapper=xmltoelement(whatever,root)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
end
- else
- wrap(root,xmltoelement(pattern))
+ wrap(e,wrapper)
+ end
end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
end
-
local function inject_element(root,pattern,whatever,prepend)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function inject_e(e)
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
- end
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- inject_e(collected)
- else
- for c=1,#collected do
- inject_e(collected[c])
- end
- end
-end
-
-local function insert_element(root,pattern,whatever,before) -- todo: element als functie
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function insert_e(e)
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- insert_e(collected)
- else
- for c=1,#collected do
- insert_e(collected[c])
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r=e.__p__
+ local d,k,rri=r.dt,e.ni,r.ri
+ local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be,af
+ local cp=copiedelement(element,e)
+ if prepend then
+ be,af=cp,edt
+ else
+ be,af=edt,cp
+ end
+ local bn=#be
+ for i=1,#af do
+ bn=bn+1
+ be[bn]=af[i]
+ end
+ if rri then
+ r.dt[rri].dt=be
+ else
+ d[k].dt=be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ elseif collected.tg then
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+local function insert_element(root,pattern,whatever,before)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r=e.__p__
+ local d,k=r.dt,e.ni
+ if not before then
+ k=k+1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ elseif collected.tg then
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
end
+ end
end
-
-xml.insert_element = insert_element
-xml.insertafter = insert_element
-xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
-xml.injectafter = inject_element
-xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-
+xml.insert_element=insert_element
+xml.insertafter=insert_element
+xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter=inject_element
+xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
- local collected = xmlapplylpath(xmldata,pattern)
- if collected then
- for c=1,#collected do
- local ek = collected[c]
- local name = nil
- local ekdt = ek.dt
- local ekat = ek.at
- local epdt = ek.__p__.dt
- if not attribute or attribute == "" then
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
- end
- if not name then
- for a in gmatch(attribute or "href","([^|]+)") do
- name = ekat[a]
- if name then break end
- end
- end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- elseif ekat["parse"] == "text" then
- -- for the moment hard coded
- epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
- else
- local xi = xmlinheritedconvert(data,xmldata)
- if not xi then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- else
- if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
- end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
- end
- end
+ pattern=pattern or 'include'
+ loaddata=loaddata or io.loaddata
+ local collected=xmlapplylpath(xmldata,pattern)
+ if collected then
+ for c=1,#collected do
+ local ek=collected[c]
+ local name=nil
+ local ekdt=ek.dt
+ local ekat=ek.at
+ local epdt=ek.__p__.dt
+ if not attribute or attribute=="" then
+ name=(type(ekdt)=="table" and ekdt[1]) or ekdt
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name=ekat[a]
+ if name then break end
+ end
+ end
+ local data=(name and name~="" and loaddata(name)) or ""
+ if data=="" then
+ epdt[ek.ni]=""
+ elseif ekat["parse"]=="text" then
+ epdt[ek.ni]=xml.escaped(data)
+ else
+ local xi=xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni]=""
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni]=xml.body(xi)
end
+ end
end
+ end
end
-
-xml.include = include
-
+xml.include=include
local function stripelement(e,nolines,anywhere)
- local edt = e.dt
- if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- n = n + 1
- t[n] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- n = n + 1
- t[n] = str
- end
- end
- end
- e.dt = t
+ local edt=e.dt
+ if edt then
+ if anywhere then
+ local t,n={},0
+ for e=1,#edt do
+ local str=edt[e]
+ if type(str)~="string" then
+ n=n+1
+ t[n]=str
+ elseif str~="" then
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s*(.-)%s*$","%1")
+ if str~="" then
+ n=n+1
+ t[n]=str
+ end
+ end
+ end
+ e.dt=t
+ else
+ if #edt>0 then
+ local str=edt[1]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt,1)
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
- end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[nedt] = str
- end
- end
- end
- end
- end
- return e -- convenient
-end
-
-xml.stripelement = stripelement
-
-function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
- local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
- if collected then
- for i=1,#collected do
- stripelement(collected[i],nolines,anywhere)
- end
- end
-end
-
-local function renamespace(root, oldspace, newspace) -- fast variant
- local ndt = #root.dt
- for i=1,ndt or 0 do
- local e = root[i]
- if type(e) == "table" then
- if e.ns == oldspace then
- e.ns = newspace
- if e.rn then
- e.rn = newspace
- end
- end
- local edt = e.dt
- if edt then
- renamespace(edt, oldspace, newspace)
- end
- end
- end
-end
-
-xml.renamespace = renamespace
-
-function xml.remaptag(root, pattern, newtg)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].tg = newtg
- end
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s+","")
+ if str=="" then
+ remove(edt,1)
+ else
+ edt[1]=str
+ end
+ end
+ end
+ local nedt=#edt
+ if nedt>0 then
+ local str=edt[nedt]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt)
+ else
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"%s+$","")
+ if str=="" then
+ remove(edt)
+ else
+ edt[nedt]=str
+ end
+ end
+ end
+ end
+ end
+ return e
+end
+xml.stripelement=stripelement
+function xml.strip(root,pattern,nolines,anywhere)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
+local function renamespace(root,oldspace,newspace)
+ local ndt=#root.dt
+ for i=1,ndt or 0 do
+ local e=root[i]
+ if type(e)=="table" then
+ if e.ns==oldspace then
+ e.ns=newspace
+ if e.rn then
+ e.rn=newspace
+ end
+ end
+ local edt=e.dt
+ if edt then
+ renamespace(edt,oldspace,newspace)
+ end
+ end
+ end
+end
+xml.renamespace=renamespace
+function xml.remaptag(root,pattern,newtg)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg=newtg
end
+ end
end
-
-function xml.remapnamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].ns = newns
- end
+function xml.remapnamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns=newns
end
+ end
end
-
-function xml.checknamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if (not e.rn or e.rn == "") and e.ns == "" then
- e.rn = newns
- end
- end
+function xml.checknamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if (not e.rn or e.rn=="") and e.ns=="" then
+ e.rn=newns
+ end
end
+ end
end
-
-function xml.remapname(root, pattern, newtg, newns, newrn)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- e.tg, e.ns, e.rn = newtg, newns, newrn
- end
+function xml.remapname(root,pattern,newtg,newns,newrn)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ e.tg,e.ns,e.rn=newtg,newns,newrn
end
+ end
end
-
---[[ldx--
-<p>Helper (for q2p).</p>
---ldx]]--
-
function xml.cdatatotext(e)
- local dt = e.dt
- if #dt == 1 then
- local first = dt[1]
- if first.tg == "@cd@" then
- e.dt = first.dt
- end
+ local dt=e.dt
+ if #dt==1 then
+ local first=dt[1]
+ if first.tg=="@cd@" then
+ e.dt=first.dt
+ end
+ else
+ end
+end
+function xml.texttocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(dt)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+function xml.elementtocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(e)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
+local entities=characters and characters.entities or nil
+local builtinentities=xml.builtinentities
+function xml.addentitiesdoctype(root,option)
+ if not entities then
+ require("char-ent")
+ entities=characters.entities
+ end
+ if entities and root and root.tg=="@rt@" and root.statistics then
+ local list={}
+ local hexify=option=="hexadecimal"
+ for k,v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e=entities[k]
+ if not e then
+ e=format("[%s]",k)
+ elseif hexify then
+ e=format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1]=format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt=root.dt
+ local n=dt[1].tg=="@pi@" and 2 or 1
+ if #list>0 then
+ insert(dt,n,{ "\n" })
+ insert(dt,n,{
+ tg="@dt@",
+ dt={ format("Something [\n%s\n] ",concat(list)) },
+ ns="",
+ special=true,
+ })
+ insert(dt,n,{ "\n\n" })
else
- -- maybe option
- end
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.texttocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[1<b>2</b>3]]></x>
-
-function xml.texttocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(dt) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.tocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[<a>1<b>2</b>3</a>]]></x>
-
-function xml.elementtocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(e) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
-xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
-
-local entities = characters and characters.entities or nil
-local builtinentities = xml.builtinentities
-
-function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
- if not entities then
- require("char-ent")
- entities = characters.entities
- end
- if entities and root and root.tg == "@rt@" and root.statistics then
- local list = { }
- local hexify = option == "hexadecimal"
- for k, v in table.sortedhash(root.statistics.entities.names) do
- if not builtinentities[k] then
- local e = entities[k]
- if not e then
- e = format("[%s]",k)
- elseif hexify then
- e = format("&#%05X;",utfbyte(k))
- end
- list[#list+1] = format(" <!ENTITY %s %q >",k,e)
- end
- end
- local dt = root.dt
- local n = dt[1].tg == "@pi@" and 2 or 1
- if #list > 0 then
- insert(dt, n, { "\n" })
- insert(dt, n, {
- tg = "@dt@", -- beware, doctype is unparsed
- dt = { format("Something [\n%s\n] ",concat(list)) },
- ns = "",
- special = true,
- })
- insert(dt, n, { "\n\n" })
- else
- -- insert(dt, n, { table.serialize(root.statistics) })
- end
end
-end
-
--- local str = [==[
--- <?xml version='1.0' standalone='yes' ?>
--- <root>
--- <a>test &nbsp; test &#123; test</a>
--- <b><![CDATA[oeps]]></b>
--- </root>
--- ]==]
---
--- local x = xml.convert(str)
--- xml.addentitiesdoctype(x,"hexadecimal")
--- print(x)
-
---[[ldx--
-<p>Here are a few synonyms.</p>
---ldx]]--
-
-xml.all = xml.each
-xml.insert = xml.insertafter
-xml.inject = xml.injectafter
-xml.after = xml.insertafter
-xml.before = xml.insertbefore
-xml.process = xml.each
-
--- obsolete
-
-xml.obsolete = xml.obsolete or { }
-local obsolete = xml.obsolete
-
-xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
-xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
-xml.delete_element = xml.delete obsolete.delete_element = xml.delete
-xml.replace_element = xml.replace obsolete.replace_element = xml.replace
-xml.each_element = xml.each obsolete.each_element = xml.each
-xml.process_elements = xml.process obsolete.process_elements = xml.process
-xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
-xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
-xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
-xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
-xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
-xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
-xml.inject_element = xml.inject obsolete.inject_element = xml.inject
-xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
-xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
-xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
-
--- new (probably ok)
-
+ end
+end
+xml.all=xml.each
+xml.insert=xml.insertafter
+xml.inject=xml.injectafter
+xml.after=xml.insertafter
+xml.before=xml.insertbefore
+xml.process=xml.each
+xml.obsolete=xml.obsolete or {}
+local obsolete=xml.obsolete
+xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
+xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
+xml.delete_element=xml.delete obsolete.delete_element=xml.delete
+xml.replace_element=xml.replace obsolete.replace_element=xml.replace
+xml.each_element=xml.each obsolete.each_element=xml.each
+xml.process_elements=xml.process obsolete.process_elements=xml.process
+xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
+xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
+xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
+xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
+xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
+xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
+xml.inject_element=xml.inject obsolete.inject_element=xml.inject
+xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
+xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
+xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
function xml.cdata(e)
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
end
- return ""
+ end
+ return ""
end
-
function xml.finalizers.xml.cdata(collected)
- if collected then
- local e = collected[1]
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
- end
- end
- return ""
-end
-
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
- tg = "@cm@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- })
-end
-
-function xml.setcdata(e,str) -- also setcomment
- e.dt = { {
- tg = "@cd@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- } }
+ if collected then
+ local e=collected[1]
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+function xml.insertcomment(e,str,n)
+ table.insert(e.dt,n or 1,{
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcdata(e,str)
+ e.dt={ {
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
end
-
--- maybe helpers like this will move to an autoloader
-
function xml.separate(x,pattern)
- local collected = xmlapplylpath(x,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local d = e.dt
- if d == x then
- report_xml("warning: xml.separate changes root")
- x = d
- end
- local t, n = { "\n" }, 1
- local i, nd = 1, #d
- while i <= nd do
- while i <= nd do
- local di = d[i]
- if type(di) == "string" then
- if di == "\n" or find(di,"^%s+$") then -- first test is speedup
- i = i + 1
- else
- d[i] = strip(di)
- break
- end
- else
- break
- end
- end
- if i > nd then
- break
- end
- t[n+1] = "\n"
- t[n+2] = d[i]
- t[n+3] = "\n"
- n = n + 3
- i = i + 1
+ local collected=xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local d=e.dt
+ if d==x then
+ report_xml("warning: xml.separate changes root")
+ x=d
+ end
+ local t,n={ "\n" },1
+ local i,nd=1,#d
+ while i<=nd do
+ while i<=nd do
+ local di=d[i]
+ if type(di)=="string" then
+ if di=="\n" or find(di,"^%s+$") then
+ i=i+1
+ else
+ d[i]=strip(di)
+ break
end
- t[n+1] = "\n"
- setmetatable(t,getmetatable(d))
- e.dt = t
- end
- end
- return x
+ else
+ break
+ end
+ end
+ if i>nd then
+ break
+ end
+ t[n+1]="\n"
+ t[n+2]=d[i]
+ t[n+3]="\n"
+ n=n+3
+ i=i+1
+ end
+ t[n+1]="\n"
+ setmetatable(t,getmetatable(d))
+ e.dt=t
+ end
+ end
+ return x
+end
+local helpers=xml.helpers or {}
+xml.helpers=helpers
+local function normal(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)=="string" and str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+local function recurse(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)~="string" then
+ recurse(str,action,recursive)
+ elseif str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
end
@@ -10389,450 +10821,377 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-xml'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
-local concat = table.concat
-local find, lower, upper = string.find, string.lower, string.upper
+-- original size: 10274, stripped down to: 7538
-local xml = xml
-
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmlnewhandlers = xml.newhandlers
-
-local function first(collected) -- wrong ?
- return collected and collected[1]
+if not modules then modules={} end modules ['lxml-xml']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local find,lower,upper=string.find,string.lower,string.upper
+local xml=xml
+local finalizers=xml.finalizers.xml
+local xmlfilter=xml.filter
+local xmltostring=xml.tostring
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmlnewhandlers=xml.newhandlers
+local function first(collected)
+ return collected and collected[1]
end
-
local function last(collected)
- return collected and collected[#collected]
+ return collected and collected[#collected]
end
-
local function all(collected)
- return collected
+ return collected
end
-
--- local function reverse(collected)
--- if collected then
--- local nc = #collected
--- if nc > 0 then
--- local reversed, r = { }, 0
--- for c=nc,1,-1 do
--- r = r + 1
--- reversed[r] = collected[c]
--- end
--- return reversed
--- else
--- return collected
--- end
--- end
--- end
-
-local reverse = table.reversed
-
+local reverse=table.reversed
local function attribute(collected,name)
- if collected and #collected > 0 then
- local at = collected[1].at
- return at and at[name]
- end
+ if collected and #collected>0 then
+ local at=collected[1].at
+ return at and at[name]
+ end
end
-
local function att(id,name)
- local at = id.at
- return at and at[name]
+ local at=id.at
+ return at and at[name]
end
-
local function count(collected)
- return collected and #collected or 0
+ return collected and #collected or 0
end
-
local function position(collected,n)
- if not collected then
- return 0
- end
- local nc = #collected
- if nc == 0 then
- return 0
- end
- n = tonumber(n) or 0
- if n < 0 then
- return collected[nc + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc=#collected
+ if nc==0 then
+ return 0
+ end
+ n=tonumber(n) or 0
+ if n<0 then
+ return collected[nc+n+1]
+ elseif n>0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
end
-
local function match(collected)
- return collected and #collected > 0 and collected[1].mi or 0 -- match
+ return collected and #collected>0 and collected[1].mi or 0
end
-
local function index(collected)
- return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
+ return collected and #collected>0 and collected[1].ni or 0
end
-
local function attributes(collected,arguments)
- if collected and #collected > 0 then
- local at = collected[1].at
- if arguments then
- return at[arguments]
- elseif next(at) then
- return at -- all of them
+ if collected and #collected>0 then
+ local at=collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at
+ end
+ end
+end
+local function chainattribute(collected,arguments)
+ if collected and #collected>0 then
+ local e=collected[1]
+ while e do
+ local at=e.at
+ if at then
+ local a=at[arguments]
+ if a then
+ return a
end
+ else
+ break
+ end
+ e=e.__p__
end
+ end
+ return ""
end
-
-local function chainattribute(collected,arguments) -- todo: optional levels
- if collected and #collected > 0 then
- local e = collected[1]
- while e do
- local at = e.at
- if at then
- local a = at[arguments]
- if a then
- return a
- end
- else
- break -- error
- end
- e = e.__p__
- end
- end
+local function raw(collected)
+ if collected and #collected>0 then
+ local e=collected[1] or collected
+ return e and xmltostring(e) or ""
+ else
return ""
+ end
end
-
-local function raw(collected) -- hybrid (not much different from text so it might go)
- if collected and #collected > 0 then
- local e = collected[1] or collected
- return e and xmltostring(e) or "" -- only first as we cannot concat function
- else
- return ""
- end
-end
-
---
-
-local xmltexthandler = xmlnewhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
- escape = false,
+local xmltexthandler=xmlnewhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+ escape=false,
}
-
local function xmltotext(root)
- local dt = root.dt
- if not dt then
- return ""
- end
- local nt = #dt -- string or table
- if nt == 0 then
- return ""
- elseif nt == 1 and type(dt[1]) == "string" then
- return dt[1] -- no escaping of " ' < > &
- else
- return xmlserialize(root,xmltexthandler) or ""
- end
-end
-
---
-
-local function text(collected) -- hybrid
- if collected then -- no # test here !
- local e = collected[1] or collected -- why fallback to element, how about cdata
- return e and xmltotext(e) or ""
- else
- return ""
- end
+ local dt=root.dt
+ if not dt then
+ return ""
+ end
+ local nt=#dt
+ if nt==0 then
+ return ""
+ elseif nt==1 and type(dt[1])=="string" then
+ return dt[1]
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+local function text(collected)
+ if collected then
+ local e=collected[1] or collected
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
end
-
local function texts(collected)
- if not collected then
- return { } -- why no nil
- end
- local nc = #collected
- if nc == 0 then
- return { } -- why no nil
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
- end
- return t
+ if not collected then
+ return {}
+ end
+ local nc=#collected
+ if nc==0 then
+ return {}
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ if e and e.dt then
+ n=n+1
+ t[n]=e.dt
+ end
+ end
+ return t
end
-
local function tag(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ return c and c.tg
end
-
local function name(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if not c then
- -- sorry
- elseif c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ if not c then
+ elseif c.ns=="" then
+ return c.tg
+ else
+ return c.ns..":"..c.tg
+ end
end
-
local function tags(collected,nonamespace)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace or ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
end
- return t
+ end
+ return t
end
-
local function empty(collected,spacesonly)
- if not collected then
- return true
- end
- local nc = #collected
- if nc == 0 then
- return true
- end
- for c=1,nc do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then
- return false
- elseif spacesonly and not find(edk,"%S") then
- return false
- end
- elseif n > 1 then
- return false
- end
- end
- end
- end
+ if not collected then
return true
-end
-
-finalizers.first = first
-finalizers.last = last
-finalizers.all = all
-finalizers.reverse = reverse
-finalizers.elements = all
-finalizers.default = all
-finalizers.attribute = attribute
-finalizers.att = att
-finalizers.count = count
-finalizers.position = position
-finalizers.match = match
-finalizers.index = index
-finalizers.attributes = attributes
-finalizers.chainattribute = chainattribute
-finalizers.text = text
-finalizers.texts = texts
-finalizers.tag = tag
-finalizers.name = name
-finalizers.tags = tags
-finalizers.empty = empty
-
--- shortcuts -- we could support xmlfilter(id,pattern,first)
-
+ end
+ local nc=#collected
+ if nc==0 then
+ return true
+ end
+ for c=1,nc do
+ local e=collected[c]
+ if e then
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==1 then
+ local edk=edt[1]
+ local typ=type(edk)
+ if typ=="table" then
+ return false
+ elseif edk~="" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n>1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+finalizers.first=first
+finalizers.last=last
+finalizers.all=all
+finalizers.reverse=reverse
+finalizers.elements=all
+finalizers.default=all
+finalizers.attribute=attribute
+finalizers.att=att
+finalizers.count=count
+finalizers.position=position
+finalizers.match=match
+finalizers.index=index
+finalizers.attributes=attributes
+finalizers.chainattribute=chainattribute
+finalizers.text=text
+finalizers.texts=texts
+finalizers.tag=tag
+finalizers.name=name
+finalizers.tags=tags
+finalizers.empty=empty
function xml.first(id,pattern)
- return first(xmlfilter(id,pattern))
+ return first(xmlfilter(id,pattern))
end
-
function xml.last(id,pattern)
- return last(xmlfilter(id,pattern))
+ return last(xmlfilter(id,pattern))
end
-
function xml.count(id,pattern)
- return count(xmlfilter(id,pattern))
+ return count(xmlfilter(id,pattern))
end
-
function xml.attribute(id,pattern,a,default)
- return attribute(xmlfilter(id,pattern),a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
end
-
function xml.raw(id,pattern)
- if pattern then
- return raw(xmlfilter(id,pattern))
- else
- return raw(id)
- end
-end
-
-function xml.text(id,pattern) -- brrr either content or element (when cdata)
- if pattern then
- -- return text(xmlfilter(id,pattern))
- local collected = xmlfilter(id,pattern)
- return collected and #collected > 0 and xmltotext(collected[1]) or ""
- elseif id then
- -- return text(id)
- return xmltotext(id) or ""
- else
- return ""
- end
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+function xml.text(id,pattern)
+ if pattern then
+ local collected=xmlfilter(id,pattern)
+ return collected and #collected>0 and xmltotext(collected[1]) or ""
+ elseif id then
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
end
-
-xml.content = text
-
---
-
-function xml.position(id,pattern,n) -- element
- return position(xmlfilter(id,pattern),n)
+xml.content=text
+function xml.position(id,pattern,n)
+ return position(xmlfilter(id,pattern),n)
end
-
-function xml.match(id,pattern) -- number
- return match(xmlfilter(id,pattern))
+function xml.match(id,pattern)
+ return match(xmlfilter(id,pattern))
end
-
function xml.empty(id,pattern,spacesonly)
- return empty(xmlfilter(id,pattern),spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
-
-xml.all = xml.filter
-xml.index = xml.position
-xml.found = xml.filter
-
--- a nice one:
-
+xml.all=xml.filter
+xml.index=xml.position
+xml.found=xml.filter
local function totable(x)
- local t = { }
- for e in xmlcollected(x[1] or x,"/*") do
- t[e.tg] = xmltostring(e.dt) or ""
- end
- return next(t) and t or nil
-end
-
-xml.table = totable
-finalizers.table = totable
-
+ local t={}
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg]=xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+xml.table=totable
+finalizers.table=totable
local function textonly(e,t)
- if e then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- textonly(e,t)
- else
- t[#t+1] = e
- end
- end
+ if e then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ textonly(e,t)
+ else
+ t[#t+1]=e
end
+ end
end
- return t
+ end
+ return t
end
-
-function xml.textonly(e) -- no pattern
- return concat(textonly(e,{}))
+function xml.textonly(e)
+ return concat(textonly(e,{}))
end
-
---
-
--- local x = xml.convert("<x><a x='+'>1<B>2</B>3</a></x>")
--- xml.filter(x,"**/lowerall()") print(x)
--- xml.filter(x,"**/upperall()") print(x)
-
function finalizers.lowerall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = lower(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[lower(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=lower(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[lower(k)]=v
end
+ e.at=t
+ end
end
+ end
end
-
function finalizers.upperall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = upper(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[upper(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=upper(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[upper(k)]=v
end
+ e.at=t
+ end
end
+ end
end
@@ -10840,245 +11199,331 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-ini'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
-local concat = table.concat
-local next, type = next, type
+package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+-- original size: 6351, stripped down to: 4919
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_initialization = logs.reporter("resolvers","initialization")
-
-local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-
--- The code here used to be part of a data-res but for convenience
--- we now split it over multiple files. As this file is now the
--- starting point we introduce resolvers here.
+if not modules then modules={} end modules ['trac-xml']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local formatters=string.formatters
+local reporters=logs.reporters
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmltext=xml.text
+local xmlfirst=xml.first
+local function showhelp(specification,...)
+ local root=xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs=xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories=select("#",...)==0 and true or table.tohash {... }
+ local nofcategories=xml.count(root,"/application/flags/category")
+ local report=specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname=category.at.name or ""
+ if wantedcategories==true or wantedcategories[categoryname] then
+ if nofcategories>1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name=flag.at.name
+ local value=flag.at.value
+ local short=xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title=xmltext(xmlfirst(category,"/title"))
+ if title and title~="" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command=xmltext(xmlfirst(example,"/command"))
+ local comment=xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment=xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+local reporthelp=reporters.help
+local exporthelp=reporters.export
+local function xmlfound(t)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="table" then
+ return false
+ end
+ if type(helpinfo)~="string" then
+ helpinfo="Warning: no helpinfo found."
+ t.helpinfo=helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript=environment.ownscript
+ local helpdata=false
+ if ownscript then
+ local helpfile=file.join(file.pathpart(ownscript),helpinfo)
+ helpdata=io.loaddata(helpfile)
+ if helpdata=="" then
+ helpdata=false
+ end
+ end
+ if not helpdata then
+ local helpfile=resolvers.findfile(helpinfo,"tex")
+ helpdata=helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata~="" then
+ helpinfo=helpdata
+ else
+ helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo=helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods=="" then
+ methods=environment.arguments["exporthelp"]
+ end
+ if not filename or filename=="" then
+ filename=environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters=logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods=="all" then
+ methods=table.keys(exporters)
+ elseif type(methods)=="string" then
+ methods=utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename)~="string" or filename=="" then
+ filename=false
+ elseif file.pathpart(filename)=="" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method=methods[i]
+ local exporter=exporters[method]
+ if exporter then
+ local result=exporter(t,method)
+ if result and result~="" then
+ if filename then
+ local fullname=file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
-resolvers = resolvers or { }
-local resolvers = resolvers
--- We don't want the kpse library to kick in. Also, we want to be able to
--- execute programs. Control over execution is implemented later.
+end -- of closure
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
+do -- create closure to overcome 200 locals limit
-if kpse and kpse.default_texmfcnf then
- local default_texmfcnf = kpse.default_texmfcnf()
- -- looks more like context:
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
- default_texmfcnf = gsub(default_texmfcnf,"$HOME","home:")
- --
- environment.default_texmfcnf = default_texmfcnf
-end
+package.loaded["data-ini"] = package.loaded["data-ini"] or true
-kpse = { original = kpse }
+-- original size: 7898, stripped down to: 5501
-setmetatable(kpse, {
- __index = function(kp,name)
- report_initialization("fatal error: kpse library is accessed (key: %s)",name)
- os.exit()
- end
+if not modules then modules={} end modules ['data-ini']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
+local next,type=next,type
+local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_initialization=logs.reporter("resolvers","initialization")
+local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
+resolvers=resolvers or {}
+local resolvers=resolvers
+texconfig.kpse_init=false
+texconfig.shell_escape='t'
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
+ local default_texmfcnf=kpse.default_texmfcnf()
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
+ default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
+ environment.default_texmfcnf=default_texmfcnf
+end
+kpse={ original=kpse }
+setmetatable(kpse,{
+ __index=function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
} )
-
--- First we check a couple of environment variables. Some might be
--- set already but we need then later on. We start with the system
--- font path.
-
do
-
- local osfontdir = osgetenv("OSFONTDIR")
-
- if osfontdir and osfontdir ~= "" then
- -- ok
- elseif osname == "windows" then
- ossetenv("OSFONTDIR","c:/windows/fonts//")
- elseif osname == "macosx" then
- ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-
+ local osfontdir=osgetenv("OSFONTDIR")
+ if osfontdir and osfontdir~="" then
+ elseif osname=="windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname=="macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
end
-
--- Next comes the user's home path. We need this as later on we have
--- to replace ~ with its value.
-
do
-
- local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
-
- if not homedir or homedir == "" then
- homedir = char(127) -- we need a value, later we wil trigger on it
- end
-
- homedir = file.collapsepath(homedir)
-
- ossetenv("HOME", homedir) -- can be used in unix cnf files
- ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
-
- environment.homedir = homedir
-
+ local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
+ if not homedir or homedir=="" then
+ homedir=char(127)
+ end
+ homedir=file.collapsepath(homedir)
+ ossetenv("HOME",homedir)
+ ossetenv("USERPROFILE",homedir)
+ environment.homedir=homedir
end
-
--- The following code sets the name of the own binary and its
--- path. This is fallback code as we have os.selfdir now.
-
do
-
- local args = environment.originalarguments or arg -- this needs a cleanup
-
- local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
- local ownpath = environment.ownpath or os.selfdir
-
- ownbin = file.collapsepath(ownbin)
- ownpath = file.collapsepath(ownpath)
-
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- end
- local binary = ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and filedirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- local path = osgetenv("PATH")
- if path then
- for p in gmatch(path,"[^"..io.pathseparator.."]+") do
- local b = filejoin(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- report_initialization("following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- report_initialization("unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
+ local args=environment.originalarguments or arg
+ if not environment.ownmain then
+ environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+ local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath=environment.ownpath or os.selfdir
+ ownbin=file.collapsepath(ownbin)
+ ownpath=file.collapsepath(ownpath)
+ if not ownpath or ownpath=="" or ownpath=="unset" then
+ ownpath=args[-1] or arg[-1]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath=="" then
+ ownpath=args[-0] or arg[-0]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary=ownbin
+ if not ownpath or ownpath=="" then
+ ownpath=ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath=="" then
+ if os.binsuffix~="" then
+ binary=file.replacesuffix(binary,os.binsuffix)
+ end
+ local path=osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b=filejoin(p,binary)
+ if lfs.isfile(b) then
+ local olddir=lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp=lfs.currentdir()
+ if trace_locating and p~=pp then
+ report_initialization("following symlink %a to %a",p,pp)
+ end
+ ownpath=pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path %a",p)
+ end
+ ownpath=p
end
+ break
+ end
end
- if not ownpath or ownpath == "" then
- ownpath = "."
- report_initialization("forcing fallback ownpath .")
- elseif trace_locating then
- report_initialization("using ownpath '%s'",ownpath)
- end
+ end
end
-
- environment.ownbin = ownbin
- environment.ownpath = ownpath
-
+ if not ownpath or ownpath=="" then
+ ownpath="."
+ report_initialization("forcing fallback to ownpath %a",ownpath)
+ elseif trace_locating then
+ report_initialization("using ownpath %a",ownpath)
+ end
+ end
+ environment.ownbin=ownbin
+ environment.ownpath=ownpath
end
-
-resolvers.ownpath = environment.ownpath
-
+resolvers.ownpath=environment.ownpath
function resolvers.getownpath()
- return environment.ownpath
+ return environment.ownpath
end
-
--- The self variables permit us to use only a few (or even no)
--- environment variables.
-
do
-
- local ownpath = environment.ownpath or dir.current()
-
- if ownpath then
- ossetenv('SELFAUTOLOC', file.collapsepath(ownpath))
- ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/.."))
- ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. "/../.."))
- else
- report_initialization("error: unable to locate ownpath")
- os.exit()
- end
-
-end
-
--- The running os:
-
--- todo: check is context sits here os.platform is more trustworthy
--- that the bin check as mtx-update runs from another path
-
-local texos = environment.texos or osgetenv("TEXOS")
-local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
-
-if not texos or texos == "" then
- texos = file.basename(texmfos)
-end
-
-ossetenv('TEXMFOS', texmfos) -- full bin path
-ossetenv('TEXOS', texos) -- partial bin parent
-ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-
-environment.texos = texos
-environment.texmfos = texmfos
-
--- The current root:
-
-local texroot = environment.texroot or osgetenv("TEXROOT")
-
-if not texroot or texroot == "" then
- texroot = osgetenv('SELFAUTOPARENT')
- ossetenv('TEXROOT',texroot)
-end
-
-environment.texroot = file.collapsepath(texroot)
-
--- Tracing. Todo ...
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
+ local ownpath=environment.ownpath or dir.current()
+ if ownpath then
+ ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
+ ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
+ end
+end
+local texos=environment.texos or osgetenv("TEXOS")
+local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
+if not texos or texos=="" then
+ texos=file.basename(texmfos)
+end
+ossetenv('TEXMFOS',texmfos)
+ossetenv('TEXOS',texos)
+ossetenv('SELFAUTOSYSTEM',os.platform)
+environment.texos=texos
+environment.texmfos=texmfos
+local texroot=environment.texroot or osgetenv("TEXROOT")
+if not texroot or texroot=="" then
+ texroot=osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+environment.texroot=file.collapsepath(texroot)
+if profiler then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
end
-
-resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-
--- todo:
-
--- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
--- profiler.start("luatex-profile.log")
--- end
-
--- a forward definition
-
if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+ function resolvers.resolve (s) return s end
+ function resolvers.unresolve(s) return s end
+ function resolvers.repath (s) return s end
end
@@ -11086,1150 +11531,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-exp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
-local concat, sort = table.concat, table.sort
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
-local type, next = type, next
+package.loaded["data-exp"] = package.loaded["data-exp"] or true
-local ostype = os.type
-local collapsepath = file.collapsepath
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_expansions = logs.reporter("resolvers","expansions")
-
-local resolvers = resolvers
-
--- As this bit of code is somewhat special it gets its own module. After
--- all, when working on the main resolver code, I don't want to scroll
--- past this every time. See data-obs.lua for the gsub variant.
+-- original size: 14643, stripped down to: 9517
+if not modules then modules={} end modules ['data-exp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
+local concat,sort=table.concat,table.sort
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local Ct,Cs,Cc,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.P,lpeg.C,lpeg.S
+local type,next=type,next
+local ostype=os.type
+local collapsepath=file.collapsepath
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_expansions=logs.reporter("resolvers","expansions")
+local resolvers=resolvers
local function f_first(a,b)
- local t, n = { }, 0
- for s in gmatch(b,"[^,]+") do
- n = n + 1 ; t[n] = a .. s
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(b,"[^,]+") do
+ n=n+1;t[n]=a..s
+ end
+ return concat(t,",")
end
-
local function f_second(a,b)
- local t, n = { }, 0
- for s in gmatch(a,"[^,]+") do
- n = n + 1 ; t[n] = s .. b
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=s..b
+ end
+ return concat(t,",")
end
-
--- kpsewhich --expand-braces '{a,b}{c,d}'
--- ac:bc:ad:bd
-
--- old {a,b}{c,d} => ac ad bc bd
---
--- local function f_both(a,b)
--- local t, n = { }, 0
--- for sa in gmatch(a,"[^,]+") do
--- for sb in gmatch(b,"[^,]+") do
--- n = n + 1 ; t[n] = sa .. sb
--- end
--- end
--- return concat(t,",")
--- end
---
--- new {a,b}{c,d} => ac bc ad bd
-
local function f_both(a,b)
- local t, n = { }, 0
- for sb in gmatch(b,"[^,]+") do -- and not sa
- for sa in gmatch(a,"[^,]+") do -- sb
- n = n + 1 ; t[n] = sa .. sb
- end
- end
- return concat(t,",")
-end
-
-local left = P("{")
-local right = P("}")
-local var = P((1 - S("{}" ))^0)
-local set = P((1 - S("{},"))^0)
-local other = P(1)
-
-local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
-local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
-local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
-local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-
-local stripper_1 = lpeg.stripper ("{}@")
-local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-
-local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
- if trace_expansions then
- report_expansions("expanding variable '%s'",str)
- end
- local t, ok, done = newlist or { }, false, false
- local n = #t
- str = lpegmatch(replacer_1,str)
+ local t,n={},0
+ for sb in gmatch(b,"[^,]+") do
+ for sa in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=sa..sb
+ end
+ end
+ return concat(t,",")
+end
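-- Illustrative sketch, not part of the diff: the expansion order implemented by
-- f_both above, matching the removed comment about kpsewhich --expand-braces:
-- {a,b}{c,d} expands to ac, bc, ad, bd (outer loop over the second set, inner
-- loop over the first). This standalone mimic uses only the string library.
local function expand_pair(a,b)
  local t = {}
  for sb in b:gmatch("[^,]+") do
    for sa in a:gmatch("[^,]+") do
      t[#t+1] = sa .. sb
    end
  end
  return table.concat(t,",")
end
print(expand_pair("a,b","c,d")) --> ac,bc,ad,bd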
+local left=P("{")
+local right=P("}")
+local var=P((1-S("{}" ))^0)
+local set=P((1-S("{},"))^0)
+local other=P(1)
+local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
+local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
+local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
+local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
+local stripper_1=lpeg.stripper ("{}@")
+local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
+local function splitpathexpr(str,newlist,validate)
+ if trace_expansions then
+ report_expansions("expanding variable %a",str)
+ end
+ local t,ok,done=newlist or {},false,false
+ local n=#t
+ str=lpegmatch(replacer_1,str)
+ repeat
+ local old=str
repeat
- local old = str
- repeat
- local old = str
- str = lpegmatch(l_first, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_second,str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_both, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_rest, str)
- until old == str
- until old == str -- or not find(str,"{")
- str = lpegmatch(stripper_1,str)
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then
- n = n + 1 ; t[n] = s
- end
- end
- else
- for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- report_expansions("% 4i: %s",k,t[k])
- end
+ local old=str
+ str=lpegmatch(l_first,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_second,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_both,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_rest,str)
+ until old==str
+ until old==str
+ str=lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s=validate(s)
+ if s then
+ n=n+1
+ t[n]=s
+ end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ n=n+1
+ t[n]=s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
end
- return t
+ end
+ return t
end
-
--- We could make the previous one public.
-
local function validate(s)
- s = collapsepath(s) -- already keeps the //
- return s ~= "" and not find(s,"^!*unset/*$") and s
+ s=collapsepath(s)
+ return s~="" and not find(s,"^!*unset/*$") and s
end
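-- Illustrative sketch, not part of the diff: what the validate helper above accepts
-- and rejects. The real function first runs file.collapsepath (which keeps a
-- trailing //); this standalone mimic only shows the filtering step.
local function validate(s)
  return s ~= "" and not s:find("^!*unset/*$") and s
end
print(validate("/opt/texmf//")) --> /opt/texmf//
print(validate("!!unset//"))    --> false
print(validate(""))             --> false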
-
-resolvers.validatedpath = validate -- keeps the trailing //
-
+resolvers.validatedpath=validate
function resolvers.expandedpathfromlist(pathlist)
- local newlist = { }
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- return newlist
-end
-
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
-local cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
+ local newlist={}
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ return newlist
+end
+local cleanup=lpeg.replacer {
+ { "!","" },
+ { "\\","/" },
}
-
-function resolvers.cleanpath(str) -- tricky, maybe only simple paths
- local doslashes = (P("\\")/"/" + 1)^0
- local donegation = (P("!") /"" )^0
- local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
- if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return "" -- special case
- else
- return lpegmatch(cleanup,str)
- end
- end
- else
- local dohome = ((P("~")+P("$HOME"))/homedir)^0
- local cleanup = Cs(donegation * dohome * doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
- end
- end
- return resolvers.cleanpath(str)
-end
-
--- print(resolvers.cleanpath(""))
--- print(resolvers.cleanpath("!"))
--- print(resolvers.cleanpath("~"))
--- print(resolvers.cleanpath("~/test"))
--- print(resolvers.cleanpath("!~/test"))
--- print(resolvers.cleanpath("~/test~test"))
-
--- This one strips quotes and funny tokens.
-
-local expandhome = P("~") / "$HOME" -- environment.homedir
-
-local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
-local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = Cs(
- lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+function resolvers.cleanpath(str)
+ local doslashes=(P("\\")/"/"+1)^0
+ local donegation=(P("!")/"" )^0
+ local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return ""
+ else
+ return lpegmatch(cleanup,str)
+ end
+ end
+ else
+ local dohome=((P("~")+P("$HOME"))/homedir)^0
+ local cleanup=Cs(donegation*dohome*doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
+ end
+ return resolvers.cleanpath(str)
+end
+local expandhome=P("~")/"$HOME"
+local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
+local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
+local dostring=(expandhome+1 )^0
+local stripper=Cs(
+ lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
)
-
-function resolvers.checkedvariable(str) -- assumes str is a string
- return type(str) == "string" and lpegmatch(stripper,str) or str
-end
-
--- The path splitter:
-
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
-local cache = { }
-
------ splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
-local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
-
-local backslashswapper = lpeg.replacer("\\","/")
-
-local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
- if str then
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
- found = { }
- local noffound = 0
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- noffound = noffound + 1
- found[noffound] = s
- end
- end
- if trace_expansions then
- report_expansions("splitting path specification '%s'",str)
- for k=1,noffound do
- report_expansions("% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
- end
+function resolvers.checkedvariable(str)
+ return type(str)=="string" and lpegmatch(stripper,str) or str
+end
+local cache={}
+local splitter=lpeg.tsplitat(";")
+local backslashswapper=lpeg.replacer("\\","/")
+local function splitconfigurationpath(str)
+ if str then
+ local found=cache[str]
+ if not found then
+ if str=="" then
+ found={}
+ else
+ local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
+ found={}
+ local noffound=0
+ for i=1,#split do
+ local s=split[i]
+ if not find(s,"^{*unset}*") then
+ noffound=noffound+1
+ found[noffound]=s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification %a",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
end
- return found
+ cache[str]=found
+ end
end
+ return found
+ end
end
-
-resolvers.splitconfigurationpath = splitconfigurationpath
-
+resolvers.splitconfigurationpath=splitconfigurationpath
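-- Illustrative sketch, not part of the diff: how splitconfigurationpath above cuts
-- a configuration specification on ";" after swapping backslashes, silently
-- dropping "{unset}" placeholders. The specification below is hypothetical.
local spec = [[c:\texmf-local;/opt/texmf;{unset}]]
local found = {}
for s in spec:gsub("\\","/"):gmatch("[^;]+") do
  if not s:find("^{*unset}*") then
    found[#found+1] = s
  end
end
print(table.concat(found," | ")) --> c:/texmf-local | /opt/texmf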
function resolvers.splitpath(str)
- if type(str) == 'table' then
- return str
- else
- return splitconfigurationpath(str)
- end
+ if type(str)=='table' then
+ return str
+ else
+ return splitconfigurationpath(str)
+ end
end
-
function resolvers.joinpath(str)
- if type(str) == 'table' then
- return file.joinpath(str)
- else
- return str
- end
-end
-
--- The next function scans directories and returns a hash where the
--- entries are either strings or tables.
-
--- starting with . or .. etc or funny char
-
-
-
-
--- a lot of this caching can be stripped away when we have ssd's everywhere
---
--- we could cache all the (sub)paths here if needed
-
-local attributes, directory = lfs.attributes, lfs.dir
-
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-local timer = { }
-local scanned = { }
-local nofscans = 0
-local scancache = { }
-
+ if type(str)=='table' then
+ return file.joinpath(str)
+ else
+ return str
+ end
+end
+local attributes,directory=lfs.attributes,lfs.dir
+local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer={}
+local scanned={}
+local nofscans=0
+local scancache={}
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ n=n+1
+ local f=files[name]
+ if f then
+ if type(f)=='string' then
+ files[name]={ f,path }
+ else
+ f[#f+1]=path
+ end
+ else
+ files[name]=path
+ local lower=lower(name)
+ if name~=lower then
+ files["remap:"..lower]=name
+ r=r+1
+ end
+ end
+ elseif mode=='directory' then
+ m=m+1
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files, n, m, r = scan(files,spec,dirs[i],n,m,r)
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files,n,m,r=scan(files,spec,dirs[i],n,m,r)
end
- scancache[sub(full,1,-2)] = files
- return files, n, m, r
+ end
+ scancache[sub(full,1,-2)]=files
+ return files,n,m,r
end
-
-local fullcache = { }
-
+local fullcache={}
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = fullcache[realpath]
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1] = realpath
- fullcache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
-end
-
-local function simplescan(files,spec,path) -- first match only, no map and such
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if not files[name] then
- -- only first match
- files[name] = path
- end
- elseif mode == 'directory' then
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files,n,m,r=scan({},realpath..'/',"",0,0,0)
+ files.__path__=path
+ files.__files__=n
+ files.__directories__=m
+ files.__remappings__=r
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ fullcache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
+end
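-- Illustrative sketch, not part of the diff: the shape of the table that
-- resolvers.scanfiles above returns for a (hypothetical) branch. Unique names map
-- to a single path string, duplicates collect a list of paths, and mixed-case
-- names get an extra lowercase "remap:" entry.
local files = {
  __path__             = "selfautoparent:texmf-whatever", -- the scanned branch
  __files__            = 4,                               -- files seen (n)
  __directories__      = 4,                               -- directories seen (m)
  __remappings__       = 1,                               -- uppercase remaps (r)
  ["context.mkiv"]     = "tex/context/base",
  ["metafun.mpiv"]     = { "metapost/context/base", "metapost/context/extra" },
  ["README.TEX"]       = "doc/context",
  ["remap:readme.tex"] = "README.TEX",
}
print(files.__files__, files.__directories__, files.__remappings__)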
+local function simplescan(files,spec,path)
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ if not files[name] then
+ files[name]=path
+ end
+ elseif mode=='directory' then
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files = simplescan(files,spec,dirs[i])
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files=simplescan(files,spec,dirs[i])
end
- return files
+ end
+ return files
end
-
-local simplecache = { }
-local nofsharedscans = 0
-
+local simplecache={}
+local nofsharedscans=0
function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = simplecache[realpath]
- if not files then
- files = scancache[realpath]
- if files then
- nofsharedscans = nofsharedscans + 1
- end
- end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files = simplescan({ },realpath .. '/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
- if usecache then
- scanned[#scanned+1] = realpath
- simplecache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=simplecache[realpath]
+ if not files then
+ files=scancache[realpath]
+ if files then
+ nofsharedscans=nofsharedscans+1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files=simplescan({},realpath..'/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ simplecache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
end
-
function resolvers.scandata()
- table.sort(scanned)
- return {
- n = nofscans,
- shared = nofsharedscans,
- time = statistics.elapsedtime(timer),
- paths = scanned,
- }
+ table.sort(scanned)
+ return {
+ n=nofscans,
+ shared=nofsharedscans,
+ time=statistics.elapsedtime(timer),
+ paths=scanned,
+ }
end
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
+package.loaded["data-env"] = package.loaded["data-env"] or true
+
+-- original size: 8762, stripped down to: 6484
-local lower, gsub = string.lower, string.gsub
-
-local resolvers = resolvers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
-
-local formats = allocate()
-local suffixes = allocate()
-local dangerous = allocate()
-local suffixmap = allocate()
-
-resolvers.formats = formats
-resolvers.suffixes = suffixes
-resolvers.dangerous = dangerous
-resolvers.suffixmap = suffixmap
-
-local relations = allocate { -- todo: handlers also here
- core = {
- ofm = { -- will become obsolete
- names = { "ofm", "omega font metric", "omega font metrics" },
- variable = 'OFMFONTS',
- suffixes = { 'ofm', 'tfm' },
- },
- ovf = { -- will become obsolete
- names = { "ovf", "omega virtual font", "omega virtual fonts" },
- variable = 'OVFFONTS',
- suffixes = { 'ovf', 'vf' },
- },
- tfm = {
- names = { "tfm", "tex font metric", "tex font metrics" },
- variable = 'TFMFONTS',
- suffixes = { 'tfm' },
- },
- vf = {
- names = { "vf", "virtual font", "virtual fonts" },
- variable = 'VFFONTS',
- suffixes = { 'vf' },
- },
- otf = {
- names = { "otf", "opentype", "opentype font", "opentype fonts"},
- variable = 'OPENTYPEFONTS',
- suffixes = { 'otf' },
- },
- ttf = {
- names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" },
- variable = 'TTFONTS',
- suffixes = { 'ttf', 'ttc', 'dfont' },
- },
- afm = {
- names = { "afm", "adobe font metric", "adobe font metrics" },
- variable = "AFMFONTS",
- suffixes = { "afm" },
- },
- pfb = {
- names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" },
- variable = 'T1FONTS',
- suffixes = { 'pfb', 'pfa' },
- },
- fea = {
- names = { "fea", "font feature", "font features", "font feature file", "font feature files" },
- variable = 'FONTFEATURES',
- suffixes = { 'fea' },
- },
- cid = {
- names = { "cid", "cid map", "cid maps", "cid file", "cid files" },
- variable = 'FONTCIDMAPS',
- suffixes = { 'cid', 'cidmap' },
- },
- fmt = {
- names = { "fmt", "format", "tex format" },
- variable = 'TEXFORMATS',
- suffixes = { 'fmt' },
- },
- mem = { -- will become obsolete
- names = { 'mem', "metapost format" },
- variable = 'MPMEMS',
- suffixes = { 'mem' },
- },
- mp = {
- names = { "mp" },
- variable = 'MPINPUTS',
- suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
- },
- tex = {
- names = { "tex" },
- variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
- },
- icc = {
- names = { "icc", "icc profile", "icc profiles" },
- variable = 'ICCPROFILES',
- suffixes = { 'icc' },
- },
- texmfscripts = {
- names = { "texmfscript", "texmfscripts", "script", "scripts" },
- variable = 'TEXMFSCRIPTS',
- suffixes = { 'rb', 'pl', 'py' },
- },
- lua = {
- names = { "lua" },
- variable = 'LUAINPUTS',
- suffixes = { 'lua', 'luc', 'tma', 'tmc' },
- },
- lib = {
- names = { "lib" },
- variable = 'CLUAINPUTS',
- suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' },
- },
- bib = {
- names = { 'bib' },
- suffixes = { 'bib' },
- },
- bst = {
- names = { 'bst' },
- suffixes = { 'bst' },
- },
- fontconfig = {
- names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
- variable = 'FONTCONFIG_PATH',
- },
+if not modules then modules={} end modules ['data-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local lower,gsub=string.lower,string.gsub
+local resolvers=resolvers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+local suffixonly=file.suffixonly
+local formats=allocate()
+local suffixes=allocate()
+local dangerous=allocate()
+local suffixmap=allocate()
+resolvers.formats=formats
+resolvers.suffixes=suffixes
+resolvers.dangerous=dangerous
+resolvers.suffixmap=suffixmap
+local luasuffixes=utilities.lua.suffixes
+local relations=allocate {
+ core={
+ ofm={
+ names={ "ofm","omega font metric","omega font metrics" },
+ variable='OFMFONTS',
+ suffixes={ 'ofm','tfm' },
+ },
+ ovf={
+ names={ "ovf","omega virtual font","omega virtual fonts" },
+ variable='OVFFONTS',
+ suffixes={ 'ovf','vf' },
+ },
+ tfm={
+ names={ "tfm","tex font metric","tex font metrics" },
+ variable='TFMFONTS',
+ suffixes={ 'tfm' },
},
- obsolete = {
- enc = {
- names = { "enc", "enc files", "enc file", "encoding files", "encoding file" },
- variable = 'ENCFONTS',
- suffixes = { 'enc' },
- },
- map = {
- names = { "map", "map files", "map file" },
- variable = 'TEXFONTMAPS',
- suffixes = { 'map' },
- },
- lig = {
- names = { "lig files", "lig file", "ligature file", "ligature files" },
- variable = 'LIGFONTS',
- suffixes = { 'lig' },
- },
- opl = {
- names = { "opl" },
- variable = 'OPLFONTS',
- suffixes = { 'opl' },
- },
- ovp = {
- names = { "ovp" },
- variable = 'OVPFONTS',
- suffixes = { 'ovp' },
- },
+ vf={
+ names={ "vf","virtual font","virtual fonts" },
+ variable='VFFONTS',
+ suffixes={ 'vf' },
},
- kpse = { -- subset
- base = {
- names = { 'base', "metafont format" },
- variable = 'MFBASES',
- suffixes = { 'base', 'bas' },
- },
- cmap = {
- names = { 'cmap', 'cmap files', 'cmap file' },
- variable = 'CMAPFONTS',
- suffixes = { 'cmap' },
- },
- cnf = {
- names = { 'cnf' },
- suffixes = { 'cnf' },
- },
- web = {
- names = { 'web' },
- suffixes = { 'web', 'ch' }
- },
- cweb = {
- names = { 'cweb' },
- suffixes = { 'w', 'web', 'ch' },
- },
- gf = {
- names = { 'gf' },
- suffixes = { '<resolution>gf' },
- },
- mf = {
- names = { 'mf' },
- variable = 'MFINPUTS',
- suffixes = { 'mf' },
- },
- mft = {
- names = { 'mft' },
- suffixes = { 'mft' },
- },
- pk = {
- names = { 'pk' },
- suffixes = { '<resolution>pk' },
- },
+ otf={
+ names={ "otf","opentype","opentype font","opentype fonts"},
+ variable='OPENTYPEFONTS',
+ suffixes={ 'otf' },
},
+ ttf={
+ names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
+ variable='TTFONTS',
+ suffixes={ 'ttf','ttc','dfont' },
+ },
+ afm={
+ names={ "afm","adobe font metric","adobe font metrics" },
+ variable="AFMFONTS",
+ suffixes={ "afm" },
+ },
+ pfb={
+ names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
+ variable='T1FONTS',
+ suffixes={ 'pfb','pfa' },
+ },
+ fea={
+ names={ "fea","font feature","font features","font feature file","font feature files" },
+ variable='FONTFEATURES',
+ suffixes={ 'fea' },
+ },
+ cid={
+ names={ "cid","cid map","cid maps","cid file","cid files" },
+ variable='FONTCIDMAPS',
+ suffixes={ 'cid','cidmap' },
+ },
+ fmt={
+ names={ "fmt","format","tex format" },
+ variable='TEXFORMATS',
+ suffixes={ 'fmt' },
+ },
+ mem={
+ names={ 'mem',"metapost format" },
+ variable='MPMEMS',
+ suffixes={ 'mem' },
+ },
+ mp={
+ names={ "mp" },
+ variable='MPINPUTS',
+ suffixes={ 'mp','mpvi','mpiv','mpii' },
+ },
+ tex={
+ names={ "tex" },
+ variable='TEXINPUTS',
+ suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ },
+ icc={
+ names={ "icc","icc profile","icc profiles" },
+ variable='ICCPROFILES',
+ suffixes={ 'icc' },
+ },
+ texmfscripts={
+ names={ "texmfscript","texmfscripts","script","scripts" },
+ variable='TEXMFSCRIPTS',
+ suffixes={ 'rb','pl','py' },
+ },
+ lua={
+ names={ "lua" },
+ variable='LUAINPUTS',
+ suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ },
+ lib={
+ names={ "lib" },
+ variable='CLUAINPUTS',
+ suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
+ },
+ bib={
+ names={ 'bib' },
+ suffixes={ 'bib' },
+ },
+ bst={
+ names={ 'bst' },
+ suffixes={ 'bst' },
+ },
+ fontconfig={
+ names={ 'fontconfig','fontconfig file','fontconfig files' },
+ variable='FONTCONFIG_PATH',
+ },
+ },
+ obsolete={
+ enc={
+ names={ "enc","enc files","enc file","encoding files","encoding file" },
+ variable='ENCFONTS',
+ suffixes={ 'enc' },
+ },
+ map={
+ names={ "map","map files","map file" },
+ variable='TEXFONTMAPS',
+ suffixes={ 'map' },
+ },
+ lig={
+ names={ "lig files","lig file","ligature file","ligature files" },
+ variable='LIGFONTS',
+ suffixes={ 'lig' },
+ },
+ opl={
+ names={ "opl" },
+ variable='OPLFONTS',
+ suffixes={ 'opl' },
+ },
+ ovp={
+ names={ "ovp" },
+ variable='OVPFONTS',
+ suffixes={ 'ovp' },
+ },
+ },
+ kpse={
+ base={
+ names={ 'base',"metafont format" },
+ variable='MFBASES',
+ suffixes={ 'base','bas' },
+ },
+ cmap={
+ names={ 'cmap','cmap files','cmap file' },
+ variable='CMAPFONTS',
+ suffixes={ 'cmap' },
+ },
+ cnf={
+ names={ 'cnf' },
+ suffixes={ 'cnf' },
+ },
+ web={
+ names={ 'web' },
+ suffixes={ 'web','ch' }
+ },
+ cweb={
+ names={ 'cweb' },
+ suffixes={ 'w','web','ch' },
+ },
+ gf={
+ names={ 'gf' },
+ suffixes={ '<resolution>gf' },
+ },
+ mf={
+ names={ 'mf' },
+ variable='MFINPUTS',
+ suffixes={ 'mf' },
+ },
+ mft={
+ names={ 'mft' },
+ suffixes={ 'mft' },
+ },
+ pk={
+ names={ 'pk' },
+ suffixes={ '<resolution>pk' },
+ },
+ },
}
-
-resolvers.relations = relations
-
--- formats: maps a format onto a variable
-
+resolvers.relations=relations
function resolvers.updaterelations()
- for category, categories in next, relations do
- for name, relation in next, categories do
- local rn = relation.names
- local rv = relation.variable
- local rs = relation.suffixes
- if rn and rv then
- for i=1,#rn do
- local rni = lower(gsub(rn[i]," ",""))
- formats[rni] = rv
- if rs then
- suffixes[rni] = rs
- for i=1,#rs do
- local rsi = rs[i]
- suffixmap[rsi] = rni
- end
- end
- end
- end
- if rs then
- end
- end
- end
-end
-
-resolvers.updaterelations() -- push this in the metatable -> newindex
-
+ for category,categories in next,relations do
+ for name,relation in next,categories do
+ local rn=relation.names
+ local rv=relation.variable
+ local rs=relation.suffixes
+ if rn and rv then
+ for i=1,#rn do
+ local rni=lower(gsub(rn[i]," ",""))
+ formats[rni]=rv
+ if rs then
+ suffixes[rni]=rs
+ for i=1,#rs do
+ local rsi=rs[i]
+ suffixmap[rsi]=rni
+ end
+ end
+ end
+ end
+ if rs then
+ end
+ end
+ end
+end
+resolvers.updaterelations()
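-- Illustrative sketch, not part of the diff: how updaterelations above flattens one
-- relations entry into the formats/suffixes/suffixmap lookup tables, shown for the
-- "tex" relation only and without the metatable indexing.
local formats, suffixes, suffixmap = {}, {}, {}
local relation = {
  names    = { "tex" },
  variable = "TEXINPUTS",
  suffixes = { "tex", "mkvi", "mkiv", "mkii" },
}
for i=1,#relation.names do
  local rni = relation.names[i]:lower():gsub(" ","")
  formats[rni]  = relation.variable
  suffixes[rni] = relation.suffixes
  for j=1,#relation.suffixes do
    suffixmap[relation.suffixes[j]] = rni
  end
end
print(formats.tex, suffixmap.mkiv) --> TEXINPUTS  tex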
local function simplified(t,k)
- return k and rawget(t,lower(gsub(k," ",""))) or nil
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
-
-setmetatableindex(formats, simplified)
-setmetatableindex(suffixes, simplified)
-setmetatableindex(suffixmap, simplified)
-
--- A few accessors, mostly for command line tool.
-
+setmetatableindex(formats,simplified)
+setmetatableindex(suffixes,simplified)
+setmetatableindex(suffixmap,simplified)
function resolvers.suffixofformat(str)
- local s = suffixes[str]
- return s and s[1] or ""
+ local s=suffixes[str]
+ return s and s[1] or ""
end
-
function resolvers.suffixofformat(str)
- return suffixes[str] or { }
+ return suffixes[str] or {}
end
-
-for name, format in next, formats do
- dangerous[name] = true -- still needed ?
+for name,format in next,formats do
+ dangerous[name]=true
end
-
--- because vf searching is somewhat dangerous, we want to prevent
--- too liberal searching esp because we do a lookup on the current
--- path anyway; only tex (or any) is safe
-
-dangerous.tex = nil
-
-
--- more helpers
-
+dangerous.tex=nil
function resolvers.formatofvariable(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
-function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+function resolvers.formatofsuffix(str)
+ return suffixmap[suffixonly(str)] or 'tex'
end
-
function resolvers.variableofformat(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
function resolvers.variableofformatorsuffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = suffixmap[fileextname(str)]
- if v then
- return formats[v]
- end
- return ''
+ local v=formats[str]
+ if v then
+ return v
+ end
+ v=suffixmap[suffixonly(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.</p>
-
-</code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
-local serialize, serializetofile = table.serialize, table.tofile
-local mkdirs, isdir = dir.mkdirs, lfs.isdir
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-
-local report_caches = logs.reporter("resolvers","caches")
-local report_resolvers = logs.reporter("resolvers","caching")
-
-local resolvers = resolvers
-
--- intermezzo
-
-local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end)
-local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end)
+package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
-local compile = utilities.lua.compile
+-- original size: 14308, stripped down to: 10956
+if not modules then modules={} end modules ['data-tmp']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
+local serialize,serializetofile=table.serialize,table.tofile
+local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
+local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
+local formatters=string.formatters
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local report_caches=logs.reporter("resolvers","caches")
+local report_resolvers=logs.reporter("resolvers","caching")
+local resolvers=resolvers
+local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
+local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
+local compile=utilities.lua.compile
function utilities.lua.compile(luafile,lucfile,cleanup,strip)
- if cleanup == nil then cleanup = directive_cleanup end
- if strip == nil then strip = directive_strip end
- return compile(luafile,lucfile,cleanup,strip)
-end
-
--- end of intermezzo
-
-caches = caches or { }
-local caches = caches
-
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.force = true
-caches.ask = false
-caches.relocate = false
-caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-local writable, readables, usedreadables = nil, { }, { }
-
--- we could use a metatable for writable and readable but not yet
-
+ if cleanup==nil then cleanup=directive_cleanup end
+ if strip==nil then strip=directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
+end
+caches=caches or {}
+local caches=caches
+local luasuffixes=utilities.lua.suffixes
+caches.base=caches.base or "luatex-cache"
+caches.more=caches.more or "context"
+caches.direct=false
+caches.tree=false
+caches.force=true
+caches.ask=false
+caches.relocate=false
+caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+local writable,readables,usedreadables=nil,{},{}
local function identify()
- -- Combining the loops makes it messy. First we check the format cache path
- -- and when the last component is not present we try to create it.
- local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- cachepath = file.collapsepath(cachepath)
- local valid = isdir(cachepath)
- if valid then
- if file.is_readable(cachepath) then
- readables[#readables+1] = cachepath
- if not writable and file.is_writable(cachepath) then
- writable = cachepath
- end
- end
- elseif not writable and caches.force then
- local cacheparent = file.dirname(cachepath)
- if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
- if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- mkdirs(cachepath)
- if isdir(cachepath) and file.is_writable(cachepath) then
- report_caches("created: %s",cachepath)
- writable = cachepath
- readables[#readables+1] = cachepath
- end
- end
- end
- end
- end
- end
- end
- -- As a last resort we check some temporary paths but this time we don't
- -- create them.
- local texmfcaches = caches.defaults
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- cachepath = resolvers.expansion(cachepath) -- was getenv
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- local valid = isdir(cachepath)
- if valid and file.is_readable(cachepath) then
- if not writable and file.is_writable(cachepath) then
- readables[#readables+1] = cachepath
- writable = cachepath
- break
- end
- end
- end
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ cachepath=file.collapsepath(cachepath)
+ local valid=isdir(cachepath)
+ if valid then
+ if is_readable(cachepath) then
+ readables[#readables+1]=cachepath
+ if not writable and is_writable(cachepath) then
+ writable=cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent=file.dirname(cachepath)
+ if is_writable(cacheparent) and true then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
+ writable=cachepath
+ readables[#readables+1]=cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local texmfcaches=caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ cachepath=resolvers.expansion(cachepath)
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ local valid=isdir(cachepath)
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
+ readables[#readables+1]=cachepath
+ writable=cachepath
+ break
+ end
end
+ end
end
- -- Some extra checking. If we have no writable or readable path then we simply
- -- quit.
- if not writable then
- report_caches("fatal error: there is no valid writable cache path defined")
- os.exit()
- elseif #readables == 0 then
- report_caches("fatal error: there is no valid readable cache path defined")
- os.exit()
- end
- -- why here
- writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case
- -- moved here
- local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
- if tree then
- caches.tree = tree
- writable = mkdirs(writable,base,more,tree)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more,tree)
- end
- else
- writable = mkdirs(writable,base,more)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more)
- end
+ end
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables==0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ writable=dir.expandname(resolvers.cleanpath(writable))
+ local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
+ if tree then
+ caches.tree=tree
+ writable=mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more,tree)
end
- -- end
- if trace_cache then
- for i=1,#readables do
- report_caches("using readable path '%s' (order %s)",readables[i],i)
- end
- report_caches("using writable path '%s'",writable)
+ else
+ writable=mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more)
end
- identify = function()
- return writable, readables
+ end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path %a (order %s)",readables[i],i)
end
- return writable, readables
+ report_caches("using writable path %a",writable)
+ end
+ identify=function()
+ return writable,readables
+ end
+ return writable,readables
end
-
function caches.usedpaths()
- local writable, readables = identify()
- if #readables > 1 then
- local result = { }
- for i=1,#readables do
- local readable = readables[i]
- if usedreadables[i] or readable == writable then
- result[#result+1] = format("readable: '%s' (order %s)",readable,i)
- end
- end
- result[#result+1] = format("writable: '%s'",writable)
- return result
- else
- return writable
+ local writable,readables=identify()
+ if #readables>1 then
+ local result={}
+ for i=1,#readables do
+ local readable=readables[i]
+ if usedreadables[i] or readable==writable then
+ result[#result+1]=formatters["readable: %a (order %s)"](readable,i)
+ end
end
+ result[#result+1]=formatters["writable: %a"](writable)
+ return result
+ else
+ return writable
+ end
end
-
function caches.configfiles()
- return concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
-
function caches.hashed(tree)
- tree = gsub(tree,"\\$","/")
- tree = gsub(tree,"/+$","")
- tree = lower(tree)
- local hash = md5.hex(tree)
- if trace_cache or trace_locating then
- report_caches("hashing tree %s, hash %s",tree,hash)
- end
- return hash
+ tree=gsub(tree,"[\\/]+$","")
+ tree=lower(tree)
+ local hash=md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %a, hash %a",tree,hash)
+ end
+ return hash
end
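-- Illustrative sketch, not part of the diff: the tree-name normalization done in
-- caches.hashed above before hashing. The md5.hex call comes from the bundled
-- ConTeXt libraries, so it is only indicated in a comment; the path is hypothetical.
local tree = [[C:\TeXLive\texmf-config\web2c\]]
tree = tree:gsub("[\\/]+$","") -- strip trailing slashes and backslashes
tree = tree:lower()            -- the hash key is case insensitive
print(tree)                    --> c:\texlive\texmf-config\web2c
-- local hash = md5.hex(tree)  -- what the real function reports and returns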
-
function caches.treehash()
- local tree = caches.configfiles()
- if not tree or tree == "" then
- return false
+ local tree=caches.configfiles()
+ if not tree or tree=="" then
+ return false
+ else
+ return caches.hashed(tree)
+ end
+end
+local r_cache,w_cache={},{}
+local function getreadablepaths(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=r_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done={}
+ for i=1,#readables do
+ done[i]=file.join(readables[i],...)
+ end
else
- return caches.hashed(tree)
+ done=readables
end
+ r_cache[hash]=done
+ end
+ return done
end
-
-local r_cache, w_cache = { }, { } -- normally w in in r but who cares
-
-local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = r_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = { }
- for i=1,#readables do
- done[i] = file.join(readables[i],...)
- end
- else
- done = readables
- end
- r_cache[hash] = done
- end
- return done
-end
-
local function getwritablepath(...)
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = w_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = mkdirs(writable,...)
- else
- done = writable
- end
- w_cache[hash] = done
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=w_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done=mkdirs(writable,...)
+ else
+ done=writable
end
- return done
+ w_cache[hash]=done
+ end
+ return done
end
-
-caches.getreadablepaths = getreadablepaths
-caches.getwritablepath = getwritablepath
-
+caches.getreadablepaths=getreadablepaths
+caches.getwritablepath=getwritablepath
function caches.getfirstreadablefile(filename,...)
- local rd = getreadablepaths(...)
- for i=1,#rd do
- local path = rd[i]
- local fullname = file.join(path,filename)
- if file.is_readable(fullname) then
- usedreadables[i] = true
- return fullname, path
- end
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
end
- return caches.setfirstwritablefile(filename,...)
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-
function caches.setfirstwritablefile(filename,...)
- local wr = getwritablepath(...)
- local fullname = file.join(wr,filename)
- return fullname, wr
+ local wr=getwritablepath(...)
+ local fullname=file.join(wr,filename)
+ return fullname,wr
end
-
-function caches.define(category,subcategory) -- for old times sake
- return function()
- return getwritablepath(category,subcategory)
- end
+function caches.define(category,subcategory)
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
-
function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
+ return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
end
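-- Illustrative sketch, not part of the diff: the filename pair that
-- caches.setluanames above produces, assuming the plain (non-LuajitTeX) suffixes
-- "tma" and "tmc" from utilities.lua.suffixes; the cache path is hypothetical.
local function setluanames(path,name)
  return string.format("%s/%s.%s",path,name,"tma"),
         string.format("%s/%s.%s",path,name,"tmc")
end
print(setluanames("/tmp/luatex-cache/context/trees","cont-en"))
--> /tmp/luatex-cache/context/trees/cont-en.tma  /tmp/luatex-cache/context/trees/cont-en.tmc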
-
function caches.loaddata(readables,name)
- if type(readables) == "string" then
- readables = { readables }
- end
- for i=1,#readables do
- local path = readables[i]
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- end
- end
- return false
+ if type(readables)=="string" then
+ readables={ readables }
+ end
+ for i=1,#readables do
+ local path=readables[i]
+ local tmaname,tmcname=caches.setluanames(path,name)
+ local loader=false
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader then
+ loader=loadfile(tmaname)
+ end
+ end
+ if loader then
+ loader=loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-
function caches.is_writable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.is_writable(tmaname)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ return is_writable(tmaname)
end
-
-local saveoptions = { compact = true }
-
+local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
- else
- serializetofile(tmaname,data,true,saveoptions)
- end
- utilities.lua.compile(tmaname,tmcname)
-end
-
--- moved from data-res:
-
-local content_state = { }
-
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ local reduce,simplify=true,true
+ if raw then
+ reduce,simplify=false,false
+ end
+ data.cache_uuid=os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,serialize(data,true,saveoptions))
+ else
+ serializetofile(tmaname,data,true,saveoptions)
+ end
+ utilities.lua.compile(tmaname,tmcname)
+end
+local content_state={}
function caches.contentstate()
- return content_state or { }
+ return content_state or {}
end
-
function caches.loadcontent(cachename,dataname)
- local name = caches.hashed(cachename)
- local full, path = caches.getfirstreadablefile(name ..".lua","trees")
- local filename = file.join(path,name)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content then
- if data.type == dataname then
- if data.version == resolvers.cacheversion then
- content_state[#content_state+1] = data.uuid
- if trace_locating then
- report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
- end
- return data.content
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename)
- end
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename)
- end
- elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename)
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
+ if blob then
+ local data=blob()
+ if data and data.content then
+ if data.type==dataname then
+ if data.version==resolvers.cacheversion then
+ content_state[#content_state+1]=data.uuid
+ if trace_locating then
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
end
+ else
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
+ end
elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
+ end
end
-
function caches.collapsecontent(content)
- for k, v in next, content do
- if type(v) == "table" and #v == 1 then
- content[k] = v[1]
- end
+ for k,v in next,content do
+ if type(v)=="table" and #v==1 then
+ content[k]=v[1]
end
+ end
end
-
function caches.savecontent(cachename,dataname,content)
- local name = caches.hashed(cachename)
- local full, path = caches.setfirstwritablefile(name ..".lua","trees")
- local filename = file.join(path,name) -- is full
- local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local luaname=addsuffix(filename,luasuffixes.lua)
+ local lucname=addsuffix(filename,luasuffixes.luc)
+ if trace_locating then
+ report_resolvers("preparing %a for %a",dataname,cachename)
+ end
+ local data={
+ type=dataname,
+ root=cachename,
+ version=resolvers.cacheversion,
+ date=os.date("%Y-%m-%d"),
+ time=os.date("%H:%M:%S"),
+ content=content,
+ uuid=os.uuid(),
+ }
+ local ok=io.savedata(luaname,serialize(data,true))
+ if ok then
if trace_locating then
- report_resolvers("preparing '%s' for '%s'",dataname,cachename)
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = content,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,serialize(data,true))
- if ok then
- if trace_locating then
- report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
- end
- if utilities.lua.compile(luaname,lucname) then
- if trace_locating then
- report_resolvers("'%s' compiled to '%s'",dataname,lucname)
- end
- return true
- else
- if trace_locating then
- report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("%a compiled to %a",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
+ end
end
@@ -12237,1999 +12513,1700 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-met'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find, format = string.find, string.format
-local sequenced = table.sequenced
-local addurlscheme, urlhashed = url.addscheme, url.hashed
-
-local trace_locating = false
-
-trackers.register("resolvers.locating", function(v) trace_methods = v end)
-trackers.register("resolvers.methods", function(v) trace_methods = v end)
-
-
-local report_methods = logs.reporter("resolvers","methods")
-
-local allocate = utilities.storage.allocate
-
-local resolvers = resolvers
+package.loaded["data-met"] = package.loaded["data-met"] or true
-local registered = { }
-
-local function splitmethod(filename) -- todo: filetype in specification
- if not filename then
- return { scheme = "unknown", original = filename }
- end
- if type(filename) == "table" then
- return filename -- already split
- end
- filename = file.collapsepath(filename)
- if not find(filename,"://") then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- end
- local specification = url.hashed(filename)
- if not specification.scheme or specification.scheme == "" then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- else
- return specification
- end
-end
-
-resolvers.splitmethod = splitmethod -- bad name but ok
-
--- the second argument is always analyzed (saves time later on) and the original
--- gets passed as original but also as argument
-
-local function methodhandler(what,first,...) -- filename can be nil or false
- local method = registered[what]
- if method then
- local how, namespace = method.how, method.namespace
- if how == "uri" or how == "url" then
- local specification = splitmethod(first)
- local scheme = specification.scheme
- local resolver = namespace and namespace[scheme]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
- end
- return resolver(specification,...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
- end
- return resolver(specification,...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, no handler",what,how)
- end
- end
- elseif how == "tag" then
- local resolver = namespace and namespace[first]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first)
- end
- return resolver(...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default",what,how)
- end
- return resolver(...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, unknown",what,how)
- end
- end
- end
- else
- report_methods("resolver: method=%s, unknown",what)
- end
-end
-
-resolvers.methodhandler = methodhandler
+-- original size: 4915, stripped down to: 3942
+if not modules then modules={} end modules ['data-met']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,format=string.find,string.format
+local sequenced=table.sequenced
+local addurlscheme,urlhashed=url.addscheme,url.hashed
+local trace_locating=false
+trackers.register("resolvers.locating",function(v) trace_methods=v end)
+trackers.register("resolvers.methods",function(v) trace_methods=v end)
+local report_methods=logs.reporter("resolvers","methods")
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registered={}
+local function splitmethod(filename)
+ if not filename then
+ return { scheme="unknown",original=filename }
+ end
+ if type(filename)=="table" then
+ return filename
+ end
+ filename=file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ end
+ local specification=url.hashed(filename)
+ if not specification.scheme or specification.scheme=="" then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ else
+ return specification
+ end
+end
+resolvers.splitmethod=splitmethod
+local function methodhandler(what,first,...)
+ local method=registered[what]
+ if method then
+ local how,namespace=method.how,method.namespace
+ if how=="uri" or how=="url" then
+ local specification=splitmethod(first)
+ local scheme=specification.scheme
+ local resolver=namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
+ end
+ end
+ elseif how=="tag" then
+ local resolver=namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
+ end
+ end
+ end
+ else
+ report_methods("resolving, invalid method %a")
+ end
+end
+resolvers.methodhandler=methodhandler
function resolvers.registermethod(name,namespace,how)
- registered[name] = { how = how or "tag", namespace = namespace }
- namespace["byscheme"] = function(scheme,filename,...)
- if scheme == "file" then
- return methodhandler(name,filename,...)
- else
- return methodhandler(name,addurlscheme(filename,scheme),...)
- end
- end
-end
-
-local concatinators = allocate { notfound = file.join } -- concatinate paths
-local locators = allocate { notfound = function() end } -- locate databases
-local hashers = allocate { notfound = function() end } -- load databases
-local generators = allocate { notfound = function() end } -- generate databases
-
-resolvers.concatinators = concatinators
-resolvers.locators = locators
-resolvers.hashers = hashers
-resolvers.generators = generators
-
-local registermethod = resolvers.registermethod
-
+ registered[name]={ how=how or "tag",namespace=namespace }
+ namespace["byscheme"]=function(scheme,filename,...)
+ if scheme=="file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
+local concatinators=allocate { notfound=file.join }
+local locators=allocate { notfound=function() end }
+local hashers=allocate { notfound=function() end }
+local generators=allocate { notfound=function() end }
+resolvers.concatinators=concatinators
+resolvers.locators=locators
+resolvers.hashers=hashers
+resolvers.generators=generators
+local registermethod=resolvers.registermethod
registermethod("concatinators",concatinators,"tag")
-registermethod("locators", locators, "uri")
-registermethod("hashers", hashers, "uri")
-registermethod("generators", generators, "uri")
+registermethod("locators",locators,"uri")
+registermethod("hashers",hashers,"uri")
+registermethod("generators",generators,"uri")
end -- of closure
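
The data-met block above boils down to a small dispatcher: registermethod records a namespace table and whether lookups are keyed by tag or by url scheme, and methodhandler picks the matching handler, falling back to the default (or file) entry. Below is a minimal standalone sketch of that idea with made-up handler names; only the "tag" flavour is modelled (the "uri" flavour first runs the argument through splitmethod to obtain a scheme).

-- illustrative sketch only, not the resolver code itself
local registered = {}

local function registermethod(name, namespace, how)
  registered[name] = { how = how or "tag", namespace = namespace }
end

local function methodhandler(what, first, ...)
  local method = registered[what]
  if not method then
    return
  end
  local namespace = method.namespace
  local handler = namespace[first] or namespace.default   -- tag lookup with fallback
  if handler then
    return handler(...)
  end
end

local greeters = {                       -- hypothetical namespace
  en      = function(who) return "hello " .. who end,
  nl      = function(who) return "hallo " .. who end,
  default = function(who) return "hi "    .. who end,
}

registermethod("greeters", greeters, "tag")
print(methodhandler("greeters", "nl", "world"))   -- hallo world
print(methodhandler("greeters", "xx", "world"))   -- hi world (default fallback)
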
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use a
--- instance variable. We always have one instance active (sort of global).
-
--- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
-
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type, rawget = next, type, rawget
-local os = os
-local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local filedirname = file.dirname
-local filebasename = file.basename
-local fileextname = file.extname
-local filejoin = file.join
-local collapsepath = file.collapsepath
-local joinpath = file.joinpath
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_resolving = logs.reporter("resolvers","resolving")
-
-local resolvers = resolvers
-
-local expandedpathfromlist = resolvers.expandedpathfromlist
-local checkedvariable = resolvers.checkedvariable
-local splitconfigurationpath = resolvers.splitconfigurationpath
-local methodhandler = resolvers.methodhandler
-
-local initializesetter = utilities.setters.initialize
-
-local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
-
-resolvers.cacheversion = '1.0.1'
-resolvers.configbanner = ''
-resolvers.homedir = environment.homedir
-resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
-resolvers.luacnfstate = "unknown"
-
--- The web2c tex binaries as well as kpse have built in paths for the configuration
--- files and there can be a depressing truckload of them. This is actually the weak
--- spot of a distribution. So we don't want:
---
--- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
---
--- but instead use:
---
--- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
---
--- which does not make texlive happy as there is a texmf-local tree one level up
--- (sigh), so we need this. We can assume web2c as mkiv does not run on older
--- texlives anyway.
---
--- texlive:
---
--- selfautodir:
--- selfautoparent:
--- selfautodir:share/texmf-local/web2c
--- selfautodir:share/texmf/web2c
--- selfautodir:texmf-local/web2c
--- selfautodir:texmf/web2c
--- selfautoparent:share/texmf-local/web2c
--- selfautoparent:share/texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf/web2c
---
--- minimals:
---
--- home:texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf-context/web2c
--- selfautoparent:texmf/web2c
+package.loaded["data-res"] = package.loaded["data-res"] or true
+-- original size: 60821, stripped down to: 42503
+if not modules then modules={} end modules ['data-res']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
+local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local next,type,rawget=next,type,rawget
+local os=os
+local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local formatters=string.formatters
+local filedirname=file.dirname
+local filebasename=file.basename
+local suffixonly=file.suffixonly
+local filejoin=file.join
+local collapsepath=file.collapsepath
+local joinpath=file.joinpath
+local allocate=utilities.storage.allocate
+local settings_to_array=utilities.parsers.settings_to_array
+local setmetatableindex=table.setmetatableindex
+local luasuffixes=utilities.lua.suffixes
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_resolving=logs.reporter("resolvers","resolving")
+local resolvers=resolvers
+local expandedpathfromlist=resolvers.expandedpathfromlist
+local checkedvariable=resolvers.checkedvariable
+local splitconfigurationpath=resolvers.splitconfigurationpath
+local methodhandler=resolvers.methodhandler
+local initializesetter=utilities.setters.initialize
+local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
+resolvers.cacheversion='1.0.1'
+resolvers.configbanner=''
+resolvers.homedir=environment.homedir
+resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
+resolvers.luacnfname="texmfcnf.lua"
+resolvers.luacnfstate="unknown"
if environment.default_texmfcnf then
- -- unfortunately we now have quite some overkill in the spec (not so nice on a network)
- resolvers.luacnfspec = environment.default_texmfcnf
+ resolvers.luacnfspec=environment.default_texmfcnf
else
- -- resolvers.luacnfspec = "selfautoparent:texmf{-local,-context,}/web2c"
- resolvers.luacnfspec = "{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
-end
-
-resolvers.luacnfspec = 'home:texmf/web2c;' .. resolvers.luacnfspec
-
--- which (as we want users to use the web2c path) be can be simplified to this:
---
--- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
--- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- else
--- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- end
-
-
-
-local unset_variable = "unset"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-
-resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
-
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
-
--- An instance has an environment (coming from the outside, kept raw), variables
--- (coming from the configuration file), and expansions (variables with nested
--- variables replaced). One can push something into the outer environment and
--- its internal copy, but only the later one will be the raw unprefixed variant.
-
+ resolvers.luacnfspec="{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
+end
+resolvers.luacnfspec='home:texmf/web2c;'..resolvers.luacnfspec
+local unset_variable="unset"
+local formats=resolvers.formats
+local suffixes=resolvers.suffixes
+local dangerous=resolvers.dangerous
+local suffixmap=resolvers.suffixmap
+resolvers.defaultsuffixes={ "tex" }
+resolvers.instance=resolvers.instance or nil
+local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
- if instance then
- -- this one will be consulted first when we stay inside
- -- the current environment (prefixes are not resolved here)
- instance.environment[key] = value
- -- we feed back into the environment, and as this is used
- -- by other applications (via os.execute) we need to make
- -- sure that prefixes are resolve
- ossetenv(key,raw and value or resolvers.resolve(value))
- end
+ if instance then
+ instance.environment[key]=value
+ ossetenv(key,raw and value or resolvers.resolve(value))
+ end
end
-
--- Beware we don't want empty here as this one can be called early on
--- and therefore we use rawget.
-
local function getenv(key)
- local value = rawget(instance.environment,key)
- if value and value ~= "" then
- return value
- else
- local e = osgetenv(key)
- return e ~= nil and e ~= "" and checkedvariable(e) or ""
- end
-end
-
-resolvers.getenv = getenv
-resolvers.env = getenv
-
--- We are going to use some metatable trickery where we backtrack from
--- expansion to variable to environment.
-
+ local value=rawget(instance.environment,key)
+ if value and value~="" then
+ return value
+ else
+ local e=osgetenv(key)
+ return e~=nil and e~="" and checkedvariable(e) or ""
+ end
+end
+resolvers.getenv=getenv
+resolvers.env=getenv
local function resolve(k)
- return instance.expansions[k]
-end
-
-local dollarstripper = lpeg.stripper("$")
-local inhibitstripper = P("!")^0 * Cs(P(1)^0)
-local backslashswapper = lpeg.replacer("\\","/")
-
-local somevariable = P("$") / ""
-local somekey = C(R("az","AZ","09","__","--")^1)
-local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
- + P(";") * (P(";") / "")
- + P(1)
-local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
-
-local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
-local variablecleaner = Cs((cleaner + P(1))^0)
-
-local somevariable = R("az","AZ","09","__","--")^1 / resolve
-local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
-local variableresolver = Cs((variable + P(1))^0)
-
+ return instance.expansions[k]
+end
+local dollarstripper=lpeg.stripper("$")
+local inhibitstripper=P("!")^0*Cs(P(1)^0)
+local backslashswapper=lpeg.replacer("\\","/")
+local somevariable=P("$")/""
+local somekey=C(R("az","AZ","09","__","--")^1)
+local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
+local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
+local variablecleaner=Cs((cleaner+P(1))^0)
+local somevariable=R("az","AZ","09","__","--")^1/resolve
+local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
+local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
- return lpegmatch(variableexpander,var) or var
-end
-
-function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
-
- if trace_locating then
- report_resolving("creating instance")
- end
-
- local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
-
- local newinstance = {
- environment = environment,
- variables = variables,
- expansions = expansions,
- order = order,
- files = allocate(),
- setups = allocate(),
- found = allocate(),
- foundintrees = allocate(),
- hashes = allocate(),
- hashed = allocate(),
- specification = allocate(),
- lists = allocate(),
- data = allocate(), -- only for loading
- fakepaths = allocate(),
- remember = true,
- diskcache = true,
- renewcache = false,
- renewtree = false,
- loaderror = false,
- savelists = true,
- pattern = nil, -- lists
- force_suffixes = true,
- }
-
- setmetatableindex(variables,function(t,k)
- local v
- for i=1,#order do
- v = order[i][k]
- if v ~= nil then
- t[k] = v
- return v
- end
- end
- if v == nil then
- v = ""
- end
- t[k] = v
- return v
- end)
-
- setmetatableindex(environment, function(t,k)
- local v = osgetenv(k)
- if v == nil then
- v = variables[k]
- end
- if v ~= nil then
- v = checkedvariable(v) or ""
- end
- v = resolvers.repath(v) -- for taco who has a : separated osfontdir
- t[k] = v
- return v
- end)
-
- setmetatableindex(expansions, function(t,k)
- local v = environment[k]
- if type(v) == "string" then
- v = lpegmatch(variableresolver,v)
- v = lpegmatch(variablecleaner,v)
- end
- t[k] = v
+ return lpegmatch(variableexpander,var) or var
+end
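
The variableresolver pattern above rewrites $NAME and ${NAME} references in place while copying everything else through. A self-contained lpeg sketch of the same substitution, resolving against a plain table of made-up values:

-- illustrative sketch only
local lpeg = require("lpeg")
local P, R, C, Cs = lpeg.P, lpeg.R, lpeg.C, lpeg.Cs

local values = { TEXMF = "/opt/texmf", HOME = "/home/user" }   -- made-up data

local function resolve(k)
  return values[k] or ""
end

local name     = C(R("az","AZ","09","__","--")^1) / resolve
local variable = (P("$")/"") * (name + (P("{")/"") * name * (P("}")/""))
local resolver = Cs((variable + P(1))^0)

print(lpeg.match(resolver, "$HOME/texmf;${TEXMF}/web2c"))
-- /home/user/texmf;/opt/texmf/web2c
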
+function resolvers.newinstance()
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+ local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
+ local newinstance={
+ environment=environment,
+ variables=variables,
+ expansions=expansions,
+ order=order,
+ files=allocate(),
+ setups=allocate(),
+ found=allocate(),
+ foundintrees=allocate(),
+ hashes=allocate(),
+ hashed=allocate(),
+ specification=allocate(),
+ lists=allocate(),
+ data=allocate(),
+ fakepaths=allocate(),
+ remember=true,
+ diskcache=true,
+ renewcache=false,
+ renewtree=false,
+ loaderror=false,
+ savelists=true,
+ pattern=nil,
+ force_suffixes=true,
+ }
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v=order[i][k]
+ if v~=nil then
+ t[k]=v
return v
- end)
-
- return newinstance
-
+ end
+ end
+ if v==nil then
+ v=""
+ end
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(environment,function(t,k)
+ local v=osgetenv(k)
+ if v==nil then
+ v=variables[k]
+ end
+ if v~=nil then
+ v=checkedvariable(v) or ""
+ end
+ v=resolvers.repath(v)
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(expansions,function(t,k)
+ local v=environment[k]
+ if type(v)=="string" then
+ v=lpegmatch(variableresolver,v)
+ v=lpegmatch(variablecleaner,v)
+ end
+ t[k]=v
+ return v
+ end)
+ return newinstance
+end
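
newinstance wires its tables together with __index metamethods: a key missing from expansions is computed from environment, which itself falls back to the process environment and then to variables, and every computed value is cached in the querying table. A standalone sketch of that lookup chain; the sample variable is made up and the cleanup steps are left out.

-- illustrative sketch only
local variables   = { TEXMFHOME = "~/texmf" }           -- made-up configuration value

local environment = setmetatable({}, { __index = function(t, k)
  local v = os.getenv(k) or variables[k] or ""          -- raw environment wins
  t[k] = v                                              -- memoize the answer
  return v
end })

local expansions = setmetatable({}, { __index = function(t, k)
  local v = environment[k]                              -- may trigger the chain above
  t[k] = v
  return v
end })

print(expansions.TEXMFHOME)   -- "~/texmf" unless TEXMFHOME is set in the real environment
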
+function resolvers.setinstance(someinstance)
+ instance=someinstance
+ resolvers.instance=someinstance
+ return someinstance
end
-
-function resolvers.setinstance(someinstance) -- only one instance is active
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
-end
-
function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
+ return resolvers.setinstance(resolvers.newinstance())
end
-
local function reset_hashes()
- instance.lists = { }
- instance.found = { }
-end
-
-local slash = P("/")
-
-local pathexpressionpattern = Cs (
- Cc("^") * (
- Cc("%") * S(".-")
- + slash^2 * P(-1) / "/.*"
- + slash^2 / "/.-/"
- + (1-slash) * P(-1) * Cc("/")
- + P(1)
- )^1 * Cc("$") -- yes or no $
+ instance.lists={}
+ instance.found={}
+end
+local slash=P("/")
+local pathexpressionpattern=Cs (
+ Cc("^")*(
+ Cc("%")*S(".-")+slash^2*P(-1)/"/.*"
++slash^2/"/[^/]*/*"+(1-slash)*P(-1)*Cc("/")+P(1)
+ )^1*Cc("$")
)
-
-local cache = { }
-
+local cache={}
local function makepathexpression(str)
- if str == "." then
- return "^%./$"
- else
- local c = cache[str]
- if not c then
- c = lpegmatch(pathexpressionpattern,str)
- cache[str] = c
- end
- return c
+ if str=="." then
+ return "^%./$"
+ else
+ local c=cache[str]
+ if not c then
+ c=lpegmatch(pathexpressionpattern,str)
+ cache[str]=c
end
+ return c
+ end
end
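
makepathexpression compiles a TEXMF path element into an anchored Lua string pattern: dots and dashes are escaped, a trailing // turns into /.* (anything below that directory), an inner // turns into /[^/]*/*, and a missing trailing slash is added. A simplified gsub-based sketch of the same transformation with a made-up path; the lpeg pattern above covers a few more corner cases.

-- illustrative sketch only
local function makeexpression(str)
  local s = str:gsub("([%.%-])", "%%%1")          -- escape . and - for Lua patterns
  local anydepth = false
  if s:sub(-2) == "//" then
    s, anydepth = s:sub(1, -3), true              -- trailing // : any depth below
  end
  s = s:gsub("//", "/[^/]*/*")                    -- inner //  : one intermediate level
  if anydepth then
    s = s .. "/.*"
  elseif s:sub(-1) ~= "/" then
    s = s .. "/"
  end
  return "^" .. s .. "$"
end

local expression = makeexpression("/opt/texmf//")                -- ^/opt/texmf/.*$
print(("/opt/texmf/tex/context/base/"):find(expression) ~= nil)  -- true
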
-
local function reportcriticalvariables(cnfspec)
- if trace_locating then
- for i=1,#resolvers.criticalvars do
- local k = resolvers.criticalvars[i]
- local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
- report_resolving("variable '%s' set to '%s'",k,v)
- end
- report_resolving()
- if cnfspec then
- if type(cnfspec) == "table" then
- report_resolving("using configuration specification '%s'",concat(cnfspec,","))
- else
- report_resolving("using configuration specification '%s'",cnfspec)
- end
- end
- report_resolving()
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k=resolvers.criticalvars[i]
+ local v=resolvers.getenv(k) or "unknown"
+ report_resolving("variable %a set to %a",k,v)
end
- reportcriticalvariables = function() end
+ report_resolving()
+ if cnfspec then
+ report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
+ end
+ report_resolving()
+ end
+ reportcriticalvariables=function() end
end
-
local function identify_configuration_files()
- local specification = instance.specification
- if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
- if cnfspec == "" then
- cnfspec = resolvers.luacnfspec
- resolvers.luacnfstate = "default"
- else
- resolvers.luacnfstate = "environment"
- end
- reportcriticalvariables(cnfspec)
- local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
- local luacnfname = resolvers.luacnfname
- for i=1,#cnfpaths do
- local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
- local realname = resolvers.resolve(filename)
- if lfs.isfile(realname) then
- specification[#specification+1] = filename
- if trace_locating then
- report_resolving("found configuration file '%s'",realname)
- end
- elseif trace_locating then
- report_resolving("unknown configuration file '%s'",realname)
- end
- end
+ local specification=instance.specification
+ if #specification==0 then
+ local cnfspec=getenv("TEXMFCNF")
+ if cnfspec=="" then
+ cnfspec=resolvers.luacnfspec
+ resolvers.luacnfstate="default"
+ else
+ resolvers.luacnfstate="environment"
+ end
+ reportcriticalvariables(cnfspec)
+ local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname=resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename=collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname=resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1]=filename
if trace_locating then
- report_resolving()
+ report_resolving("found configuration file %a",realname)
end
- elseif trace_locating then
- report_resolving("configuration files already identified")
+ elseif trace_locating then
+ report_resolving("unknown configuration file %a",realname)
+ end
end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
end
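
identify_configuration_files expands the TEXMFCNF specification (from the environment or the built-in luacnfspec) into candidate directories and records every directory that actually contains texmfcnf.lua. A rough standalone sketch of that probing step; the spec below is made up, no brace or prefix expansion is done (the real code goes through splitpath and expandedpathfromlist), and io.open stands in for lfs.isfile.

-- illustrative sketch only
local function find_configurations(cnfspec, cnfname)
  local found = {}
  for path in cnfspec:gmatch("[^;]+") do
    local filename = path .. "/" .. cnfname
    local f = io.open(filename, "r")              -- stand-in for lfs.isfile
    if f then
      f:close()
      found[#found + 1] = filename
    end
  end
  return found
end

local spec = "/home/user/texmf/web2c;/opt/texmf/web2c"   -- made-up specification
for _, name in ipairs(find_configurations(spec, "texmfcnf.lua")) do
  print("found configuration file " .. name)
end
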
-
local function load_configuration_files()
- local specification = instance.specification
- if #specification > 0 then
- local luacnfname = resolvers.luacnfname
- for i=1,#specification do
- local filename = specification[i]
- local pathname = filedirname(filename)
- local filename = filejoin(pathname,luacnfname)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local setups = instance.setups
- local data = blob()
- local parent = data and data.parent
- if parent then
- local filename = filejoin(pathname,parent)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local parentdata = blob()
- if parentdata then
- report_resolving("loading configuration file '%s'",filename)
- data = table.merged(parentdata,data)
- end
- end
- end
- data = data and data.content
- if data then
- if trace_locating then
- report_resolving("loading configuration file '%s'",filename)
- report_resolving()
- end
- local variables = data.variables or { }
- local warning = false
- for k, v in next, data do
- local variant = type(v)
- if variant == "table" then
- initializesetter(filename,k,v)
- elseif variables[k] == nil then
- if trace_locating and not warning then
- report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
- warning = true
- end
- variables[k] = v
- end
- end
- setups[pathname] = variables
- if resolvers.luacnfstate == "default" then
- -- the following code is not tested
- local cnfspec = variables["TEXMFCNF"]
- if cnfspec then
- if trace_locating then
- report_resolving("reloading configuration due to TEXMF redefinition")
- end
- -- we push the value into the main environment (osenv) so
- -- that it takes precedence over the default one and therefore
- -- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
- -- we now identify and load the specified configuration files
- instance.specification = { }
- identify_configuration_files()
- load_configuration_files()
- -- we prevent further overload of the configuration variable
- resolvers.luacnfstate = "configuration"
- -- we quit the outer loop
- break
- end
- end
-
- else
- if trace_locating then
- report_resolving("skipping configuration file '%s' (no content)",filename)
- end
- setups[pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- report_resolving("skipping configuration file '%s' (no valid format)",filename)
- end
- instance.order[#instance.order+1] = instance.setups[pathname]
- if instance.loaderror then
- break
- end
- end
- elseif trace_locating then
- report_resolving("warning: no lua configuration files found")
- end
+ local specification=instance.specification
+ if #specification>0 then
+ local luacnfname=resolvers.luacnfname
+ for i=1,#specification do
+ local filename=specification[i]
+ local pathname=filedirname(filename)
+ local filename=filejoin(pathname,luacnfname)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local setups=instance.setups
+ local data=blob()
+ local parent=data and data.parent
+ if parent then
+ local filename=filejoin(pathname,parent)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local parentdata=blob()
+ if parentdata then
+ report_resolving("loading configuration file %a",filename)
+ data=table.merged(parentdata,data)
+ end
+ end
+ end
+ data=data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file %a",filename)
+ report_resolving()
+ end
+ local variables=data.variables or {}
+ local warning=false
+ for k,v in next,data do
+ local variant=type(v)
+ if variant=="table" then
+ initializesetter(filename,k,v)
+ elseif variables[k]==nil then
+ if trace_locating and not warning then
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning=true
+ end
+ variables[k]=v
+ end
+ end
+ setups[pathname]=variables
+ if resolvers.luacnfstate=="default" then
+ local cnfspec=variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ resolvers.setenv("TEXMFCNF",cnfspec)
+ instance.specification={}
+ identify_configuration_files()
+ load_configuration_files()
+ resolvers.luacnfstate="configuration"
+ break
+ end
+ end
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file %a (no content)",filename)
+ end
+ setups[pathname]={}
+ instance.loaderror=true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file %a (no valid format)",filename)
+ end
+ instance.order[#instance.order+1]=instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
+ end
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
+ end
end
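
load_configuration_files runs each found file as a Lua chunk and expects the returned table to carry its settings in content.variables (an optional parent field names another file to merge in first). A minimal, hypothetical texmfcnf.lua in that shape; the values are invented.

-- illustrative sketch of a configuration chunk, not an actual distribution file
return {
  content = {
    variables = {
      TEXMF     = "{$TEXMFCONFIG,$TEXMFHOME,!!$TEXMFLOCAL,!!$TEXMFMAIN}",
      TEXMFHOME = "home:texmf",
    },
  },
}
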
-
--- scheme magic ... database loading
-
local function load_file_databases()
- instance.loaderror, instance.files = false, allocate()
- if not instance.renewcache then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- resolvers.hashers.byscheme(hash.type,hash.name)
- if instance.loaderror then break end
- end
+ instance.loaderror,instance.files=false,allocate()
+ if not instance.renewcache then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
end
+ end
end
-
local function locate_file_databases()
- -- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
- if #texmfpaths > 0 then
- for i=1,#texmfpaths do
- local path = collapsepath(texmfpaths[i])
- local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
- if stripped ~= "" then
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
- local spec = resolvers.splitmethod(stripped)
- if runtime and (spec.noscheme or spec.scheme == "file") then
- stripped = "tree:///" .. stripped
- elseif spec.scheme == "cache" or spec.scheme == "file" then
- stripped = spec.path
- end
- if trace_locating then
- if runtime then
- report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
- else
- report_resolving("locating list of '%s' (cached)",path)
- end
- end
- methodhandler('locators',stripped)
- end
+ local texmfpaths=resolvers.expandedpathlist("TEXMF")
+ if #texmfpaths>0 then
+ for i=1,#texmfpaths do
+ local path=collapsepath(texmfpaths[i])
+ path=gsub(path,"/+$","")
+ local stripped=lpegmatch(inhibitstripper,path)
+ if stripped~="" then
+ local runtime=stripped==path
+ path=resolvers.cleanpath(path)
+ local spec=resolvers.splitmethod(stripped)
+ if runtime and (spec.noscheme or spec.scheme=="file") then
+ stripped="tree:///"..stripped
+ elseif spec.scheme=="cache" or spec.scheme=="file" then
+ stripped=spec.path
end
if trace_locating then
- report_resolving()
+ if runtime then
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
+ else
+ report_resolving("locating list of %a (cached)",path)
+ end
end
- elseif trace_locating then
- report_resolving("no texmf paths are defined (using TEXMF)")
- end
-end
-
-local function generate_file_databases()
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- methodhandler('generators',hash.name)
+ methodhandler('locators',stripped)
+ end
end
if trace_locating then
- report_resolving()
+ report_resolving()
end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
+ end
end
-
-local function save_file_databases() -- will become cachers
- for i=1,#instance.hashes do
- local hash = instance.hashes[i]
- local cachename = hash.name
- if hash.cache then
- local content = instance.files[cachename]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",cachename)
- end
- caches.savecontent(cachename,"files",content)
- elseif trace_locating then
- report_resolving("not saving runtime tree '%s'",cachename)
- end
+local function generate_file_databases()
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
+end
+local function save_file_databases()
+ for i=1,#instance.hashes do
+ local hash=instance.hashes[i]
+ local cachename=hash.name
+ if hash.cache then
+ local content=instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree %a",cachename)
end
+ end
end
-
function resolvers.renew(hashname)
- if hashname and hashname ~= "" then
- local expanded = resolvers.expansion(hashname) or ""
- if expanded ~= "" then
- if trace_locating then
- report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
- end
- hashname = expanded
- else
- if trace_locating then
- report_resolving("identifying tree '%s'",hashname)
- end
- end
- local realpath = resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
- if trace_locating then
- report_resolving("using path '%s'",realpath)
- end
- methodhandler('generators',hashname)
- -- could be shared
- local content = instance.files[hashname]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",hashname)
- end
- caches.savecontent(hashname,"files",content)
- -- till here
- else
- report_resolving("invalid path '%s'",realpath)
- end
+ if hashname and hashname~="" then
+ local expanded=resolvers.expansion(hashname) or ""
+ if expanded~="" then
+ if trace_locating then
+ report_resolving("identifying tree %a from %a",expanded,hashname)
+ end
+ hashname=expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree %a",hashname)
+ end
+ end
+ local realpath=resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path %a",realpath)
+ end
+ methodhandler('generators',hashname)
+ local content=instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ else
+ report_resolving("invalid path %a",realpath)
end
+ end
end
-
local function load_databases()
- locate_file_databases()
- if instance.diskcache and not instance.renewcache then
- load_file_databases()
- if instance.loaderror then
- generate_file_databases()
- save_file_databases()
- end
- else
- generate_file_databases()
- if instance.renewcache then
- save_file_databases()
- end
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
+ end
end
-
function resolvers.appendhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' appended",name)
- end
- insert(instance.hashes, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a appended",name)
end
+ insert(instance.hashes,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
end
-
function resolvers.prependhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' prepended",name)
- end
- insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
- end
-end
-
-function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- reset_hashes()
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a prepended",name)
+ end
+ insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.extendtexmfvariable(specification)
+ local t=resolvers.splitpath(getenv("TEXMF"))
+ insert(t,1,specification)
+ local newspec=concat(t,",")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"]=newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"]=newspec
+ else
+ end
+ reset_hashes()
end
-
function resolvers.splitexpansions()
- local ie = instance.expansions
- for k,v in next, ie do
- local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
- for kk=1,#p do
- local vv = p[kk]
- if vv ~= "" and not h[vv] then
- tn = tn + 1
- t[tn] = vv
- h[vv] = true
- end
- end
- if #t > 1 then
- ie[k] = t
- else
- ie[k] = t[1]
- end
+ local ie=instance.expansions
+ for k,v in next,ie do
+ local t,tn,h,p={},0,{},splitconfigurationpath(v)
+ for kk=1,#p do
+ local vv=p[kk]
+ if vv~="" and not h[vv] then
+ tn=tn+1
+ t[tn]=vv
+ h[vv]=true
+ end
+ end
+ if #t>1 then
+ ie[k]=t
+ else
+ ie[k]=t[1]
end
+ end
end
-
--- end of split/join code
-
--- we used to have 'files' and 'configurations' so therefore the following
--- shared function
-
function resolvers.datastate()
- return caches.contentstate()
+ return caches.contentstate()
end
-
function resolvers.variable(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.variables[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.variables[name]
+ return result~=nil and result or ""
end
-
function resolvers.expansion(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.expansions[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.expansions[name]
+ return result~=nil and result or ""
end
-
function resolvers.unexpandedpathlist(str)
- local pth = resolvers.variable(str)
- local lst = resolvers.splitpath(pth)
- return expandedpathfromlist(lst)
+ local pth=resolvers.variable(str)
+ local lst=resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-
function resolvers.unexpandedpath(str)
- return joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
-
-local done = { }
-
+local done={}
function resolvers.resetextrapath()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+ local ep=instance.extra_paths
+ if not ep then
+ ep,done={},{}
+ instance.extra_paths=ep
+ elseif #ep>0 then
+ instance.lists,done={},{}
+ end
end
-
function resolvers.registerextrapath(paths,subpaths)
- local ep = instance.extra_paths or { }
- local oldn = #ep
- local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
- end
+ paths=settings_to_array(paths)
+ subpaths=settings_to_array(subpaths)
+ local ep=instance.extra_paths or {}
+ local oldn=#ep
+ local newn=oldn
+ local nofpaths=#paths
+ local nofsubpaths=#subpaths
+ if nofpaths>0 then
+ if nofsubpaths>0 then
+ for i=1,nofpaths do
+ local p=paths[i]
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=p.."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ else
+ for i=1,nofpaths do
+ local p=paths[i]
+ if not done[p] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(p)
+ done[p]=true
+ end
+ end
+ end
+ elseif nofsubpaths>0 then
+ for i=1,oldn do
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=ep[i].."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ end
+ if newn>0 then
+ instance.extra_paths=ep
+ end
+ if newn>oldn then
+ instance.lists={}
+ end
+end
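
registerextrapath now pushes both arguments through settings_to_array, so comma-separated strings and plain lists behave the same, and every path/subpath combination is registered once. A standalone sketch of that combination step with made-up directories and a hypothetical helper name:

-- illustrative sketch only
local function combine(paths, subpaths)
  local done, result = {}, {}
  for _, p in ipairs(paths) do
    for _, s in ipairs(subpaths) do
      local ps = p .. "/" .. s
      if not done[ps] then                  -- register each combination only once
        done[ps] = true
        result[#result + 1] = ps
      end
    end
  end
  return result
end

local extra = combine({ "/data/project", "/data/project" }, { "tex", "fonts" })
for _, p in ipairs(extra) do
  print(p)    -- /data/project/tex, /data/project/fonts (the duplicate path is skipped)
end
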
+local function made_list(instance,list)
+ local ep=instance.extra_paths
+ if not ep or #ep==0 then
+ return list
+ else
+ local done,new,newn={},{},0
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ if find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
+ break
end
+ end
end
- if newn > 0 then
- instance.extra_paths = ep -- register paths
- end
- if newn > oldn then
- instance.lists = { } -- erase the cache
+ for k=1,#ep do
+ local v=ep[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
-end
-
-local function made_list(instance,list)
- local ep = instance.extra_paths
- if not ep or #ep == 0 then
- return list
- else
- local done, new, newn = { }, { }, 0
- -- honour . .. ../.. but only when at the start
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- else
- break
- end
- end
- end
- -- first the extra paths
- for k=1,#ep do
- local v = ep[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- -- next the formal paths
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- return new
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
+ return new
+ end
end
-
function resolvers.cleanpathlist(str)
- local t = resolvers.expandedpathlist(str)
- if t then
- for i=1,#t do
- t[i] = collapsepath(resolvers.cleanpath(t[i]))
- end
+ local t=resolvers.expandedpathlist(str)
+ if t then
+ for i=1,#t do
+ t[i]=collapsepath(resolvers.cleanpath(t[i]))
end
- return t
+ end
+ return t
end
-
function resolvers.expandpath(str)
- return joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
-
function resolvers.expandedpathlist(str)
- if not str then
- return { }
- elseif instance.savelists then
- str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
- else
- local lst = resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ if not str then
+ return {}
+ elseif instance.savelists then
+ str=lpegmatch(dollarstripper,str)
+ local lists=instance.lists
+ local lst=lists[str]
+ if not lst then
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst=expandedpathfromlist(l)
+ lists[str]=lst
end
+ return lst
+ else
+ local lst=resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
+ end
end
-
-function resolvers.expandedpathlistfromvariable(str) -- brrr
- str = lpegmatch(dollarstripper,str)
- local tmp = resolvers.variableofformatorsuffix(str)
- return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
+function resolvers.expandedpathlistfromvariable(str)
+ str=lpegmatch(dollarstripper,str)
+ local tmp=resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp~="" and tmp or str)
end
-
function resolvers.expandpathfromvariable(str)
- return joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
-
-function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
--- local ori = resolvers.variable(str)
--- if ori == "" then
- local ori = str
--- end
- local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return joinpath(pth)
+function resolvers.expandbraces(str)
+ local ori=str
+ local pth=expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
end
-
function resolvers.registerfilehash(name,content,someerror)
- if content then
- instance.files[name] = content
- else
- instance.files[name] = { }
- if somerror == true then -- can be unset
- instance.loaderror = someerror
- end
+ if content then
+ instance.files[name]=content
+ else
+ instance.files[name]={}
+  if someerror==true then
+ instance.loaderror=someerror
end
+ end
end
-
local function isreadable(name)
- local readable = lfs.isfile(name) -- not file.is_readable(name) asit can be a dir
- if trace_detail then
- if readable then
- report_resolving("file '%s' is readable",name)
- else
- report_resolving("file '%s' is not readable", name)
- end
+ local readable=lfs.isfile(name)
+ if trace_detail then
+ if readable then
+ report_resolving("file %a is readable",name)
+ else
+ report_resolving("file %a is not readable",name)
end
- return readable
+ end
+ return readable
end
-
--- name
--- name/name
-
local function collect_files(names)
- local filelist, noffiles = { }, 0
- for k=1,#names do
- local fname = names[k]
+ local filelist,noffiles={},0
+ for k=1,#names do
+ local fname=names[k]
+ if trace_detail then
+ report_resolving("checking name %a",fname)
+ end
+ local bname=filebasename(fname)
+ local dname=filedirname(fname)
+ if dname=="" or find(dname,"^%.") then
+ dname=false
+ else
+ dname=gsub(dname,"%*",".*")
+ dname="/"..dname.."$"
+ end
+ local hashes=instance.hashes
+ for h=1,#hashes do
+ local hash=hashes[h]
+ local blobpath=hash.name
+ local files=blobpath and instance.files[blobpath]
+ if files then
if trace_detail then
- report_resolving("checking name '%s'",fname)
- end
- local bname = filebasename(fname)
- local dname = filedirname(fname)
- if dname == "" or find(dname,"^%.") then
- dname = false
- else
-dname = gsub(dname,"*","%.*")
- dname = "/" .. dname .. "$"
- end
- local hashes = instance.hashes
- for h=1,#hashes do
- local hash = hashes[h]
- local blobpath = hash.name
- local files = blobpath and instance.files[blobpath]
- if files then
+ report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ end
+ local blobfile=files[bname]
+ if not blobfile then
+ local rname="remap:"..bname
+ blobfile=files[rname]
+ if blobfile then
+ bname=files[rname]
+ blobfile=files[bname]
+ end
+ end
+ if blobfile then
+ local blobroot=files.__path__ or blobpath
+ if type(blobfile)=='string' then
+ if not dname or find(blobfile,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,blobfile,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ else
+ for kk=1,#blobfile do
+ local vv=blobfile[kk]
+ if not dname or find(vv,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,vv,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
- report_resolving("deep checking '%s' (%s)",blobpath,bname)
- end
- local blobfile = files[bname]
- if not blobfile then
- local rname = "remap:"..bname
- blobfile = files[rname]
- if blobfile then
- bname = files[rname]
- blobfile = files[bname]
- end
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
end
- if blobfile then
- local blobroot = files.__path__ or blobpath
- if type(blobfile) == 'string' then
- if not dname or find(blobfile,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,blobfile,bname)
- local search = filejoin(blobroot,blobfile,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- else
- for kk=1,#blobfile do
- local vv = blobfile[kk]
- if not dname or find(vv,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,vv,bname)
- local search = filejoin(blobroot,vv,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- end
- end
- end
- elseif trace_locating then
- report_resolving("no match in '%s' (%s)",blobpath,bname)
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
end
+ end
end
+ elseif trace_locating then
+ report_resolving("no match in %a (%s)",blobpath,bname)
+ end
end
- return noffiles > 0 and filelist or nil
+ end
+ return noffiles>0 and filelist or nil
end
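
collect_files consults the per-tree file database: a basename maps to a single directory, to a list of directories, or indirectly through a "remap:" alias that yields the real basename to look up. A self-contained sketch of that lookup over a made-up table:

-- illustrative sketch only
local files = {                                    -- made-up database content
  ["somestyle.mkiv"]      = "tex/context/third/somestyle",
  ["somefont.tfm"]        = { "fonts/tfm/public/somefont", "fonts/tfm/local" },
  ["remap:somestyle.tex"] = "somestyle.mkiv",
}

local function candidates(bname)
  local entry = files[bname]
  if not entry then
    local remapped = files["remap:" .. bname]      -- alias to the real basename
    if remapped then
      bname = remapped
      entry = files[remapped]
    end
  end
  if type(entry) == "string" then
    return { entry .. "/" .. bname }
  elseif type(entry) == "table" then
    local t = {}
    for i = 1, #entry do
      t[i] = entry[i] .. "/" .. bname
    end
    return t
  end
  return {}
end

for _, c in ipairs(candidates("somefont.tfm"))  do print(c) end
for _, c in ipairs(candidates("somestyle.tex")) do print(c) end
-- fonts/tfm/public/somefont/somefont.tfm
-- fonts/tfm/local/somefont.tfm
-- tex/context/third/somestyle/somestyle.mkiv
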
-
-local fit = { }
-
+local fit={}
function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
- local foundintrees = instance.foundintrees
- if usedmethod == "direct" and filename == foundname and fit[foundname] then
- -- just an extra lookup after a test on presence
- else
- local t = {
- filename = filename,
- format = format ~= "" and format or nil,
- filetype = filetype ~= "" and filetype or nil,
- usedmethod = usedmethod,
- foundname = foundname,
- }
- fit[foundname] = t
- foundintrees[#foundintrees+1] = t
- end
+ local foundintrees=instance.foundintrees
+ if usedmethod=="direct" and filename==foundname and fit[foundname] then
+ else
+ local t={
+ filename=filename,
+ format=format~="" and format or nil,
+ filetype=filetype~="" and filetype or nil,
+ usedmethod=usedmethod,
+ foundname=foundname,
+ }
+ fit[foundname]=t
+ foundintrees[#foundintrees+1]=t
+ end
end
-
--- split the next one up for readability (but this module needs a cleanup anyway)
-
-local function can_be_dir(name) -- can become local
- local fakepaths = instance.fakepaths
- if not fakepaths[name] then
- if lfs.isdir(name) then
- fakepaths[name] = 1 -- directory
- else
- fakepaths[name] = 2 -- no directory
- end
+local function can_be_dir(name)
+ local fakepaths=instance.fakepaths
+ if not fakepaths[name] then
+ if lfs.isdir(name) then
+ fakepaths[name]=1
+ else
+ fakepaths[name]=2
end
- return fakepaths[name] == 1
+ end
+ return fakepaths[name]==1
end
-
-local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
-
--- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-
+local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
local collect_instance_files
-
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
- -- too tricky as filename can be bla.1.2.3:
- --
- -- if not suffixmap[ext] then
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
- if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
- end
- end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
- end
+ local filetype,wantedfiles,ext='',{},suffixonly(filename)
+ wantedfiles[#wantedfiles+1]=filename
+ if askedformat=="" then
+ if ext=="" or not suffixmap[ext] then
+ local defaultsuffixes=resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname=filename..'.'..defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1]=forcedname
+ filetype=resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype %a",filetype)
end
+ end
else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
- end
- end
- end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
+ filetype=resolvers.formatofsuffix(filename)
+ if trace_locating then
+ report_resolving("using suffix based filetype %a",filetype)
+ end
+ end
+ else
+ if ext=="" or not suffixmap[ext] then
+ local format_suffixes=suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
end
+ end
+ end
+ filetype=askedformat
+ if trace_locating then
+ report_resolving("using given filetype %a",filetype)
end
- return filetype, wantedfiles
+ end
+ return filetype,wantedfiles
end
-
local function find_direct(filename,allresults)
- if not dangerous[askedformat] and isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- return "direct", { filename }
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file %a found directly",filename)
end
+ return "direct",{ filename }
+ end
end
-
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
- if trace_locating then
- report_resolving("checking wildcard '%s'", filename)
- end
- local method, result = resolvers.findwildcardfiles(filename)
- if result then
- return "wildcard", result
- end
- end
-end
-
-local function find_qualified(filename,allresults) -- this one will be split too
- if not file.is_qualified_path(filename) then
- return
- end
+ if find(filename,'%*') then
if trace_locating then
- report_resolving("checking qualified name '%s'", filename)
- end
- if isreadable(filename) then
- if trace_detail then
- report_resolving("qualified file '%s' found", filename)
- end
- return "qualified", { filename }
- end
+ report_resolving("checking wildcard %a",filename)
+ end
+ local method,result=resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard",result
+ end
+ end
+end
+local function find_qualified(filename,allresults,askedformat,alsostripped)
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name %a",filename)
+ end
+ if isreadable(filename) then
if trace_detail then
- report_resolving("locating qualified file '%s'", filename)
- end
- local forcedname, suffix = "", fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- return "qualified", { forcedname }
- end
- end
- end
- end
- if suffix and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe all
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #resolved == 0 then
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- if #resolved > 0 then
- local result = { }
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1] = rr
- end
- end
- if #result > 0 then
- return "qualified", result
- end
- end
- end
- -- a real wildcard:
- --
- -- local filelist = collect_files({basename})
- -- result = { }
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- if #result > 0 then
- -- return "qualified", result
- -- end
- end
+ report_resolving("qualified file %a found",filename)
+ end
+ return "qualified",{ filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file %a",filename)
+ end
+ local forcedname,suffix="",suffixonly(filename)
+ if suffix=="" then
+ local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s=format_suffixes[i]
+ forcedname=filename.."."..s
+ if isreadable(forcedname) then
+ if trace_locating then
+ report_resolving("no suffix, forcing format filetype %a",s)
+ end
+ return "qualified",{ forcedname }
+ end
+ end
+ end
+ end
+ if alsostripped and suffix and suffix~="" then
+ local basename=filebasename(filename)
+ local pattern=lpegmatch(preparetreepattern,filename)
+ local savedformat=askedformat
+ local format=savedformat or ""
+ if format=="" then
+ askedformat=resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat="othertextfiles"
+ end
+ if basename~=filename then
+ local resolved=collect_instance_files(basename,askedformat,allresults)
+ if #resolved==0 then
+ local lowered=lower(basename)
+ if filename~=lowered then
+ resolved=collect_instance_files(lowered,askedformat,allresults)
+ end
+ end
+ resolvers.format=savedformat
+ if #resolved>0 then
+ local result={}
+ for r=1,#resolved do
+ local rr=resolved[r]
+ if find(rr,pattern) then
+ result[#result+1]=rr
+ end
+ end
+ if #result>0 then
+ return "qualified",result
+ end
+ end
+ end
+ end
end
-
local function check_subpath(fname)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by deep scanning",fname)
- end
- return fname
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found %a by deep scanning",fname)
end
+ return fname
+ end
end
-
local function find_intree(filename,filetype,wantedfiles,allresults)
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- local method = "intree"
- if pathlist and #pathlist > 0 then
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
+ local typespec=resolvers.variableofformat(filetype)
+ local pathlist=resolvers.expandedpathlist(typespec)
+ local method="intree"
+ if pathlist and #pathlist>0 then
+ local filelist=collect_files(wantedfiles)
+ local dirlist={}
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i]=filedirname(filelist[i][3]).."/"
+ end
+ end
+ if trace_detail then
+ report_resolving("checking filename %a",filename)
+ end
+ local resolve=resolvers.resolve
+ local result={}
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local pathname=lpegmatch(inhibitstripper,path)
+ local doscan=path==pathname
+ if not find (pathname,'//$') then
+ doscan=false
+ end
+ local done=false
+ if filelist then
+ local expression=makepathexpression(pathname)
if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- local result = { }
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname,'//$') then
- doscan = false -- we check directly on the path
- end
- local done = false
- -- using file list
- if filelist then -- database
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern %a for path %a",expression,pathname)
+ end
+ for k=1,#filelist do
+ local fl=filelist[k]
+ local f=fl[2]
+ local d=dirlist[k]
+ if find(d,expression) or find(resolve(d),expression) then
+ result[#result+1]=resolve(fl[3])
+ done=true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
+ end
+ else
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
+ end
+ end
+ end
+ if done then
+ method="database"
+ else
+ method="filesystem"
+ pathname=gsub(pathname,"/+$","")
+ pathname=resolve(pathname)
+ local scheme=url.hasscheme(pathname)
+ if not scheme or scheme=="file" then
+ local pname=gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
+ end
+ if not done and doscan then
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
break
+ end
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ end
end
- end
- end
- if done then
- method = "database"
- else
- method = "filesystem" -- bonus, even when !! is specified
- pathname = gsub(pathname,"/+$","")
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- if can_be_dir(pname) then
- -- quick root scan first
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local fname = check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- if not done and doscan then
- -- collect files in path (and cache the result)
- local files = resolvers.simplescanfiles(pname,false,true)
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local subpath = files[w]
- if not subpath or subpath == "" then
- -- rootscan already done
- elseif type(subpath) == "string" then
- local fname = check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- else
- for i=1,#subpath do
- local sp = subpath[i]
- if sp == "" then
- -- roottest already done
- else
- local fname = check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- end
- if done and not allresults then
- break
- end
- end
- end
- end
- end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
+ if done and not allresults then
+ break
end
+ end
end
+ end
end
- -- todo recursive scanning
- if done and not allresults then
- break
- end
- end
- if #result > 0 then
- return method, result
+ else
+ end
end
+ end
+ if done and not allresults then
+ break
+ end
end
+ if #result>0 then
+ return method,result
+ end
+ end
end
-
local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
+ end
+ local result={}
+ for k=1,#wantedfiles do
+ local fname=wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename=fname
+ result[#result+1]=filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
+ end
+ if #result>0 then
+ return "onpath",result
+ end
+end
+local function find_otherwise(filename,filetype,wantedfiles,allresults)
+ local filelist=collect_files(wantedfiles)
+ local fl=filelist and filelist[1]
+ if fl then
+ return "otherwise",{ resolvers.resolve(fl[3]) }
+ end
+end
+collect_instance_files=function(filename,askedformat,allresults)
+ askedformat=askedformat or ""
+ filename=collapsepath(filename)
+ if allresults then
+ local filetype,wantedfiles=find_analyze(filename,askedformat)
+ local results={
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true,askedformat) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result,status,done={},{},{}
+ for k,r in next,results do
+ local method,list=r[1],r[2]
+ if method and list then
+ for i=1,#list do
+ local c=collapsepath(list[i])
+ if not done[c] then
+ result[#result+1]=c
+ done[c]=true
+ end
+ status[#status+1]=formatters["%-10s: %s"](method,c)
+ end
+ end
+ end
if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
- end
- local result = { }
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename = fname
- result[#result+1] = filejoin('.',fname)
- if not allresults then
- break
- end
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result,status
+ else
+ local method,result,stamp,filetype,wantedfiles
+ if instance.remember then
+ stamp=formatters["%s--%s"](filename,askedformat)
+ result=stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file %a",filename)
end
+ return result
+ end
end
- if #result > 0 then
- return "onpath", result
- end
-end
-
-local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- return "otherwise", { resolvers.resolve(fl[3]) } -- filename
- end
-end
-
--- we could have a loop over the 6 functions but then we'd have to
--- always analyze
-
-collect_instance_files = function(filename,askedformat,allresults) -- uses nested
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- if allresults then
- -- no need for caching, only used for tracing
- local filetype, wantedfiles = find_analyze(filename,askedformat)
- local results = {
- { find_direct (filename,true) },
- { find_wildcard (filename,true) },
- { find_qualified(filename,true) },
- { find_intree (filename,filetype,wantedfiles,true) },
- { find_onpath (filename,filetype,wantedfiles,true) },
- { find_otherwise(filename,filetype,wantedfiles,true) },
- }
- local result, status, done = { }, { }, { }
- for k, r in next, results do
- local method, list = r[1], r[2]
- if method and list then
- for i=1,#list do
- local c = collapsepath(list[i])
- if not done[c] then
- result[#result+1] = c
- done[c] = true
- end
- status[#status+1] = format("%-10s: %s",method,c)
- end
- end
- end
- if trace_detail then
- report_resolving("lookup status: %s",table.serialize(status,filename))
- end
- return result, status
- else
- local method, result, stamp, filetype, wantedfiles
- if instance.remember then
- stamp = format("%s--%s", filename, askedformat)
- result = stamp and instance.found[stamp]
- if result then
- if trace_locating then
- report_resolving("remembered file '%s'",filename)
- end
- return result
- end
- end
- method, result = find_direct(filename)
+ method,result=find_direct(filename)
+ if not result then
+ method,result=find_wildcard(filename)
+ if not result then
+ method,result=find_qualified(filename,false,askedformat)
if not result then
- method, result = find_wildcard(filename)
+ filetype,wantedfiles=find_analyze(filename,askedformat)
+ method,result=find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_onpath(filename,filetype,wantedfiles)
if not result then
- method, result = find_qualified(filename)
- if not result then
- filetype, wantedfiles = find_analyze(filename,askedformat)
- method, result = find_intree(filename,filetype,wantedfiles)
- if not result then
- method, result = find_onpath(filename,filetype,wantedfiles)
- if not result then
- method, result = find_otherwise(filename,filetype,wantedfiles)
- end
- end
- end
+ method,result=find_otherwise(filename,filetype,wantedfiles)
end
+ end
end
- if result and #result > 0 then
- local foundname = collapsepath(result[1])
- resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
- result = { foundname }
- else
- result = { } -- maybe false
- end
- if stamp then
- if trace_locating then
- report_resolving("remembering file '%s'",filename)
- end
- instance.found[stamp] = result
- end
- return result
+ end
+ end
+ if result and #result>0 then
+ local foundname=collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result={ foundname }
+ else
+ result={}
end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file %a",filename)
+ end
+ instance.found[stamp]=result
+ end
+ return result
+ end
end
-
--- -- -- end of main file search routing -- -- --
-
-
local function findfiles(filename,filetype,allresults)
- local result, status = collect_instance_files(filename,filetype or "",allresults)
- if not result or #result == 0 then
- local lowered = lower(filename)
- if filename ~= lowered then
- result, status = collect_instance_files(lowered,filetype or "",allresults)
- end
+ local result,status=collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result==0 then
+ local lowered=lower(filename)
+ if filename~=lowered then
+ result,status=collect_instance_files(lowered,filetype or "",allresults)
end
- return result or { }, status
+ end
+ return result or {},status
end
-
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ return findfiles(filename,filetype,true)
end
-
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ return findfiles(filename,filetype,false)[1] or ""
end
-
function resolvers.findpath(filename,filetype)
- return filedirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
-
local function findgivenfiles(filename,allresults)
- local bname, result = filebasename(filename), { }
- local hashes = instance.hashes
- local noffound = 0
- for k=1,#hashes do
- local hash = hashes[k]
- local files = instance.files[hash.name] or { }
- local blist = files[bname]
- if not blist then
- local rname = "remap:"..bname
- blist = files[rname]
- if blist then
- bname = files[rname]
- blist = files[bname]
- end
+ local bname,result=filebasename(filename),{}
+ local hashes=instance.hashes
+ local noffound=0
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local files=instance.files[hash.name] or {}
+ local blist=files[bname]
+ if not blist then
+ local rname="remap:"..bname
+ blist=files[rname]
+ if blist then
+ bname=files[rname]
+ blist=files[bname]
+ end
+ end
+ if blist then
+ if type(blist)=='string' then
+ local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then
+ break
+ end
end
- if blist then
- if type(blist) == 'string' then
- local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- end
- end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then break end
+ end
end
+ end
end
- return result
+ end
+ return result
end
-
function resolvers.findgivenfiles(filename)
- return findgivenfiles(filename,true)
+ return findgivenfiles(filename,true)
end
-
function resolvers.findgivenfile(filename)
- return findgivenfiles(filename,false)[1] or ""
+ return findgivenfiles(filename,false)[1] or ""
end
-
local function doit(path,blist,bname,tag,variant,result,allresults)
- local done = false
- if blist and variant then
- local resolve = resolvers.resolve -- added
- if type(blist) == 'string' then
- -- make function and share code
- if find(lower(blist),path) then
- local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- if find(lower(vv),path) then
- local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- if not allresults then break end
- end
- end
+ local done=false
+ if blist and variant then
+ local resolve=resolvers.resolve
+ if type(blist)=='string' then
+ if find(lower(blist),path) then
+ local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ if find(lower(vv),path) then
+ local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ if not allresults then break end
end
+ end
end
- return done
+ end
+ return done
end
-
-
-local makewildcard = Cs(
- (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
- + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
+local makewildcard=Cs(
+ (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
-
function resolvers.wildcardpattern(pattern)
- return lpegmatch(makewildcard,pattern) or pattern
-end
-
-local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
- result = result or { }
- local base = filebasename(filename)
- local dirn = filedirname(filename)
- local path = lower(lpegmatch(makewildcard,dirn) or dirn)
- local name = lower(lpegmatch(makewildcard,base) or base)
- local files, done = instance.files, false
- if find(name,"%*") then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- for kk, hh in next, files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
- if done and not allresults then break end
- end
- end
- end
- end
- else
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+local function findwildcardfiles(filename,allresults,result)
+ result=result or {}
+ local base=filebasename(filename)
+ local dirn=filedirname(filename)
+ local path=lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name=lower(lpegmatch(makewildcard,base) or base)
+ local files,done=instance.files,false
+ if find(name,"%*") then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ for kk,hh in next,files[hashname] do
+ if not find(kk,"^remap:") then
+ if find(lower(kk),name) then
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
if done and not allresults then break end
+ end
end
+ end
end
- -- we can consider also searching the paths not in the database, but then
- -- we end up with a messy search (all // in all path specs)
- return result
+ else
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done=true end
+ if done and not allresults then break end
+ end
+ end
+ return result
end
-
function resolvers.findwildcardfiles(filename,result)
- return findwildcardfiles(filename,true,result)
+ return findwildcardfiles(filename,true,result)
end
-
function resolvers.findwildcardfile(filename)
- return findwildcardfiles(filename,false)[1] or ""
+ return findwildcardfiles(filename,false)[1] or ""
end
-
--- main user functions
-
function resolvers.automount()
- -- implemented later
end
-
function resolvers.load(option)
- statistics.starttiming(instance)
- identify_configuration_files()
- load_configuration_files()
- if option ~= "nofiles" then
- load_databases()
- resolvers.automount()
- end
- statistics.stoptiming(instance)
- local files = instance.files
- return files and next(files) and true
+ statistics.starttiming(instance)
+ identify_configuration_files()
+ load_configuration_files()
+ if option~="nofiles" then
+ load_databases()
+ resolvers.automount()
+ end
+ statistics.stoptiming(instance)
+ local files=instance.files
+ return files and next(files) and true
end
-
function resolvers.loadtime()
- return statistics.elapsedtime(instance)
+ return statistics.elapsedtime(instance)
end
-
local function report(str)
+ if trace_locating then
+ report_resolving(str)
+ else
+ print(str)
+ end
+end
+function resolvers.dowithfilesandreport(command,files,...)
+ if files and #files>0 then
if trace_locating then
- report_resolving(str) -- has already verbose
- else
- print(str)
+ report('')
end
-end
-
-function resolvers.dowithfilesandreport(command, files, ...) -- will move
- if files and #files > 0 then
- if trace_locating then
- report('') -- ?
- end
- if type(files) == "string" then
- files = { files }
- end
- for f=1,#files do
- local file = files[f]
- local result = command(file,...)
- if type(result) == 'string' then
- report(result)
- else
- for i=1,#result do
- report(result[i]) -- could be unpack
- end
- end
+ if type(files)=="string" then
+ files={ files }
+ end
+ for f=1,#files do
+ local file=files[f]
+ local result=command(file,...)
+ if type(result)=='string' then
+ report(result)
+ else
+ for i=1,#result do
+ report(result[i])
end
+ end
end
+ end
end
-
--- obsolete
-
--- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
--- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-
-function resolvers.showpath(str) -- output search path for file type NAME
- return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+function resolvers.showpath(str)
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
-
-function resolvers.registerfile(files, name, path)
- if files[name] then
- if type(files[name]) == 'string' then
- files[name] = { files[name], path }
- else
- files[name] = path
- end
+function resolvers.registerfile(files,name,path)
+ if files[name] then
+ if type(files[name])=='string' then
+ files[name]={ files[name],path }
else
- files[name] = path
+ files[name]=path
end
+ else
+ files[name]=path
+ end
end
-
function resolvers.dowithpath(name,func)
- local pathlist = resolvers.expandedpathlist(name)
- for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
- end
+ local pathlist=resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.cleanpath(pathlist[i]))
+ end
end
-
function resolvers.dowithvariable(name,func)
- func(expandedvariable(name))
+ func(expandedvariable(name))
end
-
function resolvers.locateformat(name)
- local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
- local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
- if fmtname == "" then
- fmtname = resolvers.findfile(barename..".fmt")
- fmtname = resolvers.cleanpath(fmtname)
- end
- if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
- if lfs.isfile(luiname) then
- return barename, luiname
- elseif lfs.isfile(lucname) then
- return barename, lucname
- elseif lfs.isfile(luaname) then
- return barename, luaname
- end
- end
- return nil, nil
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fullname=file.addsuffix(barename,"fmt")
+ local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
+ if fmtname=="" then
+ fmtname=resolvers.findfile(fullname)
+ fmtname=resolvers.cleanpath(fmtname)
+ end
+ if fmtname~="" then
+ local barename=file.removesuffix(fmtname)
+ local luaname=file.addsuffix(barename,luasuffixes.lua)
+ local lucname=file.addsuffix(barename,luasuffixes.luc)
+ local luiname=file.addsuffix(barename,luasuffixes.lui)
+ if lfs.isfile(luiname) then
+ return barename,luiname
+ elseif lfs.isfile(lucname) then
+ return barename,lucname
+ elseif lfs.isfile(luaname) then
+ return barename,luaname
+ end
+ end
+ return nil,nil
end
-
function resolvers.booleanvariable(str,default)
- local b = resolvers.expansion(str)
- if b == "" then
- return default
- else
- b = toboolean(b)
- return (b == nil and default) or b
- end
-end
-
-function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
- local instance = resolvers.instance
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobtype = hash.type
- local blobpath = hash.name
- if blobpath then
- if before then
- before(blobtype,blobpath,pattern)
- end
- local files = instance.files[blobpath]
- local total, checked, done = 0, 0, 0
- if files then
- for k,v in next, files do
- total = total + 1
- if find(k,"^remap:") then
- k = files[k]
- v = k -- files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- checked = checked + 1
- if handle(blobtype,blobpath,v,k) then
- done = done + 1
- end
- else
- checked = checked + #v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done = done + 1
- end
- end
- end
- end
+ local b=resolvers.expansion(str)
+ if b=="" then
+ return default
+ else
+ b=toboolean(b)
+ return (b==nil and default) or b
+ end
+end
+function resolvers.dowithfilesintree(pattern,handle,before,after)
+ local instance=resolvers.instance
+ local hashes=instance.hashes
+ for i=1,#hashes do
+ local hash=hashes[i]
+ local blobtype=hash.type
+ local blobpath=hash.name
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files=instance.files[blobpath]
+ local total,checked,done=0,0,0
+ if files then
+ for k,v in table.sortedhash(files) do
+ total=total+1
+ if find(k,"^remap:") then
+ elseif find(k,pattern) then
+ if type(v)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,v,k) then
+ done=done+1
+ end
+ else
+ checked=checked+#v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done=done+1
end
+ end
end
- if after then
- after(blobtype,blobpath,pattern,total,checked,done)
- end
+ end
end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
end
+ end
end
-
-resolvers.obsolete = resolvers.obsolete or { }
-local obsolete = resolvers.obsolete
-
-resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
-resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
+resolvers.obsolete=resolvers.obsolete or {}
+local obsolete=resolvers.obsolete
+resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
+resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
end -- of closure
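The lookup chain above (direct, wildcard, qualified, intree, onpath, otherwise) backs the public entry points of data-res. A minimal usage sketch, assuming an mtxrun/ConTeXt Lua session in which the resolvers have been initialized; the file names are only examples:

    -- sketch only: assumes resolvers.load() can see a configured TeX tree
    resolvers.load()                                -- reads configuration files and file databases
    local one = resolvers.findfile("context.mkiv")  -- first match, or "" when nothing is found
    local all = resolvers.findfiles("texmf.cnf")    -- table with every match (retries a lowercased name)
    local dir = resolvers.findpath("context.mkiv")  -- directory part of the first match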
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-pre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- It could be interesting to hook the resolver in the file
--- opener so that unresolved prefixes travel around and we
--- get more abstraction.
-
--- As we use this beforehand we will move this up in the chain
--- of loading.
+package.loaded["data-pre"] = package.loaded["data-pre"] or true
+-- original size: 6430, stripped down to: 4219
-local resolvers = resolvers
-local prefixes = utilities.storage.allocate()
-resolvers.prefixes = prefixes
-
-local gsub = string.gsub
-local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
-local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-
--- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
-
-prefixes.environment = function(str)
- return cleanpath(expansion(str))
-end
-
-prefixes.relative = function(str,n) -- lfs.isfile
- if io.exists(str) then
- -- nothing
- elseif io.exists("./" .. str) then
- str = "./" .. str
- else
- local p = "../"
- for i=1,n or 2 do
- if io.exists(p .. str) then
- str = p .. str
- break
- else
- p = p .. "../"
- end
- end
- end
- return cleanpath(str)
-end
-
-prefixes.auto = function(str)
- local fullname = prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname = prefixes.locate(str)
- end
- return fullname
-end
-
-prefixes.locate = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath((fullname ~= "" and fullname) or str)
-end
-
-prefixes.filename = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
-end
-
-prefixes.pathname = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
-end
-
-prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
-end
-
-prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
-end
-
-prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
-end
-
-prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
-end
-
-prefixes.env = prefixes.environment
-prefixes.rel = prefixes.relative
-prefixes.loc = prefixes.locate
-prefixes.kpse = prefixes.locate
-prefixes.full = prefixes.locate
-prefixes.file = prefixes.filename
-prefixes.path = prefixes.pathname
-
+if not modules then modules={} end modules ['data-pre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local getenv=resolvers.getenv
+local P,S,R,C,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.match
+local joinpath,basename,dirname=file.join,file.basename,file.dirname
+local getmetatable,rawset,type=getmetatable,rawset,type
+prefixes.environment=function(str)
+ return cleanpath(expansion(str))
+end
+prefixes.relative=function(str,n)
+ if io.exists(str) then
+ elseif io.exists("./"..str) then
+ str="./"..str
+ else
+ local p="../"
+ for i=1,n or 2 do
+ if io.exists(p..str) then
+ str=p..str
+ break
+ else
+ p=p.."../"
+ end
+ end
+ end
+ return cleanpath(str)
+end
+prefixes.auto=function(str)
+ local fullname=prefixes.relative(str)
+ if not lfs.isfile(fullname) then
+ fullname=prefixes.locate(str)
+ end
+ return fullname
+end
+prefixes.locate=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath((fullname~="" and fullname) or str)
+end
+prefixes.filename=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(basename((fullname~="" and fullname) or str))
+end
+prefixes.pathname=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(dirname((fullname~="" and fullname) or str))
+end
+prefixes.selfautoloc=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
+end
+prefixes.selfautoparent=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
+end
+prefixes.selfautodir=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
+end
+prefixes.home=function(str)
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+local function toppath()
+ local inputstack=resolvers.inputstack
+ if not inputstack then
+ return "."
+ end
+ local pathname=dirname(inputstack[#inputstack] or "")
+ if pathname=="" then
+ return "."
+ else
+ return pathname
+ end
+end
+resolvers.toppath=toppath
+prefixes.toppath=function(str)
+ return cleanpath(joinpath(toppath(),str))
+end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
function resolvers.allprefixes(separator)
- local all = table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i] = all[i] .. ":"
- end
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
end
- return all
+ end
+ return all
end
-
local function _resolve_(method,target)
- local action = prefixes[method]
- if action then
- return action(target)
- else
- return method .. ":" .. target
- end
-end
-
-local resolved, abstract = { }, { }
-
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+local resolved,abstract={},{}
function resolvers.resetresolve(str)
- resolved, abstract = { }, { }
+ resolved,abstract={},{}
end
-
-local function resolve(str) -- use schemes, this one is then for the commandline only
- if type(str) == "table" then
- local t = { }
- for i=1,#str do
- t[i] = resolve(str[i])
- end
- return t
- else
- local res = resolved[str]
- if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
- resolved[str] = res
- abstract[res] = str
- end
- return res
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local function resolve(str)
+ if type(str)=="table" then
+ local t={}
+ for i=1,#str do
+ t[i]=resolve(str[i])
end
+ return t
+ else
+ local res=resolved[str]
+ if not res then
+ res=lpegmatch(pattern,str)
+ resolved[str]=res
+ abstract[res]=str
+ end
+ return res
+ end
end
-
local function unresolve(str)
- return abstract[str] or str
+ return abstract[str] or str
end
-
-resolvers.resolve = resolve
-resolvers.unresolve = unresolve
-
-if os.uname then
-
- for k, v in next, os.uname() do
- if not prefixes[k] then
- prefixes[k] = function() return v end
- end
+resolvers.resolve=resolve
+resolvers.unresolve=unresolve
+if type(os.uname)=="function" then
+ for k,v in next,os.uname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
end
-
+ end
end
-
-if os.type == "unix" then
-
- local pattern
-
- local function makepattern(t,k,v)
- local colon = P(":")
- local p
- for k, v in table.sortedpairs(prefixes) do
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
- end
-
- makepattern()
-
- getmetatable(prefixes).__newindex = makepattern
-
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-
-else -- already the default:
-
- function resolvers.repath(str)
- return str
- end
-
+if os.type=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ getmetatable(prefixes).__newindex=makepattern
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
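The prefixes table drives resolvers.resolve: every name: head recognized by the lpeg pattern is replaced by the result of the matching prefix function. A small sketch, assuming a running session; the paths are only illustrative:

    -- sketch only: 'home:', 'environment:' and 'toppath:' are defined in the table above
    local p1 = resolvers.resolve("home:texmf/tex")         -- joins with $HOME and cleans the path
    local p2 = resolvers.resolve("environment:TEXMFVAR")   -- expansion of the TEXMFVAR variable
    local p3 = resolvers.resolve("toppath:images/cow.pdf") -- relative to the current input file's directory
    print(resolvers.unresolve(p1))                         -- back to the abstract "home:texmf/tex" form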
@@ -14237,172 +14214,159 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local methodhandler = resolvers.methodhandler
-local registermethod = resolvers.registermethod
-
-local finders = allocate { helpers = { }, notfound = function() end }
-local openers = allocate { helpers = { }, notfound = function() end }
-local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+package.loaded["data-inp"] = package.loaded["data-inp"] or true
-registermethod("finders", finders, "uri")
-registermethod("openers", openers, "uri")
-registermethod("loaders", loaders, "uri")
+-- original size: 910, stripped down to: 823
-resolvers.finders = finders
-resolvers.openers = openers
-resolvers.loaders = loaders
+if not modules then modules={} end modules ['data-inp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local methodhandler=resolvers.methodhandler
+local registermethod=resolvers.registermethod
+local finders=allocate { helpers={},notfound=function() end }
+local openers=allocate { helpers={},notfound=function() end }
+local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
+registermethod("finders",finders,"uri")
+registermethod("openers",openers,"uri")
+registermethod("loaders",loaders,"uri")
+resolvers.finders=finders
+resolvers.openers=openers
+resolvers.loaders=loaders
end -- of closure
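data-inp only wires empty finder, opener and loader tables into the method dispatcher; concrete handlers (such as the file ones in data-fil further down) are added by assigning functions to these tables. A hedged sketch of what such an assignment could look like; myscheme is a hypothetical scheme name, not something this commit defines:

    -- sketch only: mirrors the shape of finders.file below; 'myscheme' is made up
    local finders = resolvers.finders
    function finders.myscheme(specification,filetype)
      local filename = specification.filename
      local found = resolvers.findfile(filename,filetype)
      if found and found ~= "" then
        return found
      end
      return finders.notfound()
    end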
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-out'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local registermethod = resolvers.registermethod
+package.loaded["data-out"] = package.loaded["data-out"] or true
-local savers = allocate { helpers = { } }
+-- original size: 530, stripped down to: 475
-resolvers.savers = savers
-
-registermethod("savers", savers, "uri")
+if not modules then modules={} end modules ['data-out']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registermethod=resolvers.registermethod
+local savers=allocate { helpers={} }
+resolvers.savers=savers
+registermethod("savers",savers,"uri")
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-fil'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["data-fil"] = package.loaded["data-fil"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_files = logs.reporter("resolvers","files")
-
-local resolvers = resolvers
-
-local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
-
-local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+-- original size: 3801, stripped down to: 3231
+if not modules then modules={} end modules ['data-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_files=logs.reporter("resolvers","files")
+local resolvers=resolvers
+local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
+local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
+local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_files("file locator '%s' found as '%s'",name,realname)
- end
- resolvers.appendhash('file',name,true) -- cache
- elseif trace_locating then
- report_files("file locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator %a found as %a",name,realname)
end
+ resolvers.appendhash('file',name,true)
+ elseif trace_locating then
+ report_files("file locator %a not found",name)
+ end
end
-
function hashers.file(specification)
- local name = specification.filename
- local content = caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local name=specification.filename
+ local content=caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
end
-
function generators.file(specification)
- local path = specification.filename
- local content = resolvers.scanfiles(path,false,true) -- scan once
- resolvers.registerfilehash(path,content,true)
+ local path=specification.filename
+ local content=resolvers.scanfiles(path,false,true)
+ resolvers.registerfilehash(path,content,true)
end
-
-concatinators.file = file.join
-
+concatinators.file=file.join
function finders.file(specification,filetype)
- local filename = specification.filename
- local foundname = resolvers.findfile(filename,filetype)
- if foundname and foundname ~= "" then
- if trace_locating then
- report_files("file finder: '%s' found",filename)
- end
- return foundname
- else
- if trace_locating then
- report_files("file finder: %s' not found",filename)
- end
- return finders.notfound()
+ local filename=specification.filename
+ local foundname=resolvers.findfile(filename,filetype)
+ if foundname and foundname~="" then
+ if trace_locating then
+ report_files("file finder: %a found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %a not found",filename)
end
+ return finders.notfound()
+ end
end
-
--- The default textopener will be overloaded later on.
-
function openers.helpers.textopener(tag,filename,f)
- return {
- reader = function() return f:read () end,
- close = function() logs.show_close(filename) return f:close() end,
- }
+ return {
+ reader=function() return f:read () end,
+ close=function() logs.show_close(filename) return f:close() end,
+ }
end
-
function openers.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"r")
- if f then
- if trace_locating then
- report_files("file opener, '%s' opened",filename)
- end
- return openers.helpers.textopener("file",filename,f)
- end
- end
- if trace_locating then
- report_files("file opener, '%s' not found",filename)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener: %a opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
end
- return openers.notfound()
+ end
+ if trace_locating then
+ report_files("file opener: %a not found",filename)
+ end
+ return openers.notfound()
end
-
function loaders.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"rb")
- if f then
- logs.show_load(filename)
- if trace_locating then
- report_files("file loader, '%s' loaded",filename)
- end
- local s = f:read("*a")
- if checkgarbage then
- checkgarbage(#s)
- end
- f:close()
- if s then
- return true, s, #s
- end
- end
- end
- if trace_locating then
- report_files("file loader, '%s' not found",filename)
- end
- return loaders.notfound()
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader: %a loaded",filename)
+ end
+ local s=f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true,s,#s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader: %a not found",filename)
+ end
+ return loaders.notfound()
end
@@ -14410,140 +14374,118 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
+package.loaded["data-con"] = package.loaded["data-con"] or true
-containers = containers or { }
-local containers = containers
-containers.usecache = true
+-- original size: 4940, stripped down to: 3580
-local report_containers = logs.reporter("resolvers","containers")
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
end
+ return s
+ end
end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
end
-
function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
end
- return data
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
end
-
function containers.content(container,name)
- return container.storage[name]
+ return container.storage[name]
end
-
function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
+ return (gsub(lower(name),"[^%w%d]+","-"))
end
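The container API above is a small memory-plus-disk cache: define a container once, then read and write entries by name; the version field guards against stale disk data. A minimal sketch, assuming the caches subsystem is set up; category, entry name and payload are made up:

    -- sketch only: 'mycache' and 'demo' are hypothetical names
    local c    = containers.define("resolvers","mycache",1.001,true)
    local data = containers.read(c,"demo")
    if not data then
      data = containers.write(c,"demo",{ value = 123 })  -- also stamps cache_version and saves to disk
    end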
@@ -14551,102 +14493,90 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-use'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_mounts = logs.reporter("resolvers","mounts")
-
-local resolvers = resolvers
+package.loaded["data-use"] = package.loaded["data-use"] or true
--- we will make a better format, maybe something xml or just text or lua
-
-resolvers.automounted = resolvers.automounted or { }
+-- original size: 3913, stripped down to: 2998
+if not modules then modules={} end modules ['data-use']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_mounts=logs.reporter("resolvers","mounts")
+local resolvers=resolvers
+resolvers.automounted=resolvers.automounted or {}
function resolvers.automount(usecache)
- local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
- if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = caches.getreadablepaths("mount")
- end
- if mountpaths and #mountpaths > 0 then
- statistics.starttiming(resolvers.instance)
- for k=1,#mountpaths do
- local root = mountpaths[k]
- local f = io.open(root.."/url.tmi")
- if f then
- for line in f:lines() do
- if line then
- if find(line,"^[%%#%-]") then -- or %W
- -- skip
- elseif find(line,"^zip://") then
- if trace_locating then
- report_mounts("mounting %s",line)
- end
- table.insert(resolvers.automounted,line)
- resolvers.usezipfile(line)
- end
- end
- end
- f:close()
- end
+ local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
+ if (not mountpaths or #mountpaths==0) and usecache then
+ mountpaths=caches.getreadablepaths("mount")
+ end
+ if mountpaths and #mountpaths>0 then
+ statistics.starttiming(resolvers.instance)
+ for k=1,#mountpaths do
+ local root=mountpaths[k]
+ local f=io.open(root.."/url.tmi")
+ if f then
+ for line in f:lines() do
+ if line then
+ if find(line,"^[%%#%-]") then
+ elseif find(line,"^zip://") then
+ if trace_locating then
+ report_mounts("mounting %a",line)
+ end
+ table.insert(resolvers.automounted,line)
+ resolvers.usezipfile(line)
+ end
+ end
end
- statistics.stoptiming(resolvers.instance)
- end
-end
-
--- status info
-
-statistics.register("used config file", function() return caches.configfiles() end)
-statistics.register("used cache path", function() return caches.usedpaths() end)
-
--- experiment (code will move)
-
-function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.list().banner
- if formatbanner and enginebanner and sourcefile then
- local luvname = file.replacesuffix(texname,"luv")
- local luvdata = {
- enginebanner = enginebanner,
- formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
- sourcefile = sourcefile,
- }
- io.savedata(luvname,table.serialize(luvdata,true))
- end
+ f:close()
+ end
+ end
+ statistics.stoptiming(resolvers.instance)
+ end
+end
+statistics.register("used config file",function() return caches.configfiles() end)
+statistics.register("used cache path",function() return caches.usedpaths() end)
+function statistics.savefmtstatus(texname,formatbanner,sourcefile)
+ local enginebanner=status.list().banner
+ if formatbanner and enginebanner and sourcefile then
+ local luvname=file.replacesuffix(texname,"luv")
+ local luvdata={
+ enginebanner=enginebanner,
+ formatbanner=formatbanner,
+ sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
+ sourcefile=sourcefile,
+ }
+ io.savedata(luvname,table.serialize(luvdata,true))
+ end
end
-
function statistics.checkfmtstatus(texname)
- local enginebanner = status.list().banner
- if enginebanner and texname then
- local luvname = file.replacesuffix(texname,"luv")
- if lfs.isfile(luvname) then
- local luv = dofile(luvname)
- if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
- local luvbanner = luv.enginebanner or "?"
- if luvbanner ~= enginebanner then
- return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
- end
- local luvhash = luv.sourcehash or "?"
- if luvhash ~= sourcehash then
- return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
- end
- else
- return "invalid status file"
- end
- else
- return "missing status file"
- end
+ local enginebanner=status.list().banner
+ if enginebanner and texname then
+ local luvname=file.replacesuffix(texname,"luv")
+ if lfs.isfile(luvname) then
+ local luv=dofile(luvname)
+ if luv and luv.sourcefile then
+ local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
+ local luvbanner=luv.enginebanner or "?"
+ if luvbanner~=enginebanner then
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
+ end
+ local luvhash=luv.sourcehash or "?"
+ if luvhash~=sourcehash then
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
+ end
+ else
+ return "invalid status file"
+ end
+ else
+ return "missing status file"
end
- return true
+ end
+ return true
end
@@ -14654,263 +14584,235 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-zip'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- partly redone .. needs testing
+package.loaded["data-zip"] = package.loaded["data-zip"] or true
-local format, find, match = string.format, string.find, string.match
+-- original size: 8489, stripped down to: 6757
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_zip = logs.reporter("resolvers","zip")
-
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
-
-local resolvers = resolvers
-
-zip = zip or { }
-local zip = zip
-
-zip.archives = zip.archives or { }
-local archives = zip.archives
-
-zip.registeredfiles = zip.registeredfiles or { }
-local registeredfiles = zip.registeredfiles
-
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- zip.open = i_limiter.protect(zip.open)
- limited = true
- end
- end
+if not modules then modules={} end modules ['data-zip']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,find,match=string.format,string.find,string.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_zip=logs.reporter("resolvers","zip")
+local resolvers=resolvers
+zip=zip or {}
+local zip=zip
+zip.archives=zip.archives or {}
+local archives=zip.archives
+zip.registeredfiles=zip.registeredfiles or {}
+local registeredfiles=zip.registeredfiles
+local limited=false
+directives.register("system.inputmode",function(v)
+ if not limited then
+ local i_limiter=io.i_limiter(v)
+ if i_limiter then
+ zip.open=i_limiter.protect(zip.open)
+ limited=true
+ end
+ end
end)
-
-local function validzip(str) -- todo: use url splitter
- if not find(str,"^zip://") then
- return "zip:///" .. str
- else
- return str
- end
+local function validzip(str)
+ if not find(str,"^zip://") then
+ return "zip:///"..str
+ else
+ return str
+ end
end
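+-- for example: validzip("texmf.zip") --> "zip:///texmf.zip", while a name that
+-- already carries the zip:// prefix is returned unchanged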
-
function zip.openarchive(name)
- if not name or name == "" then
- return nil
- else
- local arch = archives[name]
- if not arch then
- local full = resolvers.findfile(name) or ""
- arch = (full ~= "" and zip.open(full)) or false
- archives[name] = arch
- end
- return arch
+ if not name or name=="" then
+ return nil
+ else
+ local arch=archives[name]
+ if not arch then
+ local full=resolvers.findfile(name) or ""
+ arch=(full~="" and zip.open(full)) or false
+ archives[name]=arch
end
+ return arch
+ end
end
-
function zip.closearchive(name)
- if not name or (name == "" and archives[name]) then
- zip.close(archives[name])
- archives[name] = nil
- end
+ if not name or (name=="" and archives[name]) then
+ zip.close(archives[name])
+ archives[name]=nil
+ end
end
-
function resolvers.locators.zip(specification)
- local archive = specification.filename
- local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
- if trace_locating then
- if zipfile then
- report_zip("locator, archive '%s' found",archive)
- else
- report_zip("locator, archive '%s' not found",archive)
- end
+ local archive=specification.filename
+ local zipfile=archive and archive~="" and zip.openarchive(archive)
+ if trace_locating then
+ if zipfile then
+ report_zip("locator: archive %a found",archive)
+ else
+ report_zip("locator: archive %a not found",archive)
end
+ end
end
-
function resolvers.hashers.zip(specification)
- local archive = specification.filename
- if trace_locating then
- report_zip("loading file '%s'",archive)
- end
- resolvers.usezipfile(specification.original)
+ local archive=specification.filename
+ if trace_locating then
+ report_zip("loading file %a",archive)
+ end
+ resolvers.usezipfile(specification.original)
+end
+function resolvers.concatinators.zip(zipfile,path,name)
+ if not path or path=="" then
+ return format('%s?name=%s',zipfile,name)
+ else
+ return format('%s?name=%s/%s',zipfile,path,name)
+ end
end
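+-- for example: concatinators.zip("zip:///texmf.zip","tex/context","foo.tex")
+-- --> "zip:///texmf.zip?name=tex/context/foo.tex"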
-
-function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
- if not path or path == "" then
- return format('%s?name=%s',zipfile,name)
- else
- return format('%s?name=%s/%s',zipfile,path,name)
- end
-end
-
function resolvers.finders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("finder, archive '%s' found",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- dfile = zfile:close()
- if trace_locating then
- report_zip("finder, file '%s' found",queryname)
- end
- return specification.original
- elseif trace_locating then
- report_zip("finder, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("finder, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("finder: archive %a found",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ dfile=zfile:close()
+ if trace_locating then
+ report_zip("finder: file %a found",queryname)
+ end
+ return specification.original
+ elseif trace_locating then
+ report_zip("finder: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("finder: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("finder, '%s' not found",original)
- end
- return resolvers.finders.notfound()
+ end
+ if trace_locating then
+ report_zip("finder: %a not found",original)
+ end
+ return resolvers.finders.notfound()
end
-
function resolvers.openers.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("opener, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- if trace_locating then
- report_zip("opener, file '%s' found",queryname)
- end
- return resolvers.openers.helpers.textopener('zip',original,dfile)
- elseif trace_locating then
- report_zip("opener, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("opener, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+          report_zip("opener: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ if trace_locating then
+ report_zip("opener: file %a found",queryname)
+ end
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
+ elseif trace_locating then
+ report_zip("opener: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("opener: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("opener, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("opener: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
function resolvers.loaders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("loader, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- logs.show_load(original)
- if trace_locating then
- report_zip("loader, file '%s' loaded",original)
- end
- local s = dfile:read("*all")
- dfile:close()
- return true, s, #s
- elseif trace_locating then
- report_zip("loader, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("loader, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("loader: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ logs.show_load(original)
+ if trace_locating then
+            report_zip("loader: file %a loaded",original)
+ end
+ local s=dfile:read("*all")
+ dfile:close()
+ return true,s,#s
+ elseif trace_locating then
+ report_zip("loader: file %a not found",queryname)
end
+ elseif trace_locating then
+      report_zip("loader: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("loader, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("loader: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
--- zip:///somefile.zip
--- zip:///somefile.zip?tree=texmf-local -> mount
-
function resolvers.usezipfile(archive)
- local specification = resolvers.splitmethod(archive) -- to be sure
- local archive = specification.filename
- if archive and not registeredfiles[archive] then
- local z = zip.openarchive(archive)
- if z then
- local instance = resolvers.instance
- local tree = url.query(specification.query).tree or ""
- if trace_locating then
- report_zip("registering, registering archive '%s'",archive)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',archive)
- resolvers.extendtexmfvariable(archive) -- resets hashes too
- registeredfiles[archive] = z
- instance.files[archive] = resolvers.registerzipfile(z,tree)
- statistics.stoptiming(instance)
- elseif trace_locating then
- report_zip("registering, unknown archive '%s'",archive)
- end
+ local specification=resolvers.splitmethod(archive)
+ local archive=specification.filename
+ if archive and not registeredfiles[archive] then
+ local z=zip.openarchive(archive)
+ if z then
+ local instance=resolvers.instance
+ local tree=url.query(specification.query).tree or ""
+ if trace_locating then
+ report_zip("registering: archive %a",archive)
+ end
+ statistics.starttiming(instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive)
+ registeredfiles[archive]=z
+ instance.files[archive]=resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(instance)
elseif trace_locating then
- report_zip("registering, '%s' not found",archive)
+ report_zip("registering: unknown archive %a",archive)
end
+ elseif trace_locating then
+ report_zip("registering: archive %a not found",archive)
+ end
end
-
function resolvers.registerzipfile(z,tree)
- local files, filter = { }, ""
- if tree == "" then
- filter = "^(.+)/(.-)$"
+ local files,filter={},""
+ if tree=="" then
+ filter="^(.+)/(.-)$"
+ else
+ filter=format("^%s/(.+)/(.-)$",tree)
+ end
+ if trace_locating then
+ report_zip("registering: using filter %a",filter)
+ end
+ local register,n=resolvers.registerfile,0
+ for i in z:files() do
+ local path,name=match(i.filename,filter)
+ if path then
+ if name and name~='' then
+ register(files,name,path)
+ n=n+1
+ else
+ end
else
- filter = format("^%s/(.+)/(.-)$",tree)
+ register(files,i.filename,'')
+ n=n+1
end
- if trace_locating then
- report_zip("registering, using filter '%s'",filter)
- end
- local register, n = resolvers.registerfile, 0
- for i in z:files() do
- local path, name = match(i.filename,filter)
- if path then
- if name and name ~= '' then
- register(files, name, path)
- n = n + 1
- else
- -- directory
- end
- else
- register(files, i.filename, '')
- n = n + 1
- end
- end
- report_zip("registering, %s files registered",n)
- return files
+ end
+ report_zip("registering: %s files registered",n)
+ return files
end
@@ -14918,393 +14820,447 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- \input tree://oeps1/**/oeps.tex
-
-local find, gsub, format = string.find, string.gsub, string.format
+package.loaded["data-tre"] = package.loaded["data-tre"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_trees = logs.reporter("resolvers","trees")
-
-local resolvers = resolvers
-
-local done, found, notfound = { }, { }, resolvers.finders.notfound
+-- original size: 2508, stripped down to: 2074
+if not modules then modules={} end modules ['data-tre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,gsub,format=string.find,string.gsub,string.format
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_trees=logs.reporter("resolvers","trees")
+local resolvers=resolvers
+local done,found,notfound={},{},resolvers.finders.notfound
function resolvers.finders.tree(specification)
- local spec = specification.filename
- local fnd = found[spec]
- if fnd == nil then
- if spec ~= "" then
- local path, name = file.dirname(spec), file.basename(spec)
- if path == "" then path = "." end
- local hash = done[path]
- if not hash then
- local pattern = path .. "/*" -- we will use the proper splitter
- hash = dir.glob(pattern)
- done[path] = hash
- end
- local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
- for k=1,#hash do
- local v = hash[k]
- if find(v,pattern) then
- found[spec] = v
- return v
- end
- end
- end
- fnd = notfound() -- false
- found[spec] = fnd
- end
- return fnd
+ local spec=specification.filename
+ local fnd=found[spec]
+ if fnd==nil then
+ if spec~="" then
+ local path,name=file.dirname(spec),file.basename(spec)
+ if path=="" then path="." end
+ local hash=done[path]
+ if not hash then
+ local pattern=path.."/*"
+ hash=dir.glob(pattern)
+ done[path]=hash
+ end
+ local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
+ for k=1,#hash do
+ local v=hash[k]
+ if find(v,pattern) then
+ found[spec]=v
+ return v
+ end
+ end
+ end
+ fnd=notfound()
+ found[spec]=fnd
+ end
+ return fnd
end
-
function resolvers.locators.tree(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_trees("locator '%s' found",realname)
- end
- resolvers.appendhash('tree',name,false) -- don't cache
- elseif trace_locating then
- report_trees("locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_trees("locator %a found",realname)
end
+ resolvers.appendhash('tree',name,false)
+ elseif trace_locating then
+ report_trees("locator %a not found",name)
+ end
end
-
function resolvers.hashers.tree(specification)
- local name = specification.filename
- if trace_locating then
- report_trees("analysing '%s'",name)
- end
- resolvers.methodhandler("hashers",name)
-
- resolvers.generators.file(specification)
+ local name=specification.filename
+ if trace_locating then
+ report_trees("analysing %a",name)
+ end
+ resolvers.methodhandler("hashers",name)
+ resolvers.generators.file(specification)
end
-
-resolvers.concatinators.tree = resolvers.concatinators.file
-resolvers.generators.tree = resolvers.generators.file
-resolvers.openers.tree = resolvers.openers.file
-resolvers.loaders.tree = resolvers.loaders.file
+resolvers.concatinators.tree=resolvers.concatinators.file
+resolvers.generators.tree=resolvers.generators.file
+resolvers.openers.tree=resolvers.openers.file
+resolvers.loaders.tree=resolvers.loaders.file
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this one is replaced by data-sch.lua --
-
-local gsub = string.gsub
-
-local resolvers = resolvers
+package.loaded["data-sch"] = package.loaded["data-sch"] or true
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
-
-local cached = { }
-
-local function runcurl(specification)
- local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[original] then
- if not io.exists(cachename) then
- cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
- end
- if io.exists(cachename) then
- cached[original] = cachename
- else
- cached[original] = ""
- end
- end
- return cached[original]
-end
-
--- old code: we could be cleaner using specification (see schemes)
-
-local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
-end
-
-local opener = openers.file
-local loader = loaders.file
-
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
-end
-
-resolvers.curl.install = install
-
-install('http')
-install('https')
-install('ftp')
-
-
-end -- of closure
+-- original size: 6202, stripped down to: 5149
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['data-sch']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_libraries = logs.reporter("resolvers","libraries")
-
-local gsub, insert = string.gsub, table.insert
-local unpack = unpack or table.unpack
-
-local resolvers, package = resolvers, package
-
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local load=load
+local gsub,concat,format=string.gsub,table.concat,string.format
+local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
+local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
+local report_schemes=logs.reporter("resolvers","schemes")
+local http=require("socket.http")
+local ltn12=require("ltn12")
+local resolvers=resolvers
+local schemes=resolvers.schemes or {}
+resolvers.schemes=schemes
+local cleaners={}
+schemes.cleaners=cleaners
+local threshold=24*60*60
+directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
+function cleaners.none(specification)
+ return specification.original
+end
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-"))
+end
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+local cleaner=cleaners.strip
+directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
+function resolvers.schemes.cleanname(specification)
+ local hash=cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %a to %a",specification.original,hash)
+ end
+ return hash
+end
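+-- for example, with the default "strip" cleaner:
+-- cleaners.strip{ original="http://example.com/a.tex" } --> "http-example.com-a.tex"
+-- (the "md5" cleaner hashes the original and keeps the suffix of the path instead)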
+local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
+local function runcurl(name,cachename)
+ local command="curl --silent --create-dirs --output "..cachename.." "..name
+ os.spawn(command)
+end
+local function fetch(specification)
+ local original=specification.original
+ local scheme=specification.scheme
+ local cleanname=schemes.cleanname(specification)
+ local cachename=caches.setfirstwritablefile(cleanname,"schemes")
+ if not cached[original] then
+ statistics.starttiming(schemes)
+    if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
+ cached[original]=cachename
+ local handler=handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
+ end
+ if io.exists(cachename) then
+ cached[original]=cachename
+ if trace_schemes then
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
+ end
+ else
+ cached[original]=""
+ if trace_schemes then
+ report_schemes("using missing %a, protocol %a",original,scheme)
+ end
end
- return libpaths
-end
-
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+ loaded[scheme]=loaded[scheme]+1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing %a, protocol %a",original,scheme)
end
- return clibpaths
+ reused[scheme]=reused[scheme]+1
+ end
+ return cached[original]
end
-
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
+end
+local opener=openers.file
+local loader=loaders.file
+local function install(scheme,handler,newthreshold)
+ handlers [scheme]=handler
+ loaded [scheme]=0
+ reused [scheme]=0
+ finders [scheme]=finder
+ openers [scheme]=opener
+ loaders [scheme]=loader
+ thresholds[scheme]=newthreshold or threshold
+end
+schemes.install=install
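+-- illustrative sketch: install('ftp') registers the generic curl fetch for ftp urls,
+-- while install('http',http_handler,3600) would use the socket based handler with a
+-- one hour cache threshold instead of the default day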
+local function http_handler(specification,cachename)
+ local tempname=cachename..".tmp"
+ local f=io.open(tempname,"wb")
+ local status,message=http.request {
+ url=specification.original,
+ sink=ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
+install('http',http_handler)
+install('https')
+install('ftp')
+statistics.register("scheme handling time",function()
+ local l,r,nl,nr={},{},0,0
+ for k,v in table.sortedhash(loaded) do
+ if v>0 then
+ nl=nl+1
+ l[nl]=k..":"..v
+ end
+ end
+ for k,v in table.sortedhash(reused) do
+ if v>0 then
+ nr=nr+1
+ r[nr]=k..":"..v
+ end
+ end
+ local n=nl+nr
+ if n>0 then
+ l=nl>0 and concat(l) or "none"
+ r=nr>0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes),n,threshold,l,r)
+ else
+ return nil
+ end
+end)
+local httprequest=http.request
+local toquery=url.toquery
+local function fetchstring(url,data)
+ local q=data and toquery(data)
+ if q then
+ url=url.."?"..q
+ end
+ local reply=httprequest(url)
+ return reply
+end
+schemes.fetchstring=fetchstring
+function schemes.fetchtable(url,data)
+ local reply=fetchstring(url,data)
+ if reply then
+ local s=load("return "..reply)
+ if s then
+ return s()
end
- return path
+ end
end
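+-- illustrative sketch: schemes.fetchtable("http://example.com/data.lua",{ n=1 })
+-- turns the data table into a query string, fetches the url and loads the reply
+-- as a lua table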
-local p_libpaths, a_libpaths = { }, { }
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
-end
+end -- of closure
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
-end
+do -- create closure to overcome 200 locals limit
--- beware, we need to return a loadfile result !
+package.loaded["data-lua"] = package.loaded["data-lua"] or true
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
-end
+-- original size: 4861, stripped down to: 3693
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
- end
- for i=1,#libformats do
- local format = libformats[i]
- local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
- end
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
+if not modules then modules={} end modules ['data-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers,package=resolvers,package
+local gsub=string.gsub
+local concat=table.concat
+local addsuffix=file.addsuffix
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local libsuffixes={ 'tex','lua' }
+local clibsuffixes={ 'lib' }
+local libformats={ 'TEXINPUTS','LUAINPUTS' }
+local clibformats={ 'CLUAINPUTS' }
+local helpers=package.helpers
+trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
+trackers.register("resolvers.locating",function(v) helpers.trace=v end)
+helpers.report=logs.reporter("resolvers","libraries")
+local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
+local function cleanpath(path)
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+helpers.cleanpath=cleanpath
+local loadedaslib=helpers.loadedaslib
+local loadedbylua=helpers.loadedbylua
+local loadedbypath=helpers.loadedbypath
+local notloaded=helpers.notloaded
+local getlibpaths=package.libpaths
+local getclibpaths=package.clibpaths
+function helpers.libpaths(libhash)
+ local libpaths={}
+ for i=1,#libformats do
+ local paths=resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1]=path
+ libhash[path]=true
+ end
+ end
+ end
+ return libpaths
+end
+function helpers.clibpaths(clibhash)
+ local clibpaths={}
+ for i=1,#clibformats do
+ local paths=resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1]=path
+ clibhash[path]=true
+ end
+ end
+ end
+ return clibpaths
+end
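+-- both helpers expand the configured variables (TEXINPUTS and LUAINPUTS respectively
+-- CLUAINPUTS), clean each path and use the passed hash as a seen-set so that every
+-- path is returned only once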
+local function loadedbyformat(name,rawname,suffixes,islib)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating %a as %a using formats %a",rawname,name,suffixes)
+ end
+ for i=1,#suffixes do
+ local format=suffixes[i]
+ local resolved=resolvers.findfile(name,format) or ""
+ if trace then
+ report("checking %a using format %a",name,format)
+ end
+ if resolved~="" then
+ if trace then
+ report("lib %a located on %a",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbyformat=loadedbyformat
+local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+helpers.lualibfile=lualibfile
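+-- for example: lualibfile("foo.bar.gadget") --> "foo/bar/gadget", so dotted module
+-- names are mapped onto paths before a suffix is added in helpers.loaded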
+function helpers.loaded(name)
+ local thename=lualibfile(name)
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix)
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbyformat(luaname,name,libsuffixes,false)
+ if done then
+ return result
+ end
+ local done,result=loadedbyformat(libname,name,clibsuffixes,true)
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
end
-
-resolvers.loadlualib = require
-
--- -- -- --
-
-package.obsolete = package.obsolete or { }
-
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
-
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib=require
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-aux'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find = string.find
-local type, next = type, next
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["data-aux"] = package.loaded["data-aux"] or true
-local resolvers = resolvers
+-- original size: 2394, stripped down to: 2005
-local report_scripts = logs.reporter("resolvers","scripts")
-
-function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
- local scriptpath = "scripts/context/lua"
- newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.cleanpath(oldname)
+if not modules then modules={} end modules ['data-aux']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find=string.find
+local type,next=type,next
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local resolvers=resolvers
+local report_scripts=logs.reporter("resolvers","scripts")
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
+ newname=file.addsuffix(newname,"lua")
+ local oldscript=resolvers.cleanpath(oldname)
+ if trace_locating then
+ report_scripts("to be replaced old script %a",oldscript)
+ end
+ local newscripts=resolvers.findfiles(newname) or {}
+ if #newscripts==0 then
if trace_locating then
- report_scripts("to be replaced old script %s", oldscript)
- end
- local newscripts = resolvers.findfiles(newname) or { }
- if #newscripts == 0 then
+ report_scripts("unable to locate new script")
+ end
+ else
+ for i=1,#newscripts do
+ local newscript=resolvers.cleanpath(newscripts[i])
+ if trace_locating then
+ report_scripts("checking new script %a",newscript)
+ end
+ if oldscript==newscript then
if trace_locating then
- report_scripts("unable to locate new script")
+ report_scripts("old and new script are the same")
end
- else
- for i=1,#newscripts do
- local newscript = resolvers.cleanpath(newscripts[i])
- if trace_locating then
- report_scripts("checking new script %s", newscript)
- end
- if oldscript == newscript then
- if trace_locating then
- report_scripts("old and new script are the same")
- end
- elseif not find(newscript,scriptpath) then
- if trace_locating then
- report_scripts("new script should come from %s",scriptpath)
- end
- elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_locating then
- report_scripts("invalid new script name")
- end
- else
- local newdata = io.loaddata(newscript)
- if newdata then
- if trace_locating then
- report_scripts("old script content replaced by new content")
- end
- io.savedata(oldscript,newdata)
- break
- elseif trace_locating then
- report_scripts("unable to load new script")
- end
- end
+ elseif not find(newscript,scriptpath) then
+ if trace_locating then
+ report_scripts("new script should come from %a",scriptpath)
end
+ elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
+ if trace_locating then
+ report_scripts("invalid new script name")
+ end
+ else
+ local newdata=io.loaddata(newscript)
+ if newdata then
+ if trace_locating then
+ report_scripts("old script content replaced by new content")
+ end
+ io.savedata(oldscript,newdata)
+ break
+ elseif trace_locating then
+ report_scripts("unable to load new script")
+ end
+ end
end
+ end
end
@@ -15312,78 +15268,55 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmf'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local resolvers = resolvers
-
-local report_tds = logs.reporter("resolvers","tds")
+package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- = <<
--- ? ??
--- < +=
--- > =+
+-- original size: 2600, stripped down to: 1627
+if not modules then modules={} end modules ['data-tmf']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local report_tds=logs.reporter("resolvers","tds")
function resolvers.load_tree(tree,resolve)
- if type(tree) == "string" and tree ~= "" then
-
- local getenv, setenv = resolvers.getenv, resolvers.setenv
-
- -- later might listen to the raw osenv var as well
- local texos = "texmf-" .. os.platform
-
- local oldroot = environment.texroot
- local newroot = file.collapsepath(tree)
-
- local newtree = file.join(newroot,texos)
- local newpath = file.join(newtree,"bin")
-
- if not lfs.isdir(newtree) then
- report_tds("no '%s' under tree %s",texos,tree)
- os.exit()
- end
- if not lfs.isdir(newpath) then
- report_tds("no '%s/bin' under tree %s",texos,tree)
- os.exit()
- end
-
- local texmfos = newtree
-
- environment.texroot = newroot
- environment.texos = texos
- environment.texmfos = texmfos
-
- -- Beware, we need to obey the relocatable autoparent so we
- -- set TEXMFCNF to its raw value. This is somewhat tricky when
- -- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode or when
- -- --resolve is passed to mtxrun. Maybe we should also set the
- -- local AUTOPARENT etc. although these are alwasy set new.
-
- if resolve then
- -- resolvers.luacnfspec = resolvers.joinpath(resolvers.resolve(resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))))
- resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
- end
-
- setenv('SELFAUTOPARENT', newroot)
- setenv('SELFAUTODIR', newtree)
- setenv('SELFAUTOLOC', newpath)
- setenv('TEXROOT', newroot)
- setenv('TEXOS', texos)
- setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
- setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
-
- report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to PATH",newpath)
- report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
- report_tds()
+ if type(tree)=="string" and tree~="" then
+ local getenv,setenv=resolvers.getenv,resolvers.setenv
+ local texos="texmf-"..os.platform
+ local oldroot=environment.texroot
+ local newroot=file.collapsepath(tree)
+ local newtree=file.join(newroot,texos)
+ local newpath=file.join(newtree,"bin")
+ if not lfs.isdir(newtree) then
+ report_tds("no %a under tree %a",texos,tree)
+ os.exit()
+ end
+ if not lfs.isdir(newpath) then
+    report_tds("no %a under tree %a",texos.."/bin",tree)
+ os.exit()
+ end
+ local texmfos=newtree
+ environment.texroot=newroot
+ environment.texos=texos
+ environment.texmfos=texmfos
+ if resolve then
+ resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
end
+ setenv('SELFAUTOPARENT',newroot)
+ setenv('SELFAUTODIR',newtree)
+ setenv('SELFAUTOLOC',newpath)
+ setenv('TEXROOT',newroot)
+ setenv('TEXOS',texos)
+ setenv('TEXMFOS',texmfos)
+ setenv('TEXMFCNF',resolvers.luacnfspec,true)
+ setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
+ report_tds()
+ end
end
@@ -15391,81 +15324,76 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-lst'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- used in mtxrun, can be loaded later .. todo
-
-local find, concat, upper, format = string.find, table.concat, string.upper, string.format
-local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
+package.loaded["data-lst"] = package.loaded["data-lst"] or true
-resolvers.listers = resolvers.listers or { }
-
-local resolvers = resolvers
-
-local report_lists = logs.reporter("resolvers","lists")
+-- original size: 2654, stripped down to: 2301
+if not modules then modules={} end modules ['data-lst']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
+resolvers.listers=resolvers.listers or {}
+local resolvers=resolvers
+local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
- if type(str) == 'table' then
- return concat(str," | ")
- else
- return str
- end
+ if type(str)=='table' then
+ return concat(str," | ")
+ else
+ return str
+ end
end
-
function resolvers.listers.variables(pattern)
- local instance = resolvers.instance
- local environment = instance.environment
- local variables = instance.variables
- local expansions = instance.expansions
- local pattern = upper(pattern or "")
- local configured = { }
- local order = instance.order
- for i=1,#order do
- for k, v in next, order[i] do
- if v ~= nil and configured[k] == nil then
- configured[k] = v
- end
- end
- end
- local env = fastcopy(environment)
- local var = fastcopy(variables)
- local exp = fastcopy(expansions)
- for key, value in sortedpairs(configured) do
- if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
- report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
- end
- end
- instance.environment = fastcopy(env)
- instance.variables = fastcopy(var)
- instance.expansions = fastcopy(exp)
-end
-
-function resolvers.listers.configurations(report)
- local configurations = resolvers.instance.specification
- local report = report or texio.write_nl
- for i=1,#configurations do
- report(format("file : %s",resolvers.resolve(configurations[i])))
- end
- report("")
- local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
- for i=1,#list do
- local li = resolvers.resolve(list[i])
- if lfs.isdir(li) then
- report(format("path - %s",li))
- else
- report(format("path + %s",li))
- end
+ local instance=resolvers.instance
+ local environment=instance.environment
+ local variables=instance.variables
+ local expansions=instance.expansions
+ local pattern=upper(pattern or "")
+ local configured={}
+ local order=instance.order
+ for i=1,#order do
+ for k,v in next,order[i] do
+ if v~=nil and configured[k]==nil then
+ configured[k]=v
+ end
+ end
+ end
+ local env=fastcopy(environment)
+ local var=fastcopy(variables)
+ local exp=fastcopy(expansions)
+ for key,value in sortedpairs(configured) do
+ if key~="" and (pattern=="" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ end
+ end
+ instance.environment=fastcopy(env)
+ instance.variables=fastcopy(var)
+ instance.expansions=fastcopy(exp)
+end
+local report_resolved=logs.reporter("system","resolved")
+function resolvers.listers.configurations()
+ local configurations=resolvers.instance.specification
+ for i=1,#configurations do
+ report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ end
+ report_resolved("")
+ local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+ for i=1,#list do
+ local li=resolvers.resolve(list[i])
+ if lfs.isdir(li) then
+ report_resolved("path - %s",li)
+ else
+ report_resolved("path + %s",li)
end
+ end
end
@@ -15473,279 +15401,407 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-sta'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-lib"] = package.loaded["util-lib"] or true
--- this code is used in the updater
+-- original size: 8911, stripped down to: 4216
+
+if not modules then modules={} end modules ['util-lib']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find=string.gsub,string.find
+local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
+local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
+local loaded=package.loaded
+local report_swiglib=logs.reporter("swiglib")
+local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
+local function requireswiglib(required,version)
+ local library=loaded[required]
+ if library==nil then
+ local required_full=gsub(required,"%.","/")
+ local required_path=pathpart(required_full)
+ local required_base=nameonly(required_full)
+ local required_name=required_base.."."..os.libsuffix
+ local version=type(version)=="string" and version~="" and version or false
+ local function check(locate,...)
+ local found_library=nil
+ if version then
+ local asked_library=joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ if not found_library or found_library==""then
+ asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ else
+ local asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ return found_library and found_library~="" and found_library or false
+ end
+ local found_library=findfile and check(findfile,"lib")
+ if findfiles and not found_library then
+ local asked_library=joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list=findfiles(asked_library,"lib",true)
+ if list and #list>0 then
+ table.sort(list)
+ found_library=list[#list]
+ end
+ end
+ if not found_library then
+ package.extraclibpath(environment.ownpath)
+ local paths=package.clibpaths()
+ for i=1,#paths do
+ local found_library=check(lfs.isfile)
+ if found_library then
+ break
+ end
+ end
+ end
+ if not found_library then
+ if trace_swiglib then
+        report_swiglib("not found: %a",required)
+ end
+ library=false
+ else
+ local path=pathpart(found_library)
+ local base=nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ library=package.loadlib(found_library,"luaopen_"..required_base)
+ if type(library)=="function" then
+ library=library()
+ else
+ library=false
+ end
+ dir.pop()
+ end
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required]=library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+local savedrequire=require
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
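+-- illustrative sketch: require("swiglib.mysql.core","5.6") is routed to
+-- requireswiglib, any other module name falls through to the saved original require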
+local swiglibs={}
+function swiglib(name,version)
+ local library=swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ report_swiglib("loading %a",name)
+ library=requireswiglib("swiglib."..name,version)
+ swiglibs[name]=library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+statistics.register("used swiglibs",function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
-local gmatch, match = string.gmatch, string.match
-local type = type
-states = states or { }
-local states = states
+end -- of closure
-states.data = states.data or { }
-local data = states.data
+do -- create closure to overcome 200 locals limit
-states.hash = states.hash or { }
-local hash = states.hash
+package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
-states.tag = states.tag or ""
-states.filename = states.filename or ""
+-- original size: 5703, stripped down to: 2507
+if not modules then modules={} end modules ['luat-sta']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gmatch,match=string.gmatch,string.match
+local type=type
+states=states or {}
+local states=states
+states.data=states.data or {}
+local data=states.data
+states.hash=states.hash or {}
+local hash=states.hash
+states.tag=states.tag or ""
+states.filename=states.filename or ""
function states.save(filename,tag)
- tag = tag or states.tag
- filename = file.addsuffix(filename or states.filename,'lus')
- io.savedata(filename,
- "-- generator : luat-sta.lua\n" ..
- "-- state tag : " .. tag .. "\n\n" ..
- table.serialize(data[tag or states.tag] or {},true)
- )
+ tag=tag or states.tag
+ filename=file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
+ )
end
-
function states.load(filename,tag)
- states.filename = filename
- states.tag = tag or "whatever"
- states.filename = file.addsuffix(states.filename,'lus')
- data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
+ states.filename=filename
+ states.tag=tag or "whatever"
+ states.filename=file.addsuffix(states.filename,'lus')
+ data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
end
-
local function set_by_tag(tag,key,value,default,persistent)
- local d, h = data[tag], hash[tag]
- if d then
- if type(d) == "table" then
- local dkey, hkey = key, key
- local pre, post = match(key,"(.+)%.([^%.]+)$")
- if pre and post then
- for k in gmatch(pre,"[^%.]+") do
- local dk = d[k]
- if not dk then
- dk = { }
- d[k] = dk
- elseif type(dk) == "string" then
- -- invalid table, unable to upgrade structure
- -- hope for the best or delete the state file
- break
- end
- d = dk
- end
- dkey, hkey = post, key
- end
- if value == nil then
- value = default
- elseif value == false then
- -- special case
- elseif persistent then
- value = value or d[dkey] or default
- else
- value = value or default
- end
- d[dkey], h[hkey] = value, value
- elseif type(d) == "string" then
- -- weird
- data[tag], hash[tag] = value, value
- end
- end
+ local d,h=data[tag],hash[tag]
+ if d then
+ if type(d)=="table" then
+ local dkey,hkey=key,key
+ local pre,post=match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk=d[k]
+ if not dk then
+ dk={}
+ d[k]=dk
+ elseif type(dk)=="string" then
+ break
+ end
+ d=dk
+ end
+ dkey,hkey=post,key
+ end
+ if value==nil then
+ value=default
+ elseif value==false then
+ elseif persistent then
+ value=value or d[dkey] or default
+ else
+ value=value or default
+ end
+ d[dkey],h[hkey]=value,value
+ elseif type(d)=="string" then
+ data[tag],hash[tag]=value,value
+ end
+ end
end
-
local function get_by_tag(tag,key,default)
- local h = hash[tag]
- if h and h[key] then
- return h[key]
- else
- local d = data[tag]
- if d then
- for k in gmatch(key,"[^%.]+") do
- local dk = d[k]
- if dk ~= nil then
- d = dk
- else
- return default
- end
- end
- if d == false then
- return false
- else
- return d or default
- end
+ local h=hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d=data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk=d[k]
+ if dk~=nil then
+ d=dk
+ else
+ return default
end
+ end
+ if d==false then
+ return false
+ else
+ return d or default
+ end
end
+ end
end
-
-states.set_by_tag = set_by_tag
-states.get_by_tag = get_by_tag
-
+states.set_by_tag=set_by_tag
+states.get_by_tag=get_by_tag
function states.set(key,value,default,persistent)
- set_by_tag(states.tag,key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
end
-
function states.get(key,default)
- return get_by_tag(states.tag,key,default)
+ return get_by_tag(states.tag,key,default)
end
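+-- illustrative sketch: after states.load("demo.lus","demo"), a call like
+-- states.set("colors.background","blue") stores the value under the dotted key and
+-- states.get("colors.background") returns "blue"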
-
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-fmt'] = {
- version = 1.001,
- comment = "companion to mtxrun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
-local format = string.format
-
-local report_format = logs.reporter("resolvers","formats")
-
--- helper for mtxrun
+package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
-local quoted = string.quoted
+-- original size: 5951, stripped down to: 4922
-local function primaryflags() -- not yet ok
- local trackers = environment.argument("trackers")
- local directives = environment.argument("directives")
- local flags = ""
- if trackers and trackers ~= "" then
- flags = flags .. "--trackers=" .. quoted(trackers)
- end
- if directives and directives ~= "" then
- flags = flags .. "--directives=" .. quoted(directives)
- end
- return flags
+if not modules then modules={} end modules ['luat-fmt']={
+ version=1.001,
+ comment="companion to mtxrun",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format=string.format
+local concat=table.concat
+local quoted=string.quoted
+local luasuffixes=utilities.lua.suffixes
+local report_format=logs.reporter("resolvers","formats")
+local function primaryflags()
+ local trackers=environment.argument("trackers")
+ local directives=environment.argument("directives")
+ local flags={}
+ if trackers and trackers~="" then
+ flags={ "--trackers="..quoted(trackers) }
+ end
+ if directives and directives~="" then
+ flags={ "--directives="..quoted(directives) }
+ end
+ if environment.argument("jit") then
+ flags={ "--jiton" }
+ end
+ return concat(flags," ")
end
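Note that the new primaryflags assigns a fresh table in each branch instead of appending, so when more than one of --trackers, --directives and --jit is given, only the last matching one survives into the command line (the old string-concatenation version combined them). A small standalone reproduction, with made-up flag values:

    local concat = table.concat
    local function buildflags(trackers, directives, jit)
      local flags = {}
      if trackers   and trackers   ~= "" then flags = { "--trackers="   .. trackers   } end
      if directives and directives ~= "" then flags = { "--directives=" .. directives } end
      if jit                              then flags = { "--jiton" } end
      return concat(flags, " ")
    end
    print(buildflags("resolvers", "logs.blocked", false))   -- only "--directives=logs.blocked"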
-
function environment.make_format(name)
- -- change to format path (early as we need expanded paths)
- local olddir = lfs.currentdir()
- local path = caches.getwritablepath("formats") or "" -- maybe platform
- if path ~= "" then
- lfs.chdir(path)
- end
- report_format("format path: %s",lfs.currentdir())
- -- check source file
- local texsourcename = file.addsuffix(name,"mkiv")
- local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- if fulltexsourcename == "" then
- texsourcename = file.addsuffix(name,"tex")
- fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- end
- if fulltexsourcename == "" then
- report_format("no tex source file with name: %s (mkiv or tex)",name)
- lfs.chdir(olddir)
- return
- else
- report_format("using tex source file: %s",fulltexsourcename)
- end
- local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
- -- check specification
- local specificationname = file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- if fullspecificationname == "" then
- specificationname = file.join(texsourcepath,"context.lus")
- fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- end
- if fullspecificationname == "" then
- report_format("unknown stub specification: %s",specificationname)
- lfs.chdir(olddir)
- return
- end
- local specificationpath = file.dirname(fullspecificationname)
- -- load specification
- local usedluastub = nil
- local usedlualibs = dofile(fullspecificationname)
- if type(usedlualibs) == "string" then
- usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
- elseif type(usedlualibs) == "table" then
- report_format("using stub specification: %s",fullspecificationname)
- local texbasename = file.basename(name)
- local luastubname = file.addsuffix(texbasename,"lua")
- local lucstubname = file.addsuffix(texbasename,"luc")
- -- pack libraries in stub
- report_format("creating initialization file: %s",luastubname)
- utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
- -- compile stub file (does not save that much as we don't use this stub at startup any more)
- local strip = resolvers.booleanvariable("LUACSTRIP", true)
- if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file: %s",lucstubname)
- usedluastub = lucstubname
- else
- report_format("using uncompiled initialization file: %s",luastubname)
- usedluastub = luastubname
- end
+ local engine=environment.ownmain or "luatex"
+ local olddir=dir.current()
+ local path=caches.getwritablepath("formats",engine) or ""
+ if path~="" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ local texsourcename=file.addsuffix(name,"mkiv")
+ local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename=="" then
+ texsourcename=file.addsuffix(name,"tex")
+ fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename=="" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
+ local specificationname=file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname=="" then
+ specificationname=file.join(texsourcepath,"context.lus")
+ fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname=="" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath=file.dirname(fullspecificationname)
+ local usedluastub=nil
+ local usedlualibs=dofile(fullspecificationname)
+ if type(usedlualibs)=="string" then
+ usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs)=="table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename=file.basename(name)
+ local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub=lucstubname
else
- report_format("invalid stub specification: %s",fullspecificationname)
- lfs.chdir(olddir)
- return
- end
- -- generate format
- local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
- report_format("running command: %s\n",command)
- os.spawn(command)
- -- remove related mem files
- local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
- -- report_format("removing related mplib format with pattern '%s'", pattern)
- local mp = dir.glob(pattern)
- if mp then
- for i=1,#mp do
- local name = mp[i]
- report_format("removing related mplib format %s", file.basename(name))
- os.remove(name)
- end
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub=luastubname
end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
lfs.chdir(olddir)
+ return
+ end
+ local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ local mp=dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name=mp[i]
+ report_format("removing related mplib format %a",file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
end
-
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
- if name and name ~= "" then
- local barename = file.removesuffix(name)
- local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
- if fmtname == "" then
- fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
- end
- fmtname = resolvers.cleanpath(fmtname)
- if fmtname == "" then
- report_format("no format with name: %s",name)
- else
- local barename = file.removesuffix(name) -- expanded name
- local luaname = file.addsuffix(barename,"luc")
- if not lfs.isfile(luaname) then
- luaname = file.addsuffix(barename,"lua")
- end
- if not lfs.isfile(luaname) then
- report_format("using format name: %s",fmtname)
- report_format("no luc/lua with name: %s",barename)
- else
- local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
- report_format("running command: %s",command)
- os.spawn(command)
- end
- end
+ if name and name~="" then
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname=="" then
+ fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname=resolvers.cleanpath(fmtname)
+ if fmtname=="" then
+ report_format("no format with name %a",name)
+ else
+ local barename=file.removesuffix(name)
+ local luaname=file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname=file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
end
+ end
end
end -- of closure
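For reference, the command that environment.make_format assembles and hands to os.spawn has the shape sketched below; the file names are hypothetical, and the backslash is doubled on unix so that the shell delivers a single \dump to the engine.

    local format = string.format
    local engine = "luatex"                       -- environment.ownmain or "luatex"
    local flags  = '--trackers="resolvers"'       -- whatever primaryflags() returned
    local stub   = '"cont-en.luc"'                -- quoted initialization file (luc or lua)
    local source = '"cont-en.mkiv"'               -- quoted, resolved tex source
    local escape = "\\\\"                         -- os.platform == "unix" and "\\\\" or "\\" in the real code
    print(format("%s --ini %s --lua=%s %s %sdump", engine, flags, stub, source, escape))
    -- luatex --ini --trackers="resolvers" --lua="cont-en.luc" "cont-en.mkiv" \\dump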
+
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
+-- skipped libraries : -
+-- original bytes : 658276
+-- stripped bytes : 241564
+
-- end library merge
-own = { } -- not local, might change
+-- We need this hack till luatex is fixed.
+--
+-- for k,v in pairs(arg) do print(k,v) end
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
+end
+
+-- End of hack.
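What the hack above does to a hypothetical argument vector when the script is started as "luatex --luaonly mtxrun.lua --script fonts" (illustration only, not part of the patch):

    local arg = { [0] = "luatex", "--luaonly", "mtxrun.lua", "--script", "fonts" }
    arg[-1] = arg[0]                          -- remember the interpreter
    arg[0]  = arg[2]                          -- the script becomes its own program name
    for k = 3, #arg do arg[k-2] = arg[k] end  -- shift the real arguments down
    arg[#arg] = nil arg[#arg] = nil           -- drop the now duplicated tail
    -- result: arg[-1]="luatex", arg[0]="mtxrun.lua", arg[1]="--script", arg[2]="fonts"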
-own.libs = { -- order can be made better
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = environment and environment.ownpath or ownpath
+
+local ownlibs = { -- order can be made better
+
+ 'l-lua.lua',
+ 'l-lpeg.lua',
+ 'l-function.lua',
'l-string.lua',
'l-table.lua',
- 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -15758,19 +15814,23 @@ own.libs = { -- order can be made better
'l-unicode.lua',
'l-math.lua',
+ 'util-str.lua', -- code might move to l-string
'util-tab.lua',
'util-sto.lua',
- 'util-mrg.lua',
- 'util-lua.lua',
'util-prs.lua',
'util-fmt.lua',
- 'util-deb.lua',
- 'trac-inf.lua',
'trac-set.lua',
'trac-log.lua',
- 'trac-pro.lua',
+ 'trac-inf.lua', -- was before trac-set
+ 'trac-pro.lua', -- not really needed
+ 'util-lua.lua', -- indeed here?
+ 'util-deb.lua',
+
+ 'util-mrg.lua',
+ 'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -15780,6 +15840,8 @@ own.libs = { -- order can be made better
'lxml-aux.lua',
'lxml-xml.lua',
+ 'trac-xml.lua',
+
'data-ini.lua',
'data-exp.lua',
'data-env.lua',
@@ -15796,35 +15858,20 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
'data-lst.lua',
+ 'util-lib.lua', -- swiglib
+
'luat-sta.lua',
'luat-fmt.lua',
-}
-
--- We need this hack till luatex is fixed.
---
--- for k,v in pairs(arg) do print(k,v) end
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
--- End of hack.
-
-local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
-local concat = table.concat
-
-own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
-own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
-local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+}
-own.list = {
+local ownlist = {
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15838,13 +15885,21 @@ own.list = {
owntree .. "/../../../texmf/tex/context/base",
}
-if own.path == "." then table.remove(own.list,1) end
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
local function locate_libs()
- for l=1,#own.libs do
- local lib = own.libs[l]
- for p =1,#own.list do
- local pth = own.list[p]
+ for l=1,#ownlibs do
+ local lib = ownlibs[l]
+ for p =1,#ownlist do
+ local pth = ownlist[p]
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
@@ -15858,8 +15913,8 @@ end
local function load_libs()
local found = locate_libs()
if found then
- for l=1,#own.libs do
- local filename = found .. "/" .. own.libs[l]
+ for l=1,#ownlibs do
+ local filename = found .. "/" .. ownlibs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
@@ -15933,55 +15988,85 @@ if not environment.experiments then environment.experiments = e_experiments end
local instance = resolvers.reset()
local helpinfo = [[
---script run an mtx script (lua prefered method) (--noquotes), no script gives list
---execute run a script or program (texmfstart method) (--noquotes)
---resolve resolve prefixed arguments
---ctxlua run internally (using preloaded libs)
---internal run script using built in libraries (same as --ctxlua)
---locate locate given filename in database (default) or system (--first --all --detail)
-
---autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
---tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
---environment=name use given (tmf) environment file
---path=runpath go to given path before execution
---ifchanged=filename only execute when given file has changed (md checksum)
---iftouched=old,new only execute when given file has changed (time stamp)
-
---makestubs create stubs for (context related) scripts
---removestubs remove stubs (context related) scripts
---stubpath=binpath paths where stubs wil be written
---windows create windows (mswin) stubs
---unix create unix (linux) stubs
-
---verbose give a bit more info
---trackers=list enable given trackers
---progname=str format or backend
-
---edit launch editor with found file
---launch (--all) launch files like manuals, assumes os support
-
---timedrun run a script an time its run
---autogenerate regenerate databases if needed (handy when used to run context in an editor)
-
---usekpse use kpse as fallback (when no mkiv and cache installed, often slower)
---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
-
---prefixes show supported prefixes
-
---generate generate file database
-
---variables show configuration variables
---configurations show configuration order
-
---expand-braces expand complex variable
---expand-path expand variable (resolve paths)
---expand-var expand variable (resolve references)
---show-path show path expansion of ...
---var-value report value of variable
---find-file report file location
---find-path report path of file
-
---pattern=str filter variables
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+    <flag name="script"><short>run an mtx script (lua preferred method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+ <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+    <flag name="stubpath" value="binpath"><short>paths where stubs will be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
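The flat --flag listing has been replaced by an XML application description, which the help and exporthelp machinery renders. Purely as an illustration (this is not how mtxrun itself parses it), the declared flag names can be pulled out with a plain Lua pattern:

    local sample = [[<flag name="script"/><flag name="execute"/><flag name="pattern" value="string"/>]]
    for name in string.gmatch(sample, '<flag name="([^"]+)"') do
      print(name)   -- script, execute, pattern
    end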
local application = logs.application {
@@ -16093,7 +16178,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +16190,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +16217,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +16410,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +16469,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +16524,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +16632,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
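runners.gethelp fills a user supplied --url template with the cleaned-up --command value via utilities.templates.replace; the %command% placeholder syntax is assumed here, and the sketch below uses a plain gsub as a stand-in for that call (the url is hypothetical):

    local command = string.gsub("\\section", "^%s*\\*(.-)%s*$", "%1")    -- "section", same gsub as above
    local url     = "http://wiki.example.org/command/%command%"          -- hypothetical --url value
    local filled  = string.gsub(url, "%%command%%", command)             -- stand-in for templates.replace
    print(filled)   -- http://wiki.example.org/command/section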
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16647,8 +16748,18 @@ else
end
+if e_argument("script") or e_argument("scripts") then
-if e_argument("selfmerge") then
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16678,17 +16789,6 @@ elseif e_argument("ctxlua") or e_argument("internal") then
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif e_argument("script") or e_argument("scripts") then
-
- -- run a script by loading it (using libs), pass args
-
- runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
-
elseif e_argument("execute") then
-- execute script
@@ -16715,6 +16815,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
     -- make stubs (deprecated)
@@ -16806,7 +16914,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
@@ -16908,6 +17016,23 @@ elseif e_argument("version") then
application.version()
+elseif e_argument("directives") then
+
+ directives.show()
+
+elseif e_argument("trackers") then
+
+ trackers.show()
+
+elseif e_argument("experiments") then
+
+ experiments.show()
+
+elseif e_argument("exporthelp") then
+
+ runners.loadbase()
+ application.export(e_argument("exporthelp"),filename)
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
@@ -16938,7 +17063,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
@@ -16955,4 +17079,4 @@ end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
-os.exit(ok)
+os.exit(ok,true) -- true forces a cleanup in 5.2+
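The runner result is a boolean that gets mapped to a shell exit status just before this call, and the added second argument is the Lua 5.2 close flag mentioned in the in-line comment. A tiny restatement of that mapping, for illustration only:

    local function exitcode(ok)
      if ok == false then return 1 elseif ok == true then return 0 else return ok end
    end
    print(exitcode(true), exitcode(false), exitcode(2))   -- 0  1  2
    -- os.exit(code, true) additionally closes the Lua state first (the 5.2+ cleanup noted above)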
diff --git a/Master/texmf-dist/scripts/context/ruby/base/logger.rb b/Master/texmf-dist/scripts/context/ruby/base/logger.rb
index 2526cdb0e25..2245ab331f0 100644
--- a/Master/texmf-dist/scripts/context/ruby/base/logger.rb
+++ b/Master/texmf-dist/scripts/context/ruby/base/logger.rb
@@ -47,7 +47,8 @@ class Logger
print("\n")
return true
when 1
- message = str.first
+ # message = str.first
+ message = str.first.join(' ')
else
message = [str].flatten.collect{|s| s.to_s}.join(' ').chomp
end
diff --git a/Master/texmf-dist/scripts/context/ruby/base/tex.rb b/Master/texmf-dist/scripts/context/ruby/base/tex.rb
index 8f56839851c..9ba842501ea 100644
--- a/Master/texmf-dist/scripts/context/ruby/base/tex.rb
+++ b/Master/texmf-dist/scripts/context/ruby/base/tex.rb
@@ -362,7 +362,8 @@ class TEX
def mpsformats() @@mpsformats.keys.sort end
def defaulttexformats() ['en','nl','mptopdf'] end
- def defaultmpsformats() ['metafun'] end
+ # def defaultmpsformats() ['metafun'] end # no longer formats
+ def defaultmpsformats() [] end
def texmakeextras(format) @@texmakestr[format] || '' end
def mpsmakeextras(format) @@mpsmakestr[format] || '' end
diff --git a/Master/texmf-dist/scripts/context/ruby/fcd_start.rb b/Master/texmf-dist/scripts/context/ruby/fcd_start.rb
deleted file mode 100644
index 3d04754fac9..00000000000
--- a/Master/texmf-dist/scripts/context/ruby/fcd_start.rb
+++ /dev/null
@@ -1,472 +0,0 @@
-# Hans Hagen / PRAGMA ADE / 2005 / www.pragma-ade.com
-#
-# Fast Change Dir
-#
-# This is a kind of variant of the good old ncd
-# program. This script uses the same indirect cmd
-# trick as Erwin Waterlander's wcd program.
-#
-# === windows: fcd.cmd ===
-#
-# @echo off
-# ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9
-# if exist "%HOME%/fcd_stage.cmd" call %HOME%/fcd_stage.cmd
-#
-# === linux: fcd (fcd.sh) ===
-#
-# !/usr/bin/env sh
-# ruby -S fcd_start.rb $1 $2 $3 $4 $5 $6 $7 $8 $9
-# if test -f "$HOME/fcd_stage.sh" ; then
-# . $HOME/fcd_stage.sh ;
-# fi;
-#
-# ===
-#
-# On linux, one should source the file: ". fcd args" in order
-# to make the chdir persistent.
-#
-# You can create a stub with:
-#
-# ruby fcd_start.rb --stub --verbose
-#
-# usage:
-#
-# fcd --make t:\
-# fcd --add f:\project
-# fcd [--find] whatever
-# fcd [--find] whatever c (c being a list entry)
-# fcd [--find] whatever . (last choice with this pattern)
-# fcd --list
-
-# todo: HOMEDRIVE\HOMEPATH
-
-require 'rbconfig'
-
-class FastCD
-
- @@rootpath = nil
-
- ['HOME','TEMP','TMP','TMPDIR'].each do |key|
- if ENV[key] then
- if FileTest.directory?(ENV[key]) then
- @@rootpath = ENV[key]
- break
- end
- end
- end
-
- exit unless @@rootpath
-
- @@mswindows = RbConfig::CONFIG['host_os'] =~ /mswin/
- @@maxlength = 26
-
- require 'Win32API' if @@mswindows
-
- if @@mswindows then
- @@stubcode = [
- '@echo off',
- '',
- 'if not exist "%HOME%" goto temp',
- '',
- ':home',
- '',
- 'ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9',
- '',
- 'if exist "%HOME%\fcd_stage.cmd" call %HOME%\fcd_stage.cmd',
- 'goto end',
- '',
- ':temp',
- '',
- 'ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9',
- '',
- 'if exist "%TEMP%\fcd_stage.cmd" call %TEMP%\fcd_stage.cmd',
- 'goto end',
- '',
- ':end'
- ].join("\n")
- else
- @@stubcode = [
- '#!/usr/bin/env sh',
- '',
- 'ruby -S fcd_start.rb $1 $2 $3 $4 $5 $6 $7 $8 $9',
- '',
- 'if test -f "$HOME/fcd_stage.sh" ; then',
- ' . $HOME/fcd_stage.sh ;',
- 'fi;'
- ].join("\n")
- end
-
- @@selfpath = File.dirname($0)
- @@datafile = File.join(@@rootpath,'fcd_state.dat')
- @@histfile = File.join(@@rootpath,'fcd_state.his')
- @@cdirfile = File.join(@@rootpath,if @@mswindows then 'fcd_stage.cmd' else 'fcd_stage.sh' end)
- @@stubfile = File.join(@@selfpath,if @@mswindows then 'fcd.cmd' else 'fcd' end)
-
- def initialize(verbose=false)
- @list = Array.new
- @hist = Hash.new
- @result = Array.new
- @pattern = ''
- @result = ''
- @verbose = verbose
- if f = File.open(@@cdirfile,'w') then
- f << "#{if @@mswindows then 'rem' else '#' end} no dir to change to"
- f.close
- else
- report("unable to create stub #{@@cdirfile}")
- end
- end
-
- def filename(name)
- File.join(@@root,name)
- end
-
- def report(str,verbose=@verbose)
- puts(">> #{str}") if verbose
- end
-
- def flush(str,verbose=@verbose)
- print(str) if verbose
- end
-
- def clear
- if FileTest.file?(@@histfile)
- begin
- File.delete(@@histfile)
- rescue
- report("error in deleting history file '#{@histfile}'")
- else
- report("history file '#{@histfile}' is deleted")
- end
- else
- report("no history file '#{@histfile}'")
- end
- end
-
- def scan(dir='.')
- begin
- [dir].flatten.sort.uniq.each do |dir|
- begin
- Dir.chdir(dir)
- report("scanning '#{dir}'")
- # flush(">> ")
- Dir.glob("**/*").each do |d|
- if FileTest.directory?(d) then
- @list << File.expand_path(d)
- # flush(".")
- end
- end
- # flush("\n")
- @list = @list.sort.uniq
- report("#{@list.size} entries found")
- rescue
- report("unknown directory '#{dir}'")
- end
- end
- rescue
- report("invalid dir specification ")
- end
- end
-
- def save
- begin
- if f = File.open(@@datafile,'w') then
- @list.each do |l|
- f.puts(l)
- end
- f.close
- report("#{@list.size} status bytes saved in #{@@datafile}")
- else
- report("unable to save status in #{@@datafile}")
- end
- rescue
- report("error in saving status in #{@@datafile}")
- end
- end
-
- def remember
- if @hist[@pattern] == @result then
- # no need to save result
- else
- begin
- if f = File.open(@@histfile,'w') then
- @hist[@pattern] = @result
- @hist.keys.each do |k|
- f.puts("#{k} #{@hist[k]}")
- end
- f.close
- report("#{@hist.size} history entries saved in #{@@histfile}")
- else
- report("unable to save history in #{@@histfile}")
- end
- rescue
- report("error in saving history in #{@@histfile}")
- end
- end
- end
-
- def load
- begin
- @list = IO.read(@@datafile).split("\n")
- report("#{@list.length} status bytes loaded from #{@@datafile}")
- rescue
- report("error in loading status from #{@@datafile}")
- end
- begin
- IO.readlines(@@histfile).each do |line|
- if line =~ /^(.*?)\s+(.*)$/i then
- @hist[$1] = $2
- end
- end
- report("#{@hist.length} history entries loaded from #{@@histfile}")
- rescue
- report("error in loading history from #{@@histfile}")
- end
- end
-
- def show
- begin
- puts("directories:")
- puts("\n")
- if @list.length > 0 then
- @list.each do |l|
- puts(l)
- end
- else
- puts("no entries")
- end
- puts("\n")
- puts("history:")
- puts("\n")
- if @hist.length > 0 then
- @hist.keys.sort.each do |h|
- puts("#{h} >> #{@hist[h]}")
- end
- else
- puts("no entries")
- end
- rescue
- end
- end
-
- def find(pattern=nil)
- begin
- if pattern = [pattern].flatten.first then
- if pattern.length > 0 and @pattern = pattern then
- @result = @list.grep(/\/#{@pattern}$/i)
- if @result.length == 0 then
- @result = @list.grep(/\/#{@pattern}[^\/]*$/i)
- end
- end
- else
- puts(Dir.pwd.gsub(/\\/o, '/'))
- end
- rescue
- puts("some error")
- end
- end
-
- def chdir(dir)
- begin
- if dir then
- if f = File.open(@@cdirfile,'w') then
- if @@mswindows then
- f.puts("cd /d #{dir.gsub('/','\\')}")
- else
- f.puts("cd #{dir.gsub("\\",'/')}")
- end
- f.close
- end
- @result = dir
- report("changing to #{dir}",true)
- else
- report("not changing dir")
- end
- rescue
- end
- end
-
- def choose(args=[])
- offset = 97
- unless @pattern.empty? then
- begin
- case @result.size
- when 0 then
- report("dir '#{@pattern}' not found",true)
- when 1 then
- chdir(@result[0])
- else
- list = @result.dup
- begin
- if answer = args[1] then # assignment & test
- if answer == '.' and @hist.key?(@pattern) then
- if FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- return
- end
- else
- index = answer[0] - offset
- if dir = list[index] then
- chdir(dir)
- return
- end
- end
- end
- rescue
- puts("some error")
- end
- loop do
- print("\n")
- list.each_index do |i|
-begin
- if i < @@maxlength then
- # puts("#{(i+?a).chr} #{list[i]}")
- puts("#{(i+offset).chr} #{list[i]}")
- else
- puts("\n there are #{list.length-@@maxlength} entries more")
- break
- end
-rescue
- puts("some error")
-end
- end
- print("\n>> ")
- if answer = wait then
- if answer >= offset and answer <= offset+25 then
- index = answer - offset
- if dir = list[index] then
- print("#{answer.chr} ")
- chdir(dir)
- elsif @hist.key?(@pattern) and FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- else
- print("quit\n")
- end
- break
- elsif list.length >= @@maxlength then
- @@maxlength.times do |i| list.shift end
- print("next set")
- print("\n")
- elsif @hist.key?(@pattern) and FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- break
- else
- print("quit\n")
- break
- end
- end
- end
- end
- rescue
- report($!)
- end
- end
- end
-
- def wait
- begin
- $stdout.flush
- return getc
- rescue
- return nil
- end
- end
-
- def getc
- begin
- if @@mswindows then
- ch = Win32API.new('crtdll','_getch',[],'L').call
- else
- system('stty raw -echo')
- ch = $stdin.getc
- system('stty -raw echo')
- end
- rescue
- ch = nil
- end
- return ch
- end
-
- def check
- unless FileTest.file?(@@stubfile) then
- report("creating stub #{@@stubfile}")
- begin
- if f = File.open(@@stubfile,'w') then
- f.puts(@@stubcode)
- f.close
- end
- rescue
- report("unable to create stub #{@@stubfile}")
- else
- unless @mswindows then
- begin
- File.chmod(0755,@@stubfile)
- rescue
- report("unable to change protections on #{@@stubfile}")
- end
- end
- end
- else
- report("stub #{@@stubfile} already present")
- end
- end
-
-end
-
-$stdout.sync = true
-
-verbose, action, args = false, :find, Array.new
-
-usage = "fcd [--add|clear|find|list|make|show|stub] [--verbose] [pattern]"
-version = "1.0.2"
-
-def quit(message)
- puts(message)
- exit
-end
-
-ARGV.each do |a|
- case a
- when '-a', '--add' then action = :add
- when '-c', '--clear' then action = :clear
- when '-f', '--find' then action = :find
- when '-l', '--list' then action = :show
- when '-m', '--make' then action = :make
- when '-s', '--show' then action = :show
- when '--stub' then action = :stub
- when '-v', '--verbose' then verbose = true
- when '--version' then quit("version: #{version}")
- when '-h', '--help' then quit("usage: #{usage}")
- when /^\-\-.*/ then quit("error: unknown switch #{a}, try --help")
- else args << a
- end
-end
-
-fcd = FastCD.new(verbose)
-fcd.report("Fast Change Dir / version #{version}")
-
-case action
- when :make then
- fcd.clear
- fcd.scan(args)
- fcd.save
- when :clear then
- fcd.clear
- when :add then
- fcd.load
- fcd.scan(args)
- fcd.save
- when :show then
- fcd.load
- fcd.show
- when :find then
- fcd.load
- fcd.find(args)
- fcd.choose(args)
- fcd.remember
- when :stub
- fcd.check
-end
diff --git a/Master/texmf-dist/scripts/context/ruby/mpstools.rb b/Master/texmf-dist/scripts/context/ruby/mpstools.rb
deleted file mode 100644
index 534bfb95bcf..00000000000
--- a/Master/texmf-dist/scripts/context/ruby/mpstools.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-# todo
-#
-# this script will replace mptopdf and makempy
-
-puts("This program is yet unfinished, for the moment it just calls 'mptopdf'.\n\n")
-
-system("texmfstart mptopdf #{ARGV.join(' ')}")
diff --git a/Master/texmf-dist/scripts/context/ruby/texexec.rb b/Master/texmf-dist/scripts/context/ruby/texexec.rb
index 32c9d0ca538..c673cb46b17 100644
--- a/Master/texmf-dist/scripts/context/ruby/texexec.rb
+++ b/Master/texmf-dist/scripts/context/ruby/texexec.rb
@@ -733,7 +733,7 @@ class Commands
else
n += 1
end
- r << ' ' + s
+ r << ' ' + s.to_s
end
report(r) unless r.empty?
end
diff --git a/Master/texmf-dist/scripts/context/ruby/texsync.rb b/Master/texmf-dist/scripts/context/ruby/texsync.rb
deleted file mode 100644
index fd6593f05fb..00000000000
--- a/Master/texmf-dist/scripts/context/ruby/texsync.rb
+++ /dev/null
@@ -1,206 +0,0 @@
-#!/usr/bin/env ruby
-
-# program : texsync
-# copyright : PRAGMA Advanced Document Engineering
-# version : 2003-2005
-# author : Hans Hagen
-#
-# project : ConTeXt / eXaMpLe
-# concept : Hans Hagen
-# info : j.hagen@xs4all.nl
-# www : www.pragma-ade.com
-
-# For the moment this script only handles the 'minimal' context
-# distribution. In due time I will add a few more options, like
-# synchronization of the iso image.
-
-# taco's sync: rsync -au -v rsync://www.pragma-ade.com/all ./htdocs
-
-banner = ['TeXSync', 'version 1.1.1', '2002/2004', 'PRAGMA ADE/POD']
-
-$: << File.expand_path(File.dirname($0)) ; $: << File.join($:.last,'lib') ; $:.uniq!
-
-require 'base/switch'
-require 'base/logger'
-# require 'base/tool'
-
-require 'rbconfig'
-
-class Commands
-
- include CommandBase
-
- @@formats = ['en','nl','de','cz','it','ro', 'fr']
- @@always = ['metafun','mptopdf','en','nl']
- @@rsync = 'rsync -r -z -c --progress --stats "--exclude=*.fmt" "--exclude=*.efmt" "--exclude=*.mem"'
-
- @@kpsewhich = Hash.new
-
- @@kpsewhich['minimal'] = 'SELFAUTOPARENT'
- @@kpsewhich['context'] = 'TEXMFLOCAL'
- @@kpsewhich['documentation'] = 'TEXMFLOCAL'
- @@kpsewhich['unknown'] = 'SELFAUTOPARENT'
-
- def update
-
- report
-
- return unless destination = getdestination
-
- texpaths = gettexpaths
- address = option('address')
- user = option('user')
- tree = option('tree')
- force = option('force')
-
- ok = true
- begin
- report("synchronizing '#{tree}' from '#{address}' to '#{destination}'")
- report
- if texpaths then
- texpaths.each do |path|
- report("synchronizing path '#{path}' of '#{tree}' from '#{address}' to '#{destination}'")
- command = "#{rsync} #{user}@#{address}::#{tree}/#{path} #{destination}/{path}"
- ok = ok && system(command) if force
- end
- else
- command = "#{@@rsync} #{user}@#{address}::#{tree} #{destination}"
- ok = system(command) if force
- end
- rescue
- report("error in running rsync")
- ok = false
- ensure
- if force then
- if ok then
- if option('make') then
- report("generating tex and metapost formats")
- report
- @@formats.delete_if do |f|
- begin
- `kpsewhich cont-#{f}`.chomp.empty?
- rescue
- end
- end
- str = [@@formats,@@always].flatten.uniq.join(' ')
- begin
- system("texexec --make --alone #{str}")
- rescue
- report("unable to generate formats '#{str}'")
- else
- report
- end
- else
- report("regenerate the formats files if needed")
- end
- else
- report("error in synchronizing '#{tree}'")
- end
- else
- report("provide --force to execute '#{command}'") unless force
- end
- end
-
- end
-
- def list
-
- report
-
- address = option('address')
- user = option('user')
- result = nil
-
- begin
- report("fetching list of trees from '#{address}'")
- command = "#{@@rsync} #{user}@#{address}::"
- if option('force') then
- result = `#{command}`.chomp
- else
- report("provide --force to execute '#{command}'")
- end
- rescue
- result = nil
- else
- if result then
- report("available trees:")
- report
- reportlines(result)
- end
- ensure
- report("unable to fetch list") unless result
- end
-
- end
-
- private
-
- def gettexpaths
- if option('full') then
- texpaths = ['texmf','texmf-local','texmf-fonts','texmf-mswin','texmf-linux','texmf-macos']
- elsif option('terse') then
- texpaths = ['texmf','texmf-local','texmf-fonts']
- case RbConfig::CONFIG['host_os'] # or: Tool.ruby_platform
- when /mswin/ then texpaths.push('texmf-mswin')
- when /linux/ then texpaths.push('texmf-linux')
- when /darwin/ then texpaths.push('texmf-macosx')
- end
- else
- texpaths = nil
- end
- texpaths
- end
-
- def getdestination
- if (destination = option('destination')) && ! destination.empty? then
- begin
- if @@kpsewhich.key?(destination) then
- destination = @@kpsewhich[option('tree')] || @@kpsewhich['unknown']
- destination = `kpsewhich --expand-var=$#{destination}`.chomp
- elsif ! FileTest.directory?(destination) then
- destination = nil
- end
- rescue
- report("unable to determine destination tex root")
- else
- if ! destination || destination.empty? then
- report("no destination is specified")
- elsif not FileTest.directory?(destination) then
- report("invalid destination '#{destination}'")
- elsif not FileTest.writable?(destination) then
- report("destination '#{destination}' is not writable")
- else
- report("using destination '#{destination}'")
- return destination
- end
- end
- else
- report("unknown destination")
- end
- return nil
- end
-
-end
-
-logger = Logger.new(banner.shift)
-commandline = CommandLine.new
-
-commandline.registeraction('update', 'update installed tree')
-commandline.registeraction('list', 'list available trees')
-
-commandline.registerflag('terse', 'download as less as possible (esp binaries)')
-commandline.registerflag('full', 'download everything (all binaries)')
-commandline.registerflag('force', 'confirm action')
-commandline.registerflag('make', 'remake formats')
-
-commandline.registervalue('address', 'www.pragma-ade.com', 'adress of repository (www.pragma-ade)')
-commandline.registervalue('user', 'guest', 'user account (guest)')
-commandline.registervalue('tree', 'tex', 'tree to synchronize (tex)')
-commandline.registervalue('destination', nil, 'destination of tree (kpsewhich)')
-
-commandline.registeraction('help')
-commandline.registeraction('version')
-
-commandline.expand
-
-Commands.new(commandline,logger,banner).send(commandline.action || 'help')
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.exe b/Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.exe
index 2d45f27494d..2d45f27494d 100755
--- a/Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.exe
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.exe
Binary files differ
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll
index 4116c5a24a6..6ba2597d56f 100755
--- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll
Binary files differ
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe
index 745eaf22464..acd99ddbf3b 100755
--- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe
Binary files differ
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua
index 108f2a8a112..00f63a5791d 100644
--- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua
@@ -1,5 +1,16 @@
#!/usr/bin/env texlua
+-- for k, v in next, _G.string do
+-- local tv = type(v)
+-- if tv == "table" then
+-- for kk, vv in next, v do
+-- print(k,kk,vv)
+-- end
+-- else
+-- print(tv,k,v)
+-- end
+-- end
+
if not modules then modules = { } end modules ['mtxrun'] = {
version = 1.001,
comment = "runner, lua replacement for texmfstart.rb",
@@ -43,3016 +54,3010 @@ if not modules then modules = { } end modules ['mtxrun'] = {
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+package.loaded["l-lua"] = package.loaded["l-lua"] or true
+
+-- original size: 10048, stripped down to: 5684
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
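With the definitions above in scope, the inspector hook is used as sketched below: an inspector that recognises a value returns true, otherwise inspect falls back to print(tostring(value)). The table inspector here is only an example.

    setinspector(function(v)
      if type(v) == "table" then
        print("table with " .. #v .. " array entries")
        return true                    -- handled, stop the inspector chain
      end
    end)
    inspect({ 1, 2, 3 })               -- table with 3 array entries
    inspect("hello")                   -- hello (default tostring fallback)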
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+local type=type
+local gsub,format=string.gsub,string.format
+local package=package
+local searchers=package.searchers or package.loaders
+local libpaths=nil
+local clibpaths=nil
+local libhash={}
+local clibhash={}
+local libextras={}
+local clibextras={}
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local helpers=package.helpers or {
+ libpaths=function() return {} end,
+ clibpaths=function() return {} end,
+ cleanpath=cleanpath,
+ trace=false,
+ report=function(...) print(format(...)) end,
+}
+package.helpers=helpers
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+package.libpaths=getlibpaths
+package.clibpaths=getclibpaths
+local function addpath(what,paths,extras,hash,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths+1]=path
+ extras[#extras+1]=path
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths,extras
+end
+function package.extralibpath(...)
+ libpaths,libextras=addpath("lua",getlibpaths(),libextras,libhash,...)
+end
+function package.extraclibpath(...)
+ clibpaths,clibextras=addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
+if not searchers[-2] then
+ searchers[-2]=searchers[2]
+end
+searchers[2]=function(name)
+ return helpers.loaded(name)
+end
+searchers[3]=nil
+local function loadedaslib(resolved,rawname)
+ local init="luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true,searchers[-2](name)
+end
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+helpers.loadedaslib=loadedaslib
+helpers.loadedbylua=loadedbylua
+helpers.loadedbypath=loadedbypath
+helpers.notloaded=notloaded
+function helpers.loaded(name)
+ local thename=gsub(name,"%.","/")
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix or "so")
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
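helpers.loaded, installed as searchers[2] above, tries name.lua on the Lua paths, then on the C library paths, then a compiled library (loadlib with a luaopen_ prefix), and finally falls back to the original searcher kept in searchers[-2]. Extra directories can be registered with the exported helpers; the paths and module name below are hypothetical.

    package.extralibpath("/opt/context/lua")      -- extra directory searched for *.lua modules
    package.extraclibpath("/opt/context/clib")    -- extra directory searched for compiled libraries
    -- local swig = require("swiglib.mylib")      -- would now also be resolved through these paths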
-local string = string
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
--- some functions may disappear as they are not used anywhere
+end -- of closure
-if not string.split then
+do -- create closure to overcome 200 locals limit
- -- this will be overloaded by a faster lpeg variant
+package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
- function string.split(str,pattern)
- local t = { }
- if #str > 0 then
- local n = 1
- for s in gmatch(str..pattern,"(.-)"..pattern) do
- t[n] = s
- n = n + 1
- end
- end
- return t
- end
+-- original size: 26252, stripped down to: 14371
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
end
-
-function string.unquoted(str)
- return (gsub(str,"^([\"\'])(.*)%1$","%2"))
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
-
-
-function string.quoted(str)
- return format("%q",str) -- always "
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
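-- usage sketch (assuming stock LPeg): one splitter is cached per separator, so
-- repeated calls with the same separator reuse the compiled pattern
local parts      = string.split("a,b,c",",")                       -- { "a", "b", "c" }
local key, value = lpeg.match(lpeg.splitat("=",true),"width=10pt") -- "width", "10pt"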
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
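-- worked example (illustrative): the constant offsets cancel the utf-8 marker bits,
-- e.g. 12416 == 0xC0*64+0x80 and 925824 == 0xE0*4096+0x80*64+0x80; so for the two
-- byte sequence 195,169 ("é", U+00E9) we get 195*64+169-12416 == 233 == 0xE9
print(lpeg.match(lpeg.patterns.utf8byte,"é")) --> 233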
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
end
- return n
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
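-- usage sketch (assuming stock LPeg): a single pair gives a pattern, a list of
-- pairs plus makefunction=true gives a ready-made replacing function
local quoter  = lpeg.replacer("'","''")
local cleaner = lpeg.replacer({ { "\t", " " }, { "\r", "" } },nil,true)
print(lpeg.match(quoter,"it's")) --> it''s
print(cleaner("a\tb\r"))         --> a b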
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
else
- return str
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
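-- usage sketch: the finder skips ahead to the first occurrence; with no captures a
-- match returns the position just past it, which also serves as a presence test
local findscheme = lpeg.finder("://")
print(lpeg.match(findscheme,"http://host")) --> 8
print(lpeg.match(findscheme,"no scheme"))   --> nil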
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
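-- usage sketch: each hit becomes one space and every other utf-8 character is
-- dropped, so the length of the substituted string is the number of occurrences
local howmany = lpeg.counter("ab")
print(howmany("xabyab")) --> 2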
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
end
-
-local space = S(" \t\v\n")
-local nospace = 1 - space
-local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
-
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
end
-
-function string.is_empty(str)
- return not find(str,"%S")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+  new[i]=old[i]
+ end
+ return new
end
-
-local patterns_escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-function string.escapedpattern(str,simple)
- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
end
-
-function string.topattern(str,lowercase,strict)
- if str == "" then
- return ".*"
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
else
- str = gsub(str,".",simple_escapes)
- if lowercase then
- str = lower(str)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
end
- if strict then
- return "^" .. str .. "$"
+ else
+ if k==v then
+ p=P(k)
else
- return str
+ p=P(k)/v
end
+ end
end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
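-- usage sketch: the list is turned into a byte-level trie so common prefixes are
-- shared; for ascii input the result behaves like P("a")*(P("a")+P("b"))+P("b")
local p = lpeg.utfchartabletopattern { "aa", "ab", "b" }
print(lpeg.match(p,"abc")) --> 3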
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
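-- sketch of the idea: nextstep decomposes n in binary and builds a grammar where
-- rule "2k" is two copies of rule "k", so n repetitions only need O(log n) rules
local three = lpeg.times(lpeg.P("ab"),3)
print(lpeg.match(three,"ababab")) --> 7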
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
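-- usage sketch: redundant trailing zeros (and a bare trailing period) are stripped
print(lpeg.match(lpeg.patterns.stripzeros,"1.2300 0.400 5.000")) --> 1.23 0.4 5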
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-function"] = package.loaded["l-function"] or true
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-
--- Starting with version 5.2 Lua no longer provide ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashion we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
+-- original size: 361, stripped down to: 322
-if not ipairs then
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
+end -- of closure
- function ipairs(a)
- return iterate, a, 0
- end
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["l-string"] = package.loaded["l-string"] or true
-if not pairs then
+-- original size: 5513, stripped down to: 2708
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
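-- usage sketch: escapedpattern makes a literal string safe for string.find, while
-- topattern maps the * wildcard onto .* and can anchor the result with strict=true
print(string.escapedpattern("1+1=2"))          --> 1%+1=2
print(string.topattern("foo.bar*",false,true)) --> ^foo%.bar.*$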
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
- function pairs(t)
- return next, t -- , nil
- end
-end
+end -- of closure
--- Also, unpack has been moved to the table table, and for compatiility
--- reasons we provide both now.
+do -- create closure to overcome 200 locals limit
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
-end
+package.loaded["l-table"] = package.loaded["l-table"] or true
--- extra functions, some might go (when not used)
+-- original size: 44643, stripped down to: 19717
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
end
- return lst
+ end
+ return lst
end
-
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
end
return keys
+ else
+ return {}
+ end
end
-
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
end
-
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
+ category=3
end
+ end
end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category==0 or category==3 then
+ sort(srt,compare)
else
- sort(srt)
+ sort(srt)
end
return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
end
-
-local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
end
- sort(srt)
- return srt
+ end
+ return sortedkeys(keys)
end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
local function nothing() end
-
-local function sortedhash(t)
- if t then
- local n, s = 0, sortedkeys(t) -- the robust one
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash
-
-function table.append(t, list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- local lst = { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- t[k] = v
- end
- end
- return t
-end
-
function table.merged(...)
- local tmp, lst = { }, { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
- end
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
end
- return tmp
+ end
+ return t
end
-
-function table.imerge(t, ...)
- local lst, nt = { ... }, #t
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
end
- return t
+ end
+ return t
end
-
function table.imerged(...)
- local tmp, ntmp, lst = { }, 0, {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k,v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
else
- return { }
- end
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
end
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent)
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
end
- return h
+ end
+ return h
end
-
function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
end
- return hsh
+ end
+ return hsh
end
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
}
-
local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
end
+ end
+ return tt
end
- return nil
+ end
+ return nil
end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there no good number_to_string converter with the best resolution
-
+local propername=patterns.propername
local function dummy() end
-
local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
- else
- local tn = type(name)
- if tn == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
- end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- -- circular
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%s]=loadstring(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%q]=loadstring(%q),",depth,k,f))
- end
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
+ local tn=type(name)
+ if tn=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("%s[0x%04X]={",depth,name))
else
- handle("[" .. name .. "]={")
+ handle(format("%s[%s]={",depth,name))
end
- elseif tname == "boolean" then
- if name then
- handle("return {")
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
else
- handle("{")
- end
- else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
- end
- handle("}")
-end
-
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-
-table.tohandle = serialize
-
--- sometimes tables are real use (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
- end
-end
-
-local function flattened(t,f,depth)
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume then only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
else
- f[k] = v
- end
- end
- end
- local n = #f
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- n = #f
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
else
- n = n + 1
- f[n] = v
- end
- end
- return f
-end
-
-table.flattened = flattened
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { }
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
else
- f[#f+1] = v
- end
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
else
- f[#f+1] = v
- end
- end
- return f
-end
-
-function table.unnest(t) -- bad name
- return unnest(t)
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
else
- return false
- end
- end
- return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
-
--- maybe also make a combined one
-
-function table.compact(t)
- if t then
- for k,v in next, t do
- if not next(v) then
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
- return false
-end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
- end
- return n
-end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
- for k, v in next, s do
- n[k] = v
- end
- end
- for k, v in next, t do
- n[v] = k
- end
- return n
-end
-
-function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
- end
-end
-
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
end
- end
- return concat(s, sep or " | ")
-end
-
-function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- table.tohandle(print,t,...)
- end
-end
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t)
- return not t or not next(t)
-end
-
-function table.has_one_entry(t)
- return t and not next(t,next(t))
-end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
-end
-
--- new, might move (maybe duplicate)
-
-function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
- end
- return new
-end
-
--- function table.sorted(t,...)
--- table.sort(t,...)
--- return t -- still sorts in-place
--- end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
-local lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
-local report = texio and texio.write_nl or print
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-
--- probleem: separator can be lpeg and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
-function string.splitup(str,separator)
- if not separator then
- separator = ","
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
end
- return match(splitters_m[separator] or splitat(separator),str)
-end
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * tsplitat(newline)
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ if next(root) then
+ do_serialize(root,name,"",0)
end
- return match(c,str)
+ end
+ handle("}")
end
-
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
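-- usage sketch: with the default options the indexed slice stays compact and hash
-- keys are written in quoted form, one entry per line
print(table.serialize({ 10, 20, answer = 42 },"t"))
--> t={
-->  10,
-->  20,
-->  ["answer"]=42,
--> }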
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
else
- return Cs(((str^1)/"" + 1)^0)
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
end
+ f:close()
+ io.flush()
+ end
end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
else
- return Cs((((1-str)^1)/"" + 1)^0)
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
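-- usage sketch: indexed parts are appended in order while hash parts keep their keys
local flat = table.flattened { "x", { "y", "z" }, a = { b = 1 } }
-- flat is now { "x", "y", "z", b = 1 }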
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return false
end
+ end
+ return true
end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
end
- return splitter
+ end
end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
end
- return splitter
+ end
+ return false
end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
end
-
+ return t
+ end
end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
+function table.is_empty(t)
+ return not t or not next(t)
end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
end
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
--- loop back from the end cq. prepend.
-
-local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
-
-function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = fastcopy(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
- if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
- else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
- else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
- end
- end
- return p
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
--- for k, v in next, t do
- for k, v in table.sortedhash(t) do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
end
- return p
+ end
+ return new
end
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
+function table.sorted(t,...)
+ sort(t,...)
+ return t
end
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
-
end -- of closure
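-- A minimal usage sketch (illustrative only, assuming the l-table helpers
-- defined in the hunk above -- table.flattened, table.serialize, table.unique --
-- have been loaded as part of mtxrun); variable names here are hypothetical.
local t    = { a = 1, b = { c = 2, d = { e = 3 } }, "x", { "y", "z" } }
local flat = table.flattened(t)                       -- hash part merged, array part appended
print(table.serialize(flat, "flat"))                  -- dumps a loadable "flat={ ... }" chunk
print(table.concat(table.unique { 1, 2, 2, 3 }, ",")) -- "1,2,3"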
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-io"] = package.loaded["l-io"] or true
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local type = type
+-- original size: 8799, stripped down to: 6325
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
+ io.fileseparator,io.pathseparator="\\",";"
else
- io.fileseparator, io.pathseparator = "/" , ":"
+ io.fileseparator,io.pathseparator="/",":"
end
-
-function io.loaddata(filename,textmode)
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
- local data = f:read('*all')
- f:close()
- return data
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
else
- return nil
+ step=floor(size/(1024*1024))*1024*1024/8
end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
else
- return false
+ f:write(data or "")
end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- assert(f:close())
- return true
- end
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
end
-
function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- assert(f:close())
- return s
- end
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
end
-
function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(filename)
- local n = f and io.noflines(f) or 0
- assert(f:close())
- return n
+ if type(f)=="string" then
+ local f=io.open(filename)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
+ return 0
end
-end
-
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
}
-
function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
+ if f then
+ return nextchar[n or 1],f
+ end
end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
end
+ end
}
-
function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
end
-
function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
end
+ end
end
+ end
end
-
local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
end
-
-io.readnumber = readnumber
-
+io.readnumber=readnumber
function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"%z","")
- return str
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
end
-
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
end -- of closure
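-- A small sketch of the l-io helpers defined above (io.savedata, io.loaddata,
-- io.readnumber); the scratch file name is purely illustrative.
local name = "l-io-demo.tmp"
io.savedata(name, { "one", "two" }, "\n")   -- writes the two strings joined by a newline
print(#(io.loaddata(name) or ""))           -- 7 bytes when read back in binary mode
local f = io.open(name, "rb")
if f then
  print(io.readnumber(f, 2))                -- first two bytes as a big-endian 16-bit number
  f:close()
end
os.remove(name)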
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-number'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-number"] = package.loaded["l-number"] or true
--- this module will be replaced when we have the bit library
+-- original size: 4939, stripped down to: 2830
-local tostring = tostring
-local format, floor, match, rep = string.format, math.floor, string.match, string.rep
-local concat, insert = table.concat, table.insert
-local lpegmatch = lpeg.match
-
-number = number or { }
-local number = number
-
--- a,b,c,d,e,f = number.toset(100101)
-
-function number.toset(n)
- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
-end
-
-function number.toevenhex(n)
- local s = format("%X",n)
- if #s % 2 == 0 then
- return s
+if not modules then modules={} end modules ['l-number']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tostring,tonumber=tostring,tonumber
+local format,floor,match,rep=string.format,math.floor,string.match,string.rep
+local concat,insert=table.concat,table.insert
+local lpegmatch=lpeg.match
+number=number or {}
+local number=number
+if bit32 then
+ local btest,bor=bit32.btest,bit32.bor
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ number.hasbit=btest
+ number.setbit=bor
+ function number.setbit(x,p)
+ return btest(x,p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return btest(x,p) and x-p or x
+ end
+else
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ function number.hasbit(x,p)
+ return x%(p+p)>=p
+ end
+ function number.setbit(x,p)
+ return (x%(p+p)>=p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return (x%(p+p)>=p) and x-p or x
+ end
+end
+if bit32 then
+ local bextract=bit32.extract
+ local t={
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ }
+ function number.tobitstring(b,m)
+ local n=32
+ for i=0,31 do
+ local v=bextract(b,i)
+ local k=32-i
+ if v==1 then
+ n=k
+ t[k]="1"
+ else
+ t[k]="0"
+ end
+ end
+ if m then
+ m=33-m*8
+ if m<1 then
+ m=1
+ end
+ return concat(t,"",m)
+ elseif n<8 then
+ return concat(t)
+ elseif n<16 then
+ return concat(t,"",9)
+ elseif n<24 then
+ return concat(t,"",17)
else
- return "0" .. s
+ return concat(t,"",25)
end
-end
-
--- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- on
---
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- end
---
--- of course dedicated "(.)(.)(.)(.)" matches are even faster
-
-local one = lpeg.C(1-lpeg.S(''))^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
-function number.bits(n,zero)
- local t, i = { }, (zero and 0) or 1
- while n > 0 do
- local m = n % 2
- if m > 0 then
- insert(t,1,i)
- end
- n = floor(n/2)
- i = i + 1
+ end
+else
+ function number.tobitstring(n,m)
+ if n>0 then
+ local t={}
+ while n>0 do
+ insert(t,1,n%2>0 and 1 or 0)
+ n=floor(n/2)
+ end
+ local nn=8-#t%8
+ if nn>0 and nn<8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m=m*8-#t
+ if m>0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+ rep("00000000",m)
+ else
+ return "00000000"
end
- return t
+ end
end
-
-
-function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
-end
-
-function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
-end
-
-function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+function number.valid(str,default)
+ return tonumber(str) or default or nil
end
-
-function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+function number.toevenhex(n)
+ local s=format("%X",n)
+ if #s%2==0 then
+ return s
+ else
+ return "0"..s
+ end
end
-
-
-function number.tobitstring(n,m)
- if n == 0 then
- if m then
- rep("00000000",m)
- else
- return "00000000"
- end
+local one=lpeg.C(1-lpeg.S('')/tonumber)^1
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+local function bits(n,i,...)
+ if n>0 then
+ local m=n%2
+ local n=floor(n/2)
+ if m>0 then
+ return bits(n,i+1,i,...)
else
- local t = { }
- while n > 0 do
- insert(t,1,n % 2 > 0 and 1 or 0)
- n = floor(n/2)
- end
- local nn = 8 - #t % 8
- if nn > 0 and nn < 8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m = m * 8 - #t
- if m > 0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
+ return bits(n,i+1,...)
end
+ else
+ return...
+ end
+end
+function number.bits(n)
+ return { bits(n,1) }
end
-
end -- of closure
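-- A brief sketch of the bit helpers above; the bit32 branch and the plain
-- arithmetic fallback expose the same interface, so this behaves the same
-- either way.
local b = number.bit(3)                     -- 2^(3-1) == 4
local x = number.setbit(0, b)               -- 4
print(number.hasbit(x, b))                  -- true
print(number.tobitstring(x, 1))             -- "00000100", padded to one byte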
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-set'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-set"] = package.loaded["l-set"] or true
--- This will become obsolete when we have the bitset library embedded.
-
-set = set or { }
-
-local nums = { }
-local tabs = { }
-local concat = table.concat
-local next, type = next, type
-
-set.create = table.tohash
+-- original size: 1923, stripped down to: 1133
+if not modules then modules={} end modules ['l-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+set=set or {}
+local nums={}
+local tabs={}
+local concat=table.concat
+local next,type=next,type
+set.create=table.tohash
function set.tonumber(t)
- if next(t) then
- local s = ""
- -- we could save mem by sorting, but it slows down
- for k, v in next, t do
- if v then
- -- why bother about the leading space
- s = s .. " " .. k
- end
- end
- local n = nums[s]
- if not n then
- n = #tabs + 1
- tabs[n] = t
- nums[s] = n
- end
- return n
- else
- return 0
+ if next(t) then
+ local s=""
+ for k,v in next,t do
+ if v then
+ s=s.." "..k
+ end
+ end
+ local n=nums[s]
+ if not n then
+ n=#tabs+1
+ tabs[n]=t
+ nums[s]=n
end
+ return n
+ else
+ return 0
+ end
end
-
function set.totable(n)
- if n == 0 then
- return { }
- else
- return tabs[n] or { }
- end
+ if n==0 then
+ return {}
+ else
+ return tabs[n] or {}
+ end
end
-
function set.tolist(n)
- if n == 0 or not tabs[n] then
- return ""
- else
- local t, n = { }, 0
- for k, v in next, tabs[n] do
- if v then
- n = n + 1
- t[n] = k
- end
- end
- return concat(t," ")
+ if n==0 or not tabs[n] then
+ return ""
+ else
+ local t,n={},0
+ for k,v in next,tabs[n] do
+ if v then
+ n=n+1
+ t[n]=k
+ end
end
+ return concat(t," ")
+ end
end
-
function set.contains(n,s)
- if type(n) == "table" then
- return n[s]
- elseif n == 0 then
- return false
- else
- local t = tabs[n]
- return t and t[s]
- end
+ if type(n)=="table" then
+ return n[s]
+ elseif n==0 then
+ return false
+ else
+ local t=tabs[n]
+ return t and t[s]
+ end
end
-
-
end -- of closure
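-- A brief sketch of the l-set interning above: a hash set is mapped onto a
-- small integer and can be queried or turned back into a list later.
local s = set.create { "tex", "lua" }       -- { tex = true, lua = true } via table.tohash
local n = set.tonumber(s)                   -- interned id, >= 1 for a non-empty set
print(set.contains(n, "lua"))               -- true
print(set.tolist(n))                        -- "tex lua" (key order is not guaranteed)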
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-os'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This file deals with some operating system issues. Please don't bother me
--- with the pros and cons of operating systems as they all have their flaws
--- and benefits. Bashing one of them won't help solving problems and fixing
--- bugs faster and is a waste of time and energy.
---
--- path separators: / or \ ... we can use / everywhere
--- suffixes : dll so exe <none> ... no big deal
--- quotes : we can use "" in most cases
--- expansion : unless "" are used * might give side effects
--- piping/threads : somewhat different for each os
--- locations : specific user file locations and settings can change over time
---
--- os.type : windows | unix (new, we already guessed os.platform)
--- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
--- os.platform : extended os.name with architecture
-
--- maybe build io.flush in os.execute
-
-local os = os
-local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
-local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+package.loaded["l-os"] = package.loaded["l-os"] or true
--- The following code permits traversing the environment table, at least
--- in luatex. Internally all environment names are uppercase.
+-- original size: 13692, stripped down to: 8406
+if not modules then modules={} end modules ['l-os']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local os=os
+local date,time=os.date,os.time
+local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
+local concat=table.concat
+local random,ceil,randomseed=math.random,math.ceil,math.randomseed
+local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
+math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+randomseed(math.initialseed)
if not os.__getenv__ then
-
- os.__getenv__ = os.getenv
- os.__setenv__ = os.setenv
-
- if os.env then
-
- local osgetenv = os.getenv
- local ossetenv = os.setenv
- local osenv = os.env local _ = osenv.PATH -- initialize the table
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- if type(v) == "table" then
- v = concat(v,";") -- path
- end
- ossetenv(K,v)
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- else
-
- local ossetenv = os.setenv
- local osgetenv = os.getenv
- local osenv = { }
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- local function __index(t,k)
- return os.getenv(k)
- end
- local function __newindex(t,k,v)
- os.setenv(k,v)
- end
-
- os.env = { }
-
- setmetatable(os.env, { __index = __index, __newindex = __newindex } )
-
+ os.__getenv__=os.getenv
+ os.__setenv__=os.setenv
+ if os.env then
+ local osgetenv=os.getenv
+ local ossetenv=os.setenv
+ local osenv=os.env local _=osenv.PATH
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ if type(v)=="table" then
+ v=concat(v,";")
+ end
+ ossetenv(K,v)
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ else
+ local ossetenv=os.setenv
+ local osgetenv=os.getenv
+ local osenv={}
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
end
-
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+ os.env={}
+ setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
+ end
end
-
--- end of environment hack
-
-local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
-
+local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
-
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- local handle = io.popen(command,"r")
- return handle and handle:read("*all") or ""
+ local handle=io.popen(command,"r")
+ return handle and handle:read("*all") or ""
end
-
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
- else
- io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
- end
-end
-
-os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
-os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
-
-if os.type == "windows" then
- os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
+ if find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
+ else
+ io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
+ end
+end
+os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
+os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
+if os.type=="windows" then
+ os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
else
- os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
+ os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
end
-
+local launchers={
+ windows="start %s",
+ macosx="open %s",
+ unix="$BROWSER %s &> /dev/null &",
+}
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
-
if not os.times then
- -- utime = user time
- -- stime = system time
- -- cutime = children user time
- -- cstime = children system time
- function os.times()
- return {
- utime = os.gettimeofday(), -- user
- stime = 0, -- system
- cutime = 0, -- children user
- cstime = 0, -- children system
- }
- end
+ function os.times()
+ return {
+ utime=os.gettimeofday(),
+ stime=0,
+ cutime=0,
+ cstime=0,
+ }
+ end
end
-
-os.gettimeofday = os.gettimeofday or os.clock
-
-local startuptime = os.gettimeofday()
-
+os.gettimeofday=os.gettimeofday or os.clock
+local startuptime=os.gettimeofday()
function os.runtime()
- return os.gettimeofday() - startuptime
-end
-
-
--- no need for function anymore as we have more clever code and helpers now
--- this metatable trickery might as well disappear
-
-os.resolvers = os.resolvers or { } -- will become private
-
-local resolvers = os.resolvers
-
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
-local osix = osmt.__index
-
-osmt.__index = function(t,k)
- return (resolvers[k] or osix)(t,k)
-end
-
-setmetatable(os,osmt)
-
--- we can use HOSTTYPE on some platforms
-
-local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
-
+ return os.gettimeofday()-startuptime
+end
+os.resolvers=os.resolvers or {}
+local resolvers=os.resolvers
+setmetatable(os,{ __index=function(t,k)
+ local r=resolvers[k]
+ return r and r(t,k) or nil
+end })
+local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
local function guess()
- local architecture = os.resultof("uname -m") or ""
- if architecture ~= "" then
- return architecture
- end
- architecture = os.getenv("HOSTTYPE") or ""
- if architecture ~= "" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-
-if platform ~= "" then
-
- os.platform = platform
-
-elseif os.type == "windows" then
-
- -- we could set the variable directly, no function needed here
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "linux" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "macosx" then
-
- --[[
- Identifying the architecture of OSX is quite a mess and this
- is the best we can come up with. For some reason $HOSTTYPE is
- a kind of pseudo environment variable, not known to the current
- environment. And yes, uname cannot be trusted either, so there
- is a chance that you end up with a 32 bit run on a 64 bit system.
- Also, some proper 64 bit intel macs are too cheap (low-end) and
- therefore not permitted to run the 64 bit kernel.
- ]]--
-
- function os.resolvers.platform(t,k)
- -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
- -- if architecture == "" then
- -- architecture = os.resultof("echo $HOSTTYPE") or ""
- -- end
- local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
- if architecture == "" then
- -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
- platform = "osx-intel"
- elseif find(architecture,"i386") then
- platform = "osx-intel"
- elseif find(architecture,"x86_64") then
- platform = "osx-64"
- else
- platform = "osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "sunos" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "freebsd" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "kfreebsd" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "kfreebsd-amd64"
- else
- platform = "kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
+ local architecture=os.resultof("uname -m") or ""
+ if architecture~="" then
+ return architecture
+ end
+ architecture=os.getenv("HOSTTYPE") or ""
+ if architecture~="" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+if platform~="" then
+ os.platform=platform
+elseif os.type=="windows" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform="mswin-64"
+ else
+ platform="mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="linux" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="linux-64"
+ elseif find(architecture,"ppc") then
+ platform="linux-ppc"
+ else
+ platform="linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="macosx" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
+ if architecture=="" then
+ platform="osx-intel"
+ elseif find(architecture,"i386") then
+ platform="osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform="osx-64"
+ else
+ platform="osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="sunos" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform="solaris-sparc"
+ else
+ platform="solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="freebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform="freebsd-amd64"
+ else
+ platform="freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="kfreebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="kfreebsd-amd64"
+ else
+ platform="kfreebsd-i386"
end
-
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
else
-
- -- platform = "linux"
- -- os.setenv("MTX_PLATFORM",platform)
- -- os.platform = platform
-
- function os.resolvers.platform(t,k)
- local platform = "linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-end
-
--- beware, we set the randomseed
-
--- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
--- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
--- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
--- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
---
--- as we don't call this function too often there is not so much risk on repetition
-
-local t = { 8, 9, "a", "b" }
-
+ function os.resolvers.platform(t,k)
+ local platform="linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+end
+local t={ 8,9,"a","b" }
function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
end
-
local d
-
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
- if delta then
- if d > 0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
+ d=d or tonumber(tonumber(date("%H")-date("!%H")))
+ if delta then
+ if d>0 then
+ return format("+%02i:00",d)
else
- return 1
- end
-end
-
-local memory = { }
-
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+local timeformat=format("%%s%s",os.timezone(true))
+local dateformat="!%Y-%m-%d %H:%M:%S"
+function os.fulltime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+local dateformat="%Y-%m-%d %H:%M:%S"
+function os.localtime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return date(dateformat,t)
+end
+function os.converttime(t,default)
+ local t=tonumber(t)
+ if t and t>0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+local memory={}
local function which(filename)
- local fullname = memory[filename]
- if fullname == nil then
- local suffix = file.suffix(filename)
- local suffixes = suffix == "" and os.binsuffixes or { suffix }
- for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
- local df = file.join(directory,filename)
- for i=1,#suffixes do
- local dfs = file.addsuffix(df,suffixes[i])
- if io.exists(dfs) then
- fullname = dfs
- break
- end
- end
- end
- if not fullname then
- fullname = false
+ local fullname=memory[filename]
+ if fullname==nil then
+ local suffix=file.suffix(filename)
+ local suffixes=suffix=="" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ local df=file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs=file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname=dfs
+ break
end
- memory[filename] = fullname
+ end
end
- return fullname
+ if not fullname then
+ fullname=false
+ end
+ memory[filename]=fullname
+ end
+ return fullname
+end
+os.which=which
+os.where=which
+function os.today()
+ return date("!*t")
+end
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S")
+end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
end
-
-os.which = which
-os.where = which
-
--- print(os.which("inkscape.exe"))
--- print(os.which("inkscape"))
--- print(os.which("gs.exe"))
--- print(os.which("ps2pdf"))
end -- of closure
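-- A hedged sketch of the l-os layer above: os.platform is resolved lazily via
-- the metatable, os.uuid produces a version 4 id, and os.which scans PATH
-- (os.which also needs the file helpers from the next closure to be loaded).
print(os.platform)                          -- e.g. "linux-64" or "mswin-64"
print(os.uuid())                            -- xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
print(os.fulltime(os.time()))               -- "YYYY-MM-DD HH:MM:SS+HH:00" style stamp
print(os.which("lua") or "not found")       -- full path of the first "lua" binary on PATH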
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
+package.loaded["l-file"] = package.loaded["l-file"] or true
-file = file or { }
-local file = file
-
-local insert, concat = table.insert, table.concat
-local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-
-local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
-
-local function dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
-end
+-- original size: 16648, stripped down to: 9051
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
local function basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+ return name and lpegmatch(pattern,name) or name
end
-
--- local function nameonly(name)
--- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
--- end
-
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
local function nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
-end
-
-local function extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
-end
-
-local function splitname(name)
- local n, s = match(name,"^(.+)%.([^/\\]-)$")
- return n or name, s or ""
-end
-
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
-
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
-end
-
-function file.addsuffix(filename, suffix, criterium)
- if not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = splitname(filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
return filename
+ end
end
- else
- local n, s = splitname(filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
end
- return n .. "." .. suffix
+ end
end
+ return (n or filename).."."..suffix
+ end
end
-
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
end
-
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...) -- rather dirty
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if not a or a == "" then -- not a added
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
end
-
-
--- We should be able to use:
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(dirname(name,"."))
--- return a and sub(a.permissions,2,2) == "w"
--- end
---
--- But after some testing Taco and I came up with:
-
function file.is_writable(name)
- if lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
- return false
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
end
-
+local readable=P("r")*Cc(true)
function file.is_readable(name)
- local a = attributes(name)
- return a and sub(a.permissions,1,1) == "r"
-end
-
-file.isreadable = file.is_readable -- deprecated
-file.iswritable = file.is_writable -- deprecated
-
--- todo: lpeg \\ / .. does not save much
-
-local checkedsplit = string.checkedsplit
-
-function file.splitpath(str,separator) -- string
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return concat(tab,separator or io.pathseparator) -- can have trailing //
-end
-
--- we can hash them weakly
-
-
-function file.collapsepath(str,anchor)
- if anchor and not find(str,"^/") and not find(str,"^%a:") then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif find(str,"^%.%.") then
- str = gsub(str,"\\","/")
- return str
- elseif not find(str,"%.") then
- str = gsub(str,"\\","/")
- return str
- end
- str = gsub(str,"\\","/")
- local starter, rest = match(str,"^(%a+:/*)(.-)$")
- if starter then
- str = rest
- end
- local oldelements = checkedsplit(str,"/")
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif find(str,"^/") then
- return "/" .. concat(newelements,'/')
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
else
- return concat(newelements, '/')
- end
-end
-
-
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
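The lpeg-based file.join distinguishes network prefixes, rooted paths and plain segments. A small sketch of the expected behaviour (illustrative values only):

  file.join("a","b","c")          -- "a/b/c"
  file.join("/","a","b")          -- "/a/b"           (rooted, single leading slash kept)
  file.join("//server","share")   -- "//server/share" (network prefix left alone)
  file.join("c:","data","x.tex")  -- "c:/data/x.tex"  (a drive spec matches the isnetwork branch)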
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
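file.collapsepath resolves "." and ".." segments without touching the file system; with a truthy anchor argument a relative path is first prefixed with the current directory. A hedged sketch:

  file.collapsepath("a/b/../c")        -- "a/c"
  file.collapsepath("/aa/./bb/../cc")  -- "/aa/cc"
  file.collapsepath("x.tex",true)      -- getcurrentdir() .. "/x.tex", then collapsed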
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
function file.robustname(str,strict)
- str = gsub(str,"[^%a%d%/%-%.\\]+","-")
+ if str then
+ str=lpegmatch(pattern_a,str) or str
if strict then
- return lower(gsub(str,"^%-*(.-)%-*$","%1"))
+ return lpegmatch(pattern_b,str) or str
else
- return str
+ return str
end
+ end
end
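file.robustname maps anything outside [a-zA-Z0-9.-] to a dash; with a truthy strict argument the surrounding dashes are stripped as well (unlike the old version it no longer lowercases). A sketch:

  file.robustname("my file (final).tex")  -- "my-file--final-.tex"
  file.robustname("--Foo Bar--",true)     -- "Foo-Bar"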
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
+file.readdata=io.loaddata
+file.savedata=io.savedata
function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
-end
-
--- lpeg variants, slightly faster, not always
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
-local rootbased = P("/") + letter*P(":")
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename)~=nil
end
-
function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
+ return filename and lpegmatch(rootbased,filename)~=nil
end
-
--- actually these are schemes
-
-local slash = S("\\/")
-local period = P(".")
-local drive = C(R("az","AZ")) * P(":")
-local path = C(((1-slash)^0 * slash)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
-
-function file.splitname(str,splitdrive)
- if splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
-end
-
-function file.nametotable(str,splitdrive) -- returns table
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
-
--- for myself:
-
function file.strip(name,dir)
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
end
@@ -3060,64 +3065,81 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-md5'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This also provides file checksums and checkers.
+package.loaded["l-md5"] = package.loaded["l-md5"] or true
-local md5, file = md5, file
-local gsub, format, byte = string.gsub, string.format, string.byte
+-- original size: 3760, stripped down to: 2088
+if not modules then modules={} end modules ['l-md5']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not md5 then
+ md5=optionalrequire("md5")
+end
+if not md5 then
+ md5={
+ sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+local md5,file=md5,file
+local gsub,format,byte=string.gsub,string.format,string.byte
+local md5sum=md5.sum
local function convert(str,fmt)
- return (gsub(md5.sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
end
-
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
-
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime=lfs.attributes(newname,"modification")
+ if not newtime then
+ return true
+ elseif newtime>=oldtime then
+ return false
+ elseif oldtime-newtime<(threshold or 1) then
+ return false
else
- return true
+ return true
end
+ else
+ return false
+ end
+end
+file.needs_updating=file.needsupdating
+function file.syncmtimes(oldname,newname)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
end
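file.needsupdating reports true when the target is missing or older than the source by at least the threshold (default 1 second); a missing source always yields false. A hedged sketch of the intended calling pattern:

  if file.needsupdating("source.tex","result.pdf",2) then
    -- regenerate result.pdf here ...
    file.syncmtimes("source.tex","result.pdf")  -- then copy the source mtime onto the result
  end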
-
function file.checksum(name)
- if md5 then
- local data = io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
+ if md5 then
+ local data=io.loaddata(name)
+ if data then
+ return md5.HEX(data)
end
- return nil
+ end
+ return nil
end
-
function file.loadchecksum(name)
- if md5 then
- local data = io.loaddata(name .. ".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
+ if md5 then
+ local data=io.loaddata(name..".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
end
-
-function file.savechecksum(name, checksum)
- if not checksum then checksum = file.checksum(name) end
- if checksum then
- io.savedata(name .. ".md5",checksum)
- return checksum
- end
- return nil
+function file.savechecksum(name,checksum)
+ if not checksum then checksum=file.checksum(name) end
+ if checksum then
+ io.savedata(name..".md5",checksum)
+ return checksum
+ end
+ return nil
end
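The checksum helpers only do something useful when an md5 library is present (see the optionalrequire fallback above). A hedged sketch:

  local sum = file.checksum("data.bin")      -- uppercase hex digest of the file content, or nil
  file.savechecksum("data.bin")              -- writes the digest to data.bin.md5
  local old = file.loadchecksum("data.bin")  -- reads data.bin.md5 back, whitespace stripped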
@@ -3125,594 +3147,546 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-url'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
-local concat = table.concat
-local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
-local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
+package.loaded["l-url"] = package.loaded["l-url"] or true
--- from wikipedia:
---
--- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
--- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
--- | | | | | | | |
--- | userinfo hostname port | | query fragment
--- | \________________________________/\_____________|____|/
--- scheme | | | |
--- | authority path | |
--- | | |
--- | path interpretable as filename
--- | ___________|____________ |
--- / \ / \ |
--- urn:example:animal:ferret:nose interpretable as extension
-
-url = url or { }
-local url = url
-
-local tochar = function(s) return char(tonumber(s,16)) end
-
-local colon = P(":")
-local qmark = P("?")
-local hash = P("#")
-local slash = P("/")
-local percent = P("%")
-local endofstring = P(-1)
-
-local hexdigit = R("09","AF","af")
-local plus = P("+")
-local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
--- we also assume that when we have a scheme, we also have an authority
-
-local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
-local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
-local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
-local fragmentstr = Cs((escaped+(1- endofstring))^0)
-
-local scheme = schemestr * colon + nothing
-local authority = slash * slash * authoritystr + nothing
-local path = slash * pathstr + nothing
-local query = qmark * querystr + nothing
-local fragment = hash * fragmentstr + nothing
-
-local validurl = scheme * authority * path * query * fragment
-local parser = Ct(validurl)
-
-lpegpatterns.url = validurl
-lpegpatterns.urlsplitter = parser
-
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-
-lpegpatterns.urlescaper = escaper
-
--- todo: reconsider Ct as we can as well have five return values (saves a table)
--- so we can have two parsers, one with and one without
+-- original size: 11806, stripped down to: 5417
+if not modules then modules={} end modules ['l-url']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local char,format,byte=string.char,string.format,string.byte
+local concat=table.concat
+local tonumber,type=tonumber,type
+local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
+local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
+url=url or {}
+local url=url
+local tochar=function(s) return char(tonumber(s,16)) end
+local colon=P(":")
+local qmark=P("?")
+local hash=P("#")
+local slash=P("/")
+local percent=P("%")
+local endofstring=P(-1)
+local hexdigit=R("09","AF","af")
+local plus=P("+")
+local nothing=Cc("")
+local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
+local escaped=(plus/" ")+escapedchar
+local noslash=P("/")/""
+local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr=Cs((escaped+(1- qmark-hash))^0)
+local querystr=Cs(((1- hash))^0)
+local fragmentstr=Cs((escaped+(1- endofstring))^0)
+local scheme=schemestr*colon+nothing
+local authority=slash*slash*authoritystr+nothing
+local path=slash*pathstr+nothing
+local query=qmark*querystr+nothing
+local fragment=hash*fragmentstr+nothing
+local validurl=scheme*authority*path*query*fragment
+local parser=Ct(validurl)
+lpegpatterns.url=validurl
+lpegpatterns.urlsplitter=parser
+local escapes={}
+setmetatable(escapes,{ __index=function(t,k)
+ local v=format("%%%02X",byte(k))
+ t[k]=v
+ return v
+end })
+local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
+local unescaper=Cs((escapedchar+1)^0)
+lpegpatterns.urlunescaped=escapedchar
+lpegpatterns.urlescaper=escaper
+lpegpatterns.urlunescaper=unescaper
local function split(str)
- return (type(str) == "string" and lpegmatch(parser,str)) or str
+ return (type(str)=="string" and lpegmatch(parser,str)) or str
end
-
-local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
-
+local isscheme=schemestr*colon*slash*slash
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
-end
-
-
--- todo: cache them
-
-local rootletter = R("az","AZ")
- + S("_-+")
-local separator = P("://")
-local qualified = P(".")^0 * P("/")
- + rootletter * P(":")
- + rootletter^1 * separator
- + rootletter^1 * P("/")
-local rootbased = P("/")
- + rootletter * P(":")
-
-local barswapper = replacer("|",":")
-local backslashswapper = replacer("\\","/")
-
-local function hashed(str) -- not yet ok (/test?test)
- local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
- if not somescheme and not somequery then
- s = {
- scheme = "file",
- authority = "",
- path = str,
- query = "",
- fragment = "",
- original = str,
- noscheme = true,
- filename = str,
- }
- else -- not always a filename but handy anyway
- local authority, path, filename = s[2], s[3]
- if authority == "" then
- filename = path
- else
- filename = authority .. "/" .. path
- end
- s = {
- scheme = s[1],
- authority = authority,
- path = path,
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = false,
- filename = filename,
- }
- end
- return s
-end
-
--- Here we assume:
---
--- files: /// = relative
--- files: //// = absolute (!)
-
-
-
-url.split = split
-url.hasscheme = hasscheme
-url.hashed = hashed
-
-function url.addscheme(str,scheme) -- no authority
- if hasscheme(str) then
- return str
- elseif not scheme then
- return "file:///" .. str
+ if str then
+ local scheme=lpegmatch(isscheme,str)
+ return scheme~="" and scheme or false
+ else
+ return false
+ end
+end
+local rootletter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
+local rootbased=P("/")+rootletter*P(":")
+local barswapper=replacer("|",":")
+local backslashswapper=replacer("\\","/")
+local equal=P("=")
+local amp=P("&")
+local key=Cs(((escapedchar+1)-equal )^0)
+local value=Cs(((escapedchar+1)-amp -endofstring)^0)
+local splitquery=Cf (Ct("")*P { "sequence",
+ sequence=V("pair")*(amp*V("pair"))^0,
+ pair=Cg(key*equal*value),
+},rawset)
+local function hashed(str)
+ if str=="" then
+ return {
+ scheme="invalid",
+ original=str,
+ }
+ end
+ local s=split(str)
+ local rawscheme=s[1]
+ local rawquery=s[4]
+ local somescheme=rawscheme~=""
+ local somequery=rawquery~=""
+ if not somescheme and not somequery then
+ s={
+ scheme="file",
+ authority="",
+ path=str,
+ query="",
+ fragment="",
+ original=str,
+ noscheme=true,
+ filename=str,
+ }
+ else
+ local authority,path,filename=s[2],s[3]
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
else
- return scheme .. ":///" .. str
- end
-end
-
-function url.construct(hash) -- dodo: we need to escape !
- local fullurl, f = { }, 0
- local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
- if scheme and scheme ~= "" then
- f = f + 1 ; fullurl[f] = scheme .. "://"
- end
- if authority and authority ~= "" then
- f = f + 1 ; fullurl[f] = authority
- end
- if path and path ~= "" then
- f = f + 1 ; fullurl[f] = "/" .. path
- end
- if query and query ~= "" then
- f = f + 1 ; fullurl[f] = "?".. query
- end
- if fragment and fragment ~= "" then
- f = f + 1 ; fullurl[f] = "#".. fragment
- end
- return lpegmatch(escaper,concat(fullurl))
-end
-
+ filename=authority.."/"..path
+ end
+ s={
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=s[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
+ end
+ return s
+end
+url.split=split
+url.hasscheme=hasscheme
+url.hashed=hashed
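url.hashed turns a string into a table; compared with the old version it now also exposes the (unescaped) query string plus a parsed queries table. A rough sketch of the shape the parser above should produce:

  local u = url.hashed("http://example.com/foo/bar.html?x=1&y=2#top")
  -- u.scheme    : "http"
  -- u.authority : "example.com"
  -- u.path      : "foo/bar.html"
  -- u.queries   : { x = "1", y = "2" }
  -- u.fragment  : "top"
  -- u.filename  : "example.com/foo/bar.html"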
+function url.addscheme(str,scheme)
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///"..str
+ else
+ return scheme..":///"..str
+ end
+end
+function url.construct(hash)
+ local fullurl,f={},0
+ local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
+ if scheme and scheme~="" then
+ f=f+1;fullurl[f]=scheme.."://"
+ end
+ if authority and authority~="" then
+ f=f+1;fullurl[f]=authority
+ end
+ if path and path~="" then
+ f=f+1;fullurl[f]="/"..path
+ end
+ if query and query~="" then
+ f=f+1;fullurl[f]="?"..query
+ end
+ if fragment and fragment~="" then
+ f=f+1;fullurl[f]="#"..fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
+local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
function url.filename(filename)
- local t = hashed(filename)
- return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
+ local spec=hashed(filename)
+ local path=spec.path
+ return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
end
-
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+url.escape=escapestring
function url.query(str)
- if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
- else
- return str
- end
+ if type(str)=="string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
+function url.toquery(data)
+ local td=type(data)
+ if td=="string" then
+    return data~="" and escapestring(data) or nil
+ elseif td=="table" then
+ if next(data) then
+ local t={}
+ for k,v in next,data do
+ t[#t+1]=format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ end
+end
+local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
+function url.barepath(path)
+ if not path or path=="" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
end
-
-
-
-
-
-
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-dir'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- dir.expandname will be merged with cleanpath and collapsepath
-
-local type = type
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
-local concat, insert, remove = table.concat, table.insert, table.remove
-local lpegmatch = lpeg.match
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
-
-dir = dir or { }
-local dir = dir
-local lfs = lfs
+package.loaded["l-dir"] = package.loaded["l-dir"] or true
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-local isdir = lfs.isdir
-local isfile = lfs.isfile
-local currentdir = lfs.currentdir
-
--- handy
+-- original size: 13139, stripped down to: 8196
+if not modules then modules={} end modules ['l-dir']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,select=type,select
+local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local lpegmatch=lpeg.match
+local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
+dir=dir or {}
+local dir=dir
+local lfs=lfs
+local attributes=lfs.attributes
+local walkdir=lfs.dir
+local isdir=lfs.isdir
+local isfile=lfs.isfile
+local currentdir=lfs.currentdir
+local chdir=lfs.chdir
+if not isdir then
+ function isdir(name)
+ local a=attributes(name)
+ return a and a.mode=="directory"
+ end
+ lfs.isdir=isdir
+end
+if not isfile then
+ function isfile(name)
+ local a=attributes(name)
+ return a and a.mode=="file"
+ end
+ lfs.isfile=isfile
+end
function dir.current()
- return (gsub(currentdir(),"\\","/"))
+ return (gsub(currentdir(),"\\","/"))
end
-
--- optimizing for no find (*) does not save time
-
-
-local lfsisdir = isdir
-
+local lfsisdir=isdir
local function isdir(path)
- path = gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+ path=gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
end
-
-lfs.isdir = isdir
-
+lfs.isdir=isdir
local function globpattern(path,patt,recurse,action)
- if path == "/" then
- path = path .. "."
- elseif not find(path,"/$") then
- path = path .. '/'
- end
- if isdir(path) then -- lfs.isdir does not like trailing /
- for name in walkdir(path) do -- lfs.dir accepts trailing /
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
- end
- end
- end
-end
-
-dir.globpattern = globpattern
-
+ if path=="/" then
+ path=path.."."
+ elseif not find(path,"/$") then
+ path=path..'/'
+ end
+ if isdir(path) then
+ for name in walkdir(path) do
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+dir.globpattern=globpattern
local function collectpattern(path,patt,recurse,result)
- local ok, scanner
- result = result or { }
- if path == "/" then
- ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner, first do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
- result[name] = attr
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collectpattern(full,patt,recurse)
- result[name] = attr
- end
- end
- end
- return result
-end
-
-dir.collectpattern = collectpattern
-
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
+ local ok,scanner
+ result=result or {}
+ if path=="/" then
+ ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
+ else
+ ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
+ end
+ if ok and type(scanner)=="function" then
+ if not find(path,"/$") then path=path..'/' end
+ for name in scanner,first do
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ attr.list=collectpattern(full,patt,recurse)
+ result[name]=attr
+ end
+ end
+ end
+ return result
+end
+dir.collectpattern=collectpattern
+local pattern=Ct {
+ [1]=(C(P(".")+P("/")^1)+C(R("az","AZ")*P(":")*P("/")^0)+Cc("./"))*V(2)*V(3),
+ [2]=C(((1-S("*?/"))^0*P("/"))^0),
+ [3]=C(P(1)^0)
}
-
-local filter = Cs ( (
- P("**") / ".*" +
- P("*") / "[^/]*" +
- P("?") / "[^/]" +
- P(".") / "%%." +
- P("+") / "%%+" +
- P("-") / "%%-" +
- P(1)
+local filter=Cs ((
+ P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
)^0 )
-
local function glob(str,t)
- if type(t) == "function" then
- if type(str) == "table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif isfile(str) then
- t(str)
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,t)
- end
- end
+ if type(t)=="function" then
+ if type(str)=="table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
else
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif isfile(str) then
- if t then
- t[#t+1] = str
- return t
- else
- return { str }
- end
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,action)
- return t
- else
- return { }
- end
+ local split=lpegmatch(pattern,str)
+ if split then
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str)=="table" then
+ local t=t or {}
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1]=str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split=lpegmatch(pattern,str)
+ if split then
+ local t=t or {}
+ local action=action or function(name) t[#t+1]=name end
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return {}
+ end
+ end
+ end
+end
+dir.glob=glob
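dir.glob accepts the usual * and ? wildcards plus ** for recursion, and either returns a table of matches or feeds them to a callback. A sketch along the lines of the examples dropped from the old comments:

  local t = dir.glob("texmf/**/*.tex")               -- collect recursively into a table
  dir.glob("*.lua",function(name) print(name) end)   -- or act on each match directly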
+local function globfiles(path,recurse,func,files)
+ if type(func)=="string" then
+ local s=func
+ func=function(name) return find(name,s) end
+ end
+ files=files or {}
+ local noffiles=#files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ else
+ local mode=attributes(name,'mode')
+ if mode=="directory" then
+ if recurse then
+ globfiles(path.."/"..name,recurse,func,files)
end
- end
-end
-
-dir.glob = glob
-
-
-local function globfiles(path,recurse,func,files) -- func == pattern or function
- if type(func) == "string" then
- local s = func
- func = function(name) return find(name,s) end
- end
- files = files or { }
- local noffiles = #files
- for name in walkdir(path) do
- if find(name,"^%.") then
- --- skip
- else
- local mode = attributes(name,'mode')
- if mode == "directory" then
- if recurse then
- globfiles(path .. "/" .. name,recurse,func,files)
- end
- elseif mode == "file" then
- if not func or func(name) then
- noffiles = noffiles + 1
- files[noffiles] = path .. "/" .. name
- end
- end
+ elseif mode=="file" then
+ if not func or func(name) then
+ noffiles=noffiles+1
+ files[noffiles]=path.."/"..name
end
+ end
end
- return files
+ end
+ return files
end
-
-dir.globfiles = globfiles
-
--- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
--- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
--- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
--- t = dir.glob("f:/minimal/tex/**/*")
--- print(dir.ls("f:/minimal/tex/**/*"))
--- print(dir.ls("*.tex"))
-
+dir.globfiles=globfiles
function dir.ls(pattern)
- return concat(glob(pattern),"\n")
+ return concat(glob(pattern),"\n")
end
-
-
-local make_indeed = true -- false
-
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
-
+local make_indeed=true
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if onwindows then
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
+ end
+ local first,middle,last
+ local drive=false
+ first,middle,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ else
+ first,last=match(str,"^(//)/*(.-)$")
+ if first then
+ middle,last=match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth="//"..middle
+ else
+ pth="//"..last
+ last=""
end
- local first, middle, last
- local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
+ else
+ first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
if first then
- -- empty network path == local path
+ pth,drive=first..middle,true
else
- first, last = match(str,"^(//)/*(.-)$")
- if first then
- middle, last = match(str,"([^/]+)/+(.-)$")
- if middle then
- pth = "//" .. middle
- else
- pth = "//" .. last
- last = ""
- end
- else
- first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth, drive = first .. middle, true
- else
- middle, last = match(str,"^(/*)(.-)$")
- if not middle then
- last = str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth == "" then
- pth = s
- elseif drive then
- pth, drive = pth .. s, false
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (isdir(pth) == true)
- end
-
-
+ middle,last=match(str,"^(/*)(.-)$")
+ if not middle then
+ last=str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth=="" then
+ pth=s
+ elseif drive then
+ pth,drive=pth..s,false
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
else
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- str = gsub(str,"/+","/")
- if find(str,"^/") then
- pth = "/"
- for s in gmatch(str,"[^/]+") do
- local first = (pth == "/")
- if first then
- pth = pth .. s
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
else
- pth = "."
- for s in gmatch(str,"[^/]+") do
- pth = pth .. "/" .. s
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ str=s
end
- return pth, (isdir(pth) == true)
+ end
end
-
-
-end
-
-dir.makedirs = dir.mkdirs
-
--- we can only define it here as it uses dir.current
-
-if onwindows then
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ str=gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth="/"
+ for s in gmatch(str,"[^/]+") do
+ local first=(pth=="/")
if first then
- first = dir.current() .. "/"
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = currentdir()
- if lfs.chdir(first) then
- first = dir.current()
- end
- lfs.chdir(d)
- end
+ pth=pth..s
+ else
+ pth=pth.."/"..s
end
- if not first then
- first, last = dir.current(), str
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" or last == "." then
- return first
- else
- return first .. "/" .. last
+ end
+ else
+ pth="."
+ for s in gmatch(str,"[^/]+") do
+ pth=pth.."/"..s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
end
+ end
end
-
+ return pth,(isdir(pth)==true)
+ end
+end
+dir.makedirs=dir.mkdirs
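dir.mkdirs now walks its arguments with select() instead of building a table first; it returns the combined path plus a boolean telling whether that path ends up being a directory. A hedged sketch:

  local pth, ok = dir.mkdirs("build","cache","fonts")  -- creates build, build/cache, build/cache/fonts as needed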
+if onwindows then
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=dir.current().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first,last=dir.current(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
+ end
+ end
else
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = currentdir() .. "/" .. str
- end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- str = gsub(str,"(.)/%.$","%1")
- return str
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
end
-
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
+ end
end
-
-file.expandname = dir.expandname -- for convenience
-
-local stack = { }
-
+file.expandname=dir.expandname
+local stack={}
function dir.push(newdir)
- insert(stack,lfs.currentdir())
+ insert(stack,currentdir())
+ if newdir and newdir~="" then
+ chdir(newdir)
+ end
end
-
function dir.pop()
- local d = remove(stack)
- if d then
- lfs.chdir(d)
- end
- return d
+ local d=remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
end
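dir.push now actually changes into the given directory (the old version only recorded the current one), and dir.pop restores it. A sketch:

  dir.push("doc/context")  -- remember where we are, then chdir into doc/context
  -- ... work with relative paths ...
  dir.pop()                -- chdir back to the remembered directory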
@@ -3720,55 +3694,71 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
+-- original size: 1781, stripped down to: 1503
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
+ if b then return 1 else return 0 end
end
-
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- else
- return str
- end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-string.toboolean = toboolean
-
function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
end
- return default
+ end
+ return default
end
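toboolean is now written as an explicit cascade and string.booleanstring is new; the tolerant flag still widens what counts as true. A sketch of expected results:

  toboolean("true")              -- true
  toboolean("yes")               -- false (only recognized with the tolerant flag)
  toboolean("yes",true)          -- true
  string.booleanstring("0")      -- false
  string.is_boolean("off")       -- false
  string.is_boolean("maybe",nil) -- nil (the default is returned for unknown strings)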
@@ -3776,360 +3766,536 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-unicode'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not unicode then
-
- unicode = { utf8 = { } }
-
- local floor, char = math.floor, string.char
-
- function unicode.utf8.utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
- end
-
-end
-
-local unicode = unicode
-
-utf = utf or unicode.utf8
-
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
-
-local utfsplitlines = string.utfsplitlines
+package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- 0 EF BB BF UTF-8
--- 1 FF FE UTF-16-little-endian
--- 2 FE FF UTF-16-big-endian
--- 3 FF FE 00 00 UTF-32-little-endian
--- 4 00 00 FE FF UTF-32-big-endian
+-- original size: 26810, stripped down to: 11943
-unicode.utfname = {
- [0] = 'utf-8',
- [1] = 'utf-16-le',
- [2] = 'utf-16-be',
- [3] = 'utf-32-le',
- [4] = 'utf-32-be'
+if not modules then modules={} end modules ['l-unicode']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
-
-function unicode.utftype(f)
- local str = f:read(4)
- if not str then
- f:seek('set')
- return 0
- -- elseif find(str,"^%z%z\254\255") then -- depricated
- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
- return 4
- -- elseif find(str,"^\255\254%z%z") then -- depricated
- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
- return 3
- elseif find(str,"^\254\255") then
- f:seek('set',2)
- return 2
- elseif find(str,"^\255\254") then
- f:seek('set',2)
- return 1
- elseif find(str,"^\239\187\191") then
- f:seek('set',3)
- return 0
+utf=utf or (unicode and unicode.utf8) or {}
+utf.characters=utf.characters or string.utfcharacters
+utf.values=utf.values or string.utfvalues
+local type=type
+local char,byte,format,sub=string.char,string.byte,string.format,string.sub
+local concat=table.concat
+local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local bytepairs=string.bytepairs
+local finder=lpeg.finder
+local replacer=lpeg.replacer
+local utfvalues=utf.values
+local utfgmatch=utf.gmatch
+local p_utftype=patterns.utftype
+local p_utfoffset=patterns.utfoffset
+local p_utf8char=patterns.utf8char
+local p_utf8byte=patterns.utf8byte
+local p_utfbom=patterns.utfbom
+local p_newline=patterns.newline
+local p_whitespace=patterns.whitespace
+if not unicode then
+ unicode={ utf=utf }
+end
+if not utf.char then
+ local floor,char=math.floor,string.char
+ function utf.char(n)
+ if n<0x80 then
+ return char(n)
+ elseif n<0x800 then
+ return char(
+ 0xC0+floor(n/0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x10000 then
+ return char(
+ 0xE0+floor(n/0x1000),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x200000 then
+ return char(
+ 0xF0+floor(n/0x40000),
+ 0x80+(floor(n/0x1000)%0x40),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
else
- f:seek('set')
- return 0
+ return ""
+ end
+ end
+end
+if not utf.byte then
+ local utf8byte=patterns.utf8byte
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+end
+local utfchar,utfbyte=utf.char,utf.byte
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+local toentities=Cs (
+ (
+ patterns.utf8one+(
+ patterns.utf8two+patterns.utf8three+patterns.utf8four
+ )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+patterns.toentities=toentities
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+local one=P(1)
+local two=C(1)*C(1)
+local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
+local pattern=P("\254\255")*Cs((
+ four/function(a,b,c,d)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(a,b)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )+P("\255\254")*Cs((
+ four/function(b,a,d,c)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(b,a)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s
+end
+local validatedutf=Cs (
+ (
+ patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
+ )^0
+)
+patterns.validatedutf=validatedutf
+function utf.is_valid(str)
+ return type(str)=="string" and lpegmatch(validatedutf,str) or false
+end
+if not utf.len then
+ local n,f=0,1
+ local utfcharcounter=patterns.utfbom^-1*Cmt (
+ Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
+ function(_,t,d)
+ n=n+(t-f)/d
+ f=t
+ return true
+ end
+ )^0
+ function utf.len(str)
+ n,f=0,1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+end
+utf.length=utf.len
+if not utf.sub then
+ local utflength=utf.length
+ local b,e,n,first,last=0,0,0,0,0
+ local function slide_zero(s,p)
+ n=n+1
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_one(s,p)
+ n=n+1
+ if n==first then
+ b=p
end
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_two(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ else
+ return true
+ end
+ end
+ local pattern_zero=Cmt(p_utf8char,slide_zero)^0
+ local pattern_one=Cmt(p_utf8char,slide_one )^0
+ local pattern_two=Cmt(p_utf8char,slide_two )^0
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start==0 then
+ start=1
+ end
+ if not stop then
+ if start<0 then
+ local l=utflength(str)
+ start=l+start
+ else
+ start=start-1
+ end
+ b,n,first=0,0,start
+ lpegmatch(pattern_two,str)
+ if n>=first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start<0 or stop<0 then
+ local l=utf.length(str)
+ if start<0 then
+ start=l+start
+ if start<=0 then
+ start=1
+ else
+ start=start+1
+ end
+ end
+ if stop<0 then
+ stop=l+stop
+ if stop==0 then
+ stop=1
+ else
+ stop=stop+1
+ end
+ end
+ end
+ if start>stop then
+ return ""
+ elseif start>1 then
+ b,e,n,first,last=0,0,0,start-1,stop
+ lpegmatch(pattern_one,str)
+ if n>=first and e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ else
+ b,e,n,last=1,0,0,stop
+ lpegmatch(pattern_zero,str)
+ if e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ end
+ end
+end
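The fallback utf.len and utf.sub work on characters rather than bytes, using the lpeg character patterns above. A hedged sketch (the string below consists of two-byte characters):

  utf.len("åäö")      -- 3
  utf.sub("åäö",2)    -- "äö"
  utf.sub("åäö",1,2)  -- "åä"
  utf.sub("åäö",-1)   -- "ö"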
+function utf.remapper(mapping)
+ local pattern=Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+end
+function utf.replacer(t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+function utf.subtituter(t)
+ local f=finder (t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ local i=lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i>#str then
+ return str
+ else
+ return lpegmatch(r,str)
+ end
+ end
+end
+local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
+local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
+patterns.utflinesplitter=utflinesplitter
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+function utf.split(str,ignorewhitespace)
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+function utf.totable(str)
+ return lpegmatch(utfcharsplitter_raw,str)
+end
+function utf.magic(f)
+ local str=f:read(4) or ""
+ local off=lpegmatch(p_utfoffset,str)
+ if off<4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
end
-
-
-
local function utf16_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*left+right
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf16_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*right+left
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
- else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*256*256*a+256*256*b
+ else
+ r=r+1
+ result[t]=utfchar(more+256*a+b)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
- r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*b+a
+ else
+ r=r+1
+ result[t]=utfchar(more+256*256*256*b+256*256*a)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
-unicode.utf32_to_utf8_be = utf32_to_utf8_be
-unicode.utf32_to_utf8_le = utf32_to_utf8_le
-unicode.utf16_to_utf8_be = utf16_to_utf8_be
-unicode.utf16_to_utf8_le = utf16_to_utf8_le
-
-function unicode.utf8_to_utf8(t)
- return type(t) == "string" and utfsplitlines(t) or t
+utf.utf32_to_utf8_be=utf32_to_utf8_be
+utf.utf32_to_utf8_le=utf32_to_utf8_le
+utf.utf16_to_utf8_be=utf16_to_utf8_be
+utf.utf16_to_utf8_le=utf16_to_utf8_le
+function utf.utf8_to_utf8(t)
+ return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-
-function unicode.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
end
-
-function unicode.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
end
-
local function little(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b%256,b/256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b%256,b/256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
end
-
local function big(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b/256,b%256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b/256,b%256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+local _,l_remap=utf.remapper(little)
+local _,b_remap=utf.remapper(big)
+function utf.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254)..lpegmatch(l_remap,str)
+ else
+ return char(254,255)..lpegmatch(b_remap,str)
+ end
+end
+local pattern=Cs (
+ (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
end
-
-function unicode.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+function utf.ustring(s)
+ return format("U+%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.xstring(s)
+ return format("0x%05X",type(s)=="number" and s or utfbyte(s))
+end
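The reporting helpers format code points rather than bytes. A sketch of what the patterns above should yield:

  utf.tocodes("abc")      -- "0x0061 0x0062 0x0063"
  utf.tocodes("abc","+")  -- "0x0061+0x0062+0x0063"
  utf.ustring("A")        -- "U+00041"
  utf.xstring(0x1D49C)    -- "0x1D49C"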
+local p_nany=p_utf8char/""
+if utfgmatch then
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local n=0
+ for _ in utfgmatch(str,what) do
+ n=n+1
+ end
+ return n
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+else
+ local cache={}
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local p=cache[what]
+ if not p then
+ p=Cs((P(what)/" "+p_nany)^0)
+ cache[p]=p
+ end
+ return #lpegmatch(p,str)
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+end
+if not utf.characters then
+ function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+ string.utfcharacters=utf.characters
+end
+if not utf.values then
+ local find=string.find
+ local dummy=function()
+ end
+ function utf.values(str)
+ local n=#str
+ if n==0 then
+ return dummy
+ elseif n==1 then
+ return function() return utfbyte(str) end
else
- return char(254,255) .. utfgsub(str,".",big)
+ local p=1
+ return function()
+ local b,e=find(str,".[\128-\191]*",p)
+ if b then
+ p=e+1
+ return utfbyte(sub(str,b,e))
+ end
+ end
end
+ end
+ string.utfvalues=utf.values
end
-function unicode.utfcodes(str)
- local t, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- t[n] = format("0x%04X",u)
- end
- return concat(t,separator or " ")
-end
-function unicode.ustring(s)
- return format("U+%05X",type(s) == "number" and s or utfbyte(s))
-end
+end -- of closure
-function unicode.xstring(s)
- return format("0x%05X",type(s) == "number" and s or utfbyte(s))
-end
+do -- create closure to overcome 200 locals limit
+package.loaded["l-math"] = package.loaded["l-math"] or true
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
+-- original size: 915, stripped down to: 836
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
+if not math.mod then
+ function math.mod(n,m) return n%m end
end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
end
@@ -4137,213 +4303,852 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-str"] = package.loaded["util-str"] or true
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
-end
+-- original size: 22834, stripped down to: 12570
-if not math.div then
- function math.div(n,m) return floor(n/m) end
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
+local function use(t,fmt,...)
+ return t[fmt](...)
end
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-tab'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-tab"] = package.loaded["util-tab"] or true
-utilities = utilities or {}
-utilities.tables = utilities.tables or { }
-local tables = utilities.tables
-
-local format, gmatch, rep = string.format, string.gmatch, string.rep
-local concat, insert, remove = table.concat, table.insert, table.remove
-local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-
-function tables.definetable(target) -- defines undefined tables
- local composed, t, n = nil, { }, 0
- for name in gmatch(target,"([^%.]+)") do
- n = n + 1
- if composed then
- composed = composed .. "." .. name
- else
- composed = name
- end
- t[n] = format("%s = %s or { }",composed,composed)
- end
- return concat(t,"\n")
-end
+-- original size: 14491, stripped down to: 8512
+if not modules then modules={} end modules ['util-tab']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.tables=utilities.tables or {}
+local tables=utilities.tables
+local format,gmatch,gsub=string.format,string.gmatch,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
+local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
+local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
+local serialize,sortedkeys,sortedpairs=table.serialize,table.sortedkeys,table.sortedpairs
+local formatters=string.formatters
+local splitter=lpeg.tsplitat(".")
+function tables.definetable(target,nofirst,nolast)
+ local composed,shortcut,t=nil,nil,{}
+ local snippets=lpegmatch(splitter,target)
+ for i=1,#snippets-(nolast and 1 or 0) do
+ local name=snippets[i]
+ if composed then
+ composed=shortcut.."."..name
+ shortcut=shortcut.."_"..name
+ t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ else
+ composed=name
+ shortcut=name
+ if not nofirst then
+ t[#t+1]=formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed=shortcut.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+end
+function tables.definedtable(...)
+ local t=_G
+ for i=1,select("#",...) do
+ local li=select(i,...)
+ local tl=t[li]
+ if not tl then
+ tl={}
+ t[li]=tl
+ end
+ t=tl
+ end
+ return t
+end
function tables.accesstable(target,root)
- local t = root or _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t=t[name]
+ if not t then
+ return
end
- return t
+ end
+ return t
end
-
function tables.migratetable(target,v,root)
- local t = root or _G
- local names = string.split(target,".")
- for i=1,#names-1 do
- local name = names[i]
- t[name] = t[name] or { }
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ local names=string.split(target,".")
+ for i=1,#names-1 do
+ local name=names[i]
+ t[name]=t[name] or {}
+ t=t[name]
+ if not t then
+ return
end
- t[names[#names]] = v
+ end
+ t[names[#names]]=v
end
-
-function tables.removevalue(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
+function tables.removevalue(t,value)
+ if value then
+ for i=1,#t do
+ if t[i]==value then
+ remove(t,i)
+ end
end
+ end
end
-
function tables.insertbeforevalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i,extra)
+ return
end
- insert(t,1,extra)
+ end
+ insert(t,1,extra)
end
-
function tables.insertaftervalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
+local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
+function table.tocsv(t,specification)
+ if t and #t>0 then
+ local result={}
+ local r={}
+ specification=specification or {}
+ local fields=specification.fields
+ if type(fields)~="string" then
+ fields=sortedkeys(t[1])
+ end
+ local separator=specification.separator or ","
+ if specification.preamble==true then
+ for f=1,#fields do
+ r[f]=lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1]=concat(r,separator)
end
for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-
--- experimental
-
-local function toxml(t,d,result,step)
- for k, v in table.sortedpairs(t) do
- if type(v) == "table" then
- if type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</entry>",d,k)
- else
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</%s>",d,k)
- end
- elseif type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
+ local ti=t[i]
+ for f=1,#fields do
+ local field=ti[fields[f]]
+ if type(field)=="string" then
+ r[f]=lpegmatch(escape,field)
else
- result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
+ r[f]=tostring(field)
end
+ end
+ result[#result+1]=concat(r,separator)
end
+ return concat(result,"\n")
+ else
+ return ""
+ end
end
-
-function table.toxml(t,name,nobanner,indent,spaces)
- local noroot = name == false
- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
- local indent = rep(" ",indent or 0)
- local spaces = rep(" ",spaces or 1)
- if noroot then
- toxml( t, inndent, result, spaces)
+local nspaces=utilities.strings.newrepeater(" ")
+local function toxml(t,d,result,step)
+ for k,v in sortedpairs(t) do
+ local s=nspaces[d]
+ local tk=type(k)
+ local tv=type(v)
+ if tv=="table" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</entry>"](s,k)
+ else
+ result[#result+1]=formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</%s>"](s,k)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
else
- toxml( { [name or "root"] = t }, indent, result, spaces)
- end
- return concat(result,"\n")
+ result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+function table.toxml(t,specification)
+ specification=specification or {}
+ local name=specification.name
+ local noroot=name==false
+ local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent=specification.indent or 0
+ local spaces=specification.spaces or 1
+ if noroot then
+ toxml(t,indent,result,spaces)
+ else
+ toxml({ [name or "data"]=t },indent,result,spaces)
+ end
+ return concat(result,"\n")
end
-
--- also experimental
-
--- encapsulate(table,utilities.tables)
--- encapsulate(table,utilities.tables,true)
--- encapsulate(table,true)
-
function tables.encapsulate(core,capsule,protect)
- if type(capsule) ~= "table" then
- protect = true
- capsule = { }
+ if type(capsule)~="table" then
+ protect=true
+ capsule={}
+ end
+ for key,value in next,core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key]=value
+ end
+ end
+ if protect then
+ for key,value in next,core do
+ core[key]=nil
end
- for key, value in next, core do
+ setmetatable(core,{
+ __index=capsule,
+ __newindex=function(t,key,value)
if capsule[key] then
- print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
- os.exit()
+     print(formatters["\ninvalid %s %a in %a"]("overload",key,core))
+ os.exit()
else
- capsule[key] = value
- end
- end
- if protect then
- for key, value in next, core do
- core[key] = nil
- end
- setmetatable(core, {
- __index = capsule,
- __newindex = function(t,key,value)
- if capsule[key] then
- print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
- os.exit()
- else
- rawset(t,key,value)
- end
- end
- } )
- end
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+local function fastserialize(t,r,outer)
+ r[#r+1]="{"
+ local n=#t
+ if n>0 then
+ for i=1,n do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["%q,"](v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["%s,"](v)
+ elseif tv=="table" then
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["%S,"](v)
+ end
+ end
+ else
+ for k,v in next,t do
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["[%q]=%q,"](k,v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["[%q]=%s,"](k,v)
+ elseif tv=="table" then
+ r[#r+1]=formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1]="}"
+ else
+ r[#r+1]="},"
+ end
+ return r
+end
+function table.fastserialize(t,prefix)
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+function table.deserialize(str)
+ if not str or str=="" then
+ return
+ end
+ local code=load(str)
+ if not code then
+ return
+ end
+ code=code()
+ if not code then
+ return
+ end
+ return code
+end
+function table.load(filename)
+ if filename then
+ local t=io.loaddata(filename)
+ if t and t~="" then
+ t=load(t)
+ if type(t)=="function" then
+ t=t()
+ if type(t)=="table" then
+ return t
+ end
+ end
+ end
+ end
+end
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n==nil and true or n,...))
+end
+local function slowdrop(t)
+ local r={}
+ local l={}
+ for i=1,#t do
+ local ti=t[i]
+ local j=0
+ for k,v in next,ti do
+ j=j+1
+ l[j]=formatters["%s=%q"](k,v)
+ end
+ r[i]=formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+local function fastdrop(t)
+ local r={ "return {\n" }
+ for i=1,#t do
+ local ti=t[i]
+ r[#r+1]=" {"
+ for k,v in next,ti do
+ r[#r+1]=formatters["%s=%q"](k,v)
+ end
+ r[#r+1]="},\n"
+ end
+ r[#r+1]="}"
+ return concat(r)
+end
+function table.drop(t,slow)
+ if #t==0 then
+ return "return { }"
+ elseif slow==true then
+ return slowdrop(t)
+ else
+ return fastdrop(t)
+ end
+end
+function table.autokey(t,k)
+ local v={}
+ t[k]=v
+ return v
+end
+local selfmapper={ __index=function(t,k) t[k]=k return k end }
+function table.twowaymapper(t)
+ if not t then
+ t={}
+ else
+ for i=0,#t do
+ local ti=t[i]
+ if ti then
+ local i=tostring(i)
+ t[i]=ti
+ t[ti]=i
+ end
+ end
+ t[""]=t[0] or ""
+ end
+ setmetatable(t,selfmapper)
+ return t
end
@@ -4351,297 +5156,155 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-sto'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatable, getmetatable = setmetatable, getmetatable
+package.loaded["util-sto"] = package.loaded["util-sto"] or true
-utilities = utilities or { }
-utilities.storage = utilities.storage or { }
-local storage = utilities.storage
+-- original size: 4432, stripped down to: 3123
+if not modules then modules={} end modules ['util-sto']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local setmetatable,getmetatable,type=setmetatable,getmetatable,type
+utilities=utilities or {}
+utilities.storage=utilities.storage or {}
+local storage=utilities.storage
function storage.mark(t)
- if not t then
- texio.write_nl("fatal error: storage cannot be marked")
- return -- os.exit()
- end
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.allocate(t)
- t = t or { }
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ t=t or {}
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.marked(t)
- local m = getmetatable(t)
- return m and m.__storage__
+ local m=getmetatable(t)
+ return m and m.__storage__
end
-
function storage.checked(t)
- if not t then
- texio.write_nl("fatal error: storage has not been allocated")
- return -- os.exit()
- end
- return t
+ if not t then
+  print("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
end
-
-
function storage.setinitializer(data,initialize)
- local m = getmetatable(data) or { }
- m.__index = function(data,k)
- m.__index = nil -- so that we can access the entries during initializing
- initialize()
- return data[k]
- end
- setmetatable(data, m)
-end
-
-local keyisvalue = { __index = function(t,k)
- t[k] = k
- return k
+ local m=getmetatable(data) or {}
+ m.__index=function(data,k)
+ m.__index=nil
+ initialize()
+ return data[k]
+ end
+ setmetatable(data,m)
+end
+local keyisvalue={ __index=function(t,k)
+ t[k]=k
+ return k
end }
-
function storage.sparse(t)
- t = t or { }
- setmetatable(t,keyisvalue)
- return t
-end
-
--- table namespace ?
-
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
-
-local t_empty = { __index = f_empty }
-local t_self = { __index = f_self }
-local t_ignore = { __newindex = f_ignore }
-
+ t=t or {}
+ setmetatable(t,keyisvalue)
+ return t
+end
+local function f_empty () return "" end
+local function f_self (t,k) t[k]=k return k end
+local function f_table (t,k) local v={} t[k]=v return v end
+local function f_ignore() end
+local t_empty={ __index=f_empty }
+local t_self={ __index=f_self }
+local t_table={ __index=f_table }
+local t_ignore={ __newindex=f_ignore }
function table.setmetatableindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "empty" then
- m.__index = f_empty
- elseif f == "key" then
- m.__index = f_self
- else
- m.__index = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="empty" then
+ m.__index=f_empty
+ elseif f=="key" then
+ m.__index=f_self
+ elseif f=="table" then
+ m.__index=f_table
else
- if f == "empty" then
- setmetatable(t, t_empty)
- elseif f == "key" then
- setmetatable(t, t_self)
- else
- setmetatable(t,{ __index = f })
- end
+ m.__index=f
+ end
+ else
+ if f=="empty" then
+ setmetatable(t,t_empty)
+ elseif f=="key" then
+ setmetatable(t,t_self)
+ elseif f=="table" then
+ setmetatable(t,t_table)
+ else
+ setmetatable(t,{ __index=f })
end
- return t
+ end
+ return t
end
-
function table.setmetatablenewindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "ignore" then
- m.__newindex = f_ignore
- else
- m.__newindex = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="ignore" then
+ m.__newindex=f_ignore
else
- if f == "ignore" then
- setmetatable(t, t_ignore)
- else
- setmetatable(t,{ __newindex = f })
- end
+ m.__newindex=f
end
- return t
-end
-
-function table.setmetatablecall(t,f)
- local m = getmetatable(t)
- if m then
- m.__call = f
+ else
+ if f=="ignore" then
+ setmetatable(t,t_ignore)
else
- setmetatable(t,{ __call = f })
+ setmetatable(t,{ __newindex=f })
end
- return t
+ end
+ return t
+end
+function table.setmetatablecall(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__call=f
+ else
+ setmetatable(t,{ __call=f })
+ end
+ return t
end
-
function table.setmetatablekey(t,key,value)
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m[key] = value
- return t
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m[key]=value
+ return t
end
-
function table.getmetatablekey(t,key,value)
- local m = getmetatable(t)
- return m and m[key]
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-mrg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- hm, quite unreadable
-
-local gsub, format = string.gsub, string.format
-local concat = table.concat
-local type, next = type, next
-
-utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
-utilities.report = logs and logs.reporter("system") or print
-
-local merger = utilities.merger
-
-merger.strip_comment = true
-
-local m_begin_merge = "begin library merge"
-local m_end_merge = "end library merge"
-local m_begin_closure = "do -- create closure to overcome 200 locals limit"
-local m_end_closure = "end -- of closure"
-
-local m_pattern =
- "%c+" ..
- "%-%-%s+" .. m_begin_merge ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. m_end_merge ..
- "%c+"
-
-local m_format =
- "\n\n-- " .. m_begin_merge ..
- "\n%s\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local m_faked =
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. m_begin_merge .. "\n\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local function self_fake()
- return m_faked
-end
-
-local function self_nothing()
- return ""
-end
-
-local function self_load(name)
- local data = io.loaddata(name) or ""
- if data == "" then
- utilities.report("merge: unknown file %s",name)
- else
- utilities.report("merge: inserting %s",name)
- end
- return data or ""
-end
-
-local function self_save(name, data)
- if data ~= "" then
- if merger.strip_comment then
- -- saves some 20K
- local n = #data
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
- utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
- end
- io.savedata(name,data)
- utilities.report("merge: saving %s",name)
- end
-end
-
-local function self_swap(data,code)
- return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
-end
-
-local function self_libs(libs,list)
- local result, f, frozen, foundpath = { }, nil, false, nil
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- utilities.report("merge: checking library path %s",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- utilities.report("merge: using library path %s",foundpath)
- local right, wrong = { }, { }
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- utilities.report("merge: using library %s",fullname)
- right[#right+1] = lib
- result[#result+1] = m_begin_closure
- result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = m_end_closure
- else
- utilities.report("merge: skipping library %s",fullname)
- wrong[#wrong+1] = lib
- end
- end
- if #right > 0 then
- utilities.report("merge: used libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utilities.report("merge: skipped libraries: %s",concat(wrong," "))
- end
- else
- utilities.report("merge: no valid library path found")
- end
- return concat(result, "\n\n")
-end
-
-function merger.selfcreate(libs,list,target)
- if target then
- self_save(target,self_swap(self_fake(),self_libs(libs,list)))
- end
-end
-
-function merger.selfmerge(name,libs,list,target)
- self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
-end
-
-function merger.selfclean(name)
- self_save(name,self_swap(self_load(name),self_nothing()))
+ local m=getmetatable(t)
+ return m and m[key]
end
@@ -4649,529 +5312,418 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
-
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
- end
- end
-end
-
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- utilities.report("lua: compiling %s into %s",luafile,lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
- if strip ~= false then
- command = "-s " .. command
- end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
- if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
- end
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- utilities.report("lua: removing %s",luafile)
- os.remove(luafile)
- end
- return done
-end
-
-
-
-
-
-
+package.loaded["util-prs"] = package.loaded["util-prs"] or true
+-- original size: 16976, stripped down to: 12143
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-prs'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['util-prs']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
-local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
-local tostring, type, next = tostring, type, next
-
-utilities = utilities or {}
-utilities.parsers = utilities.parsers or { }
-local parsers = utilities.parsers
-parsers.patterns = parsers.patterns or { }
-
-local setmetatableindex = table.setmetatableindex
-local sortedhash = table.sortedhash
-
--- we could use a Cf Cg construct
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-lpeg.patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
+local lpeg,table,string=lpeg,table,string
+local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local concat,format,gmatch,find=table.concat,string.format,string.gmatch,string.find
+local tostring,type,next,rawset=tostring,type,next,rawset
+utilities=utilities or {}
+local parsers=utilities.parsers or {}
+utilities.parsers=parsers
+local patterns=parsers.patterns or {}
+parsers.patterns=patterns
+local setmetatableindex=table.setmetatableindex
+local sortedhash=table.sortedhash
+local digit=R("09")
+local space=P(' ')
+local equal=P("=")
+local comma=P(",")
+local lbrace=P("{")
+local rbrace=P("}")
+local lparent=P("(")
+local rparent=P(")")
+local period=S(".")
+local punctuation=S(".,:;")
+local spacer=lpegpatterns.spacer
+local whitespace=lpegpatterns.whitespace
+local newline=lpegpatterns.newline
+local anything=lpegpatterns.anything
+local endofstring=lpegpatterns.endofstring
+local nobrace=1-(lbrace+rbrace )
+local noparent=1-(lparent+rparent)
+local escape,left,right=P("\\"),P('{'),P('}')
+lpegpatterns.balanced=P {
+ [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
+ [2]=left*V(1)*right
}
-
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
-local spaces = space^0
-local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
-
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
-
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-
-local key = C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-
-local key = C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
-
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-
-local hash = { }
-
-local function set(key,value)
- hash[key] = value
-end
-
+local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
+local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
+local spaces=space^0
+local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
+local content=(1-endofstring)^0
+lpegpatterns.nestedbraces=nestedbraces
+lpegpatterns.nestedparents=nestedparents
+lpegpatterns.nested=nestedbraces
+lpegpatterns.argument=argument
+lpegpatterns.content=content
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local key=C((1-equal-comma)^1)
+local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
+local pattern_c=(space+comma)^0*(key*equal*value)
+local key=C((1-space-equal-comma)^1)
+local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
+local hash={}
local function set(key,value)
- hash[key] = value
-end
-
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
-
-parsers.patterns.settings_to_hash_a = pattern_a_s
-parsers.patterns.settings_to_hash_b = pattern_b_s
-parsers.patterns.settings_to_hash_c = pattern_c_s
-
+ hash[key]=value
+end
+local pattern_a_s=(pattern_a/set)^1
+local pattern_b_s=(pattern_b/set)^1
+local pattern_c_s=(pattern_c/set)^1
+patterns.settings_to_hash_a=pattern_a_s
+patterns.settings_to_hash_b=pattern_b_s
+patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
+ if how=="strict" then
+ return (pattern_c/set)^1
+ elseif how=="tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
end
-
function parsers.settings_to_hash(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_a_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return {}
+ end
end
-
function parsers.settings_to_hash_tolerant(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return {}
+ end
end
-
function parsers.settings_to_hash_strict(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
end
-
-local separator = comma * space^0
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-parsers.patterns.settings_to_array = pattern
-
--- we could use a weak table as cache
-
+local separator=comma*space^0
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local pattern=spaces*Ct(value*(separator*value)^0)
+patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if not str or str == "" then
- return { }
- elseif strict then
- if find(str,"{") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
+ if not str or str=="" then
+ return {}
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
else
- return lpegmatch(pattern,str)
+ return { str }
end
+ else
+ return lpegmatch(pattern,str)
+ end
end
-
local function set(t,v)
- t[#t+1] = v
+ t[#t+1]=v
end
-
-local value = P(Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * Carg(1)
-
+local value=P(Carg(1)*value)/set
+local pattern=value*(separator*value)^0*Carg(1)
function parsers.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
+ return lpegmatch(pattern,str,nil,t)
end
-
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- tn = tn + 1
- t[tn] = key .. '=' .. yes
- elseif not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. no
- end
- elseif value or not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. tostring(value)
- end
- else
- tn = tn + 1
- t[tn] = key .. '=' .. value
- end
- end
+ if h then
+ local t,tn,s={},0,table.sortedkeys(h)
+ omit=omit and table.tohash(omit)
+ for i=1,#s do
+ local key=s[i]
+ if not omit or not omit[key] then
+ local value=h[key]
+ if type(value)=="boolean" then
+ if yes and no then
+ if value then
+ tn=tn+1
+ t[tn]=key..'='..yes
+ elseif not strict then
+ tn=tn+1
+ t[tn]=key..'='..no
+ end
+ elseif value or not strict then
+ tn=tn+1
+ t[tn]=key..'='..tostring(value)
+ end
+ else
+ tn=tn+1
+ t[tn]=key..'='..value
end
- return concat(t,separator or ",")
- else
- return ""
+ end
end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
end
-
function parsers.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
-end
-
-function parsers.simple_hash_to_string(h, separator)
- local t, tn = { }, 0
- for k, v in sortedhash(h) do
- if v then
- tn = tn + 1
- t[tn] = k
- end
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.settings_to_set(str,t)
+ t=t or {}
+ for s in gmatch(str,"[^, ]+") do
+ t[s]=true
+ end
+ return t
+end
+function parsers.simple_hash_to_string(h,separator)
+ local t,tn={},0
+ for k,v in sortedhash(h) do
+ if v then
+ tn=tn+1
+ t[tn]=k
end
- return concat(t,separator or ",")
+ end
+ return concat(t,separator or ",")
end
-
-local value = lbrace * C((nobrace + nested)^0) * rbrace
-local pattern = Ct((space + value)^0)
-
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
+local pattern_a=spaces*Ct(value*(separator*value)^0)
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s=lpegmatch(pattern_a,str)
+ if n==1 then
+ return unpack(s)
+ else
+ local t,tn={},0
+ for i=1,n do
+ for j=1,#s do
+ tn=tn+1
+ t[tn]=s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
+local pattern_b=spaces*Ct(value*(separator*value)^0)
+function parsers.settings_to_array_with_repeat(str,expand)
+ if expand then
+ return lpegmatch(pattern_b,str) or {}
+ else
+ return lpegmatch(pattern_a,str) or {}
+ end
+end
+local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
+local pattern=Ct((space+value)^0)
function parsers.arguments_to_table(str)
- return lpegmatch(pattern,str)
+ return lpegmatch(pattern,str)
end
-
--- temporary here (unoptimized)
-
function parsers.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = { }
- self[class] = sc
- if parentclass then
- local sp = self[parentclass]
- if not sp then
- sp = { }
- self[parentclass] = sp
- end
- setmetatableindex(sc,sp)
- end
- end
- parsers.settings_to_hash(settings,sc)
+ local sc=self[class]
+ if not sc then
+ sc={}
+ self[class]=sc
+ if parentclass then
+ local sp=self[parentclass]
+ if not sp then
+ sp={}
+ self[parentclass]=sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
end
-
function parsers.listitem(str)
- return gmatch(str,"[^, ]+")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-fmt'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ return gmatch(str,"[^, ]+")
+end
+local pattern=Cs { "start",
+ start=V("one")+V("two")+V("three"),
+ rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
+ thousand=digit*digit*digit,
+ one=digit*V("rest"),
+ two=digit*digit*V("rest"),
+ three=V("thousand")*V("rest"),
}
-
-utilities = utilities or { }
-utilities.formatters = utilities.formatters or { }
-local formatters = utilities.formatters
-
-local concat, format = table.concat, string.format
-local tostring, type = tostring, type
-local strip = string.strip
-
-local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
-local lpegmatch = lpeg.match
-
--- temporary here
-
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = Cs((number + 1)^0)
-
-
-lpeg.patterns.stripzeros = stripper
-
-function formatters.stripzeros(str)
- return lpegmatch(stripper,str)
-end
-
-function formatters.formatcolumns(result,between)
- if result and #result > 0 then
- between = between or " "
- local widths, numbers = { }, { }
- local first = result[1]
- local n = #first
- for i=1,n do
- widths[i] = 0
- end
- for i=1,#result do
- local r = result[i]
- for j=1,n do
- local rj = r[j]
- local tj = type(rj)
- if tj == "number" then
- numbers[j] = true
- end
- if tj ~= "string" then
- rj = tostring(rj)
- r[j] = rj
- end
- local w = #rj
- if w > widths[j] then
- widths[j] = w
- end
- end
- end
- for i=1,n do
- local w = widths[i]
- if numbers[i] then
- if w > 80 then
- widths[i] = "%s" .. between
- else
- widths[i] = "%0" .. w .. "i" .. between
- end
- else
- if w > 80 then
- widths[i] = "%s" .. between
- elseif w > 0 then
- widths[i] = "%-" .. w .. "s" .. between
- else
- widths[i] = "%s"
- end
- end
- end
- local template = strip(concat(widths))
- for i=1,#result do
- local str = format(template,unpack(result[i]))
- result[i] = strip(str)
- end
- end
- return result
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util.deb'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
-
-local getinfo = debug.getinfo
-local type, next, tostring = type, next, tostring
-local format, find = string.format, string.find
-local is_boolean = string.is_boolean
-
-utilities = utilities or { }
-utilities.debugger = utilities.debugger or { }
-local debugger = utilities.debugger
-
-local counters = { }
-local names = { }
-
--- one
-
-local function hook()
- local f = getinfo(2) -- "nS"
- if f then
- local n = "unknown"
- if f.what == "C" then
- n = f.name or '<anonymous>'
- if not names[n] then
- names[n] = format("%42s",n)
- end
- else
- -- source short_src linedefined what name namewhat nups func
- n = f.name or f.namewhat or f.what
- if not n or n == "" then
- n = "?"
- end
- if not names[n] then
- names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
- end
- end
- counters[n] = (counters[n] or 0) + 1
- end
-end
-
-function debugger.showstats(printer,threshold) -- hm, something has changed, rubish now
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- local dataset = { }
- for name, count in next, counters do
- dataset[#dataset+1] = { name, count }
- end
- table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
- for i=1,#dataset do
- local d = dataset[i]
- local name = d[1]
- local count = d[2]
- if count > threshold and not find(name,"for generator") then -- move up
- printer(format("%8i %s\n", count, names[name]))
- total = total + count
+lpegpatterns.splitthousands=pattern
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+local optionalwhitespace=whitespace^0
+lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
+lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
+lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
+local dquote=P('"')
+local equal=P('=')
+local escape=P('\\')
+local separator=S(' ,')
+local key=C((1-equal)^1)
+local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
+local pattern=Cf(Ct("")*Cg(key*equal*value)*separator^0,rawset)^0*P(-1)
+patterns.keq_to_hash_c=pattern
+function parsers.keq_to_hash(str)
+ if str and str~="" then
+ return lpegmatch(pattern,str)
+ else
+ return {}
+ end
+end
+local defaultspecification={ separator=",",quote='"' }
+function parsers.csvsplitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local whatever=C((1-separator-newline)^0)
+ if quotechar and quotechar~="" then
+ local quotedata=nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar=P(chr)
+ local quoteword=quotechar*C((1-quotechar)^0)*quotechar
+ if quotedata then
+ quotedata=quotedata+quoteword
+ else
+ quotedata=quoteword
+ end
+ end
+ whatever=quotedata+whatever
+ end
+ local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
+function parsers.rfc4180splitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=P(specification.quote)
+ local dquotechar=quotechar*quotechar
+/specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
+ local non_escaped=C((1-quotechar-newline-separator)^1)
+ local field=escaped+non_escaped
+ local record=Ct((field*separator^-1)^1)
+ local headerline=record*Cp()
+ local wholeblob=Ct((newline^-1*record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header,position=lpegmatch(headerline,data)
+ local data=lpegmatch(wholeblob,data,position)
+ return data,header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
+local function ranger(first,last,n,action)
+ if not first then
+ elseif last==true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+local cardinal=lpegpatterns.cardinal/tonumber
+local spacers=lpegpatterns.spacer^0
+local endofstring=lpegpatterns.endofstring
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
+function parsers.stepper(str,n,action)
+ if type(n)=="function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+patterns.unittotex=pattern
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
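A sketch of the two unit converters above (input strings are invented; lpegpatterns.integer is assumed to behave as in l-lpeg):

print(utilities.parsers.unittotex("m^2"))        -- m^{2}  (math mode)
print(utilities.parsers.unittotex("m^2", true))  -- m\high{2}
print(utilities.parsers.unittoxml("m^2"))        -- m<sup>2</sup>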
+local cache={}
+local spaces=lpeg.patterns.space^0
+local dummy=function() end
+table.setmetatableindex(cache,function(t,k)
+ local separator=P(k)
+ local value=(1-separator)^0
+ local pattern=spaces*C(value)*separator^0*Cp()
+ t[k]=pattern
+ return pattern
+end)
+local commalistiterator=cache[","]
+function utilities.parsers.iterator(str,separator)
+ local n=#str
+ if n==0 then
+ return dummy
+ else
+ local pattern=separator and cache[separator] or commalistiterator
+ local p=1
+ return function()
+ if p<=n then
+ local s,e=lpegmatch(pattern,str,p)
+ if e then
+ p=e
+ return s
end
- grandtotal = grandtotal + count
- functions = functions + 1
+ end
end
- printer("\n")
- printer(format("functions : % 10i\n", functions))
- printer(format("total : % 10i\n", total))
- printer(format("grand total: % 10i\n", grandtotal))
- printer(format("threshold : % 10i\n", threshold))
+ end
end
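A usage sketch for the iterator above (invented input; the separator defaults to a comma):

for s in utilities.parsers.iterator("a,b,c") do
  print(s)                 -- a, then b, then c
end
for s in utilities.parsers.iterator("x;y", ";") do
  print(s)                 -- x, then y
end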
-
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function initialize(t,name)
+ local source=t[name]
+ if source then
+ local result={}
+ for k,v in next,t[name] do
+ result[k]=v
end
+ return result
+ else
+ return {}
+ end
end
-
-function debugger.enable()
- debug.sethook(hook,"c")
+local function fetch(t,name)
+ return t[name] or {}
end
-
-function debugger.disable()
- debug.sethook()
+function process(result,more)
+ for k,v in next,more do
+ result[k]=v
+ end
+ return result
end
-
-
-
-
-
-local is_node = node and node.is_node
-local is_lpeg = lpeg and lpeg.type
-
-function inspect(i) -- global function
- local ti = type(i)
- if ti == "table" then
- table.print(i,"table")
- elseif is_node and is_node(i) then
- table.print(nodes.astable(i),tostring(i))
- elseif is_lpeg and is_lpeg(i) then
- lpeg.print(i)
- else
- print(tostring(i))
- end
-end
-
--- from the lua book:
-
-function traceback()
- local level = 1
- while true do
- local info = debug.getinfo(level, "Sl")
- if not info then
- break
- elseif info.what == "C" then
- print(format("%3i : C function",level))
- else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
- end
- level = level + 1
- end
+local name=C((1-S(", "))^1)
+local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
+local merge=Cf(parser,process)
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
end
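A sketch of mergehashes with a hypothetical hash and name list; the named subtables are folded left to right into a copy of the first one:

local hash   = { one = { a = 1 }, two = { b = 2 } }
local merged = utilities.parsers.mergehashes(hash, "one, two")
-- merged == { a = 1, b = 2 }; hash.one itself is not modified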
@@ -5179,199 +5731,80 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- As we want to protect the global tables, we no longer store the timing
--- in the tables themselves but in a hidden timers table so that we don't
--- get warnings about assignments. This is more efficient than using rawset
--- and rawget.
-
-local format, lower = string.format, string.lower
-local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
-
-statistics = statistics or { }
-local statistics = statistics
-
-statistics.enable = true
-statistics.threshold = 0.05
-
-local statusinfo, n, registered, timers = { }, 0, { }, { }
+package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
-local function hastiming(instance)
- return instance and timers[instance]
-end
-
-local function resettiming(instance)
- timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
-end
-
-local function starttiming(instance)
- local timer = timers[instance or "notimer"]
- if not timer then
- timer = { }
- timers[instance or "notimer"] = timer
- end
- local it = timer.timing
- if not it then
- it = 0
- end
- if it == 0 then
- timer.starttime = clock()
- if not timer.loadtime then
- timer.loadtime = 0
- end
- end
- timer.timing = it + 1
-end
-
-local function stoptiming(instance, report)
- local timer = timers[instance or "notimer"]
- local it = timer.timing
- if it > 1 then
- timer.timing = it - 1
- else
- local starttime = timer.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- timer.stoptime = stoptime
- timer.loadtime = timer.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- timer.timing = 0
- return loadtime
- end
- end
- return 0
-end
-
-local function elapsedtime(instance)
- local timer = timers[instance or "notimer"]
- return format("%0.3f",timer and timer.loadtime or 0)
-end
-
-local function elapsedindeed(instance)
- local timer = timers[instance or "notimer"]
- return (timer and timer.loadtime or 0) > statistics.threshold
-end
-
-local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if elapsedindeed(instance) then
- return format("%s seconds %s", elapsedtime(instance),rest or "")
- end
-end
+-- original size: 2274, stripped down to: 1781
-statistics.hastiming = hastiming
-statistics.resettiming = resettiming
-statistics.starttiming = starttiming
-statistics.stoptiming = stoptiming
-statistics.elapsedtime = elapsedtime
-statistics.elapsedindeed = elapsedindeed
-statistics.elapsedseconds = elapsedseconds
-
--- general function
-
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+if not modules then modules={} end modules ['util-fmt']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.formatters=utilities.formatters or {}
+local formatters=utilities.formatters
+local concat,format=table.concat,string.format
+local tostring,type=tostring,type
+local strip=string.strip
+local lpegmatch=lpeg.match
+local stripper=lpeg.patterns.stripzeros
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
end
-
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
- end)
- collectgarbage("collect")
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
- end
+function formatters.formatcolumns(result,between)
+ if result and #result>0 then
+ between=between or " "
+ local widths,numbers={},{}
+ local first=result[1]
+ local n=#first
+ for i=1,n do
+ widths[i]=0
+ end
+ for i=1,#result do
+ local r=result[i]
+ for j=1,n do
+ local rj=r[j]
+ local tj=type(rj)
+ if tj=="number" then
+ numbers[j]=true
+ end
+ if tj~="string" then
+ rj=tostring(rj)
+ r[j]=rj
+ end
+ local w=#rj
+ if w>widths[j] then
+ widths[j]=w
+ end
+ end
+ end
+ for i=1,n do
+ local w=widths[i]
+ if numbers[i] then
+ if w>80 then
+ widths[i]="%s"..between
+ else
+ widths[i]="%0"..w.."i"..between
+ end
+ else
+ if w>80 then
+ widths[i]="%s"..between
+ elseif w>0 then
+ widths[i]="%-"..w.."s"..between
+ else
+ widths[i]="%s"
end
- write_nl("") -- final newline
- statistics.enable = false
+ end
end
-end
-
-local template, report_statistics, nn = nil, nil, 0 -- we only calcute it once
-
-function statistics.showjobstat(tag,data,n)
- if not logs then
- -- sorry
- elseif type(data) == "table" then
- for i=1,#data do
- statistics.showjobstat(tag,data[i],n)
- end
- else
- if not template or n > nn then
- template, n = format("%%-%ss - %%s",n), nn
- report_statistics = logs.reporter("mkiv lua stats")
- end
- report_statistics(format(template,tag,data))
+ local template=strip(concat(widths))
+ for i=1,#result do
+ local str=format(template,unpack(result[i]))
+ result[i]=strip(str)
end
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
-
-starttiming(statistics)
-
-function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
- return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
-end
-
-function statistics.runtime()
- stoptiming(statistics)
- return statistics.formatruntime(elapsedtime(statistics))
-end
-
-function statistics.timed(action,report)
- report = report or logs.reporter("system")
- starttiming("run")
- action()
- stoptiming("run")
- report("total runtime: %s",elapsedtime("run"))
-end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
+ end
+ return result
end
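A sketch of formatcolumns with made-up rows: numeric columns are zero padded, string columns are left aligned to the widest entry, and each line is stripped afterwards:

local rows = {
  { "alpha", 1,   "x"  },
  { "b",     200, "yy" },
}
for i, line in ipairs(utilities.formatters.formatcolumns(rows)) do
  print(line)
end
-- alpha 001 x
-- b     200 yy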
@@ -5379,341 +5812,311 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tostring = type, next, tostring
-local concat = table.concat
-local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
-local is_boolean = string.is_boolean
-local settings_to_hash = utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
-
-utilities = utilities or { }
-local utilities = utilities
-utilities.setters = utilities.setters or { }
-local setters = utilities.setters
-
-local data = { } -- maybe just local
-
--- We can initialize from the cnf file. This is sort of tricky as
--- later defined setters also need to be initialized then. If set
--- this way, we need to ensure that they are not reset later on.
+package.loaded["trac-set"] = package.loaded["trac-set"] or true
-local trace_initialize = false -- only for testing during development
+-- original size: 12365, stripped down to: 8799
-function setters.initialize(filename,name,values) -- filename only for diagnostics
- local setter = data[name]
- if setter then
- local data = setter.data
- if data then
- for key, value in next, values do
- -- key = gsub(key,"_",".")
- value = is_boolean(value,value)
- local functions = data[key]
- if functions then
- if #functions > 0 and not functions.value then
- if trace_initialize then
- setter.report("executing %s (%s -> %s)",key,filename,tostring(value))
- end
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- else
- if trace_initialize then
- setter.report("skipping %s (%s -> %s)",key,filename,tostring(value))
- end
- end
- else
- -- we do a simple preregistration i.e. not in the
- -- list as it might be an obsolete entry
- functions = { default = value }
- data[key] = functions
- if trace_initialize then
- setter.report("storing %s (%s -> %s)",key,filename,tostring(value))
- end
- end
+if not modules then modules={} end modules ['trac-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring=type,next,tostring
+local concat=table.concat
+local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
+local is_boolean=string.is_boolean
+local settings_to_hash=utilities.parsers.settings_to_hash
+local allocate=utilities.storage.allocate
+utilities=utilities or {}
+local utilities=utilities
+local setters=utilities.setters or {}
+utilities.setters=setters
+local data={}
+local trace_initialize=false
+function setters.initialize(filename,name,values)
+ local setter=data[name]
+ if setter then
+ frozen=true
+ local data=setter.data
+ if data then
+ for key,newvalue in next,values do
+ local newvalue=is_boolean(newvalue,newvalue)
+ local functions=data[key]
+ if functions then
+ local oldvalue=functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
end
- return true
+ elseif #functions>0 and not oldvalue then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value=newvalue
+ functions.frozen=functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ functions={ default=newvalue,frozen=frozen }
+ data[key]=functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
end
+ end
+ return true
end
+ end
end
-
--- user interface code
-
local function set(t,what,newvalue)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = settings_to_hash(what) -- inefficient but ok
- end
- if type(what) ~= "table" then
- return
- end
- if not done then -- catch ... why not set?
- done = { }
- t.done = done
- end
- for w, value in next, what do
- if value == "" then
- value = newvalue
- elseif not value then
- value = false -- catch nil
- else
- value = is_boolean(value,value)
- end
- w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
- for name, functions in next, data do
- if done[name] then
- -- prevent recursion due to wildcards
- elseif find(name,w) then
- done[name] = true
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- end
- end
- end
+ local data=t.data
+ if not data.frozen then
+ local done=t.done
+ if type(what)=="string" then
+ what=settings_to_hash(what)
+ end
+ if type(what)~="table" then
+ return
+ end
+ if not done then
+ done={}
+ t.done=done
+ end
+ for w,value in next,what do
+ if value=="" then
+ value=newvalue
+ elseif not value then
+ value=false
+ else
+ value=is_boolean(value,value)
+ end
+ w=topattern(w,true,true)
+ for name,functions in next,data do
+ if done[name] then
+ elseif find(name,w) then
+ done[name]=true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value=value
+ end
+ end
+ end
+ end
end
-
local function reset(t)
- for name, functions in next, t.data do
- for i=1,#functions do
- functions[i](false)
- end
- functions.value = false
+ local data=t.data
+ if not data.frozen then
+ for name,functions in next,data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value=false
end
+ end
end
-
local function enable(t,what)
- set(t,what,true)
+ set(t,what,true)
end
-
local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
+ local data=t.data
+ if not what or what=="" then
+ t.done={}
+ reset(t)
+ else
+ set(t,what,false)
+ end
end
-
function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local functions = data[what]
- if not functions then
- functions = { }
- data[what] = functions
- if trace_initialize then
- t.report("defining %s",what)
- end
- end
- local default = functions.default -- can be set from cnf file
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "string" then
- if trace_initialize then
- t.report("coupling %s to %s",what,fnc)
- end
- local s = fnc -- else wrong reference
- fnc = function(value) set(t,s,value) end
- elseif typ ~= "function" then
- fnc = nil
- end
- if fnc then
- functions[#functions+1] = fnc
- -- default: set at command line or in cnf file
- -- value : set in tex run (needed when loading runtime)
- local value = functions.value or default
- if value ~= nil then
- fnc(value)
- functions.value = value
- end
- end
- end
- return false -- so we can use it in an assignment
+ local data=t.data
+ what=lower(what)
+ local functions=data[what]
+ if not functions then
+ functions={}
+ data[what]=functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default=functions.default
+ for i=1,select("#",...) do
+ local fnc=select(i,...)
+ local typ=type(fnc)
+ if typ=="string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s=fnc
+ fnc=function(value) set(t,s,value) end
+ elseif typ~="function" then
+ fnc=nil
+ end
+ if fnc then
+ functions[#functions+1]=fnc
+ local value=functions.value or default
+ if value~=nil then
+ fnc(value)
+ functions.value=value
+ end
+ end
+ end
+ return false
end
-
function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,what)
- t.enable, t.done = e, { }
+ local e=t.enable
+ t.enable,t.done=enable,{}
+ enable(t,what)
+ t.enable,t.done=e,{}
end
-
function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,what)
- t.disable, t.done = e, { }
+ local e=t.disable
+ t.disable,t.done=disable,{}
+ disable(t,what)
+ t.disable,t.done=e,{}
end
-
function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
+ t.done={}
+ reset(t)
+end
+function setters.list(t)
+ local list=table.sortedkeys(t.data)
+ local user,system={},{}
+ for l=1,#list do
+ local what=list[l]
+ if find(what,"^%*") then
+ system[#system+1]=what
+ else
+ user[#user+1]=what
end
- return user, system
+ end
+ return user,system
end
-
function setters.show(t)
- local category = t.name
- local list = setters.list(t)
- t.report()
- for k=1,#list do
- local name = list[k]
- local functions = t.data[name]
- if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
- end
- end
- t.report()
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module but we also want the rest avaliable
-
-local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
-
-local function report(setter,...)
- local report = logs and logs.report
- if report then
- report(setter.name,...)
- else -- fallback, as this module is loaded before the logger
- write_nl(format("%-15s : %s\n",setter.name,format(...)))
- end
-end
-
-function setters.new(name)
- local setter -- we need to access it in setter itself
- setter = {
- data = allocate(), -- indexed, but also default and value fields
- name = name,
- report = function(...) report (setter,...) end,
- enable = function(...) enable (setter,...) end,
- disable = function(...) disable (setter,...) end,
- register = function(...) register(setter,...) end,
- list = function(...) list (setter,...) end,
- show = function(...) show (setter,...) end,
- }
- data[name] = setter
- return setter
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
-
-local t_enable, t_disable, t_report = trackers .enable, trackers .disable, trackers .report
-local d_enable, d_disable, d_report = directives .enable, directives .disable, directives .report
-local e_enable, e_disable, e_report = experiments.enable, experiments.disable, experiments.report
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
+ local category=t.name
+ local list=setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name=list[k]
+ local functions=t.data[name]
+ if functions then
+ local value,default,modules=functions.value,functions.default,#functions
+ value=value==nil and "unset" or tostring(value)
+ default=default==nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+local function default(setter,name)
+ local d=setter.data[name]
+ return d and d.default
+end
+local function value(setter,name)
+ local d=setter.data[name]
+ return d and (d.value or d.default)
+end
+function setters.new(name)
+ local setter
+ setter={
+ data=allocate(),
+ name=name,
+ report=function(...) setters.report (setter,...) end,
+ enable=function(...) enable (setter,...) end,
+ disable=function(...) disable (setter,...) end,
+ register=function(...) register(setter,...) end,
+ list=function(...) list (setter,...) end,
+ show=function(...) show (setter,...) end,
+ default=function(...) return default (setter,...) end,
+ value=function(...) return value (setter,...) end,
+ }
+ data[name]=setter
+ return setter
+end
+trackers=setters.new("trackers")
+directives=setters.new("directives")
+experiments=setters.new("experiments")
+local t_enable,t_disable=trackers .enable,trackers .disable
+local d_enable,d_disable=directives .enable,directives .disable
+local e_enable,e_disable=experiments.enable,experiments.disable
+local trace_directives=false local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
+local trace_experiments=false local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
function directives.enable(...)
- if trace_directives then
- d_report("enabling: %s",concat({...}," "))
- end
- d_enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
end
-
function directives.disable(...)
- if trace_directives then
- d_report("disabling: %s",concat({...}," "))
- end
- d_disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
end
-
function experiments.enable(...)
- if trace_experiments then
- e_report("enabling: %s",concat({...}," "))
- end
- e_enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
end
-
function experiments.disable(...)
- if trace_experiments then
- e_report("disabling: %s",concat({...}," "))
- end
- e_disable(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+directives.register("system.nostatistics",function(v)
+ if statistics then
+ statistics.enable=not v
+ else
+ end
end)
-
-directives.register("system.nolibraries", function(v)
- libraries = nil -- we discard this tracing for security
+directives.register("system.nolibraries",function(v)
+ if libraries then
+ libraries=nil
+ else
+ end
end)
-
--- experiment
-
-local flags = environment and environment.engineflags
-
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
+if environment then
+ local engineflags=environment.engineflags
+ if engineflags then
+ local list=engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ local list=engineflags["c:directives"] or engineflags["directives"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","directives",settings_to_hash(list))
end
+ end
end
-
--- here
-
if texconfig then
-
- -- this happens too late in ini mode but that is no problem
-
- local function set(k,v)
- v = tonumber(v)
- if v then
- texconfig[k] = v
- end
+ local function set(k,v)
+ v=tonumber(v)
+ if v then
+ texconfig[k]=v
end
-
- directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
- directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
- directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
- directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
- directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
- directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
- directives.register("luatex.paramsize", function(v) set("param_size",v) end)
- directives.register("luatex.savesize", function(v) set("save_size",v) end)
- directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
-
+ end
+ directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize",function(v) set("param_size",v) end)
+ directives.register("luatex.savesize",function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
end
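A sketch of how a module typically uses the setters defined above (the key names are invented): register couples a key to a callback, while enable and disable match keys against an anchored pattern, so wildcards work:

local trace_foo = false
trackers.register("mymodule.foo", function(v) trace_foo = v end)
trackers.enable ("mymodule.foo")  -- trace_foo is now true
trackers.disable("mymodule.*")    -- the wildcard switches everything under mymodule off again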
@@ -5721,643 +6124,559 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: less categories, more subcategories (e.g. nodes)
-
-
-local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
-local format, gmatch, find = string.format, string.gmatch, string.find
-local concat, insert, remove = table.concat, table.insert, table.remove
-local escapedpattern = string.escapedpattern
-local texcount = tex and tex.count
-local next, type = next, type
-
-local setmetatableindex = table.setmetatableindex
-
---[[ldx--
-<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging a sparsing is relatively easy anyway.</p>
---ldx]]--
+package.loaded["trac-log"] = package.loaded["trac-log"] or true
-logs = logs or { }
-local logs = logs
+-- original size: 21795, stripped down to: 14194
-local moreinfo = [[
+if not modules then modules={} end modules ['trac-log']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
+local format,gmatch,find=string.format,string.gmatch,string.find
+local concat,insert,remove=table.concat,table.insert,table.remove
+local topattern=string.topattern
+local texcount=tex and tex.count
+local next,type,select=next,type,select
+local utfchar=utf.char
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+logs=logs or {}
+local logs=logs
+local moreinfo=[[
More information about ConTeXt and the tools that come with it can be found at:
-
+]].."\n"..[[
maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-
--- basic loggers
-
+utilities.strings.formatters.add (
+ formatters,"unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+utilities.strings.formatters.add (
+ formatters,"chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
local function ignore() end
-
-setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
-
-local report, subreport, status, settarget, setformats, settranslations
-
-local direct, subdirect, writer, pushtarget, poptarget
-
+setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
+local report,subreport,status,settarget,setformats,settranslations
+local direct,subdirect,writer,pushtarget,poptarget
if tex and (tex.jobname or tex.formatname) then
-
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
-
- local target = "term and log"
-
- logs.flush = io.flush
-
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
-
- writer = function(...)
- write_nl(target,...)
- end
-
- newline = function()
- write_nl(target,"\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s >\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- direct = function(a,b,c,...)
- if c then
- return format("%-15s > %s",translations[a],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s",translations[a],formats[b])
- elseif a then
- return format("%-15s >", translations[a])
- else
- return ""
- end
- end
-
- subreport = function(a,s,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],formats[b]))
- elseif a then
- write_nl(target,format("%-15s > %s >\n", translations[a],translations[s]))
- else
- write_nl(target,"\n")
- end
- end
-
- subdirect = function(a,s,b,c,...)
- if c then
- return format("%-15s > %s > %s",translations[a],translations[s],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s > %s",translations[a],translations[s],formats[b])
- elseif a then
- return format("%-15s > %s >", translations[a],translations[s])
- else
- return ""
- end
- end
-
- status = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s : %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s : %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s :\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
- if target == "term" or target == "term and log" then
- logs.flush = io.flush
- else
- logs.flush = ignore
- end
- end
-
- local stack = { }
-
- pushtarget = function(newtarget)
- insert(stack,target)
- settarget(newtarget)
- end
-
- poptarget = function()
- if #stack > 0 then
- settarget(remove(stack))
- end
- end
-
- setformats = function(f)
- formats = f
- end
-
- settranslations = function(t)
- translations = t
- end
-
+ local valueiskey={ __index=function(t,k) t[k]=k return k end }
+ local target="term and log"
+ logs.flush=io.flush
+ local formats={} setmetatable(formats,valueiskey)
+ local translations={} setmetatable(translations,valueiskey)
+ writer=function(...)
+ write_nl(target,...)
+ end
+ newline=function()
+ write_nl(target,"\n")
+ end
+ local f_one=formatters["%-15s > %s\n"]
+ local f_two=formatters["%-15s >\n"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s"]
+ local f_two=formatters["%-15s >"]
+ direct=function(a,b,c,...)
+ if c then
+ return f_one(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],formats[b])
+ elseif a then
+ return f_two(translations[a])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s\n"]
+ local f_two=formatters["%-15s > %s >\n"]
+ subreport=function(a,s,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s"]
+ local f_two=formatters["%-15s > %s >"]
+ subdirect=function(a,s,b,c,...)
+ if c then
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],translations[s],formats[b])
+ elseif a then
+ return f_two(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ }
+ settarget=function(whereto)
+ target=targets[whereto or "both"] or targets.both
+ if target=="term" or target=="term and log" then
+ logs.flush=io.flush
+ else
+ logs.flush=ignore
+ end
+ end
+ local stack={}
+ pushtarget=function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+ poptarget=function()
+ if #stack>0 then
+ settarget(remove(stack))
+ end
+ end
+ setformats=function(f)
+ formats=f
+ end
+ settranslations=function(t)
+ translations=t
+ end
else
-
- logs.flush = ignore
-
- writer = write_nl
-
- newline = function()
- write_nl("\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(format("%-15s | %s",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s",a,b))
- elseif a then
- write_nl(format("%-15s |", a))
- else
- write_nl("")
- end
- end
-
- subreport = function(a,sub,b,c,...)
- if c then
- write_nl(format("%-15s | %s | %s",a,sub,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s | %s",a,sub,b))
- elseif a then
- write_nl(format("%-15s | %s |", a,sub))
- else
- write_nl("")
- end
- end
-
- status = function(a,b,c,...) -- not to be used in lua anyway
- if c then
- write_nl(format("%-15s : %s\n",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s : %s\n",a,b)) -- b can have %'s
- elseif a then
- write_nl(format("%-15s :\n", a))
- else
- write_nl("\n")
- end
- end
-
- direct = ignore
- subdirect = ignore
-
- settarget = ignore
- pushtarget = ignore
- poptarget = ignore
- setformats = ignore
- settranslations = ignore
-
-end
-
-logs.report = report
-logs.subreport = subreport
-logs.status = status
-logs.settarget = settarget
-logs.pushtarget = pushtarget
-logs.poptarget = poptarget
-logs.setformats = setformats
-logs.settranslations = settranslations
-
-logs.direct = direct
-logs.subdirect = subdirect
-logs.writer = writer
-logs.newline = newline
-
--- installer
-
--- todo: renew (un) locks when a new one is added and wildcard
-
-local data, states = { }, nil
-
+ logs.flush=ignore
+ writer=write_nl
+ newline=function()
+ write_nl("\n")
+ end
+ local f_one=formatters["%-15s | %s"]
+ local f_two=formatters["%-15s |"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s | %s | %s"]
+ local f_two=formatters["%-15s | %s |"]
+ subreport=function(a,sub,b,c,...)
+ if c then
+ write_nl(f_one(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,sub,b))
+ elseif a then
+ write_nl(f_two(a,sub))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("\n")
+ end
+ end
+ direct=ignore
+ subdirect=ignore
+ settarget=ignore
+ pushtarget=ignore
+ poptarget=ignore
+ setformats=ignore
+ settranslations=ignore
+end
+logs.report=report
+logs.subreport=subreport
+logs.status=status
+logs.settarget=settarget
+logs.pushtarget=pushtarget
+logs.poptarget=poptarget
+logs.setformats=setformats
+logs.settranslations=settranslations
+logs.direct=direct
+logs.subdirect=subdirect
+logs.writer=writer
+logs.newline=newline
+local data,states={},nil
function logs.reporter(category,subcategory)
- local logger = data[category]
- if not logger then
- local state = false
- if states == true then
- state = true
- elseif type(states) == "table" then
- for c, _ in next, states do
- if find(category,c) then
- state = true
- break
- end
- end
+ local logger=data[category]
+ if not logger then
+ local state=false
+ if states==true then
+ state=true
+ elseif type(states)=="table" then
+ for c,_ in next,states do
+ if find(category,c) then
+ state=true
+ break
+ end
+ end
+ end
+ logger={
+ reporters={},
+ state=state,
+ }
+ data[category]=logger
+ end
+ local reporter=logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter=function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
end
- logger = {
- reporters = { },
- state = state,
- }
- data[category] = logger
- end
- local reporter = logger.reporters[subcategory or "default"]
- if not reporter then
- if subcategory then
- reporter = function(...)
- if not logger.state then
- subreport(category,subcategory,...)
- end
- end
- logger.reporters[subcategory] = reporter
- else
- local tag = category
- reporter = function(...)
- if not logger.state then
- report(category,...)
- end
- end
- logger.reporters.default = reporter
+ end
+ logger.reporters[subcategory]=reporter
+ else
+ local tag=category
+ reporter=function(...)
+ if not logger.state then
+ report(category,...)
end
+ end
+ logger.reporters.default=reporter
end
- return reporter
+ end
+ return reporter
end
-
-logs.new = logs.reporter -- for old times sake
-
--- context specicific: this ends up in the macro stream
-
-local ctxreport = logs.writer
-
+logs.new=logs.reporter
+local ctxreport=logs.writer
function logs.setmessenger(m)
- ctxreport = m
+ ctxreport=m
end
-
function logs.messenger(category,subcategory)
- -- we need to avoid catcode mess (todo: fast context)
- if subcategory then
- return function(...)
- ctxreport(subdirect(category,subcategory,...))
- end
- else
- return function(...)
- ctxreport(direct(category,...))
- end
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
end
+ end
end
-
--- so far
-
local function setblocked(category,value)
- if category == true then
- -- lock all
- category, value = "*", true
- elseif category == false then
- -- unlock all
- category, value = "*", false
- elseif value == nil then
- -- lock selective
- value = true
- end
- if category == "*" then
- states = value
- for k, v in next, data do
- v.state = value
- end
- else
- states = utilities.parsers.settings_to_hash(category)
- for c, _ in next, states do
- if data[c] then
- v.state = value
- else
- c = escapedpattern(c,true)
- for k, v in next, data do
- if find(k,c) then
- v.state = value
- end
- end
- end
- end
- end
+ if category==true then
+ category,value="*",true
+ elseif category==false then
+ category,value="*",false
+ elseif value==nil then
+ value=true
+ end
+ if category=="*" then
+ states=value
+ for k,v in next,data do
+ v.state=value
+ end
+ else
+ states=utilities.parsers.settings_to_hash(category)
+ for c,_ in next,states do
+ if data[c] then
+ v.state=value
+ else
+ c=topattern(c,true,true)
+ for k,v in next,data do
+ if find(k,c) then
+ v.state=value
+ end
+ end
+ end
+ end
+ end
end
-
function logs.disable(category,value)
- setblocked(category,value == nil and true or value)
+ setblocked(category,value==nil and true or value)
end
-
function logs.enable(category)
- setblocked(category,false)
+ setblocked(category,false)
end
-
function logs.categories()
- return table.sortedkeys(data)
+ return table.sortedkeys(data)
end
-
function logs.show()
- local n, c, s, max = 0, 0, 0, 0
- for category, v in table.sortedpairs(data) do
- n = n + 1
- local state = v.state
- local reporters = v.reporters
- local nc = #category
- if nc > c then
- c = nc
- end
- for subcategory, _ in next, reporters do
- local ns = #subcategory
- if ns > c then
- s = ns
- end
- local m = nc + ns
- if m > max then
- max = m
- end
- end
- local subcategories = concat(table.sortedkeys(reporters),", ")
- if state == true then
- state = "disabled"
- elseif state == false then
- state = "enabled"
- else
- state = "unknown"
- end
- -- no new here
- report("logging","category: '%s', subcategories: '%s', state: '%s'",category,subcategories,state)
+ local n,c,s,max=0,0,0,0
+ for category,v in table.sortedpairs(data) do
+ n=n+1
+ local state=v.state
+ local reporters=v.reporters
+ local nc=#category
+ if nc>c then
+ c=nc
+ end
+ for subcategory,_ in next,reporters do
+ local ns=#subcategory
+ if ns>c then
+ s=ns
+ end
+ local m=nc+ns
+ if m>max then
+ max=m
+ end
+ end
+ local subcategories=concat(table.sortedkeys(reporters),", ")
+ if state==true then
+ state="disabled"
+ elseif state==false then
+ state="enabled"
+ else
+ state="unknown"
end
- report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
end
-
-directives.register("logs.blocked", function(v)
- setblocked(v,true)
+local delayed_reporters={}
+setmetatableindex(delayed_reporters,function(t,k)
+ local v=logs.reporter(k.name)
+ t[k]=v
+ return v
end)
-
-directives.register("logs.target", function(v)
- settarget(v)
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+directives.register("logs.blocked",function(v)
+ setblocked(v,true)
end)
-
--- tex specific loggers (might move elsewhere)
-
-local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
-
-local real, user, sub
-
+directives.register("logs.target",function(v)
+ settarget(v)
+end)
+local report_pages=logs.reporter("pages")
+local real,user,sub
function logs.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
--- real, user, sub = 0, 0, 0
-end
-
-local timing = false
-local starttime = nil
-local lasttime = nil
-
-trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
- starttime = os.clock()
- timing = true
+ real,user,sub=texcount.realpageno,texcount.userpageno,texcount.subpageno
+end
+local timing=false
+local starttime=nil
+local lasttime=nil
+trackers.register("pages.timing",function(v)
+ starttime=os.clock()
+ timing=true
end)
-
-function logs.stop_page_number() -- the first page can includes the initialization so we omit this in average
- if timing then
- local elapsed, average
- local stoptime = os.clock()
- if not lasttime or real < 2 then
- elapsed = stoptime
- average = stoptime
- starttime = stoptime
- else
- elapsed = stoptime - lasttime
- average = (stoptime - starttime) / (real - 1)
- end
- lasttime = stoptime
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- end
- else
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- end
- else
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- end
+function logs.stop_page_number()
+ if timing then
+ local elapsed,average
+ local stoptime=os.clock()
+ if not lasttime or real<2 then
+ elapsed=stoptime
+ average=stoptime
+ starttime=stoptime
else
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- report_pages("flushing realpage %s, userpage %s",real,user)
- end
- else
- report_pages("flushing realpage %s",real)
- end
- else
- report_pages("flushing page")
- end
+ elapsed=stoptime-lasttime
+ average=(stoptime-starttime)/(real-1)
+ end
+ lasttime=stoptime
+ if real<=0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user<=0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real<=0 then
+ report_pages("flushing page")
+ elseif user<=0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
end
- logs.flush()
+ end
+ logs.flush()
end
-
-logs.report_job_stat = statistics and statistics.showjobstat
-
-local report_files = logs.reporter("files")
-
-local nesting = 0
-local verbose = false
-local hasscheme = url.hasscheme
-
--- we don't have show_open and show_close callbacks yet
-
+local report_files=logs.reporter("files")
+local nesting=0
+local verbose=false
+local hasscheme=url.hasscheme
function logs.show_open(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- nesting = nesting + 1
- -- report_files("level %s, opening %s",nesting,name)
- -- else
- -- write(format("(%s",name)) -- tex adds a space
- -- end
- -- end
end
-
function logs.show_close(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, closing %s",nesting,name)
- -- nesting = nesting - 1
- -- else
- -- write(")") -- tex adds a space
- -- end
- -- end
end
-
function logs.show_load(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, loading %s",nesting+1,name)
- -- else
- -- write(format("(%s)",name))
- -- end
- -- end
end
-
--- there may be scripts out there using this:
-
-local simple = logs.reporter("comment")
-
-logs.simple = simple
-logs.simpleline = simple
-
--- obsolete
-
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
-
--- applications
-
+local simple=logs.reporter("comment")
+logs.simple=simple
+logs.simpleline=simple
+function logs.setprogram () end
+function logs.extendbanner() end
+function logs.reportlines () end
+function logs.reportbanner() end
+function logs.reportline () end
+function logs.simplelines () end
+function logs.help () end
+local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
+local p_newline=lpeg.patterns.newline
+local linewise=(
+ Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
+)^1
local function reportlines(t,str)
- if str then
- for line in gmatch(str,"(.-)[\n\r]") do
- t.report(line)
- end
- end
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
end
-
local function reportbanner(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- t.report()
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
end
-
local function reportversion(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ end
end
-
local function reporthelp(t,...)
- local helpinfo = t.helpinfo
- if type(helpinfo) == "string" then
- reportlines(t,helpinfo)
- elseif type(helpinfo) == "table" then
- local tags = { ... }
- for i=1,#tags do
- reportlines(t,t.helpinfo[tags[i]])
- if i < #tags then
- t.report()
- end
- end
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo)=="table" then
+   local n=select("#",...)
+   for i=1,n do
+    reportlines(t,t.helpinfo[select(i,...)])
+    if i<n then
+ t.report()
+ end
end
+ end
end
-
local function reportinfo(t)
- t.report()
- reportlines(t,moreinfo)
-end
-
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+local reporters={
+ lines=reportlines,
+ banner=reportbanner,
+ version=reportversion,
+ help=reporthelp,
+ info=reportinfo,
+ export=reportexport,
+}
+local exporters={
+}
+logs.reporters=reporters
+logs.exporters=exporters
function logs.application(t)
- t.name = t.name or "unknown"
- t.banner = t.banner
- t.report = logs.reporter(t.name)
- t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
- t.identify = function() reportbanner(t) end
- t.version = function() reportversion(t) end
- return t
+ t.name=t.name or "unknown"
+ t.banner=t.banner
+ t.moreinfo=moreinfo
+ t.report=logs.reporter(t.name)
+ t.help=function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export=function(...)
+ reporters.export(t,...)
+ end
+ t.identify=function()
+ reporters.banner(t)
+ end
+ t.version=function()
+ reporters.version(t)
+ end
+ return t
end
-
--- somewhat special
-
--- logging to a file
-
-
function logs.system(whereto,process,jobname,category,...)
- local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
- for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keepint the file open
- if f then
- f:write(message)
- f:close()
- break
- else
- sleep(0.1)
- end
+ local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f=io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
end
+ end
end
-
-local report_system = logs.reporter("system","logs")
-
+local report_system=logs.reporter("system","logs")
function logs.obsolete(old,new)
- local o = loadstring("return " .. new)()
- if type(o) == "function" then
- return function(...)
- report_system("function %s is obsolete, use %s",old,new)
- loadstring(old .. "=" .. new .. " return ".. old)()(...)
- end
- elseif type(o) == "table" then
- local t, m = { }, { }
- m.__index = function(t,k)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- return o[k]
- end
- m.__newindex = function(t,k,v)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- o[k] = v
- end
- if libraries then
- libraries.obsolete[old] = t -- true
- end
- setmetatable(t,m)
- return t
- end
+ local o=loadstring("return "..new)()
+ if type(o)=="function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old.."="..new.." return "..old)()(...)
+ end
+ elseif type(o)=="table" then
+ local t,m={},{}
+ m.__index=function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ return o[k]
+ end
+ m.__newindex=function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ o[k]=v
+ end
+ if libraries then
+ libraries.obsolete[old]=t
+ end
+ setmetatable(t,m)
+ return t
+ end
end
-
if utilities then
- utilities.report = report_system
+ utilities.report=report_system
end
-
if tex and tex.error then
- function logs.texerrormessage(...) -- for the moment we put this function here
- tex.error(format(...), { })
- end
+ function logs.texerrormessage(...)
+ tex.error(format(...),{})
+ end
else
- function logs.texerrormessage(...)
- print(format(...))
- end
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
end
-
--- do we still need io.flush then?
-
io.stdout:setvbuf('no')
io.stderr:setvbuf('no')
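A sketch of the reporter interface defined above (category, subcategory and message are invented); outside a TeX run the fallback branch applies and output goes through write_nl:

local report_demo = logs.reporter("demo", "checker")
report_demo("found %s items in %s", 3, "somefile.tex")
-- prints something like:  demo            | checker | found 3 items in somefile.tex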
@@ -6366,487 +6685,1330 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-pro'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
-local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+-- original size: 5791, stripped down to: 4540
--- The protection implemented here is probably not that tight but good enough to catch
--- problems due to naive usage.
---
--- There's a more extensive version (trac-xxx.lua) that supports nesting.
---
--- This will change when we have _ENV in lua 5.2+
+if not modules then modules={} end modules ['trac-inf']={
+ version=1.001,
+ comment="companion to trac-inf.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+local format,lower=string.format,string.lower
+local concat=table.concat
+local clock=os.gettimeofday or os.clock
+statistics=statistics or {}
+local statistics=statistics
+statistics.enable=true
+statistics.threshold=0.01
+local statusinfo,n,registered,timers={},0,{},{}
+table.setmetatableindex(timers,function(t,k)
+ local v={ timing=0,loadtime=0 }
+ t[k]=v
+ return v
+end)
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+local function resettiming(instance)
+ timers[instance or "notimer"]={ timing=0,loadtime=0 }
+end
+local function starttiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing or 0
+ if it==0 then
+ timer.starttime=clock()
+ if not timer.loadtime then
+ timer.loadtime=0
+ end
+ end
+ timer.timing=it+1
+end
+local function stoptiming(instance,report)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing
+ if it>1 then
+ timer.timing=it-1
+ else
+ local starttime=timer.starttime
+ if starttime then
+ local stoptime=clock()
+ local loadtime=stoptime-starttime
+ timer.stoptime=stoptime
+ timer.loadtime=timer.loadtime+loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
+ end
+ timer.timing=0
+ return loadtime
+ end
+ end
+ return 0
+end
+local function elapsed(instance)
+ if type(instance)=="number" then
+ return instance or 0
+ else
+ local timer=timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+local function elapsedindeed(instance)
+ return elapsed(instance)>statistics.threshold
+end
+local function elapsedseconds(instance,rest)
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s",elapsed(instance),rest or "")
+ end
+end
+statistics.hastiming=hastiming
+statistics.resettiming=resettiming
+statistics.starttiming=starttiming
+statistics.stoptiming=stoptiming
+statistics.elapsed=elapsed
+statistics.elapsedtime=elapsedtime
+statistics.elapsedindeed=elapsedindeed
+statistics.elapsedseconds=elapsedseconds
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc)=="function" then
+ local rt=registered[tag] or (#statusinfo+1)
+ statusinfo[rt]={ tag,fnc }
+ registered[tag]=rt
+ if #tag>n then n=#tag end
+ end
+end
+local report=logs.reporter("mkiv lua stats")
+function statistics.show()
+ if statistics.enable then
+ local register=statistics.register
+ register("luatex banner",function()
+ return lower(status.banner)
+ end)
+ register("control sequences",function()
+ return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
+ end)
+ register("callbacks",function()
+ local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
+ end)
+ if jit then
+ local status={ jit.status() }
+ if status[1] then
+ register("luajit status",function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ register("current memory usage",statistics.memused)
+ register("runtime",statistics.runtime)
+ logs.newline()
+ for i=1,#statusinfo do
+ local s=statusinfo[i]
+ local r=s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ statistics.enable=false
+ end
+end
+function statistics.memused()
+ local round=math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
+end
+starttiming(statistics)
+function statistics.formatruntime(runtime)
+ return format("%s seconds",runtime)
+end
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+local report=logs.reporter("system")
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+commands=commands or {}
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ context(elapsedtime(name or "whatever"))
+end
-local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
-local report_system = logs.reporter("system","protection")
+end -- of closure
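The timing interface in this new trac-inf blob keeps one lazily created timer per instance and tolerates nested start/stop calls; a minimal sketch, using a made-up "demo" tag and the statistics table defined above:

    statistics.resettiming("demo")        -- clear any previous data for this tag
    statistics.starttiming("demo")        -- nested calls only increment a counter
    -- ... work to be measured ...
    statistics.stoptiming("demo")         -- accumulates the elapsed time in the timer
    print(statistics.elapsedtime("demo")) -- formatted with %0.3f, e.g. "0.042"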
-namespaces = namespaces or { }
-local namespaces = namespaces
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
-local registered = { }
+-- original size: 5773, stripped down to: 3453
+if not modules then modules={} end modules ['trac-pro']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
+local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
+local report_system=logs.reporter("system","protection")
+namespaces=namespaces or {}
+local namespaces=namespaces
+local registered={}
local function report_index(k,name)
- if trace_namespaces then
- report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("reference to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
end
-
local function report_newindex(k,name)
- if trace_namespaces then
- report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("assignment to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
end
-
local function register(name)
- local data = name == "global" and _G or _G[name]
- if not data then
- return -- error
- end
- registered[name] = data
- local m = getmetatable(data)
- if not m then
- m = { }
- setmetatable(data,m)
- end
- local index, newindex = { }, { }
- m.__saved__index = m.__index
- m.__no__index = function(t,k)
- if not index[k] then
- index[k] = true
- report_index(k,name)
- end
- return nil
- end
- m.__saved__newindex = m.__newindex
- m.__no__newindex = function(t,k,v)
- if not newindex[k] then
- newindex[k] = true
- report_newindex(k,name)
- end
- rawset(t,k,v)
+ local data=name=="global" and _G or _G[name]
+ if not data then
+ return
+ end
+ registered[name]=data
+ local m=getmetatable(data)
+ if not m then
+ m={}
+ setmetatable(data,m)
+ end
+ local index,newindex={},{}
+ m.__saved__index=m.__index
+ m.__no__index=function(t,k)
+ if not index[k] then
+ index[k]=true
+ report_index(k,name)
end
- m.__protection__depth = 0
-end
-
-local function private(name) -- maybe save name
- local data = registered[name]
+ return nil
+ end
+ m.__saved__newindex=m.__newindex
+ m.__no__newindex=function(t,k,v)
+ if not newindex[k] then
+ newindex[k]=true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth=0
+end
+local function private(name)
+ local data=registered[name]
+ if not data then
+ data=_G[name]
if not data then
- data = _G[name]
- if not data then
- data = { }
- _G[name] = data
- end
- register(name)
+ data={}
+ _G[name]=data
end
- return data
+ register(name)
+ end
+ return data
end
-
local function protect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 0 then
- m.__protection__depth = pd + 1
- else
- m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex
- m.__index, m.__newindex = m.__no__index, m.__no__newindex
- m.__protection__depth = 1
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>0 then
+ m.__protection__depth=pd+1
+ else
+ m.__save_d_index,m.__saved__newindex=m.__index,m.__newindex
+ m.__index,m.__newindex=m.__no__index,m.__no__newindex
+ m.__protection__depth=1
+ end
end
-
local function unprotect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 1 then
- m.__protection__depth = pd - 1
- else
- m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
- m.__protection__depth = 0
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>1 then
+ m.__protection__depth=pd-1
+ else
+ m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
+ m.__protection__depth=0
+ end
end
-
local function protectall()
- for name, _ in next, registered do
- if name ~= "global" then
- protect(name)
- end
+ for name,_ in next,registered do
+ if name~="global" then
+ protect(name)
end
+ end
end
-
local function unprotectall()
- for name, _ in next, registered do
- if name ~= "global" then
- unprotect(name)
- end
- end
-end
+ for name,_ in next,registered do
+ if name~="global" then
+ unprotect(name)
+ end
+ end
+end
+namespaces.register=register
+namespaces.private=private
+namespaces.protect=protect
+namespaces.unprotect=unprotect
+namespaces.protectall=protectall
+namespaces.unprotectall=unprotectall
+namespaces.private("namespaces") registered={} register("global")
+directives.register("system.protect",function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+directives.register("system.checkglobals",function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
-namespaces.register = register -- register when defined
-namespaces.private = private -- allocate and register if needed
-namespaces.protect = protect
-namespaces.unprotect = unprotect
-namespaces.protectall = protectall
-namespaces.unprotectall = unprotectall
-namespaces.private("namespaces") registered = { } register("global") -- unreachable
+end -- of closure
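The protection code above can be exercised as follows; the "mydata" namespace is hypothetical, and the reporting relies on the trackers/logs framework that the merged file sets up earlier:

    local mydata = namespaces.private("mydata") -- allocate _G.mydata and register it
    mydata.answer = 42                          -- normal use while unprotected
    namespaces.protect("mydata")                -- install the reporting __index/__newindex
    local x = mydata.answwer                    -- misspelled key: reported once, returns nil
    mydata.extra = true                         -- reported once, but still stored via rawset
    namespaces.unprotect("mydata")              -- restore the saved metamethods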
-directives.register("system.protect", function(v)
- if v then
- protectall()
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lua"] = package.loaded["util-lua"] or true
+
+-- original size: 12575, stripped down to: 8700
+
+if not modules then modules={} end modules ['util-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment="the strip code is written by Peter Cawley",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
+local load,loadfile,type=load,loadfile,type
+utilities=utilities or {}
+utilities.lua=utilities.lua or {}
+local luautilities=utilities.lua
+local report_lua=logs.reporter("system","lua")
+local tracestripping=false
+local forcestupidcompile=true
+luautilities.stripcode=true
+luautilities.alwaysstripcode=false
+luautilities.nofstrippedchunks=0
+luautilities.nofstrippedbytes=0
+local strippedchunks={}
+luautilities.strippedchunks=strippedchunks
+luautilities.suffixes={
+ tma="tma",
+ tmc=jit and "tmb" or "tmc",
+ lua="lua",
+ luc=jit and "lub" or "luc",
+ lui="lui",
+ luv="luv",
+ luj="luj",
+ tua="tua",
+ tuc="tuc",
+}
+if jit or status.luatex_version>=74 then
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ if code and code~="" then
+ code=load(code)
+ if code then
+ code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code~="" then
+ register(name)
+ io.savedata(lucfile,code)
+ return true,0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false,0
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ name=name or fullname
+ local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code=dump(code,true)
+ end
+ return load(code),0
+ end
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=stupidcompile(luafile,lucfile,strip~=false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+ function luautilities.loadstripped(...)
+ local l=load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
+else
+ local function register(name,before,after)
+ local delta=before-after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ luautilities.nofstrippedbytes=luautilities.nofstrippedbytes+delta
+ return delta
+ end
+ local strip_code_pc
+ if _MAJORVERSION==5 and _MINORVERSION==1 then
+ strip_code_pc=function(dump,name)
+ local before=#dump
+ local version,format,endian,int,size,ins,num=byte(dump,5,11)
+ local subint
+ if endian==1 then
+ subint=function(dump,i,l)
+ local val=0
+ for n=l,1,-1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ else
+ subint=function(dump,i,l)
+ local val=0
+ for n=1,l,1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ end
+ local strip_function
+ strip_function=function(dump)
+ local count,offset=subint(dump,1,size)
+ local stripped,dirty=rep("\0",size),offset+count
+ offset=offset+count+int*2+4
+ offset=offset+int+subint(dump,offset,int)*ins
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ local t
+ t,offset=subint(dump,offset,1)
+ if t==1 then
+ offset=offset+1
+ elseif t==4 then
+ offset=offset+size+subint(dump,offset,size)
+ elseif t==3 then
+ offset=offset+num
+ end
+ end
+ count,offset=subint(dump,offset,int)
+ stripped=stripped..sub(dump,dirty,offset-1)
+ for n=1,count do
+ local proto,off=strip_function(sub(dump,offset,-1))
+ stripped,offset=stripped..proto,offset+off-1
+ end
+ offset=offset+subint(dump,offset,int)*int+int
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size+int*2
+ end
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size
+ end
+ stripped=stripped..rep("\0",int*3)
+ return stripped,offset
+ end
+ dump=sub(dump,1,12)..strip_function(sub(dump,13,-1))
+ local after=#dump
+ local delta=register(name,before,after)
+ return dump,delta
+ end
+ else
+ strip_code_pc=function(dump,name)
+ return dump,0
+ end
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ local code=environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip then
+ local code,n=strip_code_pc(dump(code),name)
+ return load(code),n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
else
- unprotectall()
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ local n=0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code,n=strip_code_pc(dump(code),name)
+ end
+ return load(code),n
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ local n=0
+ if code and code~="" then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code=dump(code)
+ if strip then
+ code,n=strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile)
+ end
+ if code and code~="" then
+ io.savedata(lucfile,code)
+ end
end
-end)
-
-directives.register("system.checkglobals", function(v)
- if v then
- report_system("enabling global namespace guard")
- protect("global")
+ return n
+ end
+ local luac_normal="texluac -o %q %q"
+ local luac_strip="texluac -s -o %q %q"
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=false
+ if strip~=false then
+ strip=true
+ end
+ if forcestupidcompile then
+ fallback=true
+ elseif strip then
+ done=os.spawn(format(luac_strip,lucfile,luafile))==0
else
- report_system("disabling global namespace guard")
- unprotect("global")
+ done=os.spawn(format(luac_normal,lucfile,luafile))==0
end
-end)
+ if not done and fallback then
+ local n=stupidcompile(luafile,lucfile,strip)
+ if n>0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup=false
+ done=true
+ end
+ if done and cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
+ luautilities.loadstripped=loadstring
+end
--- dummy section (will go to luat-dum.lua)
+end -- of closure
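Both branches above (LuaJIT / LuaTeX 0.74+ versus older engines with texluac or the pure-Lua bytecode stripper) sit behind the same two entry points; a hedged sketch with hypothetical file names, assuming the merged io/lfs helpers are available:

    local luautilities = utilities.lua
    luautilities.compile("whatever.lua","whatever.luc",false,true)            -- byte-compile, strip, keep the source
    local chunk = luautilities.loadedluacode("whatever.lua",true,"whatever")  -- load via the stripping loader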
+do -- create closure to overcome 200 locals limit
+package.loaded["util-deb"] = package.loaded["util-deb"] or true
+-- original size: 3708, stripped down to: 2568
+if not modules then modules={} end modules ['util-deb']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local debug=require "debug"
+local getinfo=debug.getinfo
+local type,next,tostring=type,next,tostring
+local format,find=string.format,string.find
+local is_boolean=string.is_boolean
+utilities=utilities or {}
+local debugger=utilities.debugger or {}
+utilities.debugger=debugger
+local counters={}
+local names={}
+local report=logs.reporter("debugger")
+local function hook()
+ local f=getinfo(2)
+ if f then
+ local n="unknown"
+ if f.what=="C" then
+ n=f.name or '<anonymous>'
+ if not names[n] then
+ names[n]=format("%42s",n)
+ end
+ else
+ n=f.name or f.namewhat or f.what
+ if not n or n=="" then
+ n="?"
+ end
+ if not names[n] then
+ names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n]=(counters[n] or 0)+1
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer=printer or report
+ threshold=threshold or 0
+ local total,grandtotal,functions=0,0,0
+ local dataset={}
+ for name,count in next,counters do
+ dataset[#dataset+1]={ name,count }
+ end
+ table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
+ for i=1,#dataset do
+ local d=dataset[i]
+ local name=d[1]
+ local count=d[2]
+ if count>threshold and not find(name,"for generator") then
+ printer(format("%8i %s\n",count,names[name]))
+ total=total+count
+ end
+ grandtotal=grandtotal+count
+ functions=functions+1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n",functions))
+ printer(format("total : % 10i\n",total))
+ printer(format("grand total: % 10i\n",grandtotal))
+ printer(format("threshold : % 10i\n",threshold))
+end
+function debugger.savestats(filename,threshold)
+ local f=io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+function debugger.disable()
+ debug.sethook()
+end
+function traceback()
+ local level=1
+ while true do
+ local info=debug.getinfo(level,"Sl")
+ if not info then
+ break
+ elseif info.what=="C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level=level+1
+ end
+end
end -- of closure
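The util-deb hook simply counts every call event; a short sketch of the intended workflow (the log file name is made up):

    utilities.debugger.enable()                -- install the call-counting hook
    -- ... run the code being profiled ...
    utilities.debugger.disable()
    utilities.debugger.showstats(print,10)     -- report functions seen more than 10 times
    utilities.debugger.savestats("calls.log")  -- or write the same report to a file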
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
-local report_lua = logs.reporter("resolvers","lua")
+-- original size: 7294, stripped down to: 5798
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+if not modules then modules={} end modules ['util-mrg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gsub,format=string.gsub,string.format
+local concat=table.concat
+local type,next=type,next
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+utilities=utilities or {}
+local merger=utilities.merger or {}
+utilities.merger=merger
+merger.strip_comment=true
+local report=logs.reporter("system","merge")
+utilities.report=report
+local m_begin_merge="begin library merge"
+local m_end_merge="end library merge"
+local m_begin_closure="do -- create closure to overcome 200 locals limit"
+local m_end_closure="end -- of closure"
+local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
+local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
+local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
+local m_report=[[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
+local function self_fake()
+ return m_faked
+end
+local function self_nothing()
+ return ""
+end
+local function self_load(name)
+ local data=io.loaddata(name) or ""
+ if data=="" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+local space=patterns.space
+local eol=patterns.newline
+local equals=P("=")^0
+local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
+local close=P("]")*C(equals)*P("]")
+local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
+local longstring=open*(1-closeeq)^0*close
+local quoted=patterns.quoted
+local emptyline=space^0*eol
+local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
+local operator2=S("*+/")
+local operator3=S("-")
+local separator=S(",;")
+local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
+local strings=quoted
+local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
+local longstr=longstring
+local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
+local pack=((eol+space)^0/"")*operator1*((eol+space)^0/"")+((eol+space)^0/"")*operator2*((space)^0/"")+((eol+space)^1/"")*operator3*((space)^1/"")+((space)^0/"")*separator*((space)^0/"")
+local lines=emptyline^2/"\n"
+local spaces=(space*space)/" "
+local compact=Cs ((
+ ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
+)^1 )
+local strip=Cs((emptyline^2/"\n"+1)^0)
+local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+local function self_compact(data)
+ local delta=0
+ if merger.strip_comment then
+ local before=#data
+ data=lpegmatch(compact,data)
+ data=lpegmatch(strip,data)
+ local after=#data
+ delta=before-after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data,delta
+end
+local function self_save(name,data)
+ if data~="" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+local function self_swap(data,code)
+ return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
+end
+local function self_libs(libs,list)
+ local result,f,frozen,foundpath={},nil,false,nil
+ result[#result+1]="\n"
+ if type(libs)=='string' then libs={ libs } end
+ if type(list)=='string' then list={ list } end
+ for i=1,#libs do
+ local lib=libs[i]
+ for j=1,#list do
+ local pth=gsub(list[j],"\\","/")
+ report("checking library path %a",pth)
+ local name=pth.."/"..lib
+ if lfs.isfile(name) then
+ foundpath=pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right,wrong,original,stripped={},{},0,0
+ for i=1,#libs do
+ local lib=libs[i]
+ local fullname=foundpath.."/"..lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded=file.nameonly(lib)
+ local data=io.loaddata(fullname,true)
+ original=original+#data
+ local data,delta=self_compact(data)
+ right[#right+1]=lib
+ result[#result+1]=m_begin_closure
+ result[#result+1]=format(m_preloaded,preloaded,preloaded)
+ result[#result+1]=data
+ result[#result+1]=m_end_closure
+ stripped=stripped+delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1]=lib
+ end
+ end
+ right=#right>0 and concat(right," ") or "-"
+ wrong=#wrong>0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1]=format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result,"\n\n")
+end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
--- precautions
+end -- of closure
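This merger is what produces files like mtxrun itself: self_compact writes the "-- original size: ..., stripped down to: ..." banners visible throughout this diff, and self_libs splices each compacted library between the merge markers. A sketch with hypothetical file and path names:

    utilities.merger.selfmerge("stub.lua",{ "util-tpl.lua","util-env.lua" },{ "." },"merged.lua")
    utilities.merger.selfclean("merged.lua")   -- empty the merged section again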
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+do -- create closure to overcome 200 locals limit
-function os.setlocale()
- -- no way you can mess with it
-end
+package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- dirty tricks
+-- original size: 5655, stripped down to: 3242
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
+if not modules then modules={} end modules ['util-tpl']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities.templates=utilities.templates or {}
+local templates=utilities.templates
+local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
+local report_template=logs.reporter("template")
+local tostring=tostring
+local format,sub=string.format,string.sub
+local P,C,Cs,Carg,lpegmatch=lpeg.P,lpeg.C,lpeg.Cs,lpeg.Carg,lpeg.match
+local replacer
+local function replacekey(k,t,how,recursive)
+ local v=t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v=tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+local sqlescape=lpeg.replacer {
+ { "'","''" },
+ { "\\","\\\\" },
+ { "\r\n","\\n" },
+ { "\r","\\n" },
+}
+local sqlquotedescape=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local escapers={
+ lua=function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+local quotedescapers={
+ lua=function(s)
+ return format("%q",s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
+lpeg.patterns.sqlescape=sqlescape
+lpeg.patterns.sqlquotedescape=sqlquotedescape
+local luaescaper=escapers.lua
+local quotedluaescaper=quotedescapers.lua
+local function replacekeyunquoted(s,t,how,recurse)
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local function replacekeyquoted(s,t,how,recurse)
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local single=P("%")
+local double=P("%%")
+local lquoted=P("%[")
+local rquoted=P("]%")
+local lquotedq=P("%(")
+local rquotedq=P(")%")
+local escape=double/'%%'
+local nosingle=single/''
+local nodouble=double/''
+local nolquoted=lquoted/''
+local norquoted=rquoted/''
+local nolquotedq=lquotedq/''
+local norquotedq=rquotedq/''
+local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local any=P(1)
+ replacer=Cs((unquoted+quoted+escape+key+any)^0)
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+templates.replace=replace
+function templates.load(filename,mapping,how,recurse)
+ local data=io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping=t
+ end
+ for k,v in next,t do
+ t[k]=replace(v,mapping,how,recurse)
+ end
+ return t
end
--- environment
-environment = environment or { }
-local environment = environment
+end -- of closure
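The template replacer recognizes %key% (verbatim), %[key]% (escaped) and %(key)% (escaped and quoted) and picks the escaper from the "how" argument; a sketch with a made-up mapping:

    local replaced = utilities.templates.replace(
      "select * from %[table]% where name = %(name)%",
      { table = "users", name = "o'neill" },
      "sql")
    -- replaced: select * from users where name = 'o''neill'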
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
+do -- create closure to overcome 200 locals limit
-local mt = {
- __index = function(_,k)
- if k == "version" then
- local version = tex.toks and tex.toks.contextversiontoks
- if version and version ~= "" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k == "jobname" or k == "formatname" then
- local name = tex and tex[k]
- if name or name== "" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k == "outputfilename" then
- local name = environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
+package.loaded["util-env"] = package.loaded["util-env"] or true
-setmetatable(environment,mt)
+-- original size: 7702, stripped down to: 4701
+if not modules then modules={} end modules ['util-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate,mark=utilities.storage.allocate,utilities.storage.mark
+local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
+local unquoted,quoted=string.unquoted,string.quoted
+local concat,insert,remove=table.concat,table.insert,table.remove
+environment=environment or {}
+local environment=environment
+os.setlocale(nil,nil)
+function os.setlocale()
+end
+local validengines=allocate {
+ ["luatex"]=true,
+ ["luajittex"]=true,
+}
+local basicengines=allocate {
+ ["luatex"]="luatex",
+ ["texlua"]="luatex",
+ ["texluac"]="luatex",
+ ["luajittex"]="luajittex",
+ ["texluajit"]="luajittex",
+}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
+environment.validengines=validengines
+environment.basicengines=basicengines
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
+ end
+ local originalzero=file.basename(arg[0])
+ local specialmapping={ luatools="base" }
+ if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
+ arg[0]=specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+end
+environment.arguments=allocate()
+environment.files=allocate()
+environment.sortedflags=nil
function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
+ local arguments,files={},{}
+ environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
+ for index=1,#arg do
+ local argument=arg[index]
+ if index>0 then
+ local flag,value=match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=unquoted(value or "")
+ else
+ flag=match(argument,"^%-+(.+)")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=true
+ else
+ files[#files+1]=argument
end
+ end
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+ end
+ environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
end
-
function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
+ environment.arguments[name]=value
+end
+function environment.getargument(name,partial)
+ local arguments,sortedflags=environment.arguments,environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags=allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k]="^"..sortedflags[k]
+ end
+ environment.sortedflags=sortedflags
+ end
+ for k=1,#sortedflags do
+ local v=sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+environment.argument=environment.getargument
+function environment.splitarguments(separator)
+ local done,before,after=false,{},{}
+ local originalarguments=environment.originalarguments
+ for k=1,#originalarguments do
+ local v=originalarguments[k]
+ if not done and v==separator then
+ done=true
+ elseif done then
+ after[#after+1]=v
+ else
+ before[#before+1]=v
end
- return before, after
+ end
+ return before,after
end
-
function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
+ arg=arg or environment.originalarguments
+ if noquote and #arg==1 then
+ local a=arg[1]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ return a
+ elseif #arg>0 then
+ local result={}
+ for i=1,#arg do
+ local a=arg[i]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ a=gsub(a,'"','\\"')
+ if find(a," ") then
+ result[#result+1]=quoted(a)
+ else
+ result[#result+1]=a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
end
-
-
if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
+ local newarg,instring={},false
+ for index=1,#arg do
+ local argument=arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1]=gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring=true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
+ instring=false
+ elseif instring then
+ newarg[#newarg]=newarg[#newarg].." "..argument
+ else
+ newarg[#newarg+1]=argument
end
+ end
+ for i=1,-5,-1 do
+ newarg[i]=arg[i]
+ end
+ environment.initializearguments(newarg)
+ environment.originalarguments=mark(newarg)
+ environment.rawarguments=mark(arg)
+ arg={}
+end
- environment.initializearguments(newarg)
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
+end -- of closure
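Argument parsing in the new util-env blob splits --flag=value and --flag pairs from bare file names; a minimal sketch with a hypothetical argument vector (the file.* helpers come from the merged libraries):

    environment.initializearguments { "--script=context", "--autopdf", "myfile.tex" }
    print(environment.getargument("script"))   -- "context"
    print(environment.getargument("autopdf"))  -- true
    print(environment.files[1])                -- "myfile.tex"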
- arg = { } -- prevent duplicate handling
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- weird place ... depends on a not yet loaded module
+-- original size: 5874, stripped down to: 4184
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="kind" then
+ local kind=tex.toks and tex.toks.contextkindtoks
+ if kind and kind~="" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
- return resolvers.findfile(filename,'tex')
+ return resolvers.findfile(filename,'tex')
+end
+function environment.luafile(filename)
+ local resolved=resolvers.findfile(filename,'tex') or ""
+ if resolved~="" then
+ return resolved
+ end
+ resolved=resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved~="" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
+local function strippable(filename)
+ if stripindeed then
+ local modu=modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
end
-
-function environment.luafile(filename)
- local resolved = resolvers.findfile(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
+function environment.luafilechunk(filename,silent)
+ filename=file.replacesuffix(filename,"lua")
+ local fullname=environment.luafile(filename)
+ if fullname and fullname~="" then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if trace_locating then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
end
- resolved = resolvers.findfile(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %a",filename)
end
- return resolvers.findfile(filename,'luatexlibs') or ""
-end
-
-environment.loadedluacode = loadfile -- can be overloaded
-
-function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
- if trace_locating then
- report_lua("loading file %s%s", fullname, not data and " failed" or "")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
- end
- return data
- else
+ return nil
+ end
+end
+function environment.loadluafile(filename,version)
+ local lucname,luaname,chunk
+ local basename=file.removesuffix(filename)
+ if basename==filename then
+ luaname=file.addsuffix(basename,luasuffixes.lua)
+ lucname=file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname=basename
+ lucname=nil
+ end
+ local fullname=(lucname and environment.luafile(lucname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ local v=version
+ if modules and modules[filename] then
+ v=modules[filename].version
+ elseif versions and versions[filename] then
+ v=versions[filename]
+ end
+ if v==version then
+ return true
+ else
if trace_locating then
- report_lua("unknown file %s", filename)
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
end
- return nil
- end
-end
-
--- the next ones can use the previous ones / combine
-
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
+ environment.loadluafile(filename)
+ end
else
- lucname, luaname = nil, basename -- forced suffix
+ return true
end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- report_lua("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
+ end
+ fullname=(luaname and environment.luafile(luaname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- report_lua("unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
+ chunk=loadfile(fullname)
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
end
- return false
+ end
+ return false
end
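The two loaders above resolve files through the resolvers (tex, texmfscripts, luatexlibs paths) and prefer stripped bytecode; a sketch with a hypothetical module name, assuming the resolvers are initialized:

    local chunk = environment.luafilechunk("somemodule")       -- finds somemodule.lua and loads it via the stripping loader
    local ok    = environment.loadluafile("somemodule",1.001)  -- prefers somemodule.luc, falls back to .lua, checks the version field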
@@ -6854,1370 +8016,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-tab'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
--- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
--- trouble
-
--- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
--- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
--- of work so we delay this till we cleanup
-
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
-
---[[ldx--
-<p>The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized to we
-went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.</p>
+package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
-<p>Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.</p>
-
-<p>I might even decide to reimplement the parser using the latest <l n='lpeg'/> trickery
-as the current variant was written when <l n='lpeg'/> showed up and it's easier now to
-build tables in one go.</p>
---ldx]]--
-
-xml = xml or { }
-local xml = xml
-
-
-local utf = unicode.utf8
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
-local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
-local lpegmatch = lpeg.match
-local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
+-- original size: 42495, stripped down to: 26647
---[[ldx--
-<p>First a hack to enable namespace resolving. A namespace is characterized by
-a <l n='url'/>. The following function associates a namespace prefix with a
-pattern. We use <l n='lpeg'/>, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.</p>
---ldx]]--
-
-xml.xmlns = xml.xmlns or { }
-
-local check = P(false)
-local parse = check
-
---[[ldx--
-<p>The next function associates a namespace prefix with an <l n='url'/>. This
-normally happens independent of parsing.</p>
-
-<typing>
-xml.registerns("mml","mathml")
-</typing>
---ldx]]--
-
-function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- check = check + C(P(lower(pattern))) / namespace
- parse = P { P(check) + 1 * V(1) }
+if not modules then modules={} end modules ['lxml-tab']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+xml=xml or {}
+local xml=xml
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local utfchar=utf.char
+local lpegmatch=lpeg.match
+local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
+local formatters=string.formatters
+xml.xmlns=xml.xmlns or {}
+local check=P(false)
+local parse=check
+function xml.registerns(namespace,pattern)
+ check=check+C(P(lower(pattern)))/namespace
+ parse=P { P(check)+1*V(1) }
end
-
---[[ldx--
-<p>The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-<l n='url'/>. This used for attributes like <t>xmlns:m</t>.</p>
-
-<typing>
-xml.checkns("m","http://www.w3.org/mathml")
-</typing>
---ldx]]--
-
function xml.checkns(namespace,url)
- local ns = lpegmatch(parse,lower(url))
- if ns and namespace ~= ns then
- xml.xmlns[namespace] = ns
- end
+ local ns=lpegmatch(parse,lower(url))
+ if ns and namespace~=ns then
+ xml.xmlns[namespace]=ns
+ end
end
-
---[[ldx--
-<p>Next we provide a way to turn an <l n='url'/> into a registered
-namespace. This used for the <t>xmlns</t> attribute.</p>
-
-<typing>
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-</typing>
-
-This returns <t>mml</t>.
---ldx]]--
-
function xml.resolvens(url)
- return lpegmatch(parse,lower(url)) or ""
-end
-
---[[ldx--
-<p>A namespace in an element can be remapped onto the registered
-one efficiently by using the <t>xml.xmlns</t> table.</p>
---ldx]]--
-
---[[ldx--
-<p>This version uses <l n='lpeg'/>. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte <l n='xml'/> files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the <l n='lpeg'/> implementation we got that down to less 7.3 seconds. Loading the 14
-<l n='context'/> interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.</p>
-
-<p>Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprice that later on have to dedicate quite some
-<l n='lpeg'/> code to it.</p>
-
-<typing>
-<!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
-<!DOCTYPE Something PUBLIC "... ..." "..." >
-<!DOCTYPE Something SYSTEM "... ..." [ ... ] >
-<!DOCTYPE Something SYSTEM "... ..." >
-<!DOCTYPE Something [ ... ] >
-<!DOCTYPE Something >
-</typing>
-
-<p>The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:</p>
-
-<typing>
-local x = xml.convert(somestring)
-</typing>
-
-<p>An optional second boolean argument tells this function not to create a root
-element.</p>
-
-<p>Valid entities are:</p>
-
-<typing>
-<!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
-<!ENTITY xxxx PUBLIC "yyyy" >
-<!ENTITY xxxx "yyyy" >
-</typing>
---ldx]]--
-
--- not just one big nested table capture (lpeg overflow)
-
-local nsremap, resolvens = xml.xmlns, xml.resolvens
-
-local stack = { }
-local top = { }
-local dt = { }
-local at = { }
-local xmlns = { }
-local errorstr = nil
-local entities = { }
-local strip = false
-local cleanup = false
-local utfize = false
-local resolve_predefined = false
-local unify_predefined = false
-
-local dcache = { }
-local hcache = { }
-local acache = { }
-
-local mt = { }
-
+ return lpegmatch(parse,lower(url)) or ""
+end
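The namespace helpers keep the behaviour described in the removed ldx documentation above; a sketch reproducing that documentation's example:

    xml.registerns("mml","mathml")                    -- any url containing "mathml" resolves to the "mml" prefix
    xml.checkns("m","http://www.w3.org/mathml")       -- maps prefix "m" onto "mml" in xml.xmlns
    print(xml.resolvens("http://www.w3.org/mathml"))  -- "mml"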
+local nsremap,resolvens=xml.xmlns,xml.resolvens
+local stack={}
+local top={}
+local dt={}
+local at={}
+local xmlns={}
+local errorstr=nil
+local entities={}
+local strip=false
+local cleanup=false
+local utfize=false
+local resolve_predefined=false
+local unify_predefined=false
+local dcache={}
+local hcache={}
+local acache={}
+local mt={}
local function initialize_mt(root)
- mt = { __index = root } -- will be redefined later
+ mt={ __index=root }
end
-
function xml.setproperty(root,k,v)
- getmetatable(root).__index[k] = v
+ getmetatable(root).__index[k]=v
end
-
function xml.checkerror(top,toclose)
- return "" -- can be set
+ return ""
end
-
local function add_attribute(namespace,tag,value)
- if cleanup and #value > 0 then
- value = cleanup(value) -- new
- end
- if tag == "xmlns" then
- xmlns[#xmlns+1] = resolvens(value)
- at[tag] = value
- elseif namespace == "" then
- at[tag] = value
- elseif namespace == "xmlns" then
- xml.checkns(tag,value)
- at["xmlns:" .. tag] = value
- else
- -- for the moment this way:
- at[namespace .. ":" .. tag] = value
- end
+ if cleanup and #value>0 then
+ value=cleanup(value)
+ end
+ if tag=="xmlns" then
+ xmlns[#xmlns+1]=resolvens(value)
+ at[tag]=value
+ elseif namespace=="" then
+ at[tag]=value
+ elseif namespace=="xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:"..tag]=value
+ else
+ at[namespace..":"..tag]=value
+ end
+end
+local function add_empty(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top=stack[#stack]
+ dt=top.dt
+ local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
+ dt[#dt+1]=t
+ setmetatable(t,mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at={}
+end
+local function add_begin(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ setmetatable(top,mt)
+ dt=top.dt
+ stack[#stack+1]=top
+ at={}
+end
+local function add_end(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local toclose=remove(stack)
+ top=stack[#stack]
+ if #stack<1 then
+ errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ elseif toclose.tg~=tag then
+ errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ end
+ dt=top.dt
+ dt[#dt+1]=toclose
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
end
-
-local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = stack[#stack]
- dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
- setmetatable(t, mt)
- if at.xmlns then
- remove(xmlns)
- end
- at = { }
-end
-
-local function add_begin(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
- setmetatable(top, mt)
- dt = top.dt
- stack[#stack+1] = top
- at = { }
-end
-
-local function add_end(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local toclose = remove(stack)
- top = stack[#stack]
- if #stack < 1 then
- errorstr = format("nothing to close with %s %s", tag, xml.checkerror(top,toclose) or "")
- elseif toclose.tg ~= tag then -- no namespace check
- errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.checkerror(top,toclose) or "")
- end
- dt = top.dt
- dt[#dt+1] = toclose
- -- dt[0] = top -- nasty circular reference when serializing table
- if toclose.at.xmlns then
- remove(xmlns)
- end
-end
-
local function add_text(text)
- if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
- else
- dt[#dt+1] = text
- end
-end
-
-local function add_special(what, spacing, text)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- if strip and (what == "@cm@" or what == "@dt@") then
- -- forget it
- else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
- end
+ if cleanup and #text>0 then
+ dt[#dt+1]=cleanup(text)
+ else
+ dt[#dt+1]=text
+ end
+end
+local function add_special(what,spacing,text)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ if strip and (what=="@cm@" or what=="@dt@") then
+ else
+ dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ end
end
-
local function set_message(txt)
- errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
+ errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-
-local reported_attribute_errors = { }
-
+local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute value: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
end
-
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
-end
-
-xml.placeholders = {
- unknown_dec_entity = function(str) return (str == "" and "&error;") or format("&%s;",str) end,
- unknown_hex_entity = function(str) return format("&#x%s;",str) end,
- unknown_any_entity = function(str) return format("&#x%s;",str) end,
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
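Entities that the handlers below cannot resolve end up in these placeholder functions, which callers may override. A minimal sketch, assuming this xml library is loaded:

  -- hypothetical override: keep unknown named entities visible in the output
  xml.placeholders.unknown_any_entity = function(str)
    return "[[unknown entity: " .. str .. "]]"
  end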
-
-local placeholders = xml.placeholders
-
+local placeholders=xml.placeholders
local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
-
local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
--- one level expansion (simple case), no checking done
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
--- parsing in the xml file
-
-local predefined_unified = {
- [38] = "&amp;",
- [42] = "&quot;",
- [47] = "&apos;",
- [74] = "&lt;",
- [76] = "&gt;",
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s),true
+ end
+end
+local rest=(1-P(";"))^0
+local many=P(1)^0
+local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
}
-
-local predefined_simplified = {
- [38] = "&", amp = "&",
- [42] = '"', quot = '"',
- [47] = "'", apos = "'",
- [74] = "<", lt = "<",
- [76] = ">", gt = ">",
-}
-
-local nofprivates = 0xF0000 -- shared but seldom used
-
-local privates_u = { -- unescaped
- [ [[&]] ] = "&amp;",
- [ [["]] ] = "&quot;",
- [ [[']] ] = "&apos;",
- [ [[<]] ] = "&lt;",
- [ [[>]] ] = "&gt;",
+local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
}
-
-local privates_p = {
+local nofprivates=0xF0000
+local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
}
-
-local privates_n = {
- -- keeps track of defined ones
+local privates_p={}
+local privates_n={
}
-
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
-
+local escaped=utf.remapper(privates_u)
local function unescaped(s)
- local p = privates_n[s]
- if not p then
- nofprivates = nofprivates + 1
- p = utfchar(nofprivates)
- privates_n[s] = p
- s = "&" .. s .. ";" -- todo: use char-ent to map to hex
- privates_u[p] = s
- privates_p[p] = s
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ end
+ return p
+end
+local unprivatized=utf.remapper(privates_p)
+xml.privatetoken=unescaped
+xml.unprivatized=unprivatized
+xml.privatecodes=privates_n
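xml.privatetoken maps an entity name to a character in the private plane starting at 0xF0000 and records the reverse mapping that xml.unprivatized applies later. A round-trip sketch, assuming this xml library is loaded:

  local token = xml.privatetoken("myentity")    -- private-area utf character
  local text  = "before " .. token .. " after"  -- travels as ordinary text
  print(xml.unprivatized(text))                 --> before &myentity; after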
+local function handle_hex_entity(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- return p
+ hcache[str]=h
+ end
+ return h
end
-
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
+local function handle_dec_entity(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
else
- return (utfgsub(s,".",privates_p))
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
+ dcache[str]=d
+ end
+ return d
end
-
-xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
-xml.privatecodes = privates_n
-
-local function handle_hex_entity(str)
- local h = hcache[str]
- if not h then
- local n = tonumber(str,16)
- h = unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
- elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
+xml.parsedentitylpeg=parsedentity
+local function handle_any_entity(str)
+ if resolve then
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve)=="function" then
+ a=resolve(str) or entities[str]
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h = "&#x" .. str .. ";"
+ a=entities[str]
end
- hcache[str] = h
- end
- return h
-end
-
-local function handle_dec_entity(str)
- local d = dcache[str]
- if not d then
- local n = tonumber(str)
- d = unify_predefined and predefined_unified[n]
- if d then
+ if a then
+ if type(a)=="function" then
if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
- elseif utfize then
- d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
if trace_entities then
- report_xml("found entity &#%s;",str)
+ report_xml("resolving entity &%s; to external %s",str,a)
end
- d = "&#" .. str .. ";"
- end
- dcache[str] = d
- end
- return d
-end
-
-xml.parsedentitylpeg = parsedentity
-
-local function handle_any_entity(str)
- if resolve then
- local a = acache[str] -- per instance ! todo
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (predefined)",str,a)
- end
- else
- if type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
- if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
- end
- a = lpegmatch(parsedentity,a) or a -- for nested
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
- end
- else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
- end
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
- end
- else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
- end
- if str == "" then
- a = "&error;"
- else
- a = "&" .. str .. ";"
- end
- end
- end
- end
- acache[str] = a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; into %s",str,a)
- acache[str] = a
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
end
- end
- return a
- else
- local a = acache[str]
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- -- one of the predefined
- acache[str] = a
- if trace_entities then
- report_xml("entity &%s; becomes %s",str,tostring(a))
- end
- elseif str == "" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
- end
- a = "&error;"
- acache[str] = a
+ if str=="" then
+ a="&error;"
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
- end
- -- a = "&" .. str .. ";"
- a = unescaped(str)
- acache[str] = a
- end
- end
- return a
- end
+ a="&"..str..";"
+ end
+ end
+ end
+ end
+ acache[str]=a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str]=a
+ end
+ end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a="&error;"
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
+ end
end
-
local function handle_end_entity(chr)
- report_xml("error in entity, %q found instead of ';'",chr)
-end
-
-local space = S(' \r\n\t')
-local open = P('<')
-local close = P('>')
-local squote = S("'")
-local dquote = S('"')
-local equal = P('=')
-local slash = P('/')
-local colon = P(':')
-local semicolon = P(';')
-local ampersand = P('&')
-local valid = R('az', 'AZ', '09') + S('_-.')
-local name_yes = C(valid^1) * colon * C(valid^1)
-local name_nop = C(P(true)) * C(valid^1)
-local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
-local spacing = C(space^0)
-
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
-local hexentitycontent = R("AF","af","09")^0
-local decentitycontent = R("09")^0
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flaky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
-
-local function normalentity(k,v ) entities[k] = v end
-local function systementity(k,v,n) entities[k] = v end
-local function publicentity(k,v,n) entities[k] = v end
-
--- todo: separate dtd parser
-
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
-
-local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
-
-local normalentitytype = (doctypename * somespace * value)/normalentity
-local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
-local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-
--- we accept comments in doctypes
-
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
-local definitiondoctype= doctypename * somespace * doctypeset
-local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
-local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
-local simpledoctype = (1-close)^1 -- * balanced^0
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-
-local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
-local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
-local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
-local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
-
--- nicer but slower:
---
--- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
--- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
--- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
--- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
-
-local trailer = space^0 * (text_unparsed/set_message)^0
-
--- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
--- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
--- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
-local grammar_parsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ report_xml("error in entity, %a found instead of %a",chr,";")
+end
+local space=S(' \r\n\t')
+local open=P('<')
+local close=P('>')
+local squote=S("'")
+local dquote=S('"')
+local equal=P('=')
+local slash=P('/')
+local colon=P(':')
+local semicolon=P(';')
+local ampersand=P('&')
+local valid=R('az','AZ','09')+S('_-.')
+local name_yes=C(valid^1)*colon*C(valid^1)
+local name_nop=C(P(true))*C(valid^1)
+local name=name_yes+name_nop
+local utfbom=lpeg.patterns.utfbom
+local spacing=C(space^0)
+local anyentitycontent=(1-open-semicolon-space-close)^0
+local hexentitycontent=R("AF","af","09")^0
+local decentitycontent=R("09")^0
+local parsedentity=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity)
+local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local text_unparsed=C((1-open)^1)
+local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local somespace=space^1
+local optionalspace=space^0
+local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
+local endofattributes=slash*close+close
+local whatever=space*name*optionalspace*equal
+local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
+local attributevalue=value+wrongvalue
+local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
+local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
+local parsedtext=text_parsed/add_text
+local unparsedtext=text_unparsed/add_text
+local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
+local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
+local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
+local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
+local begincomment=open*P("!--")
+local endcomment=P("--")*close
+local begininstruction=open*P("?")
+local endinstruction=P("?")*close
+local begincdata=open*P("![CDATA[")
+local endcdata=P("]]")*close
+local someinstruction=C((1-endinstruction)^0)
+local somecomment=C((1-endcomment )^0)
+local somecdata=C((1-endcdata )^0)
+local function normalentity(k,v ) entities[k]=v end
+local function systementity(k,v,n) entities[k]=v end
+local function publicentity(k,v,n) entities[k]=v end
+local begindoctype=open*P("!DOCTYPE")
+local enddoctype=close
+local beginset=P("[")
+local endset=P("]")
+local doctypename=C((1-somespace-close)^0)
+local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
+local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local normalentitytype=(doctypename*somespace*value)/normalentity
+local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
+local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local definitiondoctype=doctypename*somespace*doctypeset
+local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
+local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
+local simpledoctype=(1-close)^1
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
+local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
+local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
+local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local trailer=space^0*(text_unparsed/set_message)^0
+local grammar_parsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
-
-local grammar_unparsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+local grammar_unparsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
-
--- maybe we will add settings to result as well
-
-local function _xmlconvert_(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- --
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
- resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- entities = settings.entities or { }
- --
- if utfize == nil then
- settings.utfize_entities = true
- utfize = true
- end
- if resolve_predefined == nil then
- settings.resolve_predefined_entities = true
- resolve_predefined = true
- end
- --
- --
- stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
- acache, hcache, dcache = { }, { }, { } -- not stored
- reported_attribute_errors = { }
- if settings.parent_root then
- mt = getmetatable(settings.parent_root)
+local function _xmlconvert_(data,settings)
+ settings=settings or {}
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ stack,top,at,xmlns,errorstr={},{},{},{},nil
+ acache,hcache,dcache={},{},{}
+ reported_attribute_errors={}
+ if settings.parent_root then
+ mt=getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1]=top
+ top.dt={}
+ dt=top.dt
+ if not data or data=="" then
+ errorstr="empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr=""
else
- initialize_mt(top)
- end
- stack[#stack+1] = top
- top.dt = { }
- dt = top.dt
- if not data or data == "" then
- errorstr = "empty xml file"
- elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - parsed text"
- end
- elseif type(data) == "string" then
- if lpegmatch(grammar_unparsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - unparsed text"
- end
+ errorstr="invalid xml file - parsed text"
+ end
+ elseif type(data)=="string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr=""
else
- errorstr = "invalid xml file - no text at all"
- end
- local result
- if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
- setmetatable(stack, mt)
- local errorhandler = settings.error_handler
- if errorhandler == false then
- -- no error message
- else
- errorhandler = errorhandler or xml.errorhandler
- if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
- end
- end
+ errorstr="invalid xml file - unparsed text"
+ end
+ else
+ errorstr="invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr~="" then
+ result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+ setmetatable(stack,mt)
+ local errorhandler=settings.error_handler
+ if errorhandler==false then
else
- result = stack[1]
- end
- if not settings.no_root then
- result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
- setmetatable(result, mt)
- local rdt = result.dt
- for k=1,#rdt do
- local v = rdt[k]
- if type(v) == "table" and not v.special then -- always table -)
- result.ri = k -- rootindex
- v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
- break
- end
- end
- end
- if errorstr and errorstr ~= "" then
- result.error = true
- end
- result.statistics = {
- entities = {
- decimals = dcache,
- hexadecimals = hcache,
- names = acache,
- }
+ errorhandler=errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource=settings.currentresource
+ if currentresource and currentresource~="" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result=stack[1]
+ end
+ if not settings.no_root then
+ result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
+ setmetatable(result,mt)
+ local rdt=result.dt
+ for k=1,#rdt do
+ local v=rdt[k]
+ if type(v)=="table" and not v.special then
+ result.ri=k
+ v.__p__=result
+ break
+ end
+ end
+ end
+ if errorstr and errorstr~="" then
+ result.error=true
+ end
+ result.statistics={
+ entities={
+ decimals=dcache,
+ hexadecimals=hcache,
+ names=acache,
}
- strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
- unify_predefined, cleanup, entities = nil, nil, nil
- stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
- acache, hcache, dcache = nil, nil, nil
- reported_attribute_errors, mt, errorhandler = nil, nil, nil
- return result
+ }
+ strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
+ unify_predefined,cleanup,entities=nil,nil,nil
+ stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
+ acache,hcache,dcache=nil,nil,nil
+ reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ return result
end
-
--- Because we can have a crash (stack issues) with faulty xml, we wrap this one
--- in a protector:
-
function xmlconvert(data,settings)
- local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
- if ok then
- return result
- else
- return _xmlconvert_("")
- end
-end
-
-xml.convert = xmlconvert
-
-function xml.inheritedconvert(data,xmldata) -- xmldata is parent
- local settings = xmldata.settings
- if settings then
- settings.parent_root = xmldata -- to be tested
- end
- -- settings.no_root = true
- local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
- -- xc.settings = nil
- -- xc.entities = nil
- -- xc.special = nil
- -- xc.ri = nil
- -- print(xc.tg)
- return xc
+ local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+xml.convert=xmlconvert
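The converter reads a few optional settings (the keys visible above) and switches to the entity-resolving grammar when utfize or resolve is requested. A minimal usage sketch, assuming this xml library is loaded (xml.tostring is defined further down in this file):

  local root = xml.convert("<doc><item n='1'>&amp;</item></doc>", {
    resolve_entities            = true,   -- expand &name; via entities/placeholders
    resolve_predefined_entities = true,   -- &amp;, &lt;, ... become plain characters
    strip_cm_and_dt             = false,  -- keep comment and doctype nodes
  })
  if not root.error then
    print(xml.tostring(root))
  end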
+function xml.inheritedconvert(data,xmldata)
+ local settings=xmldata.settings
+ if settings then
+ settings.parent_root=xmldata
+ end
+ local xc=xmlconvert(data,settings)
+ return xc
end
-
---[[ldx--
-<p>Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).</p>
---ldx]]--
-
function xml.is_valid(root)
- return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
end
-
function xml.package(tag,attributes,data)
- local ns, tg = match(tag,"^(.-):?([^:]+)$")
- local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
- setmetatable(t, mt)
- return t
+ local ns,tg=match(tag,"^(.-):?([^:]+)$")
+ local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
+ setmetatable(t,mt)
+ return t
end
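xml.package wraps data in a single element node without running the parser. A small sketch, assuming this xml library is loaded:

  local t = xml.package("ns:thing", { id = "x" }, { "some text" })
  -- t.ns == "ns", t.tg == "thing", t.at.id == "x", t.dt[1] == "some text"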
-
function xml.is_valid(root)
- return root and not root.error
+ return root and not root.error
end
-
-xml.errorhandler = report_xml
-
---[[ldx--
-<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.</p>
---ldx]]--
-
+xml.errorhandler=report_xml
function xml.load(filename,settings)
- local data = ""
- if type(filename) == "string" then
- -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
- local f = io.open(filename,'r')
- if f then
- data = f:read("*all")
- f:close()
- end
- elseif filename then -- filehandle
- data = filename:read("*all")
- end
- return xmlconvert(data,settings)
+ local data=""
+ if type(filename)=="string" then
+ local f=io.open(filename,'r')
+ if f then
+ data=f:read("*all")
+ f:close()
+ end
+ elseif filename then
+ data=filename:read("*all")
+ end
+ if settings then
+ settings.currentresource=filename
+ local result=xmlconvert(data,settings)
+ settings.currentresource=nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource=filename })
+ end
end
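xml.load accepts either a filename or an already opened file handle, reads everything, and hands the data to xml.convert together with the name as currentresource so load errors can mention the source. A sketch with a hypothetical filename:

  local root = xml.load("somefile.xml")                    -- by name ...
  -- local root = xml.load(io.open("somefile.xml","r"))    -- ... or by handle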
-
---[[ldx--
-<p>When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
function xml.toxml(data)
- if type(data) == "string" then
- local root = { xmlconvert(data,no_root) }
- return (#root > 1 and root) or root[1]
- else
- return data
- end
+ if type(data)=="string" then
+ local root={ xmlconvert(data,no_root) }
+ return (#root>1 and root) or root[1]
+ else
+ return data
+ end
end
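xml.toxml parses strings with the no_root settings and passes tables through untouched, so it can be applied to mixed input. A sketch, assuming this xml library is loaded:

  local t = xml.toxml("<b>bold</b>")   -- string: parsed without a root wrapper
  local s = xml.toxml(t)               -- table: returned as-is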
-
---[[ldx--
-<p>For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!</p>
---ldx]]--
-
local function copy(old,tables)
- if old then
- tables = tables or { }
- local new = { }
- if not tables[old] then
- tables[old] = new
- end
- for k,v in next, old do
- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
- end
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return { }
+ if old then
+ tables=tables or {}
+ local new={}
+ if not tables[old] then
+ tables[old]=new
end
-end
-
-xml.copy = copy
-
---[[ldx--
-<p>In <l n='context'/> serializing the tree or parts of the tree is a major
-activity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatenating
-alternative.</p>
---ldx]]--
-
--- todo: add <?xml version='1.0' standalone='yes'?> when not present
-
-function xml.checkbom(root) -- can be made faster
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- return
- end
- end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
+ for k,v in next,old do
+ new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
end
-end
-
---[[ldx--
-<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.</p>
---ldx]]--
-
--- new experimental reorganized serialize
-
-local function verbose_element(e,handlers) -- options
- local handle = handlers.handle
- local serialize = handlers.serialize
- local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
- local ats = eat and next(eat) and { }
- if ats then
- for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,escaped(v))
- end
- end
- if ern and trace_entities and ern ~= ens then
- ens = ern
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
end
- if ens ~= "" then
- if edt and #edt > 0 then
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
- else
- handle("<",ens,":",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("</",ens,":",etg,">")
+ return new
+ else
+ return {}
+ end
+end
+xml.copy=copy
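The copy function above deep-copies a node, reuses subtables it has already copied so shared references stay shared, and preserves the metatable. A sketch, assuming this xml library is loaded:

  local original  = xml.convert("<a><b/></a>")
  local duplicate = xml.copy(original)   -- independent tables, same structure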
+function xml.checkbom(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
+ insert(dt,2,"\n" )
+ end
+end
+local function verbose_element(e,handlers)
+ local handle=handlers.handle
+ local serialize=handlers.serialize
+ local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
+ local ats=eat and next(eat) and {}
+ if ats then
+ for k,v in next,eat do
+ ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern~=ens then
+ ens=ern
+ end
+ if ens~="" then
+ if edt and #edt>0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
- else
- handle("<",ens,":",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",ens,":",etg,">")
else
- if edt and #edt > 0 then
- if ats then
- handle("<",etg," ",concat(ats," "),">")
- else
- handle("<",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e)) -- option: hexify escaped entities
- else
- serialize(e,handlers)
- end
- end
- handle("</",etg,">")
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt>0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",etg," ",concat(ats," "),"/>")
- else
- handle("<",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
end
+ end
end
-
local function verbose_pi(e,handlers)
- handlers.handle("<?",e.dt[1],"?>")
+ handlers.handle("<?",e.dt[1],"?>")
end
-
local function verbose_comment(e,handlers)
- handlers.handle("<!--",e.dt[1],"-->")
+ handlers.handle("<!--",e.dt[1],"-->")
end
-
local function verbose_cdata(e,handlers)
- handlers.handle("<![CDATA[", e.dt[1],"]]>")
+ handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
-
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
end
-
local function verbose_root(e,handlers)
- handlers.serialize(e.dt,handlers)
+ handlers.serialize(e.dt,handlers)
end
-
local function verbose_text(e,handlers)
- handlers.handle(escaped(e))
+ handlers.handle(escaped(e))
end
-
local function verbose_document(e,handlers)
- local serialize = handlers.serialize
- local functions = handlers.functions
- for i=1,#e do
- local ei = e[i]
- if type(ei) == "string" then
- functions["@tx@"](ei,handlers)
- else
- serialize(ei,handlers)
- end
+ local serialize=handlers.serialize
+ local functions=handlers.functions
+ for i=1,#e do
+ local ei=e[i]
+ if type(ei)=="string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
end
+ end
end
-
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
- end
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
-
local function xserialize(e,handlers)
- local functions = handlers.functions
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
-end
-
-local handlers = { }
-
+ local functions=handlers.functions
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+local handlers={}
local function newhandlers(settings)
- local t = table.copy(handlers.verbose or { }) -- merge
- if settings then
- for k,v in next, settings do
- if type(v) == "table" then
- local tk = t[k] if not tk then tk = { } t[k] = tk end
- for kk,vv in next, v do
- tk[kk] = vv
- end
- else
- t[k] = v
- end
- end
- if settings.name then
- handlers[settings.name] = t
- end
- end
- utilities.storage.mark(t)
- return t
-end
-
-local nofunction = function() end
-
+ local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
+ if settings then
+ for k,v in next,settings do
+ if type(v)=="table" then
+ local tk=t[k] if not tk then tk={} t[k]=tk end
+ for kk,vv in next,v do
+ tk[kk]=vv
+ end
+ else
+ t[k]=v
+ end
+ end
+ if settings.name then
+ handlers[settings.name]=t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+local nofunction=function() end
function xml.sethandlersfunction(handler,name,fnc)
- handler.functions[name] = fnc or nofunction
+ handler.functions[name]=fnc or nofunction
end
-
function xml.gethandlersfunction(handler,name)
- return handler.functions[name]
+ return handler.functions[name]
end
-
function xml.gethandlers(name)
- return handlers[name]
+ return handlers[name]
end
-
newhandlers {
- name = "verbose",
- initialize = false, -- faster than nil and mt lookup
- finalize = false, -- faster than nil and mt lookup
- serialize = xserialize,
- handle = print,
- functions = {
- ["@dc@"] = verbose_document,
- ["@dt@"] = verbose_doctype,
- ["@rt@"] = verbose_root,
- ["@el@"] = verbose_element,
- ["@pi@"] = verbose_pi,
- ["@cm@"] = verbose_comment,
- ["@cd@"] = verbose_cdata,
- ["@tx@"] = verbose_text,
- }
+ name="verbose",
+ initialize=false,
+ finalize=false,
+ serialize=xserialize,
+ handle=print,
+ functions={
+ ["@dc@"]=verbose_document,
+ ["@dt@"]=verbose_doctype,
+ ["@rt@"]=verbose_root,
+ ["@el@"]=verbose_element,
+ ["@pi@"]=verbose_pi,
+ ["@cm@"]=verbose_comment,
+ ["@cd@"]=verbose_cdata,
+ ["@tx@"]=verbose_text,
+ }
}
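newhandlers starts from the verbose handler set registered above and merges the given table into it, so a custom serializer only overrides what it changes. A minimal sketch that collects element tags instead of printing markup, assuming this xml library is loaded; the handler name and callback body are assumptions, the function keys follow the verbose table above:

  local seen = {}
  local taghandler = xml.newhandlers {
    name      = "tags",                 -- hypothetical handler name
    handle    = function() end,         -- discard textual output
    functions = {
      ["@el@"] = function(e,handlers)
        seen[#seen+1] = e.tg            -- record the tag ...
        for i=1,#e.dt do                -- ... and walk the children
          local d = e.dt[i]
          if type(d) ~= "string" then
            handlers.serialize(d,handlers)
          end
        end
      end,
    },
  }
  xml.serialize(xml.convert("<a><b/><c/></a>"),taghandler)
  -- seen is now { "a", "b", "c" }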
-
---[[ldx--
-<p>How you deal with saving data depends on your preferences. For a 40 MB database
-file the timings on a 2.3 Core Duo are as follows (time in seconds):</p>
-
-<lines>
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-</lines>
-
-<p>Beware, these were timings with the old routine but measurements will not be that
-much different I guess.</p>
---ldx]]--
-
--- maybe this will move to lxml-xml
-
local result
-
-local xmlfilehandler = newhandlers {
- name = "file",
- initialize = function(name)
- result = io.open(name,"wb")
- return result
- end,
- finalize = function()
- result:close()
- return true
- end,
- handle = function(...)
- result:write(...)
- end,
+local xmlfilehandler=newhandlers {
+ name="file",
+ initialize=function(name)
+ result=io.open(name,"wb")
+ return result
+ end,
+ finalize=function()
+ result:close()
+ return true
+ end,
+ handle=function(...)
+ result:write(...)
+ end,
}
-
--- no checking on writeability here but not faster either
---
--- local xmlfilehandler = newhandlers {
--- initialize = function(name)
--- io.output(name,"wb")
--- return true
--- end,
--- finalize = function()
--- io.close()
--- return true
--- end,
--- handle = io.write,
--- }
-
function xml.save(root,name)
- serialize(root,xmlfilehandler,name)
+ serialize(root,xmlfilehandler,name)
end
-
local result
-
-local xmlstringhandler = newhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
+local xmlstringhandler=newhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
}
-
-local function xmltostring(root) -- 25% overhead due to collecting
- if not root then
- return ""
- elseif type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+local function xmltostring(root)
+ if not root then
+ return ""
+ elseif type(root)=="string" then
+ return root
+ else
+ return serialize(root,xmlstringhandler) or ""
+ end
end
-
-local function __tostring(root) -- inline
- return (root and xmltostring(root)) or ""
+local function __tostring(root)
+ return (root and xmltostring(root)) or ""
end
-
-initialize_mt = function(root) -- redefinition
- mt = { __tostring = __tostring, __index = root }
+initialize_mt=function(root)
+ mt={ __tostring=__tostring,__index=root }
end
-
-xml.defaulthandlers = handlers
-xml.newhandlers = newhandlers
-xml.serialize = serialize
-xml.tostring = xmltostring
-
---[[ldx--
-<p>The next function operates on the content only and needs a handle function
-that accepts a string.</p>
---ldx]]--
-
+xml.defaulthandlers=handlers
+xml.newhandlers=newhandlers
+xml.serialize=serialize
+xml.tostring=xmltostring
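With the file and string handlers above in place, a tree can be turned back into text. Usage sketch (the output filename is hypothetical):

  local root = xml.convert("<a>text</a>")
  local s    = xml.tostring(root)        -- serialize via the string handler
  xml.save(root,"copy.xml")              -- serialize via the file handler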
local function xmlstring(e,handle)
- if not handle or (e.special and e.tg ~= "@rt@") then
- -- nothing
- elseif e.tg then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- xmlstring(edt[i],handle)
- end
- end
- else
- handle(e)
+ if not handle or (e.special and e.tg~="@rt@") then
+ elseif e.tg then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
end
+ else
+ handle(e)
+ end
end
-
-xml.string = xmlstring
-
---[[ldx--
-<p>A few helpers:</p>
---ldx]]--
-
-
+xml.string=xmlstring
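xml.string walks the content only and passes every plain string fragment to the supplied callback, skipping special nodes other than the '@rt@' wrapper. A sketch, assuming this xml library is loaded:

  xml.string(xml.convert("<a>one<b>two</b></a>"),io.write)   -- writes: onetwo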
function xml.settings(e)
- while e do
- local s = e.settings
- if s then
- return s
- else
- e = e.__p__
- end
+ while e do
+ local s=e.settings
+ if s then
+ return s
+ else
+ e=e.__p__
end
- return nil
+ end
+ return nil
end
-
function xml.root(e)
- local r = e
- while e do
- e = e.__p__
- if e then
- r = e
- end
+ local r=e
+ while e do
+ e=e.__p__
+ if e then
+ r=e
end
- return r
+ end
+ return r
end
-
function xml.parent(root)
- return root.__p__
+ return root.__p__
end
-
function xml.body(root)
- return (root.ri and root.dt[root.ri]) or root -- not ok yet
+ return root.ri and root.dt[root.ri] or root
end
-
function xml.name(root)
- if not root then
- return ""
- elseif root.ns == "" then
- return root.tg
- else
- return root.ns .. ":" .. root.tg
- end
+ if not root then
+ return ""
+ end
+ local ns=root.ns
+ local tg=root.tg
+ if ns=="" then
+ return tg
+ else
+ return ns..":"..tg
+ end
end
-
---[[ldx--
-<p>The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-no harm. Copying the table would take more time. Usage:</p>
---ldx]]--
-
function xml.erase(dt,k)
- if dt then
- if k then
- dt[k] = ""
- else for k=1,#dt do
- dt[1] = { "" }
- end end
- end
+ if dt then
+ if k then
+ dt[k]=""
+ else for k=1,#dt do
+ dt[1]={ "" }
+ end end
+ end
end
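xml.erase blanks content in place instead of removing table slots, so the indices of the remaining children stay valid. A sketch, assuming this xml library is loaded:

  local root = xml.convert("<a><b>x</b><c>y</c></a>")
  local a    = xml.body(root)
  xml.erase(a.dt,1)   -- the first child is now an empty string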
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-
-<typing>
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-</typing>
---ldx]]--
-
function xml.assign(dt,k,root)
- if dt and k then
- dt[k] = (type(root) == "table" and xml.body(root)) or root
- return dt[k]
- else
- return xml.body(root)
- end
+ if dt and k then
+ dt[k]=type(root)=="table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+function xml.tocdata(e,wrapper)
+ local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
+ setmetatable(t,getmetatable(e))
+ e.dt={ t }
end
-
--- the following helpers may move
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-<typing>
-xml.tocdata(e)
-xml.tocdata(e,"error")
-</typing>
---ldx]]--
-
-function xml.tocdata(e,wrapper) -- a few more in the aux module
- local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
- if wrapper then
- whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
- end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
- setmetatable(t,getmetatable(e))
- e.dt = { t }
-end
-
function xml.makestandalone(root)
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" then
- local txt = v.dt[1]
- if find(txt,"xml.*version=") then
- v.dt[1] = txt .. " standalone='yes'"
- break
- end
- end
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" then
+ local txt=v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1]=txt.." standalone='yes'"
+ break
end
+ end
end
- return root
+ end
+ return root
end
-
function xml.kind(e)
- local dt = e and e.dt
- if dt then
- local n = #dt
- if n == 1 then
- local d = dt[1]
- if d.special then
- local tg = d.tg
- if tg == "@cd@" then
- return "cdata"
- elseif tg == "@cm" then
- return "comment"
- elseif tg == "@pi@" then
- return "instruction"
- elseif tg == "@dt@" then
- return "declaration"
- end
- elseif type(d) == "string" then
- return "text"
- end
- return "element"
- elseif n > 0 then
- return "mixed"
- end
- end
- return "empty"
+ local dt=e and e.dt
+ if dt then
+ local n=#dt
+ if n==1 then
+ local d=dt[1]
+ if d.special then
+ local tg=d.tg
+ if tg=="@cd@" then
+ return "cdata"
+ elseif tg=="@cm" then
+ return "comment"
+ elseif tg=="@pi@" then
+ return "instruction"
+ elseif tg=="@dt@" then
+ return "declaration"
+ end
+ elseif type(d)=="string" then
+ return "text"
+ end
+ return "element"
+ elseif n>0 then
+ return "mixed"
+ end
+ end
+ return "empty"
end
@@ -8225,1294 +8998,1060 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- e.ni is only valid after a filter run
--- todo: B/C/[get first match]
-
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local setmetatableindex = table.setmetatableindex
-
--- beware, this is not xpath ... e.g. position is different (currently) and
--- we have reverse-sibling as reversed preceding sibling
-
---[[ldx--
-<p>This module can be used stand alone but also inside <l n='mkiv'/> in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.</p>
-<p>If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.</p>
---ldx]]--
-
---[[ldx--
-<p>Especially the lpath code is experimental; we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for <l n='context'/> we also need
-this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
-files.</p>
-
-<typing>
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-</typing>
---ldx]]--
-
-local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
-local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
-local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
-
-local report_lpath = logs.reporter("xml","lpath")
-
---[[ldx--
-<p>We've now arrived at an interesting part: accessing the tree using a subset
-of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
-will explain more about its usage in other documents.</p>
---ldx]]--
+package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
-local xml = xml
+-- original size: 48956, stripped down to: 30516
-local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
-local lpathcached = 0 function xml.lpathcached() return lpathcached end
-
-xml.functions = xml.functions or { } -- internal
-local functions = xml.functions
-
-xml.expressions = xml.expressions or { } -- in expressions
-local expressions = xml.expressions
-
-xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
-local finalizers = xml.finalizers
-
-xml.specialhandler = xml.specialhandler or { }
-local specialhandler = xml.specialhandler
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
-finalizers.xml = finalizers.xml or { }
-finalizers.tex = finalizers.tex or { }
-
-local function fallback (t, name)
- local fn = finalizers[name]
- if fn then
- t[name] = fn
- else
- report_lpath("unknown sub finalizer '%s'",tostring(name))
- fn = function() end
- end
- return fn
-end
-
-setmetatableindex(finalizers.xml, fallback)
-setmetatableindex(finalizers.tex, fallback)
-
-xml.defaultprotocol = "xml"
-
--- as xsl does not follow xpath completely here we will also
--- be more liberal especially with regards to the use of | and
--- the rootpath:
---
--- test : all 'test' under current
--- /test : 'test' relative to current
--- a|b|c : set of names
--- (a|b|c) : idem
--- ! : not
---
--- after all, we're not doing transformations but filtering. in
--- addition we provide filter functions (last bit)
---
--- todo: optimizer
---
--- .. : parent
--- * : all kids
--- / : anchor here
--- // : /**/
--- ** : all in between
---
--- so far we had (more practical as we don't transform)
---
--- {/test} : kids 'test' under current node
--- {test} : any kid with tag 'test'
--- {//test} : same as above
-
--- evaluator (needs to be redone, for the moment copied)
-
--- todo: apply_axis(list,notable) and collection vs single
-
-local apply_axis = { }
-
-apply_axis['root'] = function(list)
- local collected = { }
- for l=1,#list do
- local ll = list[l]
- local rt = ll
- while ll do
- ll = ll.__p__
- if ll then
- rt = ll
- end
- end
- collected[l] = rt
- end
- return collected
-end
-
-apply_axis['self'] = function(list)
- return list
-end
-
-apply_axis['child'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local dt = ll.dt
- if dt then -- weird that this is needed
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- end
- end
- ll.en = en
- end
- end
- return collected
+if not modules then modules={} end modules ['lxml-lpt']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
+local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
+local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
+local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
+local report_lpath=logs.reporter("xml","lpath")
+local xml=xml
+local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
+local lpathcached=0 function xml.lpathcached() return lpathcached end
+xml.functions=xml.functions or {}
+local functions=xml.functions
+xml.expressions=xml.expressions or {}
+local expressions=xml.expressions
+xml.finalizers=xml.finalizers or {}
+local finalizers=xml.finalizers
+xml.specialhandler=xml.specialhandler or {}
+local specialhandler=xml.specialhandler
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+finalizers.xml=finalizers.xml or {}
+finalizers.tex=finalizers.tex or {}
+local function fallback (t,name)
+ local fn=finalizers[name]
+ if fn then
+ t[name]=fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn=function() end
+ end
+ return fn
+end
+setmetatableindex(finalizers.xml,fallback)
+setmetatableindex(finalizers.tex,fallback)
+xml.defaultprotocol="xml"
+local apply_axis={}
+apply_axis['root']=function(list)
+ local collected={}
+ for l=1,#list do
+ local ll=list[l]
+ local rt=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ rt=ll
+ end
+ end
+ collected[l]=rt
+ end
+ return collected
+end
+apply_axis['self']=function(list)
+ return list
+end
+apply_axis['child']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local dt=ll.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ end
+ end
+ ll.en=en
+ end
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-
-apply_axis['descendant'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- c = collect(list[l],collected,c)
- end
- return collected
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ c=collect(list[l],collected,c)
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-apply_axis['descendant-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- if ll.special ~= true then -- catch double root
- c = c + 1
- collected[c] = ll
- end
- c = collect(ll,collected,c)
- end
- return collected
-end
-
-apply_axis['ancestor'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['ancestor-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- c = c + 1
- collected[c] = ll
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['parent'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local pl = list[l].__p__
- if pl then
- c = c + 1
- collected[c] = pl
- end
- end
- return collected
-end
-
-apply_axis['attribute'] = function(list)
- return { }
-end
-
-apply_axis['namespace'] = function(list)
- return { }
-end
-
-apply_axis['following'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['preceding'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['following-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni+1,#d do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['preceding-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=1,ll.ni-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['reverse-sibling'] = function(list) -- reverse preceding
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
-apply_axis['auto-descendant'] = apply_axis['descendant']
-apply_axis['auto-child'] = apply_axis['child']
-apply_axis['auto-self'] = apply_axis['self']
-apply_axis['initial-child'] = apply_axis['child']
-
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ if ll.special~=true then
+ c=c+1
+ collected[c]=ll
+ end
+ c=collect(ll,collected,c)
+ end
+ return collected
+end
+apply_axis['ancestor']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['ancestor-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ c=c+1
+ collected[c]=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['parent']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local pl=list[l].__p__
+ if pl then
+ c=c+1
+ collected[c]=pl
+ end
+ end
+ return collected
+end
+apply_axis['attribute']=function(list)
+ return {}
+end
+apply_axis['namespace']=function(list)
+ return {}
+end
+apply_axis['following']=function(list)
+ return {}
+end
+apply_axis['preceding']=function(list)
+ return {}
+end
+apply_axis['following-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni+1,#d do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['preceding-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=1,ll.ni-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['reverse-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
+apply_axis['auto-descendant']=apply_axis['descendant']
+apply_axis['auto-child']=apply_axis['child']
+apply_axis['auto-self']=apply_axis['self']
+apply_axis['initial-child']=apply_axis['child']
local function apply_nodes(list,directive,nodes)
- -- todo: nodes[1] etc ... negated node name in set ... when needed
- -- ... currently ignored
- local maxn = #nodes
- if maxn == 3 then --optimized loop
- local nns, ntg = nodes[2], nodes[3]
- if not nns and not ntg then -- wildcard
- if directive then
- return list
- else
- return { }
- end
- else
- local collected, c, m, p = { }, 0, 0, nil
- if not nns then -- only check tag
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- if directive then
- if ntg == ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif ntg ~= ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- elseif not ntg then -- only check namespace
- for l=1,#list do
- local ll = list[l]
- local lns = ll.rn or ll.ns
- if lns then
- if directive then
- if lns == nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif lns ~= nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- else -- check both
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = ltg == ntg and lns == nns
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- end
- return collected
- end
+ local maxn=#nodes
+ if maxn==3 then
+ local nns,ntg=nodes[2],nodes[3]
+ if not nns and not ntg then
+ if directive then
+ return list
+ else
+ return {}
+ end
else
- local collected, c, m, p = { }, 0, 0, nil
+ local collected,c,m,p={},0,0,nil
+ if not nns then
for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = false
- for n=1,maxn,3 do
- local nns, ntg = nodes[n+1], nodes[n+2]
- ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
- if ok then
- break
- end
- end
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- return collected
- end
-end
-
-local quit_expression = false
-
-local function apply_expression(list,expression,order)
- local collected, c = { }, 0
- quit_expression = false
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ if directive then
+ if ntg==ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif ntg~=ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ elseif not ntg then
+ for l=1,#list do
+ local ll=list[l]
+ local lns=ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns==nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif lns~=nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ else
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=ltg==ntg and lns==nns
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected,c,m,p={},0,0,nil
for l=1,#list do
- local ll = list[l]
- if expression(list,ll,l,order) then -- nasty, order is only valid when n=1
- c = c + 1
- collected[c] = ll
- end
- if quit_expression then
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=false
+ for n=1,maxn,3 do
+ local nns,ntg=nodes[n+1],nodes[n+2]
+ ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
+ if ok then
break
+ end
+ end
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
end
+ end
end
return collected
+ end
end
-
-local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
-
-local spaces = S(" \n\r\t\f")^0
-local lp_space = S(" \n\r\t\f")
-local lp_any = P(1)
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_or = P("|") / " or "
-local lp_and = P("&") / " and "
-
-local lp_builtin = P (
- P("text") / "(ll.dt[1] or '')" + -- fragile
- P("content") / "ll.dt" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
- P("position") / "l" + -- is element in finalizer
- P("firstindex") / "1" +
- P("lastindex") / "(#ll.__p__.dt or 1)" +
- P("firstelement") / "1" +
- P("lastelement") / "(ll.__p__.en or 1)" +
- P("first") / "1" +
- P("last") / "#list" +
- P("rootposition") / "order" +
- P("order") / "order" +
- P("element") / "(ll.ei or 1)" +
- P("index") / "(ll.ni or 1)" +
- P("match") / "(ll.mi or 1)" +
- -- P("namespace") / "ll.ns" +
- P("ns") / "ll.ns"
- ) * ((spaces * P("(") * spaces * P(")"))/"")
-
--- for the moment we keep namespaces with attributes
-
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
-local lp_fastpos = lp_fastpos_n + lp_fastpos_p
-local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
-
-local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
- if expressions[t] then
- return "expr." .. t .. "("
- else
- return "expr.error("
- end
-end
-
-local lparent = P("(")
-local rparent = P(")")
-local noparent = 1 - (lparent+rparent)
-local nested = P{lparent * (noparent + V(1))^0 * rparent}
-local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
-
-local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
-local lp_number = S("+-") * R("09")^1
-local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
-local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
-
+local quit_expression=false
+local function apply_expression(list,expression,order)
+ local collected,c={},0
+ quit_expression=false
+ for l=1,#list do
+ local ll=list[l]
+ if expression(list,ll,l,order) then
+ c=c+1
+ collected[c]=ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
+local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
+local spaces=S(" \n\r\t\f")^0
+local lp_space=S(" \n\r\t\f")
+local lp_any=P(1)
+local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
+local lp_doequal=P("=")/"=="
+local lp_or=P("|")/" or "
+local lp_and=P("&")/" and "
+local lp_builtin=P (
+ P("text")/"(ll.dt[1] or '')"+
+ P("content")/"ll.dt"+
+ P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
+ P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
+ P("ns")/"ll.ns"
+ )*((spaces*P("(")*spaces*P(")"))/"")
+local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
+lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos=lp_fastpos_n+lp_fastpos_p
+local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
+local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
+local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
+ if expressions[t] then
+ return "expr."..t.."("
+ else
+ return "expr.error("
+ end
+end
+local lparent=P("(")
+local rparent=P(")")
+local noparent=1-(lparent+rparent)
+local nested=P{lparent*(noparent+V(1))^0*rparent}
+local value=P(lparent*C((noparent+nested)^0)*rparent)
+local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
+local lp_number=S("+-")*R("09")^1
+local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
+local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
local cleaner
-
-local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
- if expressions[t] then
- s = s and s ~= "" and lpegmatch(cleaner,s)
- if s and s ~= "" then
- return "expr." .. t .. "(ll," .. s ..")"
- else
- return "expr." .. t .. "(ll)"
- end
+local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
+ if expressions[t] then
+ s=s and s~="" and lpegmatch(cleaner,s)
+ if s and s~="" then
+ return "expr."..t.."(ll,"..s..")"
else
- return "expr.error(" .. t .. ")"
+ return "expr."..t.."(ll)"
end
+ else
+ return "expr.error("..t..")"
+ end
end
-
-local content =
- lp_builtin +
- lp_attribute +
- lp_special +
- lp_noequal + lp_doequal +
- lp_or + lp_and +
- lp_reserved +
- lp_lua_function + lp_function +
- lp_content + -- too fragile
- lp_child +
- lp_any
-
-local converter = Cs (
- lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
+local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
+ lp_child+lp_any
+local converter=Cs (
+ lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
)
-
-cleaner = Cs ( (
- lp_reserved +
- lp_number +
- lp_string +
-1 )^1 )
-
-
-
-local template_e = [[
+cleaner=Cs ((
+ lp_reserved+lp_number+lp_string+1 )^1 )
+local template_e=[[
local expr = xml.expressions
return function(list,ll,l,order)
return %s
end
]]
-
-local template_f_y = [[
+local template_f_y=[[
local finalizer = xml.finalizers['%s']['%s']
return function(collection)
return finalizer(collection,%s)
end
]]
-
-local template_f_n = [[
+local template_f_n=[[
return xml.finalizers['%s']['%s']
]]
-
---
-
-local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
-local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
-local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
-local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
-local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
-local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
-local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
-local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
-local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
-local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
-local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
-local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
-local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
-local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
-local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
-
-local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
-local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
-local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
-local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
-
-local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
-
-local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
-
-local skip = { }
-
+local register_self={ kind="axis",axis="self" }
+local register_parent={ kind="axis",axis="parent" }
+local register_descendant={ kind="axis",axis="descendant" }
+local register_child={ kind="axis",axis="child" }
+local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
+local register_root={ kind="axis",axis="root" }
+local register_ancestor={ kind="axis",axis="ancestor" }
+local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
+local register_attribute={ kind="axis",axis="attribute" }
+local register_namespace={ kind="axis",axis="namespace" }
+local register_following={ kind="axis",axis="following" }
+local register_following_sibling={ kind="axis",axis="following-sibling" }
+local register_preceding={ kind="axis",axis="preceding" }
+local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
+local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
+local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
+local register_auto_descendant={ kind="axis",axis="auto-descendant" }
+local register_auto_self={ kind="axis",axis="auto-self" }
+local register_auto_child={ kind="axis",axis="auto-child" }
+local register_initial_child={ kind="axis",axis="initial-child" }
+local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
+local skip={}
local function errorrunner_e(str,cnv)
- if not skip[str] then
- report_lpath("error in expression: %s => %s",str,cnv)
- skip[str] = cnv or str
- end
- return false
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str]=cnv or str
+ end
+ return false
end
local function errorrunner_f(str,arg)
- report_lpath("error in finalizer: %s(%s)",str,arg or "")
- return false
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
end
-
local function register_nodes(nodetest,nodes)
- return { kind = "nodes", nodetest = nodetest, nodes = nodes }
+ return { kind="nodes",nodetest=nodetest,nodes=nodes }
end
-
local function register_expression(expression)
- local converted = lpegmatch(converter,expression)
- local runner = loadstring(format(template_e,converted))
- runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
- return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
+ local converted=lpegmatch(converter,expression)
+ local runner=load(format(template_e,converted))
+ runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind="expression",expression=expression,converted=converted,evaluator=runner }
end
-
local function register_finalizer(protocol,name,arguments)
- local runner
- if arguments and arguments ~= "" then
- runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
- else
- runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name))
- end
- runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
- return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
-end
-
-local expression = P { "ex",
- ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
- sq = "'" * (1 - S("'"))^0 * "'",
- dq = '"' * (1 - S('"'))^0 * '"',
+ local runner
+ if arguments and arguments~="" then
+ runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
+end
+local expression=P { "ex",
+ ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
+ sq="'"*(1-S("'"))^0*"'",
+ dq='"'*(1-S('"'))^0*'"',
}
-
-local arguments = P { "ar",
- ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
- nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
- sq = P("'") * (1 - P("'"))^0 * P("'"),
- dq = P('"') * (1 - P('"'))^0 * P('"'),
+local arguments=P { "ar",
+ ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
+ nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
+ sq=P("'")*(1-P("'"))^0*P("'"),
+ dq=P('"')*(1-P('"'))^0*P('"'),
}
-
--- todo: better arg parser
-
local function register_error(str)
- return { kind = "error", error = format("unparsed: %s",str) }
-end
-
--- there is a difference in * and /*/ and so we need to catch a few special cases
-
-local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
-local special_2 = P("/") * Cc(register_auto_self)
-local special_3 = P("") * Cc(register_auto_self)
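-- a sketch of the special cases caught here, assuming xml.convert and a
-- made-up tree: a bare "*" maps to auto-descendant over all nodes, while
-- a bare "/" (or an empty pattern) maps to the current node itself
local root = xml.convert("<a><b><c/></b></a>")
xml.filter(root,"*") -- presumably every descendant element
xml.filter(root,"/") -- presumably just the current (root) node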
-
-local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
-local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
-
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
-
- patterns = spaces * V("protocol") * spaces * (
- ( V("special") * spaces * P(-1) ) +
- ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
- ),
-
- protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
-
- -- the / is needed for // as descendant or self is somewhat special
- -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
- step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
-
- axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
- V("descendant_or_self") + V("following_sibling") + V("following") +
- V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
- #(1-P(-1)) * Cc(register_auto_child),
-
- special = special_1 + special_2 + special_3,
-
- initial = (P("/") * spaces * Cc(register_initial_child))^-1,
-
- error = (P(1)^1) / register_error,
-
- shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
-
- shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
-
- s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * no_nextcolon * Cc(register_child ),
- s_parent = P("..") * Cc(register_parent ),
- s_self = P("." ) * Cc(register_self ),
- s_root = P("^^") * Cc(register_root ),
- s_ancestor = P("^") * Cc(register_ancestor ),
-
- descendant = P("descendant::") * Cc(register_descendant ),
- child = P("child::") * Cc(register_child ),
- parent = P("parent::") * Cc(register_parent ),
- self = P("self::") * Cc(register_self ),
- root = P('root::') * Cc(register_root ),
- ancestor = P('ancestor::') * Cc(register_ancestor ),
- descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
- ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
- -- attribute = P('attribute::') * Cc(register_attribute ),
- -- namespace = P('namespace::') * Cc(register_namespace ),
- following = P('following::') * Cc(register_following ),
- following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
- preceding = P('preceding::') * Cc(register_preceding ),
- preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
- reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
-
- nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
-
- expressions = expression / register_expression,
-
- letters = R("az")^1,
- name = (1-S("/[]()|:*!"))^1, -- make inline
- negate = P("!") * Cc(false),
-
- nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
- nodetest = V("negate") + Cc(true),
- nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
- nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
-
- finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
-
+ return { kind="error",error=format("unparsed: %s",str) }
+end
+local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
+local special_2=P("/")*Cc(register_auto_self)
+local special_3=P("")*Cc(register_auto_self)
+local no_nextcolon=P(-1)+#(1-P(":"))
+local no_nextlparent=P(-1)+#(1-P("("))
+local pathparser=Ct { "patterns",
+ patterns=spaces*V("protocol")*spaces*(
+ (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
+ ),
+ protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
+ step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
+ axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
+ special=special_1+special_2+special_3,
+ initial=(P("/")*spaces*Cc(register_initial_child))^-1,
+ error=(P(1)^1)/register_error,
+ shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
+ shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
+ s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
+ s_descendant=P("**")*Cc(register_descendant),
+ s_child=P("*")*no_nextcolon*Cc(register_child ),
+ s_parent=P("..")*Cc(register_parent ),
+ s_self=P("." )*Cc(register_self ),
+ s_root=P("^^")*Cc(register_root ),
+ s_ancestor=P("^")*Cc(register_ancestor ),
+ descendant=P("descendant::")*Cc(register_descendant ),
+ child=P("child::")*Cc(register_child ),
+ parent=P("parent::")*Cc(register_parent ),
+ self=P("self::")*Cc(register_self ),
+ root=P('root::')*Cc(register_root ),
+ ancestor=P('ancestor::')*Cc(register_ancestor ),
+ descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
+ ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
+ following=P('following::')*Cc(register_following ),
+ following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
+ preceding=P('preceding::')*Cc(register_preceding ),
+ preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
+ reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
+ nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
+ expressions=expression/register_expression,
+ letters=R("az")^1,
+ name=(1-S("/[]()|:*!"))^1,
+ negate=P("!")*Cc(false),
+ nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
+ nodetest=V("negate")+Cc(true),
+ nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
+ wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
+ nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
+ finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
}
-
-xmlpatterns.pathparser = pathparser
-
-local cache = { }
-
+xmlpatterns.pathparser=pathparser
+local cache={}
local function nodesettostring(set,nodetest)
- local t = { }
- for i=1,#set,3 do
- local directive, ns, tg = set[i], set[i+1], set[i+2]
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
- end
- if nodetest == false then
- return format("not(%s)",concat(t,"|"))
- else
- return concat(t,"|")
- end
+ local t={}
+ for i=1,#set,3 do
+ local directive,ns,tg=set[i],set[i+1],set[i+2]
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i]=(directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest==false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
end
-
local function tagstostring(list)
- if #list == 0 then
- return "no elements"
- else
- local t = { }
- for i=1, #list do
- local li = list[i]
- local ns, tg = li.ns, li.tg
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- end
- return concat(t," ")
- end
-end
-
-xml.nodesettostring = nodesettostring
-
-local lpath -- we have a harmless kind of circular reference
-
-local lshowoptions = { functions = false }
-
+ if #list==0 then
+ return "no elements"
+ else
+ local t={}
+ for i=1,#list do
+ local li=list[i]
+ local ns,tg=li.ns,li.tg
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+xml.nodesettostring=nodesettostring
+local lpath
+local lshowoptions={ functions=false }
local function lshow(parsed)
- if type(parsed) == "string" then
- parsed = lpath(parsed)
- end
- report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ if type(parsed)=="string" then
+ parsed=lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
end
-
-xml.lshow = lshow
-
+xml.lshow=lshow
local function add_comment(p,str)
- local pc = p.comment
- if not pc then
- p.comment = { str }
- else
- pc[#pc+1] = str
- end
-end
-
-lpath = function (pattern) -- the gain of caching is rather minimal
- lpathcalls = lpathcalls + 1
- if type(pattern) == "table" then
- return pattern
+ local pc=p.comment
+ if not pc then
+ p.comment={ str }
+ else
+ pc[#pc+1]=str
+ end
+end
+lpath=function (pattern)
+ lpathcalls=lpathcalls+1
+ if type(pattern)=="table" then
+ return pattern
+ else
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcached=lpathcached+1
else
- local parsed = cache[pattern]
- if parsed then
- lpathcached = lpathcached + 1
+ parsed=lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern=pattern
+ local np=#parsed
+ if np==0 then
+ parsed={ pattern=pattern,register_self,state="parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
else
- parsed = lpegmatch(pathparser,pattern)
- if parsed then
- parsed.pattern = pattern
- local np = #parsed
- if np == 0 then
- parsed = { pattern = pattern, register_self, state = "parsing error" }
- report_lpath("parsing error in '%s'",pattern)
- lshow(parsed)
- else
- -- we could have done this with a more complex parser but this
- -- is cleaner
- local pi = parsed[1]
- if pi.axis == "auto-child" then
- if false then
- add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
- parsed[1] = register_auto_descendant_or_self
- else
- add_comment(parsed, "auto-child replaced by auto-descendant")
- parsed[1] = register_auto_descendant
- end
- elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
- add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
- remove(parsed,1)
- end
- local np = #parsed -- can have changed
- if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
- end
- end
+ local pi=parsed[1]
+ if pi.axis=="auto-child" then
+ if false then
+ add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
+ parsed[1]=register_auto_descendant_or_self
else
- parsed = { pattern = pattern }
- end
- cache[pattern] = parsed
- if trace_lparse and not trace_lprofile then
- lshow(parsed)
- end
- end
- return parsed
+ add_comment(parsed,"auto-child replaced by auto-descendant")
+ parsed[1]=register_auto_descendant
+ end
+ elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
+ add_comment(parsed,"initial-child removed")
+ remove(parsed,1)
+ end
+ local np=#parsed
+ if np>1 then
+ local pnp=parsed[np]
+ if pnp.kind=="nodes" and pnp.nodetest==true then
+ local nodes=pnp.nodes
+ if nodes[1]==true and nodes[2]==false and nodes[3]==false then
+ add_comment(parsed,"redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed={ pattern=pattern }
+ end
+ cache[pattern]=parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
end
+ return parsed
+ end
end
-
-xml.lpath = lpath
-
--- we can move all calls inline and then merge the trace back
--- technically we can combine axis and the next nodes which is
--- what we did before, but this is a bit cleaner (though slower too);
--- interestingly, it's not that much faster when we
--- go inline
---
--- beware: we need to return a collection even when we filter
--- else the (simple) cache gets messed up
-
--- caching found lookups doesn't save that much (max .1 sec on an 8 sec run)
--- and it also messes up finalizers
-
--- watch out: when there is a finalizer, it's always called as there
--- can be cases that a finalizer returns (or does) something in case
--- there is no match; an example of this is count()
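-- a small sketch of the finalizer remark above, assuming xml.convert and a
-- count() finalizer registered for the xml protocol: even when the path
-- matches nothing the trailing finalizer still runs, so this should yield
-- 0 rather than nil
local root = xml.convert("<doc><item/></doc>")
local n = xml.filter(root,"nowhere/count()") -- presumably 0, not nil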
-
-local profiled = { } xml.profiled = profiled
-
+xml.lpath=lpath
+local profiled={} xml.profiled=profiled
local function profiled_apply(list,parsed,nofparsed,order)
- local p = profiled[parsed.pattern]
- if p then
- p.tested = p.tested + 1
- else
- p = { tested = 1, matched = 0, finalized = 0 }
- profiled[parsed.pattern] = p
- end
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected) -- no check on # here
- p.matched = p.matched + 1
- p.finalized = p.finalized + 1
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- p.finalized = p.finalized + 1
- return collected
- end
- return nil
- end
- end
- if collected then
- p.matched = p.matched + 1
+ local p=profiled[parsed.pattern]
+ if p then
+ p.tested=p.tested+1
+ else
+ p={ tested=1,matched=0,finalized=0 }
+ profiled[parsed.pattern]=p
+ end
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ p.matched=p.matched+1
+ p.finalized=p.finalized+1
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ p.finalized=p.finalized+1
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ if collected then
+ p.matched=p.matched+1
+ end
+ return collected
end
-
local function traced_apply(list,parsed,nofparsed,order)
- if trace_lparse then
- lshow(parsed)
- end
- report_lpath("collecting: %s",parsed.pattern)
- report_lpath("root tags : %s",tagstostring(list))
- report_lpath("order : %s",order or "unset")
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
- return collected
- end
- return nil
- end
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ return collected
end
-
local function normal_apply(list,parsed,nofparsed,order)
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- local axis = pi.axis
- if axis ~= "self" then
- collected = apply_axis[axis](collected)
- end
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- return pi.finalizer(collected)
- end
- if not collected or #collected == 0 then
- local pf = i < nofparsed and parsed[nofparsed].finalizer
- if pf then
- return pf(collected) -- can be anything
- end
- return nil
- end
- end
- return collected
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ local axis=pi.axis
+ if axis~="self" then
+ collected=apply_axis[axis](collected)
+ end
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected==0 then
+ local pf=i<nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected)
+ end
+ return nil
+ end
+ end
+ return collected
end
-
-
local function applylpath(list,pattern)
- if not list then
- return
- end
- local parsed = cache[pattern]
- if parsed then
- lpathcalls = lpathcalls + 1
- lpathcached = lpathcached + 1
- elseif type(pattern) == "table" then
- lpathcalls = lpathcalls + 1
- parsed = pattern
- else
- parsed = lpath(pattern) or pattern
- end
- if not parsed then
- return
- end
- local nofparsed = #parsed
- if nofparsed == 0 then
- return -- something is wrong
- end
- if not trace_lpath then
- return normal_apply ({ list },parsed,nofparsed,list.mi)
- elseif trace_lprofile then
- return profiled_apply({ list },parsed,nofparsed,list.mi)
- else
- return traced_apply ({ list },parsed,nofparsed,list.mi)
- end
-end
-
-xml.applylpath = applylpath -- takes a table as first argument, which is what xml.filter will do
-
---[[ldx--
-<p>This is the main filter function. It returns whatever is asked for.</p>
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return applylpath(root,pattern)
-end
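-- a brief sketch of "whatever is asked for", assuming xml.convert and a
-- made-up tree: a plain path yields a collection of elements, while a
-- trailing finalizer (here count(), assumed to be registered) yields
-- whatever that finalizer returns
local root    = xml.convert("<doc><p>one</p><p>two</p></doc>")
local matches = xml.filter(root,"p")         -- a table with the two p elements
local howmany = xml.filter(root,"p/count()") -- a number, here presumably 2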
-
--- internal (parsed)
-
-expressions.child = function(e,pattern)
- return applylpath(e,pattern) -- todo: cache
-end
-expressions.count = function(e,pattern) -- what if pattern == empty or nil
- local collected = applylpath(e,pattern) -- todo: cache
- return pattern and (collected and #collected) or 0
-end
-
--- external
-
-expressions.oneof = function(s,...) -- slow
- local t = {...} for i=1,#t do if s == t[i] then return true end end return false
-end
-expressions.error = function(str)
- xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
- return false
-end
-expressions.undefined = function(s)
- return s == nil
-end
-
-expressions.quit = function(s)
- if s or s == nil then
- quit_expression = true
- end
- return true
-end
-
-expressions.print = function(...)
- print(...)
- return true
-end
-
-expressions.contains = find
-expressions.find = find
-expressions.upper = upper
-expressions.lower = lower
-expressions.number = tonumber
-expressions.boolean = toboolean
-
+ if not list then
+ return
+ end
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcalls=lpathcalls+1
+ lpathcached=lpathcached+1
+ elseif type(pattern)=="table" then
+ lpathcalls=lpathcalls+1
+ parsed=pattern
+ else
+ parsed=lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed=#parsed
+ if nofparsed==0 then
+ return
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+xml.applylpath=applylpath
+function xml.filter(root,pattern)
+ return applylpath(root,pattern)
+end
+expressions.child=function(e,pattern)
+ return applylpath(e,pattern)
+end
+expressions.count=function(e,pattern)
+ local collected=applylpath(e,pattern)
+ return pattern and (collected and #collected) or 0
+end
+expressions.oneof=function(s,...)
+ for i=1,select("#",...) do
+ if s==select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+expressions.error=function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+expressions.undefined=function(s)
+ return s==nil
+end
+expressions.quit=function(s)
+ if s or s==nil then
+ quit_expression=true
+ end
+ return true
+end
+expressions.print=function(...)
+ print(...)
+ return true
+end
+expressions.contains=find
+expressions.find=find
+expressions.upper=upper
+expressions.lower=lower
+expressions.number=tonumber
+expressions.boolean=toboolean
function expressions.contains(str,pattern)
- local t = type(str)
- if t == "string" then
- if find(str,pattern) then
- return true
- end
- elseif t == "table" then
- for i=1,#str do
- local d = str[i]
- if type(d) == "string" and find(d,pattern) then
- return true
- end
- end
+ local t=type(str)
+ if t=="string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t=="table" then
+ for i=1,#str do
+ local d=str[i]
+ if type(d)=="string" and find(d,pattern) then
+ return true
+ end
end
- return false
+ end
+ return false
end
-
--- user interface
-
local function traverse(root,pattern,handle)
- report_lpath("use 'xml.selection' instead for '%s'",pattern)
- local collected = applylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- handle(r,r.dt,e.ni)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local r=e.__p__
+ handle(r,r.dt,e.ni)
end
+ end
end
-
local function selection(root,pattern,handle)
- local collected = applylpath(root,pattern)
- if collected then
- if handle then
- for c=1,#collected do
- handle(collected[c])
- end
- else
- return collected
- end
- end
-end
-
-xml.traverse = traverse -- old method, r, d, k
-xml.selection = selection -- new method, simple handle
-
-
--- generic function finalizer (independent namespace)
-
-local function dofunction(collected,fnc)
- if collected then
- local f = functions[fnc]
- if f then
- for c=1,#collected do
- f(collected[c])
- end
- else
- report_lpath("unknown function '%s'",fnc)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
end
+ end
end
-
-finalizers.xml["function"] = dofunction
-finalizers.tex["function"] = dofunction
-
--- functions
-
-expressions.text = function(e,n)
- local rdt = e.__p__.dt
- return (rdt and rdt[n]) or ""
-end
-
-expressions.name = function(e,n) -- ns + tg
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = type(e) == "table" and e
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
+xml.traverse=traverse
+xml.selection=selection
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f=functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- if found then
- local ns, tg = found.rn or found.ns or "", found.tg
- if ns ~= "" then
- return ns .. ":" .. tg
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+finalizers.xml["function"]=dofunction
+finalizers.tex["function"]=dofunction
+expressions.text=function(e,n)
+ local rdt=e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+expressions.name=function(e,n)
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=type(e)=="table" and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
else
- return tg
+ n=n-1
end
- else
- return ""
+ end
end
-end
-
-expressions.tag = function(e,n) -- only tg
- if not e then
- return ""
+ end
+ if found then
+ local ns,tg=found.rn or found.ns or "",found.tg
+ if ns~="" then
+ return ns..":"..tg
else
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = (type(e) == "table") and e -- seems to fail
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- return (found and found.tg) or ""
+ return tg
end
+ else
+ return ""
+ end
end
-
---[[ldx--
-<p>Often using an iterator looks nicer in the code than passing handler
-functions. The <l n='lua'/> book describes how to use coroutines for that
-purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
-code like:</p>
-
-<typing>
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-</typing>
---ldx]]--
-
-local wrap, yield = coroutine.wrap, coroutine.yield
-
-function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
- else
- return wrap(function() for c=1,#collected do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
+expressions.tag=function(e,n)
+ if not e then
+ return ""
+ else
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=(type(e)=="table") and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+local dummy=function() end
+function xml.elements(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ end
+end
+function xml.collected(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ return collected[c]
+ end
end
- return wrap(function() end)
-end
-
-function xml.collected(root,pattern,reverse) -- e
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
- else
- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
- end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ return collected[c]
+ end
end
- return wrap(function() end)
+ end
end
-
--- handy
-
function xml.inspect(collection,pattern)
- pattern = pattern or "."
- for e in xml.collected(collection,pattern or ".") do
- report_lpath("pattern %q\n\n%s\n",pattern,xml.tostring(e))
- end
+ pattern=pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+local function split(e)
+ local dt=e.dt
+ if dt then
+ for i=1,#dt do
+ local dti=dt[i]
+ if type(dti)=="string" then
+ dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti=gsub(dti,"[\n\r]+","\n\n")
+ dt[i]=dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
end
@@ -9520,102 +10059,68 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-mis'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local xml, lpeg, string = xml, lpeg, string
+package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
-local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub, match = string.format, string.gsub, string.match
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
---[[ldx--
-<p>The following helper functions best belong to the <t>lxml-ini</t>
-module. Some are here because we need them in the <t>mk</t>
-document and other manuals, others came up when playing with
-this module. Since this module is also used in <l n='mtxrun'/> we've
-put them here instead of loading more modules there than needed.</p>
---ldx]]--
-
-local function xmlgsub(t,old,new) -- will be replaced
- local dt = t.dt
- if dt then
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "string" then
- dt[k] = gsub(v,old,new)
- else
- xmlgsub(v,old,new)
- end
- end
- end
-end
+-- original size: 3684, stripped down to: 1957
-
-function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
- if d and k then
- local dkm = d[k-1]
- if dkm and type(dkm) == "string" then
- local s = match(dkm,"\n(%s+)")
- xmlgsub(dk,"\n"..rep(" ",#s),"\n")
- end
- end
-end
-
-
-
--- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
---
--- 1021:0335:0287:0247
-
--- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
---
--- 1559:0257:0288:0190 (last one suggested by roberto)
-
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
-local normal = (1 - S("<&>"))^0
-local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
-local escaped = Cs(normal * (special * normal)^0)
-
--- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
-
-local normal = (1 - S"&")^0
-local special = P("&lt;")/"<" + P("&gt;")/">" + P("&amp;")/"&"
-local unescaped = Cs(normal * (special * normal)^0)
-
--- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
-
-local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-
-xmlpatterns.escaped = escaped
-xmlpatterns.unescaped = unescaped
-xmlpatterns.cleansed = cleansed
-
-function xml.escaped (str) return lpegmatch(escaped,str) end
+if not modules then modules={} end modules ['lxml-mis']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local xml,lpeg,string=xml,lpeg,string
+local concat=table.concat
+local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
+local format,gsub,match=string.format,string.gsub,string.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+local function xmlgsub(t,old,new)
+ local dt=t.dt
+ if dt then
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="string" then
+ dt[k]=gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+function xml.stripleadingspaces(dk,d,k)
+ if d and k then
+ local dkm=d[k-1]
+ if dkm and type(dkm)=="string" then
+ local s=match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
+local normal=(1-S("<&>"))^0
+local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
+local escaped=Cs(normal*(special*normal)^0)
+local normal=(1-S"&")^0
+local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
+local unescaped=Cs(normal*(special*normal)^0)
+local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
+xmlpatterns.escaped=escaped
+xmlpatterns.unescaped=unescaped
+xmlpatterns.cleansed=cleansed
+function xml.escaped (str) return lpegmatch(escaped,str) end
function xml.unescaped(str) return lpegmatch(unescaped,str) end
-function xml.cleansed (str) return lpegmatch(cleansed,str) end
-
--- this might move
-
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
function xml.fillin(root,pattern,str,check)
- local e = xml.first(root,pattern)
- if e then
- local n = #e.dt
- if not check or n == 0 or (n == 1 and e.dt[1] == "") then
- e.dt = { str }
- end
+ local e=xml.first(root,pattern)
+ if e then
+ local n=#e.dt
+ if not check or n==0 or (n==1 and e.dt[1]=="") then
+ e.dt={ str }
end
+ end
end
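-- quick sketches for the string helpers and xml.fillin above (inputs and the
-- lpath pattern are made up; x is any converted tree):
--
-- xml.escaped  ("a < b & c")      -- "a &lt; b &amp; c"
-- xml.unescaped("a &lt; b")       -- "a < b"
-- xml.cleansed ("a <b>c</b> d")   -- "a c d" (tags removed, text kept)
-- xml.fillin(x,"/metadata/title","no title",true) -- fills only when the element is empty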
@@ -9623,765 +10128,692 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-aux'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- not all functions here make sense anymore but we keep them for
--- compatibility reasons
-
-local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
-local report_xml = logs.reporter("xml")
-
-local xml = xml
-
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
-local xmlinheritedconvert = xml.inheritedconvert
-local xmlapplylpath = xml.applylpath
-local xmlfilter = xml.filter
-
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
-local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
-local utfbyte = utf.byte
+-- original size: 23804, stripped down to: 16817
+if not modules then modules={} end modules ['lxml-aux']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local report_xml=logs.reporter("xml")
+local xml=xml
+local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlinheritedconvert=xml.inheritedconvert
+local xmlapplylpath=xml.applylpath
+local xmlfilter=xml.filter
+local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
+local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
+local utfbyte=utf.byte
local function report(what,pattern,c,e)
- report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
-
local function withelements(e,handle,depth)
- if e and handle then
- local edt = e.dt
- if edt then
- depth = depth or 0
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- handle(e,depth)
- withelements(e,handle,depth+1)
- end
- end
+ if e and handle then
+ local edt=e.dt
+ if edt then
+ depth=depth or 0
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
end
+ end
end
+ end
end
-
-xml.withelements = withelements
-
-function xml.withelement(e,n,handle) -- slow
- if e and n ~= 0 and handle then
- local edt = e.dt
- if edt then
- if n > 0 then
- for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == 1 then
- handle(ei)
- return
- else
- n = n - 1
- end
- end
- end
- elseif n < 0 then
- for i=#edt,1,-1 do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == -1 then
- handle(ei)
- return
- else
- n = n + 1
- end
- end
- end
+xml.withelements=withelements
+function xml.withelement(e,n,handle)
+ if e and n~=0 and handle then
+ local edt=e.dt
+ if edt then
+ if n>0 then
+ for i=1,#edt do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==1 then
+ handle(ei)
+ return
+ else
+ n=n-1
end
+ end
end
+ elseif n<0 then
+ for i=#edt,1,-1 do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==-1 then
+ handle(ei)
+ return
+ else
+ n=n+1
+ end
+ end
+ end
+ end
end
+ end
end
-
function xml.each(root,pattern,handle,reverse)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
- end
- end
- return collected
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
+ return collected
+ end
end
-
function xml.processattributes(root,pattern,handle)
- local collected = xmlapplylpath(root,pattern)
- if collected and handle then
- for c=1,#collected do
- handle(collected[c].at)
- end
+ local collected=xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
end
- return collected
+ end
+ return collected
end
-
---[[ldx--
-<p>The following functions collect elements and texts.</p>
---ldx]]--
-
--- are these still needed -> lxml-cmp.lua
-
-function xml.collect(root, pattern)
- return xmlapplylpath(root,pattern)
+function xml.collect(root,pattern)
+ return xmlapplylpath(root,pattern)
end
-
-function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlapplylpath(root,pattern)
- if collected and flatten then
- local xmltostring = xml.tostring
- for c=1,#collected do
- collected[c] = xmltostring(collected[c].dt)
- end
+function xml.collecttexts(root,pattern,flatten)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring=xml.tostring
+ for c=1,#collected do
+ collected[c]=xmltostring(collected[c].dt)
end
- return collected or { }
+ end
+ return collected or {}
end
-
-function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace then
- t[n] = tg
- elseif ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
- end
- return t
+function xml.collect_tags(root,pattern,nonamespace)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ local t,n={},0
+ for c=1,#collected do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace then
+ t[n]=tg
+ elseif ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
end
+ return t
+ end
end
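-- sketch of the collectors above (document and pattern are illustrative):
--
-- local x   = xml.convert("<a><b>one</b><b>two</b></a>")
-- local els = xml.collect(x,"/a/b")            -- list of element tables (or nil)
-- local txt = xml.collecttexts(x,"/a/b",true)  -- texts of each match, flattened to strings
-- local tgs = xml.collect_tags(x,"/a/b",true)  -- tag names, namespaces dropped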
-
---[[ldx--
-<p>We've now arrived at the functions that manipulate the tree.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
local function redo_ni(d)
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "table" then
- dk.ni = k
- end
+ for k=1,#d do
+ local dk=d[k]
+ if type(dk)=="table" then
+ dk.ni=k
end
+ end
end
-
local function xmltoelement(whatever,root)
- if not whatever then
- return nil
- end
- local element
- if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root) -- beware, not really a root
- else
- element = whatever -- we assume a table
- end
- if element.error then
- return whatever -- string
- end
- if element then
- end
- return element
-end
-
-xml.toelement = xmltoelement
-
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever)=="string" then
+ element=xmlinheritedconvert(whatever,root)
+ else
+ element=whatever
+ end
+ if element.error then
+ return whatever
+ end
+ if element then
+ end
+ return element
+end
+xml.toelement=xmltoelement
local function copiedelement(element,newparent)
- if type(element) == "string" then
- return element
- else
- element = xmlcopy(element).dt
- if newparent and type(element) == "table" then
- element.__p__ = newparent
- end
- return element
+ if type(element)=="string" then
+ return element
+ else
+ element=xmlcopy(element).dt
+ if newparent and type(element)=="table" then
+ element.__p__=newparent
end
+ return element
+ end
end
-
function xml.delete(root,pattern)
- if not pattern or pattern == "" then
- local p = root.__p__
+ if not pattern or pattern=="" then
+ local p=root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d=p.dt
+ remove(d,root.ni)
+ redo_ni(d)
+ end
+ else
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
if p then
- if trace_manipulations then
- report('deleting',"--",c,root)
- end
- local d = p.dt
- remove(d,root.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- else
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
- end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- end
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d=p.dt
+ remove(d,e.ni)
+ redo_ni(d)
end
+ end
end
+ end
end
-
function xml.replace(root,pattern,whatever)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('replacing',pattern,c,e)
- end
- local d = p.dt
- d[e.ni] = copiedelement(element,p)
- redo_ni(d) -- probably not needed
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
end
+ local d=p.dt
+ d[e.ni]=copiedelement(element,p)
+ redo_ni(d)
+ end
end
+ end
end
-
local function wrap(e,wrapper)
- local t = {
- rn = e.rn,
- tg = e.tg,
- ns = e.ns,
- at = e.at,
- dt = e.dt,
- __p__ = e,
- }
- setmetatable(t,getmetatable(e))
- e.rn = wrapper.rn or e.rn or ""
- e.tg = wrapper.tg or e.tg or ""
- e.ns = wrapper.ns or e.ns or ""
- e.at = fastcopy(wrapper.at)
- e.dt = { t }
+ local t={
+ rn=e.rn,
+ tg=e.tg,
+ ns=e.ns,
+ at=e.at,
+ dt=e.dt,
+ __p__=e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn=wrapper.rn or e.rn or ""
+ e.tg=wrapper.tg or e.tg or ""
+ e.ns=wrapper.ns or e.ns or ""
+ e.at=fastcopy(wrapper.at)
+ e.dt={ t }
end
-
function xml.wrap(root,pattern,whatever)
- if whatever then
- local wrapper = xmltoelement(whatever,root)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if trace_manipulations then
- report('wrapping',pattern,c,e)
- end
- wrap(e,wrapper)
- end
+ if whatever then
+ local wrapper=xmltoelement(whatever,root)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
end
- else
- wrap(root,xmltoelement(pattern))
+ wrap(e,wrapper)
+ end
end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
end
-
local function inject_element(root,pattern,whatever,prepend)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function inject_e(e)
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
- end
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- inject_e(collected)
- else
- for c=1,#collected do
- inject_e(collected[c])
- end
- end
-end
-
-local function insert_element(root,pattern,whatever,before) -- todo: element as function
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function insert_e(e)
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- insert_e(collected)
- else
- for c=1,#collected do
- insert_e(collected[c])
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r=e.__p__
+ local d,k,rri=r.dt,e.ni,r.ri
+ local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be,af
+ local cp=copiedelement(element,e)
+ if prepend then
+ be,af=cp,edt
+ else
+ be,af=edt,cp
+ end
+ local bn=#be
+ for i=1,#af do
+ bn=bn+1
+ be[bn]=af[i]
+ end
+ if rri then
+ r.dt[rri].dt=be
+ else
+ d[k].dt=be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ elseif collected.tg then
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+local function insert_element(root,pattern,whatever,before)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r=e.__p__
+ local d,k=r.dt,e.ni
+ if not before then
+ k=k+1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ elseif collected.tg then
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
end
+ end
end
-
-xml.insert_element = insert_element
-xml.insertafter = insert_element
-xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
-xml.injectafter = inject_element
-xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-
+xml.insert_element=insert_element
+xml.insertafter=insert_element
+xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter=inject_element
+xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
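-- sibling insertion versus content injection (sketch; the "<c/>" string is
-- converted by xmltoelement, the pattern is illustrative):
--
-- xml.insertbefore(x,"/a/b","<c/>")  -- new sibling before each matched <b>
-- xml.insertafter (x,"/a/b","<c/>")  -- new sibling after each matched <b>
-- xml.injectafter (x,"/a/b","<c/>")  -- appended inside each matched <b>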
local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
- local collected = xmlapplylpath(xmldata,pattern)
- if collected then
- for c=1,#collected do
- local ek = collected[c]
- local name = nil
- local ekdt = ek.dt
- local ekat = ek.at
- local epdt = ek.__p__.dt
- if not attribute or attribute == "" then
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
- end
- if not name then
- for a in gmatch(attribute or "href","([^|]+)") do
- name = ekat[a]
- if name then break end
- end
- end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- elseif ekat["parse"] == "text" then
- -- for the moment hard coded
- epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
- else
- local xi = xmlinheritedconvert(data,xmldata)
- if not xi then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- else
- if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
- end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
- end
- end
+ pattern=pattern or 'include'
+ loaddata=loaddata or io.loaddata
+ local collected=xmlapplylpath(xmldata,pattern)
+ if collected then
+ for c=1,#collected do
+ local ek=collected[c]
+ local name=nil
+ local ekdt=ek.dt
+ local ekat=ek.at
+ local epdt=ek.__p__.dt
+ if not attribute or attribute=="" then
+ name=(type(ekdt)=="table" and ekdt[1]) or ekdt
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name=ekat[a]
+ if name then break end
+ end
+ end
+ local data=(name and name~="" and loaddata(name)) or ""
+ if data=="" then
+ epdt[ek.ni]=""
+ elseif ekat["parse"]=="text" then
+ epdt[ek.ni]=xml.escaped(data)
+ else
+ local xi=xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni]=""
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni]=xml.body(xi)
end
+ end
end
+ end
end
-
-xml.include = include
-
+xml.include=include
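-- resolving include elements in place (sketch; the element and attribute names
-- are whatever the document uses, io.loaddata is the default loader):
--
-- xml.include(x,"include","href",true,io.loaddata)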
local function stripelement(e,nolines,anywhere)
- local edt = e.dt
- if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- n = n + 1
- t[n] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- n = n + 1
- t[n] = str
- end
- end
- end
- e.dt = t
+ local edt=e.dt
+ if edt then
+ if anywhere then
+ local t,n={},0
+ for e=1,#edt do
+ local str=edt[e]
+ if type(str)~="string" then
+ n=n+1
+ t[n]=str
+ elseif str~="" then
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s*(.-)%s*$","%1")
+ if str~="" then
+ n=n+1
+ t[n]=str
+ end
+ end
+ end
+ e.dt=t
+ else
+ if #edt>0 then
+ local str=edt[1]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt,1)
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
- end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[nedt] = str
- end
- end
- end
- end
- end
- return e -- convenient
-end
-
-xml.stripelement = stripelement
-
-function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
- local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
- if collected then
- for i=1,#collected do
- stripelement(collected[i],nolines,anywhere)
- end
- end
-end
-
-local function renamespace(root, oldspace, newspace) -- fast variant
- local ndt = #root.dt
- for i=1,ndt or 0 do
- local e = root[i]
- if type(e) == "table" then
- if e.ns == oldspace then
- e.ns = newspace
- if e.rn then
- e.rn = newspace
- end
- end
- local edt = e.dt
- if edt then
- renamespace(edt, oldspace, newspace)
- end
- end
- end
-end
-
-xml.renamespace = renamespace
-
-function xml.remaptag(root, pattern, newtg)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].tg = newtg
- end
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s+","")
+ if str=="" then
+ remove(edt,1)
+ else
+ edt[1]=str
+ end
+ end
+ end
+ local nedt=#edt
+ if nedt>0 then
+ local str=edt[nedt]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt)
+ else
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"%s+$","")
+ if str=="" then
+ remove(edt)
+ else
+ edt[nedt]=str
+ end
+ end
+ end
+ end
+ end
+ return e
+end
+xml.stripelement=stripelement
+function xml.strip(root,pattern,nolines,anywhere)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
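-- whitespace stripping (sketch; pattern is illustrative):
--
-- xml.strip(x,"/a/b")       -- trim leading/trailing space of each matched element
-- xml.strip(x,"/a/b",true)  -- also collapse runs of whitespace to single spaces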
+local function renamespace(root,oldspace,newspace)
+ local ndt=#root.dt
+ for i=1,ndt or 0 do
+ local e=root[i]
+ if type(e)=="table" then
+ if e.ns==oldspace then
+ e.ns=newspace
+ if e.rn then
+ e.rn=newspace
+ end
+ end
+ local edt=e.dt
+ if edt then
+ renamespace(edt,oldspace,newspace)
+ end
+ end
+ end
+end
+xml.renamespace=renamespace
+function xml.remaptag(root,pattern,newtg)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg=newtg
end
+ end
end
-
-function xml.remapnamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].ns = newns
- end
+function xml.remapnamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns=newns
end
+ end
end
-
-function xml.checknamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if (not e.rn or e.rn == "") and e.ns == "" then
- e.rn = newns
- end
- end
+function xml.checknamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if (not e.rn or e.rn=="") and e.ns=="" then
+ e.rn=newns
+ end
end
+ end
end
-
-function xml.remapname(root, pattern, newtg, newns, newrn)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- e.tg, e.ns, e.rn = newtg, newns, newrn
- end
+function xml.remapname(root,pattern,newtg,newns,newrn)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ e.tg,e.ns,e.rn=newtg,newns,newrn
end
+ end
end
-
---[[ldx--
-<p>Helper (for q2p).</p>
---ldx]]--
-
function xml.cdatatotext(e)
- local dt = e.dt
- if #dt == 1 then
- local first = dt[1]
- if first.tg == "@cd@" then
- e.dt = first.dt
- end
+ local dt=e.dt
+ if #dt==1 then
+ local first=dt[1]
+ if first.tg=="@cd@" then
+ e.dt=first.dt
+ end
+ else
+ end
+end
+function xml.texttocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(dt)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+function xml.elementtocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(e)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
+local entities=characters and characters.entities or nil
+local builtinentities=xml.builtinentities
+function xml.addentitiesdoctype(root,option)
+ if not entities then
+ require("char-ent")
+ entities=characters.entities
+ end
+ if entities and root and root.tg=="@rt@" and root.statistics then
+ local list={}
+ local hexify=option=="hexadecimal"
+ for k,v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e=entities[k]
+ if not e then
+ e=format("[%s]",k)
+ elseif hexify then
+ e=format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1]=format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt=root.dt
+ local n=dt[1].tg=="@pi@" and 2 or 1
+ if #list>0 then
+ insert(dt,n,{ "\n" })
+ insert(dt,n,{
+ tg="@dt@",
+ dt={ format("Something [\n%s\n] ",concat(list)) },
+ ns="",
+ special=true,
+ })
+ insert(dt,n,{ "\n\n" })
else
- -- maybe option
- end
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.texttocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[1<b>2</b>3]]></x>
-
-function xml.texttocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(dt) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.tocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[<a>1<b>2</b>3</a>]]></x>
-
-function xml.elementtocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(e) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
-xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
-
-local entities = characters and characters.entities or nil
-local builtinentities = xml.builtinentities
-
-function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
- if not entities then
- require("char-ent")
- entities = characters.entities
- end
- if entities and root and root.tg == "@rt@" and root.statistics then
- local list = { }
- local hexify = option == "hexadecimal"
- for k, v in table.sortedhash(root.statistics.entities.names) do
- if not builtinentities[k] then
- local e = entities[k]
- if not e then
- e = format("[%s]",k)
- elseif hexify then
- e = format("&#%05X;",utfbyte(k))
- end
- list[#list+1] = format(" <!ENTITY %s %q >",k,e)
- end
- end
- local dt = root.dt
- local n = dt[1].tg == "@pi@" and 2 or 1
- if #list > 0 then
- insert(dt, n, { "\n" })
- insert(dt, n, {
- tg = "@dt@", -- beware, doctype is unparsed
- dt = { format("Something [\n%s\n] ",concat(list)) },
- ns = "",
- special = true,
- })
- insert(dt, n, { "\n\n" })
- else
- -- insert(dt, n, { table.serialize(root.statistics) })
- end
end
-end
-
--- local str = [==[
--- <?xml version='1.0' standalone='yes' ?>
--- <root>
--- <a>test &nbsp; test &#123; test</a>
--- <b><![CDATA[oeps]]></b>
--- </root>
--- ]==]
---
--- local x = xml.convert(str)
--- xml.addentitiesdoctype(x,"hexadecimal")
--- print(x)
-
---[[ldx--
-<p>Here are a few synonyms.</p>
---ldx]]--
-
-xml.all = xml.each
-xml.insert = xml.insertafter
-xml.inject = xml.injectafter
-xml.after = xml.insertafter
-xml.before = xml.insertbefore
-xml.process = xml.each
-
--- obsolete
-
-xml.obsolete = xml.obsolete or { }
-local obsolete = xml.obsolete
-
-xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
-xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
-xml.delete_element = xml.delete obsolete.delete_element = xml.delete
-xml.replace_element = xml.replace obsolete.replace_element = xml.replacet
-xml.each_element = xml.each obsolete.each_element = xml.each
-xml.process_elements = xml.process obsolete.process_elements = xml.process
-xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
-xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
-xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
-xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
-xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
-xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
-xml.inject_element = xml.inject obsolete.inject_element = xml.inject
-xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
-xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
-xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
-
--- new (probably ok)
-
+ end
+end
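-- sketch, following the commented example in the unstripped source:
--
-- local x = xml.convert("<root><a>test &nbsp; test</a></root>")
-- xml.addentitiesdoctype(x,"hexadecimal")
-- print(x) -- entity declarations are added in a doctype before the root content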
+xml.all=xml.each
+xml.insert=xml.insertafter
+xml.inject=xml.injectafter
+xml.after=xml.insertafter
+xml.before=xml.insertbefore
+xml.process=xml.each
+xml.obsolete=xml.obsolete or {}
+local obsolete=xml.obsolete
+xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
+xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
+xml.delete_element=xml.delete obsolete.delete_element=xml.delete
+xml.replace_element=xml.replace obsolete.replace_element=xml.replace
+xml.each_element=xml.each obsolete.each_element=xml.each
+xml.process_elements=xml.process obsolete.process_elements=xml.process
+xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
+xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
+xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
+xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
+xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
+xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
+xml.inject_element=xml.inject obsolete.inject_element=xml.inject
+xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
+xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
+xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
function xml.cdata(e)
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
end
- return ""
+ end
+ return ""
end
-
function xml.finalizers.xml.cdata(collected)
- if collected then
- local e = collected[1]
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
- end
- end
- return ""
-end
-
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
- tg = "@cm@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- })
-end
-
-function xml.setcdata(e,str) -- also setcomment
- e.dt = { {
- tg = "@cd@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- } }
+ if collected then
+ local e=collected[1]
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+function xml.insertcomment(e,str,n)
+ table.insert(e.dt,n or 1,{
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcdata(e,str)
+ e.dt={ {
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
end
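-- sketch for the two setters above (e is any element table):
--
-- xml.insertcomment(e,"generated",1)  -- prepend an @cm@ (comment) node to e.dt
-- xml.setcdata(e,"<raw/>")            -- replace e.dt with a single @cd@ (cdata) node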
-
--- maybe helpers like this will move to an autoloader
-
function xml.separate(x,pattern)
- local collected = xmlapplylpath(x,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local d = e.dt
- if d == x then
- report_xml("warning: xml.separate changes root")
- x = d
- end
- local t, n = { "\n" }, 1
- local i, nd = 1, #d
- while i <= nd do
- while i <= nd do
- local di = d[i]
- if type(di) == "string" then
- if di == "\n" or find(di,"^%s+$") then -- first test is speedup
- i = i + 1
- else
- d[i] = strip(di)
- break
- end
- else
- break
- end
- end
- if i > nd then
- break
- end
- t[n+1] = "\n"
- t[n+2] = d[i]
- t[n+3] = "\n"
- n = n + 3
- i = i + 1
+ local collected=xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local d=e.dt
+ if d==x then
+ report_xml("warning: xml.separate changes root")
+ x=d
+ end
+ local t,n={ "\n" },1
+ local i,nd=1,#d
+ while i<=nd do
+ while i<=nd do
+ local di=d[i]
+ if type(di)=="string" then
+ if di=="\n" or find(di,"^%s+$") then
+ i=i+1
+ else
+ d[i]=strip(di)
+ break
end
- t[n+1] = "\n"
- setmetatable(t,getmetatable(d))
- e.dt = t
- end
- end
- return x
+ else
+ break
+ end
+ end
+ if i>nd then
+ break
+ end
+ t[n+1]="\n"
+ t[n+2]=d[i]
+ t[n+3]="\n"
+ n=n+3
+ i=i+1
+ end
+ t[n+1]="\n"
+ setmetatable(t,getmetatable(d))
+ e.dt=t
+ end
+ end
+ return x
+end
+local helpers=xml.helpers or {}
+xml.helpers=helpers
+local function normal(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)=="string" and str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+local function recurse(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)~="string" then
+ recurse(str,action)
+ elseif str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
end
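-- applying a string action to collected content (sketch; string.upper stands in
-- for a real action):
--
-- local collected = xml.collect(x,"/a/b")
-- if collected then
--     xml.helpers.recursetext(collected,string.upper,true) -- true: recurse into children
-- end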
@@ -10389,450 +10821,377 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-xml'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
-local concat = table.concat
-local find, lower, upper = string.find, string.lower, string.upper
+-- original size: 10274, stripped down to: 7538
-local xml = xml
-
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmlnewhandlers = xml.newhandlers
-
-local function first(collected) -- wrong ?
- return collected and collected[1]
+if not modules then modules={} end modules ['lxml-xml']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local find,lower,upper=string.find,string.lower,string.upper
+local xml=xml
+local finalizers=xml.finalizers.xml
+local xmlfilter=xml.filter
+local xmltostring=xml.tostring
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmlnewhandlers=xml.newhandlers
+local function first(collected)
+ return collected and collected[1]
end
-
local function last(collected)
- return collected and collected[#collected]
+ return collected and collected[#collected]
end
-
local function all(collected)
- return collected
+ return collected
end
-
--- local function reverse(collected)
--- if collected then
--- local nc = #collected
--- if nc > 0 then
--- local reversed, r = { }, 0
--- for c=nc,1,-1 do
--- r = r + 1
--- reversed[r] = collected[c]
--- end
--- return reversed
--- else
--- return collected
--- end
--- end
--- end
-
-local reverse = table.reversed
-
+local reverse=table.reversed
local function attribute(collected,name)
- if collected and #collected > 0 then
- local at = collected[1].at
- return at and at[name]
- end
+ if collected and #collected>0 then
+ local at=collected[1].at
+ return at and at[name]
+ end
end
-
local function att(id,name)
- local at = id.at
- return at and at[name]
+ local at=id.at
+ return at and at[name]
end
-
local function count(collected)
- return collected and #collected or 0
+ return collected and #collected or 0
end
-
local function position(collected,n)
- if not collected then
- return 0
- end
- local nc = #collected
- if nc == 0 then
- return 0
- end
- n = tonumber(n) or 0
- if n < 0 then
- return collected[nc + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc=#collected
+ if nc==0 then
+ return 0
+ end
+ n=tonumber(n) or 0
+ if n<0 then
+ return collected[nc+n+1]
+ elseif n>0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
end
-
local function match(collected)
- return collected and #collected > 0 and collected[1].mi or 0 -- match
+ return collected and #collected>0 and collected[1].mi or 0
end
-
local function index(collected)
- return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
+ return collected and #collected>0 and collected[1].ni or 0
end
-
local function attributes(collected,arguments)
- if collected and #collected > 0 then
- local at = collected[1].at
- if arguments then
- return at[arguments]
- elseif next(at) then
- return at -- all of them
+ if collected and #collected>0 then
+ local at=collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at
+ end
+ end
+end
+local function chainattribute(collected,arguments)
+ if collected and #collected>0 then
+ local e=collected[1]
+ while e do
+ local at=e.at
+ if at then
+ local a=at[arguments]
+ if a then
+ return a
end
+ else
+ break
+ end
+ e=e.__p__
end
+ end
+ return ""
end
-
-local function chainattribute(collected,arguments) -- todo: optional levels
- if collected and #collected > 0 then
- local e = collected[1]
- while e do
- local at = e.at
- if at then
- local a = at[arguments]
- if a then
- return a
- end
- else
- break -- error
- end
- e = e.__p__
- end
- end
+local function raw(collected)
+ if collected and #collected>0 then
+ local e=collected[1] or collected
+ return e and xmltostring(e) or ""
+ else
return ""
+ end
end
-
-local function raw(collected) -- hybrid (not much different from text so it might go)
- if collected and #collected > 0 then
- local e = collected[1] or collected
- return e and xmltostring(e) or "" -- only first as we cannot concat function
- else
- return ""
- end
-end
-
---
-
-local xmltexthandler = xmlnewhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
- escape = false,
+local xmltexthandler=xmlnewhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+ escape=false,
}
-
local function xmltotext(root)
- local dt = root.dt
- if not dt then
- return ""
- end
- local nt = #dt -- string or table
- if nt == 0 then
- return ""
- elseif nt == 1 and type(dt[1]) == "string" then
- return dt[1] -- no escaping of " ' < > &
- else
- return xmlserialize(root,xmltexthandler) or ""
- end
-end
-
---
-
-local function text(collected) -- hybrid
- if collected then -- no # test here !
- local e = collected[1] or collected -- why fallback to element, how about cdata
- return e and xmltotext(e) or ""
- else
- return ""
- end
+ local dt=root.dt
+ if not dt then
+ return ""
+ end
+ local nt=#dt
+ if nt==0 then
+ return ""
+ elseif nt==1 and type(dt[1])=="string" then
+ return dt[1]
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+local function text(collected)
+ if collected then
+ local e=collected[1] or collected
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
end
-
local function texts(collected)
- if not collected then
- return { } -- why no nil
- end
- local nc = #collected
- if nc == 0 then
- return { } -- why no nil
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
- end
- return t
+ if not collected then
+ return {}
+ end
+ local nc=#collected
+ if nc==0 then
+ return {}
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ if e and e.dt then
+ n=n+1
+ t[n]=e.dt
+ end
+ end
+ return t
end
-
local function tag(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ return c and c.tg
end
-
local function name(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if not c then
- -- sorry
- elseif c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ if not c then
+ elseif c.ns=="" then
+ return c.tg
+ else
+ return c.ns..":"..c.tg
+ end
end
-
local function tags(collected,nonamespace)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace or ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
end
- return t
+ end
+ return t
end
-
local function empty(collected,spacesonly)
- if not collected then
- return true
- end
- local nc = #collected
- if nc == 0 then
- return true
- end
- for c=1,nc do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then
- return false
- elseif spacesonly and not find(edk,"%S") then
- return false
- end
- elseif n > 1 then
- return false
- end
- end
- end
- end
+ if not collected then
return true
-end
-
-finalizers.first = first
-finalizers.last = last
-finalizers.all = all
-finalizers.reverse = reverse
-finalizers.elements = all
-finalizers.default = all
-finalizers.attribute = attribute
-finalizers.att = att
-finalizers.count = count
-finalizers.position = position
-finalizers.match = match
-finalizers.index = index
-finalizers.attributes = attributes
-finalizers.chainattribute = chainattribute
-finalizers.text = text
-finalizers.texts = texts
-finalizers.tag = tag
-finalizers.name = name
-finalizers.tags = tags
-finalizers.empty = empty
-
--- shortcuts -- we could support xmlfilter(id,pattern,first)
-
+ end
+ local nc=#collected
+ if nc==0 then
+ return true
+ end
+ for c=1,nc do
+ local e=collected[c]
+ if e then
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==1 then
+ local edk=edt[1]
+ local typ=type(edk)
+ if typ=="table" then
+ return false
+ elseif edk~="" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n>1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+finalizers.first=first
+finalizers.last=last
+finalizers.all=all
+finalizers.reverse=reverse
+finalizers.elements=all
+finalizers.default=all
+finalizers.attribute=attribute
+finalizers.att=att
+finalizers.count=count
+finalizers.position=position
+finalizers.match=match
+finalizers.index=index
+finalizers.attributes=attributes
+finalizers.chainattribute=chainattribute
+finalizers.text=text
+finalizers.texts=texts
+finalizers.tag=tag
+finalizers.name=name
+finalizers.tags=tags
+finalizers.empty=empty
function xml.first(id,pattern)
- return first(xmlfilter(id,pattern))
+ return first(xmlfilter(id,pattern))
end
-
function xml.last(id,pattern)
- return last(xmlfilter(id,pattern))
+ return last(xmlfilter(id,pattern))
end
-
function xml.count(id,pattern)
- return count(xmlfilter(id,pattern))
+ return count(xmlfilter(id,pattern))
end
-
function xml.attribute(id,pattern,a,default)
- return attribute(xmlfilter(id,pattern),a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
end
-
function xml.raw(id,pattern)
- if pattern then
- return raw(xmlfilter(id,pattern))
- else
- return raw(id)
- end
-end
-
-function xml.text(id,pattern) -- brrr either content or element (when cdata)
- if pattern then
- -- return text(xmlfilter(id,pattern))
- local collected = xmlfilter(id,pattern)
- return collected and #collected > 0 and xmltotext(collected[1]) or ""
- elseif id then
- -- return text(id)
- return xmltotext(id) or ""
- else
- return ""
- end
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+function xml.text(id,pattern)
+ if pattern then
+ local collected=xmlfilter(id,pattern)
+ return collected and #collected>0 and xmltotext(collected[1]) or ""
+ elseif id then
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
end
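-- sketch for the text/count shortcuts (sample document is made up):
--
-- local x = xml.convert("<a><b>hello</b></a>")
-- print(xml.text (x,"/a/b"))  -- hello
-- print(xml.count(x,"/a/b"))  -- 1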
-
-xml.content = text
-
---
-
-function xml.position(id,pattern,n) -- element
- return position(xmlfilter(id,pattern),n)
+xml.content=text
+function xml.position(id,pattern,n)
+ return position(xmlfilter(id,pattern),n)
end
-
-function xml.match(id,pattern) -- number
- return match(xmlfilter(id,pattern))
+function xml.match(id,pattern)
+ return match(xmlfilter(id,pattern))
end
-
function xml.empty(id,pattern,spacesonly)
- return empty(xmlfilter(id,pattern),spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
-
-xml.all = xml.filter
-xml.index = xml.position
-xml.found = xml.filter
-
--- a nice one:
-
+xml.all=xml.filter
+xml.index=xml.position
+xml.found=xml.filter
local function totable(x)
- local t = { }
- for e in xmlcollected(x[1] or x,"/*") do
- t[e.tg] = xmltostring(e.dt) or ""
- end
- return next(t) and t or nil
-end
-
-xml.table = totable
-finalizers.table = totable
-
+ local t={}
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg]=xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+xml.table=totable
+finalizers.table=totable
local function textonly(e,t)
- if e then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- textonly(e,t)
- else
- t[#t+1] = e
- end
- end
+ if e then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ textonly(e,t)
+ else
+ t[#t+1]=e
end
+ end
end
- return t
+ end
+ return t
end
-
-function xml.textonly(e) -- no pattern
- return concat(textonly(e,{}))
+function xml.textonly(e)
+ return concat(textonly(e,{}))
end
-
---
-
--- local x = xml.convert("<x><a x='+'>1<B>2</B>3</a></x>")
--- xml.filter(x,"**/lowerall()") print(x)
--- xml.filter(x,"**/upperall()") print(x)
-
function finalizers.lowerall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = lower(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[lower(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=lower(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[lower(k)]=v
end
+ e.at=t
+ end
end
+ end
end
-
function finalizers.upperall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = upper(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[upper(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=upper(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[upper(k)]=v
end
+ e.at=t
+ end
end
+ end
end
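-- the usage example from the unstripped source still applies:
--
-- local x = xml.convert("<x><a x='+'>1<B>2</B>3</a></x>")
-- xml.filter(x,"**/lowerall()") print(x)
-- xml.filter(x,"**/upperall()") print(x)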
@@ -10840,245 +11199,331 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-ini'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
-local concat = table.concat
-local next, type = next, type
+package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+-- original size: 6351, stripped down to: 4919
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_initialization = logs.reporter("resolvers","initialization")
-
-local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-
--- The code here used to be part of a data-res but for convenience
--- we now split it over multiple files. As this file is now the
--- starting point we introduce resolvers here.
+if not modules then modules={} end modules ['trac-xml']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local formatters=string.formatters
+local reporters=logs.reporters
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmltext=xml.text
+local xmlfirst=xml.first
+local function showhelp(specification,...)
+ local root=xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs=xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories=select("#",...)==0 and true or table.tohash {... }
+ local nofcategories=xml.count(root,"/application/flags/category")
+ local report=specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname=category.at.name or ""
+ if wantedcategories==true or wantedcategories[categoryname] then
+ if nofcategories>1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name=flag.at.name
+ local value=flag.at.value
+ local short=xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title=xmltext(xmlfirst(category,"/title"))
+ if title and title~="" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command=xmltext(xmlfirst(example,"/command"))
+ local comment=xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment=xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+local reporthelp=reporters.help
+local exporthelp=reporters.export
+local function xmlfound(t)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="table" then
+ return false
+ end
+ if type(helpinfo)~="string" then
+ helpinfo="Warning: no helpinfo found."
+ t.helpinfo=helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript=environment.ownscript
+ local helpdata=false
+ if ownscript then
+ local helpfile=file.join(file.pathpart(ownscript),helpinfo)
+ helpdata=io.loaddata(helpfile)
+ if helpdata=="" then
+ helpdata=false
+ end
+ end
+ if not helpdata then
+ local helpfile=resolvers.findfile(helpinfo,"tex")
+ helpdata=helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata~="" then
+ helpinfo=helpdata
+ else
+ helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo=helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods=="" then
+ methods=environment.arguments["exporthelp"]
+ end
+ if not filename or filename=="" then
+ filename=environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters=logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods=="all" then
+ methods=table.keys(exporters)
+ elseif type(methods)=="string" then
+ methods=utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename)~="string" or filename=="" then
+ filename=false
+ elseif file.pathpart(filename)=="" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method=methods[i]
+ local exporter=exporters[method]
+ if exporter then
+ local result=exporter(t,method)
+ if result and result~="" then
+ if filename then
+ local fullname=file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
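-- shape of the helpinfo xml that showhelp above queries (category and flag names
-- are illustrative; it is passed in as specification.helpinfo):
--
-- local helpinfo = [[
-- <?xml version="1.0"?>
-- <application>
--   <flags>
--     <category name="basic">
--       <subcategory>
--         <flag name="verbose"><short>report more details</short></flag>
--       </subcategory>
--     </category>
--   </flags>
-- </application>
-- ]]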
-resolvers = resolvers or { }
-local resolvers = resolvers
--- We don't want the kpse library to kick in. Also, we want to be able to
--- execute programs. Control over execution is implemented later.
+end -- of closure
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
+do -- create closure to overcome 200 locals limit
-if kpse and kpse.default_texmfcnf then
- local default_texmfcnf = kpse.default_texmfcnf()
- -- looks more like context:
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
- default_texmfcnf = gsub(default_texmfcnf,"$HOME","home:")
- --
- environment.default_texmfcnf = default_texmfcnf
-end
+package.loaded["data-ini"] = package.loaded["data-ini"] or true
-kpse = { original = kpse }
+-- original size: 7898, stripped down to: 5501
-setmetatable(kpse, {
- __index = function(kp,name)
- report_initialization("fatal error: kpse library is accessed (key: %s)",name)
- os.exit()
- end
+if not modules then modules={} end modules ['data-ini']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
+local next,type=next,type
+local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_initialization=logs.reporter("resolvers","initialization")
+local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
+resolvers=resolvers or {}
+local resolvers=resolvers
+texconfig.kpse_init=false
+texconfig.shell_escape='t'
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
+ local default_texmfcnf=kpse.default_texmfcnf()
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
+ default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
+ environment.default_texmfcnf=default_texmfcnf
+end
+kpse={ original=kpse }
+setmetatable(kpse,{
+ __index=function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
} )
-
--- First we check a couple of environment variables. Some might be
--- set already but we need them later on. We start with the system
--- font path.
-
do
-
- local osfontdir = osgetenv("OSFONTDIR")
-
- if osfontdir and osfontdir ~= "" then
- -- ok
- elseif osname == "windows" then
- ossetenv("OSFONTDIR","c:/windows/fonts//")
- elseif osname == "macosx" then
- ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-
+ local osfontdir=osgetenv("OSFONTDIR")
+ if osfontdir and osfontdir~="" then
+ elseif osname=="windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname=="macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
end
-
--- Next comes the user's home path. We need this as later on we have
--- to replace ~ with its value.
-
do
-
- local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
-
- if not homedir or homedir == "" then
- homedir = char(127) -- we need a value, later we will trigger on it
- end
-
- homedir = file.collapsepath(homedir)
-
- ossetenv("HOME", homedir) -- can be used in unix cnf files
- ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
-
- environment.homedir = homedir
-
+ local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
+ if not homedir or homedir=="" then
+ homedir=char(127)
+ end
+ homedir=file.collapsepath(homedir)
+ ossetenv("HOME",homedir)
+ ossetenv("USERPROFILE",homedir)
+ environment.homedir=homedir
end
-
--- The following code sets the name of the own binary and its
--- path. This is fallback code as we have os.selfdir now.
-
do
-
- local args = environment.originalarguments or arg -- this needs a cleanup
-
- local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
- local ownpath = environment.ownpath or os.selfdir
-
- ownbin = file.collapsepath(ownbin)
- ownpath = file.collapsepath(ownpath)
-
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- end
- local binary = ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and filedirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- local path = osgetenv("PATH")
- if path then
- for p in gmatch(path,"[^"..io.pathseparator.."]+") do
- local b = filejoin(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path, the currentdir function
- -- resolves to the real location, and we use this side effect here; this
- -- trick is needed because on the mac, installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- report_initialization("following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- report_initialization("unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
+ local args=environment.originalarguments or arg
+ if not environment.ownmain then
+ environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+ local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath=environment.ownpath or os.selfdir
+ ownbin=file.collapsepath(ownbin)
+ ownpath=file.collapsepath(ownpath)
+ if not ownpath or ownpath=="" or ownpath=="unset" then
+ ownpath=args[-1] or arg[-1]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath=="" then
+ ownpath=args[-0] or arg[-0]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary=ownbin
+ if not ownpath or ownpath=="" then
+ ownpath=ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath=="" then
+ if os.binsuffix~="" then
+ binary=file.replacesuffix(binary,os.binsuffix)
+ end
+ local path=osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b=filejoin(p,binary)
+ if lfs.isfile(b) then
+ local olddir=lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp=lfs.currentdir()
+ if trace_locating and p~=pp then
+ report_initialization("following symlink %a to %a",p,pp)
+ end
+ ownpath=pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path %a",p)
+ end
+ ownpath=p
end
+ break
+ end
end
- if not ownpath or ownpath == "" then
- ownpath = "."
- report_initialization("forcing fallback ownpath .")
- elseif trace_locating then
- report_initialization("using ownpath '%s'",ownpath)
- end
+ end
end
-
- environment.ownbin = ownbin
- environment.ownpath = ownpath
-
+ if not ownpath or ownpath=="" then
+ ownpath="."
+ report_initialization("forcing fallback to ownpath %a",ownpath)
+ elseif trace_locating then
+ report_initialization("using ownpath %a",ownpath)
+ end
+ end
+ environment.ownbin=ownbin
+ environment.ownpath=ownpath
end
-
-resolvers.ownpath = environment.ownpath
-
+resolvers.ownpath=environment.ownpath
function resolvers.getownpath()
- return environment.ownpath
+ return environment.ownpath
end
-
--- The self variables permit us to use only a few (or even no)
--- environment variables.
-
do
-
- local ownpath = environment.ownpath or dir.current()
-
- if ownpath then
- ossetenv('SELFAUTOLOC', file.collapsepath(ownpath))
- ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/.."))
- ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. "/../.."))
- else
- report_initialization("error: unable to locate ownpath")
- os.exit()
- end
-
-end
-
--- The running os:
-
--- todo: check if context sits here; os.platform is more trustworthy
--- than the bin check as mtx-update runs from another path
-
-local texos = environment.texos or osgetenv("TEXOS")
-local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
-
-if not texos or texos == "" then
- texos = file.basename(texmfos)
-end
-
-ossetenv('TEXMFOS', texmfos) -- full bin path
-ossetenv('TEXOS', texos) -- partial bin parent
-ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-
-environment.texos = texos
-environment.texmfos = texmfos
-
--- The current root:
-
-local texroot = environment.texroot or osgetenv("TEXROOT")
-
-if not texroot or texroot == "" then
- texroot = osgetenv('SELFAUTOPARENT')
- ossetenv('TEXROOT',texroot)
-end
-
-environment.texroot = file.collapsepath(texroot)
-
--- Tracing. Todo ...
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
+ local ownpath=environment.ownpath or dir.current()
+ if ownpath then
+ ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
+ ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
+ end
+end
+local texos=environment.texos or osgetenv("TEXOS")
+local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
+if not texos or texos=="" then
+ texos=file.basename(texmfos)
+end
+ossetenv('TEXMFOS',texmfos)
+ossetenv('TEXOS',texos)
+ossetenv('SELFAUTOSYSTEM',os.platform)
+environment.texos=texos
+environment.texmfos=texmfos
+local texroot=environment.texroot or osgetenv("TEXROOT")
+if not texroot or texroot=="" then
+ texroot=osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+environment.texroot=file.collapsepath(texroot)
+if profiler then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
end
-
-resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-
--- todo:
-
--- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
--- profiler.start("luatex-profile.log")
--- end
-
--- a forward definition
-
if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+ function resolvers.resolve (s) return s end
+ function resolvers.unresolve(s) return s end
+ function resolvers.repath (s) return s end
end
@@ -11086,1150 +11531,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-exp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
-local concat, sort = table.concat, table.sort
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
-local type, next = type, next
+package.loaded["data-exp"] = package.loaded["data-exp"] or true
-local ostype = os.type
-local collapsepath = file.collapsepath
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_expansions = logs.reporter("resolvers","expansions")
-
-local resolvers = resolvers
-
--- As this bit of code is somewhat special it gets its own module. After
--- all, when working on the main resolver code, I don't want to scroll
--- past this every time. See data-obs.lua for the gsub variant.
+-- original size: 14643, stripped down to: 9517
+if not modules then modules={} end modules ['data-exp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
+local concat,sort=table.concat,table.sort
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local Ct,Cs,Cc,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.P,lpeg.C,lpeg.S
+local type,next=type,next
+local ostype=os.type
+local collapsepath=file.collapsepath
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_expansions=logs.reporter("resolvers","expansions")
+local resolvers=resolvers
local function f_first(a,b)
- local t, n = { }, 0
- for s in gmatch(b,"[^,]+") do
- n = n + 1 ; t[n] = a .. s
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(b,"[^,]+") do
+ n=n+1;t[n]=a..s
+ end
+ return concat(t,",")
end
-
local function f_second(a,b)
- local t, n = { }, 0
- for s in gmatch(a,"[^,]+") do
- n = n + 1 ; t[n] = s .. b
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=s..b
+ end
+ return concat(t,",")
end
-
--- kpsewhich --expand-braces '{a,b}{c,d}'
--- ac:bc:ad:bd
-
--- old {a,b}{c,d} => ac ad bc bd
---
--- local function f_both(a,b)
--- local t, n = { }, 0
--- for sa in gmatch(a,"[^,]+") do
--- for sb in gmatch(b,"[^,]+") do
--- n = n + 1 ; t[n] = sa .. sb
--- end
--- end
--- return concat(t,",")
--- end
---
--- new {a,b}{c,d} => ac bc ad bd
-
local function f_both(a,b)
- local t, n = { }, 0
- for sb in gmatch(b,"[^,]+") do -- and not sa
- for sa in gmatch(a,"[^,]+") do -- sb
- n = n + 1 ; t[n] = sa .. sb
- end
- end
- return concat(t,",")
-end
-
-local left = P("{")
-local right = P("}")
-local var = P((1 - S("{}" ))^0)
-local set = P((1 - S("{},"))^0)
-local other = P(1)
-
-local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
-local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
-local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
-local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-
-local stripper_1 = lpeg.stripper ("{}@")
-local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-
-local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
- if trace_expansions then
- report_expansions("expanding variable '%s'",str)
- end
- local t, ok, done = newlist or { }, false, false
- local n = #t
- str = lpegmatch(replacer_1,str)
+ local t,n={},0
+ for sb in gmatch(b,"[^,]+") do
+ for sa in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=sa..sb
+ end
+ end
+ return concat(t,",")
+end
+local left=P("{")
+local right=P("}")
+local var=P((1-S("{}" ))^0)
+local set=P((1-S("{},"))^0)
+local other=P(1)
+local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
+local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
+local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
+local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
+local stripper_1=lpeg.stripper ("{}@")
+local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
+local function splitpathexpr(str,newlist,validate)
+ if trace_expansions then
+ report_expansions("expanding variable %a",str)
+ end
+ local t,ok,done=newlist or {},false,false
+ local n=#t
+ str=lpegmatch(replacer_1,str)
+ repeat
+ local old=str
repeat
- local old = str
- repeat
- local old = str
- str = lpegmatch(l_first, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_second,str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_both, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_rest, str)
- until old == str
- until old == str -- or not find(str,"{")
- str = lpegmatch(stripper_1,str)
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then
- n = n + 1 ; t[n] = s
- end
- end
- else
- for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- report_expansions("% 4i: %s",k,t[k])
- end
+ local old=str
+ str=lpegmatch(l_first,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_second,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_both,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_rest,str)
+ until old==str
+ until old==str
+ str=lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s=validate(s)
+ if s then
+ n=n+1
+ t[n]=s
+ end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ n=n+1
+ t[n]=s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
end
- return t
+ end
+ return t
end
-
--- We could make the previous one public.
-
local function validate(s)
- s = collapsepath(s) -- already keeps the //
- return s ~= "" and not find(s,"^!*unset/*$") and s
+ s=collapsepath(s)
+ return s~="" and not find(s,"^!*unset/*$") and s
end
-
-resolvers.validatedpath = validate -- keeps the trailing //
-
+resolvers.validatedpath=validate
function resolvers.expandedpathfromlist(pathlist)
- local newlist = { }
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- return newlist
-end
-
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
-local cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
+ local newlist={}
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ return newlist
+end
+local cleanup=lpeg.replacer {
+ { "!","" },
+ { "\\","/" },
}
-
-function resolvers.cleanpath(str) -- tricky, maybe only simple paths
- local doslashes = (P("\\")/"/" + 1)^0
- local donegation = (P("!") /"" )^0
- local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
- if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return "" -- special case
- else
- return lpegmatch(cleanup,str)
- end
- end
- else
- local dohome = ((P("~")+P("$HOME"))/homedir)^0
- local cleanup = Cs(donegation * dohome * doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
- end
- end
- return resolvers.cleanpath(str)
-end
-
--- print(resolvers.cleanpath(""))
--- print(resolvers.cleanpath("!"))
--- print(resolvers.cleanpath("~"))
--- print(resolvers.cleanpath("~/test"))
--- print(resolvers.cleanpath("!~/test"))
--- print(resolvers.cleanpath("~/test~test"))
-
--- This one strips quotes and funny tokens.
-
-local expandhome = P("~") / "$HOME" -- environment.homedir
-
-local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
-local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = Cs(
- lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+function resolvers.cleanpath(str)
+ local doslashes=(P("\\")/"/"+1)^0
+ local donegation=(P("!")/"" )^0
+ local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return ""
+ else
+ return lpegmatch(cleanup,str)
+ end
+ end
+ else
+ local dohome=((P("~")+P("$HOME"))/homedir)^0
+ local cleanup=Cs(donegation*dohome*doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
+ end
+ return resolvers.cleanpath(str)
+end
+local expandhome=P("~")/"$HOME"
+local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
+local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
+local dostring=(expandhome+1 )^0
+local stripper=Cs(
+ lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
)
-
-function resolvers.checkedvariable(str) -- assumes str is a string
- return type(str) == "string" and lpegmatch(stripper,str) or str
-end
-
--- The path splitter:
-
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
-local cache = { }
-
------ splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
-local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
-
-local backslashswapper = lpeg.replacer("\\","/")
-
-local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
- if str then
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
- found = { }
- local noffound = 0
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- noffound = noffound + 1
- found[noffound] = s
- end
- end
- if trace_expansions then
- report_expansions("splitting path specification '%s'",str)
- for k=1,noffound do
- report_expansions("% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
- end
+function resolvers.checkedvariable(str)
+ return type(str)=="string" and lpegmatch(stripper,str) or str
+end
+local cache={}
+local splitter=lpeg.tsplitat(";")
+local backslashswapper=lpeg.replacer("\\","/")
+local function splitconfigurationpath(str)
+ if str then
+ local found=cache[str]
+ if not found then
+ if str=="" then
+ found={}
+ else
+ local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
+ found={}
+ local noffound=0
+ for i=1,#split do
+ local s=split[i]
+ if not find(s,"^{*unset}*") then
+ noffound=noffound+1
+ found[noffound]=s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification %a",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
end
- return found
+ cache[str]=found
+ end
end
+ return found
+ end
end
-
-resolvers.splitconfigurationpath = splitconfigurationpath
-
+resolvers.splitconfigurationpath=splitconfigurationpath
function resolvers.splitpath(str)
- if type(str) == 'table' then
- return str
- else
- return splitconfigurationpath(str)
- end
+ if type(str)=='table' then
+ return str
+ else
+ return splitconfigurationpath(str)
+ end
end
-
function resolvers.joinpath(str)
- if type(str) == 'table' then
- return file.joinpath(str)
- else
- return str
- end
-end
-
--- The next function scans directories and returns a hash where the
--- entries are either strings or tables.
-
--- starting with . or .. etc or funny char
-
-
-
-
--- a lot of this caching can be stripped away when we have ssd's everywhere
---
--- we could cache all the (sub)paths here if needed
-
-local attributes, directory = lfs.attributes, lfs.dir
-
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-local timer = { }
-local scanned = { }
-local nofscans = 0
-local scancache = { }
-
+ if type(str)=='table' then
+ return file.joinpath(str)
+ else
+ return str
+ end
+end
+local attributes,directory=lfs.attributes,lfs.dir
+local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer={}
+local scanned={}
+local nofscans=0
+local scancache={}
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ n=n+1
+ local f=files[name]
+ if f then
+ if type(f)=='string' then
+ files[name]={ f,path }
+ else
+ f[#f+1]=path
+ end
+ else
+ files[name]=path
+ local lower=lower(name)
+ if name~=lower then
+ files["remap:"..lower]=name
+ r=r+1
+ end
+ end
+ elseif mode=='directory' then
+ m=m+1
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files, n, m, r = scan(files,spec,dirs[i],n,m,r)
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files,n,m,r=scan(files,spec,dirs[i],n,m,r)
end
- scancache[sub(full,1,-2)] = files
- return files, n, m, r
+ end
+ scancache[sub(full,1,-2)]=files
+ return files,n,m,r
end
-
-local fullcache = { }
-
+local fullcache={}
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = fullcache[realpath]
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1] = realpath
- fullcache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
-end
-
-local function simplescan(files,spec,path) -- first match only, no map and such
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if not files[name] then
- -- only first match
- files[name] = path
- end
- elseif mode == 'directory' then
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files,n,m,r=scan({},realpath..'/',"",0,0,0)
+ files.__path__=path
+ files.__files__=n
+ files.__directories__=m
+ files.__remappings__=r
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ fullcache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
+end
+local function simplescan(files,spec,path)
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ if not files[name] then
+ files[name]=path
+ end
+ elseif mode=='directory' then
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files = simplescan(files,spec,dirs[i])
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files=simplescan(files,spec,dirs[i])
end
- return files
+ end
+ return files
end
-
-local simplecache = { }
-local nofsharedscans = 0
-
+local simplecache={}
+local nofsharedscans=0
function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = simplecache[realpath]
- if not files then
- files = scancache[realpath]
- if files then
- nofsharedscans = nofsharedscans + 1
- end
- end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files = simplescan({ },realpath .. '/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
- if usecache then
- scanned[#scanned+1] = realpath
- simplecache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=simplecache[realpath]
+ if not files then
+ files=scancache[realpath]
+ if files then
+ nofsharedscans=nofsharedscans+1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files=simplescan({},realpath..'/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ simplecache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
end
-
function resolvers.scandata()
- table.sort(scanned)
- return {
- n = nofscans,
- shared = nofsharedscans,
- time = statistics.elapsedtime(timer),
- paths = scanned,
- }
+ table.sort(scanned)
+ return {
+ n=nofscans,
+ shared=nofsharedscans,
+ time=statistics.elapsedtime(timer),
+ paths=scanned,
+ }
end
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
+package.loaded["data-env"] = package.loaded["data-env"] or true
+
+-- original size: 8762, stripped down to: 6484
-local lower, gsub = string.lower, string.gsub
-
-local resolvers = resolvers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
-
-local formats = allocate()
-local suffixes = allocate()
-local dangerous = allocate()
-local suffixmap = allocate()
-
-resolvers.formats = formats
-resolvers.suffixes = suffixes
-resolvers.dangerous = dangerous
-resolvers.suffixmap = suffixmap
-
-local relations = allocate { -- todo: handlers also here
- core = {
- ofm = { -- will become obsolete
- names = { "ofm", "omega font metric", "omega font metrics" },
- variable = 'OFMFONTS',
- suffixes = { 'ofm', 'tfm' },
- },
- ovf = { -- will become obsolete
- names = { "ovf", "omega virtual font", "omega virtual fonts" },
- variable = 'OVFFONTS',
- suffixes = { 'ovf', 'vf' },
- },
- tfm = {
- names = { "tfm", "tex font metric", "tex font metrics" },
- variable = 'TFMFONTS',
- suffixes = { 'tfm' },
- },
- vf = {
- names = { "vf", "virtual font", "virtual fonts" },
- variable = 'VFFONTS',
- suffixes = { 'vf' },
- },
- otf = {
- names = { "otf", "opentype", "opentype font", "opentype fonts"},
- variable = 'OPENTYPEFONTS',
- suffixes = { 'otf' },
- },
- ttf = {
- names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" },
- variable = 'TTFONTS',
- suffixes = { 'ttf', 'ttc', 'dfont' },
- },
- afm = {
- names = { "afm", "adobe font metric", "adobe font metrics" },
- variable = "AFMFONTS",
- suffixes = { "afm" },
- },
- pfb = {
- names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" },
- variable = 'T1FONTS',
- suffixes = { 'pfb', 'pfa' },
- },
- fea = {
- names = { "fea", "font feature", "font features", "font feature file", "font feature files" },
- variable = 'FONTFEATURES',
- suffixes = { 'fea' },
- },
- cid = {
- names = { "cid", "cid map", "cid maps", "cid file", "cid files" },
- variable = 'FONTCIDMAPS',
- suffixes = { 'cid', 'cidmap' },
- },
- fmt = {
- names = { "fmt", "format", "tex format" },
- variable = 'TEXFORMATS',
- suffixes = { 'fmt' },
- },
- mem = { -- will become obsolete
- names = { 'mem', "metapost format" },
- variable = 'MPMEMS',
- suffixes = { 'mem' },
- },
- mp = {
- names = { "mp" },
- variable = 'MPINPUTS',
- suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
- },
- tex = {
- names = { "tex" },
- variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
- },
- icc = {
- names = { "icc", "icc profile", "icc profiles" },
- variable = 'ICCPROFILES',
- suffixes = { 'icc' },
- },
- texmfscripts = {
- names = { "texmfscript", "texmfscripts", "script", "scripts" },
- variable = 'TEXMFSCRIPTS',
- suffixes = { 'rb', 'pl', 'py' },
- },
- lua = {
- names = { "lua" },
- variable = 'LUAINPUTS',
- suffixes = { 'lua', 'luc', 'tma', 'tmc' },
- },
- lib = {
- names = { "lib" },
- variable = 'CLUAINPUTS',
- suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' },
- },
- bib = {
- names = { 'bib' },
- suffixes = { 'bib' },
- },
- bst = {
- names = { 'bst' },
- suffixes = { 'bst' },
- },
- fontconfig = {
- names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
- variable = 'FONTCONFIG_PATH',
- },
+if not modules then modules={} end modules ['data-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local lower,gsub=string.lower,string.gsub
+local resolvers=resolvers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+local suffixonly=file.suffixonly
+local formats=allocate()
+local suffixes=allocate()
+local dangerous=allocate()
+local suffixmap=allocate()
+resolvers.formats=formats
+resolvers.suffixes=suffixes
+resolvers.dangerous=dangerous
+resolvers.suffixmap=suffixmap
+local luasuffixes=utilities.lua.suffixes
+local relations=allocate {
+ core={
+ ofm={
+ names={ "ofm","omega font metric","omega font metrics" },
+ variable='OFMFONTS',
+ suffixes={ 'ofm','tfm' },
+ },
+ ovf={
+ names={ "ovf","omega virtual font","omega virtual fonts" },
+ variable='OVFFONTS',
+ suffixes={ 'ovf','vf' },
+ },
+ tfm={
+ names={ "tfm","tex font metric","tex font metrics" },
+ variable='TFMFONTS',
+ suffixes={ 'tfm' },
},
- obsolete = {
- enc = {
- names = { "enc", "enc files", "enc file", "encoding files", "encoding file" },
- variable = 'ENCFONTS',
- suffixes = { 'enc' },
- },
- map = {
- names = { "map", "map files", "map file" },
- variable = 'TEXFONTMAPS',
- suffixes = { 'map' },
- },
- lig = {
- names = { "lig files", "lig file", "ligature file", "ligature files" },
- variable = 'LIGFONTS',
- suffixes = { 'lig' },
- },
- opl = {
- names = { "opl" },
- variable = 'OPLFONTS',
- suffixes = { 'opl' },
- },
- ovp = {
- names = { "ovp" },
- variable = 'OVPFONTS',
- suffixes = { 'ovp' },
- },
+ vf={
+ names={ "vf","virtual font","virtual fonts" },
+ variable='VFFONTS',
+ suffixes={ 'vf' },
},
- kpse = { -- subset
- base = {
- names = { 'base', "metafont format" },
- variable = 'MFBASES',
- suffixes = { 'base', 'bas' },
- },
- cmap = {
- names = { 'cmap', 'cmap files', 'cmap file' },
- variable = 'CMAPFONTS',
- suffixes = { 'cmap' },
- },
- cnf = {
- names = { 'cnf' },
- suffixes = { 'cnf' },
- },
- web = {
- names = { 'web' },
- suffixes = { 'web', 'ch' }
- },
- cweb = {
- names = { 'cweb' },
- suffixes = { 'w', 'web', 'ch' },
- },
- gf = {
- names = { 'gf' },
- suffixes = { '<resolution>gf' },
- },
- mf = {
- names = { 'mf' },
- variable = 'MFINPUTS',
- suffixes = { 'mf' },
- },
- mft = {
- names = { 'mft' },
- suffixes = { 'mft' },
- },
- pk = {
- names = { 'pk' },
- suffixes = { '<resolution>pk' },
- },
+ otf={
+ names={ "otf","opentype","opentype font","opentype fonts"},
+ variable='OPENTYPEFONTS',
+ suffixes={ 'otf' },
},
+ ttf={
+ names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
+ variable='TTFONTS',
+ suffixes={ 'ttf','ttc','dfont' },
+ },
+ afm={
+ names={ "afm","adobe font metric","adobe font metrics" },
+ variable="AFMFONTS",
+ suffixes={ "afm" },
+ },
+ pfb={
+ names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
+ variable='T1FONTS',
+ suffixes={ 'pfb','pfa' },
+ },
+ fea={
+ names={ "fea","font feature","font features","font feature file","font feature files" },
+ variable='FONTFEATURES',
+ suffixes={ 'fea' },
+ },
+ cid={
+ names={ "cid","cid map","cid maps","cid file","cid files" },
+ variable='FONTCIDMAPS',
+ suffixes={ 'cid','cidmap' },
+ },
+ fmt={
+ names={ "fmt","format","tex format" },
+ variable='TEXFORMATS',
+ suffixes={ 'fmt' },
+ },
+ mem={
+ names={ 'mem',"metapost format" },
+ variable='MPMEMS',
+ suffixes={ 'mem' },
+ },
+ mp={
+ names={ "mp" },
+ variable='MPINPUTS',
+ suffixes={ 'mp','mpvi','mpiv','mpii' },
+ },
+ tex={
+ names={ "tex" },
+ variable='TEXINPUTS',
+ suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ },
+ icc={
+ names={ "icc","icc profile","icc profiles" },
+ variable='ICCPROFILES',
+ suffixes={ 'icc' },
+ },
+ texmfscripts={
+ names={ "texmfscript","texmfscripts","script","scripts" },
+ variable='TEXMFSCRIPTS',
+ suffixes={ 'rb','pl','py' },
+ },
+ lua={
+ names={ "lua" },
+ variable='LUAINPUTS',
+ suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ },
+ lib={
+ names={ "lib" },
+ variable='CLUAINPUTS',
+ suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
+ },
+ bib={
+ names={ 'bib' },
+ suffixes={ 'bib' },
+ },
+ bst={
+ names={ 'bst' },
+ suffixes={ 'bst' },
+ },
+ fontconfig={
+ names={ 'fontconfig','fontconfig file','fontconfig files' },
+ variable='FONTCONFIG_PATH',
+ },
+ },
+ obsolete={
+ enc={
+ names={ "enc","enc files","enc file","encoding files","encoding file" },
+ variable='ENCFONTS',
+ suffixes={ 'enc' },
+ },
+ map={
+ names={ "map","map files","map file" },
+ variable='TEXFONTMAPS',
+ suffixes={ 'map' },
+ },
+ lig={
+ names={ "lig files","lig file","ligature file","ligature files" },
+ variable='LIGFONTS',
+ suffixes={ 'lig' },
+ },
+ opl={
+ names={ "opl" },
+ variable='OPLFONTS',
+ suffixes={ 'opl' },
+ },
+ ovp={
+ names={ "ovp" },
+ variable='OVPFONTS',
+ suffixes={ 'ovp' },
+ },
+ },
+ kpse={
+ base={
+ names={ 'base',"metafont format" },
+ variable='MFBASES',
+ suffixes={ 'base','bas' },
+ },
+ cmap={
+ names={ 'cmap','cmap files','cmap file' },
+ variable='CMAPFONTS',
+ suffixes={ 'cmap' },
+ },
+ cnf={
+ names={ 'cnf' },
+ suffixes={ 'cnf' },
+ },
+ web={
+ names={ 'web' },
+ suffixes={ 'web','ch' }
+ },
+ cweb={
+ names={ 'cweb' },
+ suffixes={ 'w','web','ch' },
+ },
+ gf={
+ names={ 'gf' },
+ suffixes={ '<resolution>gf' },
+ },
+ mf={
+ names={ 'mf' },
+ variable='MFINPUTS',
+ suffixes={ 'mf' },
+ },
+ mft={
+ names={ 'mft' },
+ suffixes={ 'mft' },
+ },
+ pk={
+ names={ 'pk' },
+ suffixes={ '<resolution>pk' },
+ },
+ },
}
-
-resolvers.relations = relations
-
--- formats: maps a format onto a variable
-
+resolvers.relations=relations
function resolvers.updaterelations()
- for category, categories in next, relations do
- for name, relation in next, categories do
- local rn = relation.names
- local rv = relation.variable
- local rs = relation.suffixes
- if rn and rv then
- for i=1,#rn do
- local rni = lower(gsub(rn[i]," ",""))
- formats[rni] = rv
- if rs then
- suffixes[rni] = rs
- for i=1,#rs do
- local rsi = rs[i]
- suffixmap[rsi] = rni
- end
- end
- end
- end
- if rs then
- end
- end
- end
-end
-
-resolvers.updaterelations() -- push this in the metatable -> newindex
-
+ for category,categories in next,relations do
+ for name,relation in next,categories do
+ local rn=relation.names
+ local rv=relation.variable
+ local rs=relation.suffixes
+ if rn and rv then
+ for i=1,#rn do
+ local rni=lower(gsub(rn[i]," ",""))
+ formats[rni]=rv
+ if rs then
+ suffixes[rni]=rs
+ for i=1,#rs do
+ local rsi=rs[i]
+ suffixmap[rsi]=rni
+ end
+ end
+ end
+ end
+ if rs then
+ end
+ end
+ end
+end
+resolvers.updaterelations()
local function simplified(t,k)
- return k and rawget(t,lower(gsub(k," ",""))) or nil
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
-
-setmetatableindex(formats, simplified)
-setmetatableindex(suffixes, simplified)
-setmetatableindex(suffixmap, simplified)
-
--- A few accessors, mostly for command line tool.
-
+setmetatableindex(formats,simplified)
+setmetatableindex(suffixes,simplified)
+setmetatableindex(suffixmap,simplified)
function resolvers.suffixofformat(str)
- local s = suffixes[str]
- return s and s[1] or ""
+ local s=suffixes[str]
+ return s and s[1] or ""
end
-
function resolvers.suffixofformat(str)
- return suffixes[str] or { }
+ return suffixes[str] or {}
end
-
-for name, format in next, formats do
- dangerous[name] = true -- still needed ?
+for name,format in next,formats do
+ dangerous[name]=true
end
-
--- because vf searching is somewhat dangerous, we want to prevent
--- too liberal searching esp because we do a lookup on the current
--- path anyway; only tex (or any) is safe
-
-dangerous.tex = nil
-
-
--- more helpers
-
+dangerous.tex=nil
function resolvers.formatofvariable(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
-function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+function resolvers.formatofsuffix(str)
+ return suffixmap[suffixonly(str)] or 'tex'
end
-
function resolvers.variableofformat(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
function resolvers.variableofformatorsuffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = suffixmap[fileextname(str)]
- if v then
- return formats[v]
- end
- return ''
+ local v=formats[str]
+ if v then
+ return v
+ end
+ v=suffixmap[suffixonly(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. It is best to set the
-following variable; when not set, the usual paths will be
-checked. Personally I prefer the user's temporary path.</p>
-
-<code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
-local serialize, serializetofile = table.serialize, table.tofile
-local mkdirs, isdir = dir.mkdirs, lfs.isdir
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-
-local report_caches = logs.reporter("resolvers","caches")
-local report_resolvers = logs.reporter("resolvers","caching")
-
-local resolvers = resolvers
-
--- intermezzo
-
-local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end)
-local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end)
+package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
-local compile = utilities.lua.compile
+-- original size: 14308, stripped down to: 10956
+if not modules then modules={} end modules ['data-tmp']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
+local serialize,serializetofile=table.serialize,table.tofile
+local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
+local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
+local formatters=string.formatters
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local report_caches=logs.reporter("resolvers","caches")
+local report_resolvers=logs.reporter("resolvers","caching")
+local resolvers=resolvers
+local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
+local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
+local compile=utilities.lua.compile
function utilities.lua.compile(luafile,lucfile,cleanup,strip)
- if cleanup == nil then cleanup = directive_cleanup end
- if strip == nil then strip = directive_strip end
- return compile(luafile,lucfile,cleanup,strip)
-end
-
--- end of intermezzo
-
-caches = caches or { }
-local caches = caches
-
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.force = true
-caches.ask = false
-caches.relocate = false
-caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-local writable, readables, usedreadables = nil, { }, { }
-
--- we could use a metatable for writable and readable but not yet
-
+ if cleanup==nil then cleanup=directive_cleanup end
+ if strip==nil then strip=directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
+end
+caches=caches or {}
+local caches=caches
+local luasuffixes=utilities.lua.suffixes
+caches.base=caches.base or "luatex-cache"
+caches.more=caches.more or "context"
+caches.direct=false
+caches.tree=false
+caches.force=true
+caches.ask=false
+caches.relocate=false
+caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+local writable,readables,usedreadables=nil,{},{}
local function identify()
- -- Combining the loops makes it messy. First we check the format cache path
- -- and when the last component is not present we try to create it.
- local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- cachepath = file.collapsepath(cachepath)
- local valid = isdir(cachepath)
- if valid then
- if file.is_readable(cachepath) then
- readables[#readables+1] = cachepath
- if not writable and file.is_writable(cachepath) then
- writable = cachepath
- end
- end
- elseif not writable and caches.force then
- local cacheparent = file.dirname(cachepath)
- if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
- if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- mkdirs(cachepath)
- if isdir(cachepath) and file.is_writable(cachepath) then
- report_caches("created: %s",cachepath)
- writable = cachepath
- readables[#readables+1] = cachepath
- end
- end
- end
- end
- end
- end
- end
- -- As a last resort we check some temporary paths but this time we don't
- -- create them.
- local texmfcaches = caches.defaults
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- cachepath = resolvers.expansion(cachepath) -- was getenv
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- local valid = isdir(cachepath)
- if valid and file.is_readable(cachepath) then
- if not writable and file.is_writable(cachepath) then
- readables[#readables+1] = cachepath
- writable = cachepath
- break
- end
- end
- end
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ cachepath=file.collapsepath(cachepath)
+ local valid=isdir(cachepath)
+ if valid then
+ if is_readable(cachepath) then
+ readables[#readables+1]=cachepath
+ if not writable and is_writable(cachepath) then
+ writable=cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent=file.dirname(cachepath)
+ if is_writable(cacheparent) and true then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
+ writable=cachepath
+ readables[#readables+1]=cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local texmfcaches=caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ cachepath=resolvers.expansion(cachepath)
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ local valid=isdir(cachepath)
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
+ readables[#readables+1]=cachepath
+ writable=cachepath
+ break
+ end
end
+ end
end
- -- Some extra checking. If we have no writable or readable path then we simply
- -- quit.
- if not writable then
- report_caches("fatal error: there is no valid writable cache path defined")
- os.exit()
- elseif #readables == 0 then
- report_caches("fatal error: there is no valid readable cache path defined")
- os.exit()
- end
- -- why here
- writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case
- -- moved here
- local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
- if tree then
- caches.tree = tree
- writable = mkdirs(writable,base,more,tree)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more,tree)
- end
- else
- writable = mkdirs(writable,base,more)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more)
- end
+ end
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables==0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ writable=dir.expandname(resolvers.cleanpath(writable))
+ local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
+ if tree then
+ caches.tree=tree
+ writable=mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more,tree)
end
- -- end
- if trace_cache then
- for i=1,#readables do
- report_caches("using readable path '%s' (order %s)",readables[i],i)
- end
- report_caches("using writable path '%s'",writable)
+ else
+ writable=mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more)
end
- identify = function()
- return writable, readables
+ end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path %a (order %s)",readables[i],i)
end
- return writable, readables
+ report_caches("using writable path %a",writable)
+ end
+ identify=function()
+ return writable,readables
+ end
+ return writable,readables
end
-
function caches.usedpaths()
- local writable, readables = identify()
- if #readables > 1 then
- local result = { }
- for i=1,#readables do
- local readable = readables[i]
- if usedreadables[i] or readable == writable then
- result[#result+1] = format("readable: '%s' (order %s)",readable,i)
- end
- end
- result[#result+1] = format("writable: '%s'",writable)
- return result
- else
- return writable
+ local writable,readables=identify()
+ if #readables>1 then
+ local result={}
+ for i=1,#readables do
+ local readable=readables[i]
+ if usedreadables[i] or readable==writable then
+ result[#result+1]=formatters["readable: %a (order %s)"](readable,i)
+ end
end
+ result[#result+1]=formatters["writable: %a"](writable)
+ return result
+ else
+ return writable
+ end
end
-
function caches.configfiles()
- return concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
-
function caches.hashed(tree)
- tree = gsub(tree,"\\$","/")
- tree = gsub(tree,"/+$","")
- tree = lower(tree)
- local hash = md5.hex(tree)
- if trace_cache or trace_locating then
- report_caches("hashing tree %s, hash %s",tree,hash)
- end
- return hash
+ tree=gsub(tree,"[\\/]+$","")
+ tree=lower(tree)
+ local hash=md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %a, hash %a",tree,hash)
+ end
+ return hash
end
-
function caches.treehash()
- local tree = caches.configfiles()
- if not tree or tree == "" then
- return false
+ local tree=caches.configfiles()
+ if not tree or tree=="" then
+ return false
+ else
+ return caches.hashed(tree)
+ end
+end
+local r_cache,w_cache={},{}
+local function getreadablepaths(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=r_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done={}
+ for i=1,#readables do
+ done[i]=file.join(readables[i],...)
+ end
else
- return caches.hashed(tree)
+ done=readables
end
+ r_cache[hash]=done
+ end
+ return done
end
-
-local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-
-local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = r_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = { }
- for i=1,#readables do
- done[i] = file.join(readables[i],...)
- end
- else
- done = readables
- end
- r_cache[hash] = done
- end
- return done
-end
-
local function getwritablepath(...)
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = w_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = mkdirs(writable,...)
- else
- done = writable
- end
- w_cache[hash] = done
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=w_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done=mkdirs(writable,...)
+ else
+ done=writable
end
- return done
+ w_cache[hash]=done
+ end
+ return done
end
-
-caches.getreadablepaths = getreadablepaths
-caches.getwritablepath = getwritablepath
-
+caches.getreadablepaths=getreadablepaths
+caches.getwritablepath=getwritablepath
function caches.getfirstreadablefile(filename,...)
- local rd = getreadablepaths(...)
- for i=1,#rd do
- local path = rd[i]
- local fullname = file.join(path,filename)
- if file.is_readable(fullname) then
- usedreadables[i] = true
- return fullname, path
- end
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
end
- return caches.setfirstwritablefile(filename,...)
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-
function caches.setfirstwritablefile(filename,...)
- local wr = getwritablepath(...)
- local fullname = file.join(wr,filename)
- return fullname, wr
+ local wr=getwritablepath(...)
+ local fullname=file.join(wr,filename)
+ return fullname,wr
end
-
-function caches.define(category,subcategory) -- for old times sake
- return function()
- return getwritablepath(category,subcategory)
- end
+function caches.define(category,subcategory)
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
-
function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
+ return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
end
-
function caches.loaddata(readables,name)
- if type(readables) == "string" then
- readables = { readables }
- end
- for i=1,#readables do
- local path = readables[i]
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- end
- end
- return false
+ if type(readables)=="string" then
+ readables={ readables }
+ end
+ for i=1,#readables do
+ local path=readables[i]
+ local tmaname,tmcname=caches.setluanames(path,name)
+ local loader=false
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader then
+ loader=loadfile(tmaname)
+ end
+ end
+ if loader then
+ loader=loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-
function caches.is_writable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.is_writable(tmaname)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ return is_writable(tmaname)
end
-
-local saveoptions = { compact = true }
-
+local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
- else
- serializetofile(tmaname,data,true,saveoptions)
- end
- utilities.lua.compile(tmaname,tmcname)
-end
-
--- moved from data-res:
-
-local content_state = { }
-
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ local reduce,simplify=true,true
+ if raw then
+ reduce,simplify=false,false
+ end
+ data.cache_uuid=os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,serialize(data,true,saveoptions))
+ else
+ serializetofile(tmaname,data,true,saveoptions)
+ end
+ utilities.lua.compile(tmaname,tmcname)
+end
+local content_state={}
function caches.contentstate()
- return content_state or { }
+ return content_state or {}
end
-
function caches.loadcontent(cachename,dataname)
- local name = caches.hashed(cachename)
- local full, path = caches.getfirstreadablefile(name ..".lua","trees")
- local filename = file.join(path,name)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content then
- if data.type == dataname then
- if data.version == resolvers.cacheversion then
- content_state[#content_state+1] = data.uuid
- if trace_locating then
- report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
- end
- return data.content
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename)
- end
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename)
- end
- elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename)
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
+ if blob then
+ local data=blob()
+ if data and data.content then
+ if data.type==dataname then
+ if data.version==resolvers.cacheversion then
+ content_state[#content_state+1]=data.uuid
+ if trace_locating then
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
end
+ else
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
+ end
elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
+ end
end
-
function caches.collapsecontent(content)
- for k, v in next, content do
- if type(v) == "table" and #v == 1 then
- content[k] = v[1]
- end
+ for k,v in next,content do
+ if type(v)=="table" and #v==1 then
+ content[k]=v[1]
end
+ end
end
-
function caches.savecontent(cachename,dataname,content)
- local name = caches.hashed(cachename)
- local full, path = caches.setfirstwritablefile(name ..".lua","trees")
- local filename = file.join(path,name) -- is full
- local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local luaname=addsuffix(filename,luasuffixes.lua)
+ local lucname=addsuffix(filename,luasuffixes.luc)
+ if trace_locating then
+ report_resolvers("preparing %a for %a",dataname,cachename)
+ end
+ local data={
+ type=dataname,
+ root=cachename,
+ version=resolvers.cacheversion,
+ date=os.date("%Y-%m-%d"),
+ time=os.date("%H:%M:%S"),
+ content=content,
+ uuid=os.uuid(),
+ }
+ local ok=io.savedata(luaname,serialize(data,true))
+ if ok then
if trace_locating then
- report_resolvers("preparing '%s' for '%s'",dataname,cachename)
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = content,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,serialize(data,true))
- if ok then
- if trace_locating then
- report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
- end
- if utilities.lua.compile(luaname,lucname) then
- if trace_locating then
- report_resolvers("'%s' compiled to '%s'",dataname,lucname)
- end
- return true
- else
- if trace_locating then
- report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("%a compiled to %a",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
+ end
end
@@ -12237,1999 +12513,1700 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-met'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find, format = string.find, string.format
-local sequenced = table.sequenced
-local addurlscheme, urlhashed = url.addscheme, url.hashed
-
-local trace_locating = false
-
-trackers.register("resolvers.locating", function(v) trace_methods = v end)
-trackers.register("resolvers.methods", function(v) trace_methods = v end)
-
-
-local report_methods = logs.reporter("resolvers","methods")
-
-local allocate = utilities.storage.allocate
-
-local resolvers = resolvers
+package.loaded["data-met"] = package.loaded["data-met"] or true
-local registered = { }
-
-local function splitmethod(filename) -- todo: filetype in specification
- if not filename then
- return { scheme = "unknown", original = filename }
- end
- if type(filename) == "table" then
- return filename -- already split
- end
- filename = file.collapsepath(filename)
- if not find(filename,"://") then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- end
- local specification = url.hashed(filename)
- if not specification.scheme or specification.scheme == "" then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- else
- return specification
- end
-end
-
-resolvers.splitmethod = splitmethod -- bad name but ok
-
--- the second argument is always analyzed (saves time later on) and the original
--- gets passed as original but also as argument
-
-local function methodhandler(what,first,...) -- filename can be nil or false
- local method = registered[what]
- if method then
- local how, namespace = method.how, method.namespace
- if how == "uri" or how == "url" then
- local specification = splitmethod(first)
- local scheme = specification.scheme
- local resolver = namespace and namespace[scheme]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
- end
- return resolver(specification,...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
- end
- return resolver(specification,...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, no handler",what,how)
- end
- end
- elseif how == "tag" then
- local resolver = namespace and namespace[first]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first)
- end
- return resolver(...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default",what,how)
- end
- return resolver(...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, unknown",what,how)
- end
- end
- end
- else
- report_methods("resolver: method=%s, unknown",what)
- end
-end
-
-resolvers.methodhandler = methodhandler
+-- original size: 4915, stripped down to: 3942
+if not modules then modules={} end modules ['data-met']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,format=string.find,string.format
+local sequenced=table.sequenced
+local addurlscheme,urlhashed=url.addscheme,url.hashed
+local trace_locating=false
+trackers.register("resolvers.locating",function(v) trace_methods=v end)
+trackers.register("resolvers.methods",function(v) trace_methods=v end)
+local report_methods=logs.reporter("resolvers","methods")
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registered={}
+local function splitmethod(filename)
+ if not filename then
+ return { scheme="unknown",original=filename }
+ end
+ if type(filename)=="table" then
+ return filename
+ end
+ filename=file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ end
+ local specification=url.hashed(filename)
+ if not specification.scheme or specification.scheme=="" then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ else
+ return specification
+ end
+end
+resolvers.splitmethod=splitmethod
+local function methodhandler(what,first,...)
+ local method=registered[what]
+ if method then
+ local how,namespace=method.how,method.namespace
+ if how=="uri" or how=="url" then
+ local specification=splitmethod(first)
+ local scheme=specification.scheme
+ local resolver=namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
+ end
+ end
+ elseif how=="tag" then
+ local resolver=namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
+ end
+ end
+ end
+ else
+    report_methods("resolving, invalid method %a",what)
+ end
+end
+resolvers.methodhandler=methodhandler
function resolvers.registermethod(name,namespace,how)
- registered[name] = { how = how or "tag", namespace = namespace }
- namespace["byscheme"] = function(scheme,filename,...)
- if scheme == "file" then
- return methodhandler(name,filename,...)
- else
- return methodhandler(name,addurlscheme(filename,scheme),...)
- end
- end
-end
-
-local concatinators = allocate { notfound = file.join } -- concatinate paths
-local locators = allocate { notfound = function() end } -- locate databases
-local hashers = allocate { notfound = function() end } -- load databases
-local generators = allocate { notfound = function() end } -- generate databases
-
-resolvers.concatinators = concatinators
-resolvers.locators = locators
-resolvers.hashers = hashers
-resolvers.generators = generators
-
-local registermethod = resolvers.registermethod
-
+ registered[name]={ how=how or "tag",namespace=namespace }
+ namespace["byscheme"]=function(scheme,filename,...)
+ if scheme=="file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
+local concatinators=allocate { notfound=file.join }
+local locators=allocate { notfound=function() end }
+local hashers=allocate { notfound=function() end }
+local generators=allocate { notfound=function() end }
+resolvers.concatinators=concatinators
+resolvers.locators=locators
+resolvers.hashers=hashers
+resolvers.generators=generators
+local registermethod=resolvers.registermethod
registermethod("concatinators",concatinators,"tag")
-registermethod("locators", locators, "uri")
-registermethod("hashers", hashers, "uri")
-registermethod("generators", generators, "uri")
+registermethod("locators",locators,"uri")
+registermethod("hashers",hashers,"uri")
+registermethod("generators",generators,"uri")
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- In practice we will work within one tds tree, but I want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree-specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use an
--- instance variable. We always have one instance active (sort of global).
-
--- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
-
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type, rawget = next, type, rawget
-local os = os
+package.loaded["data-res"] = package.loaded["data-res"] or true
-local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local filedirname = file.dirname
-local filebasename = file.basename
-local fileextname = file.extname
-local filejoin = file.join
-local collapsepath = file.collapsepath
-local joinpath = file.joinpath
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_resolving = logs.reporter("resolvers","resolving")
-
-local resolvers = resolvers
-
-local expandedpathfromlist = resolvers.expandedpathfromlist
-local checkedvariable = resolvers.checkedvariable
-local splitconfigurationpath = resolvers.splitconfigurationpath
-local methodhandler = resolvers.methodhandler
-
-local initializesetter = utilities.setters.initialize
-
-local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
-
-resolvers.cacheversion = '1.0.1'
-resolvers.configbanner = ''
-resolvers.homedir = environment.homedir
-resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
-resolvers.luacnfstate = "unknown"
-
--- The web2c tex binaries as well as kpse have built in paths for the configuration
--- files and there can be a depressing truckload of them. This is actually the weak
--- spot of a distribution. So we don't want:
---
--- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
---
--- but instead use:
---
--- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
---
--- which does not make texlive happy as there is a texmf-local tree one level up
--- (sigh), so we need this. We can assume web2c as mkiv does not run on older
--- texlives anyway.
---
--- texlive:
---
--- selfautodir:
--- selfautoparent:
--- selfautodir:share/texmf-local/web2c
--- selfautodir:share/texmf/web2c
--- selfautodir:texmf-local/web2c
--- selfautodir:texmf/web2c
--- selfautoparent:share/texmf-local/web2c
--- selfautoparent:share/texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf/web2c
---
--- minimals:
---
--- home:texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf-context/web2c
--- selfautoparent:texmf/web2c
+-- original size: 60821, stripped down to: 42503
+if not modules then modules={} end modules ['data-res']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
+local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local next,type,rawget=next,type,rawget
+local os=os
+local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local formatters=string.formatters
+local filedirname=file.dirname
+local filebasename=file.basename
+local suffixonly=file.suffixonly
+local filejoin=file.join
+local collapsepath=file.collapsepath
+local joinpath=file.joinpath
+local allocate=utilities.storage.allocate
+local settings_to_array=utilities.parsers.settings_to_array
+local setmetatableindex=table.setmetatableindex
+local luasuffixes=utilities.lua.suffixes
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_resolving=logs.reporter("resolvers","resolving")
+local resolvers=resolvers
+local expandedpathfromlist=resolvers.expandedpathfromlist
+local checkedvariable=resolvers.checkedvariable
+local splitconfigurationpath=resolvers.splitconfigurationpath
+local methodhandler=resolvers.methodhandler
+local initializesetter=utilities.setters.initialize
+local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
+resolvers.cacheversion='1.0.1'
+resolvers.configbanner=''
+resolvers.homedir=environment.homedir
+resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
+resolvers.luacnfname="texmfcnf.lua"
+resolvers.luacnfstate="unknown"
if environment.default_texmfcnf then
- -- unfortunately we now have quite some overkill in the spec (not so nice on a network)
- resolvers.luacnfspec = environment.default_texmfcnf
+ resolvers.luacnfspec=environment.default_texmfcnf
else
- -- resolvers.luacnfspec = "selfautoparent:texmf{-local,-context,}/web2c"
- resolvers.luacnfspec = "{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
-end
-
-resolvers.luacnfspec = 'home:texmf/web2c;' .. resolvers.luacnfspec
-
--- which (as we want users to use the web2c path) can be simplified to this:
---
--- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
--- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- else
--- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- end
-
-
-
-local unset_variable = "unset"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-
-resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
-
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
-
--- An instance has an environment (coming from the outside, kept raw), variables
--- (coming from the configuration file), and expansions (variables with nested
--- variables replaced). One can push something into the outer environment and
--- its internal copy, but only the latter one will be the raw unprefixed variant.
-
+ resolvers.luacnfspec="{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
+end
+resolvers.luacnfspec='home:texmf/web2c;'..resolvers.luacnfspec
+local unset_variable="unset"
+local formats=resolvers.formats
+local suffixes=resolvers.suffixes
+local dangerous=resolvers.dangerous
+local suffixmap=resolvers.suffixmap
+resolvers.defaultsuffixes={ "tex" }
+resolvers.instance=resolvers.instance or nil
+local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
- if instance then
- -- this one will be consulted first when we stay inside
- -- the current environment (prefixes are not resolved here)
- instance.environment[key] = value
- -- we feed back into the environment, and as this is used
- -- by other applications (via os.execute) we need to make
-        -- sure that prefixes are resolved
- ossetenv(key,raw and value or resolvers.resolve(value))
- end
+ if instance then
+ instance.environment[key]=value
+ ossetenv(key,raw and value or resolvers.resolve(value))
+ end
end
-
--- Beware we don't want empty here as this one can be called early on
--- and therefore we use rawget.
-
local function getenv(key)
- local value = rawget(instance.environment,key)
- if value and value ~= "" then
- return value
- else
- local e = osgetenv(key)
- return e ~= nil and e ~= "" and checkedvariable(e) or ""
- end
-end
-
-resolvers.getenv = getenv
-resolvers.env = getenv
-
--- We are going to use some metatable trickery where we backtrack from
--- expansion to variable to environment.
-
+ local value=rawget(instance.environment,key)
+ if value and value~="" then
+ return value
+ else
+ local e=osgetenv(key)
+ return e~=nil and e~="" and checkedvariable(e) or ""
+ end
+end
+resolvers.getenv=getenv
+resolvers.env=getenv
local function resolve(k)
- return instance.expansions[k]
-end
-
-local dollarstripper = lpeg.stripper("$")
-local inhibitstripper = P("!")^0 * Cs(P(1)^0)
-local backslashswapper = lpeg.replacer("\\","/")
-
-local somevariable = P("$") / ""
-local somekey = C(R("az","AZ","09","__","--")^1)
-local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
- + P(";") * (P(";") / "")
- + P(1)
-local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
-
-local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
-local variablecleaner = Cs((cleaner + P(1))^0)
-
-local somevariable = R("az","AZ","09","__","--")^1 / resolve
-local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
-local variableresolver = Cs((variable + P(1))^0)
-
+ return instance.expansions[k]
+end
+local dollarstripper=lpeg.stripper("$")
+local inhibitstripper=P("!")^0*Cs(P(1)^0)
+local backslashswapper=lpeg.replacer("\\","/")
+local somevariable=P("$")/""
+local somekey=C(R("az","AZ","09","__","--")^1)
+local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
+local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
+local variablecleaner=Cs((cleaner+P(1))^0)
+local somevariable=R("az","AZ","09","__","--")^1/resolve
+local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
+local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
- return lpegmatch(variableexpander,var) or var
-end
-
-function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
-
- if trace_locating then
- report_resolving("creating instance")
- end
-
- local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
-
- local newinstance = {
- environment = environment,
- variables = variables,
- expansions = expansions,
- order = order,
- files = allocate(),
- setups = allocate(),
- found = allocate(),
- foundintrees = allocate(),
- hashes = allocate(),
- hashed = allocate(),
- specification = allocate(),
- lists = allocate(),
- data = allocate(), -- only for loading
- fakepaths = allocate(),
- remember = true,
- diskcache = true,
- renewcache = false,
- renewtree = false,
- loaderror = false,
- savelists = true,
- pattern = nil, -- lists
- force_suffixes = true,
- }
-
- setmetatableindex(variables,function(t,k)
- local v
- for i=1,#order do
- v = order[i][k]
- if v ~= nil then
- t[k] = v
- return v
- end
- end
- if v == nil then
- v = ""
- end
- t[k] = v
- return v
- end)
-
- setmetatableindex(environment, function(t,k)
- local v = osgetenv(k)
- if v == nil then
- v = variables[k]
- end
- if v ~= nil then
- v = checkedvariable(v) or ""
- end
- v = resolvers.repath(v) -- for taco who has a : separated osfontdir
- t[k] = v
- return v
- end)
-
- setmetatableindex(expansions, function(t,k)
- local v = environment[k]
- if type(v) == "string" then
- v = lpegmatch(variableresolver,v)
- v = lpegmatch(variablecleaner,v)
- end
- t[k] = v
+ return lpegmatch(variableexpander,var) or var
+end
+function resolvers.newinstance()
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+ local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
+ local newinstance={
+ environment=environment,
+ variables=variables,
+ expansions=expansions,
+ order=order,
+ files=allocate(),
+ setups=allocate(),
+ found=allocate(),
+ foundintrees=allocate(),
+ hashes=allocate(),
+ hashed=allocate(),
+ specification=allocate(),
+ lists=allocate(),
+ data=allocate(),
+ fakepaths=allocate(),
+ remember=true,
+ diskcache=true,
+ renewcache=false,
+ renewtree=false,
+ loaderror=false,
+ savelists=true,
+ pattern=nil,
+ force_suffixes=true,
+ }
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v=order[i][k]
+ if v~=nil then
+ t[k]=v
return v
- end)
-
- return newinstance
-
+ end
+ end
+ if v==nil then
+ v=""
+ end
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(environment,function(t,k)
+ local v=osgetenv(k)
+ if v==nil then
+ v=variables[k]
+ end
+ if v~=nil then
+ v=checkedvariable(v) or ""
+ end
+ v=resolvers.repath(v)
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(expansions,function(t,k)
+ local v=environment[k]
+ if type(v)=="string" then
+ v=lpegmatch(variableresolver,v)
+ v=lpegmatch(variablecleaner,v)
+ end
+ t[k]=v
+ return v
+ end)
+ return newinstance
+end
+function resolvers.setinstance(someinstance)
+ instance=someinstance
+ resolvers.instance=someinstance
+ return someinstance
end
-
-function resolvers.setinstance(someinstance) -- only one instance is active
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
-end
-
function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
+ return resolvers.setinstance(resolvers.newinstance())
end
-
local function reset_hashes()
- instance.lists = { }
- instance.found = { }
-end
-
-local slash = P("/")
-
-local pathexpressionpattern = Cs (
- Cc("^") * (
- Cc("%") * S(".-")
- + slash^2 * P(-1) / "/.*"
- + slash^2 / "/.-/"
- + (1-slash) * P(-1) * Cc("/")
- + P(1)
- )^1 * Cc("$") -- yes or no $
+ instance.lists={}
+ instance.found={}
+end
+local slash=P("/")
+local pathexpressionpattern=Cs (
+ Cc("^")*(
+ Cc("%")*S(".-")+slash^2*P(-1)/"/.*"
++slash^2/"/[^/]*/*"+(1-slash)*P(-1)*Cc("/")+P(1)
+ )^1*Cc("$")
)
-
-local cache = { }
-
+local cache={}
local function makepathexpression(str)
- if str == "." then
- return "^%./$"
- else
- local c = cache[str]
- if not c then
- c = lpegmatch(pathexpressionpattern,str)
- cache[str] = c
- end
- return c
+ if str=="." then
+ return "^%./$"
+ else
+ local c=cache[str]
+ if not c then
+ c=lpegmatch(pathexpressionpattern,str)
+ cache[str]=c
end
+ return c
+ end
end
-
local function reportcriticalvariables(cnfspec)
- if trace_locating then
- for i=1,#resolvers.criticalvars do
- local k = resolvers.criticalvars[i]
- local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
- report_resolving("variable '%s' set to '%s'",k,v)
- end
- report_resolving()
- if cnfspec then
- if type(cnfspec) == "table" then
- report_resolving("using configuration specification '%s'",concat(cnfspec,","))
- else
- report_resolving("using configuration specification '%s'",cnfspec)
- end
- end
- report_resolving()
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k=resolvers.criticalvars[i]
+ local v=resolvers.getenv(k) or "unknown"
+ report_resolving("variable %a set to %a",k,v)
end
- reportcriticalvariables = function() end
+ report_resolving()
+ if cnfspec then
+ report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
+ end
+ report_resolving()
+ end
+ reportcriticalvariables=function() end
end
-
local function identify_configuration_files()
- local specification = instance.specification
- if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
- if cnfspec == "" then
- cnfspec = resolvers.luacnfspec
- resolvers.luacnfstate = "default"
- else
- resolvers.luacnfstate = "environment"
- end
- reportcriticalvariables(cnfspec)
- local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
- local luacnfname = resolvers.luacnfname
- for i=1,#cnfpaths do
- local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
- local realname = resolvers.resolve(filename)
- if lfs.isfile(realname) then
- specification[#specification+1] = filename
- if trace_locating then
- report_resolving("found configuration file '%s'",realname)
- end
- elseif trace_locating then
- report_resolving("unknown configuration file '%s'",realname)
- end
- end
+ local specification=instance.specification
+ if #specification==0 then
+ local cnfspec=getenv("TEXMFCNF")
+ if cnfspec=="" then
+ cnfspec=resolvers.luacnfspec
+ resolvers.luacnfstate="default"
+ else
+ resolvers.luacnfstate="environment"
+ end
+ reportcriticalvariables(cnfspec)
+ local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname=resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename=collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname=resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1]=filename
if trace_locating then
- report_resolving()
+ report_resolving("found configuration file %a",realname)
end
- elseif trace_locating then
- report_resolving("configuration files already identified")
+ elseif trace_locating then
+ report_resolving("unknown configuration file %a",realname)
+ end
end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
end
-
local function load_configuration_files()
- local specification = instance.specification
- if #specification > 0 then
- local luacnfname = resolvers.luacnfname
- for i=1,#specification do
- local filename = specification[i]
- local pathname = filedirname(filename)
- local filename = filejoin(pathname,luacnfname)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local setups = instance.setups
- local data = blob()
- local parent = data and data.parent
- if parent then
- local filename = filejoin(pathname,parent)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local parentdata = blob()
- if parentdata then
- report_resolving("loading configuration file '%s'",filename)
- data = table.merged(parentdata,data)
- end
- end
- end
- data = data and data.content
- if data then
- if trace_locating then
- report_resolving("loading configuration file '%s'",filename)
- report_resolving()
- end
- local variables = data.variables or { }
- local warning = false
- for k, v in next, data do
- local variant = type(v)
- if variant == "table" then
- initializesetter(filename,k,v)
- elseif variables[k] == nil then
- if trace_locating and not warning then
- report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
- warning = true
- end
- variables[k] = v
- end
- end
- setups[pathname] = variables
- if resolvers.luacnfstate == "default" then
- -- the following code is not tested
- local cnfspec = variables["TEXMFCNF"]
- if cnfspec then
- if trace_locating then
- report_resolving("reloading configuration due to TEXMF redefinition")
- end
- -- we push the value into the main environment (osenv) so
- -- that it takes precedence over the default one and therefore
- -- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
- -- we now identify and load the specified configuration files
- instance.specification = { }
- identify_configuration_files()
- load_configuration_files()
- -- we prevent further overload of the configuration variable
- resolvers.luacnfstate = "configuration"
- -- we quit the outer loop
- break
- end
- end
-
- else
- if trace_locating then
- report_resolving("skipping configuration file '%s' (no content)",filename)
- end
- setups[pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- report_resolving("skipping configuration file '%s' (no valid format)",filename)
- end
- instance.order[#instance.order+1] = instance.setups[pathname]
- if instance.loaderror then
- break
- end
- end
- elseif trace_locating then
- report_resolving("warning: no lua configuration files found")
- end
+ local specification=instance.specification
+ if #specification>0 then
+ local luacnfname=resolvers.luacnfname
+ for i=1,#specification do
+ local filename=specification[i]
+ local pathname=filedirname(filename)
+ local filename=filejoin(pathname,luacnfname)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local setups=instance.setups
+ local data=blob()
+ local parent=data and data.parent
+ if parent then
+ local filename=filejoin(pathname,parent)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local parentdata=blob()
+ if parentdata then
+ report_resolving("loading configuration file %a",filename)
+ data=table.merged(parentdata,data)
+ end
+ end
+ end
+ data=data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file %a",filename)
+ report_resolving()
+ end
+ local variables=data.variables or {}
+ local warning=false
+ for k,v in next,data do
+ local variant=type(v)
+ if variant=="table" then
+ initializesetter(filename,k,v)
+ elseif variables[k]==nil then
+ if trace_locating and not warning then
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning=true
+ end
+ variables[k]=v
+ end
+ end
+ setups[pathname]=variables
+ if resolvers.luacnfstate=="default" then
+ local cnfspec=variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ resolvers.setenv("TEXMFCNF",cnfspec)
+ instance.specification={}
+ identify_configuration_files()
+ load_configuration_files()
+ resolvers.luacnfstate="configuration"
+ break
+ end
+ end
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file %a (no content)",filename)
+ end
+ setups[pathname]={}
+ instance.loaderror=true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file %a (no valid format)",filename)
+ end
+ instance.order[#instance.order+1]=instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
+ end
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
+ end
end
-
--- scheme magic ... database loading
-
local function load_file_databases()
- instance.loaderror, instance.files = false, allocate()
- if not instance.renewcache then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- resolvers.hashers.byscheme(hash.type,hash.name)
- if instance.loaderror then break end
- end
+ instance.loaderror,instance.files=false,allocate()
+ if not instance.renewcache then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
end
+ end
end
-
local function locate_file_databases()
- -- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
- if #texmfpaths > 0 then
- for i=1,#texmfpaths do
- local path = collapsepath(texmfpaths[i])
- local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
- if stripped ~= "" then
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
- local spec = resolvers.splitmethod(stripped)
- if runtime and (spec.noscheme or spec.scheme == "file") then
- stripped = "tree:///" .. stripped
- elseif spec.scheme == "cache" or spec.scheme == "file" then
- stripped = spec.path
- end
- if trace_locating then
- if runtime then
- report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
- else
- report_resolving("locating list of '%s' (cached)",path)
- end
- end
- methodhandler('locators',stripped)
- end
+ local texmfpaths=resolvers.expandedpathlist("TEXMF")
+ if #texmfpaths>0 then
+ for i=1,#texmfpaths do
+ local path=collapsepath(texmfpaths[i])
+ path=gsub(path,"/+$","")
+ local stripped=lpegmatch(inhibitstripper,path)
+ if stripped~="" then
+ local runtime=stripped==path
+ path=resolvers.cleanpath(path)
+ local spec=resolvers.splitmethod(stripped)
+ if runtime and (spec.noscheme or spec.scheme=="file") then
+ stripped="tree:///"..stripped
+ elseif spec.scheme=="cache" or spec.scheme=="file" then
+ stripped=spec.path
end
if trace_locating then
- report_resolving()
+ if runtime then
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
+ else
+ report_resolving("locating list of %a (cached)",path)
+ end
end
- elseif trace_locating then
- report_resolving("no texmf paths are defined (using TEXMF)")
- end
-end
-
-local function generate_file_databases()
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- methodhandler('generators',hash.name)
+ methodhandler('locators',stripped)
+ end
end
if trace_locating then
- report_resolving()
+ report_resolving()
end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
+ end
end
-
-local function save_file_databases() -- will become cachers
- for i=1,#instance.hashes do
- local hash = instance.hashes[i]
- local cachename = hash.name
- if hash.cache then
- local content = instance.files[cachename]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",cachename)
- end
- caches.savecontent(cachename,"files",content)
- elseif trace_locating then
- report_resolving("not saving runtime tree '%s'",cachename)
- end
+local function generate_file_databases()
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
+end
+local function save_file_databases()
+ for i=1,#instance.hashes do
+ local hash=instance.hashes[i]
+ local cachename=hash.name
+ if hash.cache then
+ local content=instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree %a",cachename)
end
+ end
end
-
function resolvers.renew(hashname)
- if hashname and hashname ~= "" then
- local expanded = resolvers.expansion(hashname) or ""
- if expanded ~= "" then
- if trace_locating then
- report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
- end
- hashname = expanded
- else
- if trace_locating then
- report_resolving("identifying tree '%s'",hashname)
- end
- end
- local realpath = resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
- if trace_locating then
- report_resolving("using path '%s'",realpath)
- end
- methodhandler('generators',hashname)
- -- could be shared
- local content = instance.files[hashname]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",hashname)
- end
- caches.savecontent(hashname,"files",content)
- -- till here
- else
- report_resolving("invalid path '%s'",realpath)
- end
+ if hashname and hashname~="" then
+ local expanded=resolvers.expansion(hashname) or ""
+ if expanded~="" then
+ if trace_locating then
+ report_resolving("identifying tree %a from %a",expanded,hashname)
+ end
+ hashname=expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree %a",hashname)
+ end
+ end
+ local realpath=resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path %a",realpath)
+ end
+ methodhandler('generators',hashname)
+ local content=instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ else
+ report_resolving("invalid path %a",realpath)
end
+ end
end
-
local function load_databases()
- locate_file_databases()
- if instance.diskcache and not instance.renewcache then
- load_file_databases()
- if instance.loaderror then
- generate_file_databases()
- save_file_databases()
- end
- else
- generate_file_databases()
- if instance.renewcache then
- save_file_databases()
- end
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
+ end
end
-
function resolvers.appendhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' appended",name)
- end
- insert(instance.hashes, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a appended",name)
end
+ insert(instance.hashes,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
end
-
function resolvers.prependhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' prepended",name)
- end
- insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
- end
-end
-
-function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- reset_hashes()
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a prepended",name)
+ end
+ insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.extendtexmfvariable(specification)
+ local t=resolvers.splitpath(getenv("TEXMF"))
+ insert(t,1,specification)
+ local newspec=concat(t,",")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"]=newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"]=newspec
+ else
+ end
+ reset_hashes()
end
-
function resolvers.splitexpansions()
- local ie = instance.expansions
- for k,v in next, ie do
- local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
- for kk=1,#p do
- local vv = p[kk]
- if vv ~= "" and not h[vv] then
- tn = tn + 1
- t[tn] = vv
- h[vv] = true
- end
- end
- if #t > 1 then
- ie[k] = t
- else
- ie[k] = t[1]
- end
+ local ie=instance.expansions
+ for k,v in next,ie do
+ local t,tn,h,p={},0,{},splitconfigurationpath(v)
+ for kk=1,#p do
+ local vv=p[kk]
+ if vv~="" and not h[vv] then
+ tn=tn+1
+ t[tn]=vv
+ h[vv]=true
+ end
+ end
+ if #t>1 then
+ ie[k]=t
+ else
+ ie[k]=t[1]
end
+ end
end
-
--- end of split/join code
-
--- we used to have 'files' and 'configurations' so therefore the following
--- shared function
-
function resolvers.datastate()
- return caches.contentstate()
+ return caches.contentstate()
end
-
function resolvers.variable(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.variables[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.variables[name]
+ return result~=nil and result or ""
end
-
function resolvers.expansion(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.expansions[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.expansions[name]
+ return result~=nil and result or ""
end
-
function resolvers.unexpandedpathlist(str)
- local pth = resolvers.variable(str)
- local lst = resolvers.splitpath(pth)
- return expandedpathfromlist(lst)
+ local pth=resolvers.variable(str)
+ local lst=resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-
function resolvers.unexpandedpath(str)
- return joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
-
-local done = { }
-
+local done={}
function resolvers.resetextrapath()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+ local ep=instance.extra_paths
+ if not ep then
+ ep,done={},{}
+ instance.extra_paths=ep
+ elseif #ep>0 then
+ instance.lists,done={},{}
+ end
end
-
function resolvers.registerextrapath(paths,subpaths)
- local ep = instance.extra_paths or { }
- local oldn = #ep
- local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
- end
+ paths=settings_to_array(paths)
+ subpaths=settings_to_array(subpaths)
+ local ep=instance.extra_paths or {}
+ local oldn=#ep
+ local newn=oldn
+ local nofpaths=#paths
+ local nofsubpaths=#subpaths
+ if nofpaths>0 then
+ if nofsubpaths>0 then
+ for i=1,nofpaths do
+ local p=paths[i]
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=p.."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ else
+ for i=1,nofpaths do
+ local p=paths[i]
+ if not done[p] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(p)
+ done[p]=true
+ end
+ end
+ end
+ elseif nofsubpaths>0 then
+ for i=1,oldn do
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=ep[i].."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ end
+ if newn>0 then
+ instance.extra_paths=ep
+ end
+ if newn>oldn then
+ instance.lists={}
+ end
+end
+local function made_list(instance,list)
+ local ep=instance.extra_paths
+ if not ep or #ep==0 then
+ return list
+ else
+ local done,new,newn={},{},0
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ if find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
+ break
end
+ end
end
- if newn > 0 then
- instance.extra_paths = ep -- register paths
- end
- if newn > oldn then
- instance.lists = { } -- erase the cache
+ for k=1,#ep do
+ local v=ep[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
-end
-
-local function made_list(instance,list)
- local ep = instance.extra_paths
- if not ep or #ep == 0 then
- return list
- else
- local done, new, newn = { }, { }, 0
- -- honour . .. ../.. but only when at the start
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- else
- break
- end
- end
- end
- -- first the extra paths
- for k=1,#ep do
- local v = ep[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- -- next the formal paths
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- return new
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
+ return new
+ end
end
-
function resolvers.cleanpathlist(str)
- local t = resolvers.expandedpathlist(str)
- if t then
- for i=1,#t do
- t[i] = collapsepath(resolvers.cleanpath(t[i]))
- end
+ local t=resolvers.expandedpathlist(str)
+ if t then
+ for i=1,#t do
+ t[i]=collapsepath(resolvers.cleanpath(t[i]))
end
- return t
+ end
+ return t
end
-
function resolvers.expandpath(str)
- return joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
-
function resolvers.expandedpathlist(str)
- if not str then
- return { }
- elseif instance.savelists then
- str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
- else
- local lst = resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ if not str then
+ return {}
+ elseif instance.savelists then
+ str=lpegmatch(dollarstripper,str)
+ local lists=instance.lists
+ local lst=lists[str]
+ if not lst then
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst=expandedpathfromlist(l)
+ lists[str]=lst
end
+ return lst
+ else
+ local lst=resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
+ end
end
-
-function resolvers.expandedpathlistfromvariable(str) -- brrr
- str = lpegmatch(dollarstripper,str)
- local tmp = resolvers.variableofformatorsuffix(str)
- return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
+function resolvers.expandedpathlistfromvariable(str)
+ str=lpegmatch(dollarstripper,str)
+ local tmp=resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp~="" and tmp or str)
end
-
function resolvers.expandpathfromvariable(str)
- return joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
-
-function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
--- local ori = resolvers.variable(str)
--- if ori == "" then
- local ori = str
--- end
- local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return joinpath(pth)
+function resolvers.expandbraces(str)
+ local ori=str
+ local pth=expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
end
-
function resolvers.registerfilehash(name,content,someerror)
- if content then
- instance.files[name] = content
- else
- instance.files[name] = { }
- if somerror == true then -- can be unset
- instance.loaderror = someerror
- end
+ if content then
+ instance.files[name]=content
+ else
+ instance.files[name]={}
+    if someerror==true then
+ instance.loaderror=someerror
end
+ end
end
-
local function isreadable(name)
-    local readable = lfs.isfile(name) -- not file.is_readable(name) as it can be a dir
- if trace_detail then
- if readable then
- report_resolving("file '%s' is readable",name)
- else
- report_resolving("file '%s' is not readable", name)
- end
+ local readable=lfs.isfile(name)
+ if trace_detail then
+ if readable then
+ report_resolving("file %a is readable",name)
+ else
+ report_resolving("file %a is not readable",name)
end
- return readable
+ end
+ return readable
end
-
--- name
--- name/name
-
local function collect_files(names)
- local filelist, noffiles = { }, 0
- for k=1,#names do
- local fname = names[k]
+ local filelist,noffiles={},0
+ for k=1,#names do
+ local fname=names[k]
+ if trace_detail then
+ report_resolving("checking name %a",fname)
+ end
+ local bname=filebasename(fname)
+ local dname=filedirname(fname)
+ if dname=="" or find(dname,"^%.") then
+ dname=false
+ else
+ dname=gsub(dname,"%*",".*")
+ dname="/"..dname.."$"
+ end
+ local hashes=instance.hashes
+ for h=1,#hashes do
+ local hash=hashes[h]
+ local blobpath=hash.name
+ local files=blobpath and instance.files[blobpath]
+ if files then
if trace_detail then
- report_resolving("checking name '%s'",fname)
- end
- local bname = filebasename(fname)
- local dname = filedirname(fname)
- if dname == "" or find(dname,"^%.") then
- dname = false
- else
-dname = gsub(dname,"*","%.*")
- dname = "/" .. dname .. "$"
- end
- local hashes = instance.hashes
- for h=1,#hashes do
- local hash = hashes[h]
- local blobpath = hash.name
- local files = blobpath and instance.files[blobpath]
- if files then
+ report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ end
+ local blobfile=files[bname]
+ if not blobfile then
+ local rname="remap:"..bname
+ blobfile=files[rname]
+ if blobfile then
+ bname=files[rname]
+ blobfile=files[bname]
+ end
+ end
+ if blobfile then
+ local blobroot=files.__path__ or blobpath
+ if type(blobfile)=='string' then
+ if not dname or find(blobfile,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,blobfile,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ else
+ for kk=1,#blobfile do
+ local vv=blobfile[kk]
+ if not dname or find(vv,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,vv,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
- report_resolving("deep checking '%s' (%s)",blobpath,bname)
- end
- local blobfile = files[bname]
- if not blobfile then
- local rname = "remap:"..bname
- blobfile = files[rname]
- if blobfile then
- bname = files[rname]
- blobfile = files[bname]
- end
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
end
- if blobfile then
- local blobroot = files.__path__ or blobpath
- if type(blobfile) == 'string' then
- if not dname or find(blobfile,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,blobfile,bname)
- local search = filejoin(blobroot,blobfile,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- else
- for kk=1,#blobfile do
- local vv = blobfile[kk]
- if not dname or find(vv,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,vv,bname)
- local search = filejoin(blobroot,vv,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- end
- end
- end
- elseif trace_locating then
- report_resolving("no match in '%s' (%s)",blobpath,bname)
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
end
+ end
end
+ elseif trace_locating then
+ report_resolving("no match in %a (%s)",blobpath,bname)
+ end
end
- return noffiles > 0 and filelist or nil
+ end
+ return noffiles>0 and filelist or nil
end
-
-local fit = { }
-
+local fit={}
function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
- local foundintrees = instance.foundintrees
- if usedmethod == "direct" and filename == foundname and fit[foundname] then
- -- just an extra lookup after a test on presence
- else
- local t = {
- filename = filename,
- format = format ~= "" and format or nil,
- filetype = filetype ~= "" and filetype or nil,
- usedmethod = usedmethod,
- foundname = foundname,
- }
- fit[foundname] = t
- foundintrees[#foundintrees+1] = t
- end
+ local foundintrees=instance.foundintrees
+ if usedmethod=="direct" and filename==foundname and fit[foundname] then
+ else
+ local t={
+ filename=filename,
+ format=format~="" and format or nil,
+ filetype=filetype~="" and filetype or nil,
+ usedmethod=usedmethod,
+ foundname=foundname,
+ }
+ fit[foundname]=t
+ foundintrees[#foundintrees+1]=t
+ end
end
-
--- split the next one up for readability (but this module needs a cleanup anyway)
-
-local function can_be_dir(name) -- can become local
- local fakepaths = instance.fakepaths
- if not fakepaths[name] then
- if lfs.isdir(name) then
- fakepaths[name] = 1 -- directory
- else
- fakepaths[name] = 2 -- no directory
- end
+local function can_be_dir(name)
+ local fakepaths=instance.fakepaths
+ if not fakepaths[name] then
+ if lfs.isdir(name) then
+ fakepaths[name]=1
+ else
+ fakepaths[name]=2
end
- return fakepaths[name] == 1
+ end
+ return fakepaths[name]==1
end
-
-local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
-
--- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-
+local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
local collect_instance_files
-
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
- -- too tricky as filename can be bla.1.2.3:
- --
- -- if not suffixmap[ext] then
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
- if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
- end
- end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
- end
+ local filetype,wantedfiles,ext='',{},suffixonly(filename)
+ wantedfiles[#wantedfiles+1]=filename
+ if askedformat=="" then
+ if ext=="" or not suffixmap[ext] then
+ local defaultsuffixes=resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname=filename..'.'..defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1]=forcedname
+ filetype=resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype %a",filetype)
end
+ end
else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
- end
- end
- end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
+ filetype=resolvers.formatofsuffix(filename)
+ if trace_locating then
+ report_resolving("using suffix based filetype %a",filetype)
+ end
+ end
+ else
+ if ext=="" or not suffixmap[ext] then
+ local format_suffixes=suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
end
+ end
+ end
+ filetype=askedformat
+ if trace_locating then
+ report_resolving("using given filetype %a",filetype)
end
- return filetype, wantedfiles
+ end
+ return filetype,wantedfiles
end
-
local function find_direct(filename,allresults)
- if not dangerous[askedformat] and isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- return "direct", { filename }
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file %a found directly",filename)
end
+ return "direct",{ filename }
+ end
end
-
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
- if trace_locating then
- report_resolving("checking wildcard '%s'", filename)
- end
- local method, result = resolvers.findwildcardfiles(filename)
- if result then
- return "wildcard", result
- end
- end
-end
-
-local function find_qualified(filename,allresults) -- this one will be split too
- if not file.is_qualified_path(filename) then
- return
- end
+ if find(filename,'%*') then
if trace_locating then
- report_resolving("checking qualified name '%s'", filename)
- end
- if isreadable(filename) then
- if trace_detail then
- report_resolving("qualified file '%s' found", filename)
- end
- return "qualified", { filename }
- end
+ report_resolving("checking wildcard %a",filename)
+ end
+ local method,result=resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard",result
+ end
+ end
+end
+local function find_qualified(filename,allresults,askedformat,alsostripped)
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name %a",filename)
+ end
+ if isreadable(filename) then
if trace_detail then
- report_resolving("locating qualified file '%s'", filename)
- end
- local forcedname, suffix = "", fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- return "qualified", { forcedname }
- end
- end
- end
- end
- if suffix and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe all
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #resolved == 0 then
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- if #resolved > 0 then
- local result = { }
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1] = rr
- end
- end
- if #result > 0 then
- return "qualified", result
- end
- end
- end
- -- a real wildcard:
- --
- -- local filelist = collect_files({basename})
- -- result = { }
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- if #result > 0 then
- -- return "qualified", result
- -- end
- end
+ report_resolving("qualified file %a found",filename)
+ end
+ return "qualified",{ filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file %a",filename)
+ end
+ local forcedname,suffix="",suffixonly(filename)
+ if suffix=="" then
+ local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s=format_suffixes[i]
+ forcedname=filename.."."..s
+ if isreadable(forcedname) then
+ if trace_locating then
+ report_resolving("no suffix, forcing format filetype %a",s)
+ end
+ return "qualified",{ forcedname }
+ end
+ end
+ end
+ end
+ if alsostripped and suffix and suffix~="" then
+ local basename=filebasename(filename)
+ local pattern=lpegmatch(preparetreepattern,filename)
+ local savedformat=askedformat
+ local format=savedformat or ""
+ if format=="" then
+ askedformat=resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat="othertextfiles"
+ end
+ if basename~=filename then
+ local resolved=collect_instance_files(basename,askedformat,allresults)
+ if #resolved==0 then
+ local lowered=lower(basename)
+ if filename~=lowered then
+ resolved=collect_instance_files(lowered,askedformat,allresults)
+ end
+ end
+ resolvers.format=savedformat
+ if #resolved>0 then
+ local result={}
+ for r=1,#resolved do
+ local rr=resolved[r]
+ if find(rr,pattern) then
+ result[#result+1]=rr
+ end
+ end
+ if #result>0 then
+ return "qualified",result
+ end
+ end
+ end
+ end
end
-
local function check_subpath(fname)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by deep scanning",fname)
- end
- return fname
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found %a by deep scanning",fname)
end
+ return fname
+ end
end
-
local function find_intree(filename,filetype,wantedfiles,allresults)
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- local method = "intree"
- if pathlist and #pathlist > 0 then
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
+ local typespec=resolvers.variableofformat(filetype)
+ local pathlist=resolvers.expandedpathlist(typespec)
+ local method="intree"
+ if pathlist and #pathlist>0 then
+ local filelist=collect_files(wantedfiles)
+ local dirlist={}
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i]=filedirname(filelist[i][3]).."/"
+ end
+ end
+ if trace_detail then
+ report_resolving("checking filename %a",filename)
+ end
+ local resolve=resolvers.resolve
+ local result={}
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local pathname=lpegmatch(inhibitstripper,path)
+ local doscan=path==pathname
+ if not find (pathname,'//$') then
+ doscan=false
+ end
+ local done=false
+ if filelist then
+ local expression=makepathexpression(pathname)
if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- local result = { }
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname,'//$') then
- doscan = false -- we check directly on the path
- end
- local done = false
- -- using file list
- if filelist then -- database
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern %a for path %a",expression,pathname)
+ end
+ for k=1,#filelist do
+ local fl=filelist[k]
+ local f=fl[2]
+ local d=dirlist[k]
+ if find(d,expression) or find(resolve(d),expression) then
+ result[#result+1]=resolve(fl[3])
+ done=true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
+ end
+ else
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
+ end
+ end
+ end
+ if done then
+ method="database"
+ else
+ method="filesystem"
+ pathname=gsub(pathname,"/+$","")
+ pathname=resolve(pathname)
+ local scheme=url.hasscheme(pathname)
+ if not scheme or scheme=="file" then
+ local pname=gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
+ end
+ if not done and doscan then
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
break
+ end
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ end
end
- end
- end
- if done then
- method = "database"
- else
- method = "filesystem" -- bonus, even when !! is specified
- pathname = gsub(pathname,"/+$","")
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- if can_be_dir(pname) then
- -- quick root scan first
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local fname = check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- if not done and doscan then
- -- collect files in path (and cache the result)
- local files = resolvers.simplescanfiles(pname,false,true)
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local subpath = files[w]
- if not subpath or subpath == "" then
- -- rootscan already done
- elseif type(subpath) == "string" then
- local fname = check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- else
- for i=1,#subpath do
- local sp = subpath[i]
- if sp == "" then
- -- roottest already done
- else
- local fname = check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- end
- if done and not allresults then
- break
- end
- end
- end
- end
- end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
+ if done and not allresults then
+ break
end
+ end
end
+ end
end
- -- todo recursive scanning
- if done and not allresults then
- break
- end
- end
- if #result > 0 then
- return method, result
+ else
+ end
end
+ end
+ if done and not allresults then
+ break
+ end
end
+ if #result>0 then
+ return method,result
+ end
+ end
end
-
local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
+ end
+ local result={}
+ for k=1,#wantedfiles do
+ local fname=wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename=fname
+ result[#result+1]=filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
+ end
+ if #result>0 then
+ return "onpath",result
+ end
+end
+local function find_otherwise(filename,filetype,wantedfiles,allresults)
+ local filelist=collect_files(wantedfiles)
+ local fl=filelist and filelist[1]
+ if fl then
+ return "otherwise",{ resolvers.resolve(fl[3]) }
+ end
+end
+collect_instance_files=function(filename,askedformat,allresults)
+ askedformat=askedformat or ""
+ filename=collapsepath(filename)
+ if allresults then
+ local filetype,wantedfiles=find_analyze(filename,askedformat)
+ local results={
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true,askedformat) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result,status,done={},{},{}
+ for k,r in next,results do
+ local method,list=r[1],r[2]
+ if method and list then
+ for i=1,#list do
+ local c=collapsepath(list[i])
+ if not done[c] then
+ result[#result+1]=c
+ done[c]=true
+ end
+ status[#status+1]=formatters["%-10s: %s"](method,c)
+ end
+ end
+ end
if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
- end
- local result = { }
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename = fname
- result[#result+1] = filejoin('.',fname)
- if not allresults then
- break
- end
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result,status
+ else
+ local method,result,stamp,filetype,wantedfiles
+ if instance.remember then
+ stamp=formatters["%s--%s"](filename,askedformat)
+ result=stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file %a",filename)
end
+ return result
+ end
end
- if #result > 0 then
- return "onpath", result
- end
-end
-
-local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- return "otherwise", { resolvers.resolve(fl[3]) } -- filename
- end
-end
-
--- we could have a loop over the 6 functions but then we'd have to
--- always analyze
-
-collect_instance_files = function(filename,askedformat,allresults) -- uses nested
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- if allresults then
- -- no need for caching, only used for tracing
- local filetype, wantedfiles = find_analyze(filename,askedformat)
- local results = {
- { find_direct (filename,true) },
- { find_wildcard (filename,true) },
- { find_qualified(filename,true) },
- { find_intree (filename,filetype,wantedfiles,true) },
- { find_onpath (filename,filetype,wantedfiles,true) },
- { find_otherwise(filename,filetype,wantedfiles,true) },
- }
- local result, status, done = { }, { }, { }
- for k, r in next, results do
- local method, list = r[1], r[2]
- if method and list then
- for i=1,#list do
- local c = collapsepath(list[i])
- if not done[c] then
- result[#result+1] = c
- done[c] = true
- end
- status[#status+1] = format("%-10s: %s",method,c)
- end
- end
- end
- if trace_detail then
- report_resolving("lookup status: %s",table.serialize(status,filename))
- end
- return result, status
- else
- local method, result, stamp, filetype, wantedfiles
- if instance.remember then
- stamp = format("%s--%s", filename, askedformat)
- result = stamp and instance.found[stamp]
- if result then
- if trace_locating then
- report_resolving("remembered file '%s'",filename)
- end
- return result
- end
- end
- method, result = find_direct(filename)
+ method,result=find_direct(filename)
+ if not result then
+ method,result=find_wildcard(filename)
+ if not result then
+ method,result=find_qualified(filename,false,askedformat)
if not result then
- method, result = find_wildcard(filename)
+ filetype,wantedfiles=find_analyze(filename,askedformat)
+ method,result=find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_onpath(filename,filetype,wantedfiles)
if not result then
- method, result = find_qualified(filename)
- if not result then
- filetype, wantedfiles = find_analyze(filename,askedformat)
- method, result = find_intree(filename,filetype,wantedfiles)
- if not result then
- method, result = find_onpath(filename,filetype,wantedfiles)
- if not result then
- method, result = find_otherwise(filename,filetype,wantedfiles)
- end
- end
- end
+ method,result=find_otherwise(filename,filetype,wantedfiles)
end
+ end
end
- if result and #result > 0 then
- local foundname = collapsepath(result[1])
- resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
- result = { foundname }
- else
- result = { } -- maybe false
- end
- if stamp then
- if trace_locating then
- report_resolving("remembering file '%s'",filename)
- end
- instance.found[stamp] = result
- end
- return result
+ end
+ end
+ if result and #result>0 then
+ local foundname=collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result={ foundname }
+ else
+ result={}
end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file %a",filename)
+ end
+ instance.found[stamp]=result
+ end
+ return result
+ end
end
-
--- -- -- end of main file search routing -- -- --
-
-
local function findfiles(filename,filetype,allresults)
- local result, status = collect_instance_files(filename,filetype or "",allresults)
- if not result or #result == 0 then
- local lowered = lower(filename)
- if filename ~= lowered then
- result, status = collect_instance_files(lowered,filetype or "",allresults)
- end
+ local result,status=collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result==0 then
+ local lowered=lower(filename)
+ if filename~=lowered then
+ result,status=collect_instance_files(lowered,filetype or "",allresults)
end
- return result or { }, status
+ end
+ return result or {},status
end
-
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ return findfiles(filename,filetype,true)
end
-
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ return findfiles(filename,filetype,false)[1] or ""
end
-
function resolvers.findpath(filename,filetype)
- return filedirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
-
local function findgivenfiles(filename,allresults)
- local bname, result = filebasename(filename), { }
- local hashes = instance.hashes
- local noffound = 0
- for k=1,#hashes do
- local hash = hashes[k]
- local files = instance.files[hash.name] or { }
- local blist = files[bname]
- if not blist then
- local rname = "remap:"..bname
- blist = files[rname]
- if blist then
- bname = files[rname]
- blist = files[bname]
- end
+ local bname,result=filebasename(filename),{}
+ local hashes=instance.hashes
+ local noffound=0
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local files=instance.files[hash.name] or {}
+ local blist=files[bname]
+ if not blist then
+ local rname="remap:"..bname
+ blist=files[rname]
+ if blist then
+ bname=files[rname]
+ blist=files[bname]
+ end
+ end
+ if blist then
+ if type(blist)=='string' then
+ local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then
+ break
+ end
end
- if blist then
- if type(blist) == 'string' then
- local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- end
- end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then break end
+ end
end
+ end
end
- return result
+ end
+ return result
end
-
function resolvers.findgivenfiles(filename)
- return findgivenfiles(filename,true)
+ return findgivenfiles(filename,true)
end
-
function resolvers.findgivenfile(filename)
- return findgivenfiles(filename,false)[1] or ""
+ return findgivenfiles(filename,false)[1] or ""
end
-
local function doit(path,blist,bname,tag,variant,result,allresults)
- local done = false
- if blist and variant then
- local resolve = resolvers.resolve -- added
- if type(blist) == 'string' then
- -- make function and share code
- if find(lower(blist),path) then
- local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- if find(lower(vv),path) then
- local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- if not allresults then break end
- end
- end
+ local done=false
+ if blist and variant then
+ local resolve=resolvers.resolve
+ if type(blist)=='string' then
+ if find(lower(blist),path) then
+ local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ if find(lower(vv),path) then
+ local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ if not allresults then break end
end
+ end
end
- return done
+ end
+ return done
end
-
-
-local makewildcard = Cs(
- (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
- + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
+local makewildcard=Cs(
+ (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
-
function resolvers.wildcardpattern(pattern)
- return lpegmatch(makewildcard,pattern) or pattern
-end
-
-local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
- result = result or { }
- local base = filebasename(filename)
- local dirn = filedirname(filename)
- local path = lower(lpegmatch(makewildcard,dirn) or dirn)
- local name = lower(lpegmatch(makewildcard,base) or base)
- local files, done = instance.files, false
- if find(name,"%*") then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- for kk, hh in next, files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
- if done and not allresults then break end
- end
- end
- end
- end
- else
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+local function findwildcardfiles(filename,allresults,result)
+ result=result or {}
+ local base=filebasename(filename)
+ local dirn=filedirname(filename)
+ local path=lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name=lower(lpegmatch(makewildcard,base) or base)
+ local files,done=instance.files,false
+ if find(name,"%*") then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ for kk,hh in next,files[hashname] do
+ if not find(kk,"^remap:") then
+ if find(lower(kk),name) then
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
if done and not allresults then break end
+ end
end
+ end
end
- -- we can consider also searching the paths not in the database, but then
- -- we end up with a messy search (all // in all path specs)
- return result
+ else
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done=true end
+ if done and not allresults then break end
+ end
+ end
+ return result
end
-
function resolvers.findwildcardfiles(filename,result)
- return findwildcardfiles(filename,true,result)
+ return findwildcardfiles(filename,true,result)
end
-
function resolvers.findwildcardfile(filename)
- return findwildcardfiles(filename,false)[1] or ""
+ return findwildcardfiles(filename,false)[1] or ""
end
-
--- main user functions
-
function resolvers.automount()
- -- implemented later
end
-
function resolvers.load(option)
- statistics.starttiming(instance)
- identify_configuration_files()
- load_configuration_files()
- if option ~= "nofiles" then
- load_databases()
- resolvers.automount()
- end
- statistics.stoptiming(instance)
- local files = instance.files
- return files and next(files) and true
+ statistics.starttiming(instance)
+ identify_configuration_files()
+ load_configuration_files()
+ if option~="nofiles" then
+ load_databases()
+ resolvers.automount()
+ end
+ statistics.stoptiming(instance)
+ local files=instance.files
+ return files and next(files) and true
end
-
function resolvers.loadtime()
- return statistics.elapsedtime(instance)
+ return statistics.elapsedtime(instance)
end
-
local function report(str)
+ if trace_locating then
+ report_resolving(str)
+ else
+ print(str)
+ end
+end
+function resolvers.dowithfilesandreport(command,files,...)
+ if files and #files>0 then
if trace_locating then
- report_resolving(str) -- has already verbose
- else
- print(str)
+ report('')
end
-end
-
-function resolvers.dowithfilesandreport(command, files, ...) -- will move
- if files and #files > 0 then
- if trace_locating then
- report('') -- ?
- end
- if type(files) == "string" then
- files = { files }
- end
- for f=1,#files do
- local file = files[f]
- local result = command(file,...)
- if type(result) == 'string' then
- report(result)
- else
- for i=1,#result do
- report(result[i]) -- could be unpack
- end
- end
+ if type(files)=="string" then
+ files={ files }
+ end
+ for f=1,#files do
+ local file=files[f]
+ local result=command(file,...)
+ if type(result)=='string' then
+ report(result)
+ else
+ for i=1,#result do
+ report(result[i])
end
+ end
end
+ end
end
-
--- obsolete
-
--- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
--- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-
-function resolvers.showpath(str) -- output search path for file type NAME
- return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+function resolvers.showpath(str)
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
-
-function resolvers.registerfile(files, name, path)
- if files[name] then
- if type(files[name]) == 'string' then
- files[name] = { files[name], path }
- else
- files[name] = path
- end
+function resolvers.registerfile(files,name,path)
+ if files[name] then
+ if type(files[name])=='string' then
+ files[name]={ files[name],path }
else
- files[name] = path
+ files[name]=path
end
+ else
+ files[name]=path
+ end
end
-
function resolvers.dowithpath(name,func)
- local pathlist = resolvers.expandedpathlist(name)
- for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
- end
+ local pathlist=resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.cleanpath(pathlist[i]))
+ end
end
-
function resolvers.dowithvariable(name,func)
- func(expandedvariable(name))
+ func(expandedvariable(name))
end
-
function resolvers.locateformat(name)
- local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
- local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
- if fmtname == "" then
- fmtname = resolvers.findfile(barename..".fmt")
- fmtname = resolvers.cleanpath(fmtname)
- end
- if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
- if lfs.isfile(luiname) then
- return barename, luiname
- elseif lfs.isfile(lucname) then
- return barename, lucname
- elseif lfs.isfile(luaname) then
- return barename, luaname
- end
- end
- return nil, nil
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fullname=file.addsuffix(barename,"fmt")
+ local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
+ if fmtname=="" then
+ fmtname=resolvers.findfile(fullname)
+ fmtname=resolvers.cleanpath(fmtname)
+ end
+ if fmtname~="" then
+ local barename=file.removesuffix(fmtname)
+ local luaname=file.addsuffix(barename,luasuffixes.lua)
+ local lucname=file.addsuffix(barename,luasuffixes.luc)
+ local luiname=file.addsuffix(barename,luasuffixes.lui)
+ if lfs.isfile(luiname) then
+ return barename,luiname
+ elseif lfs.isfile(lucname) then
+ return barename,lucname
+ elseif lfs.isfile(luaname) then
+ return barename,luaname
+ end
+ end
+ return nil,nil
end
-
function resolvers.booleanvariable(str,default)
- local b = resolvers.expansion(str)
- if b == "" then
- return default
- else
- b = toboolean(b)
- return (b == nil and default) or b
- end
-end
-
-function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
- local instance = resolvers.instance
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobtype = hash.type
- local blobpath = hash.name
- if blobpath then
- if before then
- before(blobtype,blobpath,pattern)
- end
- local files = instance.files[blobpath]
- local total, checked, done = 0, 0, 0
- if files then
- for k,v in next, files do
- total = total + 1
- if find(k,"^remap:") then
- k = files[k]
- v = k -- files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- checked = checked + 1
- if handle(blobtype,blobpath,v,k) then
- done = done + 1
- end
- else
- checked = checked + #v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done = done + 1
- end
- end
- end
- end
+ local b=resolvers.expansion(str)
+ if b=="" then
+ return default
+ else
+ b=toboolean(b)
+ return (b==nil and default) or b
+ end
+end
+function resolvers.dowithfilesintree(pattern,handle,before,after)
+ local instance=resolvers.instance
+ local hashes=instance.hashes
+ for i=1,#hashes do
+ local hash=hashes[i]
+ local blobtype=hash.type
+ local blobpath=hash.name
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files=instance.files[blobpath]
+ local total,checked,done=0,0,0
+ if files then
+ for k,v in table.sortedhash(files) do
+ total=total+1
+ if find(k,"^remap:") then
+ elseif find(k,pattern) then
+ if type(v)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,v,k) then
+ done=done+1
+ end
+ else
+ checked=checked+#v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done=done+1
end
+ end
end
- if after then
- after(blobtype,blobpath,pattern,total,checked,done)
- end
+ end
end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
end
+ end
end
-
-resolvers.obsolete = resolvers.obsolete or { }
-local obsolete = resolvers.obsolete
-
-resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
-resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
+resolvers.obsolete=resolvers.obsolete or {}
+local obsolete=resolvers.obsolete
+resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
+resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
end -- of closure
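
In outline, the main file search in the data-res closure above is a short-circuit cascade over six finders: direct, wildcard, qualified, intree, onpath, otherwise. A minimal Lua sketch of that fall-through order follows; the stub finders and the sample names are hypothetical and only stand in for the real collect_instance_files machinery:

local function find_direct(name)    if name == "texmf.cnf" then return "direct",    { name } end end
local function find_wildcard(name)  if name:find("%*")      then return "wildcard",  { name } end end
local function find_qualified(name) if name:find("^/")      then return "qualified", { name } end end
local function find_intree(name)    end -- stub: database / filesystem scan
local function find_onpath(name)    end -- stub: readable relative to the current path
local function find_otherwise(name) return "otherwise", { name } end

local function lookup(name)
  local method, result = find_direct(name)
  if not result then method, result = find_wildcard (name) end
  if not result then method, result = find_qualified(name) end
  if not result then method, result = find_intree   (name) end
  if not result then method, result = find_onpath   (name) end
  if not result then method, result = find_otherwise(name) end
  return method, result
end

print(lookup("/usr/share/texmf/web2c/texmf.cnf")) -- qualified
print(lookup("context.mkiv"))                     -- otherwise (falls through)

The real resolver additionally remembers the outcome per filename/format stamp in instance.found when instance.remember is set, so repeated lookups skip the cascade.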
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-pre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- It could be interesting to hook the resolver in the file
--- opener so that unresolved prefixes travel around and we
--- get more abstraction.
-
--- As we use this beforehand we will move this up in the chain
--- of loading.
+package.loaded["data-pre"] = package.loaded["data-pre"] or true
+-- original size: 6430, stripped down to: 4219
-local resolvers = resolvers
-local prefixes = utilities.storage.allocate()
-resolvers.prefixes = prefixes
-
-local gsub = string.gsub
-local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
-local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-
--- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
-
-prefixes.environment = function(str)
- return cleanpath(expansion(str))
-end
-
-prefixes.relative = function(str,n) -- lfs.isfile
- if io.exists(str) then
- -- nothing
- elseif io.exists("./" .. str) then
- str = "./" .. str
- else
- local p = "../"
- for i=1,n or 2 do
- if io.exists(p .. str) then
- str = p .. str
- break
- else
- p = p .. "../"
- end
- end
- end
- return cleanpath(str)
-end
-
-prefixes.auto = function(str)
- local fullname = prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname = prefixes.locate(str)
- end
- return fullname
-end
-
-prefixes.locate = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath((fullname ~= "" and fullname) or str)
-end
-
-prefixes.filename = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
-end
-
-prefixes.pathname = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
-end
-
-prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
-end
-
-prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
-end
-
-prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
-end
-
-prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
-end
-
-prefixes.env = prefixes.environment
-prefixes.rel = prefixes.relative
-prefixes.loc = prefixes.locate
-prefixes.kpse = prefixes.locate
-prefixes.full = prefixes.locate
-prefixes.file = prefixes.filename
-prefixes.path = prefixes.pathname
-
+if not modules then modules={} end modules ['data-pre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local getenv=resolvers.getenv
+local P,S,R,C,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.match
+local joinpath,basename,dirname=file.join,file.basename,file.dirname
+local getmetatable,rawset,type=getmetatable,rawset,type
+prefixes.environment=function(str)
+ return cleanpath(expansion(str))
+end
+prefixes.relative=function(str,n)
+ if io.exists(str) then
+ elseif io.exists("./"..str) then
+ str="./"..str
+ else
+ local p="../"
+ for i=1,n or 2 do
+ if io.exists(p..str) then
+ str=p..str
+ break
+ else
+ p=p.."../"
+ end
+ end
+ end
+ return cleanpath(str)
+end
+prefixes.auto=function(str)
+ local fullname=prefixes.relative(str)
+ if not lfs.isfile(fullname) then
+ fullname=prefixes.locate(str)
+ end
+ return fullname
+end
+prefixes.locate=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath((fullname~="" and fullname) or str)
+end
+prefixes.filename=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(basename((fullname~="" and fullname) or str))
+end
+prefixes.pathname=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(dirname((fullname~="" and fullname) or str))
+end
+prefixes.selfautoloc=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
+end
+prefixes.selfautoparent=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
+end
+prefixes.selfautodir=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
+end
+prefixes.home=function(str)
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+local function toppath()
+ local inputstack=resolvers.inputstack
+ if not inputstack then
+ return "."
+ end
+ local pathname=dirname(inputstack[#inputstack] or "")
+ if pathname=="" then
+ return "."
+ else
+ return pathname
+ end
+end
+resolvers.toppath=toppath
+prefixes.toppath=function(str)
+ return cleanpath(joinpath(toppath(),str))
+end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
function resolvers.allprefixes(separator)
- local all = table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i] = all[i] .. ":"
- end
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
end
- return all
+ end
+ return all
end
-
local function _resolve_(method,target)
- local action = prefixes[method]
- if action then
- return action(target)
- else
- return method .. ":" .. target
- end
-end
-
-local resolved, abstract = { }, { }
-
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+local resolved,abstract={},{}
function resolvers.resetresolve(str)
- resolved, abstract = { }, { }
+ resolved,abstract={},{}
end
-
-local function resolve(str) -- use schemes, this one is then for the commandline only
- if type(str) == "table" then
- local t = { }
- for i=1,#str do
- t[i] = resolve(str[i])
- end
- return t
- else
- local res = resolved[str]
- if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
- resolved[str] = res
- abstract[res] = str
- end
- return res
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local function resolve(str)
+ if type(str)=="table" then
+ local t={}
+ for i=1,#str do
+ t[i]=resolve(str[i])
end
+ return t
+ else
+ local res=resolved[str]
+ if not res then
+ res=lpegmatch(pattern,str)
+ resolved[str]=res
+ abstract[res]=str
+ end
+ return res
+ end
end
-
local function unresolve(str)
- return abstract[str] or str
+ return abstract[str] or str
end
-
-resolvers.resolve = resolve
-resolvers.unresolve = unresolve
-
-if os.uname then
-
- for k, v in next, os.uname() do
- if not prefixes[k] then
- prefixes[k] = function() return v end
- end
+resolvers.resolve=resolve
+resolvers.unresolve=unresolve
+if type(os.uname)=="function" then
+ for k,v in next,os.uname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
end
-
+ end
end
-
-if os.type == "unix" then
-
- local pattern
-
- local function makepattern(t,k,v)
- local colon = P(":")
- local p
- for k, v in table.sortedpairs(prefixes) do
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
- end
-
- makepattern()
-
- getmetatable(prefixes).__newindex = makepattern
-
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-
-else -- already the default:
-
- function resolvers.repath(str)
- return str
- end
-
+if os.type=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ getmetatable(prefixes).__newindex=makepattern
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
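
The data-pre closure above boils down to a table of per-prefix handlers plus one substitution over prefix:target pairs. A minimal sketch follows, using the plain string.gsub form that the new lpeg pattern replaces; the two handlers and the sample strings are hypothetical:

local prefixes = {
  home = function(str) return (os.getenv("HOME") or ".") .. "/" .. str end,
  rel  = function(str) return "./" .. str end,
}

local function _resolve_(method, target)
  local action = prefixes[method]
  if action then
    return action(target)              -- known prefix: expand it
  else
    return method .. ":" .. target     -- unknown prefix: leave it untouched
  end
end

local function resolve(str)
  return (str:gsub("([a-z][a-z]+):([^ \"';,]*)", _resolve_))
end

print(resolve("home:texmf/tex"))   -- e.g. /home/user/texmf/tex
print(resolve("zip:archive.zip"))  -- no 'zip' handler registered here, stays as-is

Unknown prefixes pass through unchanged, which leaves room for schemes to be handled later by the uri-registered finders and openers.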
@@ -14237,172 +14214,159 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local methodhandler = resolvers.methodhandler
-local registermethod = resolvers.registermethod
-
-local finders = allocate { helpers = { }, notfound = function() end }
-local openers = allocate { helpers = { }, notfound = function() end }
-local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+package.loaded["data-inp"] = package.loaded["data-inp"] or true
-registermethod("finders", finders, "uri")
-registermethod("openers", openers, "uri")
-registermethod("loaders", loaders, "uri")
+-- original size: 910, stripped down to: 823
-resolvers.finders = finders
-resolvers.openers = openers
-resolvers.loaders = loaders
+if not modules then modules={} end modules ['data-inp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local methodhandler=resolvers.methodhandler
+local registermethod=resolvers.registermethod
+local finders=allocate { helpers={},notfound=function() end }
+local openers=allocate { helpers={},notfound=function() end }
+local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
+registermethod("finders",finders,"uri")
+registermethod("openers",openers,"uri")
+registermethod("loaders",loaders,"uri")
+resolvers.finders=finders
+resolvers.openers=openers
+resolvers.loaders=loaders
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-out'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local registermethod = resolvers.registermethod
+package.loaded["data-out"] = package.loaded["data-out"] or true
-local savers = allocate { helpers = { } }
+-- original size: 530, stripped down to: 475
-resolvers.savers = savers
-
-registermethod("savers", savers, "uri")
+if not modules then modules={} end modules ['data-out']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registermethod=resolvers.registermethod
+local savers=allocate { helpers={} }
+resolvers.savers=savers
+registermethod("savers",savers,"uri")
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-fil'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["data-fil"] = package.loaded["data-fil"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_files = logs.reporter("resolvers","files")
-
-local resolvers = resolvers
-
-local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
-
-local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+-- original size: 3801, stripped down to: 3231
+if not modules then modules={} end modules ['data-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_files=logs.reporter("resolvers","files")
+local resolvers=resolvers
+local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
+local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
+local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_files("file locator '%s' found as '%s'",name,realname)
- end
- resolvers.appendhash('file',name,true) -- cache
- elseif trace_locating then
- report_files("file locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator %a found as %a",name,realname)
end
+ resolvers.appendhash('file',name,true)
+ elseif trace_locating then
+ report_files("file locator %a not found",name)
+ end
end
-
function hashers.file(specification)
- local name = specification.filename
- local content = caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local name=specification.filename
+ local content=caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
end
-
function generators.file(specification)
- local path = specification.filename
- local content = resolvers.scanfiles(path,false,true) -- scan once
- resolvers.registerfilehash(path,content,true)
+ local path=specification.filename
+ local content=resolvers.scanfiles(path,false,true)
+ resolvers.registerfilehash(path,content,true)
end
-
-concatinators.file = file.join
-
+concatinators.file=file.join
function finders.file(specification,filetype)
- local filename = specification.filename
- local foundname = resolvers.findfile(filename,filetype)
- if foundname and foundname ~= "" then
- if trace_locating then
- report_files("file finder: '%s' found",filename)
- end
- return foundname
- else
- if trace_locating then
- report_files("file finder: %s' not found",filename)
- end
- return finders.notfound()
+ local filename=specification.filename
+ local foundname=resolvers.findfile(filename,filetype)
+ if foundname and foundname~="" then
+ if trace_locating then
+ report_files("file finder: %a found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %a not found",filename)
end
+ return finders.notfound()
+ end
end
-
--- The default textopener will be overloaded later on.
-
function openers.helpers.textopener(tag,filename,f)
- return {
- reader = function() return f:read () end,
- close = function() logs.show_close(filename) return f:close() end,
- }
+ return {
+ reader=function() return f:read () end,
+ close=function() logs.show_close(filename) return f:close() end,
+ }
end
-
function openers.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"r")
- if f then
- if trace_locating then
- report_files("file opener, '%s' opened",filename)
- end
- return openers.helpers.textopener("file",filename,f)
- end
- end
- if trace_locating then
- report_files("file opener, '%s' not found",filename)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener: %a opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
end
- return openers.notfound()
+ end
+ if trace_locating then
+ report_files("file opener: %a not found",filename)
+ end
+ return openers.notfound()
end
-
function loaders.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"rb")
- if f then
- logs.show_load(filename)
- if trace_locating then
- report_files("file loader, '%s' loaded",filename)
- end
- local s = f:read("*a")
- if checkgarbage then
- checkgarbage(#s)
- end
- f:close()
- if s then
- return true, s, #s
- end
- end
- end
- if trace_locating then
- report_files("file loader, '%s' not found",filename)
- end
- return loaders.notfound()
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader: %a loaded",filename)
+ end
+ local s=f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true,s,#s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader: %a not found",filename)
+ end
+ return loaders.notfound()
end
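
The data-fil handlers above all follow the same shape; for the loaders the contract is to return a success flag, the content, and its size, or to fall back to notfound. A minimal sketch with a hypothetical myloaders table standing in for resolvers.loaders:

local myloaders = { notfound = function() return false, nil, 0 end }

function myloaders.file(specification)
  local filename = specification.filename
  if filename and filename ~= "" then
    local f = io.open(filename, "rb")
    if f then
      local s = f:read("*a")
      f:close()
      if s then
        return true, s, #s   -- success: flag, content, size
      end
    end
  end
  return myloaders.notfound()
end

print(myloaders.file { filename = "no-such-file.tex" })   -- false   nil   0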
@@ -14410,140 +14374,118 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
+package.loaded["data-con"] = package.loaded["data-con"] or true
-containers = containers or { }
-local containers = containers
-containers.usecache = true
+-- original size: 4940, stripped down to: 3580
-local report_containers = logs.reporter("resolvers","containers")
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
end
+ return s
+ end
end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
end
-
function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
end
- return data
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
end
-
function containers.content(container,name)
- return container.storage[name]
+ return container.storage[name]
end
-
function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
+ return (gsub(lower(name),"[^%w%d]+","-"))
end
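
For reference, the data-con module stripped in above provides version-stamped memory/disk caches: containers.define creates a (category, subcategory) container, containers.read returns a stored table only when its cache_version matches, and containers.write stamps and stores one. A minimal usage sketch, assuming an initialized mtxrun/ConTeXt environment where containers and caches are loaded; the category, name and parser below are illustrative only:

-- hypothetical cache for expensively parsed data; names are placeholders
local mycache = containers.define("demo", "parsed", 1.001, true)

local function getparsed(hash, parse)
 local data = containers.read(mycache, hash)    -- nil on a miss or version mismatch
 if not data then
  data = parse()                                -- expensive work only happens on a miss
  data = containers.write(mycache, hash, data)  -- sets data.cache_version and stores it
 end
 return data
end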
@@ -14551,102 +14493,90 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-use'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_mounts = logs.reporter("resolvers","mounts")
-
-local resolvers = resolvers
+package.loaded["data-use"] = package.loaded["data-use"] or true
--- we will make a better format, maybe something xml or just text or lua
-
-resolvers.automounted = resolvers.automounted or { }
+-- original size: 3913, stripped down to: 2998
+if not modules then modules={} end modules ['data-use']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_mounts=logs.reporter("resolvers","mounts")
+local resolvers=resolvers
+resolvers.automounted=resolvers.automounted or {}
function resolvers.automount(usecache)
- local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
- if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = caches.getreadablepaths("mount")
- end
- if mountpaths and #mountpaths > 0 then
- statistics.starttiming(resolvers.instance)
- for k=1,#mountpaths do
- local root = mountpaths[k]
- local f = io.open(root.."/url.tmi")
- if f then
- for line in f:lines() do
- if line then
- if find(line,"^[%%#%-]") then -- or %W
- -- skip
- elseif find(line,"^zip://") then
- if trace_locating then
- report_mounts("mounting %s",line)
- end
- table.insert(resolvers.automounted,line)
- resolvers.usezipfile(line)
- end
- end
- end
- f:close()
- end
+ local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
+ if (not mountpaths or #mountpaths==0) and usecache then
+ mountpaths=caches.getreadablepaths("mount")
+ end
+ if mountpaths and #mountpaths>0 then
+ statistics.starttiming(resolvers.instance)
+ for k=1,#mountpaths do
+ local root=mountpaths[k]
+ local f=io.open(root.."/url.tmi")
+ if f then
+ for line in f:lines() do
+ if line then
+ if find(line,"^[%%#%-]") then
+ elseif find(line,"^zip://") then
+ if trace_locating then
+ report_mounts("mounting %a",line)
+ end
+ table.insert(resolvers.automounted,line)
+ resolvers.usezipfile(line)
+ end
+ end
end
- statistics.stoptiming(resolvers.instance)
- end
-end
-
--- status info
-
-statistics.register("used config file", function() return caches.configfiles() end)
-statistics.register("used cache path", function() return caches.usedpaths() end)
-
--- experiment (code will move)
-
-function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.list().banner
- if formatbanner and enginebanner and sourcefile then
- local luvname = file.replacesuffix(texname,"luv")
- local luvdata = {
- enginebanner = enginebanner,
- formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
- sourcefile = sourcefile,
- }
- io.savedata(luvname,table.serialize(luvdata,true))
- end
+ f:close()
+ end
+ end
+ statistics.stoptiming(resolvers.instance)
+ end
+end
+statistics.register("used config file",function() return caches.configfiles() end)
+statistics.register("used cache path",function() return caches.usedpaths() end)
+function statistics.savefmtstatus(texname,formatbanner,sourcefile)
+ local enginebanner=status.list().banner
+ if formatbanner and enginebanner and sourcefile then
+ local luvname=file.replacesuffix(texname,"luv")
+ local luvdata={
+ enginebanner=enginebanner,
+ formatbanner=formatbanner,
+ sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
+ sourcefile=sourcefile,
+ }
+ io.savedata(luvname,table.serialize(luvdata,true))
+ end
end
-
function statistics.checkfmtstatus(texname)
- local enginebanner = status.list().banner
- if enginebanner and texname then
- local luvname = file.replacesuffix(texname,"luv")
- if lfs.isfile(luvname) then
- local luv = dofile(luvname)
- if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
- local luvbanner = luv.enginebanner or "?"
- if luvbanner ~= enginebanner then
- return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
- end
- local luvhash = luv.sourcehash or "?"
- if luvhash ~= sourcehash then
- return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
- end
- else
- return "invalid status file"
- end
- else
- return "missing status file"
- end
+ local enginebanner=status.list().banner
+ if enginebanner and texname then
+ local luvname=file.replacesuffix(texname,"luv")
+ if lfs.isfile(luvname) then
+ local luv=dofile(luvname)
+ if luv and luv.sourcefile then
+ local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
+ local luvbanner=luv.enginebanner or "?"
+ if luvbanner~=enginebanner then
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
+ end
+ local luvhash=luv.sourcehash or "?"
+ if luvhash~=sourcehash then
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
+ end
+ else
+ return "invalid status file"
+ end
+ else
+ return "missing status file"
end
- return true
+ end
+ return true
end
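
The statistics.savefmtstatus/checkfmtstatus pair above records the engine banner and an md5 hash of the format's source in a .luv file next to the format, so a later run can detect engine or source mismatches. A sketch of the round trip, assuming a format-building context; the names and banner strings are placeholders:

-- at format generation time (arguments normally come from the format builder)
statistics.savefmtstatus("cont-en", "hypothetical format banner", "context.mkiv")

-- at run time: true when banner and source hash still match, otherwise a message string
local status = statistics.checkfmtstatus("cont-en")
if status ~= true then
 print("format out of date: " .. status)
end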
@@ -14654,263 +14584,235 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-zip'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- partly redone .. needs testing
+package.loaded["data-zip"] = package.loaded["data-zip"] or true
-local format, find, match = string.format, string.find, string.match
+-- original size: 8489, stripped down to: 6757
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_zip = logs.reporter("resolvers","zip")
-
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
-
-local resolvers = resolvers
-
-zip = zip or { }
-local zip = zip
-
-zip.archives = zip.archives or { }
-local archives = zip.archives
-
-zip.registeredfiles = zip.registeredfiles or { }
-local registeredfiles = zip.registeredfiles
-
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- zip.open = i_limiter.protect(zip.open)
- limited = true
- end
- end
+if not modules then modules={} end modules ['data-zip']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,find,match=string.format,string.find,string.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_zip=logs.reporter("resolvers","zip")
+local resolvers=resolvers
+zip=zip or {}
+local zip=zip
+zip.archives=zip.archives or {}
+local archives=zip.archives
+zip.registeredfiles=zip.registeredfiles or {}
+local registeredfiles=zip.registeredfiles
+local limited=false
+directives.register("system.inputmode",function(v)
+ if not limited then
+ local i_limiter=io.i_limiter(v)
+ if i_limiter then
+ zip.open=i_limiter.protect(zip.open)
+ limited=true
+ end
+ end
end)
-
-local function validzip(str) -- todo: use url splitter
- if not find(str,"^zip://") then
- return "zip:///" .. str
- else
- return str
- end
+local function validzip(str)
+ if not find(str,"^zip://") then
+ return "zip:///"..str
+ else
+ return str
+ end
end
-
function zip.openarchive(name)
- if not name or name == "" then
- return nil
- else
- local arch = archives[name]
- if not arch then
- local full = resolvers.findfile(name) or ""
- arch = (full ~= "" and zip.open(full)) or false
- archives[name] = arch
- end
- return arch
+ if not name or name=="" then
+ return nil
+ else
+ local arch=archives[name]
+ if not arch then
+ local full=resolvers.findfile(name) or ""
+ arch=(full~="" and zip.open(full)) or false
+ archives[name]=arch
end
+ return arch
+ end
end
-
function zip.closearchive(name)
- if not name or (name == "" and archives[name]) then
- zip.close(archives[name])
- archives[name] = nil
- end
+ if not name or (name=="" and archives[name]) then
+ zip.close(archives[name])
+ archives[name]=nil
+ end
end
-
function resolvers.locators.zip(specification)
- local archive = specification.filename
- local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
- if trace_locating then
- if zipfile then
- report_zip("locator, archive '%s' found",archive)
- else
- report_zip("locator, archive '%s' not found",archive)
- end
+ local archive=specification.filename
+ local zipfile=archive and archive~="" and zip.openarchive(archive)
+ if trace_locating then
+ if zipfile then
+ report_zip("locator: archive %a found",archive)
+ else
+ report_zip("locator: archive %a not found",archive)
end
+ end
end
-
function resolvers.hashers.zip(specification)
- local archive = specification.filename
- if trace_locating then
- report_zip("loading file '%s'",archive)
- end
- resolvers.usezipfile(specification.original)
+ local archive=specification.filename
+ if trace_locating then
+ report_zip("loading file %a",archive)
+ end
+ resolvers.usezipfile(specification.original)
+end
+function resolvers.concatinators.zip(zipfile,path,name)
+ if not path or path=="" then
+ return format('%s?name=%s',zipfile,name)
+ else
+ return format('%s?name=%s/%s',zipfile,path,name)
+ end
end
-
-function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
- if not path or path == "" then
- return format('%s?name=%s',zipfile,name)
- else
- return format('%s?name=%s/%s',zipfile,path,name)
- end
-end
-
function resolvers.finders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("finder, archive '%s' found",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- dfile = zfile:close()
- if trace_locating then
- report_zip("finder, file '%s' found",queryname)
- end
- return specification.original
- elseif trace_locating then
- report_zip("finder, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("finder, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("finder: archive %a found",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ dfile=zfile:close()
+ if trace_locating then
+ report_zip("finder: file %a found",queryname)
+ end
+ return specification.original
+ elseif trace_locating then
+ report_zip("finder: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("finder: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("finder, '%s' not found",original)
- end
- return resolvers.finders.notfound()
+ end
+ if trace_locating then
+ report_zip("finder: %a not found",original)
+ end
+ return resolvers.finders.notfound()
end
-
function resolvers.openers.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("opener, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- if trace_locating then
- report_zip("opener, file '%s' found",queryname)
- end
- return resolvers.openers.helpers.textopener('zip',original,dfile)
- elseif trace_locating then
- report_zip("opener, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("opener, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("opener; archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ if trace_locating then
+ report_zip("opener: file %a found",queryname)
+ end
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
+ elseif trace_locating then
+ report_zip("opener: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("opener: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("opener, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("opener: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
function resolvers.loaders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("loader, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- logs.show_load(original)
- if trace_locating then
- report_zip("loader, file '%s' loaded",original)
- end
- local s = dfile:read("*all")
- dfile:close()
- return true, s, #s
- elseif trace_locating then
- report_zip("loader, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("loader, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("loader: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ logs.show_load(original)
+ if trace_locating then
+ report_zip("loader; file %a loaded",original)
+ end
+ local s=dfile:read("*all")
+ dfile:close()
+ return true,s,#s
+ elseif trace_locating then
+ report_zip("loader: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("loader; unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("loader, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("loader: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
--- zip:///somefile.zip
--- zip:///somefile.zip?tree=texmf-local -> mount
-
function resolvers.usezipfile(archive)
- local specification = resolvers.splitmethod(archive) -- to be sure
- local archive = specification.filename
- if archive and not registeredfiles[archive] then
- local z = zip.openarchive(archive)
- if z then
- local instance = resolvers.instance
- local tree = url.query(specification.query).tree or ""
- if trace_locating then
- report_zip("registering, registering archive '%s'",archive)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',archive)
- resolvers.extendtexmfvariable(archive) -- resets hashes too
- registeredfiles[archive] = z
- instance.files[archive] = resolvers.registerzipfile(z,tree)
- statistics.stoptiming(instance)
- elseif trace_locating then
- report_zip("registering, unknown archive '%s'",archive)
- end
+ local specification=resolvers.splitmethod(archive)
+ local archive=specification.filename
+ if archive and not registeredfiles[archive] then
+ local z=zip.openarchive(archive)
+ if z then
+ local instance=resolvers.instance
+ local tree=url.query(specification.query).tree or ""
+ if trace_locating then
+ report_zip("registering: archive %a",archive)
+ end
+ statistics.starttiming(instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive)
+ registeredfiles[archive]=z
+ instance.files[archive]=resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(instance)
elseif trace_locating then
- report_zip("registering, '%s' not found",archive)
+ report_zip("registering: unknown archive %a",archive)
end
+ elseif trace_locating then
+ report_zip("registering: archive %a not found",archive)
+ end
end
-
function resolvers.registerzipfile(z,tree)
- local files, filter = { }, ""
- if tree == "" then
- filter = "^(.+)/(.-)$"
+ local files,filter={},""
+ if tree=="" then
+ filter="^(.+)/(.-)$"
+ else
+ filter=format("^%s/(.+)/(.-)$",tree)
+ end
+ if trace_locating then
+ report_zip("registering: using filter %a",filter)
+ end
+ local register,n=resolvers.registerfile,0
+ for i in z:files() do
+ local path,name=match(i.filename,filter)
+ if path then
+ if name and name~='' then
+ register(files,name,path)
+ n=n+1
+ else
+ end
else
- filter = format("^%s/(.+)/(.-)$",tree)
+ register(files,i.filename,'')
+ n=n+1
end
- if trace_locating then
- report_zip("registering, using filter '%s'",filter)
- end
- local register, n = resolvers.registerfile, 0
- for i in z:files() do
- local path, name = match(i.filename,filter)
- if path then
- if name and name ~= '' then
- register(files, name, path)
- n = n + 1
- else
- -- directory
- end
- else
- register(files, i.filename, '')
- n = n + 1
- end
- end
- report_zip("registering, %s files registered",n)
- return files
+ end
+ report_zip("registering: %s files registered",n)
+ return files
end
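
The data-zip resolvers above handle zip:// urls of the two forms shown in the removed comments: ?name= addresses a single member of the archive, while ?tree= mounts a subtree of it as a searchable path. A short usage sketch, assuming the resolver instance is initialized; archive and file names are placeholders taken from those comments:

-- mount a subtree of an archive so ordinary lookups can find its files
resolvers.usezipfile("zip:///texmf-mine.zip?tree=/tex/texmf-projects")

-- address one member directly; the zip finder/opener/loader handle the ?name= query
local found = resolvers.findfile("zip:///oeps.zip?name=bla/bla.tex")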
@@ -14918,393 +14820,447 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- \input tree://oeps1/**/oeps.tex
-
-local find, gsub, format = string.find, string.gsub, string.format
+package.loaded["data-tre"] = package.loaded["data-tre"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_trees = logs.reporter("resolvers","trees")
-
-local resolvers = resolvers
-
-local done, found, notfound = { }, { }, resolvers.finders.notfound
+-- original size: 2508, stripped down to: 2074
+if not modules then modules={} end modules ['data-tre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,gsub,format=string.find,string.gsub,string.format
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_trees=logs.reporter("resolvers","trees")
+local resolvers=resolvers
+local done,found,notfound={},{},resolvers.finders.notfound
function resolvers.finders.tree(specification)
- local spec = specification.filename
- local fnd = found[spec]
- if fnd == nil then
- if spec ~= "" then
- local path, name = file.dirname(spec), file.basename(spec)
- if path == "" then path = "." end
- local hash = done[path]
- if not hash then
- local pattern = path .. "/*" -- we will use the proper splitter
- hash = dir.glob(pattern)
- done[path] = hash
- end
- local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
- for k=1,#hash do
- local v = hash[k]
- if find(v,pattern) then
- found[spec] = v
- return v
- end
- end
- end
- fnd = notfound() -- false
- found[spec] = fnd
- end
- return fnd
+ local spec=specification.filename
+ local fnd=found[spec]
+ if fnd==nil then
+ if spec~="" then
+ local path,name=file.dirname(spec),file.basename(spec)
+ if path=="" then path="." end
+ local hash=done[path]
+ if not hash then
+ local pattern=path.."/*"
+ hash=dir.glob(pattern)
+ done[path]=hash
+ end
+ local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
+ for k=1,#hash do
+ local v=hash[k]
+ if find(v,pattern) then
+ found[spec]=v
+ return v
+ end
+ end
+ end
+ fnd=notfound()
+ found[spec]=fnd
+ end
+ return fnd
end
-
function resolvers.locators.tree(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_trees("locator '%s' found",realname)
- end
- resolvers.appendhash('tree',name,false) -- don't cache
- elseif trace_locating then
- report_trees("locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_trees("locator %a found",realname)
end
+ resolvers.appendhash('tree',name,false)
+ elseif trace_locating then
+ report_trees("locator %a not found",name)
+ end
end
-
function resolvers.hashers.tree(specification)
- local name = specification.filename
- if trace_locating then
- report_trees("analysing '%s'",name)
- end
- resolvers.methodhandler("hashers",name)
-
- resolvers.generators.file(specification)
+ local name=specification.filename
+ if trace_locating then
+ report_trees("analysing %a",name)
+ end
+ resolvers.methodhandler("hashers",name)
+ resolvers.generators.file(specification)
end
-
-resolvers.concatinators.tree = resolvers.concatinators.file
-resolvers.generators.tree = resolvers.generators.file
-resolvers.openers.tree = resolvers.openers.file
-resolvers.loaders.tree = resolvers.loaders.file
+resolvers.concatinators.tree=resolvers.concatinators.file
+resolvers.generators.tree=resolvers.generators.file
+resolvers.openers.tree=resolvers.openers.file
+resolvers.loaders.tree=resolvers.loaders.file
end -- of closure
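
The tree:// method above (see the removed comment \input tree://oeps1/**/oeps.tex) lets one address files below an arbitrary directory: the locator registers the directory as a hash, and the finder globs it once and then matches requested basenames against the cached listing. A sketch, assuming an initialized resolver; the path is a placeholder:

-- hypothetical lookup below an unmanaged source tree
local spec  = resolvers.splitmethod("tree:///home/user/texsources/**/oeps.tex")
local found = resolvers.finders.tree(spec)  -- globs once per directory, then serves from the cache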
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this one is replaced by data-sch.lua --
-
-local gsub = string.gsub
-
-local resolvers = resolvers
+package.loaded["data-sch"] = package.loaded["data-sch"] or true
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
-
-local cached = { }
-
-local function runcurl(specification)
- local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[original] then
- if not io.exists(cachename) then
- cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
- end
- if io.exists(cachename) then
- cached[original] = cachename
- else
- cached[original] = ""
- end
- end
- return cached[original]
-end
-
--- old code: we could be cleaner using specification (see schemes)
-
-local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
-end
-
-local opener = openers.file
-local loader = loaders.file
-
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
-end
-
-resolvers.curl.install = install
-
-install('http')
-install('https')
-install('ftp')
-
-
-end -- of closure
+-- original size: 6202, stripped down to: 5149
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['data-sch']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_libraries = logs.reporter("resolvers","libraries")
-
-local gsub, insert = string.gsub, table.insert
-local unpack = unpack or table.unpack
-
-local resolvers, package = resolvers, package
-
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local load=load
+local gsub,concat,format=string.gsub,table.concat,string.format
+local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
+local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
+local report_schemes=logs.reporter("resolvers","schemes")
+local http=require("socket.http")
+local ltn12=require("ltn12")
+local resolvers=resolvers
+local schemes=resolvers.schemes or {}
+resolvers.schemes=schemes
+local cleaners={}
+schemes.cleaners=cleaners
+local threshold=24*60*60
+directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
+function cleaners.none(specification)
+ return specification.original
+end
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-"))
+end
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+local cleaner=cleaners.strip
+directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
+function resolvers.schemes.cleanname(specification)
+ local hash=cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %a to %a",specification.original,hash)
+ end
+ return hash
+end
+local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
+local function runcurl(name,cachename)
+ local command="curl --silent --create-dirs --output "..cachename.." "..name
+ os.spawn(command)
+end
+local function fetch(specification)
+ local original=specification.original
+ local scheme=specification.scheme
+ local cleanname=schemes.cleanname(specification)
+ local cachename=caches.setfirstwritablefile(cleanname,"schemes")
+ if not cached[original] then
+ statistics.starttiming(schemes)
+   if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
+ cached[original]=cachename
+ local handler=handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
+ end
+ if io.exists(cachename) then
+ cached[original]=cachename
+ if trace_schemes then
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
+ end
+ else
+ cached[original]=""
+ if trace_schemes then
+ report_schemes("using missing %a, protocol %a",original,scheme)
+ end
end
- return libpaths
-end
-
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+ loaded[scheme]=loaded[scheme]+1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing %a, protocol %a",original,scheme)
end
- return clibpaths
+ reused[scheme]=reused[scheme]+1
+ end
+ return cached[original]
end
-
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
+end
+local opener=openers.file
+local loader=loaders.file
+local function install(scheme,handler,newthreshold)
+ handlers [scheme]=handler
+ loaded [scheme]=0
+ reused [scheme]=0
+ finders [scheme]=finder
+ openers [scheme]=opener
+ loaders [scheme]=loader
+ thresholds[scheme]=newthreshold or threshold
+end
+schemes.install=install
+local function http_handler(specification,cachename)
+ local tempname=cachename..".tmp"
+ local f=io.open(tempname,"wb")
+ local status,message=http.request {
+ url=specification.original,
+ sink=ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
+install('http',http_handler)
+install('https')
+install('ftp')
+statistics.register("scheme handling time",function()
+ local l,r,nl,nr={},{},0,0
+ for k,v in table.sortedhash(loaded) do
+ if v>0 then
+ nl=nl+1
+ l[nl]=k..":"..v
+ end
+ end
+ for k,v in table.sortedhash(reused) do
+ if v>0 then
+ nr=nr+1
+ r[nr]=k..":"..v
+ end
+ end
+ local n=nl+nr
+ if n>0 then
+ l=nl>0 and concat(l) or "none"
+ r=nr>0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes),n,threshold,l,r)
+ else
+ return nil
+ end
+end)
+local httprequest=http.request
+local toquery=url.toquery
+local function fetchstring(url,data)
+ local q=data and toquery(data)
+ if q then
+ url=url.."?"..q
+ end
+ local reply=httprequest(url)
+ return reply
+end
+schemes.fetchstring=fetchstring
+function schemes.fetchtable(url,data)
+ local reply=fetchstring(url,data)
+ if reply then
+ local s=load("return "..reply)
+ if s then
+ return s()
end
- return path
+ end
end
-local p_libpaths, a_libpaths = { }, { }
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
-end
+end -- of closure
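
The data-sch module that replaces data-crl above fetches remote resources into the cache directory and refreshes them only after a per-scheme threshold (24 hours by default, tunable via the schemes.threshold directive); http gets a built-in socket handler, everything else falls back to curl. A sketch of registering an additional scheme with its own handler, assuming the module is loaded; the scheme name and handler body are hypothetical:

-- hypothetical handler: fetch() computes cachename and calls this to fill it
local function demo_handler(specification, cachename)
 io.savedata(cachename, "fetched from " .. specification.original .. "\n")
 return cachename
end

-- register with a 10 minute refresh threshold; finder/opener/loader fall back to the file ones
resolvers.schemes.install("demo", demo_handler, 10 * 60)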
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
-end
+do -- create closure to overcome 200 locals limit
--- beware, we need to return a loadfile result !
+package.loaded["data-lua"] = package.loaded["data-lua"] or true
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
-end
+-- original size: 4861, stripped down to: 3693
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
- end
- for i=1,#libformats do
- local format = libformats[i]
- local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
- end
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
+if not modules then modules={} end modules ['data-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers,package=resolvers,package
+local gsub=string.gsub
+local concat=table.concat
+local addsuffix=file.addsuffix
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local libsuffixes={ 'tex','lua' }
+local clibsuffixes={ 'lib' }
+local libformats={ 'TEXINPUTS','LUAINPUTS' }
+local clibformats={ 'CLUAINPUTS' }
+local helpers=package.helpers
+trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
+trackers.register("resolvers.locating",function(v) helpers.trace=v end)
+helpers.report=logs.reporter("resolvers","libraries")
+local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
+local function cleanpath(path)
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+helpers.cleanpath=cleanpath
+local loadedaslib=helpers.loadedaslib
+local loadedbylua=helpers.loadedbylua
+local loadedbypath=helpers.loadedbypath
+local notloaded=helpers.notloaded
+local getlibpaths=package.libpaths
+local getclibpaths=package.clibpaths
+function helpers.libpaths(libhash)
+ local libpaths={}
+ for i=1,#libformats do
+ local paths=resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1]=path
+ libhash[path]=true
+ end
+ end
+ end
+ return libpaths
+end
+function helpers.clibpaths(clibhash)
+ local clibpaths={}
+ for i=1,#clibformats do
+ local paths=resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1]=path
+ clibhash[path]=true
+ end
+ end
+ end
+ return clibpaths
+end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating %a as %a using formats %a",rawname,name,suffixes)
+ end
+ for i=1,#suffixes do
+ local format=suffixes[i]
+ local resolved=resolvers.findfile(name,format) or ""
+ if trace then
+ report("checking %a using format %a",name,format)
+ end
+ if resolved~="" then
+ if trace then
+ report("lib %a located on %a",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbyformat=loadedbyformat
+local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+helpers.lualibfile=lualibfile
+function helpers.loaded(name)
+ local thename=lualibfile(name)
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix)
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbyformat(luaname,name,libsuffixes,false)
+ if done then
+ return result
+ end
+ local done,result=loadedbyformat(libname,name,clibsuffixes,true)
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
end
-
-resolvers.loadlualib = require
-
--- -- -- --
-
-package.obsolete = package.obsolete or { }
-
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
-
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib=require
end -- of closure
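
The data-lua code above hooks the resolvers into Lua's module loading: helpers.loaded first tries a .lua file via the tex/lua file formats, then a compiled library via the lib format, then candidate paths built from TEXINPUTS/LUAINPUTS and CLUAINPUTS, and only then the stock Lua loader. With that chain installed a plain require goes through the resolvers; a sketch with an illustrative module name:

-- the module name is a placeholder; resolvers.loadlualib is simply require
local ok, mod = pcall(require, "some-optional-module")
if ok then
 -- use mod ...
end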
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-aux'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find = string.find
-local type, next = type, next
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["data-aux"] = package.loaded["data-aux"] or true
-local resolvers = resolvers
+-- original size: 2394, stripped down to: 2005
-local report_scripts = logs.reporter("resolvers","scripts")
-
-function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
- local scriptpath = "scripts/context/lua"
- newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.cleanpath(oldname)
+if not modules then modules={} end modules ['data-aux']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find=string.find
+local type,next=type,next
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local resolvers=resolvers
+local report_scripts=logs.reporter("resolvers","scripts")
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
+ newname=file.addsuffix(newname,"lua")
+ local oldscript=resolvers.cleanpath(oldname)
+ if trace_locating then
+ report_scripts("to be replaced old script %a",oldscript)
+ end
+ local newscripts=resolvers.findfiles(newname) or {}
+ if #newscripts==0 then
if trace_locating then
- report_scripts("to be replaced old script %s", oldscript)
- end
- local newscripts = resolvers.findfiles(newname) or { }
- if #newscripts == 0 then
+ report_scripts("unable to locate new script")
+ end
+ else
+ for i=1,#newscripts do
+ local newscript=resolvers.cleanpath(newscripts[i])
+ if trace_locating then
+ report_scripts("checking new script %a",newscript)
+ end
+ if oldscript==newscript then
if trace_locating then
- report_scripts("unable to locate new script")
+ report_scripts("old and new script are the same")
end
- else
- for i=1,#newscripts do
- local newscript = resolvers.cleanpath(newscripts[i])
- if trace_locating then
- report_scripts("checking new script %s", newscript)
- end
- if oldscript == newscript then
- if trace_locating then
- report_scripts("old and new script are the same")
- end
- elseif not find(newscript,scriptpath) then
- if trace_locating then
- report_scripts("new script should come from %s",scriptpath)
- end
- elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_locating then
- report_scripts("invalid new script name")
- end
- else
- local newdata = io.loaddata(newscript)
- if newdata then
- if trace_locating then
- report_scripts("old script content replaced by new content")
- end
- io.savedata(oldscript,newdata)
- break
- elseif trace_locating then
- report_scripts("unable to load new script")
- end
- end
+ elseif not find(newscript,scriptpath) then
+ if trace_locating then
+ report_scripts("new script should come from %a",scriptpath)
end
+ elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
+ if trace_locating then
+ report_scripts("invalid new script name")
+ end
+ else
+ local newdata=io.loaddata(newscript)
+ if newdata then
+ if trace_locating then
+ report_scripts("old script content replaced by new content")
+ end
+ io.savedata(oldscript,newdata)
+ break
+ elseif trace_locating then
+ report_scripts("unable to load new script")
+ end
+ end
end
+ end
end
@@ -15312,78 +15268,55 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmf'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local resolvers = resolvers
-
-local report_tds = logs.reporter("resolvers","tds")
+package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- = <<
--- ? ??
--- < +=
--- > =+
+-- original size: 2600, stripped down to: 1627
+if not modules then modules={} end modules ['data-tmf']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local report_tds=logs.reporter("resolvers","tds")
function resolvers.load_tree(tree,resolve)
- if type(tree) == "string" and tree ~= "" then
-
- local getenv, setenv = resolvers.getenv, resolvers.setenv
-
- -- later might listen to the raw osenv var as well
- local texos = "texmf-" .. os.platform
-
- local oldroot = environment.texroot
- local newroot = file.collapsepath(tree)
-
- local newtree = file.join(newroot,texos)
- local newpath = file.join(newtree,"bin")
-
- if not lfs.isdir(newtree) then
- report_tds("no '%s' under tree %s",texos,tree)
- os.exit()
- end
- if not lfs.isdir(newpath) then
- report_tds("no '%s/bin' under tree %s",texos,tree)
- os.exit()
- end
-
- local texmfos = newtree
-
- environment.texroot = newroot
- environment.texos = texos
- environment.texmfos = texmfos
-
- -- Beware, we need to obey the relocatable autoparent so we
- -- set TEXMFCNF to its raw value. This is somewhat tricky when
- -- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode or when
- -- --resolve is passed to mtxrun. Maybe we should also set the
- -- local AUTOPARENT etc. although these are alwasy set new.
-
- if resolve then
- -- resolvers.luacnfspec = resolvers.joinpath(resolvers.resolve(resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))))
- resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
- end
-
- setenv('SELFAUTOPARENT', newroot)
- setenv('SELFAUTODIR', newtree)
- setenv('SELFAUTOLOC', newpath)
- setenv('TEXROOT', newroot)
- setenv('TEXOS', texos)
- setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
- setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
-
- report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to PATH",newpath)
- report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
- report_tds()
+ if type(tree)=="string" and tree~="" then
+ local getenv,setenv=resolvers.getenv,resolvers.setenv
+ local texos="texmf-"..os.platform
+ local oldroot=environment.texroot
+ local newroot=file.collapsepath(tree)
+ local newtree=file.join(newroot,texos)
+ local newpath=file.join(newtree,"bin")
+ if not lfs.isdir(newtree) then
+ report_tds("no %a under tree %a",texos,tree)
+ os.exit()
+ end
+ if not lfs.isdir(newpath) then
+ report_tds("no '%s/bin' under tree %a",texos,tree)
+ os.exit()
+ end
+ local texmfos=newtree
+ environment.texroot=newroot
+ environment.texos=texos
+ environment.texmfos=texmfos
+ if resolve then
+ resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
end
+ setenv('SELFAUTOPARENT',newroot)
+ setenv('SELFAUTODIR',newtree)
+ setenv('SELFAUTOLOC',newpath)
+ setenv('TEXROOT',newroot)
+ setenv('TEXOS',texos)
+ setenv('TEXMFOS',texmfos)
+ setenv('TEXMFCNF',resolvers.luacnfspec,true)
+ setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
+ report_tds()
+ end
end
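
resolvers.load_tree above switches the current run to another TEXMF tree: it checks that texmf-<platform>/bin exists under the given root, rewrites TEXROOT, TEXOS, TEXMFOS and the SELFAUTO* variables, and prepends the tree's bin directory to PATH. A sketch of the call, assuming such a tree layout exists; the path is a placeholder:

-- hypothetical root containing texmf-<os.platform>/bin
resolvers.load_tree("/data/tl-testing", true)  -- true: resolve TEXMFCNF before exporting it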
@@ -15391,81 +15324,76 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-lst'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- used in mtxrun, can be loaded later .. todo
-
-local find, concat, upper, format = string.find, table.concat, string.upper, string.format
-local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
+package.loaded["data-lst"] = package.loaded["data-lst"] or true
-resolvers.listers = resolvers.listers or { }
-
-local resolvers = resolvers
-
-local report_lists = logs.reporter("resolvers","lists")
+-- original size: 2654, stripped down to: 2301
+if not modules then modules={} end modules ['data-lst']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
+resolvers.listers=resolvers.listers or {}
+local resolvers=resolvers
+local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
- if type(str) == 'table' then
- return concat(str," | ")
- else
- return str
- end
+ if type(str)=='table' then
+ return concat(str," | ")
+ else
+ return str
+ end
end
-
function resolvers.listers.variables(pattern)
- local instance = resolvers.instance
- local environment = instance.environment
- local variables = instance.variables
- local expansions = instance.expansions
- local pattern = upper(pattern or "")
- local configured = { }
- local order = instance.order
- for i=1,#order do
- for k, v in next, order[i] do
- if v ~= nil and configured[k] == nil then
- configured[k] = v
- end
- end
- end
- local env = fastcopy(environment)
- local var = fastcopy(variables)
- local exp = fastcopy(expansions)
- for key, value in sortedpairs(configured) do
- if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
- report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
- end
- end
- instance.environment = fastcopy(env)
- instance.variables = fastcopy(var)
- instance.expansions = fastcopy(exp)
-end
-
-function resolvers.listers.configurations(report)
- local configurations = resolvers.instance.specification
- local report = report or texio.write_nl
- for i=1,#configurations do
- report(format("file : %s",resolvers.resolve(configurations[i])))
- end
- report("")
- local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
- for i=1,#list do
- local li = resolvers.resolve(list[i])
- if lfs.isdir(li) then
- report(format("path - %s",li))
- else
- report(format("path + %s",li))
- end
+ local instance=resolvers.instance
+ local environment=instance.environment
+ local variables=instance.variables
+ local expansions=instance.expansions
+ local pattern=upper(pattern or "")
+ local configured={}
+ local order=instance.order
+ for i=1,#order do
+ for k,v in next,order[i] do
+ if v~=nil and configured[k]==nil then
+ configured[k]=v
+ end
+ end
+ end
+ local env=fastcopy(environment)
+ local var=fastcopy(variables)
+ local exp=fastcopy(expansions)
+ for key,value in sortedpairs(configured) do
+ if key~="" and (pattern=="" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ end
+ end
+ instance.environment=fastcopy(env)
+ instance.variables=fastcopy(var)
+ instance.expansions=fastcopy(exp)
+end
+local report_resolved=logs.reporter("system","resolved")
+function resolvers.listers.configurations()
+ local configurations=resolvers.instance.specification
+ for i=1,#configurations do
+ report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ end
+ report_resolved("")
+ local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+ for i=1,#list do
+ local li=resolvers.resolve(list[i])
+ if lfs.isdir(li) then
+ report_resolved("path - %s",li)
+ else
+ report_resolved("path + %s",li)
end
+ end
end
@@ -15473,279 +15401,407 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-sta'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-lib"] = package.loaded["util-lib"] or true
--- this code is used in the updater
+-- original size: 8911, stripped down to: 4216
+
+if not modules then modules={} end modules ['util-lib']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find=string.gsub,string.find
+local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
+local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
+local loaded=package.loaded
+local report_swiglib=logs.reporter("swiglib")
+local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
+local function requireswiglib(required,version)
+ local library=loaded[required]
+ if library==nil then
+ local required_full=gsub(required,"%.","/")
+ local required_path=pathpart(required_full)
+ local required_base=nameonly(required_full)
+ local required_name=required_base.."."..os.libsuffix
+ local version=type(version)=="string" and version~="" and version or false
+ local function check(locate,...)
+ local found_library=nil
+ if version then
+ local asked_library=joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ if not found_library or found_library==""then
+ asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ else
+ local asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ return found_library and found_library~="" and found_library or false
+ end
+ local found_library=findfile and check(findfile,"lib")
+ if findfiles and not found_library then
+ local asked_library=joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list=findfiles(asked_library,"lib",true)
+ if list and #list>0 then
+ table.sort(list)
+ found_library=list[#list]
+ end
+ end
+ if not found_library then
+ package.extraclibpath(environment.ownpath)
+ local paths=package.clibpaths()
+ for i=1,#paths do
+ local found_library=check(lfs.isfile)
+ if found_library then
+ break
+ end
+ end
+ end
+ if not found_library then
+ if trace_swiglib then
+ report_swiglib("not found: %a",asked_library)
+ end
+ library=false
+ else
+ local path=pathpart(found_library)
+ local base=nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ library=package.loadlib(found_library,"luaopen_"..required_base)
+ if type(library)=="function" then
+ library=library()
+ else
+ library=false
+ end
+ dir.pop()
+ end
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required]=library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+local savedrequire=require
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
+local swiglibs={}
+function swiglib(name,version)
+ local library=swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ report_swiglib("loading %a",name)
+ library=requireswiglib("swiglib."..name,version)
+ swiglibs[name]=library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+statistics.register("used swiglibs",function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
-local gmatch, match = string.gmatch, string.match
-local type = type
-states = states or { }
-local states = states
+end -- of closure
-states.data = states.data or { }
-local data = states.data
+do -- create closure to overcome 200 locals limit
-states.hash = states.hash or { }
-local hash = states.hash
+package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
-states.tag = states.tag or ""
-states.filename = states.filename or ""
+-- original size: 5703, stripped down to: 2507
+if not modules then modules={} end modules ['luat-sta']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gmatch,match=string.gmatch,string.match
+local type=type
+states=states or {}
+local states=states
+states.data=states.data or {}
+local data=states.data
+states.hash=states.hash or {}
+local hash=states.hash
+states.tag=states.tag or ""
+states.filename=states.filename or ""
function states.save(filename,tag)
- tag = tag or states.tag
- filename = file.addsuffix(filename or states.filename,'lus')
- io.savedata(filename,
- "-- generator : luat-sta.lua\n" ..
- "-- state tag : " .. tag .. "\n\n" ..
- table.serialize(data[tag or states.tag] or {},true)
- )
+ tag=tag or states.tag
+ filename=file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
+ )
end
-
function states.load(filename,tag)
- states.filename = filename
- states.tag = tag or "whatever"
- states.filename = file.addsuffix(states.filename,'lus')
- data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
+ states.filename=filename
+ states.tag=tag or "whatever"
+ states.filename=file.addsuffix(states.filename,'lus')
+ data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
end
-
local function set_by_tag(tag,key,value,default,persistent)
- local d, h = data[tag], hash[tag]
- if d then
- if type(d) == "table" then
- local dkey, hkey = key, key
- local pre, post = match(key,"(.+)%.([^%.]+)$")
- if pre and post then
- for k in gmatch(pre,"[^%.]+") do
- local dk = d[k]
- if not dk then
- dk = { }
- d[k] = dk
- elseif type(dk) == "string" then
- -- invalid table, unable to upgrade structure
- -- hope for the best or delete the state file
- break
- end
- d = dk
- end
- dkey, hkey = post, key
- end
- if value == nil then
- value = default
- elseif value == false then
- -- special case
- elseif persistent then
- value = value or d[dkey] or default
- else
- value = value or default
- end
- d[dkey], h[hkey] = value, value
- elseif type(d) == "string" then
- -- weird
- data[tag], hash[tag] = value, value
- end
- end
+ local d,h=data[tag],hash[tag]
+ if d then
+ if type(d)=="table" then
+ local dkey,hkey=key,key
+ local pre,post=match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk=d[k]
+ if not dk then
+ dk={}
+ d[k]=dk
+ elseif type(dk)=="string" then
+ break
+ end
+ d=dk
+ end
+ dkey,hkey=post,key
+ end
+ if value==nil then
+ value=default
+ elseif value==false then
+ elseif persistent then
+ value=value or d[dkey] or default
+ else
+ value=value or default
+ end
+ d[dkey],h[hkey]=value,value
+ elseif type(d)=="string" then
+ data[tag],hash[tag]=value,value
+ end
+ end
end
-
local function get_by_tag(tag,key,default)
- local h = hash[tag]
- if h and h[key] then
- return h[key]
- else
- local d = data[tag]
- if d then
- for k in gmatch(key,"[^%.]+") do
- local dk = d[k]
- if dk ~= nil then
- d = dk
- else
- return default
- end
- end
- if d == false then
- return false
- else
- return d or default
- end
+ local h=hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d=data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk=d[k]
+ if dk~=nil then
+ d=dk
+ else
+ return default
end
+ end
+ if d==false then
+ return false
+ else
+ return d or default
+ end
end
+ end
end
-
-states.set_by_tag = set_by_tag
-states.get_by_tag = get_by_tag
-
+states.set_by_tag=set_by_tag
+states.get_by_tag=get_by_tag
function states.set(key,value,default,persistent)
- set_by_tag(states.tag,key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
end
-
function states.get(key,default)
- return get_by_tag(states.tag,key,default)
+ return get_by_tag(states.tag,key,default)
end
-
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-fmt'] = {
- version = 1.001,
- comment = "companion to mtxrun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
-local format = string.format
-
-local report_format = logs.reporter("resolvers","formats")
-
--- helper for mtxrun
+package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
-local quoted = string.quoted
+-- original size: 5951, stripped down to: 4922
-local function primaryflags() -- not yet ok
- local trackers = environment.argument("trackers")
- local directives = environment.argument("directives")
- local flags = ""
- if trackers and trackers ~= "" then
- flags = flags .. "--trackers=" .. quoted(trackers)
- end
- if directives and directives ~= "" then
- flags = flags .. "--directives=" .. quoted(directives)
- end
- return flags
+if not modules then modules={} end modules ['luat-fmt']={
+ version=1.001,
+ comment="companion to mtxrun",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format=string.format
+local concat=table.concat
+local quoted=string.quoted
+local luasuffixes=utilities.lua.suffixes
+local report_format=logs.reporter("resolvers","formats")
+local function primaryflags()
+ local trackers=environment.argument("trackers")
+ local directives=environment.argument("directives")
+ local flags={}
+ if trackers and trackers~="" then
+ flags={ "--trackers="..quoted(trackers) }
+ end
+ if directives and directives~="" then
+ flags={ "--directives="..quoted(directives) }
+ end
+ if environment.argument("jit") then
+ flags={ "--jiton" }
+ end
+ return concat(flags," ")
end
-
function environment.make_format(name)
- -- change to format path (early as we need expanded paths)
- local olddir = lfs.currentdir()
- local path = caches.getwritablepath("formats") or "" -- maybe platform
- if path ~= "" then
- lfs.chdir(path)
- end
- report_format("format path: %s",lfs.currentdir())
- -- check source file
- local texsourcename = file.addsuffix(name,"mkiv")
- local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- if fulltexsourcename == "" then
- texsourcename = file.addsuffix(name,"tex")
- fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- end
- if fulltexsourcename == "" then
- report_format("no tex source file with name: %s (mkiv or tex)",name)
- lfs.chdir(olddir)
- return
- else
- report_format("using tex source file: %s",fulltexsourcename)
- end
- local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
- -- check specification
- local specificationname = file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- if fullspecificationname == "" then
- specificationname = file.join(texsourcepath,"context.lus")
- fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- end
- if fullspecificationname == "" then
- report_format("unknown stub specification: %s",specificationname)
- lfs.chdir(olddir)
- return
- end
- local specificationpath = file.dirname(fullspecificationname)
- -- load specification
- local usedluastub = nil
- local usedlualibs = dofile(fullspecificationname)
- if type(usedlualibs) == "string" then
- usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
- elseif type(usedlualibs) == "table" then
- report_format("using stub specification: %s",fullspecificationname)
- local texbasename = file.basename(name)
- local luastubname = file.addsuffix(texbasename,"lua")
- local lucstubname = file.addsuffix(texbasename,"luc")
- -- pack libraries in stub
- report_format("creating initialization file: %s",luastubname)
- utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
- -- compile stub file (does not save that much as we don't use this stub at startup any more)
- local strip = resolvers.booleanvariable("LUACSTRIP", true)
- if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file: %s",lucstubname)
- usedluastub = lucstubname
- else
- report_format("using uncompiled initialization file: %s",luastubname)
- usedluastub = luastubname
- end
+ local engine=environment.ownmain or "luatex"
+ local olddir=dir.current()
+ local path=caches.getwritablepath("formats",engine) or ""
+ if path~="" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ local texsourcename=file.addsuffix(name,"mkiv")
+ local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename=="" then
+ texsourcename=file.addsuffix(name,"tex")
+ fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename=="" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
+ local specificationname=file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname=="" then
+ specificationname=file.join(texsourcepath,"context.lus")
+ fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname=="" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath=file.dirname(fullspecificationname)
+ local usedluastub=nil
+ local usedlualibs=dofile(fullspecificationname)
+ if type(usedlualibs)=="string" then
+ usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs)=="table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename=file.basename(name)
+ local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub=lucstubname
else
- report_format("invalid stub specification: %s",fullspecificationname)
- lfs.chdir(olddir)
- return
- end
- -- generate format
- local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
- report_format("running command: %s\n",command)
- os.spawn(command)
- -- remove related mem files
- local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
- -- report_format("removing related mplib format with pattern '%s'", pattern)
- local mp = dir.glob(pattern)
- if mp then
- for i=1,#mp do
- local name = mp[i]
- report_format("removing related mplib format %s", file.basename(name))
- os.remove(name)
- end
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub=luastubname
end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
lfs.chdir(olddir)
+ return
+ end
+ local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ local mp=dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name=mp[i]
+ report_format("removing related mplib format %a",file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
end
-
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
- if name and name ~= "" then
- local barename = file.removesuffix(name)
- local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
- if fmtname == "" then
- fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
- end
- fmtname = resolvers.cleanpath(fmtname)
- if fmtname == "" then
- report_format("no format with name: %s",name)
- else
- local barename = file.removesuffix(name) -- expanded name
- local luaname = file.addsuffix(barename,"luc")
- if not lfs.isfile(luaname) then
- luaname = file.addsuffix(barename,"lua")
- end
- if not lfs.isfile(luaname) then
- report_format("using format name: %s",fmtname)
- report_format("no luc/lua with name: %s",barename)
- else
- local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
- report_format("running command: %s",command)
- os.spawn(command)
- end
- end
+ if name and name~="" then
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname=="" then
+ fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname=resolvers.cleanpath(fmtname)
+ if fmtname=="" then
+ report_format("no format with name %a",name)
+ else
+ local barename=file.removesuffix(name)
+ local luaname=file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname=file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
end
+ end
end
end -- of closure
+
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
+-- skipped libraries : -
+-- original bytes : 658276
+-- stripped bytes : 241564
+
-- end library merge
-own = { } -- not local, might change
+-- We need this hack till luatex is fixed.
+--
+-- for k,v in pairs(arg) do print(k,v) end
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
+end
+
+-- End of hack.
-own.libs = { -- order can be made better
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = environment and environment.ownpath or ownpath
+
+local ownlibs = { -- order can be made better
+
+ 'l-lua.lua',
+ 'l-lpeg.lua',
+ 'l-function.lua',
'l-string.lua',
'l-table.lua',
- 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -15758,19 +15814,23 @@ own.libs = { -- order can be made better
'l-unicode.lua',
'l-math.lua',
+ 'util-str.lua', -- code might move to l-string
'util-tab.lua',
'util-sto.lua',
- 'util-mrg.lua',
- 'util-lua.lua',
'util-prs.lua',
'util-fmt.lua',
- 'util-deb.lua',
- 'trac-inf.lua',
'trac-set.lua',
'trac-log.lua',
- 'trac-pro.lua',
+ 'trac-inf.lua', -- was before trac-set
+ 'trac-pro.lua', -- not really needed
+ 'util-lua.lua', -- indeed here?
+ 'util-deb.lua',
+
+ 'util-mrg.lua',
+ 'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -15780,6 +15840,8 @@ own.libs = { -- order can be made better
'lxml-aux.lua',
'lxml-xml.lua',
+ 'trac-xml.lua',
+
'data-ini.lua',
'data-exp.lua',
'data-env.lua',
@@ -15796,35 +15858,20 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
'data-lst.lua',
+ 'util-lib.lua', -- swiglib
+
'luat-sta.lua',
'luat-fmt.lua',
-}
-
--- We need this hack till luatex is fixed.
---
--- for k,v in pairs(arg) do print(k,v) end
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
--- End of hack.
-
-local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
-local concat = table.concat
-
-own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
-own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
-local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+}
-own.list = {
+local ownlist = {
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15838,13 +15885,21 @@ own.list = {
owntree .. "/../../../texmf/tex/context/base",
}
-if own.path == "." then table.remove(own.list,1) end
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
local function locate_libs()
- for l=1,#own.libs do
- local lib = own.libs[l]
- for p =1,#own.list do
- local pth = own.list[p]
+ for l=1,#ownlibs do
+ local lib = ownlibs[l]
+ for p =1,#ownlist do
+ local pth = ownlist[p]
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
@@ -15858,8 +15913,8 @@ end
local function load_libs()
local found = locate_libs()
if found then
- for l=1,#own.libs do
- local filename = found .. "/" .. own.libs[l]
+ for l=1,#ownlibs do
+ local filename = found .. "/" .. ownlibs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
@@ -15933,55 +15988,85 @@ if not environment.experiments then environment.experiments = e_experiments end
local instance = resolvers.reset()
local helpinfo = [[
---script run an mtx script (lua prefered method) (--noquotes), no script gives list
---execute run a script or program (texmfstart method) (--noquotes)
---resolve resolve prefixed arguments
---ctxlua run internally (using preloaded libs)
---internal run script using built in libraries (same as --ctxlua)
---locate locate given filename in database (default) or system (--first --all --detail)
-
---autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
---tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
---environment=name use given (tmf) environment file
---path=runpath go to given path before execution
---ifchanged=filename only execute when given file has changed (md checksum)
---iftouched=old,new only execute when given file has changed (time stamp)
-
---makestubs create stubs for (context related) scripts
---removestubs remove stubs (context related) scripts
---stubpath=binpath paths where stubs wil be written
---windows create windows (mswin) stubs
---unix create unix (linux) stubs
-
---verbose give a bit more info
---trackers=list enable given trackers
---progname=str format or backend
-
---edit launch editor with found file
---launch (--all) launch files like manuals, assumes os support
-
---timedrun run a script an time its run
---autogenerate regenerate databases if needed (handy when used to run context in an editor)
-
---usekpse use kpse as fallback (when no mkiv and cache installed, often slower)
---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
-
---prefixes show supported prefixes
-
---generate generate file database
-
---variables show configuration variables
---configurations show configuration order
-
---expand-braces expand complex variable
---expand-path expand variable (resolve paths)
---expand-var expand variable (resolve references)
---show-path show path expansion of ...
---var-value report value of variable
---find-file report file location
---find-path report path of file
-
---pattern=str filter variables
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+ <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+ <flag name="stubpath" value="binpath"><short>paths where stubs wil be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -16093,7 +16178,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +16190,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +16217,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +16410,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +16469,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +16524,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +16632,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16647,8 +16748,18 @@ else
end
+if e_argument("script") or e_argument("scripts") then
-if e_argument("selfmerge") then
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16678,17 +16789,6 @@ elseif e_argument("ctxlua") or e_argument("internal") then
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif e_argument("script") or e_argument("scripts") then
-
- -- run a script by loading it (using libs), pass args
-
- runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
-
elseif e_argument("execute") then
-- execute script
@@ -16715,6 +16815,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
-- make stubs (depricated)
@@ -16806,7 +16914,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
@@ -16908,6 +17016,23 @@ elseif e_argument("version") then
application.version()
+elseif e_argument("directives") then
+
+ directives.show()
+
+elseif e_argument("trackers") then
+
+ trackers.show()
+
+elseif e_argument("experiments") then
+
+ experiments.show()
+
+elseif e_argument("exporthelp") then
+
+ runners.loadbase()
+ application.export(e_argument("exporthelp"),filename)
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
@@ -16938,7 +17063,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
@@ -16955,4 +17079,4 @@ end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
-os.exit(ok)
+os.exit(ok,true) -- true forces a cleanup in 5.2+
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.exe b/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.exe
new file mode 100755
index 00000000000..2d45f27494d
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.exe
Binary files differ
diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/setuptex.bat b/Master/texmf-dist/scripts/context/stubs/mswin/setuptex.bat
new file mode 100755
index 00000000000..52c60f155c7
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/stubs/mswin/setuptex.bat
@@ -0,0 +1,34 @@
+@ECHO OFF
+
+REM author: Hans Hagen - PRAGMA ADE - Hasselt NL - www.pragma-ade.com
+
+:userpath
+
+if "%SETUPTEX%"=="done" goto done
+
+if "%~s1"=="" goto selftest
+
+set TEXMFOS=%~s1texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+set TEXMFOS=%~s1\texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+:selftest
+
+set TEXMFOS=%~d0%~p0texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+set TEXMFOS=%~d0%~p0\texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+:start
+
+set PATH=%TEXMFOS%\bin;%PATH%
+
+:register
+
+set SETUPTEX=done
+set CTXMINIMAL=yes
+
+:done
diff --git a/Master/texmf-dist/scripts/context/stubs/unix/ctxtools b/Master/texmf-dist/scripts/context/stubs/unix/ctxtools
new file mode 100644
index 00000000000..2e6bd4afaa0
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/stubs/unix/ctxtools
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script ctxtools "$@"
diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
index 108f2a8a112..00f63a5791d 100755
--- a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
+++ b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
@@ -1,5 +1,16 @@
#!/usr/bin/env texlua
+-- for k, v in next, _G.string do
+-- local tv = type(v)
+-- if tv == "table" then
+-- for kk, vv in next, v do
+-- print(k,kk,vv)
+-- end
+-- else
+-- print(tv,k,v)
+-- end
+-- end
+
if not modules then modules = { } end modules ['mtxrun'] = {
version = 1.001,
comment = "runner, lua replacement for texmfstart.rb",
@@ -43,3016 +54,3010 @@ if not modules then modules = { } end modules ['mtxrun'] = {
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+package.loaded["l-lua"] = package.loaded["l-lua"] or true
+
+-- original size: 10048, stripped down to: 5684
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+local type=type
+local gsub,format=string.gsub,string.format
+local package=package
+local searchers=package.searchers or package.loaders
+local libpaths=nil
+local clibpaths=nil
+local libhash={}
+local clibhash={}
+local libextras={}
+local clibextras={}
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local helpers=package.helpers or {
+ libpaths=function() return {} end,
+ clibpaths=function() return {} end,
+ cleanpath=cleanpath,
+ trace=false,
+ report=function(...) print(format(...)) end,
+}
+package.helpers=helpers
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+package.libpaths=getlibpaths
+package.clibpaths=getclibpaths
+local function addpath(what,paths,extras,hash,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths+1]=path
+ extras[#extras+1]=path
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths,extras
+end
+function package.extralibpath(...)
+ libpaths,libextras=addpath("lua",getlibpaths(),libextras,libhash,...)
+end
+function package.extraclibpath(...)
+ clibpaths,clibextras=addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
+if not searchers[-2] then
+ searchers[-2]=searchers[2]
+end
+searchers[2]=function(name)
+ return helpers.loaded(name)
+end
+searchers[3]=nil
+local function loadedaslib(resolved,rawname)
+ local init="luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true,searchers[-2](name)
+end
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+helpers.loadedaslib=loadedaslib
+helpers.loadedbylua=loadedbylua
+helpers.loadedbypath=loadedbypath
+helpers.notloaded=notloaded
+function helpers.loaded(name)
+ local thename=gsub(name,"%.","/")
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix or "so")
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
-local string = string
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
--- some functions may disappear as they are not used anywhere
+end -- of closure
-if not string.split then
+do -- create closure to overcome 200 locals limit
- -- this will be overloaded by a faster lpeg variant
+package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
- function string.split(str,pattern)
- local t = { }
- if #str > 0 then
- local n = 1
- for s in gmatch(str..pattern,"(.-)"..pattern) do
- t[n] = s
- n = n + 1
- end
- end
- return t
- end
+-- original size: 26252, stripped down to: 14371
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
end
-
-function string.unquoted(str)
- return (gsub(str,"^([\"\'])(.*)%1$","%2"))
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
-
-
-function string.quoted(str)
- return format("%q",str) -- always "
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
end
- return n
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
else
- return str
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
end
-
-local space = S(" \t\v\n")
-local nospace = 1 - space
-local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
-
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
end
-
-function string.is_empty(str)
- return not find(str,"%S")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old[i]
+ end
+ return new
end
-
-local patterns_escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-function string.escapedpattern(str,simple)
- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
end
-
-function string.topattern(str,lowercase,strict)
- if str == "" then
- return ".*"
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
else
- str = gsub(str,".",simple_escapes)
- if lowercase then
- str = lower(str)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
end
- if strict then
- return "^" .. str .. "$"
+ else
+ if k==v then
+ p=P(k)
else
- return str
+ p=P(k)/v
end
+ end
end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-function"] = package.loaded["l-function"] or true
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-
--- Starting with version 5.2 Lua no longer provides ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashion we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
+-- original size: 361, stripped down to: 322
-if not ipairs then
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
+end -- of closure
- function ipairs(a)
- return iterate, a, 0
- end
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["l-string"] = package.loaded["l-string"] or true
-if not pairs then
+-- original size: 5513, stripped down to: 2708
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
- function pairs(t)
- return next, t -- , nil
- end
-end
+end -- of closure
--- Also, unpack has been moved to the table table, and for compatibility
--- reasons we provide both now.
+do -- create closure to overcome 200 locals limit
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
-end
+package.loaded["l-table"] = package.loaded["l-table"] or true
--- extra functions, some might go (when not used)
+-- original size: 44643, stripped down to: 19717
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
end
- return lst
+ end
+ return lst
end
-
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
end
return keys
+ else
+ return {}
+ end
end
-
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
end
-
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
+ category=3
end
+ end
end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category==0 or category==3 then
+ sort(srt,compare)
else
- sort(srt)
+ sort(srt)
end
return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
end
-
-local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
end
- sort(srt)
- return srt
+ end
+ return sortedkeys(keys)
end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
local function nothing() end
-
-local function sortedhash(t)
- if t then
- local n, s = 0, sortedkeys(t) -- the robust one
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash
-
-function table.append(t, list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- local lst = { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- t[k] = v
- end
- end
- return t
-end
-
function table.merged(...)
- local tmp, lst = { }, { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
- end
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
end
- return tmp
+ end
+ return t
end
-
-function table.imerge(t, ...)
- local lst, nt = { ... }, #t
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
end
- return t
+ end
+ return t
end
-
function table.imerged(...)
- local tmp, ntmp, lst = { }, 0, {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k,v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
else
- return { }
- end
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
end
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent)
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
end
- return h
+ end
+ return h
end
-
function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
end
- return hsh
+ end
+ return hsh
end
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
}
-
local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
end
+ end
+ return tt
end
- return nil
+ end
+ return nil
end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there is no good number_to_string converter with the best resolution
-
+local propername=patterns.propername
local function dummy() end
-
local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
- else
- local tn = type(name)
- if tn == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
- end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- -- circular
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%s]=loadstring(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%q]=loadstring(%q),",depth,k,f))
- end
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
+ local tn=type(name)
+ if tn=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("%s[0x%04X]={",depth,name))
else
- handle("[" .. name .. "]={")
+ handle(format("%s[%s]={",depth,name))
end
- elseif tname == "boolean" then
- if name then
- handle("return {")
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
else
- handle("{")
- end
- else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
- end
- handle("}")
-end
-
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-
-table.tohandle = serialize
-
--- sometimes tables are really huge (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
- end
-end
-
-local function flattened(t,f,depth)
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume that only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
else
- f[k] = v
- end
- end
- end
- local n = #f
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- n = #f
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
else
- n = n + 1
- f[n] = v
- end
- end
- return f
-end
-
-table.flattened = flattened
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { }
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
else
- f[#f+1] = v
- end
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
else
- f[#f+1] = v
- end
- end
- return f
-end
-
-function table.unnest(t) -- bad name
- return unnest(t)
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
else
- return false
- end
- end
- return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
-
--- maybe also make a combined one
-
-function table.compact(t)
- if t then
- for k,v in next, t do
- if not next(v) then
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
- return false
-end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
- end
- return n
-end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
- for k, v in next, s do
- n[k] = v
- end
- end
- for k, v in next, t do
- n[v] = k
- end
- return n
-end
-
-function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
- end
-end
-
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
end
- end
- return concat(s, sep or " | ")
-end
-
-function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- table.tohandle(print,t,...)
- end
-end
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t)
- return not t or not next(t)
-end
-
-function table.has_one_entry(t)
- return t and not next(t,next(t))
-end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
-end
-
--- new, might move (maybe duplicate)
-
-function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
- end
- return new
-end
-
--- function table.sorted(t,...)
--- table.sort(t,...)
--- return t -- still sorts in-place
--- end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
-local lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
-local report = texio and texio.write_nl or print
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-
--- problem: separator can be lpeg and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
-function string.splitup(str,separator)
- if not separator then
- separator = ","
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
end
- return match(splitters_m[separator] or splitat(separator),str)
-end
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * tsplitat(newline)
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
+ if next(root) then
+ do_serialize(root,name,"",0)
end
- return match(c,str)
+ end
+ handle("}")
end
-
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
else
- return Cs(((str^1)/"" + 1)^0)
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
end
+ f:close()
+ io.flush()
+ end
end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
else
- return Cs((((1-str)^1)/"" + 1)^0)
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return false
end
+ end
+ return true
end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
end
- return splitter
+ end
end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
end
- return splitter
+ end
+ return false
end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
end
-
+ return t
+ end
end
-
-local patterns_escapes = { -- also defined in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defined in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
end
-
--- utf extensions
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
+function table.is_empty(t)
+ return not t or not next(t)
end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
end
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
--- loop back from the end, i.e. prepend.
-
-local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
-
-function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = fastcopy(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
- if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
- else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
- else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
- end
- end
- return p
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
--- for k, v in next, t do
- for k, v in table.sortedhash(t) do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
end
- return p
+ end
+ return new
end
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
+function table.sorted(t,...)
+ sort(t,...)
+ return t
end
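-- Sketch: table.unique keeps the first occurrence of each value, table.sorted
-- sorts in place and returns the table so calls can be chained.
local u = table.unique { "a", "b", "a", "c", "b" }  -- { "a", "b", "c" }
local s = table.sorted({ 3, 1, 2 })                 -- { 1, 2, 3 }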
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-io"] = package.loaded["l-io"] or true
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local type = type
+-- original size: 8799, stripped down to: 6325
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
+ io.fileseparator,io.pathseparator="\\",";"
else
- io.fileseparator, io.pathseparator = "/" , ":"
+ io.fileseparator,io.pathseparator="/",":"
end
-
-function io.loaddata(filename,textmode)
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
- local data = f:read('*all')
- f:close()
- return data
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
else
- return nil
+ step=floor(size/(1024*1024))*1024*1024/8
end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
else
- return false
+ f:write(data or "")
end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
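-- Round trip sketch for io.savedata and io.loaddata; "demo.txt" is only an
-- example file name. Tables are concatenated with the given joiner, and loaddata
-- returns nil for empty or missing files.
io.savedata("demo.txt", { "one", "two" }, "\n")
print(io.loaddata("demo.txt", true))   -- "one\ntwo" (textmode read)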
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- assert(f:close())
- return true
- end
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
end
-
function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- assert(f:close())
- return s
- end
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
end
-
function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(filename)
- local n = f and io.noflines(f) or 0
- assert(f:close())
- return n
+ if type(f)=="string" then
+ local f=io.open(filename)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
+ return 0
end
-end
-
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
}
-
function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
+ if f then
+ return nextchar[n or 1],f
+ end
end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
end
+ end
}
-
function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
end
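-- Sketch: io.bytes returns an iterator that yields byte values, n at a time, with
-- negative n swapping the order for little endian data ("demo.txt" is an example).
local f = io.open("demo.txt", "rb")
if f then
  for b in io.bytes(f) do   -- one byte value per step
    io.write(b, " ")
  end
  f:close()
end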
-
function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
end
+ end
end
+ end
end
-
local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
end
-
-io.readnumber = readnumber
-
+io.readnumber=readnumber
function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"%z","")
- return str
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
end
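-- Sketch: io.readnumber reads n bytes as a big endian integer (negative n for
-- little endian), io.readstring reads n bytes and strips embedded zero bytes;
-- "data.bin" is a hypothetical file.
local f = io.open("data.bin", "rb")
if f then
  local length = io.readnumber(f, 4)   -- 256^3*a + 256^2*b + 256*c + d
  local tag    = io.readstring(f, 4)   -- next four bytes as a string
  f:close()
end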
-
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-number'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-number"] = package.loaded["l-number"] or true
--- this module will be replaced when we have the bit library
+-- original size: 4939, stripped down to: 2830
-local tostring = tostring
-local format, floor, match, rep = string.format, math.floor, string.match, string.rep
-local concat, insert = table.concat, table.insert
-local lpegmatch = lpeg.match
-
-number = number or { }
-local number = number
-
--- a,b,c,d,e,f = number.toset(100101)
-
-function number.toset(n)
- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
-end
-
-function number.toevenhex(n)
- local s = format("%X",n)
- if #s % 2 == 0 then
- return s
+if not modules then modules={} end modules ['l-number']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tostring,tonumber=tostring,tonumber
+local format,floor,match,rep=string.format,math.floor,string.match,string.rep
+local concat,insert=table.concat,table.insert
+local lpegmatch=lpeg.match
+number=number or {}
+local number=number
+if bit32 then
+ local btest,bor=bit32.btest,bit32.bor
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ number.hasbit=btest
+ number.setbit=bor
+ function number.setbit(x,p)
+ return btest(x,p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return btest(x,p) and x-p or x
+ end
+else
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ function number.hasbit(x,p)
+ return x%(p+p)>=p
+ end
+ function number.setbit(x,p)
+ return (x%(p+p)>=p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return (x%(p+p)>=p) and x-p or x
+ end
+end
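-- Sketch of the bit helpers above (same behaviour with or without bit32): bit
-- positions are 1 based and passed as powers of two, as in hasbit(x,bit(3)).
local flags = 0
flags = number.setbit(flags, number.bit(3))    -- sets bit 3, value 4
print(number.hasbit(flags, number.bit(3)))     -- true
print(number.clearbit(flags, number.bit(3)))   -- 0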
+if bit32 then
+ local bextract=bit32.extract
+ local t={
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ }
+ function number.tobitstring(b,m)
+ local n=32
+ for i=0,31 do
+ local v=bextract(b,i)
+ local k=32-i
+ if v==1 then
+ n=k
+ t[k]="1"
+ else
+ t[k]="0"
+ end
+ end
+ if m then
+ m=33-m*8
+ if m<1 then
+ m=1
+ end
+ return concat(t,"",m)
+ elseif n<8 then
+ return concat(t)
+ elseif n<16 then
+ return concat(t,"",9)
+ elseif n<24 then
+ return concat(t,"",17)
else
- return "0" .. s
+ return concat(t,"",25)
end
-end
-
--- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- on
---
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- end
---
--- of course dedicated "(.)(.)(.)(.)" matches are even faster
-
-local one = lpeg.C(1-lpeg.S(''))^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
-function number.bits(n,zero)
- local t, i = { }, (zero and 0) or 1
- while n > 0 do
- local m = n % 2
- if m > 0 then
- insert(t,1,i)
- end
- n = floor(n/2)
- i = i + 1
+ end
+else
+ function number.tobitstring(n,m)
+ if n>0 then
+ local t={}
+ while n>0 do
+ insert(t,1,n%2>0 and 1 or 0)
+ n=floor(n/2)
+ end
+ local nn=8-#t%8
+ if nn>0 and nn<8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m=m*8-#t
+ if m>0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+      return rep("00000000",m)
+ else
+ return "00000000"
end
- return t
+ end
end
-
-
-function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
-end
-
-function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
-end
-
-function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+function number.valid(str,default)
+ return tonumber(str) or default or nil
end
-
-function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+function number.toevenhex(n)
+ local s=format("%X",n)
+ if #s%2==0 then
+ return s
+ else
+ return "0"..s
+ end
end
-
-
-function number.tobitstring(n,m)
- if n == 0 then
- if m then
- rep("00000000",m)
- else
- return "00000000"
- end
+local one=lpeg.C(1-lpeg.S('')/tonumber)^1
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+local function bits(n,i,...)
+ if n>0 then
+ local m=n%2
+ local n=floor(n/2)
+ if m>0 then
+ return bits(n,i+1,i,...)
else
- local t = { }
- while n > 0 do
- insert(t,1,n % 2 > 0 and 1 or 0)
- n = floor(n/2)
- end
- local nn = 8 - #t % 8
- if nn > 0 and nn < 8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m = m * 8 - #t
- if m > 0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
+ return bits(n,i+1,...)
end
+ else
+ return...
+ end
+end
+function number.bits(n)
+ return { bits(n,1) }
end
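-- Sketch: number.bits collects the 1 based positions of the set bits, highest
-- position first, using the recursive helper above.
print(table.concat(number.bits(6), ","))   -- "3,2" for binary 110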
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-set'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-set"] = package.loaded["l-set"] or true
--- This will become obsolete when we have the bitset library embedded.
-
-set = set or { }
-
-local nums = { }
-local tabs = { }
-local concat = table.concat
-local next, type = next, type
-
-set.create = table.tohash
+-- original size: 1923, stripped down to: 1133
+if not modules then modules={} end modules ['l-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+set=set or {}
+local nums={}
+local tabs={}
+local concat=table.concat
+local next,type=next,type
+set.create=table.tohash
function set.tonumber(t)
- if next(t) then
- local s = ""
- -- we could save mem by sorting, but it slows down
- for k, v in next, t do
- if v then
- -- why bother about the leading space
- s = s .. " " .. k
- end
- end
- local n = nums[s]
- if not n then
- n = #tabs + 1
- tabs[n] = t
- nums[s] = n
- end
- return n
- else
- return 0
+ if next(t) then
+ local s=""
+ for k,v in next,t do
+ if v then
+ s=s.." "..k
+ end
+ end
+ local n=nums[s]
+ if not n then
+ n=#tabs+1
+ tabs[n]=t
+ nums[s]=n
end
+ return n
+ else
+ return 0
+ end
end
-
function set.totable(n)
- if n == 0 then
- return { }
- else
- return tabs[n] or { }
- end
+ if n==0 then
+ return {}
+ else
+ return tabs[n] or {}
+ end
end
-
function set.tolist(n)
- if n == 0 or not tabs[n] then
- return ""
- else
- local t, n = { }, 0
- for k, v in next, tabs[n] do
- if v then
- n = n + 1
- t[n] = k
- end
- end
- return concat(t," ")
+ if n==0 or not tabs[n] then
+ return ""
+ else
+ local t,n={},0
+ for k,v in next,tabs[n] do
+ if v then
+ n=n+1
+ t[n]=k
+ end
end
+ return concat(t," ")
+ end
end
-
function set.contains(n,s)
- if type(n) == "table" then
- return n[s]
- elseif n == 0 then
- return false
- else
- local t = tabs[n]
- return t and t[s]
- end
+ if type(n)=="table" then
+ return n[s]
+ elseif n==0 then
+ return false
+ else
+ local t=tabs[n]
+ return t and t[s]
+ end
end
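-- Sketch of the set helpers: a hash of flags is interned to a small number that
-- can be queried and turned back into a string or table.
local n = set.tonumber { alpha = true, beta = true }
print(set.contains(n, "alpha"))   -- true
print(set.tolist(n))              -- "alpha beta" (order not guaranteed)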
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-os'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This file deals with some operating system issues. Please don't bother me
--- with the pros and cons of operating systems as they all have their flaws
--- and benefits. Bashing one of them won't help solving problems and fixing
--- bugs faster and is a waste of time and energy.
---
--- path separators: / or \ ... we can use / everywhere
--- suffixes : dll so exe <none> ... no big deal
--- quotes : we can use "" in most cases
--- expansion : unless "" are used * might give side effects
--- piping/threads : somewhat different for each os
--- locations : specific user file locations and settings can change over time
---
--- os.type : windows | unix (new, we already guessed os.platform)
--- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
--- os.platform : extended os.name with architecture
-
--- maybe build io.flush in os.execute
-
-local os = os
-local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
-local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+package.loaded["l-os"] = package.loaded["l-os"] or true
--- The following code permits traversing the environment table, at least
--- in luatex. Internally all environment names are uppercase.
+-- original size: 13692, stripped down to: 8406
+if not modules then modules={} end modules ['l-os']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local os=os
+local date,time=os.date,os.time
+local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
+local concat=table.concat
+local random,ceil,randomseed=math.random,math.ceil,math.randomseed
+local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
+math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+randomseed(math.initialseed)
if not os.__getenv__ then
-
- os.__getenv__ = os.getenv
- os.__setenv__ = os.setenv
-
- if os.env then
-
- local osgetenv = os.getenv
- local ossetenv = os.setenv
- local osenv = os.env local _ = osenv.PATH -- initialize the table
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- if type(v) == "table" then
- v = concat(v,";") -- path
- end
- ossetenv(K,v)
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- else
-
- local ossetenv = os.setenv
- local osgetenv = os.getenv
- local osenv = { }
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- local function __index(t,k)
- return os.getenv(k)
- end
- local function __newindex(t,k,v)
- os.setenv(k,v)
- end
-
- os.env = { }
-
- setmetatable(os.env, { __index = __index, __newindex = __newindex } )
-
+ os.__getenv__=os.getenv
+ os.__setenv__=os.setenv
+ if os.env then
+ local osgetenv=os.getenv
+ local ossetenv=os.setenv
+ local osenv=os.env local _=osenv.PATH
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ if type(v)=="table" then
+ v=concat(v,";")
+ end
+ ossetenv(K,v)
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ else
+ local ossetenv=os.setenv
+ local osgetenv=os.getenv
+ local osenv={}
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
end
-
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+ os.env={}
+ setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
+ end
end
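-- Sketch: after this wrapper os.getenv and os.setenv treat variable names case
-- insensitively (keys are stored uppercase), in both the os.env and fallback branches.
os.setenv("MTX_TEST", "yes")
print(os.getenv("mtx_test"))   -- "yes"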
-
--- end of environment hack
-
-local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
-
+local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
-
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- local handle = io.popen(command,"r")
- return handle and handle:read("*all") or ""
+ local handle=io.popen(command,"r")
+ return handle and handle:read("*all") or ""
end
-
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
- else
- io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
- end
-end
-
-os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
-os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
-
-if os.type == "windows" then
- os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
+ if find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
+ else
+ io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
+ end
+end
+os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
+os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
+if os.type=="windows" then
+ os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
else
- os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
+ os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
end
-
+local launchers={
+ windows="start %s",
+ macosx="open %s",
+ unix="$BROWSER %s &> /dev/null &",
+}
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
-
if not os.times then
- -- utime = user time
- -- stime = system time
- -- cutime = children user time
- -- cstime = children system time
- function os.times()
- return {
- utime = os.gettimeofday(), -- user
- stime = 0, -- system
- cutime = 0, -- children user
- cstime = 0, -- children system
- }
- end
+ function os.times()
+ return {
+ utime=os.gettimeofday(),
+ stime=0,
+ cutime=0,
+ cstime=0,
+ }
+ end
end
-
-os.gettimeofday = os.gettimeofday or os.clock
-
-local startuptime = os.gettimeofday()
-
+os.gettimeofday=os.gettimeofday or os.clock
+local startuptime=os.gettimeofday()
function os.runtime()
- return os.gettimeofday() - startuptime
-end
-
-
--- no need for function anymore as we have more clever code and helpers now
--- this metatable trickery might as well disappear
-
-os.resolvers = os.resolvers or { } -- will become private
-
-local resolvers = os.resolvers
-
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
-local osix = osmt.__index
-
-osmt.__index = function(t,k)
- return (resolvers[k] or osix)(t,k)
-end
-
-setmetatable(os,osmt)
-
--- we can use HOSTTYPE on some platforms
-
-local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
-
+ return os.gettimeofday()-startuptime
+end
+os.resolvers=os.resolvers or {}
+local resolvers=os.resolvers
+setmetatable(os,{ __index=function(t,k)
+ local r=resolvers[k]
+ return r and r(t,k) or nil
+end })
+local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
local function guess()
- local architecture = os.resultof("uname -m") or ""
- if architecture ~= "" then
- return architecture
- end
- architecture = os.getenv("HOSTTYPE") or ""
- if architecture ~= "" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-
-if platform ~= "" then
-
- os.platform = platform
-
-elseif os.type == "windows" then
-
- -- we could set the variable directly, no function needed here
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "linux" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "macosx" then
-
- --[[
- Identifying the architecture of OSX is quite a mess and this
- is the best we can come up with. For some reason $HOSTTYPE is
- a kind of pseudo environment variable, not known to the current
- environment. And yes, uname cannot be trusted either, so there
-    is a chance that you end up with a 32 bit run on a 64 bit system.
- Also, some proper 64 bit intel macs are too cheap (low-end) and
- therefore not permitted to run the 64 bit kernel.
- ]]--
-
- function os.resolvers.platform(t,k)
- -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
- -- if architecture == "" then
- -- architecture = os.resultof("echo $HOSTTYPE") or ""
- -- end
- local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
- if architecture == "" then
- -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
- platform = "osx-intel"
- elseif find(architecture,"i386") then
- platform = "osx-intel"
- elseif find(architecture,"x86_64") then
- platform = "osx-64"
- else
- platform = "osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "sunos" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "freebsd" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "kfreebsd" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "kfreebsd-amd64"
- else
- platform = "kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
+ local architecture=os.resultof("uname -m") or ""
+ if architecture~="" then
+ return architecture
+ end
+ architecture=os.getenv("HOSTTYPE") or ""
+ if architecture~="" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+if platform~="" then
+ os.platform=platform
+elseif os.type=="windows" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform="mswin-64"
+ else
+ platform="mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="linux" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="linux-64"
+ elseif find(architecture,"ppc") then
+ platform="linux-ppc"
+ else
+ platform="linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="macosx" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
+ if architecture=="" then
+ platform="osx-intel"
+ elseif find(architecture,"i386") then
+ platform="osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform="osx-64"
+ else
+ platform="osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="sunos" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform="solaris-sparc"
+ else
+ platform="solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="freebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform="freebsd-amd64"
+ else
+ platform="freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="kfreebsd" then
+ function os.resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform="kfreebsd-amd64"
+ else
+ platform="kfreebsd-i386"
end
-
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
else
-
- -- platform = "linux"
- -- os.setenv("MTX_PLATFORM",platform)
- -- os.platform = platform
-
- function os.resolvers.platform(t,k)
- local platform = "linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-end
-
--- beware, we set the randomseed
-
--- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
--- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
--- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
--- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
---
--- as we don't call this function too often there is not so much risk on repetition
-
-local t = { 8, 9, "a", "b" }
-
+ function os.resolvers.platform(t,k)
+ local platform="linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+end
+local t={ 8,9,"a","b" }
function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
end
-
local d
-
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
- if delta then
- if d > 0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
+ d=d or tonumber(tonumber(date("%H")-date("!%H")))
+ if delta then
+ if d>0 then
+ return format("+%02i:00",d)
else
- return 1
- end
-end
-
-local memory = { }
-
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+local timeformat=format("%%s%s",os.timezone(true))
+local dateformat="!%Y-%m-%d %H:%M:%S"
+function os.fulltime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+local dateformat="%Y-%m-%d %H:%M:%S"
+function os.localtime(t,default)
+ t=tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=nil
+ end
+ return date(dateformat,t)
+end
+function os.converttime(t,default)
+ local t=tonumber(t)
+ if t and t>0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
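-- Sketch: os.fulltime and os.localtime format an epoch time and return the given
-- default when the value is not a positive number; os.converttime falls back to "-".
print(os.localtime(0, "unknown"))   -- "unknown"
print(os.converttime("oops"))       -- "-"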
+local memory={}
local function which(filename)
- local fullname = memory[filename]
- if fullname == nil then
- local suffix = file.suffix(filename)
- local suffixes = suffix == "" and os.binsuffixes or { suffix }
- for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
- local df = file.join(directory,filename)
- for i=1,#suffixes do
- local dfs = file.addsuffix(df,suffixes[i])
- if io.exists(dfs) then
- fullname = dfs
- break
- end
- end
- end
- if not fullname then
- fullname = false
+ local fullname=memory[filename]
+ if fullname==nil then
+ local suffix=file.suffix(filename)
+ local suffixes=suffix=="" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ local df=file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs=file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname=dfs
+ break
end
- memory[filename] = fullname
+ end
end
- return fullname
+ if not fullname then
+ fullname=false
+ end
+ memory[filename]=fullname
+ end
+ return fullname
+end
+os.which=which
+os.where=which
+function os.today()
+ return date("!*t")
+end
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S")
+end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
end
-
-os.which = which
-os.where = which
-
--- print(os.which("inkscape.exe"))
--- print(os.which("inkscape"))
--- print(os.which("gs.exe"))
--- print(os.which("ps2pdf"))
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
+package.loaded["l-file"] = package.loaded["l-file"] or true
-file = file or { }
-local file = file
-
-local insert, concat = table.insert, table.concat
-local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-
-local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
-
-local function dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
-end
+-- original size: 16648, stripped down to: 9051
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
local function basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+ return name and lpegmatch(pattern,name) or name
end
-
--- local function nameonly(name)
--- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
--- end
-
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
local function nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
-end
-
-local function extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
-end
-
-local function splitname(name)
- local n, s = match(name,"^(.+)%.([^/\\]-)$")
- return n or name, s or ""
-end
-
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
-
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
-end
-
-function file.addsuffix(filename, suffix, criterium)
- if not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = splitname(filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
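-- Sketch: file.splitname returns path, base and suffix captures, and
-- file.nametotable packs the same split into a table.
print(file.splitname("aa/bb/cc.dd"))   -- "aa/bb/"  "cc"  "dd"
local t = file.nametotable("aa/bb/cc.dd")
-- t = { path = "aa/bb/", name = "cc.dd", base = "cc", suffix = "dd" }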
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
return filename
+ end
end
- else
- local n, s = splitname(filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
end
- return n .. "." .. suffix
+ end
end
+ return (n or filename).."."..suffix
+ end
end
-
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
end
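-- Sketch of the suffix helpers built from the lpeg patterns above:
print(file.addsuffix("name", "tex"))           -- "name.tex"
print(file.addsuffix("name.tex", "pdf"))       -- "name.tex" (suffix already present)
print(file.replacesuffix("name.tex", "pdf"))   -- "name.pdf"
print(file.removesuffix("name.tex"))           -- "name"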
-
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...) -- rather dirty
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if not a or a == "" then -- not a added
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
end
-
-
--- We should be able to use:
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(dirname(name,"."))
--- return a and sub(a.permissions,2,2) == "w"
--- end
---
--- But after some testing Taco and I came up with:
-
function file.is_writable(name)
- if lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
- return false
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
end
-
+local readable=P("r")*Cc(true)
function file.is_readable(name)
- local a = attributes(name)
- return a and sub(a.permissions,1,1) == "r"
-end
-
-file.isreadable = file.is_readable -- deprecated
-file.iswritable = file.is_writable -- deprecated
-
--- todo: lpeg \\ / .. does not save much
-
-local checkedsplit = string.checkedsplit
-
-function file.splitpath(str,separator) -- string
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return concat(tab,separator or io.pathseparator) -- can have trailing //
-end
-
--- we can hash them weakly
-
-
-function file.collapsepath(str,anchor)
- if anchor and not find(str,"^/") and not find(str,"^%a:") then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif find(str,"^%.%.") then
- str = gsub(str,"\\","/")
- return str
- elseif not find(str,"%.") then
- str = gsub(str,"\\","/")
- return str
- end
- str = gsub(str,"\\","/")
- local starter, rest = match(str,"^(%a+:/*)(.-)$")
- if starter then
- str = rest
- end
- local oldelements = checkedsplit(str,"/")
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif find(str,"^/") then
- return "/" .. concat(newelements,'/')
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
else
- return concat(newelements, '/')
- end
-end
-
-
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
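-- Sketch: file.join normalizes slashes while keeping network and root prefixes,
-- and file.collapsepath resolves "." and ".." segments.
print(file.join("a", "b", "c.txt"))          -- "a/b/c.txt"
print(file.join("/", "etc", "fstab"))        -- "/etc/fstab"
print(file.join("//nas", "share", "x.pdf"))  -- "//nas/share/x.pdf"
print(file.collapsepath("a/b/../c"))         -- "a/c"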
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
function file.robustname(str,strict)
- str = gsub(str,"[^%a%d%/%-%.\\]+","-")
+ if str then
+ str=lpegmatch(pattern_a,str) or str
if strict then
- return lower(gsub(str,"^%-*(.-)%-*$","%1"))
+ return lpegmatch(pattern_b,str) or str
else
- return str
+ return str
end
+ end
end
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
+file.readdata=io.loaddata
+file.savedata=io.savedata
function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
-end
-
--- lpeg variants, slightly faster, not always
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
-local rootbased = P("/") + letter*P(":")
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename)~=nil
end
-
function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
+ return filename and lpegmatch(rootbased,filename)~=nil
end
-
--- actually these are schemes
-
-local slash = S("\\/")
-local period = P(".")
-local drive = C(R("az","AZ")) * P(":")
-local path = C(((1-slash)^0 * slash)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
-
-function file.splitname(str,splitdrive)
- if splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
-end
-
-function file.nametotable(str,splitdrive) -- returns table
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
-
--- for myself:
-
function file.strip(name,dir)
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
end
@@ -3060,64 +3065,81 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-md5'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This also provides file checksums and checkers.
+package.loaded["l-md5"] = package.loaded["l-md5"] or true
-local md5, file = md5, file
-local gsub, format, byte = string.gsub, string.format, string.byte
+-- original size: 3760, stripped down to: 2088
+if not modules then modules={} end modules ['l-md5']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not md5 then
+ md5=optionalrequire("md5")
+end
+if not md5 then
+ md5={
+ sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+local md5,file=md5,file
+local gsub,format,byte=string.gsub,string.format,string.byte
+local md5sum=md5.sum
local function convert(str,fmt)
- return (gsub(md5.sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
end
-
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
-
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime=lfs.attributes(newname,"modification")
+ if not newtime then
+ return true
+ elseif newtime>=oldtime then
+ return false
+ elseif oldtime-newtime<(threshold or 1) then
+ return false
else
- return true
+ return true
end
+ else
+ return false
+ end
+end
+file.needs_updating=file.needsupdating
+function file.syncmtimes(oldname,newname)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
end
-
function file.checksum(name)
- if md5 then
- local data = io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
+ if md5 then
+ local data=io.loaddata(name)
+ if data then
+ return md5.HEX(data)
end
- return nil
+ end
+ return nil
end
-
function file.loadchecksum(name)
- if md5 then
- local data = io.loaddata(name .. ".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
+ if md5 then
+ local data=io.loaddata(name..".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
end
-
-function file.savechecksum(name, checksum)
- if not checksum then checksum = file.checksum(name) end
- if checksum then
- io.savedata(name .. ".md5",checksum)
- return checksum
- end
- return nil
+function file.savechecksum(name,checksum)
+ if not checksum then checksum=file.checksum(name) end
+ if checksum then
+ io.savedata(name..".md5",checksum)
+ return checksum
+ end
+ return nil
end
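-- a hedged sketch of the checksum helpers (hypothetical file name; a real digest
-- needs the md5 library to be loaded)
local sum = file.checksum("somefile.tex")       -- uppercase hex digest of the contents, or nil
file.savechecksum("somefile.tex")               -- writes the digest to somefile.tex.md5
print(file.loadchecksum("somefile.tex") == sum) -- true once the .md5 file exists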
@@ -3125,594 +3147,546 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-url'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
-local concat = table.concat
-local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
-local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
+package.loaded["l-url"] = package.loaded["l-url"] or true
--- from wikipedia:
---
--- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
--- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
--- | | | | | | | |
--- | userinfo hostname port | | query fragment
--- | \________________________________/\_____________|____|/
--- scheme | | | |
--- | authority path | |
--- | | |
--- | path interpretable as filename
--- | ___________|____________ |
--- / \ / \ |
--- urn:example:animal:ferret:nose interpretable as extension
-
-url = url or { }
-local url = url
-
-local tochar = function(s) return char(tonumber(s,16)) end
-
-local colon = P(":")
-local qmark = P("?")
-local hash = P("#")
-local slash = P("/")
-local percent = P("%")
-local endofstring = P(-1)
-
-local hexdigit = R("09","AF","af")
-local plus = P("+")
-local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
--- we also assume that when we have a scheme, we also have an authority
-
-local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
-local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
-local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
-local fragmentstr = Cs((escaped+(1- endofstring))^0)
-
-local scheme = schemestr * colon + nothing
-local authority = slash * slash * authoritystr + nothing
-local path = slash * pathstr + nothing
-local query = qmark * querystr + nothing
-local fragment = hash * fragmentstr + nothing
-
-local validurl = scheme * authority * path * query * fragment
-local parser = Ct(validurl)
-
-lpegpatterns.url = validurl
-lpegpatterns.urlsplitter = parser
-
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-
-lpegpatterns.urlescaper = escaper
-
--- todo: reconsider Ct as we can as well have five return values (saves a table)
--- so we can have two parsers, one with and one without
+-- original size: 11806, stripped down to: 5417
+if not modules then modules={} end modules ['l-url']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local char,format,byte=string.char,string.format,string.byte
+local concat=table.concat
+local tonumber,type=tonumber,type
+local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
+local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
+url=url or {}
+local url=url
+local tochar=function(s) return char(tonumber(s,16)) end
+local colon=P(":")
+local qmark=P("?")
+local hash=P("#")
+local slash=P("/")
+local percent=P("%")
+local endofstring=P(-1)
+local hexdigit=R("09","AF","af")
+local plus=P("+")
+local nothing=Cc("")
+local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
+local escaped=(plus/" ")+escapedchar
+local noslash=P("/")/""
+local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr=Cs((escaped+(1- qmark-hash))^0)
+local querystr=Cs(((1- hash))^0)
+local fragmentstr=Cs((escaped+(1- endofstring))^0)
+local scheme=schemestr*colon+nothing
+local authority=slash*slash*authoritystr+nothing
+local path=slash*pathstr+nothing
+local query=qmark*querystr+nothing
+local fragment=hash*fragmentstr+nothing
+local validurl=scheme*authority*path*query*fragment
+local parser=Ct(validurl)
+lpegpatterns.url=validurl
+lpegpatterns.urlsplitter=parser
+local escapes={}
+setmetatable(escapes,{ __index=function(t,k)
+ local v=format("%%%02X",byte(k))
+ t[k]=v
+ return v
+end })
+local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
+local unescaper=Cs((escapedchar+1)^0)
+lpegpatterns.urlunescaped=escapedchar
+lpegpatterns.urlescaper=escaper
+lpegpatterns.urlunescaper=unescaper
local function split(str)
- return (type(str) == "string" and lpegmatch(parser,str)) or str
+ return (type(str)=="string" and lpegmatch(parser,str)) or str
end
-
-local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
-
+local isscheme=schemestr*colon*slash*slash
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
-end
-
-
--- todo: cache them
-
-local rootletter = R("az","AZ")
- + S("_-+")
-local separator = P("://")
-local qualified = P(".")^0 * P("/")
- + rootletter * P(":")
- + rootletter^1 * separator
- + rootletter^1 * P("/")
-local rootbased = P("/")
- + rootletter * P(":")
-
-local barswapper = replacer("|",":")
-local backslashswapper = replacer("\\","/")
-
-local function hashed(str) -- not yet ok (/test?test)
- local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
- if not somescheme and not somequery then
- s = {
- scheme = "file",
- authority = "",
- path = str,
- query = "",
- fragment = "",
- original = str,
- noscheme = true,
- filename = str,
- }
- else -- not always a filename but handy anyway
- local authority, path, filename = s[2], s[3]
- if authority == "" then
- filename = path
- else
- filename = authority .. "/" .. path
- end
- s = {
- scheme = s[1],
- authority = authority,
- path = path,
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = false,
- filename = filename,
- }
- end
- return s
-end
-
--- Here we assume:
---
--- files: /// = relative
--- files: //// = absolute (!)
-
-
-
-url.split = split
-url.hasscheme = hasscheme
-url.hashed = hashed
-
-function url.addscheme(str,scheme) -- no authority
- if hasscheme(str) then
- return str
- elseif not scheme then
- return "file:///" .. str
+ if str then
+ local scheme=lpegmatch(isscheme,str)
+ return scheme~="" and scheme or false
+ else
+ return false
+ end
+end
+local rootletter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
+local rootbased=P("/")+rootletter*P(":")
+local barswapper=replacer("|",":")
+local backslashswapper=replacer("\\","/")
+local equal=P("=")
+local amp=P("&")
+local key=Cs(((escapedchar+1)-equal )^0)
+local value=Cs(((escapedchar+1)-amp -endofstring)^0)
+local splitquery=Cf (Ct("")*P { "sequence",
+ sequence=V("pair")*(amp*V("pair"))^0,
+ pair=Cg(key*equal*value),
+},rawset)
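-- sketch of what the query splitter yields (illustrative input): keys and values
-- are percent-decoded and folded into a table with rawset
lpegmatch(splitquery,"type=animal&name=narwhal") -- { type = "animal", name = "narwhal" }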
+local function hashed(str)
+ if str=="" then
+ return {
+ scheme="invalid",
+ original=str,
+ }
+ end
+ local s=split(str)
+ local rawscheme=s[1]
+ local rawquery=s[4]
+ local somescheme=rawscheme~=""
+ local somequery=rawquery~=""
+ if not somescheme and not somequery then
+ s={
+ scheme="file",
+ authority="",
+ path=str,
+ query="",
+ fragment="",
+ original=str,
+ noscheme=true,
+ filename=str,
+ }
+ else
+ local authority,path,filename=s[2],s[3]
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
else
- return scheme .. ":///" .. str
- end
-end
-
-function url.construct(hash) -- dodo: we need to escape !
- local fullurl, f = { }, 0
- local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
- if scheme and scheme ~= "" then
- f = f + 1 ; fullurl[f] = scheme .. "://"
- end
- if authority and authority ~= "" then
- f = f + 1 ; fullurl[f] = authority
- end
- if path and path ~= "" then
- f = f + 1 ; fullurl[f] = "/" .. path
- end
- if query and query ~= "" then
- f = f + 1 ; fullurl[f] = "?".. query
- end
- if fragment and fragment ~= "" then
- f = f + 1 ; fullurl[f] = "#".. fragment
- end
- return lpegmatch(escaper,concat(fullurl))
-end
-
+ filename=authority.."/"..path
+ end
+ s={
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=s[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
+ end
+ return s
+end
+url.split=split
+url.hasscheme=hasscheme
+url.hashed=hashed
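-- a minimal sketch of the table url.hashed returns, assuming the grammar above
-- (example.com is just a placeholder)
local u = url.hashed("http://example.com/docs/file.pdf?lang=en")
-- u.scheme    : "http"
-- u.authority : "example.com"
-- u.path      : "docs/file.pdf"
-- u.queries   : { lang = "en" }
-- u.filename  : "example.com/docs/file.pdf"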
+function url.addscheme(str,scheme)
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///"..str
+ else
+ return scheme..":///"..str
+ end
+end
+function url.construct(hash)
+ local fullurl,f={},0
+ local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
+ if scheme and scheme~="" then
+ f=f+1;fullurl[f]=scheme.."://"
+ end
+ if authority and authority~="" then
+ f=f+1;fullurl[f]=authority
+ end
+ if path and path~="" then
+ f=f+1;fullurl[f]="/"..path
+ end
+ if query and query~="" then
+ f=f+1;fullurl[f]="?"..query
+ end
+ if fragment and fragment~="" then
+ f=f+1;fullurl[f]="#"..fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
+local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
function url.filename(filename)
- local t = hashed(filename)
- return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
+ local spec=hashed(filename)
+ local path=spec.path
+ return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
end
-
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+url.escape=escapestring
function url.query(str)
- if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
- else
- return str
- end
+ if type(str)=="string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
+function url.toquery(data)
+ local td=type(data)
+ if td=="string" then
+    return #data>0 and escapestring(data) or nil
+ elseif td=="table" then
+ if next(data) then
+ local t={}
+ for k,v in next,data do
+ t[#t+1]=format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ end
+end
+local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
+function url.barepath(path)
+ if not path or path=="" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
end
-
-
-
-
-
-
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-dir'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- dir.expandname will be merged with cleanpath and collapsepath
-
-local type = type
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
-local concat, insert, remove = table.concat, table.insert, table.remove
-local lpegmatch = lpeg.match
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
-
-dir = dir or { }
-local dir = dir
-local lfs = lfs
+package.loaded["l-dir"] = package.loaded["l-dir"] or true
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-local isdir = lfs.isdir
-local isfile = lfs.isfile
-local currentdir = lfs.currentdir
-
--- handy
+-- original size: 13139, stripped down to: 8196
+if not modules then modules={} end modules ['l-dir']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,select=type,select
+local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local lpegmatch=lpeg.match
+local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
+dir=dir or {}
+local dir=dir
+local lfs=lfs
+local attributes=lfs.attributes
+local walkdir=lfs.dir
+local isdir=lfs.isdir
+local isfile=lfs.isfile
+local currentdir=lfs.currentdir
+local chdir=lfs.chdir
+if not isdir then
+ function isdir(name)
+ local a=attributes(name)
+ return a and a.mode=="directory"
+ end
+ lfs.isdir=isdir
+end
+if not isfile then
+ function isfile(name)
+ local a=attributes(name)
+ return a and a.mode=="file"
+ end
+ lfs.isfile=isfile
+end
function dir.current()
- return (gsub(currentdir(),"\\","/"))
+ return (gsub(currentdir(),"\\","/"))
end
-
--- optimizing for no find (*) does not save time
-
-
-local lfsisdir = isdir
-
+local lfsisdir=isdir
local function isdir(path)
- path = gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+ path=gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
end
-
-lfs.isdir = isdir
-
+lfs.isdir=isdir
local function globpattern(path,patt,recurse,action)
- if path == "/" then
- path = path .. "."
- elseif not find(path,"/$") then
- path = path .. '/'
- end
- if isdir(path) then -- lfs.isdir does not like trailing /
- for name in walkdir(path) do -- lfs.dir accepts trailing /
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
- end
- end
- end
-end
-
-dir.globpattern = globpattern
-
+ if path=="/" then
+ path=path.."."
+ elseif not find(path,"/$") then
+ path=path..'/'
+ end
+ if isdir(path) then
+ for name in walkdir(path) do
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+dir.globpattern=globpattern
local function collectpattern(path,patt,recurse,result)
- local ok, scanner
- result = result or { }
- if path == "/" then
- ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner, first do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
- result[name] = attr
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collectpattern(full,patt,recurse)
- result[name] = attr
- end
- end
- end
- return result
-end
-
-dir.collectpattern = collectpattern
-
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
+ local ok,scanner
+ result=result or {}
+ if path=="/" then
+ ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
+ else
+ ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
+ end
+ if ok and type(scanner)=="function" then
+ if not find(path,"/$") then path=path..'/' end
+ for name in scanner,first do
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
+ attr.list=collectpattern(full,patt,recurse)
+ result[name]=attr
+ end
+ end
+ end
+ return result
+end
+dir.collectpattern=collectpattern
+local pattern=Ct {
+ [1]=(C(P(".")+P("/")^1)+C(R("az","AZ")*P(":")*P("/")^0)+Cc("./"))*V(2)*V(3),
+ [2]=C(((1-S("*?/"))^0*P("/"))^0),
+ [3]=C(P(1)^0)
}
-
-local filter = Cs ( (
- P("**") / ".*" +
- P("*") / "[^/]*" +
- P("?") / "[^/]" +
- P(".") / "%%." +
- P("+") / "%%+" +
- P("-") / "%%-" +
- P(1)
+local filter=Cs ((
+ P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
)^0 )
-
local function glob(str,t)
- if type(t) == "function" then
- if type(str) == "table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif isfile(str) then
- t(str)
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,t)
- end
- end
+ if type(t)=="function" then
+ if type(str)=="table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
else
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif isfile(str) then
- if t then
- t[#t+1] = str
- return t
- else
- return { str }
- end
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,action)
- return t
- else
- return { }
- end
+ local split=lpegmatch(pattern,str)
+ if split then
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str)=="table" then
+ local t=t or {}
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1]=str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split=lpegmatch(pattern,str)
+ if split then
+ local t=t or {}
+ local action=action or function(name) t[#t+1]=name end
+ local root,path,base=split[1],split[2],split[3]
+ local recurse=find(base,"%*%*")
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return {}
+ end
+ end
+ end
+end
+dir.glob=glob
+local function globfiles(path,recurse,func,files)
+ if type(func)=="string" then
+ local s=func
+ func=function(name) return find(name,s) end
+ end
+ files=files or {}
+ local noffiles=#files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ else
+ local mode=attributes(name,'mode')
+ if mode=="directory" then
+ if recurse then
+ globfiles(path.."/"..name,recurse,func,files)
end
- end
-end
-
-dir.glob = glob
-
-
-local function globfiles(path,recurse,func,files) -- func == pattern or function
- if type(func) == "string" then
- local s = func
- func = function(name) return find(name,s) end
- end
- files = files or { }
- local noffiles = #files
- for name in walkdir(path) do
- if find(name,"^%.") then
- --- skip
- else
- local mode = attributes(name,'mode')
- if mode == "directory" then
- if recurse then
- globfiles(path .. "/" .. name,recurse,func,files)
- end
- elseif mode == "file" then
- if not func or func(name) then
- noffiles = noffiles + 1
- files[noffiles] = path .. "/" .. name
- end
- end
+ elseif mode=="file" then
+ if not func or func(name) then
+ noffiles=noffiles+1
+ files[noffiles]=path.."/"..name
end
+ end
end
- return files
+ end
+ return files
end
-
-dir.globfiles = globfiles
-
--- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
--- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
--- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
--- t = dir.glob("f:/minimal/tex/**/*")
--- print(dir.ls("f:/minimal/tex/**/*"))
--- print(dir.ls("*.tex"))
-
+dir.globfiles=globfiles
function dir.ls(pattern)
- return concat(glob(pattern),"\n")
+ return concat(glob(pattern),"\n")
end
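-- illustrative glob patterns (not actual paths): ** recurses into subdirectories,
-- * and ? stay within one path segment
local t = dir.glob("texmf/tex/**/*.mkiv")        -- table of matching full names
dir.glob("*.tex",function(name) print(name) end) -- callback form, no table built
print(dir.ls("*.tex"))                           -- newline separated listing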
-
-
-local make_indeed = true -- false
-
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
-
+local make_indeed=true
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if onwindows then
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
+ end
+ local first,middle,last
+ local drive=false
+ first,middle,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ else
+ first,last=match(str,"^(//)/*(.-)$")
+ if first then
+ middle,last=match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth="//"..middle
+ else
+ pth="//"..last
+ last=""
end
- local first, middle, last
- local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
+ else
+ first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
if first then
- -- empty network path == local path
+ pth,drive=first..middle,true
else
- first, last = match(str,"^(//)/*(.-)$")
- if first then
- middle, last = match(str,"([^/]+)/+(.-)$")
- if middle then
- pth = "//" .. middle
- else
- pth = "//" .. last
- last = ""
- end
- else
- first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth, drive = first .. middle, true
- else
- middle, last = match(str,"^(/*)(.-)$")
- if not middle then
- last = str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth == "" then
- pth = s
- elseif drive then
- pth, drive = pth .. s, false
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (isdir(pth) == true)
- end
-
-
+ middle,last=match(str,"^(/*)(.-)$")
+ if not middle then
+ last=str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth=="" then
+ pth=s
+ elseif drive then
+ pth,drive=pth..s,false
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
else
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- str = gsub(str,"/+","/")
- if find(str,"^/") then
- pth = "/"
- for s in gmatch(str,"[^/]+") do
- local first = (pth == "/")
- if first then
- pth = pth .. s
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ function dir.mkdirs(...)
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
else
- pth = "."
- for s in gmatch(str,"[^/]+") do
- pth = pth .. "/" .. s
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
+ str=s
end
- return pth, (isdir(pth) == true)
+ end
end
-
-
-end
-
-dir.makedirs = dir.mkdirs
-
--- we can only define it here as it uses dir.current
-
-if onwindows then
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ str=gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth="/"
+ for s in gmatch(str,"[^/]+") do
+ local first=(pth=="/")
if first then
- first = dir.current() .. "/"
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = currentdir()
- if lfs.chdir(first) then
- first = dir.current()
- end
- lfs.chdir(d)
- end
+ pth=pth..s
+ else
+ pth=pth.."/"..s
end
- if not first then
- first, last = dir.current(), str
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" or last == "." then
- return first
- else
- return first .. "/" .. last
+ end
+ else
+ pth="."
+ for s in gmatch(str,"[^/]+") do
+ pth=pth.."/"..s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
end
+ end
end
-
+ return pth,(isdir(pth)==true)
+ end
+end
+dir.makedirs=dir.mkdirs
+if onwindows then
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=dir.current().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first,last=dir.current(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
+ end
+ end
else
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = currentdir() .. "/" .. str
- end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- str = gsub(str,"(.)/%.$","%1")
- return str
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
end
-
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
+ end
end
-
-file.expandname = dir.expandname -- for convenience
-
-local stack = { }
-
+file.expandname=dir.expandname
+local stack={}
function dir.push(newdir)
- insert(stack,lfs.currentdir())
+ insert(stack,currentdir())
+ if newdir and newdir~="" then
+ chdir(newdir)
+ end
end
-
function dir.pop()
- local d = remove(stack)
- if d then
- lfs.chdir(d)
- end
- return d
+ local d=remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
end
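-- sketch of the reworked pair: push now also changes directory when a target is given
dir.push("build") -- remember the current directory, then chdir("build")
-- ... work relative to build ...
dir.pop()         -- chdir back to the remembered directory and return it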
@@ -3720,55 +3694,71 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
+-- original size: 1781, stripped down to: 1503
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
+ if b then return 1 else return 0 end
end
-
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- else
- return str
- end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-string.toboolean = toboolean
-
function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
end
- return default
+ end
+ return default
end
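-- a few illustrative cases for the rewritten converters
toboolean("true")                     -- true
toboolean("yes")                      -- false (only accepted in tolerant mode)
toboolean("yes",true)                 -- true
string.is_boolean("off")              -- false
string.is_boolean("maybe","fallback") -- "fallback" (returned for unrecognized strings)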
@@ -3776,360 +3766,536 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-unicode'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not unicode then
-
- unicode = { utf8 = { } }
-
- local floor, char = math.floor, string.char
-
- function unicode.utf8.utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
- end
-
-end
-
-local unicode = unicode
-
-utf = utf or unicode.utf8
-
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
-
-local utfsplitlines = string.utfsplitlines
+package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- 0 EF BB BF UTF-8
--- 1 FF FE UTF-16-little-endian
--- 2 FE FF UTF-16-big-endian
--- 3 FF FE 00 00 UTF-32-little-endian
--- 4 00 00 FE FF UTF-32-big-endian
+-- original size: 26810, stripped down to: 11943
-unicode.utfname = {
- [0] = 'utf-8',
- [1] = 'utf-16-le',
- [2] = 'utf-16-be',
- [3] = 'utf-32-le',
- [4] = 'utf-32-be'
+if not modules then modules={} end modules ['l-unicode']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated
-
-function unicode.utftype(f)
- local str = f:read(4)
- if not str then
- f:seek('set')
- return 0
- -- elseif find(str,"^%z%z\254\255") then -- depricated
- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
- return 4
- -- elseif find(str,"^\255\254%z%z") then -- depricated
- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
- return 3
- elseif find(str,"^\254\255") then
- f:seek('set',2)
- return 2
- elseif find(str,"^\255\254") then
- f:seek('set',2)
- return 1
- elseif find(str,"^\239\187\191") then
- f:seek('set',3)
- return 0
+utf=utf or (unicode and unicode.utf8) or {}
+utf.characters=utf.characters or string.utfcharacters
+utf.values=utf.values or string.utfvalues
+local type=type
+local char,byte,format,sub=string.char,string.byte,string.format,string.sub
+local concat=table.concat
+local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local bytepairs=string.bytepairs
+local finder=lpeg.finder
+local replacer=lpeg.replacer
+local utfvalues=utf.values
+local utfgmatch=utf.gmatch
+local p_utftype=patterns.utftype
+local p_utfoffset=patterns.utfoffset
+local p_utf8char=patterns.utf8char
+local p_utf8byte=patterns.utf8byte
+local p_utfbom=patterns.utfbom
+local p_newline=patterns.newline
+local p_whitespace=patterns.whitespace
+if not unicode then
+ unicode={ utf=utf }
+end
+if not utf.char then
+ local floor,char=math.floor,string.char
+ function utf.char(n)
+ if n<0x80 then
+ return char(n)
+ elseif n<0x800 then
+ return char(
+ 0xC0+floor(n/0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x10000 then
+ return char(
+ 0xE0+floor(n/0x1000),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x200000 then
+ return char(
+ 0xF0+floor(n/0x40000),
+ 0x80+(floor(n/0x1000)%0x40),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
else
- f:seek('set')
- return 0
+ return ""
+ end
+ end
+end
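-- sketch of the fallback encoder (only used when no native utf.char is present)
print(utf.char(0x41))   -- A
print(utf.char(0x20AC)) -- € (the three bytes E2 82 AC)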
+if not utf.byte then
+ local utf8byte=patterns.utf8byte
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+end
+local utfchar,utfbyte=utf.char,utf.byte
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+local toentities=Cs (
+ (
+ patterns.utf8one+(
+ patterns.utf8two+patterns.utf8three+patterns.utf8four
+ )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+patterns.toentities=toentities
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+local one=P(1)
+local two=C(1)*C(1)
+local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
+local pattern=P("\254\255")*Cs((
+ four/function(a,b,c,d)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(a,b)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )+P("\255\254")*Cs((
+ four/function(b,a,d,c)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(b,a)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s
+end
+local validatedutf=Cs (
+ (
+ patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
+ )^0
+)
+patterns.validatedutf=validatedutf
+function utf.is_valid(str)
+ return type(str)=="string" and lpegmatch(validatedutf,str) or false
+end
+if not utf.len then
+ local n,f=0,1
+ local utfcharcounter=patterns.utfbom^-1*Cmt (
+ Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
+ function(_,t,d)
+ n=n+(t-f)/d
+ f=t
+ return true
+ end
+ )^0
+ function utf.len(str)
+ n,f=0,1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+end
+utf.length=utf.len
+if not utf.sub then
+ local utflength=utf.length
+ local b,e,n,first,last=0,0,0,0,0
+ local function slide_zero(s,p)
+ n=n+1
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_one(s,p)
+ n=n+1
+ if n==first then
+ b=p
end
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_two(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ else
+ return true
+ end
+ end
+ local pattern_zero=Cmt(p_utf8char,slide_zero)^0
+ local pattern_one=Cmt(p_utf8char,slide_one )^0
+ local pattern_two=Cmt(p_utf8char,slide_two )^0
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start==0 then
+ start=1
+ end
+ if not stop then
+ if start<0 then
+ local l=utflength(str)
+ start=l+start
+ else
+ start=start-1
+ end
+ b,n,first=0,0,start
+ lpegmatch(pattern_two,str)
+ if n>=first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start<0 or stop<0 then
+ local l=utf.length(str)
+ if start<0 then
+ start=l+start
+ if start<=0 then
+ start=1
+ else
+ start=start+1
+ end
+ end
+ if stop<0 then
+ stop=l+stop
+ if stop==0 then
+ stop=1
+ else
+ stop=stop+1
+ end
+ end
+ end
+ if start>stop then
+ return ""
+ elseif start>1 then
+ b,e,n,first,last=0,0,0,start-1,stop
+ lpegmatch(pattern_one,str)
+ if n>=first and e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ else
+ b,e,n,last=1,0,0,stop
+ lpegmatch(pattern_zero,str)
+ if e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ end
+ end
+end
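-- character based (not byte based) length and substrings, sketched with the fallbacks above
utf.len("àbç")     -- 3 characters (5 bytes)
utf.sub("àbç",2)   -- "bç"
utf.sub("àbç",1,2) -- "àb"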
+function utf.remapper(mapping)
+ local pattern=Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+end
+function utf.replacer(t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+function utf.subtituter(t)
+ local f=finder (t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ local i=lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i>#str then
+ return str
+ else
+ return lpegmatch(r,str)
+ end
+ end
+end
+local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
+local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
+patterns.utflinesplitter=utflinesplitter
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+function utf.split(str,ignorewhitespace)
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+function utf.totable(str)
+ return lpegmatch(utfcharsplitter_raw,str)
+end
+function utf.magic(f)
+ local str=f:read(4) or ""
+ local off=lpegmatch(p_utfoffset,str)
+ if off<4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
end
-
-
-
local function utf16_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*left+right
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf16_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(str)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,0
+ for left,right in bytepairs(t[i]) do
+ if right then
+ local now=256*right+left
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ r=r+1
+ result[r]=utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ else
+ r=r+1
+ result[r]=utfchar(now)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_be(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
- else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*256*256*a+256*256*b
+ else
+ r=r+1
+     result[r]=utfchar(more+256*a+b)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
local function utf32_to_utf8_le(t)
- if type(t) == "string" then
- t = utfsplitlines(t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
- r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
- end
- else
- break
- end
+ if type(t)=="string" then
+ t=lpegmatch(utflinesplitter,t)
+ end
+ local result={}
+ for i=1,#t do
+ local r,more=0,-1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more<0 then
+ more=256*b+a
+ else
+ r=r+1
+     result[r]=utfchar(more+256*256*256*b+256*256*a)
+ more=-1
end
- t[i] = concat(result,"",1,r)
+ else
+ break
+ end
end
- return t
+ t[i]=concat(result,"",1,r)
+ end
+ return t
end
-
-unicode.utf32_to_utf8_be = utf32_to_utf8_be
-unicode.utf32_to_utf8_le = utf32_to_utf8_le
-unicode.utf16_to_utf8_be = utf16_to_utf8_be
-unicode.utf16_to_utf8_le = utf16_to_utf8_le
-
-function unicode.utf8_to_utf8(t)
- return type(t) == "string" and utfsplitlines(t) or t
+utf.utf32_to_utf8_be=utf32_to_utf8_be
+utf.utf32_to_utf8_le=utf32_to_utf8_le
+utf.utf16_to_utf8_be=utf16_to_utf8_be
+utf.utf16_to_utf8_le=utf16_to_utf8_le
+function utf.utf8_to_utf8(t)
+ return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-
-function unicode.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
end
-
-function unicode.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
end
-
local function little(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b%256,b/256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b%256,b/256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
end
-
local function big(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b/256,b%256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
+ local b=byte(c)
+ if b<0x10000 then
+ return char(b/256,b%256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+local _,l_remap=utf.remapper(little)
+local _,b_remap=utf.remapper(big)
+function utf.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254)..lpegmatch(l_remap,str)
+ else
+ return char(254,255)..lpegmatch(b_remap,str)
+ end
+end
+local pattern=Cs (
+ (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
end
-
-function unicode.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+function utf.ustring(s)
+ return format("U+%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.xstring(s)
+ return format("0x%05X",type(s)=="number" and s or utfbyte(s))
+end
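-- small sketch of the new tracing helpers
utf.tocodes("ab")     -- "0x0061 0x0062"
utf.tocodes("ab","+") -- "0x0061+0x0062"
utf.ustring("A")      -- "U+00041"
utf.xstring(0x41)     -- "0x00041"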
+local p_nany=p_utf8char/""
+if utfgmatch then
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local n=0
+ for _ in utfgmatch(str,what) do
+ n=n+1
+ end
+ return n
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+else
+ local cache={}
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local p=cache[what]
+ if not p then
+ p=Cs((P(what)/" "+p_nany)^0)
+      cache[what]=p
+ end
+ return #lpegmatch(p,str)
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+end
+if not utf.characters then
+ function utf.characters(str)
+    return string.gmatch(str,".[\128-\191]*")
+ end
+ string.utfcharacters=utf.characters
+end
+if not utf.values then
+ local find=string.find
+ local dummy=function()
+ end
+ function utf.values(str)
+ local n=#str
+ if n==0 then
+ return dummy
+ elseif n==1 then
+ return function() return utfbyte(str) end
else
- return char(254,255) .. utfgsub(str,".",big)
+ local p=1
+ return function()
+ local b,e=find(str,".[\128-\191]*",p)
+ if b then
+ p=e+1
+ return utfbyte(sub(str,b,e))
+ end
+ end
end
+ end
+ string.utfvalues=utf.values
end
-function unicode.utfcodes(str)
- local t, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- t[n] = format("0x%04X",u)
- end
- return concat(t,separator or " ")
-end
-function unicode.ustring(s)
- return format("U+%05X",type(s) == "number" and s or utfbyte(s))
-end
+end -- of closure
-function unicode.xstring(s)
- return format("0x%05X",type(s) == "number" and s or utfbyte(s))
-end
+do -- create closure to overcome 200 locals limit
+package.loaded["l-math"] = package.loaded["l-math"] or true
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
+-- original size: 915, stripped down to: 836
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
+if not math.mod then
+ function math.mod(n,m) return n%m end
end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
end
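-- the compatibility definitions above only kick in when missing; a quick sketch
math.round(2.5) -- 3
math.div(7,2)   -- 3 (integer division)
math.sind(90)   -- 1 (degree based variant)
math.odd(3)     -- true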
@@ -4137,213 +4303,852 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-str"] = package.loaded["util-str"] or true
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
-end
+-- original size: 22834, stripped down to: 12570
-if not math.div then
- function math.div(n,m) return floor(n/m) end
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
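-- sketch of the dimension formatters: input is scaled points (65536 sp = 1 pt),
-- assuming patterns.stripzeros trims trailing zeros
number.points(65536)     -- "1pt"
number.points(32768)     -- "0.5pt"
number.basepoints(65536) -- about "0.99626bp" (7200/7227 scaling)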
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4,0
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
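-- sketch: tabs are expanded to the next multiple of the given width (7 by default)
strings.tabtospace("a\tb",8) -- "a" followed by 7 spaces, then "b"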
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
+local function use(t,fmt,...)
+ return t[fmt](...)
end
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-tab'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-tab"] = package.loaded["util-tab"] or true
-utilities = utilities or {}
-utilities.tables = utilities.tables or { }
-local tables = utilities.tables
-
-local format, gmatch, rep = string.format, string.gmatch, string.rep
-local concat, insert, remove = table.concat, table.insert, table.remove
-local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-
-function tables.definetable(target) -- defines undefined tables
- local composed, t, n = nil, { }, 0
- for name in gmatch(target,"([^%.]+)") do
- n = n + 1
- if composed then
- composed = composed .. "." .. name
- else
- composed = name
- end
- t[n] = format("%s = %s or { }",composed,composed)
- end
- return concat(t,"\n")
-end
+-- original size: 14491, stripped down to: 8512
+if not modules then modules={} end modules ['util-tab']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.tables=utilities.tables or {}
+local tables=utilities.tables
+local format,gmatch,gsub=string.format,string.gmatch,string.gsub
+local concat,insert,remove=table.concat,table.insert,table.remove
+local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
+local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
+local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
+local serialize,sortedkeys,sortedpairs=table.serialize,table.sortedkeys,table.sortedpairs
+local formatters=string.formatters
+local splitter=lpeg.tsplitat(".")
+function tables.definetable(target,nofirst,nolast)
+ local composed,shortcut,t=nil,nil,{}
+ local snippets=lpegmatch(splitter,target)
+ for i=1,#snippets-(nolast and 1 or 0) do
+ local name=snippets[i]
+ if composed then
+ composed=shortcut.."."..name
+ shortcut=shortcut.."_"..name
+ t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ else
+ composed=name
+ shortcut=name
+ if not nofirst then
+ t[#t+1]=formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed=shortcut.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+end
+function tables.definedtable(...)
+ local t=_G
+ for i=1,select("#",...) do
+ local li=select(i,...)
+ local tl=t[li]
+ if not tl then
+ tl={}
+ t[li]=tl
+ end
+ t=tl
+ end
+ return t
+end
function tables.accesstable(target,root)
- local t = root or _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t=t[name]
+ if not t then
+ return
end
- return t
+ end
+ return t
end
-
function tables.migratetable(target,v,root)
- local t = root or _G
- local names = string.split(target,".")
- for i=1,#names-1 do
- local name = names[i]
- t[name] = t[name] or { }
- t = t[name]
- if not t then
- return
- end
+ local t=root or _G
+ local names=string.split(target,".")
+ for i=1,#names-1 do
+ local name=names[i]
+ t[name]=t[name] or {}
+ t=t[name]
+ if not t then
+ return
end
- t[names[#names]] = v
+ end
+ t[names[#names]]=v
end
-
-function tables.removevalue(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
+function tables.removevalue(t,value)
+ if value then
+ for i=1,#t do
+ if t[i]==value then
+ remove(t,i)
+ end
end
+ end
end
-
function tables.insertbeforevalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i,extra)
+ return
end
- insert(t,1,extra)
+ end
+ insert(t,1,extra)
end
-
function tables.insertaftervalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
+local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
+function table.tocsv(t,specification)
+ if t and #t>0 then
+ local result={}
+ local r={}
+ specification=specification or {}
+ local fields=specification.fields
+ if type(fields)~="string" then
+ fields=sortedkeys(t[1])
+ end
+ local separator=specification.separator or ","
+ if specification.preamble==true then
+ for f=1,#fields do
+ r[f]=lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1]=concat(r,separator)
end
for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-
--- experimental
-
-local function toxml(t,d,result,step)
- for k, v in table.sortedpairs(t) do
- if type(v) == "table" then
- if type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</entry>",d,k)
- else
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</%s>",d,k)
- end
- elseif type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
+ local ti=t[i]
+ for f=1,#fields do
+ local field=ti[fields[f]]
+ if type(field)=="string" then
+ r[f]=lpegmatch(escape,field)
else
- result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
+ r[f]=tostring(field)
end
+ end
+ result[#result+1]=concat(r,separator)
end
+ return concat(result,"\n")
+ else
+ return ""
+ end
end
-
-function table.toxml(t,name,nobanner,indent,spaces)
- local noroot = name == false
- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
- local indent = rep(" ",indent or 0)
- local spaces = rep(" ",spaces or 1)
- if noroot then
- toxml( t, inndent, result, spaces)
+local nspaces=utilities.strings.newrepeater(" ")
+local function toxml(t,d,result,step)
+ for k,v in sortedpairs(t) do
+ local s=nspaces[d]
+ local tk=type(k)
+ local tv=type(v)
+ if tv=="table" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</entry>"](s,k)
+ else
+ result[#result+1]=formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</%s>"](s,k)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
else
- toxml( { [name or "root"] = t }, indent, result, spaces)
- end
- return concat(result,"\n")
+ result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+function table.toxml(t,specification)
+ specification=specification or {}
+ local name=specification.name
+ local noroot=name==false
+ local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent=specification.indent or 0
+ local spaces=specification.spaces or 1
+ if noroot then
+ toxml(t,indent,result,spaces)
+ else
+ toxml({ [name or "data"]=t },indent,result,spaces)
+ end
+ return concat(result,"\n")
end
-
--- also experimental
-
--- encapsulate(table,utilities.tables)
--- encapsulate(table,utilities.tables,true)
--- encapsulate(table,true)
-
function tables.encapsulate(core,capsule,protect)
- if type(capsule) ~= "table" then
- protect = true
- capsule = { }
+ if type(capsule)~="table" then
+ protect=true
+ capsule={}
+ end
+ for key,value in next,core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key]=value
+ end
+ end
+ if protect then
+ for key,value in next,core do
+ core[key]=nil
end
- for key, value in next, core do
+ setmetatable(core,{
+ __index=capsule,
+ __newindex=function(t,key,value)
if capsule[key] then
- print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
- os.exit()
+ print(formatters["\ninvalid %s %a' in %a"]("overload",key,core))
+ os.exit()
else
- capsule[key] = value
- end
- end
- if protect then
- for key, value in next, core do
- core[key] = nil
- end
- setmetatable(core, {
- __index = capsule,
- __newindex = function(t,key,value)
- if capsule[key] then
- print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
- os.exit()
- else
- rawset(t,key,value)
- end
- end
- } )
- end
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+local function fastserialize(t,r,outer)
+ r[#r+1]="{"
+ local n=#t
+ if n>0 then
+ for i=1,n do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["%q,"](v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["%s,"](v)
+ elseif tv=="table" then
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["%S,"](v)
+ end
+ end
+ else
+ for k,v in next,t do
+ local tv=type(v)
+ if tv=="string" then
+ r[#r+1]=formatters["[%q]=%q,"](k,v)
+ elseif tv=="number" then
+ r[#r+1]=formatters["[%q]=%s,"](k,v)
+ elseif tv=="table" then
+ r[#r+1]=formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv=="boolean" then
+ r[#r+1]=formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1]="}"
+ else
+ r[#r+1]="},"
+ end
+ return r
+end
+function table.fastserialize(t,prefix)
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+function table.deserialize(str)
+ if not str or str=="" then
+ return
+ end
+ local code=load(str)
+ if not code then
+ return
+ end
+ code=code()
+ if not code then
+ return
+ end
+ return code
+end
+function table.load(filename)
+ if filename then
+ local t=io.loaddata(filename)
+ if t and t~="" then
+ t=load(t)
+ if type(t)=="function" then
+ t=t()
+ if type(t)=="table" then
+ return t
+ end
+ end
+ end
+ end
+end
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n==nil and true or n,...))
+end
+local function slowdrop(t)
+ local r={}
+ local l={}
+ for i=1,#t do
+ local ti=t[i]
+ local j=0
+ for k,v in next,ti do
+ j=j+1
+ l[j]=formatters["%s=%q"](k,v)
+ end
+ r[i]=formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+local function fastdrop(t)
+ local r={ "return {\n" }
+ for i=1,#t do
+ local ti=t[i]
+ r[#r+1]=" {"
+ for k,v in next,ti do
+ r[#r+1]=formatters["%s=%q"](k,v)
+ end
+ r[#r+1]="},\n"
+ end
+ r[#r+1]="}"
+ return concat(r)
+end
+function table.drop(t,slow)
+ if #t==0 then
+ return "return { }"
+ elseif slow==true then
+ return slowdrop(t)
+ else
+ return fastdrop(t)
+ end
+end
+function table.autokey(t,k)
+ local v={}
+ t[k]=v
+ return v
+end
+local selfmapper={ __index=function(t,k) t[k]=k return k end }
+function table.twowaymapper(t)
+ if not t then
+ t={}
+ else
+ for i=0,#t do
+ local ti=t[i]
+ if ti then
+ local i=tostring(i)
+ t[i]=ti
+ t[ti]=i
+ end
+ end
+ t[""]=t[0] or ""
+ end
+ setmetatable(t,selfmapper)
+ return t
end
@@ -4351,297 +5156,155 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-sto'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatable, getmetatable = setmetatable, getmetatable
+package.loaded["util-sto"] = package.loaded["util-sto"] or true
-utilities = utilities or { }
-utilities.storage = utilities.storage or { }
-local storage = utilities.storage
+-- original size: 4432, stripped down to: 3123
+if not modules then modules={} end modules ['util-sto']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local setmetatable,getmetatable,type=setmetatable,getmetatable,type
+utilities=utilities or {}
+utilities.storage=utilities.storage or {}
+local storage=utilities.storage
function storage.mark(t)
- if not t then
- texio.write_nl("fatal error: storage cannot be marked")
- return -- os.exit()
- end
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.allocate(t)
- t = t or { }
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
+ t=t or {}
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
end
-
function storage.marked(t)
- local m = getmetatable(t)
- return m and m.__storage__
+ local m=getmetatable(t)
+ return m and m.__storage__
end
-
function storage.checked(t)
- if not t then
- texio.write_nl("fatal error: storage has not been allocated")
- return -- os.exit()
- end
- return t
+ if not t then
+ report("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
end
-
-
function storage.setinitializer(data,initialize)
- local m = getmetatable(data) or { }
- m.__index = function(data,k)
- m.__index = nil -- so that we can access the entries during initializing
- initialize()
- return data[k]
- end
- setmetatable(data, m)
-end
-
-local keyisvalue = { __index = function(t,k)
- t[k] = k
- return k
+ local m=getmetatable(data) or {}
+ m.__index=function(data,k)
+ m.__index=nil
+ initialize()
+ return data[k]
+ end
+ setmetatable(data,m)
+end
+local keyisvalue={ __index=function(t,k)
+ t[k]=k
+ return k
end }
-
function storage.sparse(t)
- t = t or { }
- setmetatable(t,keyisvalue)
- return t
-end
-
--- table namespace ?
-
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
-
-local t_empty = { __index = f_empty }
-local t_self = { __index = f_self }
-local t_ignore = { __newindex = f_ignore }
-
+ t=t or {}
+ setmetatable(t,keyisvalue)
+ return t
+end
+local function f_empty () return "" end
+local function f_self (t,k) t[k]=k return k end
+local function f_table (t,k) local v={} t[k]=v return v end
+local function f_ignore() end
+local t_empty={ __index=f_empty }
+local t_self={ __index=f_self }
+local t_table={ __index=f_table }
+local t_ignore={ __newindex=f_ignore }
function table.setmetatableindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "empty" then
- m.__index = f_empty
- elseif f == "key" then
- m.__index = f_self
- else
- m.__index = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="empty" then
+ m.__index=f_empty
+ elseif f=="key" then
+ m.__index=f_self
+ elseif f=="table" then
+ m.__index=f_table
else
- if f == "empty" then
- setmetatable(t, t_empty)
- elseif f == "key" then
- setmetatable(t, t_self)
- else
- setmetatable(t,{ __index = f })
- end
+ m.__index=f
+ end
+ else
+ if f=="empty" then
+ setmetatable(t,t_empty)
+ elseif f=="key" then
+ setmetatable(t,t_self)
+ elseif f=="table" then
+ setmetatable(t,t_table)
+ else
+ setmetatable(t,{ __index=f })
end
- return t
+ end
+ return t
end
-
function table.setmetatablenewindex(t,f)
- local m = getmetatable(t)
- if m then
- if f == "ignore" then
- m.__newindex = f_ignore
- else
- m.__newindex = f
- end
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ if f=="ignore" then
+ m.__newindex=f_ignore
else
- if f == "ignore" then
- setmetatable(t, t_ignore)
- else
- setmetatable(t,{ __newindex = f })
- end
+ m.__newindex=f
end
- return t
-end
-
-function table.setmetatablecall(t,f)
- local m = getmetatable(t)
- if m then
- m.__call = f
+ else
+ if f=="ignore" then
+ setmetatable(t,t_ignore)
else
- setmetatable(t,{ __call = f })
+ setmetatable(t,{ __newindex=f })
end
- return t
+ end
+ return t
+end
+function table.setmetatablecall(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__call=f
+ else
+ setmetatable(t,{ __call=f })
+ end
+ return t
end
-
function table.setmetatablekey(t,key,value)
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m[key] = value
- return t
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m[key]=value
+ return t
end
-
function table.getmetatablekey(t,key,value)
- local m = getmetatable(t)
- return m and m[key]
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-mrg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- hm, quite unreadable
-
-local gsub, format = string.gsub, string.format
-local concat = table.concat
-local type, next = type, next
-
-utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
-utilities.report = logs and logs.reporter("system") or print
-
-local merger = utilities.merger
-
-merger.strip_comment = true
-
-local m_begin_merge = "begin library merge"
-local m_end_merge = "end library merge"
-local m_begin_closure = "do -- create closure to overcome 200 locals limit"
-local m_end_closure = "end -- of closure"
-
-local m_pattern =
- "%c+" ..
- "%-%-%s+" .. m_begin_merge ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. m_end_merge ..
- "%c+"
-
-local m_format =
- "\n\n-- " .. m_begin_merge ..
- "\n%s\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local m_faked =
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. m_begin_merge .. "\n\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local function self_fake()
- return m_faked
-end
-
-local function self_nothing()
- return ""
-end
-
-local function self_load(name)
- local data = io.loaddata(name) or ""
- if data == "" then
- utilities.report("merge: unknown file %s",name)
- else
- utilities.report("merge: inserting %s",name)
- end
- return data or ""
-end
-
-local function self_save(name, data)
- if data ~= "" then
- if merger.strip_comment then
- -- saves some 20K
- local n = #data
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
- utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
- end
- io.savedata(name,data)
- utilities.report("merge: saving %s",name)
- end
-end
-
-local function self_swap(data,code)
- return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
-end
-
-local function self_libs(libs,list)
- local result, f, frozen, foundpath = { }, nil, false, nil
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- utilities.report("merge: checking library path %s",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- utilities.report("merge: using library path %s",foundpath)
- local right, wrong = { }, { }
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- utilities.report("merge: using library %s",fullname)
- right[#right+1] = lib
- result[#result+1] = m_begin_closure
- result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = m_end_closure
- else
- utilities.report("merge: skipping library %s",fullname)
- wrong[#wrong+1] = lib
- end
- end
- if #right > 0 then
- utilities.report("merge: used libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utilities.report("merge: skipped libraries: %s",concat(wrong," "))
- end
- else
- utilities.report("merge: no valid library path found")
- end
- return concat(result, "\n\n")
-end
-
-function merger.selfcreate(libs,list,target)
- if target then
- self_save(target,self_swap(self_fake(),self_libs(libs,list)))
- end
-end
-
-function merger.selfmerge(name,libs,list,target)
- self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
-end
-
-function merger.selfclean(name)
- self_save(name,self_swap(self_load(name),self_nothing()))
+ local m=getmetatable(t)
+ return m and m[key]
end
@@ -4649,529 +5312,418 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
-
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
- end
- end
-end
-
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- utilities.report("lua: compiling %s into %s",luafile,lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
- if strip ~= false then
- command = "-s " .. command
- end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
- if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
- end
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- utilities.report("lua: removing %s",luafile)
- os.remove(luafile)
- end
- return done
-end
-
-
-
-
-
-
+package.loaded["util-prs"] = package.loaded["util-prs"] or true
+-- original size: 16976, stripped down to: 12143
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-prs'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['util-prs']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
-local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
-local tostring, type, next = tostring, type, next
-
-utilities = utilities or {}
-utilities.parsers = utilities.parsers or { }
-local parsers = utilities.parsers
-parsers.patterns = parsers.patterns or { }
-
-local setmetatableindex = table.setmetatableindex
-local sortedhash = table.sortedhash
-
--- we could use a Cf Cg construct
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-lpeg.patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
+local lpeg,table,string=lpeg,table,string
+local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local concat,format,gmatch,find=table.concat,string.format,string.gmatch,string.find
+local tostring,type,next,rawset=tostring,type,next,rawset
+utilities=utilities or {}
+local parsers=utilities.parsers or {}
+utilities.parsers=parsers
+local patterns=parsers.patterns or {}
+parsers.patterns=patterns
+local setmetatableindex=table.setmetatableindex
+local sortedhash=table.sortedhash
+local digit=R("09")
+local space=P(' ')
+local equal=P("=")
+local comma=P(",")
+local lbrace=P("{")
+local rbrace=P("}")
+local lparent=P("(")
+local rparent=P(")")
+local period=S(".")
+local punctuation=S(".,:;")
+local spacer=lpegpatterns.spacer
+local whitespace=lpegpatterns.whitespace
+local newline=lpegpatterns.newline
+local anything=lpegpatterns.anything
+local endofstring=lpegpatterns.endofstring
+local nobrace=1-(lbrace+rbrace )
+local noparent=1-(lparent+rparent)
+local escape,left,right=P("\\"),P('{'),P('}')
+lpegpatterns.balanced=P {
+ [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
+ [2]=left*V(1)*right
}
-
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
-local spaces = space^0
-local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
-
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
-
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-
-local key = C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-
-local key = C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
-
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-
-local hash = { }
-
-local function set(key,value)
- hash[key] = value
-end
-
+local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
+local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
+local spaces=space^0
+local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
+local content=(1-endofstring)^0
+lpegpatterns.nestedbraces=nestedbraces
+lpegpatterns.nestedparents=nestedparents
+lpegpatterns.nested=nestedbraces
+lpegpatterns.argument=argument
+lpegpatterns.content=content
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local key=C((1-equal-comma)^1)
+local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
+local pattern_c=(space+comma)^0*(key*equal*value)
+local key=C((1-space-equal-comma)^1)
+local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
+local hash={}
local function set(key,value)
- hash[key] = value
-end
-
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
-
-parsers.patterns.settings_to_hash_a = pattern_a_s
-parsers.patterns.settings_to_hash_b = pattern_b_s
-parsers.patterns.settings_to_hash_c = pattern_c_s
-
+ hash[key]=value
+end
+local pattern_a_s=(pattern_a/set)^1
+local pattern_b_s=(pattern_b/set)^1
+local pattern_c_s=(pattern_c/set)^1
+patterns.settings_to_hash_a=pattern_a_s
+patterns.settings_to_hash_b=pattern_b_s
+patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
+ if how=="strict" then
+ return (pattern_c/set)^1
+ elseif how=="tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
end
-
function parsers.settings_to_hash(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_a_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return {}
+ end
end
-
function parsers.settings_to_hash_tolerant(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return {}
+ end
end
-
function parsers.settings_to_hash_strict(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
+ if str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
end
-
-local separator = comma * space^0
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-parsers.patterns.settings_to_array = pattern
-
--- we could use a weak table as cache
-
+local separator=comma*space^0
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local pattern=spaces*Ct(value*(separator*value)^0)
+patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if not str or str == "" then
- return { }
- elseif strict then
- if find(str,"{") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
+ if not str or str=="" then
+ return {}
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
else
- return lpegmatch(pattern,str)
+ return { str }
end
+ else
+ return lpegmatch(pattern,str)
+ end
end
-
local function set(t,v)
- t[#t+1] = v
+ t[#t+1]=v
end
-
-local value = P(Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * Carg(1)
-
+local value=P(Carg(1)*value)/set
+local pattern=value*(separator*value)^0*Carg(1)
function parsers.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
+ return lpegmatch(pattern,str,nil,t)
end
-
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- tn = tn + 1
- t[tn] = key .. '=' .. yes
- elseif not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. no
- end
- elseif value or not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. tostring(value)
- end
- else
- tn = tn + 1
- t[tn] = key .. '=' .. value
- end
- end
+ if h then
+ local t,tn,s={},0,table.sortedkeys(h)
+ omit=omit and table.tohash(omit)
+ for i=1,#s do
+ local key=s[i]
+ if not omit or not omit[key] then
+ local value=h[key]
+ if type(value)=="boolean" then
+ if yes and no then
+ if value then
+ tn=tn+1
+ t[tn]=key..'='..yes
+ elseif not strict then
+ tn=tn+1
+ t[tn]=key..'='..no
+ end
+ elseif value or not strict then
+ tn=tn+1
+ t[tn]=key..'='..tostring(value)
+ end
+ else
+ tn=tn+1
+ t[tn]=key..'='..value
end
- return concat(t,separator or ",")
- else
- return ""
+ end
end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
end
-
function parsers.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
-end
-
-function parsers.simple_hash_to_string(h, separator)
- local t, tn = { }, 0
- for k, v in sortedhash(h) do
- if v then
- tn = tn + 1
- t[tn] = k
- end
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.settings_to_set(str,t)
+ t=t or {}
+ for s in gmatch(str,"[^, ]+") do
+ t[s]=true
+ end
+ return t
+end
+function parsers.simple_hash_to_string(h,separator)
+ local t,tn={},0
+ for k,v in sortedhash(h) do
+ if v then
+ tn=tn+1
+ t[tn]=k
end
- return concat(t,separator or ",")
+ end
+ return concat(t,separator or ",")
end
-
-local value = lbrace * C((nobrace + nested)^0) * rbrace
-local pattern = Ct((space + value)^0)
-
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
+local pattern_a=spaces*Ct(value*(separator*value)^0)
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s=lpegmatch(pattern_a,str)
+ if n==1 then
+ return unpack(s)
+ else
+ local t,tn={},0
+ for i=1,n do
+ for j=1,#s do
+ tn=tn+1
+ t[tn]=s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
+local pattern_b=spaces*Ct(value*(separator*value)^0)
+function parsers.settings_to_array_with_repeat(str,expand)
+ if expand then
+ return lpegmatch(pattern_b,str) or {}
+ else
+ return lpegmatch(pattern_a,str) or {}
+ end
+end
+local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
+local pattern=Ct((space+value)^0)
function parsers.arguments_to_table(str)
- return lpegmatch(pattern,str)
+ return lpegmatch(pattern,str)
end
-
--- temporary here (unoptimized)
-
function parsers.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = { }
- self[class] = sc
- if parentclass then
- local sp = self[parentclass]
- if not sp then
- sp = { }
- self[parentclass] = sp
- end
- setmetatableindex(sc,sp)
- end
- end
- parsers.settings_to_hash(settings,sc)
+ local sc=self[class]
+ if not sc then
+ sc={}
+ self[class]=sc
+ if parentclass then
+ local sp=self[parentclass]
+ if not sp then
+ sp={}
+ self[parentclass]=sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
end
-
function parsers.listitem(str)
- return gmatch(str,"[^, ]+")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util-fmt'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ return gmatch(str,"[^, ]+")
+end
+local pattern=Cs { "start",
+ start=V("one")+V("two")+V("three"),
+ rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
+ thousand=digit*digit*digit,
+ one=digit*V("rest"),
+ two=digit*digit*V("rest"),
+ three=V("thousand")*V("rest"),
}
-
-utilities = utilities or { }
-utilities.formatters = utilities.formatters or { }
-local formatters = utilities.formatters
-
-local concat, format = table.concat, string.format
-local tostring, type = tostring, type
-local strip = string.strip
-
-local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
-local lpegmatch = lpeg.match
-
--- temporary here
-
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = Cs((number + 1)^0)
-
-
-lpeg.patterns.stripzeros = stripper
-
-function formatters.stripzeros(str)
- return lpegmatch(stripper,str)
-end
-
-function formatters.formatcolumns(result,between)
- if result and #result > 0 then
- between = between or " "
- local widths, numbers = { }, { }
- local first = result[1]
- local n = #first
- for i=1,n do
- widths[i] = 0
- end
- for i=1,#result do
- local r = result[i]
- for j=1,n do
- local rj = r[j]
- local tj = type(rj)
- if tj == "number" then
- numbers[j] = true
- end
- if tj ~= "string" then
- rj = tostring(rj)
- r[j] = rj
- end
- local w = #rj
- if w > widths[j] then
- widths[j] = w
- end
- end
- end
- for i=1,n do
- local w = widths[i]
- if numbers[i] then
- if w > 80 then
- widths[i] = "%s" .. between
- else
- widths[i] = "%0" .. w .. "i" .. between
- end
- else
- if w > 80 then
- widths[i] = "%s" .. between
- elseif w > 0 then
- widths[i] = "%-" .. w .. "s" .. between
- else
- widths[i] = "%s"
- end
- end
- end
- local template = strip(concat(widths))
- for i=1,#result do
- local str = format(template,unpack(result[i]))
- result[i] = strip(str)
- end
- end
- return result
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['util.deb'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
-
-local getinfo = debug.getinfo
-local type, next, tostring = type, next, tostring
-local format, find = string.format, string.find
-local is_boolean = string.is_boolean
-
-utilities = utilities or { }
-utilities.debugger = utilities.debugger or { }
-local debugger = utilities.debugger
-
-local counters = { }
-local names = { }
-
--- one
-
-local function hook()
- local f = getinfo(2) -- "nS"
- if f then
- local n = "unknown"
- if f.what == "C" then
- n = f.name or '<anonymous>'
- if not names[n] then
- names[n] = format("%42s",n)
- end
- else
- -- source short_src linedefined what name namewhat nups func
- n = f.name or f.namewhat or f.what
- if not n or n == "" then
- n = "?"
- end
- if not names[n] then
- names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
- end
- end
- counters[n] = (counters[n] or 0) + 1
- end
-end
-
-function debugger.showstats(printer,threshold) -- hm, something has changed, rubish now
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- local dataset = { }
- for name, count in next, counters do
- dataset[#dataset+1] = { name, count }
- end
- table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
- for i=1,#dataset do
- local d = dataset[i]
- local name = d[1]
- local count = d[2]
- if count > threshold and not find(name,"for generator") then -- move up
- printer(format("%8i %s\n", count, names[name]))
- total = total + count
+lpegpatterns.splitthousands=pattern
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+local optionalwhitespace=whitespace^0
+lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
+lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
+lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
+local dquote=P('"')
+local equal=P('=')
+local escape=P('\\')
+local separator=S(' ,')
+local key=C((1-equal)^1)
+local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
+local pattern=Cf(Ct("")*Cg(key*equal*value)*separator^0,rawset)^0*P(-1)
+patterns.keq_to_hash_c=pattern
+function parsers.keq_to_hash(str)
+ if str and str~="" then
+ return lpegmatch(pattern,str)
+ else
+ return {}
+ end
+end
+local defaultspecification={ separator=",",quote='"' }
+function parsers.csvsplitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local whatever=C((1-separator-newline)^0)
+ if quotechar and quotechar~="" then
+ local quotedata=nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar=P(chr)
+ local quoteword=quotechar*C((1-quotechar)^0)*quotechar
+ if quotedata then
+ quotedata=quotedata+quoteword
+ else
+ quotedata=quoteword
+ end
+ end
+ whatever=quotedata+whatever
+ end
+ local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
+function parsers.rfc4180splitter(specification)
+ specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=P(specification.quote)
+ local dquotechar=quotechar*quotechar
+/specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
+ local non_escaped=C((1-quotechar-newline-separator)^1)
+ local field=escaped+non_escaped
+ local record=Ct((field*separator^-1)^1)
+ local headerline=record*Cp()
+ local wholeblob=Ct((newline^-1*record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header,position=lpegmatch(headerline,data)
+ local data=lpegmatch(wholeblob,data,position)
+ return data,header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
+local function ranger(first,last,n,action)
+ if not first then
+ elseif last==true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+local cardinal=lpegpatterns.cardinal/tonumber
+local spacers=lpegpatterns.spacer^0
+local endofstring=lpegpatterns.endofstring
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
+function parsers.stepper(str,n,action)
+ if type(n)=="function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+patterns.unittotex=pattern
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+local cache={}
+local spaces=lpeg.patterns.space^0
+local dummy=function() end
+table.setmetatableindex(cache,function(t,k)
+ local separator=P(k)
+ local value=(1-separator)^0
+ local pattern=spaces*C(value)*separator^0*Cp()
+ t[k]=pattern
+ return pattern
+end)
+local commalistiterator=cache[","]
+function utilities.parsers.iterator(str,separator)
+ local n=#str
+ if n==0 then
+ return dummy
+ else
+ local pattern=separator and cache[separator] or commalistiterator
+ local p=1
+ return function()
+ if p<=n then
+ local s,e=lpegmatch(pattern,str,p)
+ if e then
+ p=e
+ return s
end
- grandtotal = grandtotal + count
- functions = functions + 1
+ end
end
- printer("\n")
- printer(format("functions : % 10i\n", functions))
- printer(format("total : % 10i\n", total))
- printer(format("grand total: % 10i\n", grandtotal))
- printer(format("threshold : % 10i\n", threshold))
+ end
end
-
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function initialize(t,name)
+ local source=t[name]
+ if source then
+ local result={}
+ for k,v in next,t[name] do
+ result[k]=v
end
+ return result
+ else
+ return {}
+ end
end
-
-function debugger.enable()
- debug.sethook(hook,"c")
+local function fetch(t,name)
+ return t[name] or {}
end
-
-function debugger.disable()
- debug.sethook()
+function process(result,more)
+ for k,v in next,more do
+ result[k]=v
+ end
+ return result
end
-
-
-
-
-
-local is_node = node and node.is_node
-local is_lpeg = lpeg and lpeg.type
-
-function inspect(i) -- global function
- local ti = type(i)
- if ti == "table" then
- table.print(i,"table")
- elseif is_node and is_node(i) then
- table.print(nodes.astable(i),tostring(i))
- elseif is_lpeg and is_lpeg(i) then
- lpeg.print(i)
- else
- print(tostring(i))
- end
-end
-
--- from the lua book:
-
-function traceback()
- local level = 1
- while true do
- local info = debug.getinfo(level, "Sl")
- if not info then
- break
- elseif info.what == "C" then
- print(format("%3i : C function",level))
- else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
- end
- level = level + 1
- end
+local name=C((1-S(", "))^1)
+local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
+local merge=Cf(parser,process)
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
end
@@ -5179,199 +5731,80 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- As we want to protect the global tables, we no longer store the timing
--- in the tables themselves but in a hidden timers table so that we don't
--- get warnings about assignments. This is more efficient than using rawset
--- and rawget.
-
-local format, lower = string.format, string.lower
-local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
-
-statistics = statistics or { }
-local statistics = statistics
-
-statistics.enable = true
-statistics.threshold = 0.05
-
-local statusinfo, n, registered, timers = { }, 0, { }, { }
+package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
-local function hastiming(instance)
- return instance and timers[instance]
-end
-
-local function resettiming(instance)
- timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
-end
-
-local function starttiming(instance)
- local timer = timers[instance or "notimer"]
- if not timer then
- timer = { }
- timers[instance or "notimer"] = timer
- end
- local it = timer.timing
- if not it then
- it = 0
- end
- if it == 0 then
- timer.starttime = clock()
- if not timer.loadtime then
- timer.loadtime = 0
- end
- end
- timer.timing = it + 1
-end
-
-local function stoptiming(instance, report)
- local timer = timers[instance or "notimer"]
- local it = timer.timing
- if it > 1 then
- timer.timing = it - 1
- else
- local starttime = timer.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- timer.stoptime = stoptime
- timer.loadtime = timer.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- timer.timing = 0
- return loadtime
- end
- end
- return 0
-end
-
-local function elapsedtime(instance)
- local timer = timers[instance or "notimer"]
- return format("%0.3f",timer and timer.loadtime or 0)
-end
-
-local function elapsedindeed(instance)
- local timer = timers[instance or "notimer"]
- return (timer and timer.loadtime or 0) > statistics.threshold
-end
-
-local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if elapsedindeed(instance) then
- return format("%s seconds %s", elapsedtime(instance),rest or "")
- end
-end
+-- original size: 2274, stripped down to: 1781
-statistics.hastiming = hastiming
-statistics.resettiming = resettiming
-statistics.starttiming = starttiming
-statistics.stoptiming = stoptiming
-statistics.elapsedtime = elapsedtime
-statistics.elapsedindeed = elapsedindeed
-statistics.elapsedseconds = elapsedseconds
-
--- general function
-
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+if not modules then modules={} end modules ['util-fmt']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.formatters=utilities.formatters or {}
+local formatters=utilities.formatters
+local concat,format=table.concat,string.format
+local tostring,type=tostring,type
+local strip=string.strip
+local lpegmatch=lpeg.match
+local stripper=lpeg.patterns.stripzeros
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
end
-
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
- end)
- collectgarbage("collect")
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
- end
+function formatters.formatcolumns(result,between)
+ if result and #result>0 then
+ between=between or " "
+ local widths,numbers={},{}
+ local first=result[1]
+ local n=#first
+ for i=1,n do
+ widths[i]=0
+ end
+ for i=1,#result do
+ local r=result[i]
+ for j=1,n do
+ local rj=r[j]
+ local tj=type(rj)
+ if tj=="number" then
+ numbers[j]=true
+ end
+ if tj~="string" then
+ rj=tostring(rj)
+ r[j]=rj
+ end
+ local w=#rj
+ if w>widths[j] then
+ widths[j]=w
+ end
+ end
+ end
+ for i=1,n do
+ local w=widths[i]
+ if numbers[i] then
+ if w>80 then
+ widths[i]="%s"..between
+ else
+ widths[i]="%0"..w.."i"..between
+ end
+ else
+ if w>80 then
+ widths[i]="%s"..between
+ elseif w>0 then
+ widths[i]="%-"..w.."s"..between
+ else
+ widths[i]="%s"
end
- write_nl("") -- final newline
- statistics.enable = false
+ end
end
-end
-
-local template, report_statistics, nn = nil, nil, 0 -- we only calcute it once
-
-function statistics.showjobstat(tag,data,n)
- if not logs then
- -- sorry
- elseif type(data) == "table" then
- for i=1,#data do
- statistics.showjobstat(tag,data[i],n)
- end
- else
- if not template or n > nn then
- template, n = format("%%-%ss - %%s",n), nn
- report_statistics = logs.reporter("mkiv lua stats")
- end
- report_statistics(format(template,tag,data))
+ local template=strip(concat(widths))
+ for i=1,#result do
+ local str=format(template,unpack(result[i]))
+ result[i]=strip(str)
end
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
-
-starttiming(statistics)
-
-function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
- return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
-end
-
-function statistics.runtime()
- stoptiming(statistics)
- return statistics.formatruntime(elapsedtime(statistics))
-end
-
-function statistics.timed(action,report)
- report = report or logs.reporter("system")
- starttiming("run")
- action()
- stoptiming("run")
- report("total runtime: %s",elapsedtime("run"))
-end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
+ end
+ return result
end
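-- Illustrative usage sketch, not part of this diff: formatcolumns measures the
-- widest entry per column, builds a format template, and rewrites each row as
-- one aligned string (numeric columns are zero padded).
local rows = {
  { "alpha", 1,  "x"  },
  { "beta",  22, "yy" },
}
for _, line in ipairs(utilities.formatters.formatcolumns(rows, "  ")) do
  print(line) -- two lines with padded, aligned columns
end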
@@ -5379,341 +5812,311 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tostring = type, next, tostring
-local concat = table.concat
-local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
-local is_boolean = string.is_boolean
-local settings_to_hash = utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
-
-utilities = utilities or { }
-local utilities = utilities
-utilities.setters = utilities.setters or { }
-local setters = utilities.setters
-
-local data = { } -- maybe just local
-
--- We can initialize from the cnf file. This is sort of tricky as
--- later defined setters also need to be initialized then. If set
--- this way, we need to ensure that they are not reset later on.
+package.loaded["trac-set"] = package.loaded["trac-set"] or true
-local trace_initialize = false -- only for testing during development
+-- original size: 12365, stripped down to: 8799
-function setters.initialize(filename,name,values) -- filename only for diagnostics
- local setter = data[name]
- if setter then
- local data = setter.data
- if data then
- for key, value in next, values do
- -- key = gsub(key,"_",".")
- value = is_boolean(value,value)
- local functions = data[key]
- if functions then
- if #functions > 0 and not functions.value then
- if trace_initialize then
- setter.report("executing %s (%s -> %s)",key,filename,tostring(value))
- end
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- else
- if trace_initialize then
- setter.report("skipping %s (%s -> %s)",key,filename,tostring(value))
- end
- end
- else
- -- we do a simple preregistration i.e. not in the
- -- list as it might be an obsolete entry
- functions = { default = value }
- data[key] = functions
- if trace_initialize then
- setter.report("storing %s (%s -> %s)",key,filename,tostring(value))
- end
- end
+if not modules then modules={} end modules ['trac-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring=type,next,tostring
+local concat=table.concat
+local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
+local is_boolean=string.is_boolean
+local settings_to_hash=utilities.parsers.settings_to_hash
+local allocate=utilities.storage.allocate
+utilities=utilities or {}
+local utilities=utilities
+local setters=utilities.setters or {}
+utilities.setters=setters
+local data={}
+local trace_initialize=false
+function setters.initialize(filename,name,values)
+ local setter=data[name]
+ if setter then
+ frozen=true
+ local data=setter.data
+ if data then
+ for key,newvalue in next,values do
+ local newvalue=is_boolean(newvalue,newvalue)
+ local functions=data[key]
+ if functions then
+ local oldvalue=functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
end
- return true
+ elseif #functions>0 and not oldvalue then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value=newvalue
+ functions.frozen=functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ functions={ default=newvalue,frozen=frozen }
+ data[key]=functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
end
+ end
+ return true
end
+ end
end
-
--- user interface code
-
local function set(t,what,newvalue)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = settings_to_hash(what) -- inefficient but ok
- end
- if type(what) ~= "table" then
- return
- end
- if not done then -- catch ... why not set?
- done = { }
- t.done = done
- end
- for w, value in next, what do
- if value == "" then
- value = newvalue
- elseif not value then
- value = false -- catch nil
- else
- value = is_boolean(value,value)
- end
- w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
- for name, functions in next, data do
- if done[name] then
- -- prevent recursion due to wildcards
- elseif find(name,w) then
- done[name] = true
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- end
- end
- end
+ local data=t.data
+ if not data.frozen then
+ local done=t.done
+ if type(what)=="string" then
+ what=settings_to_hash(what)
+ end
+ if type(what)~="table" then
+ return
+ end
+ if not done then
+ done={}
+ t.done=done
+ end
+ for w,value in next,what do
+ if value=="" then
+ value=newvalue
+ elseif not value then
+ value=false
+ else
+ value=is_boolean(value,value)
+ end
+ w=topattern(w,true,true)
+ for name,functions in next,data do
+ if done[name] then
+ elseif find(name,w) then
+ done[name]=true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value=value
+ end
+ end
+ end
+ end
end
-
local function reset(t)
- for name, functions in next, t.data do
- for i=1,#functions do
- functions[i](false)
- end
- functions.value = false
+ local data=t.data
+ if not data.frozen then
+ for name,functions in next,data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value=false
end
+ end
end
-
local function enable(t,what)
- set(t,what,true)
+ set(t,what,true)
end
-
local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
+ local data=t.data
+ if not what or what=="" then
+ t.done={}
+ reset(t)
+ else
+ set(t,what,false)
+ end
end
-
function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local functions = data[what]
- if not functions then
- functions = { }
- data[what] = functions
- if trace_initialize then
- t.report("defining %s",what)
- end
- end
- local default = functions.default -- can be set from cnf file
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "string" then
- if trace_initialize then
- t.report("coupling %s to %s",what,fnc)
- end
- local s = fnc -- else wrong reference
- fnc = function(value) set(t,s,value) end
- elseif typ ~= "function" then
- fnc = nil
- end
- if fnc then
- functions[#functions+1] = fnc
- -- default: set at command line or in cnf file
- -- value : set in tex run (needed when loading runtime)
- local value = functions.value or default
- if value ~= nil then
- fnc(value)
- functions.value = value
- end
- end
- end
- return false -- so we can use it in an assignment
+ local data=t.data
+ what=lower(what)
+ local functions=data[what]
+ if not functions then
+ functions={}
+ data[what]=functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default=functions.default
+ for i=1,select("#",...) do
+ local fnc=select(i,...)
+ local typ=type(fnc)
+ if typ=="string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s=fnc
+ fnc=function(value) set(t,s,value) end
+ elseif typ~="function" then
+ fnc=nil
+ end
+ if fnc then
+ functions[#functions+1]=fnc
+ local value=functions.value or default
+ if value~=nil then
+ fnc(value)
+ functions.value=value
+ end
+ end
+ end
+ return false
end
-
function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,what)
- t.enable, t.done = e, { }
+ local e=t.enable
+ t.enable,t.done=enable,{}
+ enable(t,what)
+ t.enable,t.done=e,{}
end
-
function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,what)
- t.disable, t.done = e, { }
+ local e=t.disable
+ t.disable,t.done=disable,{}
+ disable(t,what)
+ t.disable,t.done=e,{}
end
-
function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
+ t.done={}
+ reset(t)
+end
+function setters.list(t)
+ local list=table.sortedkeys(t.data)
+ local user,system={},{}
+ for l=1,#list do
+ local what=list[l]
+ if find(what,"^%*") then
+ system[#system+1]=what
+ else
+ user[#user+1]=what
end
- return user, system
+ end
+ return user,system
end
-
function setters.show(t)
- local category = t.name
- local list = setters.list(t)
- t.report()
- for k=1,#list do
- local name = list[k]
- local functions = t.data[name]
- if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
- end
- end
- t.report()
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module but we also want the rest avaliable
-
-local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
-
-local function report(setter,...)
- local report = logs and logs.report
- if report then
- report(setter.name,...)
- else -- fallback, as this module is loaded before the logger
- write_nl(format("%-15s : %s\n",setter.name,format(...)))
- end
-end
-
-function setters.new(name)
- local setter -- we need to access it in setter itself
- setter = {
- data = allocate(), -- indexed, but also default and value fields
- name = name,
- report = function(...) report (setter,...) end,
- enable = function(...) enable (setter,...) end,
- disable = function(...) disable (setter,...) end,
- register = function(...) register(setter,...) end,
- list = function(...) list (setter,...) end,
- show = function(...) show (setter,...) end,
- }
- data[name] = setter
- return setter
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
-
-local t_enable, t_disable, t_report = trackers .enable, trackers .disable, trackers .report
-local d_enable, d_disable, d_report = directives .enable, directives .disable, directives .report
-local e_enable, e_disable, e_report = experiments.enable, experiments.disable, experiments.report
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
+ local category=t.name
+ local list=setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name=list[k]
+ local functions=t.data[name]
+ if functions then
+ local value,default,modules=functions.value,functions.default,#functions
+ value=value==nil and "unset" or tostring(value)
+ default=default==nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+local function default(setter,name)
+ local d=setter.data[name]
+ return d and d.default
+end
+local function value(setter,name)
+ local d=setter.data[name]
+ return d and (d.value or d.default)
+end
+function setters.new(name)
+ local setter
+ setter={
+ data=allocate(),
+ name=name,
+ report=function(...) setters.report (setter,...) end,
+ enable=function(...) enable (setter,...) end,
+ disable=function(...) disable (setter,...) end,
+ register=function(...) register(setter,...) end,
+ list=function(...) list (setter,...) end,
+ show=function(...) show (setter,...) end,
+ default=function(...) return default (setter,...) end,
+ value=function(...) return value (setter,...) end,
+ }
+ data[name]=setter
+ return setter
+end
+trackers=setters.new("trackers")
+directives=setters.new("directives")
+experiments=setters.new("experiments")
+local t_enable,t_disable=trackers .enable,trackers .disable
+local d_enable,d_disable=directives .enable,directives .disable
+local e_enable,e_disable=experiments.enable,experiments.disable
+local trace_directives=false local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
+local trace_experiments=false local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
function directives.enable(...)
- if trace_directives then
- d_report("enabling: %s",concat({...}," "))
- end
- d_enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
end
-
function directives.disable(...)
- if trace_directives then
- d_report("disabling: %s",concat({...}," "))
- end
- d_disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
end
-
function experiments.enable(...)
- if trace_experiments then
- e_report("enabling: %s",concat({...}," "))
- end
- e_enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
end
-
function experiments.disable(...)
- if trace_experiments then
- e_report("disabling: %s",concat({...}," "))
- end
- e_disable(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+directives.register("system.nostatistics",function(v)
+ if statistics then
+ statistics.enable=not v
+ else
+ end
end)
-
-directives.register("system.nolibraries", function(v)
- libraries = nil -- we discard this tracing for security
+directives.register("system.nolibraries",function(v)
+ if libraries then
+ libraries=nil
+ else
+ end
end)
-
--- experiment
-
-local flags = environment and environment.engineflags
-
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
+if environment then
+ local engineflags=environment.engineflags
+ if engineflags then
+ local list=engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ local list=engineflags["c:directives"] or engineflags["directives"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","directives",settings_to_hash(list))
end
+ end
end
-
--- here
-
if texconfig then
-
- -- this happens too late in ini mode but that is no problem
-
- local function set(k,v)
- v = tonumber(v)
- if v then
- texconfig[k] = v
- end
+ local function set(k,v)
+ v=tonumber(v)
+ if v then
+ texconfig[k]=v
end
-
- directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
- directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
- directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
- directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
- directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
- directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
- directives.register("luatex.paramsize", function(v) set("param_size",v) end)
- directives.register("luatex.savesize", function(v) set("save_size",v) end)
- directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
-
+ end
+ directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize",function(v) set("param_size",v) end)
+ directives.register("luatex.savesize",function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
end
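-- Illustrative usage sketch, not part of this diff: the reworked setters are
-- driven through the trackers/directives/experiments instances created above;
-- the key "mymodule.details" is invented for the example.
local trace_details = false
trackers.register("mymodule.details", function(v) trace_details = v end)
trackers.enable("mymodule.details") -- runs the registered function with true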
@@ -5721,643 +6124,559 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: less categories, more subcategories (e.g. nodes)
-
-
-local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
-local format, gmatch, find = string.format, string.gmatch, string.find
-local concat, insert, remove = table.concat, table.insert, table.remove
-local escapedpattern = string.escapedpattern
-local texcount = tex and tex.count
-local next, type = next, type
-
-local setmetatableindex = table.setmetatableindex
-
---[[ldx--
-<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging a sparsing is relatively easy anyway.</p>
---ldx]]--
+package.loaded["trac-log"] = package.loaded["trac-log"] or true
-logs = logs or { }
-local logs = logs
+-- original size: 21795, stripped down to: 14194
-local moreinfo = [[
+if not modules then modules={} end modules ['trac-log']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
+local format,gmatch,find=string.format,string.gmatch,string.find
+local concat,insert,remove=table.concat,table.insert,table.remove
+local topattern=string.topattern
+local texcount=tex and tex.count
+local next,type,select=next,type,select
+local utfchar=utf.char
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+logs=logs or {}
+local logs=logs
+local moreinfo=[[
More information about ConTeXt and the tools that come with it can be found at:
-
+]].."\n"..[[
maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-
--- basic loggers
-
+utilities.strings.formatters.add (
+ formatters,"unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+utilities.strings.formatters.add (
+ formatters,"chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
local function ignore() end
-
-setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
-
-local report, subreport, status, settarget, setformats, settranslations
-
-local direct, subdirect, writer, pushtarget, poptarget
-
+setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
+local report,subreport,status,settarget,setformats,settranslations
+local direct,subdirect,writer,pushtarget,poptarget
if tex and (tex.jobname or tex.formatname) then
-
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
-
- local target = "term and log"
-
- logs.flush = io.flush
-
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
-
- writer = function(...)
- write_nl(target,...)
- end
-
- newline = function()
- write_nl(target,"\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s >\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- direct = function(a,b,c,...)
- if c then
- return format("%-15s > %s",translations[a],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s",translations[a],formats[b])
- elseif a then
- return format("%-15s >", translations[a])
- else
- return ""
- end
- end
-
- subreport = function(a,s,b,c,...)
- if c then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],formats[b]))
- elseif a then
- write_nl(target,format("%-15s > %s >\n", translations[a],translations[s]))
- else
- write_nl(target,"\n")
- end
- end
-
- subdirect = function(a,s,b,c,...)
- if c then
- return format("%-15s > %s > %s",translations[a],translations[s],format(formats[b],c,...))
- elseif b then
- return format("%-15s > %s > %s",translations[a],translations[s],formats[b])
- elseif a then
- return format("%-15s > %s >", translations[a],translations[s])
- else
- return ""
- end
- end
-
- status = function(a,b,c,...)
- if c then
- write_nl(target,format("%-15s : %s\n",translations[a],format(formats[b],c,...)))
- elseif b then
- write_nl(target,format("%-15s : %s\n",translations[a],formats[b]))
- elseif a then
- write_nl(target,format("%-15s :\n", translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
- if target == "term" or target == "term and log" then
- logs.flush = io.flush
- else
- logs.flush = ignore
- end
- end
-
- local stack = { }
-
- pushtarget = function(newtarget)
- insert(stack,target)
- settarget(newtarget)
- end
-
- poptarget = function()
- if #stack > 0 then
- settarget(remove(stack))
- end
- end
-
- setformats = function(f)
- formats = f
- end
-
- settranslations = function(t)
- translations = t
- end
-
+ local valueiskey={ __index=function(t,k) t[k]=k return k end }
+ local target="term and log"
+ logs.flush=io.flush
+ local formats={} setmetatable(formats,valueiskey)
+ local translations={} setmetatable(translations,valueiskey)
+ writer=function(...)
+ write_nl(target,...)
+ end
+ newline=function()
+ write_nl(target,"\n")
+ end
+ local f_one=formatters["%-15s > %s\n"]
+ local f_two=formatters["%-15s >\n"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s"]
+ local f_two=formatters["%-15s >"]
+ direct=function(a,b,c,...)
+ if c then
+ return f_one(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],formats[b])
+ elseif a then
+ return f_two(translations[a])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s\n"]
+ local f_two=formatters["%-15s > %s >\n"]
+ subreport=function(a,s,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local f_one=formatters["%-15s > %s > %s"]
+ local f_two=formatters["%-15s > %s >"]
+ subdirect=function(a,s,b,c,...)
+ if c then
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],translations[s],formats[b])
+ elseif a then
+ return f_two(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ local targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ }
+ settarget=function(whereto)
+ target=targets[whereto or "both"] or targets.both
+ if target=="term" or target=="term and log" then
+ logs.flush=io.flush
+ else
+ logs.flush=ignore
+ end
+ end
+ local stack={}
+ pushtarget=function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+ poptarget=function()
+ if #stack>0 then
+ settarget(remove(stack))
+ end
+ end
+ setformats=function(f)
+ formats=f
+ end
+ settranslations=function(t)
+ translations=t
+ end
else
-
- logs.flush = ignore
-
- writer = write_nl
-
- newline = function()
- write_nl("\n")
- end
-
- report = function(a,b,c,...)
- if c then
- write_nl(format("%-15s | %s",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s",a,b))
- elseif a then
- write_nl(format("%-15s |", a))
- else
- write_nl("")
- end
- end
-
- subreport = function(a,sub,b,c,...)
- if c then
- write_nl(format("%-15s | %s | %s",a,sub,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s | %s | %s",a,sub,b))
- elseif a then
- write_nl(format("%-15s | %s |", a,sub))
- else
- write_nl("")
- end
- end
-
- status = function(a,b,c,...) -- not to be used in lua anyway
- if c then
- write_nl(format("%-15s : %s\n",a,format(b,c,...)))
- elseif b then
- write_nl(format("%-15s : %s\n",a,b)) -- b can have %'s
- elseif a then
- write_nl(format("%-15s :\n", a))
- else
- write_nl("\n")
- end
- end
-
- direct = ignore
- subdirect = ignore
-
- settarget = ignore
- pushtarget = ignore
- poptarget = ignore
- setformats = ignore
- settranslations = ignore
-
-end
-
-logs.report = report
-logs.subreport = subreport
-logs.status = status
-logs.settarget = settarget
-logs.pushtarget = pushtarget
-logs.poptarget = poptarget
-logs.setformats = setformats
-logs.settranslations = settranslations
-
-logs.direct = direct
-logs.subdirect = subdirect
-logs.writer = writer
-logs.newline = newline
-
--- installer
-
--- todo: renew (un) locks when a new one is added and wildcard
-
-local data, states = { }, nil
-
+ logs.flush=ignore
+ writer=write_nl
+ newline=function()
+ write_nl("\n")
+ end
+ local f_one=formatters["%-15s | %s"]
+ local f_two=formatters["%-15s |"]
+ report=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s | %s | %s"]
+ local f_two=formatters["%-15s | %s |"]
+ subreport=function(a,sub,b,c,...)
+ if c then
+ write_nl(f_one(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,sub,b))
+ elseif a then
+ write_nl(f_two(a,sub))
+ else
+ write_nl("")
+ end
+ end
+ local f_one=formatters["%-15s : %s\n"]
+ local f_two=formatters["%-15s :\n"]
+ status=function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("\n")
+ end
+ end
+ direct=ignore
+ subdirect=ignore
+ settarget=ignore
+ pushtarget=ignore
+ poptarget=ignore
+ setformats=ignore
+ settranslations=ignore
+end
+logs.report=report
+logs.subreport=subreport
+logs.status=status
+logs.settarget=settarget
+logs.pushtarget=pushtarget
+logs.poptarget=poptarget
+logs.setformats=setformats
+logs.settranslations=settranslations
+logs.direct=direct
+logs.subdirect=subdirect
+logs.writer=writer
+logs.newline=newline
+local data,states={},nil
function logs.reporter(category,subcategory)
- local logger = data[category]
- if not logger then
- local state = false
- if states == true then
- state = true
- elseif type(states) == "table" then
- for c, _ in next, states do
- if find(category,c) then
- state = true
- break
- end
- end
+ local logger=data[category]
+ if not logger then
+ local state=false
+ if states==true then
+ state=true
+ elseif type(states)=="table" then
+ for c,_ in next,states do
+ if find(category,c) then
+ state=true
+ break
+ end
+ end
+ end
+ logger={
+ reporters={},
+ state=state,
+ }
+ data[category]=logger
+ end
+ local reporter=logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter=function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
end
- logger = {
- reporters = { },
- state = state,
- }
- data[category] = logger
- end
- local reporter = logger.reporters[subcategory or "default"]
- if not reporter then
- if subcategory then
- reporter = function(...)
- if not logger.state then
- subreport(category,subcategory,...)
- end
- end
- logger.reporters[subcategory] = reporter
- else
- local tag = category
- reporter = function(...)
- if not logger.state then
- report(category,...)
- end
- end
- logger.reporters.default = reporter
+ end
+ logger.reporters[subcategory]=reporter
+ else
+ local tag=category
+ reporter=function(...)
+ if not logger.state then
+ report(category,...)
end
+ end
+ logger.reporters.default=reporter
end
- return reporter
+ end
+ return reporter
end
-
-logs.new = logs.reporter -- for old times sake
-
--- context specicific: this ends up in the macro stream
-
-local ctxreport = logs.writer
-
+logs.new=logs.reporter
+local ctxreport=logs.writer
function logs.setmessenger(m)
- ctxreport = m
+ ctxreport=m
end
-
function logs.messenger(category,subcategory)
- -- we need to avoid catcode mess (todo: fast context)
- if subcategory then
- return function(...)
- ctxreport(subdirect(category,subcategory,...))
- end
- else
- return function(...)
- ctxreport(direct(category,...))
- end
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
end
+ end
end
-
--- so far
-
local function setblocked(category,value)
- if category == true then
- -- lock all
- category, value = "*", true
- elseif category == false then
- -- unlock all
- category, value = "*", false
- elseif value == nil then
- -- lock selective
- value = true
- end
- if category == "*" then
- states = value
- for k, v in next, data do
- v.state = value
- end
- else
- states = utilities.parsers.settings_to_hash(category)
- for c, _ in next, states do
- if data[c] then
- v.state = value
- else
- c = escapedpattern(c,true)
- for k, v in next, data do
- if find(k,c) then
- v.state = value
- end
- end
- end
- end
- end
+ if category==true then
+ category,value="*",true
+ elseif category==false then
+ category,value="*",false
+ elseif value==nil then
+ value=true
+ end
+ if category=="*" then
+ states=value
+ for k,v in next,data do
+ v.state=value
+ end
+ else
+ states=utilities.parsers.settings_to_hash(category)
+ for c,_ in next,states do
+ if data[c] then
+ v.state=value
+ else
+ c=topattern(c,true,true)
+ for k,v in next,data do
+ if find(k,c) then
+ v.state=value
+ end
+ end
+ end
+ end
+ end
end
-
function logs.disable(category,value)
- setblocked(category,value == nil and true or value)
+ setblocked(category,value==nil and true or value)
end
-
function logs.enable(category)
- setblocked(category,false)
+ setblocked(category,false)
end
-
function logs.categories()
- return table.sortedkeys(data)
+ return table.sortedkeys(data)
end
-
function logs.show()
- local n, c, s, max = 0, 0, 0, 0
- for category, v in table.sortedpairs(data) do
- n = n + 1
- local state = v.state
- local reporters = v.reporters
- local nc = #category
- if nc > c then
- c = nc
- end
- for subcategory, _ in next, reporters do
- local ns = #subcategory
- if ns > c then
- s = ns
- end
- local m = nc + ns
- if m > max then
- max = m
- end
- end
- local subcategories = concat(table.sortedkeys(reporters),", ")
- if state == true then
- state = "disabled"
- elseif state == false then
- state = "enabled"
- else
- state = "unknown"
- end
- -- no new here
- report("logging","category: '%s', subcategories: '%s', state: '%s'",category,subcategories,state)
+ local n,c,s,max=0,0,0,0
+ for category,v in table.sortedpairs(data) do
+ n=n+1
+ local state=v.state
+ local reporters=v.reporters
+ local nc=#category
+ if nc>c then
+ c=nc
+ end
+ for subcategory,_ in next,reporters do
+ local ns=#subcategory
+ if ns>c then
+ s=ns
+ end
+ local m=nc+ns
+ if m>max then
+ max=m
+ end
+ end
+ local subcategories=concat(table.sortedkeys(reporters),", ")
+ if state==true then
+ state="disabled"
+ elseif state==false then
+ state="enabled"
+ else
+ state="unknown"
end
- report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
end
-
-directives.register("logs.blocked", function(v)
- setblocked(v,true)
+local delayed_reporters={}
+setmetatableindex(delayed_reporters,function(t,k)
+ local v=logs.reporter(k.name)
+ t[k]=v
+ return v
end)
-
-directives.register("logs.target", function(v)
- settarget(v)
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+directives.register("logs.blocked",function(v)
+ setblocked(v,true)
end)
-
--- tex specific loggers (might move elsewhere)
-
-local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
-
-local real, user, sub
-
+directives.register("logs.target",function(v)
+ settarget(v)
+end)
+local report_pages=logs.reporter("pages")
+local real,user,sub
function logs.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
--- real, user, sub = 0, 0, 0
-end
-
-local timing = false
-local starttime = nil
-local lasttime = nil
-
-trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
- starttime = os.clock()
- timing = true
+ real,user,sub=texcount.realpageno,texcount.userpageno,texcount.subpageno
+end
+local timing=false
+local starttime=nil
+local lasttime=nil
+trackers.register("pages.timing",function(v)
+ starttime=os.clock()
+ timing=true
end)
-
-function logs.stop_page_number() -- the first page can includes the initialization so we omit this in average
- if timing then
- local elapsed, average
- local stoptime = os.clock()
- if not lasttime or real < 2 then
- elapsed = stoptime
- average = stoptime
- starttime = stoptime
- else
- elapsed = stoptime - lasttime
- average = (stoptime - starttime) / (real - 1)
- end
- lasttime = stoptime
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- end
- else
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- end
- else
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- end
+function logs.stop_page_number()
+ if timing then
+ local elapsed,average
+ local stoptime=os.clock()
+ if not lasttime or real<2 then
+ elapsed=stoptime
+ average=stoptime
+ starttime=stoptime
else
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- report_pages("flushing realpage %s, userpage %s",real,user)
- end
- else
- report_pages("flushing realpage %s",real)
- end
- else
- report_pages("flushing page")
- end
+ elapsed=stoptime-lasttime
+ average=(stoptime-starttime)/(real-1)
+ end
+ lasttime=stoptime
+ if real<=0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user<=0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real<=0 then
+ report_pages("flushing page")
+ elseif user<=0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
end
- logs.flush()
+ end
+ logs.flush()
end
-
-logs.report_job_stat = statistics and statistics.showjobstat
-
-local report_files = logs.reporter("files")
-
-local nesting = 0
-local verbose = false
-local hasscheme = url.hasscheme
-
--- we don't have show_open and show_close callbacks yet
-
+local report_files=logs.reporter("files")
+local nesting=0
+local verbose=false
+local hasscheme=url.hasscheme
function logs.show_open(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- nesting = nesting + 1
- -- report_files("level %s, opening %s",nesting,name)
- -- else
- -- write(format("(%s",name)) -- tex adds a space
- -- end
- -- end
end
-
function logs.show_close(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, closing %s",nesting,name)
- -- nesting = nesting - 1
- -- else
- -- write(")") -- tex adds a space
- -- end
- -- end
end
-
function logs.show_load(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, loading %s",nesting+1,name)
- -- else
- -- write(format("(%s)",name))
- -- end
- -- end
end
-
--- there may be scripts out there using this:
-
-local simple = logs.reporter("comment")
-
-logs.simple = simple
-logs.simpleline = simple
-
--- obsolete
-
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
-
--- applications
-
+local simple=logs.reporter("comment")
+logs.simple=simple
+logs.simpleline=simple
+function logs.setprogram () end
+function logs.extendbanner() end
+function logs.reportlines () end
+function logs.reportbanner() end
+function logs.reportline () end
+function logs.simplelines () end
+function logs.help () end
+local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
+local p_newline=lpeg.patterns.newline
+local linewise=(
+ Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
+)^1
local function reportlines(t,str)
- if str then
- for line in gmatch(str,"(.-)[\n\r]") do
- t.report(line)
- end
- end
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
end
-
local function reportbanner(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- t.report()
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
end
-
local function reportversion(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- end
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ end
end
-
local function reporthelp(t,...)
- local helpinfo = t.helpinfo
- if type(helpinfo) == "string" then
- reportlines(t,helpinfo)
- elseif type(helpinfo) == "table" then
- local tags = { ... }
- for i=1,#tags do
- reportlines(t,t.helpinfo[tags[i]])
- if i < #tags then
- t.report()
- end
- end
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo)=="table" then
+ for i=1,select("#",...) do
+ reportlines(t,t.helpinfo[select(i,...)])
+ if i<n then
+ t.report()
+ end
end
+ end
end
-
local function reportinfo(t)
- t.report()
- reportlines(t,moreinfo)
-end
-
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+local reporters={
+ lines=reportlines,
+ banner=reportbanner,
+ version=reportversion,
+ help=reporthelp,
+ info=reportinfo,
+ export=reportexport,
+}
+local exporters={
+}
+logs.reporters=reporters
+logs.exporters=exporters
function logs.application(t)
- t.name = t.name or "unknown"
- t.banner = t.banner
- t.report = logs.reporter(t.name)
- t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
- t.identify = function() reportbanner(t) end
- t.version = function() reportversion(t) end
- return t
+ t.name=t.name or "unknown"
+ t.banner=t.banner
+ t.moreinfo=moreinfo
+ t.report=logs.reporter(t.name)
+ t.help=function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export=function(...)
+ reporters.export(t,...)
+ end
+ t.identify=function()
+ reporters.banner(t)
+ end
+ t.version=function()
+ reporters.version(t)
+ end
+ return t
end
-
--- somewhat special
-
--- logging to a file
-
-
function logs.system(whereto,process,jobname,category,...)
- local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
- for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keepint the file open
- if f then
- f:write(message)
- f:close()
- break
- else
- sleep(0.1)
- end
+ local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f=io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
end
+ end
end
-
-local report_system = logs.reporter("system","logs")
-
+local report_system=logs.reporter("system","logs")
function logs.obsolete(old,new)
- local o = loadstring("return " .. new)()
- if type(o) == "function" then
- return function(...)
- report_system("function %s is obsolete, use %s",old,new)
- loadstring(old .. "=" .. new .. " return ".. old)()(...)
- end
- elseif type(o) == "table" then
- local t, m = { }, { }
- m.__index = function(t,k)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- return o[k]
- end
- m.__newindex = function(t,k,v)
- report_system("table %s is obsolete, use %s",old,new)
- m.__index, m.__newindex = o, o
- o[k] = v
- end
- if libraries then
- libraries.obsolete[old] = t -- true
- end
- setmetatable(t,m)
- return t
- end
+ local o=loadstring("return "..new)()
+ if type(o)=="function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old.."="..new.." return "..old)()(...)
+ end
+ elseif type(o)=="table" then
+ local t,m={},{}
+ m.__index=function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ return o[k]
+ end
+ m.__newindex=function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ o[k]=v
+ end
+ if libraries then
+ libraries.obsolete[old]=t
+ end
+ setmetatable(t,m)
+ return t
+ end
end
-
if utilities then
- utilities.report = report_system
+ utilities.report=report_system
end
-
if tex and tex.error then
- function logs.texerrormessage(...) -- for the moment we put this function here
- tex.error(format(...), { })
- end
+ function logs.texerrormessage(...)
+ tex.error(format(...),{})
+ end
else
- function logs.texerrormessage(...)
- print(format(...))
- end
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
end
-
--- do we still need io.flush then?
-
io.stdout:setvbuf('no')
io.stderr:setvbuf('no')
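-- Illustrative usage sketch, not part of this diff: reporters are fetched per
-- category (optionally with a subcategory) and accept the same format
-- directives, such as %a, that are used above; the category "example" is invented.
local report = logs.reporter("example")
report("loading %a", "somefile.lua") -- writes a category-tagged line to term/log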
@@ -6366,487 +6685,1330 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-pro'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
-local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+-- original size: 5791, stripped down to: 4540
--- The protection implemented here is probably not that tight but good enough to catch
--- problems due to naive usage.
---
--- There's a more extensive version (trac-xxx.lua) that supports nesting.
---
--- This will change when we have _ENV in lua 5.2+
+if not modules then modules={} end modules ['trac-inf']={
+ version=1.001,
+ comment="companion to trac-inf.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+local format,lower=string.format,string.lower
+local concat=table.concat
+local clock=os.gettimeofday or os.clock
+statistics=statistics or {}
+local statistics=statistics
+statistics.enable=true
+statistics.threshold=0.01
+local statusinfo,n,registered,timers={},0,{},{}
+table.setmetatableindex(timers,function(t,k)
+ local v={ timing=0,loadtime=0 }
+ t[k]=v
+ return v
+end)
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+local function resettiming(instance)
+ timers[instance or "notimer"]={ timing=0,loadtime=0 }
+end
+local function starttiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing or 0
+ if it==0 then
+ timer.starttime=clock()
+ if not timer.loadtime then
+ timer.loadtime=0
+ end
+ end
+ timer.timing=it+1
+end
+local function stoptiming(instance,report)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing
+ if it>1 then
+ timer.timing=it-1
+ else
+ local starttime=timer.starttime
+ if starttime then
+ local stoptime=clock()
+ local loadtime=stoptime-starttime
+ timer.stoptime=stoptime
+ timer.loadtime=timer.loadtime+loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
+ end
+ timer.timing=0
+ return loadtime
+ end
+ end
+ return 0
+end
+local function elapsed(instance)
+ if type(instance)=="number" then
+ return instance or 0
+ else
+ local timer=timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+local function elapsedindeed(instance)
+ return elapsed(instance)>statistics.threshold
+end
+local function elapsedseconds(instance,rest)
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s",elapsed(instance),rest or "")
+ end
+end
+statistics.hastiming=hastiming
+statistics.resettiming=resettiming
+statistics.starttiming=starttiming
+statistics.stoptiming=stoptiming
+statistics.elapsed=elapsed
+statistics.elapsedtime=elapsedtime
+statistics.elapsedindeed=elapsedindeed
+statistics.elapsedseconds=elapsedseconds
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc)=="function" then
+ local rt=registered[tag] or (#statusinfo+1)
+ statusinfo[rt]={ tag,fnc }
+ registered[tag]=rt
+ if #tag>n then n=#tag end
+ end
+end
+local report=logs.reporter("mkiv lua stats")
+function statistics.show()
+ if statistics.enable then
+ local register=statistics.register
+ register("luatex banner",function()
+ return lower(status.banner)
+ end)
+ register("control sequences",function()
+ return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
+ end)
+ register("callbacks",function()
+ local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
+ end)
+ if jit then
+ local status={ jit.status() }
+ if status[1] then
+ register("luajit status",function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ register("current memory usage",statistics.memused)
+ register("runtime",statistics.runtime)
+ logs.newline()
+ for i=1,#statusinfo do
+ local s=statusinfo[i]
+ local r=s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ statistics.enable=false
+ end
+end
+function statistics.memused()
+ local round=math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
+end
+starttiming(statistics)
+function statistics.formatruntime(runtime)
+ return format("%s seconds",runtime)
+end
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+local report=logs.reporter("system")
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+commands=commands or {}
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ context(elapsedtime(name or "whatever"))
+end
-local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
-local report_system = logs.reporter("system","protection")
+end -- of closure
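-- Illustrative usage sketch, not part of this diff: the timing helpers above
-- are keyed by an arbitrary instance; the key "myjob" is invented.
statistics.starttiming("myjob")
-- ... some work ...
statistics.stoptiming("myjob")
print(statistics.elapsedtime("myjob")) -- elapsed time as a "0.000" style string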
-namespaces = namespaces or { }
-local namespaces = namespaces
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
-local registered = { }
+-- original size: 5773, stripped down to: 3453
+if not modules then modules={} end modules ['trac-pro']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
+local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
+local report_system=logs.reporter("system","protection")
+namespaces=namespaces or {}
+local namespaces=namespaces
+local registered={}
local function report_index(k,name)
- if trace_namespaces then
- report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("reference to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
end
-
local function report_newindex(k,name)
- if trace_namespaces then
- report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
- else
- report_system("assignment to '%s' in protected namespace '%s'",k,name)
- end
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
end
-
local function register(name)
- local data = name == "global" and _G or _G[name]
- if not data then
- return -- error
- end
- registered[name] = data
- local m = getmetatable(data)
- if not m then
- m = { }
- setmetatable(data,m)
- end
- local index, newindex = { }, { }
- m.__saved__index = m.__index
- m.__no__index = function(t,k)
- if not index[k] then
- index[k] = true
- report_index(k,name)
- end
- return nil
- end
- m.__saved__newindex = m.__newindex
- m.__no__newindex = function(t,k,v)
- if not newindex[k] then
- newindex[k] = true
- report_newindex(k,name)
- end
- rawset(t,k,v)
+ local data=name=="global" and _G or _G[name]
+ if not data then
+ return
+ end
+ registered[name]=data
+ local m=getmetatable(data)
+ if not m then
+ m={}
+ setmetatable(data,m)
+ end
+ local index,newindex={},{}
+ m.__saved__index=m.__index
+ m.__no__index=function(t,k)
+ if not index[k] then
+ index[k]=true
+ report_index(k,name)
end
- m.__protection__depth = 0
-end
-
-local function private(name) -- maybe save name
- local data = registered[name]
+ return nil
+ end
+ m.__saved__newindex=m.__newindex
+ m.__no__newindex=function(t,k,v)
+ if not newindex[k] then
+ newindex[k]=true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth=0
+end
+local function private(name)
+ local data=registered[name]
+ if not data then
+ data=_G[name]
if not data then
- data = _G[name]
- if not data then
- data = { }
- _G[name] = data
- end
- register(name)
+ data={}
+ _G[name]=data
end
- return data
+ register(name)
+ end
+ return data
end
-
local function protect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 0 then
- m.__protection__depth = pd + 1
- else
- m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex
- m.__index, m.__newindex = m.__no__index, m.__no__newindex
- m.__protection__depth = 1
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>0 then
+ m.__protection__depth=pd+1
+ else
+ m.__saved__index,m.__saved__newindex=m.__index,m.__newindex
+ m.__index,m.__newindex=m.__no__index,m.__no__newindex
+ m.__protection__depth=1
+ end
end
-
local function unprotect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 1 then
- m.__protection__depth = pd - 1
- else
- m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
- m.__protection__depth = 0
- end
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>1 then
+ m.__protection__depth=pd-1
+ else
+ m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
+ m.__protection__depth=0
+ end
end
-
local function protectall()
- for name, _ in next, registered do
- if name ~= "global" then
- protect(name)
- end
+ for name,_ in next,registered do
+ if name~="global" then
+ protect(name)
end
+ end
end
-
local function unprotectall()
- for name, _ in next, registered do
- if name ~= "global" then
- unprotect(name)
- end
- end
-end
+ for name,_ in next,registered do
+ if name~="global" then
+ unprotect(name)
+ end
+ end
+end
+namespaces.register=register
+namespaces.private=private
+namespaces.protect=protect
+namespaces.unprotect=unprotect
+namespaces.protectall=protectall
+namespaces.unprotectall=unprotectall
+namespaces.private("namespaces") registered={} register("global")
+directives.register("system.protect",function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+directives.register("system.checkglobals",function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
-namespaces.register = register -- register when defined
-namespaces.private = private -- allocate and register if needed
-namespaces.protect = protect
-namespaces.unprotect = unprotect
-namespaces.protectall = protectall
-namespaces.unprotectall = unprotectall
-namespaces.private("namespaces") registered = { } register("global") -- unreachable
+end -- of closure
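
A small sketch of how the trac-pro protection layer is meant to be used (the table name is hypothetical; trackers, directives and logs are assumed to be loaded, as they are earlier in this file):

mylib = mylib or { }          -- some global namespace we want to guard
namespaces.register("mylib")  -- remember it and prepare the __no__index/__no__newindex handlers
namespaces.protect("mylib")   -- unknown reads and writes are now reported once per key
local x = mylib.unknownfield  -- reported as a reference in protected namespace 'mylib'
namespaces.unprotect("mylib") -- restore the saved metamethods
-- the "system.checkglobals" directive registered above toggles the same guard for the global table
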
-directives.register("system.protect", function(v)
- if v then
- protectall()
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lua"] = package.loaded["util-lua"] or true
+
+-- original size: 12575, stripped down to: 8700
+
+if not modules then modules={} end modules ['util-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment="the strip code is written by Peter Cawley",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
+local load,loadfile,type=load,loadfile,type
+utilities=utilities or {}
+utilities.lua=utilities.lua or {}
+local luautilities=utilities.lua
+local report_lua=logs.reporter("system","lua")
+local tracestripping=false
+local forcestupidcompile=true
+luautilities.stripcode=true
+luautilities.alwaysstripcode=false
+luautilities.nofstrippedchunks=0
+luautilities.nofstrippedbytes=0
+local strippedchunks={}
+luautilities.strippedchunks=strippedchunks
+luautilities.suffixes={
+ tma="tma",
+ tmc=jit and "tmb" or "tmc",
+ lua="lua",
+ luc=jit and "lub" or "luc",
+ lui="lui",
+ luv="luv",
+ luj="luj",
+ tua="tua",
+ tuc="tuc",
+}
+if jit or status.luatex_version>=74 then
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ if code and code~="" then
+ code=load(code)
+ if code then
+ code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code~="" then
+ register(luafile)
+ io.savedata(lucfile,code)
+ return true,0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false,0
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ name=name or fullname
+ local code=environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code=dump(code,true)
+ end
+ return load(code),0
+ end
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=stupidcompile(luafile,lucfile,strip~=false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+ function luautilities.loadstripped(...)
+ local l=load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
+else
+ local function register(name,before,after)
+ local delta=before-after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+ luautilities.nofstrippedbytes=luautilities.nofstrippedbytes+delta
+ return delta
+ end
+ local strip_code_pc
+ if _MAJORVERSION==5 and _MINORVERSION==1 then
+ strip_code_pc=function(dump,name)
+ local before=#dump
+ local version,format,endian,int,size,ins,num=byte(dump,5,11)
+ local subint
+ if endian==1 then
+ subint=function(dump,i,l)
+ local val=0
+ for n=l,1,-1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ else
+ subint=function(dump,i,l)
+ local val=0
+ for n=1,l,1 do
+ val=val*256+byte(dump,i+n-1)
+ end
+ return val,i+l
+ end
+ end
+ local strip_function
+ strip_function=function(dump)
+ local count,offset=subint(dump,1,size)
+ local stripped,dirty=rep("\0",size),offset+count
+ offset=offset+count+int*2+4
+ offset=offset+int+subint(dump,offset,int)*ins
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ local t
+ t,offset=subint(dump,offset,1)
+ if t==1 then
+ offset=offset+1
+ elseif t==4 then
+ offset=offset+size+subint(dump,offset,size)
+ elseif t==3 then
+ offset=offset+num
+ end
+ end
+ count,offset=subint(dump,offset,int)
+ stripped=stripped..sub(dump,dirty,offset-1)
+ for n=1,count do
+ local proto,off=strip_function(sub(dump,offset,-1))
+ stripped,offset=stripped..proto,offset+off-1
+ end
+ offset=offset+subint(dump,offset,int)*int+int
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size+int*2
+ end
+ count,offset=subint(dump,offset,int)
+ for n=1,count do
+ offset=offset+subint(dump,offset,size)+size
+ end
+ stripped=stripped..rep("\0",int*3)
+ return stripped,offset
+ end
+ dump=sub(dump,1,12)..strip_function(sub(dump,13,-1))
+ local after=#dump
+ local delta=register(name,before,after)
+ return dump,delta
+ end
+ else
+ strip_code_pc=function(dump,name)
+ return dump,0
+ end
+ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ local code=environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip then
+ local code,n=strip_code_pc(dump(code),name)
+ return load(code),n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
else
- unprotectall()
+ return code,0
+ end
+ end
+ function luautilities.strippedloadstring(code,forcestrip,name)
+ local n=0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code,n=strip_code_pc(dump(code),name)
+ end
+ return load(code),n
+ end
+ local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ local n=0
+ if code and code~="" then
+ code=load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code=dump(code)
+ if strip then
+ code,n=strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile)
+ end
+ if code and code~="" then
+ io.savedata(lucfile,code)
+ end
end
-end)
-
-directives.register("system.checkglobals", function(v)
- if v then
- report_system("enabling global namespace guard")
- protect("global")
+ return n
+ end
+ local luac_normal="texluac -o %q %q"
+ local luac_strip="texluac -s -o %q %q"
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=false
+ if strip~=false then
+ strip=true
+ end
+ if forcestupidcompile then
+ fallback=true
+ elseif strip then
+ done=os.spawn(format(luac_strip,lucfile,luafile))==0
else
- report_system("disabling global namespace guard")
- unprotect("global")
+ done=os.spawn(format(luac_normal,lucfile,luafile))==0
end
-end)
+ if not done and fallback then
+ local n=stupidcompile(luafile,lucfile,strip)
+ if n>0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup=false
+ done=true
+ end
+ if done and cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
+ luautilities.loadstripped=loadstring
+end
--- dummy section (will go to luat-dum.lua)
+end -- of closure
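
A brief sketch of the util-lua entry points defined above (file names are placeholders); which branch is taken depends on whether LuaJIT or LuaTeX >= 0.74 is detected:

-- compile whatever.lua into whatever.luc, stripping debug info and keeping the source
local done = utilities.lua.compile("whatever.lua","whatever.luc",false,true,true)
-- load a file (possibly via environment.loadpreprocessedfile), optionally stripped
local chunk, stripped = utilities.lua.loadedluacode("whatever.lua",true,"whatever")
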
+do -- create closure to overcome 200 locals limit
+package.loaded["util-deb"] = package.loaded["util-deb"] or true
+-- original size: 3708, stripped down to: 2568
+if not modules then modules={} end modules ['util-deb']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local debug=require "debug"
+local getinfo=debug.getinfo
+local type,next,tostring=type,next,tostring
+local format,find=string.format,string.find
+local is_boolean=string.is_boolean
+utilities=utilities or {}
+local debugger=utilities.debugger or {}
+utilities.debugger=debugger
+local counters={}
+local names={}
+local report=logs.reporter("debugger")
+local function hook()
+ local f=getinfo(2)
+ if f then
+ local n="unknown"
+ if f.what=="C" then
+ n=f.name or '<anonymous>'
+ if not names[n] then
+ names[n]=format("%42s",n)
+ end
+ else
+ n=f.name or f.namewhat or f.what
+ if not n or n=="" then
+ n="?"
+ end
+ if not names[n] then
+ names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n]=(counters[n] or 0)+1
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer=printer or report
+ threshold=threshold or 0
+ local total,grandtotal,functions=0,0,0
+ local dataset={}
+ for name,count in next,counters do
+ dataset[#dataset+1]={ name,count }
+ end
+ table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
+ for i=1,#dataset do
+ local d=dataset[i]
+ local name=d[1]
+ local count=d[2]
+ if count>threshold and not find(name,"for generator") then
+ printer(format("%8i %s\n",count,names[name]))
+ total=total+count
+ end
+ grandtotal=grandtotal+count
+ functions=functions+1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n",functions))
+ printer(format("total : % 10i\n",total))
+ printer(format("grand total: % 10i\n",grandtotal))
+ printer(format("threshold : % 10i\n",threshold))
+end
+function debugger.savestats(filename,threshold)
+ local f=io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+function debugger.disable()
+ debug.sethook()
+end
+function traceback()
+ local level=1
+ while true do
+ local info=debug.getinfo(level,"Sl")
+ if not info then
+ break
+ elseif info.what=="C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level=level+1
+ end
+end
end -- of closure
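
The util-deb call counter above can be driven like this (threshold and file name are placeholders):

utilities.debugger.enable()               -- install the call hook
-- ... run the code to be inspected ...
utilities.debugger.disable()
utilities.debugger.showstats(print,10)    -- list functions called more than 10 times
utilities.debugger.savestats("calls.log") -- write the same report to a file
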
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
-local report_lua = logs.reporter("resolvers","lua")
+-- original size: 7294, stripped down to: 5798
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+if not modules then modules={} end modules ['util-mrg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gsub,format=string.gsub,string.format
+local concat=table.concat
+local type,next=type,next
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+utilities=utilities or {}
+local merger=utilities.merger or {}
+utilities.merger=merger
+merger.strip_comment=true
+local report=logs.reporter("system","merge")
+utilities.report=report
+local m_begin_merge="begin library merge"
+local m_end_merge="end library merge"
+local m_begin_closure="do -- create closure to overcome 200 locals limit"
+local m_end_closure="end -- of closure"
+local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
+local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
+local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
+local m_report=[[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
+local function self_fake()
+ return m_faked
+end
+local function self_nothing()
+ return ""
+end
+local function self_load(name)
+ local data=io.loaddata(name) or ""
+ if data=="" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+local space=patterns.space
+local eol=patterns.newline
+local equals=P("=")^0
+local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
+local close=P("]")*C(equals)*P("]")
+local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
+local longstring=open*(1-closeeq)^0*close
+local quoted=patterns.quoted
+local emptyline=space^0*eol
+local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
+local operator2=S("*+/")
+local operator3=S("-")
+local separator=S(",;")
+local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
+local strings=quoted
+local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
+local longstr=longstring
+local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
+local pack=((eol+space)^0/"")*operator1*((eol+space)^0/"")+((eol+space)^0/"")*operator2*((space)^0/"")+((eol+space)^1/"")*operator3*((space)^1/"")+((space)^0/"")*separator*((space)^0/"")
+local lines=emptyline^2/"\n"
+local spaces=(space*space)/" "
+local compact=Cs ((
+ ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
+)^1 )
+local strip=Cs((emptyline^2/"\n"+1)^0)
+local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+local function self_compact(data)
+ local delta=0
+ if merger.strip_comment then
+ local before=#data
+ data=lpegmatch(compact,data)
+ data=lpegmatch(strip,data)
+ local after=#data
+ delta=before-after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data,delta
+end
+local function self_save(name,data)
+ if data~="" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+local function self_swap(data,code)
+ return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
+end
+local function self_libs(libs,list)
+ local result,f,frozen,foundpath={},nil,false,nil
+ result[#result+1]="\n"
+ if type(libs)=='string' then libs={ libs } end
+ if type(list)=='string' then list={ list } end
+ for i=1,#libs do
+ local lib=libs[i]
+ for j=1,#list do
+ local pth=gsub(list[j],"\\","/")
+ report("checking library path %a",pth)
+ local name=pth.."/"..lib
+ if lfs.isfile(name) then
+ foundpath=pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right,wrong,original,stripped={},{},0,0
+ for i=1,#libs do
+ local lib=libs[i]
+ local fullname=foundpath.."/"..lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded=file.nameonly(lib)
+ local data=io.loaddata(fullname,true)
+ original=original+#data
+ local data,delta=self_compact(data)
+ right[#right+1]=lib
+ result[#result+1]=m_begin_closure
+ result[#result+1]=format(m_preloaded,preloaded,preloaded)
+ result[#result+1]=data
+ result[#result+1]=m_end_closure
+ stripped=stripped+delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1]=lib
+ end
+ end
+ right=#right>0 and concat(right," ") or "-"
+ wrong=#wrong>0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1]=format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result,"\n\n")
+end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
--- precautions
+end -- of closure
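
A sketch of the merger defined above, which is what keeps scripts like this one self-contained (library names and search path are examples only):

-- swap the code between the "begin/end library merge" markers for freshly
-- compacted copies of the listed libraries
utilities.merger.selfmerge("mtxrun.lua",
  { "l-lua.lua", "l-string.lua" },   -- libraries to embed
  { "tex/context/base" },            -- paths to search
  "mtxrun-new.lua")                  -- optional target, defaults to the input file
-- or just strip comments and collapse whitespace in a chunk of Lua source
local compacted = utilities.merger.compact(io.loaddata("some.lua"))
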
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+do -- create closure to overcome 200 locals limit
-function os.setlocale()
- -- no way you can mess with it
-end
+package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- dirty tricks
+-- original size: 5655, stripped down to: 3242
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
+if not modules then modules={} end modules ['util-tpl']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities.templates=utilities.templates or {}
+local templates=utilities.templates
+local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
+local report_template=logs.reporter("template")
+local tostring=tostring
+local format,sub=string.format,string.sub
+local P,C,Cs,Carg,lpegmatch=lpeg.P,lpeg.C,lpeg.Cs,lpeg.Carg,lpeg.match
+local replacer
+local function replacekey(k,t,how,recursive)
+ local v=t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v=tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+local sqlescape=lpeg.replacer {
+ { "'","''" },
+ { "\\","\\\\" },
+ { "\r\n","\\n" },
+ { "\r","\\n" },
+}
+local sqlquotedescape=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local escapers={
+ lua=function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+local quotedescapers={
+ lua=function(s)
+ return format("%q",s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
+lpeg.patterns.sqlescape=sqlescape
+lpeg.patterns.sqlquotedescape=sqlquotedescape
+local luaescaper=escapers.lua
+local quotedluaescaper=quotedescapers.lua
+local function replacekeyunquoted(s,t,how,recurse)
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local function replacekeyquoted(s,t,how,recurse)
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+local single=P("%")
+local double=P("%%")
+local lquoted=P("%[")
+local rquoted=P("]%")
+local lquotedq=P("%(")
+local rquotedq=P(")%")
+local escape=double/'%%'
+local nosingle=single/''
+local nodouble=double/''
+local nolquoted=lquoted/''
+local norquoted=rquoted/''
+local nolquotedq=lquotedq/''
+local norquotedq=rquotedq/''
+local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local any=P(1)
+ replacer=Cs((unquoted+quoted+escape+key+any)^0)
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+templates.replace=replace
+function templates.load(filename,mapping,how,recurse)
+ local data=io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping=t
+ end
+ for k,v in next,t do
+ t[k]=replace(v,mapping,how,recurse)
+ end
+ return t
end
--- environment
-environment = environment or { }
-local environment = environment
+end -- of closure
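
The template replacer above recognizes %key% (verbatim), %[key]% (escaped) and %(key)% (quoted) placeholders, with %% standing for a literal percent sign; a small sketch of utilities.templates.replace:

local q = utilities.templates.replace("select * from %[table]% where name = %(name)%",
  { table = "users", name = "a'b" }, "sql")
-- q == "select * from users where name = 'a''b'"
local s = utilities.templates.replace("hello %who%", { who = "world" })
-- s == "hello world"
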
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
+do -- create closure to overcome 200 locals limit
-local mt = {
- __index = function(_,k)
- if k == "version" then
- local version = tex.toks and tex.toks.contextversiontoks
- if version and version ~= "" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k == "jobname" or k == "formatname" then
- local name = tex and tex[k]
- if name or name== "" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k == "outputfilename" then
- local name = environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
+package.loaded["util-env"] = package.loaded["util-env"] or true
-setmetatable(environment,mt)
+-- original size: 7702, stripped down to: 4701
+if not modules then modules={} end modules ['util-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate,mark=utilities.storage.allocate,utilities.storage.mark
+local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
+local unquoted,quoted=string.unquoted,string.quoted
+local concat,insert,remove=table.concat,table.insert,table.remove
+environment=environment or {}
+local environment=environment
+os.setlocale(nil,nil)
+function os.setlocale()
+end
+local validengines=allocate {
+ ["luatex"]=true,
+ ["luajittex"]=true,
+}
+local basicengines=allocate {
+ ["luatex"]="luatex",
+ ["texlua"]="luatex",
+ ["texluac"]="luatex",
+ ["luajittex"]="luajittex",
+ ["texluajit"]="luajittex",
+}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
+environment.validengines=validengines
+environment.basicengines=basicengines
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
+ end
+ local originalzero=file.basename(arg[0])
+ local specialmapping={ luatools="base" }
+ if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
+ arg[0]=specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+end
+environment.arguments=allocate()
+environment.files=allocate()
+environment.sortedflags=nil
function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
+ local arguments,files={},{}
+ environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
+ for index=1,#arg do
+ local argument=arg[index]
+ if index>0 then
+ local flag,value=match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=unquoted(value or "")
+ else
+ flag=match(argument,"^%-+(.+)")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=true
+ else
+ files[#files+1]=argument
end
+ end
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+ end
+ environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
end
-
function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
+ environment.arguments[name]=value
+end
+function environment.getargument(name,partial)
+ local arguments,sortedflags=environment.arguments,environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags=allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k]="^"..sortedflags[k]
+ end
+ environment.sortedflags=sortedflags
+ end
+ for k=1,#sortedflags do
+ local v=sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+environment.argument=environment.getargument
+function environment.splitarguments(separator)
+ local done,before,after=false,{},{}
+ local originalarguments=environment.originalarguments
+ for k=1,#originalarguments do
+ local v=originalarguments[k]
+ if not done and v==separator then
+ done=true
+ elseif done then
+ after[#after+1]=v
+ else
+ before[#before+1]=v
end
- return before, after
+ end
+ return before,after
end
-
function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
+ arg=arg or environment.originalarguments
+ if noquote and #arg==1 then
+ local a=arg[1]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ return a
+ elseif #arg>0 then
+ local result={}
+ for i=1,#arg do
+ local a=arg[i]
+ a=resolvers.resolve(a)
+ a=unquoted(a)
+ a=gsub(a,'"','\\"')
+ if find(a," ") then
+ result[#result+1]=quoted(a)
+ else
+ result[#result+1]=a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
end
-
-
if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
+ local newarg,instring={},false
+ for index=1,#arg do
+ local argument=arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1]=gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring=true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
+ instring=false
+ elseif instring then
+ newarg[#newarg]=newarg[#newarg].." "..argument
+ else
+ newarg[#newarg+1]=argument
end
+ end
+ for i=1,-5,-1 do
+ newarg[i]=arg[i]
+ end
+ environment.initializearguments(newarg)
+ environment.originalarguments=mark(newarg)
+ environment.rawarguments=mark(arg)
+ arg={}
+end
- environment.initializearguments(newarg)
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
+end -- of closure
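
A sketch of the argument handling in util-env above (the command line shown is an example; inside mtxrun this runs automatically on the real arg array):

-- suppose arg holds:  { "--pat=dejavu", "--verbose", "file.tex" }
environment.initializearguments(arg)
environment.getargument("verbose")       -- true
environment.getargument("pattern",true)  -- partial match against the abbreviated --pat flag: "dejavu"
environment.files[1]                     -- "file.tex"
environment.setargument("force",true)    -- programmatic override
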
- arg = { } -- prevent duplicate handling
+do -- create closure to overcome 200 locals limit
-end
+package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- weird place ... depends on a not yet loaded module
+-- original size: 5874, stripped down to: 4184
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="kind" then
+ local kind=tex.toks and tex.toks.contextkindtoks
+ if kind and kind~="" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
- return resolvers.findfile(filename,'tex')
+ return resolvers.findfile(filename,'tex')
+end
+function environment.luafile(filename)
+ local resolved=resolvers.findfile(filename,'tex') or ""
+ if resolved~="" then
+ return resolved
+ end
+ resolved=resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved~="" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
+local function strippable(filename)
+ if stripindeed then
+ local modu=modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
end
-
-function environment.luafile(filename)
- local resolved = resolvers.findfile(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
+function environment.luafilechunk(filename,silent)
+ filename=file.replacesuffix(filename,"lua")
+ local fullname=environment.luafile(filename)
+ if fullname and fullname~="" then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if trace_locating then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
end
- resolved = resolvers.findfile(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %a",filename)
end
- return resolvers.findfile(filename,'luatexlibs') or ""
-end
-
-environment.loadedluacode = loadfile -- can be overloaded
-
-function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
- if trace_locating then
- report_lua("loading file %s%s", fullname, not data and " failed" or "")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
- end
- return data
- else
+ return nil
+ end
+end
+function environment.loadluafile(filename,version)
+ local lucname,luaname,chunk
+ local basename=file.removesuffix(filename)
+ if basename==filename then
+ luaname=file.addsuffix(basename,luasuffixes.lua)
+ lucname=file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname=basename
+ lucname=nil
+ end
+ local fullname=(lucname and environment.luafile(lucname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ local v=version
+ if modules and modules[filename] then
+ v=modules[filename].version
+ elseif versions and versions[filename] then
+ v=versions[filename]
+ end
+ if v==version then
+ return true
+ else
if trace_locating then
- report_lua("unknown file %s", filename)
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
end
- return nil
- end
-end
-
--- the next ones can use the previous ones / combine
-
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
+ environment.loadluafile(filename)
+ end
else
- lucname, luaname = nil, basename -- forced suffix
+ return true
end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- report_lua("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
+ end
+ fullname=(luaname and environment.luafile(luaname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- report_lua("unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
+ chunk=loadfile(fullname)
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
end
- return false
+ end
+ return false
end
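
The luat-env loader above resolves files through the resolvers; when the name carries no suffix, environment.loadluafile first looks for a precompiled .luc (or .lub under LuaJIT) sibling. A sketch with placeholder file name and version:

local chunk = environment.luafilechunk("char-def")       -- locate char-def.lua (or its bytecode) and load it
local okay  = environment.loadluafile("char-def",1.001)  -- load and check modules['char-def'].version against 1.001
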
@@ -6854,1370 +8016,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-tab'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
--- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
--- trouble
-
--- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
--- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
--- of work so we delay this till we cleanup
-
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
-
---[[ldx--
-<p>The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized to we
-went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.</p>
+package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
-<p>Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.</p>
-
-<p>I might even decide to reimplement the parser using the latest <l n='lpeg'/> trickery
-as the current variant was written when <l n='lpeg'/> showed up and it's easier now to
-build tables in one go.</p>
---ldx]]--
-
-xml = xml or { }
-local xml = xml
-
-
-local utf = unicode.utf8
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
-local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
-local lpegmatch = lpeg.match
-local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
+-- original size: 42495, stripped down to: 26647
---[[ldx--
-<p>First a hack to enable namespace resolving. A namespace is characterized by
-a <l n='url'/>. The following function associates a namespace prefix with a
-pattern. We use <l n='lpeg'/>, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.</p>
---ldx]]--
-
-xml.xmlns = xml.xmlns or { }
-
-local check = P(false)
-local parse = check
-
---[[ldx--
-<p>The next function associates a namespace prefix with an <l n='url'/>. This
-normally happens independent of parsing.</p>
-
-<typing>
-xml.registerns("mml","mathml")
-</typing>
---ldx]]--
-
-function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- check = check + C(P(lower(pattern))) / namespace
- parse = P { P(check) + 1 * V(1) }
+if not modules then modules={} end modules ['lxml-tab']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+xml=xml or {}
+local xml=xml
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local utfchar=utf.char
+local lpegmatch=lpeg.match
+local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
+local formatters=string.formatters
+xml.xmlns=xml.xmlns or {}
+local check=P(false)
+local parse=check
+function xml.registerns(namespace,pattern)
+ check=check+C(P(lower(pattern)))/namespace
+ parse=P { P(check)+1*V(1) }
end
-
---[[ldx--
-<p>The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-<l n='url'/>. This used for attributes like <t>xmlns:m</t>.</p>
-
-<typing>
-xml.checkns("m","http://www.w3.org/mathml")
-</typing>
---ldx]]--
-
function xml.checkns(namespace,url)
- local ns = lpegmatch(parse,lower(url))
- if ns and namespace ~= ns then
- xml.xmlns[namespace] = ns
- end
+ local ns=lpegmatch(parse,lower(url))
+ if ns and namespace~=ns then
+ xml.xmlns[namespace]=ns
+ end
end
-
---[[ldx--
-<p>Next we provide a way to turn an <l n='url'/> into a registered
-namespace. This used for the <t>xmlns</t> attribute.</p>
-
-<typing>
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-</typing>
-
-This returns <t>mml</t>.
---ldx]]--
-
function xml.resolvens(url)
- return lpegmatch(parse,lower(url)) or ""
-end
-
---[[ldx--
-<p>A namespace in an element can be remapped onto the registered
-one efficiently by using the <t>xml.xmlns</t> table.</p>
---ldx]]--
-
---[[ldx--
-<p>This version uses <l n='lpeg'/>. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte <l n='xml'/> files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the <l n='lpeg'/> implementation we got that down to less 7.3 seconds. Loading the 14
-<l n='context'/> interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.</p>
-
-<p>Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprice that later on have to dedicate quite some
-<l n='lpeg'/> code to it.</p>
-
-<typing>
-<!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
-<!DOCTYPE Something PUBLIC "... ..." "..." >
-<!DOCTYPE Something SYSTEM "... ..." [ ... ] >
-<!DOCTYPE Something SYSTEM "... ..." >
-<!DOCTYPE Something [ ... ] >
-<!DOCTYPE Something >
-</typing>
-
-<p>The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:</p>
-
-<typing>
-local x = xml.convert(somestring)
-</typing>
-
-<p>An optional second boolean argument tells this function not to create a root
-element.</p>
-
-<p>Valid entities are:</p>
-
-<typing>
-<!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
-<!ENTITY xxxx PUBLIC "yyyy" >
-<!ENTITY xxxx "yyyy" >
-</typing>
---ldx]]--
-
--- not just one big nested table capture (lpeg overflow)
-
-local nsremap, resolvens = xml.xmlns, xml.resolvens
-
-local stack = { }
-local top = { }
-local dt = { }
-local at = { }
-local xmlns = { }
-local errorstr = nil
-local entities = { }
-local strip = false
-local cleanup = false
-local utfize = false
-local resolve_predefined = false
-local unify_predefined = false
-
-local dcache = { }
-local hcache = { }
-local acache = { }
-
-local mt = { }
-
+ return lpegmatch(parse,lower(url)) or ""
+end
+local nsremap,resolvens=xml.xmlns,xml.resolvens
+local stack={}
+local top={}
+local dt={}
+local at={}
+local xmlns={}
+local errorstr=nil
+local entities={}
+local strip=false
+local cleanup=false
+local utfize=false
+local resolve_predefined=false
+local unify_predefined=false
+local dcache={}
+local hcache={}
+local acache={}
+local mt={}
local function initialize_mt(root)
- mt = { __index = root } -- will be redefined later
+ mt={ __index=root }
end
-
function xml.setproperty(root,k,v)
- getmetatable(root).__index[k] = v
+ getmetatable(root).__index[k]=v
end
-
function xml.checkerror(top,toclose)
- return "" -- can be set
+ return ""
end
-
local function add_attribute(namespace,tag,value)
- if cleanup and #value > 0 then
- value = cleanup(value) -- new
- end
- if tag == "xmlns" then
- xmlns[#xmlns+1] = resolvens(value)
- at[tag] = value
- elseif namespace == "" then
- at[tag] = value
- elseif namespace == "xmlns" then
- xml.checkns(tag,value)
- at["xmlns:" .. tag] = value
- else
- -- for the moment this way:
- at[namespace .. ":" .. tag] = value
- end
+ if cleanup and #value>0 then
+ value=cleanup(value)
+ end
+ if tag=="xmlns" then
+ xmlns[#xmlns+1]=resolvens(value)
+ at[tag]=value
+ elseif namespace=="" then
+ at[tag]=value
+ elseif namespace=="xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:"..tag]=value
+ else
+ at[namespace..":"..tag]=value
+ end
+end
+local function add_empty(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top=stack[#stack]
+ dt=top.dt
+ local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
+ dt[#dt+1]=t
+ setmetatable(t,mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at={}
+end
+local function add_begin(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ setmetatable(top,mt)
+ dt=top.dt
+ stack[#stack+1]=top
+ at={}
+end
+local function add_end(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local toclose=remove(stack)
+ top=stack[#stack]
+ if #stack<1 then
+ errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ elseif toclose.tg~=tag then
+ errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ end
+ dt=top.dt
+ dt[#dt+1]=toclose
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
end
-
-local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = stack[#stack]
- dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
- setmetatable(t, mt)
- if at.xmlns then
- remove(xmlns)
- end
- at = { }
-end
-
-local function add_begin(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
- setmetatable(top, mt)
- dt = top.dt
- stack[#stack+1] = top
- at = { }
-end
-
-local function add_end(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local toclose = remove(stack)
- top = stack[#stack]
- if #stack < 1 then
- errorstr = format("nothing to close with %s %s", tag, xml.checkerror(top,toclose) or "")
- elseif toclose.tg ~= tag then -- no namespace check
- errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.checkerror(top,toclose) or "")
- end
- dt = top.dt
- dt[#dt+1] = toclose
- -- dt[0] = top -- nasty circular reference when serializing table
- if toclose.at.xmlns then
- remove(xmlns)
- end
-end
-
local function add_text(text)
- if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
- else
- dt[#dt+1] = text
- end
-end
-
-local function add_special(what, spacing, text)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- if strip and (what == "@cm@" or what == "@dt@") then
- -- forget it
- else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
- end
+ if cleanup and #text>0 then
+ dt[#dt+1]=cleanup(text)
+ else
+ dt[#dt+1]=text
+ end
+end
+local function add_special(what,spacing,text)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ if strip and (what=="@cm@" or what=="@dt@") then
+ else
+ dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ end
end
-
local function set_message(txt)
- errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
+ errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-
-local reported_attribute_errors = { }
-
+local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute value: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
end
-
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification: %q",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
-end
-
-xml.placeholders = {
- unknown_dec_entity = function(str) return (str == "" and "&error;") or format("&%s;",str) end,
- unknown_hex_entity = function(str) return format("&#x%s;",str) end,
- unknown_any_entity = function(str) return format("&#x%s;",str) end,
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
-
-local placeholders = xml.placeholders
-
+local placeholders=xml.placeholders
local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
-
local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
--- one level expansion (simple case), no checking done
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
--- parsing in the xml file
-
-local predefined_unified = {
- [38] = "&amp;",
- [42] = "&quot;",
- [47] = "&apos;",
- [74] = "&lt;",
- [76] = "&gt;",
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s),true
+ end
+end
+local rest=(1-P(";"))^0
+local many=P(1)^0
+local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
}
-
-local predefined_simplified = {
- [38] = "&", amp = "&",
- [42] = '"', quot = '"',
- [47] = "'", apos = "'",
- [74] = "<", lt = "<",
- [76] = ">", gt = ">",
-}
-
-local nofprivates = 0xF0000 -- shared but seldom used
-
-local privates_u = { -- unescaped
- [ [[&]] ] = "&amp;",
- [ [["]] ] = "&quot;",
- [ [[']] ] = "&apos;",
- [ [[<]] ] = "&lt;",
- [ [[>]] ] = "&gt;",
+local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
}
-
-local privates_p = {
+local nofprivates=0xF0000
+local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
}
-
-local privates_n = {
- -- keeps track of defined ones
+local privates_p={}
+local privates_n={
}
-
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
-
+local escaped=utf.remapper(privates_u)
local function unescaped(s)
- local p = privates_n[s]
- if not p then
- nofprivates = nofprivates + 1
- p = utfchar(nofprivates)
- privates_n[s] = p
- s = "&" .. s .. ";" -- todo: use char-ent to map to hex
- privates_u[p] = s
- privates_p[p] = s
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ end
+ return p
+end
+local unprivatized=utf.remapper(privates_p)
+xml.privatetoken=unescaped
+xml.unprivatized=unprivatized
+xml.privatecodes=privates_n
+local function handle_hex_entity(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- return p
+ hcache[str]=h
+ end
+ return h
end
-
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
+local function handle_dec_entity(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
else
- return (utfgsub(s,".",privates_p))
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
+ dcache[str]=d
+ end
+ return d
end
-
-xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
-xml.privatecodes = privates_n
-
-local function handle_hex_entity(str)
- local h = hcache[str]
- if not h then
- local n = tonumber(str,16)
- h = unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
- elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
- end
+xml.parsedentitylpeg=parsedentity
+local function handle_any_entity(str)
+ if resolve then
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve)=="function" then
+ a=resolve(str) or entities[str]
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h = "&#x" .. str .. ";"
+ a=entities[str]
end
- hcache[str] = h
- end
- return h
-end
-
-local function handle_dec_entity(str)
- local d = dcache[str]
- if not d then
- local n = tonumber(str)
- d = unify_predefined and predefined_unified[n]
- if d then
+ if a then
+ if type(a)=="function" then
if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
- elseif utfize then
- d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
- end
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
if trace_entities then
- report_xml("found entity &#%s;",str)
+ report_xml("resolving entity &%s; to external %s",str,a)
end
- d = "&#" .. str .. ";"
- end
- dcache[str] = d
- end
- return d
-end
-
-xml.parsedentitylpeg = parsedentity
-
-local function handle_any_entity(str)
- if resolve then
- local a = acache[str] -- per instance ! todo
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (predefined)",str,a)
- end
- else
- if type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
- if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
- end
- a = lpegmatch(parsedentity,a) or a -- for nested
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
- end
- else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
- end
- if a then
- if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
- end
- else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
- end
- if str == "" then
- a = "&error;"
- else
- a = "&" .. str .. ";"
- end
- end
- end
- end
- acache[str] = a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; into %s",str,a)
- acache[str] = a
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
end
- end
- return a
- else
- local a = acache[str]
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- -- one of the predefined
- acache[str] = a
- if trace_entities then
- report_xml("entity &%s; becomes %s",str,tostring(a))
- end
- elseif str == "" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
- end
- a = "&error;"
- acache[str] = a
+ if str=="" then
+ a="&error;"
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
- end
- -- a = "&" .. str .. ";"
- a = unescaped(str)
- acache[str] = a
- end
- end
- return a
- end
+ a="&"..str..";"
+ end
+ end
+ end
+ end
+ acache[str]=a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str]=a
+ end
+ end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a="&error;"
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
+ end
end
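Editorial sketch, not part of the patch: handle_any_entity above is what the parser calls for named entities, and the branch it takes is controlled by the resolve and entities settings that xml.convert (defined further down in this file) copies into the module locals. The entity table below is hypothetical.

    local root = xml.convert("<p>&myent;</p>", {
        resolve_entities = true,
        entities         = { myent = "value" },   -- hypothetical name/value pair
    })
    print(xml.tostring(root))                      -- <p>value</p>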
-
local function handle_end_entity(chr)
- report_xml("error in entity, %q found instead of ';'",chr)
-end
-
-local space = S(' \r\n\t')
-local open = P('<')
-local close = P('>')
-local squote = S("'")
-local dquote = S('"')
-local equal = P('=')
-local slash = P('/')
-local colon = P(':')
-local semicolon = P(';')
-local ampersand = P('&')
-local valid = R('az', 'AZ', '09') + S('_-.')
-local name_yes = C(valid^1) * colon * C(valid^1)
-local name_nop = C(P(true)) * C(valid^1)
-local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
-local spacing = C(space^0)
-
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
-local hexentitycontent = R("AF","af","09")^0
-local decentitycontent = R("09")^0
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flaky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
-
-local function normalentity(k,v ) entities[k] = v end
-local function systementity(k,v,n) entities[k] = v end
-local function publicentity(k,v,n) entities[k] = v end
-
--- todo: separate dtd parser
-
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
-
-local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
-
-local normalentitytype = (doctypename * somespace * value)/normalentity
-local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
-local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-
--- we accept comments in doctypes
-
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
-local definitiondoctype= doctypename * somespace * doctypeset
-local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
-local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
-local simpledoctype = (1-close)^1 -- * balanced^0
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-
-local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
-local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
-local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
-local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
-
--- nicer but slower:
---
--- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
--- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
--- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
--- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
-
-local trailer = space^0 * (text_unparsed/set_message)^0
-
--- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
--- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
--- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
-local grammar_parsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ report_xml("error in entity, %a found instead of %a",chr,";")
+end
+local space=S(' \r\n\t')
+local open=P('<')
+local close=P('>')
+local squote=S("'")
+local dquote=S('"')
+local equal=P('=')
+local slash=P('/')
+local colon=P(':')
+local semicolon=P(';')
+local ampersand=P('&')
+local valid=R('az','AZ','09')+S('_-.')
+local name_yes=C(valid^1)*colon*C(valid^1)
+local name_nop=C(P(true))*C(valid^1)
+local name=name_yes+name_nop
+local utfbom=lpeg.patterns.utfbom
+local spacing=C(space^0)
+local anyentitycontent=(1-open-semicolon-space-close)^0
+local hexentitycontent=R("AF","af","09")^0
+local decentitycontent=R("09")^0
+local parsedentity=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity)
+local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local text_unparsed=C((1-open)^1)
+local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local somespace=space^1
+local optionalspace=space^0
+local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
+local endofattributes=slash*close+close
+local whatever=space*name*optionalspace*equal
+local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
+local attributevalue=value+wrongvalue
+local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
+local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
+local parsedtext=text_parsed/add_text
+local unparsedtext=text_unparsed/add_text
+local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
+local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
+local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
+local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
+local begincomment=open*P("!--")
+local endcomment=P("--")*close
+local begininstruction=open*P("?")
+local endinstruction=P("?")*close
+local begincdata=open*P("![CDATA[")
+local endcdata=P("]]")*close
+local someinstruction=C((1-endinstruction)^0)
+local somecomment=C((1-endcomment )^0)
+local somecdata=C((1-endcdata )^0)
+local function normalentity(k,v ) entities[k]=v end
+local function systementity(k,v,n) entities[k]=v end
+local function publicentity(k,v,n) entities[k]=v end
+local begindoctype=open*P("!DOCTYPE")
+local enddoctype=close
+local beginset=P("[")
+local endset=P("]")
+local doctypename=C((1-somespace-close)^0)
+local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
+local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local normalentitytype=(doctypename*somespace*value)/normalentity
+local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
+local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local definitiondoctype=doctypename*somespace*doctypeset
+local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
+local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
+local simpledoctype=(1-close)^1
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
+local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
+local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
+local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local trailer=space^0*(text_unparsed/set_message)^0
+local grammar_parsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
-
-local grammar_unparsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+local grammar_unparsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
}
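Editorial sketch, not part of the patch: the only difference between the two grammars is whether character data runs through the entity handlers (text_parsed) or is kept verbatim (text_unparsed); _xmlconvert_ below picks the parsed one whenever utfize or resolve is active, which is the default.

    local a = xml.convert("<p>&#x41;&amp;</p>")               -- defaults: utfize_entities = true
    print(xml.tostring(a))                                    -- <p>A&amp;</p>
    local b = xml.convert("<p>&#x41;&amp;</p>", {
        utfize_entities  = false,
        resolve_entities = false,
    })
    -- here grammar_unparsed_text is used and the character data is kept verbatim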
-
--- maybe we will add settings to result as well
-
-local function _xmlconvert_(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- --
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
- resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- entities = settings.entities or { }
- --
- if utfize == nil then
- settings.utfize_entities = true
- utfize = true
- end
- if resolve_predefined == nil then
- settings.resolve_predefined_entities = true
- resolve_predefined = true
- end
- --
- --
- stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
- acache, hcache, dcache = { }, { }, { } -- not stored
- reported_attribute_errors = { }
- if settings.parent_root then
- mt = getmetatable(settings.parent_root)
+local function _xmlconvert_(data,settings)
+ settings=settings or {}
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ stack,top,at,xmlns,errorstr={},{},{},{},nil
+ acache,hcache,dcache={},{},{}
+ reported_attribute_errors={}
+ if settings.parent_root then
+ mt=getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1]=top
+ top.dt={}
+ dt=top.dt
+ if not data or data=="" then
+ errorstr="empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr=""
else
- initialize_mt(top)
- end
- stack[#stack+1] = top
- top.dt = { }
- dt = top.dt
- if not data or data == "" then
- errorstr = "empty xml file"
- elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - parsed text"
- end
- elseif type(data) == "string" then
- if lpegmatch(grammar_unparsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - unparsed text"
- end
+ errorstr="invalid xml file - parsed text"
+ end
+ elseif type(data)=="string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr=""
else
- errorstr = "invalid xml file - no text at all"
- end
- local result
- if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
- setmetatable(stack, mt)
- local errorhandler = settings.error_handler
- if errorhandler == false then
- -- no error message
- else
- errorhandler = errorhandler or xml.errorhandler
- if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
- end
- end
+ errorstr="invalid xml file - unparsed text"
+ end
+ else
+ errorstr="invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr~="" then
+ result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+ setmetatable(stack,mt)
+ local errorhandler=settings.error_handler
+ if errorhandler==false then
else
- result = stack[1]
- end
- if not settings.no_root then
- result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
- setmetatable(result, mt)
- local rdt = result.dt
- for k=1,#rdt do
- local v = rdt[k]
- if type(v) == "table" and not v.special then -- always table -)
- result.ri = k -- rootindex
- v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
- break
- end
- end
- end
- if errorstr and errorstr ~= "" then
- result.error = true
- end
- result.statistics = {
- entities = {
- decimals = dcache,
- hexadecimals = hcache,
- names = acache,
- }
+ errorhandler=errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource=settings.currentresource
+ if currentresource and currentresource~="" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result=stack[1]
+ end
+ if not settings.no_root then
+ result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
+ setmetatable(result,mt)
+ local rdt=result.dt
+ for k=1,#rdt do
+ local v=rdt[k]
+ if type(v)=="table" and not v.special then
+ result.ri=k
+ v.__p__=result
+ break
+ end
+ end
+ end
+ if errorstr and errorstr~="" then
+ result.error=true
+ end
+ result.statistics={
+ entities={
+ decimals=dcache,
+ hexadecimals=hcache,
+ names=acache,
}
- strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
- unify_predefined, cleanup, entities = nil, nil, nil
- stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
- acache, hcache, dcache = nil, nil, nil
- reported_attribute_errors, mt, errorhandler = nil, nil, nil
- return result
+ }
+ strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
+ unify_predefined,cleanup,entities=nil,nil,nil
+ stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
+ acache,hcache,dcache=nil,nil,nil
+ reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ return result
end
-
--- Because we can have a crash (stack issues) with faulty xml, we wrap this one
--- in a protector:
-
function xmlconvert(data,settings)
- local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
- if ok then
- return result
- else
- return _xmlconvert_("")
- end
-end
-
-xml.convert = xmlconvert
-
-function xml.inheritedconvert(data,xmldata) -- xmldata is parent
- local settings = xmldata.settings
- if settings then
- settings.parent_root = xmldata -- to be tested
- end
- -- settings.no_root = true
- local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
- -- xc.settings = nil
- -- xc.entities = nil
- -- xc.special = nil
- -- xc.ri = nil
- -- print(xc.tg)
- return xc
+ local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+xml.convert=xmlconvert
+function xml.inheritedconvert(data,xmldata)
+ local settings=xmldata.settings
+ if settings then
+ settings.parent_root=xmldata
+ end
+ local xc=xmlconvert(data,settings)
+ return xc
end
-
---[[ldx--
-<p>Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).</p>
---ldx]]--
-
function xml.is_valid(root)
- return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
end
-
function xml.package(tag,attributes,data)
- local ns, tg = match(tag,"^(.-):?([^:]+)$")
- local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
- setmetatable(t, mt)
- return t
+ local ns,tg=match(tag,"^(.-):?([^:]+)$")
+ local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
+ setmetatable(t,mt)
+ return t
end
-
function xml.is_valid(root)
- return root and not root.error
+ return root and not root.error
end
-
-xml.errorhandler = report_xml
-
---[[ldx--
-<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.</p>
---ldx]]--
-
+xml.errorhandler=report_xml
function xml.load(filename,settings)
- local data = ""
- if type(filename) == "string" then
- -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
- local f = io.open(filename,'r')
- if f then
- data = f:read("*all")
- f:close()
- end
- elseif filename then -- filehandle
- data = filename:read("*all")
- end
- return xmlconvert(data,settings)
+ local data=""
+ if type(filename)=="string" then
+ local f=io.open(filename,'r')
+ if f then
+ data=f:read("*all")
+ f:close()
+ end
+ elseif filename then
+ data=filename:read("*all")
+ end
+ if settings then
+ settings.currentresource=filename
+ local result=xmlconvert(data,settings)
+ settings.currentresource=nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource=filename })
+ end
end
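Editorial sketch, not part of the patch: xml.load accepts either a file name or an already opened file handle, and now passes the name along as settings.currentresource so that a load error reports which resource failed. The file name below is hypothetical.

    local root = xml.load("somefile.xml")                  -- hypothetical file name
    if xml.is_valid(root) then
        print(xml.name(xml.body(root)))                    -- tag of the document element
    end
    local again = xml.load(io.open("somefile.xml","r"))    -- a file handle works as well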
-
---[[ldx--
-<p>When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
function xml.toxml(data)
- if type(data) == "string" then
- local root = { xmlconvert(data,no_root) }
- return (#root > 1 and root) or root[1]
- else
- return data
- end
+ if type(data)=="string" then
+ local root={ xmlconvert(data,no_root) }
+ return (#root>1 and root) or root[1]
+ else
+ return data
+ end
end
-
---[[ldx--
-<p>For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!</p>
---ldx]]--
-
local function copy(old,tables)
- if old then
- tables = tables or { }
- local new = { }
- if not tables[old] then
- tables[old] = new
- end
- for k,v in next, old do
- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
- end
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return { }
+ if old then
+ tables=tables or {}
+ local new={}
+ if not tables[old] then
+ tables[old]=new
end
-end
-
-xml.copy = copy
-
---[[ldx--
-<p>In <l n='context'/> serializing the tree or parts of the tree is a major
-activity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatenating
-alternative.</p>
---ldx]]--
-
--- todo: add <?xml version='1.0' standalone='yes'?> when not present
-
-function xml.checkbom(root) -- can be made faster
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- return
- end
- end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
+ for k,v in next,old do
+ new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
end
-end
-
---[[ldx--
-<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.</p>
---ldx]]--
-
--- new experimental reorganized serialize
-
-local function verbose_element(e,handlers) -- options
- local handle = handlers.handle
- local serialize = handlers.serialize
- local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
- local ats = eat and next(eat) and { }
- if ats then
- for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,escaped(v))
- end
- end
- if ern and trace_entities and ern ~= ens then
- ens = ern
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
end
- if ens ~= "" then
- if edt and #edt > 0 then
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
- else
- handle("<",ens,":",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("</",ens,":",etg,">")
+ return new
+ else
+ return {}
+ end
+end
+xml.copy=copy
+function xml.checkbom(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
+ insert(dt,2,"\n" )
+ end
+end
+local function verbose_element(e,handlers)
+ local handle=handlers.handle
+ local serialize=handlers.serialize
+ local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
+ local ats=eat and next(eat) and {}
+ if ats then
+ for k,v in next,eat do
+ ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern~=ens then
+ ens=ern
+ end
+ if ens~="" then
+ if edt and #edt>0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
- else
- handle("<",ens,":",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",ens,":",etg,">")
else
- if edt and #edt > 0 then
- if ats then
- handle("<",etg," ",concat(ats," "),">")
- else
- handle("<",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e)) -- option: hexify escaped entities
- else
- serialize(e,handlers)
- end
- end
- handle("</",etg,">")
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt>0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
else
- if ats then
- handle("<",etg," ",concat(ats," "),"/>")
- else
- handle("<",etg,"/>")
- end
+ serialize(e,handlers)
end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
end
+ end
end
-
local function verbose_pi(e,handlers)
- handlers.handle("<?",e.dt[1],"?>")
+ handlers.handle("<?",e.dt[1],"?>")
end
-
local function verbose_comment(e,handlers)
- handlers.handle("<!--",e.dt[1],"-->")
+ handlers.handle("<!--",e.dt[1],"-->")
end
-
local function verbose_cdata(e,handlers)
- handlers.handle("<![CDATA[", e.dt[1],"]]>")
+ handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
-
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
end
-
local function verbose_root(e,handlers)
- handlers.serialize(e.dt,handlers)
+ handlers.serialize(e.dt,handlers)
end
-
local function verbose_text(e,handlers)
- handlers.handle(escaped(e))
+ handlers.handle(escaped(e))
end
-
local function verbose_document(e,handlers)
- local serialize = handlers.serialize
- local functions = handlers.functions
- for i=1,#e do
- local ei = e[i]
- if type(ei) == "string" then
- functions["@tx@"](ei,handlers)
- else
- serialize(ei,handlers)
- end
+ local serialize=handlers.serialize
+ local functions=handlers.functions
+ for i=1,#e do
+ local ei=e[i]
+ if type(ei)=="string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
end
+ end
end
-
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
- end
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
-
local function xserialize(e,handlers)
- local functions = handlers.functions
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
-end
-
-local handlers = { }
-
+ local functions=handlers.functions
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+local handlers={}
local function newhandlers(settings)
- local t = table.copy(handlers.verbose or { }) -- merge
- if settings then
- for k,v in next, settings do
- if type(v) == "table" then
- local tk = t[k] if not tk then tk = { } t[k] = tk end
- for kk,vv in next, v do
- tk[kk] = vv
- end
- else
- t[k] = v
- end
- end
- if settings.name then
- handlers[settings.name] = t
- end
- end
- utilities.storage.mark(t)
- return t
-end
-
-local nofunction = function() end
-
+ local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
+ if settings then
+ for k,v in next,settings do
+ if type(v)=="table" then
+ local tk=t[k] if not tk then tk={} t[k]=tk end
+ for kk,vv in next,v do
+ tk[kk]=vv
+ end
+ else
+ t[k]=v
+ end
+ end
+ if settings.name then
+ handlers[settings.name]=t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+local nofunction=function() end
function xml.sethandlersfunction(handler,name,fnc)
- handler.functions[name] = fnc or nofunction
+ handler.functions[name]=fnc or nofunction
end
-
function xml.gethandlersfunction(handler,name)
- return handler.functions[name]
+ return handler.functions[name]
end
-
function xml.gethandlers(name)
- return handlers[name]
+ return handlers[name]
end
-
newhandlers {
- name = "verbose",
- initialize = false, -- faster than nil and mt lookup
- finalize = false, -- faster than nil and mt lookup
- serialize = xserialize,
- handle = print,
- functions = {
- ["@dc@"] = verbose_document,
- ["@dt@"] = verbose_doctype,
- ["@rt@"] = verbose_root,
- ["@el@"] = verbose_element,
- ["@pi@"] = verbose_pi,
- ["@cm@"] = verbose_comment,
- ["@cd@"] = verbose_cdata,
- ["@tx@"] = verbose_text,
- }
+ name="verbose",
+ initialize=false,
+ finalize=false,
+ serialize=xserialize,
+ handle=print,
+ functions={
+ ["@dc@"]=verbose_document,
+ ["@dt@"]=verbose_doctype,
+ ["@rt@"]=verbose_root,
+ ["@el@"]=verbose_element,
+ ["@pi@"]=verbose_pi,
+ ["@cm@"]=verbose_comment,
+ ["@cd@"]=verbose_cdata,
+ ["@tx@"]=verbose_text,
+ }
}
-
---[[ldx--
-<p>How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):</p>
-
-<lines>
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-</lines>
-
-<p>Beware, these were timing with the old routine but measurements will not be that
-much different I guess.</p>
---ldx]]--
-
--- maybe this will move to lxml-xml
-
local result
-
-local xmlfilehandler = newhandlers {
- name = "file",
- initialize = function(name)
- result = io.open(name,"wb")
- return result
- end,
- finalize = function()
- result:close()
- return true
- end,
- handle = function(...)
- result:write(...)
- end,
+local xmlfilehandler=newhandlers {
+ name="file",
+ initialize=function(name)
+ result=io.open(name,"wb")
+ return result
+ end,
+ finalize=function()
+ result:close()
+ return true
+ end,
+ handle=function(...)
+ result:write(...)
+ end,
}
-
--- no checking on writeability here but not faster either
---
--- local xmlfilehandler = newhandlers {
--- initialize = function(name)
--- io.output(name,"wb")
--- return true
--- end,
--- finalize = function()
--- io.close()
--- return true
--- end,
--- handle = io.write,
--- }
-
function xml.save(root,name)
- serialize(root,xmlfilehandler,name)
+ serialize(root,xmlfilehandler,name)
end
-
local result
-
-local xmlstringhandler = newhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
+local xmlstringhandler=newhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
}
-
-local function xmltostring(root) -- 25% overhead due to collecting
- if not root then
- return ""
- elseif type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+local function xmltostring(root)
+ if not root then
+ return ""
+ elseif type(root)=="string" then
+ return root
+ else
+ return serialize(root,xmlstringhandler) or ""
+ end
end
-
-local function __tostring(root) -- inline
- return (root and xmltostring(root)) or ""
+local function __tostring(root)
+ return (root and xmltostring(root)) or ""
end
-
-initialize_mt = function(root) -- redefinition
- mt = { __tostring = __tostring, __index = root }
+initialize_mt=function(root)
+ mt={ __tostring=__tostring,__index=root }
end
-
-xml.defaulthandlers = handlers
-xml.newhandlers = newhandlers
-xml.serialize = serialize
-xml.tostring = xmltostring
-
---[[ldx--
-<p>The next function operated on the content only and needs a handle function
-that accepts a string.</p>
---ldx]]--
-
+xml.defaulthandlers=handlers
+xml.newhandlers=newhandlers
+xml.serialize=serialize
+xml.tostring=xmltostring
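Editorial sketch, not part of the patch: serialization is driven by handler tables; xml.newhandlers copies the verbose handlers and lets individual callbacks be overridden, which is how the file and string handlers below are built. The handler name "collector" is made up.

    local buffer    = {}
    local collector = xml.newhandlers {
        name   = "collector",
        handle = function(...) buffer[#buffer+1] = table.concat { ... } end,
    }
    xml.serialize(xml.convert("<p>hi</p>"),collector)
    print(table.concat(buffer))                            -- <p>hi</p>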
local function xmlstring(e,handle)
- if not handle or (e.special and e.tg ~= "@rt@") then
- -- nothing
- elseif e.tg then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- xmlstring(edt[i],handle)
- end
- end
- else
- handle(e)
+ if not handle or (e.special and e.tg~="@rt@") then
+ elseif e.tg then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
end
+ else
+ handle(e)
+ end
end
-
-xml.string = xmlstring
-
---[[ldx--
-<p>A few helpers:</p>
---ldx]]--
-
-
+xml.string=xmlstring
function xml.settings(e)
- while e do
- local s = e.settings
- if s then
- return s
- else
- e = e.__p__
- end
+ while e do
+ local s=e.settings
+ if s then
+ return s
+ else
+ e=e.__p__
end
- return nil
+ end
+ return nil
end
-
function xml.root(e)
- local r = e
- while e do
- e = e.__p__
- if e then
- r = e
- end
+ local r=e
+ while e do
+ e=e.__p__
+ if e then
+ r=e
end
- return r
+ end
+ return r
end
-
function xml.parent(root)
- return root.__p__
+ return root.__p__
end
-
function xml.body(root)
- return (root.ri and root.dt[root.ri]) or root -- not ok yet
+ return root.ri and root.dt[root.ri] or root
end
-
function xml.name(root)
- if not root then
- return ""
- elseif root.ns == "" then
- return root.tg
- else
- return root.ns .. ":" .. root.tg
- end
+ if not root then
+ return ""
+ end
+ local ns=root.ns
+ local tg=root.tg
+ if ns=="" then
+ return tg
+ else
+ return ns..":"..tg
+ end
end
-
---[[ldx--
-<p>The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:</p>
---ldx]]--
-
function xml.erase(dt,k)
- if dt then
- if k then
- dt[k] = ""
- else for k=1,#dt do
- dt[1] = { "" }
- end end
- end
+ if dt then
+ if k then
+ dt[k]=""
+ else for k=1,#dt do
+ dt[1]={ "" }
+ end end
+ end
end
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-
-<typing>
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-</typing>
---ldx]]--
-
function xml.assign(dt,k,root)
- if dt and k then
- dt[k] = (type(root) == "table" and xml.body(root)) or root
- return dt[k]
- else
- return xml.body(root)
- end
+ if dt and k then
+ dt[k]=type(root)=="table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+function xml.tocdata(e,wrapper)
+ local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
+ setmetatable(t,getmetatable(e))
+ e.dt={ t }
end
-
--- the following helpers may move
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-<typing>
-xml.tocdata(e)
-xml.tocdata(e,"error")
-</typing>
---ldx]]--
-
-function xml.tocdata(e,wrapper) -- a few more in the aux module
- local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
- if wrapper then
- whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
- end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
- setmetatable(t,getmetatable(e))
- e.dt = { t }
-end
-
function xml.makestandalone(root)
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" then
- local txt = v.dt[1]
- if find(txt,"xml.*version=") then
- v.dt[1] = txt .. " standalone='yes'"
- break
- end
- end
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" then
+ local txt=v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1]=txt.." standalone='yes'"
+ break
end
+ end
end
- return root
+ end
+ return root
end
-
function xml.kind(e)
- local dt = e and e.dt
- if dt then
- local n = #dt
- if n == 1 then
- local d = dt[1]
- if d.special then
- local tg = d.tg
- if tg == "@cd@" then
- return "cdata"
- elseif tg == "@cm" then
- return "comment"
- elseif tg == "@pi@" then
- return "instruction"
- elseif tg == "@dt@" then
- return "declaration"
- end
- elseif type(d) == "string" then
- return "text"
- end
- return "element"
- elseif n > 0 then
- return "mixed"
- end
- end
- return "empty"
+ local dt=e and e.dt
+ if dt then
+ local n=#dt
+ if n==1 then
+ local d=dt[1]
+ if d.special then
+ local tg=d.tg
+ if tg=="@cd@" then
+ return "cdata"
+ elseif tg=="@cm" then
+ return "comment"
+ elseif tg=="@pi@" then
+ return "instruction"
+ elseif tg=="@dt@" then
+ return "declaration"
+ end
+ elseif type(d)=="string" then
+ return "text"
+ end
+ return "element"
+ elseif n>0 then
+ return "mixed"
+ end
+ end
+ return "empty"
end
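Editorial sketch, not part of the patch: xml.kind classifies a node by inspecting its dt table; a single special child maps to cdata, comment, instruction or declaration, a single string to text, a single element to element, several children to mixed, and none to empty.

    print(xml.kind(xml.body(xml.convert("<a>hello</a>"))))          -- text
    print(xml.kind(xml.body(xml.convert("<a><![CDATA[x]]></a>"))))  -- cdata
    print(xml.kind(xml.body(xml.convert("<a><b/><b/></a>"))))       -- mixed
    print(xml.kind(xml.body(xml.convert("<a/>"))))                  -- empty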
@@ -8225,1294 +8998,1060 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- e.ni is only valid after a filter run
--- todo: B/C/[get first match]
-
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local setmetatableindex = table.setmetatableindex
-
--- beware, this is not xpath ... e.g. position is different (currently) and
--- we have reverse-sibling as reversed preceding sibling
-
---[[ldx--
-<p>This module can be used stand alone but also inside <l n='mkiv'/> in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.</p>
-<p>If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.</P>
---ldx]]--
-
---[[ldx--
-<p>Especially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for <l n='context'/> we also need
-this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
-files.</p>
-
-<typing>
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-</typing>
---ldx]]--
-
-local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
-local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
-local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
-
-local report_lpath = logs.reporter("xml","lpath")
-
---[[ldx--
-<p>We've now arrived at an interesting part: accessing the tree using a subset
-of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
-will explain more about its usage in other documents.</p>
---ldx]]--
+package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
-local xml = xml
+-- original size: 48956, stripped down to: 30516
-local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
-local lpathcached = 0 function xml.lpathcached() return lpathcached end
-
-xml.functions = xml.functions or { } -- internal
-local functions = xml.functions
-
-xml.expressions = xml.expressions or { } -- in expressions
-local expressions = xml.expressions
-
-xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
-local finalizers = xml.finalizers
-
-xml.specialhandler = xml.specialhandler or { }
-local specialhandler = xml.specialhandler
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
-finalizers.xml = finalizers.xml or { }
-finalizers.tex = finalizers.tex or { }
-
-local function fallback (t, name)
- local fn = finalizers[name]
- if fn then
- t[name] = fn
- else
- report_lpath("unknown sub finalizer '%s'",tostring(name))
- fn = function() end
- end
- return fn
-end
-
-setmetatableindex(finalizers.xml, fallback)
-setmetatableindex(finalizers.tex, fallback)
-
-xml.defaultprotocol = "xml"
-
--- as xsl does not follow xpath completely here we will also
--- be more liberal especially with regards to the use of | and
--- the rootpath:
---
--- test : all 'test' under current
--- /test : 'test' relative to current
--- a|b|c : set of names
--- (a|b|c) : idem
--- ! : not
---
--- after all, we're not doing transformations but filtering. in
--- addition we provide filter functions (last bit)
---
--- todo: optimizer
---
--- .. : parent
--- * : all kids
--- / : anchor here
--- // : /**/
--- ** : all in between
---
--- so far we had (more practical as we don't transform)
---
--- {/test} : kids 'test' under current node
--- {test} : any kid with tag 'test'
--- {//test} : same as above
-
--- evaluator (needs to be redone, for the moment copied)
-
--- todo: apply_axis(list,notable) and collection vs single
-
-local apply_axis = { }
-
-apply_axis['root'] = function(list)
- local collected = { }
- for l=1,#list do
- local ll = list[l]
- local rt = ll
- while ll do
- ll = ll.__p__
- if ll then
- rt = ll
- end
- end
- collected[l] = rt
- end
- return collected
-end
-
-apply_axis['self'] = function(list)
- return list
-end
-
-apply_axis['child'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local dt = ll.dt
- if dt then -- weird that this is needed
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- end
- end
- ll.en = en
- end
- end
- return collected
+if not modules then modules={} end modules ['lxml-lpt']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
+local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
+local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
+local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
+local report_lpath=logs.reporter("xml","lpath")
+local xml=xml
+local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
+local lpathcached=0 function xml.lpathcached() return lpathcached end
+xml.functions=xml.functions or {}
+local functions=xml.functions
+xml.expressions=xml.expressions or {}
+local expressions=xml.expressions
+xml.finalizers=xml.finalizers or {}
+local finalizers=xml.finalizers
+xml.specialhandler=xml.specialhandler or {}
+local specialhandler=xml.specialhandler
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+finalizers.xml=finalizers.xml or {}
+finalizers.tex=finalizers.tex or {}
+local function fallback (t,name)
+ local fn=finalizers[name]
+ if fn then
+ t[name]=fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn=function() end
+ end
+ return fn
+end
+setmetatableindex(finalizers.xml,fallback)
+setmetatableindex(finalizers.tex,fallback)
+xml.defaultprotocol="xml"
+local apply_axis={}
+apply_axis['root']=function(list)
+ local collected={}
+ for l=1,#list do
+ local ll=list[l]
+ local rt=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ rt=ll
+ end
+ end
+ collected[l]=rt
+ end
+ return collected
+end
+apply_axis['self']=function(list)
+ return list
+end
+apply_axis['child']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local dt=ll.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ end
+ end
+ ll.en=en
+ end
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-
-apply_axis['descendant'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- c = collect(list[l],collected,c)
- end
- return collected
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ c=collect(list[l],collected,c)
+ end
+ return collected
end
-
local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-apply_axis['descendant-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- if ll.special ~= true then -- catch double root
- c = c + 1
- collected[c] = ll
- end
- c = collect(ll,collected,c)
- end
- return collected
-end
-
-apply_axis['ancestor'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['ancestor-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- c = c + 1
- collected[c] = ll
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['parent'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local pl = list[l].__p__
- if pl then
- c = c + 1
- collected[c] = pl
- end
- end
- return collected
-end
-
-apply_axis['attribute'] = function(list)
- return { }
-end
-
-apply_axis['namespace'] = function(list)
- return { }
-end
-
-apply_axis['following'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['preceding'] = function(list) -- incomplete
- return { }
-end
-
-apply_axis['following-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni+1,#d do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['preceding-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=1,ll.ni-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['reverse-sibling'] = function(list) -- reverse preceding
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
-apply_axis['auto-descendant'] = apply_axis['descendant']
-apply_axis['auto-child'] = apply_axis['child']
-apply_axis['auto-self'] = apply_axis['self']
-apply_axis['initial-child'] = apply_axis['child']
-
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ if ll.special~=true then
+ c=c+1
+ collected[c]=ll
+ end
+ c=collect(ll,collected,c)
+ end
+ return collected
+end
+apply_axis['ancestor']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['ancestor-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ c=c+1
+ collected[c]=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['parent']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local pl=list[l].__p__
+ if pl then
+ c=c+1
+ collected[c]=pl
+ end
+ end
+ return collected
+end
+apply_axis['attribute']=function(list)
+ return {}
+end
+apply_axis['namespace']=function(list)
+ return {}
+end
+apply_axis['following']=function(list)
+ return {}
+end
+apply_axis['preceding']=function(list)
+ return {}
+end
+apply_axis['following-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni+1,#d do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['preceding-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=1,ll.ni-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['reverse-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
+apply_axis['auto-descendant']=apply_axis['descendant']
+apply_axis['auto-child']=apply_axis['child']
+apply_axis['auto-self']=apply_axis['self']
+apply_axis['initial-child']=apply_axis['child']
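
For orientation only, a hedged sketch (not part of the patch) of how these axis handlers are meant to be driven: each takes a flat list of element tables carrying the tg, dt, ni and __p__ fields used above and returns a fresh collection. The tiny tree below is made up for illustration.

    -- illustrative only: a minimal fake tree with the fields the handlers expect
    local root  = { tg = "a", dt = { } }
    local child = { tg = "b", dt = { }, __p__ = root, ni = 1 }
    root.dt[1]  = child

    local both = apply_axis['descendant-or-self']({ root })  -- { root, child }
    local up   = apply_axis['parent']({ child })             -- { root }
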
local function apply_nodes(list,directive,nodes)
- -- todo: nodes[1] etc ... negated node name in set ... when needed
- -- ... currently ignored
- local maxn = #nodes
- if maxn == 3 then --optimized loop
- local nns, ntg = nodes[2], nodes[3]
- if not nns and not ntg then -- wildcard
- if directive then
- return list
- else
- return { }
- end
- else
- local collected, c, m, p = { }, 0, 0, nil
- if not nns then -- only check tag
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- if directive then
- if ntg == ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif ntg ~= ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- elseif not ntg then -- only check namespace
- for l=1,#list do
- local ll = list[l]
- local lns = ll.rn or ll.ns
- if lns then
- if directive then
- if lns == nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif lns ~= nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- else -- check both
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = ltg == ntg and lns == nns
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- end
- return collected
- end
+ local maxn=#nodes
+ if maxn==3 then
+ local nns,ntg=nodes[2],nodes[3]
+ if not nns and not ntg then
+ if directive then
+ return list
+ else
+ return {}
+ end
else
- local collected, c, m, p = { }, 0, 0, nil
+ local collected,c,m,p={},0,0,nil
+ if not nns then
for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = false
- for n=1,maxn,3 do
- local nns, ntg = nodes[n+1], nodes[n+2]
- ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
- if ok then
- break
- end
- end
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- return collected
- end
-end
-
-local quit_expression = false
-
-local function apply_expression(list,expression,order)
- local collected, c = { }, 0
- quit_expression = false
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ if directive then
+ if ntg==ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif ntg~=ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ elseif not ntg then
+ for l=1,#list do
+ local ll=list[l]
+ local lns=ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns==nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif lns~=nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ else
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=ltg==ntg and lns==nns
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected,c,m,p={},0,0,nil
for l=1,#list do
- local ll = list[l]
- if expression(list,ll,l,order) then -- nasty, order only valid when n=1
- c = c + 1
- collected[c] = ll
- end
- if quit_expression then
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=false
+ for n=1,maxn,3 do
+ local nns,ntg=nodes[n+1],nodes[n+2]
+ ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
+ if ok then
break
+ end
+ end
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
end
+ end
end
return collected
+ end
end
-
-local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
-
-local spaces = S(" \n\r\t\f")^0
-local lp_space = S(" \n\r\t\f")
-local lp_any = P(1)
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_or = P("|") / " or "
-local lp_and = P("&") / " and "
-
-local lp_builtin = P (
- P("text") / "(ll.dt[1] or '')" + -- fragile
- P("content") / "ll.dt" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
- P("position") / "l" + -- is element in finalizer
- P("firstindex") / "1" +
- P("lastindex") / "(#ll.__p__.dt or 1)" +
- P("firstelement") / "1" +
- P("lastelement") / "(ll.__p__.en or 1)" +
- P("first") / "1" +
- P("last") / "#list" +
- P("rootposition") / "order" +
- P("order") / "order" +
- P("element") / "(ll.ei or 1)" +
- P("index") / "(ll.ni or 1)" +
- P("match") / "(ll.mi or 1)" +
- -- P("namespace") / "ll.ns" +
- P("ns") / "ll.ns"
- ) * ((spaces * P("(") * spaces * P(")"))/"")
-
--- for the moment we keep namespaces with attributes
-
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
-local lp_fastpos = lp_fastpos_n + lp_fastpos_p
-local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
-
-local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
- if expressions[t] then
- return "expr." .. t .. "("
- else
- return "expr.error("
- end
-end
-
-local lparent = P("(")
-local rparent = P(")")
-local noparent = 1 - (lparent+rparent)
-local nested = P{lparent * (noparent + V(1))^0 * rparent}
-local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
-
-local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
-local lp_number = S("+-") * R("09")^1
-local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
-local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
-
+local quit_expression=false
+local function apply_expression(list,expression,order)
+ local collected,c={},0
+ quit_expression=false
+ for l=1,#list do
+ local ll=list[l]
+ if expression(list,ll,l,order) then
+ c=c+1
+ collected[c]=ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
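
A hedged note on the quit_expression flag used above (behavior inferred, not stated in the patch): expressions.quit, defined further down, raises the flag from inside a predicate, so apply_expression stops collecting after the element that triggered it.

    -- hypothetical: "[quit()]" keeps only the first candidate, since quit()
    -- returns true (so that element is collected) and then raises quit_expression
    local firstonly = xml.filter(root,"a/b[quit()]")  -- root: some parsed tree
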
+local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
+local spaces=S(" \n\r\t\f")^0
+local lp_space=S(" \n\r\t\f")
+local lp_any=P(1)
+local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
+local lp_doequal=P("=")/"=="
+local lp_or=P("|")/" or "
+local lp_and=P("&")/" and "
+local lp_builtin=P (
+ P("text")/"(ll.dt[1] or '')"+
+ P("content")/"ll.dt"+
+ P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
+ P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
+ P("ns")/"ll.ns"
+ )*((spaces*P("(")*spaces*P(")"))/"")
+local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
+lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos=lp_fastpos_n+lp_fastpos_p
+local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
+local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
+local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
+ if expressions[t] then
+ return "expr."..t.."("
+ else
+ return "expr.error("
+ end
+end
+local lparent=P("(")
+local rparent=P(")")
+local noparent=1-(lparent+rparent)
+local nested=P{lparent*(noparent+V(1))^0*rparent}
+local value=P(lparent*C((noparent+nested)^0)*rparent)
+local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
+local lp_number=S("+-")*R("09")^1
+local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
+local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
local cleaner
-
-local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
- if expressions[t] then
- s = s and s ~= "" and lpegmatch(cleaner,s)
- if s and s ~= "" then
- return "expr." .. t .. "(ll," .. s ..")"
- else
- return "expr." .. t .. "(ll)"
- end
+local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
+ if expressions[t] then
+ s=s and s~="" and lpegmatch(cleaner,s)
+ if s and s~="" then
+ return "expr."..t.."(ll,"..s..")"
else
- return "expr.error(" .. t .. ")"
+ return "expr."..t.."(ll)"
end
+ else
+ return "expr.error("..t..")"
+ end
end
-
-local content =
- lp_builtin +
- lp_attribute +
- lp_special +
- lp_noequal + lp_doequal +
- lp_or + lp_and +
- lp_reserved +
- lp_lua_function + lp_function +
- lp_content + -- too fragile
- lp_child +
- lp_any
-
-local converter = Cs (
- lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
+local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
+ lp_child+lp_any
+local converter=Cs (
+ lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
)
-
-cleaner = Cs ( (
- lp_reserved +
- lp_number +
- lp_string +
-1 )^1 )
-
-
-
-local template_e = [[
+cleaner=Cs ((
+ lp_reserved+lp_number+lp_string+1 )^1 )
+local template_e=[[
local expr = xml.expressions
return function(list,ll,l,order)
return %s
end
]]
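
As a hedged illustration (an assumption about intended behavior, not taken from the patch): the converter defined above turns an lpath predicate into Lua source, and template_e wraps that source into an evaluator, which is what register_expression does below. A predicate such as position()>1 should come out roughly as follows.

    -- hypothetical walk through the compilation step
    local src = lpegmatch(converter,"position()>1")   -- expected: "l>1"
    local fn  = load(format(template_e,src))()        -- function(list,ll,l,order) return l>1 end
    print(fn(nil,nil,2,nil))                          -- true: matches from the second hit onward
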
-
-local template_f_y = [[
+local template_f_y=[[
local finalizer = xml.finalizers['%s']['%s']
return function(collection)
return finalizer(collection,%s)
end
]]
-
-local template_f_n = [[
+local template_f_n=[[
return xml.finalizers['%s']['%s']
]]
-
---
-
-local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
-local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
-local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
-local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
-local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
-local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
-local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
-local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
-local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
-local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
-local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
-local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
-local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
-local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
-local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
-
-local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
-local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
-local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
-local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
-
-local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
-
-local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
-
-local skip = { }
-
+local register_self={ kind="axis",axis="self" }
+local register_parent={ kind="axis",axis="parent" }
+local register_descendant={ kind="axis",axis="descendant" }
+local register_child={ kind="axis",axis="child" }
+local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
+local register_root={ kind="axis",axis="root" }
+local register_ancestor={ kind="axis",axis="ancestor" }
+local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
+local register_attribute={ kind="axis",axis="attribute" }
+local register_namespace={ kind="axis",axis="namespace" }
+local register_following={ kind="axis",axis="following" }
+local register_following_sibling={ kind="axis",axis="following-sibling" }
+local register_preceding={ kind="axis",axis="preceding" }
+local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
+local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
+local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
+local register_auto_descendant={ kind="axis",axis="auto-descendant" }
+local register_auto_self={ kind="axis",axis="auto-self" }
+local register_auto_child={ kind="axis",axis="auto-child" }
+local register_initial_child={ kind="axis",axis="initial-child" }
+local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
+local skip={}
local function errorrunner_e(str,cnv)
- if not skip[str] then
- report_lpath("error in expression: %s => %s",str,cnv)
- skip[str] = cnv or str
- end
- return false
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str]=cnv or str
+ end
+ return false
end
local function errorrunner_f(str,arg)
- report_lpath("error in finalizer: %s(%s)",str,arg or "")
- return false
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
end
-
local function register_nodes(nodetest,nodes)
- return { kind = "nodes", nodetest = nodetest, nodes = nodes }
+ return { kind="nodes",nodetest=nodetest,nodes=nodes }
end
-
local function register_expression(expression)
- local converted = lpegmatch(converter,expression)
- local runner = loadstring(format(template_e,converted))
- runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
- return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
+ local converted=lpegmatch(converter,expression)
+ local runner=load(format(template_e,converted))
+ runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind="expression",expression=expression,converted=converted,evaluator=runner }
end
-
local function register_finalizer(protocol,name,arguments)
- local runner
- if arguments and arguments ~= "" then
- runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
- else
- runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name))
- end
- runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
- return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
-end
-
-local expression = P { "ex",
- ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
- sq = "'" * (1 - S("'"))^0 * "'",
- dq = '"' * (1 - S('"'))^0 * '"',
+ local runner
+ if arguments and arguments~="" then
+ runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
+end
+local expression=P { "ex",
+ ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
+ sq="'"*(1-S("'"))^0*"'",
+ dq='"'*(1-S('"'))^0*'"',
}
-
-local arguments = P { "ar",
- ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
- nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
- sq = P("'") * (1 - P("'"))^0 * P("'"),
- dq = P('"') * (1 - P('"'))^0 * P('"'),
+local arguments=P { "ar",
+ ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
+ nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
+ sq=P("'")*(1-P("'"))^0*P("'"),
+ dq=P('"')*(1-P('"'))^0*P('"'),
}
-
--- todo: better arg parser
-
local function register_error(str)
- return { kind = "error", error = format("unparsed: %s",str) }
-end
-
--- there is a difference in * and /*/ and so we need to catch a few special cases
-
-local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
-local special_2 = P("/") * Cc(register_auto_self)
-local special_3 = P("") * Cc(register_auto_self)
-
-local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
-local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
-
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
-
- patterns = spaces * V("protocol") * spaces * (
- ( V("special") * spaces * P(-1) ) +
- ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
- ),
-
- protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
-
- -- the / is needed for // as descendant or self is somewhat special
- -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
- step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
-
- axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
- V("descendant_or_self") + V("following_sibling") + V("following") +
- V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
- #(1-P(-1)) * Cc(register_auto_child),
-
- special = special_1 + special_2 + special_3,
-
- initial = (P("/") * spaces * Cc(register_initial_child))^-1,
-
- error = (P(1)^1) / register_error,
-
- shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
-
- shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
-
- s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * no_nextcolon * Cc(register_child ),
- s_parent = P("..") * Cc(register_parent ),
- s_self = P("." ) * Cc(register_self ),
- s_root = P("^^") * Cc(register_root ),
- s_ancestor = P("^") * Cc(register_ancestor ),
-
- descendant = P("descendant::") * Cc(register_descendant ),
- child = P("child::") * Cc(register_child ),
- parent = P("parent::") * Cc(register_parent ),
- self = P("self::") * Cc(register_self ),
- root = P('root::') * Cc(register_root ),
- ancestor = P('ancestor::') * Cc(register_ancestor ),
- descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
- ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
- -- attribute = P('attribute::') * Cc(register_attribute ),
- -- namespace = P('namespace::') * Cc(register_namespace ),
- following = P('following::') * Cc(register_following ),
- following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
- preceding = P('preceding::') * Cc(register_preceding ),
- preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
- reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
-
- nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
-
- expressions = expression / register_expression,
-
- letters = R("az")^1,
- name = (1-S("/[]()|:*!"))^1, -- make inline
- negate = P("!") * Cc(false),
-
- nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
- nodetest = V("negate") + Cc(true),
- nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
- nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
-
- finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
-
+ return { kind="error",error=format("unparsed: %s",str) }
+end
+local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
+local special_2=P("/")*Cc(register_auto_self)
+local special_3=P("")*Cc(register_auto_self)
+local no_nextcolon=P(-1)+#(1-P(":"))
+local no_nextlparent=P(-1)+#(1-P("("))
+local pathparser=Ct { "patterns",
+ patterns=spaces*V("protocol")*spaces*(
+ (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
+ ),
+ protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
+ step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
+ axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
+ special=special_1+special_2+special_3,
+ initial=(P("/")*spaces*Cc(register_initial_child))^-1,
+ error=(P(1)^1)/register_error,
+ shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
+ shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
+ s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
+ s_descendant=P("**")*Cc(register_descendant),
+ s_child=P("*")*no_nextcolon*Cc(register_child ),
+ s_parent=P("..")*Cc(register_parent ),
+ s_self=P("." )*Cc(register_self ),
+ s_root=P("^^")*Cc(register_root ),
+ s_ancestor=P("^")*Cc(register_ancestor ),
+ descendant=P("descendant::")*Cc(register_descendant ),
+ child=P("child::")*Cc(register_child ),
+ parent=P("parent::")*Cc(register_parent ),
+ self=P("self::")*Cc(register_self ),
+ root=P('root::')*Cc(register_root ),
+ ancestor=P('ancestor::')*Cc(register_ancestor ),
+ descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
+ ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
+ following=P('following::')*Cc(register_following ),
+ following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
+ preceding=P('preceding::')*Cc(register_preceding ),
+ preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
+ reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
+ nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
+ expressions=expression/register_expression,
+ letters=R("az")^1,
+ name=(1-S("/[]()|:*!"))^1,
+ negate=P("!")*Cc(false),
+ nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
+ nodetest=V("negate")+Cc(true),
+ nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
+ wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
+ nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
+ finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
}
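
For readability, a hedged summary of what the grammar above accepts, inferred from the rules rather than stated in the patch:

    -- shortcuts:  /  (descendant-or-self)   ** (descendant)   * (child)
    --             .. (parent)   . (self)   ^^ (root)   ^ (ancestor)
    -- long axes:  child:: parent:: self:: root:: ancestor:: descendant:: ...
    -- node tests: optionally negated names, ns:tag pairs, * wildcards, a|b sets
    -- predicates: [ ... ] compiled via register_expression
    -- finalizers: a trailing name(arguments) handled by register_finalizer
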
-
-xmlpatterns.pathparser = pathparser
-
-local cache = { }
-
+xmlpatterns.pathparser=pathparser
+local cache={}
local function nodesettostring(set,nodetest)
- local t = { }
- for i=1,#set,3 do
- local directive, ns, tg = set[i], set[i+1], set[i+2]
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
- end
- if nodetest == false then
- return format("not(%s)",concat(t,"|"))
- else
- return concat(t,"|")
- end
+ local t={}
+ for i=1,#set,3 do
+ local directive,ns,tg=set[i],set[i+1],set[i+2]
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i]=(directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest==false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
end
-
local function tagstostring(list)
- if #list == 0 then
- return "no elements"
- else
- local t = { }
- for i=1, #list do
- local li = list[i]
- local ns, tg = li.ns, li.tg
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- end
- return concat(t," ")
- end
-end
-
-xml.nodesettostring = nodesettostring
-
-local lpath -- we have a harmless kind of circular reference
-
-local lshowoptions = { functions = false }
-
+ if #list==0 then
+ return "no elements"
+ else
+ local t={}
+ for i=1,#list do
+ local li=list[i]
+ local ns,tg=li.ns,li.tg
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+xml.nodesettostring=nodesettostring
+local lpath
+local lshowoptions={ functions=false }
local function lshow(parsed)
- if type(parsed) == "string" then
- parsed = lpath(parsed)
- end
- report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ if type(parsed)=="string" then
+ parsed=lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
end
-
-xml.lshow = lshow
-
+xml.lshow=lshow
local function add_comment(p,str)
- local pc = p.comment
- if not pc then
- p.comment = { str }
- else
- pc[#pc+1] = str
- end
-end
-
-lpath = function (pattern) -- the gain of caching is rather minimal
- lpathcalls = lpathcalls + 1
- if type(pattern) == "table" then
- return pattern
+ local pc=p.comment
+ if not pc then
+ p.comment={ str }
+ else
+ pc[#pc+1]=str
+ end
+end
+lpath=function (pattern)
+ lpathcalls=lpathcalls+1
+ if type(pattern)=="table" then
+ return pattern
+ else
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcached=lpathcached+1
else
- local parsed = cache[pattern]
- if parsed then
- lpathcached = lpathcached + 1
+ parsed=lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern=pattern
+ local np=#parsed
+ if np==0 then
+ parsed={ pattern=pattern,register_self,state="parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
else
- parsed = lpegmatch(pathparser,pattern)
- if parsed then
- parsed.pattern = pattern
- local np = #parsed
- if np == 0 then
- parsed = { pattern = pattern, register_self, state = "parsing error" }
- report_lpath("parsing error in '%s'",pattern)
- lshow(parsed)
- else
- -- we could have done this with a more complex parser but this
- -- is cleaner
- local pi = parsed[1]
- if pi.axis == "auto-child" then
- if false then
- add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
- parsed[1] = register_auto_descendant_or_self
- else
- add_comment(parsed, "auto-child replaced by auto-descendant")
- parsed[1] = register_auto_descendant
- end
- elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
- add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
- remove(parsed,1)
- end
- local np = #parsed -- can have changed
- if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
- end
- end
+ local pi=parsed[1]
+ if pi.axis=="auto-child" then
+ if false then
+ add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
+ parsed[1]=register_auto_descendant_or_self
else
- parsed = { pattern = pattern }
- end
- cache[pattern] = parsed
- if trace_lparse and not trace_lprofile then
- lshow(parsed)
- end
- end
- return parsed
+ add_comment(parsed,"auto-child replaced by auto-descendant")
+ parsed[1]=register_auto_descendant
+ end
+ elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
+ add_comment(parsed,"initial-child removed")
+ remove(parsed,1)
+ end
+ local np=#parsed
+ if np>1 then
+ local pnp=parsed[np]
+ if pnp.kind=="nodes" and pnp.nodetest==true then
+ local nodes=pnp.nodes
+ if nodes[1]==true and nodes[2]==false and nodes[3]==false then
+ add_comment(parsed,"redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed={ pattern=pattern }
+ end
+ cache[pattern]=parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
end
+ return parsed
+ end
end
-
-xml.lpath = lpath
-
--- we can move all calls inline and then merge the trace back
--- technically we can combine axis and the next nodes which is
--- what we did before but this is a bit cleaner (but slower too)
--- but interesting is that it's not that much faster when we
--- go inline
---
--- beware: we need to return a collection even when we filter
--- else the (simple) cache gets messed up
-
--- caching found lookups does not save that much (max .1 sec on an 8 sec run)
--- and it also messes up finalizers
-
--- watch out: when there is a finalizer, it's always called as there
--- can be cases that a finalizer returns (or does) something in case
--- there is no match; an example of this is count()
-
-local profiled = { } xml.profiled = profiled
-
+xml.lpath=lpath
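
A small usage sketch, with the expected shape stated as an assumption rather than a fact: lpath compiles a pattern string into a list of steps (kind "axis", "nodes", "expression" or "finalizer") that the apply functions below walk over, and caches the result per pattern.

    -- hypothetical: inspect what the parser produces for a simple pattern
    local parsed = xml.lpath("a/b")
    -- expected: parsed.pattern == "a/b", parsed[1] an axis step (auto-child is
    -- rewritten to auto-descendant above), followed by nodes/axis steps for a and b
    print(parsed.pattern, #parsed, parsed[1].axis)
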
+local profiled={} xml.profiled=profiled
local function profiled_apply(list,parsed,nofparsed,order)
- local p = profiled[parsed.pattern]
- if p then
- p.tested = p.tested + 1
- else
- p = { tested = 1, matched = 0, finalized = 0 }
- profiled[parsed.pattern] = p
- end
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected) -- no check on # here
- p.matched = p.matched + 1
- p.finalized = p.finalized + 1
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- p.finalized = p.finalized + 1
- return collected
- end
- return nil
- end
- end
- if collected then
- p.matched = p.matched + 1
+ local p=profiled[parsed.pattern]
+ if p then
+ p.tested=p.tested+1
+ else
+ p={ tested=1,matched=0,finalized=0 }
+ profiled[parsed.pattern]=p
+ end
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ p.matched=p.matched+1
+ p.finalized=p.finalized+1
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ p.finalized=p.finalized+1
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ if collected then
+ p.matched=p.matched+1
+ end
+ return collected
end
-
local function traced_apply(list,parsed,nofparsed,order)
- if trace_lparse then
- lshow(parsed)
- end
- report_lpath("collecting: %s",parsed.pattern)
- report_lpath("root tags : %s",tagstostring(list))
- report_lpath("order : %s",order or "unset")
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
- return collected
- end
- return nil
- end
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
end
- return collected
+ end
+ return collected
end
-
local function normal_apply(list,parsed,nofparsed,order)
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- local axis = pi.axis
- if axis ~= "self" then
- collected = apply_axis[axis](collected)
- end
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- return pi.finalizer(collected)
- end
- if not collected or #collected == 0 then
- local pf = i < nofparsed and parsed[nofparsed].finalizer
- if pf then
- return pf(collected) -- can be anything
- end
- return nil
- end
- end
- return collected
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ local axis=pi.axis
+ if axis~="self" then
+ collected=apply_axis[axis](collected)
+ end
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected==0 then
+ local pf=i<nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected)
+ end
+ return nil
+ end
+ end
+ return collected
end
-
-
local function applylpath(list,pattern)
- if not list then
- return
- end
- local parsed = cache[pattern]
- if parsed then
- lpathcalls = lpathcalls + 1
- lpathcached = lpathcached + 1
- elseif type(pattern) == "table" then
- lpathcalls = lpathcalls + 1
- parsed = pattern
- else
- parsed = lpath(pattern) or pattern
- end
- if not parsed then
- return
- end
- local nofparsed = #parsed
- if nofparsed == 0 then
- return -- something is wrong
- end
- if not trace_lpath then
- return normal_apply ({ list },parsed,nofparsed,list.mi)
- elseif trace_lprofile then
- return profiled_apply({ list },parsed,nofparsed,list.mi)
- else
- return traced_apply ({ list },parsed,nofparsed,list.mi)
- end
-end
-
-xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
-
---[[ldx--
-<p>This is the main filter function. It returns whatever is asked for.</p>
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return applylpath(root,pattern)
-end
-
--- internal (parsed)
-
-expressions.child = function(e,pattern)
- return applylpath(e,pattern) -- todo: cache
-end
-expressions.count = function(e,pattern) -- what if pattern == empty or nil
- local collected = applylpath(e,pattern) -- todo: cache
- return pattern and (collected and #collected) or 0
-end
-
--- external
-
-expressions.oneof = function(s,...) -- slow
- local t = {...} for i=1,#t do if s == t[i] then return true end end return false
-end
-expressions.error = function(str)
- xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
- return false
-end
-expressions.undefined = function(s)
- return s == nil
-end
-
-expressions.quit = function(s)
- if s or s == nil then
- quit_expression = true
- end
- return true
-end
-
-expressions.print = function(...)
- print(...)
- return true
-end
-
-expressions.contains = find
-expressions.find = find
-expressions.upper = upper
-expressions.lower = lower
-expressions.number = tonumber
-expressions.boolean = toboolean
-
+ if not list then
+ return
+ end
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcalls=lpathcalls+1
+ lpathcached=lpathcached+1
+ elseif type(pattern)=="table" then
+ lpathcalls=lpathcalls+1
+ parsed=pattern
+ else
+ parsed=lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed=#parsed
+ if nofparsed==0 then
+ return
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+xml.applylpath=applylpath
+function xml.filter(root,pattern)
+ return applylpath(root,pattern)
+end
+expressions.child=function(e,pattern)
+ return applylpath(e,pattern)
+end
+expressions.count=function(e,pattern)
+ local collected=applylpath(e,pattern)
+ return pattern and (collected and #collected) or 0
+end
+expressions.oneof=function(s,...)
+ for i=1,select("#",...) do
+ if s==select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+expressions.error=function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+expressions.undefined=function(s)
+ return s==nil
+end
+expressions.quit=function(s)
+ if s or s==nil then
+ quit_expression=true
+ end
+ return true
+end
+expressions.print=function(...)
+ print(...)
+ return true
+end
+expressions.contains=find
+expressions.find=find
+expressions.upper=upper
+expressions.lower=lower
+expressions.number=tonumber
+expressions.boolean=toboolean
function expressions.contains(str,pattern)
- local t = type(str)
- if t == "string" then
- if find(str,pattern) then
- return true
- end
- elseif t == "table" then
- for i=1,#str do
- local d = str[i]
- if type(d) == "string" and find(d,pattern) then
- return true
- end
- end
+ local t=type(str)
+ if t=="string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t=="table" then
+ for i=1,#str do
+ local d=str[i]
+ if type(d)=="string" and find(d,pattern) then
+ return true
+ end
end
- return false
+ end
+ return false
end
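
Putting the pieces together, a hedged end-to-end example (root stands for some parsed tree, the pattern is made up): predicates in square brackets are compiled by the converter above and evaluated against xml.expressions, so a helper like contains can be called straight from a pattern.

    -- hypothetical: select <b> children of <a> whose text mentions "foo";
    -- the [ ... ] part should compile to expr.contains((ll.dt[1] or ''),'foo')
    local hits = xml.filter(root,"a/b[contains(text(),'foo')]")
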
-
--- user interface
-
local function traverse(root,pattern,handle)
- report_lpath("use 'xml.selection' instead for '%s'",pattern)
- local collected = applylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- handle(r,r.dt,e.ni)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local r=e.__p__
+ handle(r,r.dt,e.ni)
end
+ end
end
-
local function selection(root,pattern,handle)
- local collected = applylpath(root,pattern)
- if collected then
- if handle then
- for c=1,#collected do
- handle(collected[c])
- end
- else
- return collected
- end
- end
-end
-
-xml.traverse = traverse -- old method, r, d, k
-xml.selection = selection -- new method, simple handle
-
-
--- generic function finalizer (independant namespace)
-
-local function dofunction(collected,fnc)
- if collected then
- local f = functions[fnc]
- if f then
- for c=1,#collected do
- f(collected[c])
- end
- else
- report_lpath("unknown function '%s'",fnc)
- end
+ local collected=applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
end
+ end
end
-
-finalizers.xml["function"] = dofunction
-finalizers.tex["function"] = dofunction
-
--- functions
-
-expressions.text = function(e,n)
- local rdt = e.__p__.dt
- return (rdt and rdt[n]) or ""
-end
-
-expressions.name = function(e,n) -- ns + tg
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = type(e) == "table" and e
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
+xml.traverse=traverse
+xml.selection=selection
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f=functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- if found then
- local ns, tg = found.rn or found.ns or "", found.tg
- if ns ~= "" then
- return ns .. ":" .. tg
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+finalizers.xml["function"]=dofunction
+finalizers.tex["function"]=dofunction
+expressions.text=function(e,n)
+ local rdt=e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+expressions.name=function(e,n)
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=type(e)=="table" and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
else
- return tg
+ n=n-1
end
- else
- return ""
+ end
end
-end
-
-expressions.tag = function(e,n) -- only tg
- if not e then
- return ""
+ end
+ if found then
+ local ns,tg=found.rn or found.ns or "",found.tg
+ if ns~="" then
+ return ns..":"..tg
else
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = (type(e) == "table") and e -- seems to fail
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- return (found and found.tg) or ""
+ return tg
end
+ else
+ return ""
+ end
end
-
---[[ldx--
-<p>Often using an iterator looks nicer in the code than passing handler
-functions. The <l n='lua'/> book describes how to use coroutines for that
-purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
-code like:</p>
-
-<typing>
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-</typing>
---ldx]]--
-
-local wrap, yield = coroutine.wrap, coroutine.yield
-
-function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
- else
- return wrap(function() for c=1,#collected do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
+expressions.tag=function(e,n)
+ if not e then
+ return ""
+ else
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=(type(e)=="table") and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+local dummy=function() end
+function xml.elements(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ end
+end
+function xml.collected(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c=#collected+1
+ return function()
+ if c>1 then
+ c=c-1
+ return collected[c]
+ end
end
- return wrap(function() end)
-end
-
-function xml.collected(root,pattern,reverse) -- e
- local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
- else
- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
- end
+ else
+ local n,c=#collected,0
+ return function()
+ if c<n then
+ c=c+1
+ return collected[c]
+ end
end
- return wrap(function() end)
+ end
end
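
The closure-based replacements keep the iterator interface that the old coroutine version documented; a usage sketch (the file name is hypothetical):

    -- same calling convention as before, now driven by plain closures
    for r,d,k in xml.elements(xml.load("text.xml"),"title") do
        print(d[k])   -- r is the parent element, d its dt table, k the child index
    end
    for e in xml.collected(xml.load("text.xml"),"title") do
        print(e)      -- the matched element itself
    end
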
-
--- handy
-
function xml.inspect(collection,pattern)
- pattern = pattern or "."
- for e in xml.collected(collection,pattern or ".") do
- report_lpath("pattern %q\n\n%s\n",pattern,xml.tostring(e))
- end
+ pattern=pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+local function split(e)
+ local dt=e.dt
+ if dt then
+ for i=1,#dt do
+ local dti=dt[i]
+ if type(dti)=="string" then
+ dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti=gsub(dti,"[\n\r]+","\n\n")
+ dt[i]=dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
end
@@ -9520,102 +10059,68 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-mis'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local xml, lpeg, string = xml, lpeg, string
+package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
-local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub, match = string.format, string.gsub, string.match
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
---[[ldx--
-<p>The following helper functions best belong to the <t>lxml-ini</t>
-module. Some are here because we need them in the <t>mk</t>
-document and other manuals, others came up when playing with
-this module. Since this module is also used in <l n='mtxrun'/> we've
-put them here instead of loading more modules there than needed.</p>
---ldx]]--
-
-local function xmlgsub(t,old,new) -- will be replaced
- local dt = t.dt
- if dt then
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "string" then
- dt[k] = gsub(v,old,new)
- else
- xmlgsub(v,old,new)
- end
- end
- end
-end
+-- original size: 3684, stripped down to: 1957
-
-function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
- if d and k then
- local dkm = d[k-1]
- if dkm and type(dkm) == "string" then
- local s = match(dkm,"\n(%s+)")
- xmlgsub(dk,"\n"..rep(" ",#s),"\n")
- end
- end
-end
-
-
-
--- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
---
--- 1021:0335:0287:0247
-
--- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
---
--- 1559:0257:0288:0190 (last one suggested by roberto)
-
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
-local normal = (1 - S("<&>"))^0
-local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
-local escaped = Cs(normal * (special * normal)^0)
-
--- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
-
-local normal = (1 - S"&")^0
-local special = P("&lt;")/"<" + P("&gt;")/">" + P("&amp;")/"&"
-local unescaped = Cs(normal * (special * normal)^0)
-
--- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
-
-local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-
-xmlpatterns.escaped = escaped
-xmlpatterns.unescaped = unescaped
-xmlpatterns.cleansed = cleansed
-
-function xml.escaped (str) return lpegmatch(escaped,str) end
+if not modules then modules={} end modules ['lxml-mis']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local xml,lpeg,string=xml,lpeg,string
+local concat=table.concat
+local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
+local format,gsub,match=string.format,string.gsub,string.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+local function xmlgsub(t,old,new)
+ local dt=t.dt
+ if dt then
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="string" then
+ dt[k]=gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+function xml.stripleadingspaces(dk,d,k)
+ if d and k then
+ local dkm=d[k-1]
+ if dkm and type(dkm)=="string" then
+ local s=match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
+local normal=(1-S("<&>"))^0
+local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
+local escaped=Cs(normal*(special*normal)^0)
+local normal=(1-S"&")^0
+local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
+local unescaped=Cs(normal*(special*normal)^0)
+local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
+xmlpatterns.escaped=escaped
+xmlpatterns.unescaped=unescaped
+xmlpatterns.cleansed=cleansed
+function xml.escaped (str) return lpegmatch(escaped,str) end
function xml.unescaped(str) return lpegmatch(unescaped,str) end
-function xml.cleansed (str) return lpegmatch(cleansed,str) end
-
--- this might move
-
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
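
A hedged round-trip example of the three helpers just defined (expected results inferred from the patterns above):

    print(xml.escaped("a<b&c"))           -- a&lt;b&amp;c
    print(xml.unescaped("a&lt;b&amp;c"))  -- a<b&c
    print(xml.cleansed("x<i>y</i>z"))     -- xyz
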
function xml.fillin(root,pattern,str,check)
- local e = xml.first(root,pattern)
- if e then
- local n = #e.dt
- if not check or n == 0 or (n == 1 and e.dt[1] == "") then
- e.dt = { str }
- end
+ local e=xml.first(root,pattern)
+ if e then
+ local n=#e.dt
+ if not check or n==0 or (n==1 and e.dt[1]=="") then
+ e.dt={ str }
end
+ end
end
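
A short usage sketch for xml.fillin (element name and default text are hypothetical): it fills the first match of pattern with str, and with check set it only does so when that element is still empty.

    -- hypothetical: give an empty <title/> a default text
    xml.fillin(root,"title","untitled",true)
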
@@ -9623,765 +10128,692 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-aux'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- not all functions here make sense anymore but we keep them for
--- compatibility reasons
-
-local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
-local report_xml = logs.reporter("xml")
-
-local xml = xml
-
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
-local xmlinheritedconvert = xml.inheritedconvert
-local xmlapplylpath = xml.applylpath
-local xmlfilter = xml.filter
-
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
-local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
-local utfbyte = utf.byte
+-- original size: 23804, stripped down to: 16817
+if not modules then modules={} end modules ['lxml-aux']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local report_xml=logs.reporter("xml")
+local xml=xml
+local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlinheritedconvert=xml.inheritedconvert
+local xmlapplylpath=xml.applylpath
+local xmlfilter=xml.filter
+local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
+local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
+local utfbyte=utf.byte
local function report(what,pattern,c,e)
- report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
-
local function withelements(e,handle,depth)
- if e and handle then
- local edt = e.dt
- if edt then
- depth = depth or 0
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- handle(e,depth)
- withelements(e,handle,depth+1)
- end
- end
+ if e and handle then
+ local edt=e.dt
+ if edt then
+ depth=depth or 0
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
end
+ end
end
+ end
end
-
-xml.withelements = withelements
-
-function xml.withelement(e,n,handle) -- slow
- if e and n ~= 0 and handle then
- local edt = e.dt
- if edt then
- if n > 0 then
- for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == 1 then
- handle(ei)
- return
- else
- n = n - 1
- end
- end
- end
- elseif n < 0 then
- for i=#edt,1,-1 do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == -1 then
- handle(ei)
- return
- else
- n = n + 1
- end
- end
- end
+xml.withelements=withelements
+function xml.withelement(e,n,handle)
+ if e and n~=0 and handle then
+ local edt=e.dt
+ if edt then
+ if n>0 then
+ for i=1,#edt do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==1 then
+ handle(ei)
+ return
+ else
+ n=n-1
end
+ end
end
+ elseif n<0 then
+ for i=#edt,1,-1 do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==-1 then
+ handle(ei)
+ return
+ else
+ n=n+1
+ end
+ end
+ end
+ end
end
+ end
end
-
function xml.each(root,pattern,handle,reverse)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
- end
- end
- return collected
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
+ return collected
+ end
end
-
function xml.processattributes(root,pattern,handle)
- local collected = xmlapplylpath(root,pattern)
- if collected and handle then
- for c=1,#collected do
- handle(collected[c].at)
- end
+ local collected=xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
end
- return collected
+ end
+ return collected
end
-
---[[ldx--
-<p>The following functions collect elements and texts.</p>
---ldx]]--
-
--- are these still needed -> lxml-cmp.lua
-
-function xml.collect(root, pattern)
- return xmlapplylpath(root,pattern)
+function xml.collect(root,pattern)
+ return xmlapplylpath(root,pattern)
end
-
-function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlapplylpath(root,pattern)
- if collected and flatten then
- local xmltostring = xml.tostring
- for c=1,#collected do
- collected[c] = xmltostring(collected[c].dt)
- end
+function xml.collecttexts(root,pattern,flatten)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring=xml.tostring
+ for c=1,#collected do
+ collected[c]=xmltostring(collected[c].dt)
end
- return collected or { }
+ end
+ return collected or {}
end
-
-function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace then
- t[n] = tg
- elseif ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
- end
- return t
+function xml.collect_tags(root,pattern,nonamespace)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ local t,n={},0
+ for c=1,#collected do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace then
+ t[n]=tg
+ elseif ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
end
+ return t
+ end
end
-
---[[ldx--
-<p>We've now arrived at the functions that manipulate the tree.</p>
---ldx]]--
-
-local no_root = { no_root = true }
-
+local no_root={ no_root=true }
local function redo_ni(d)
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "table" then
- dk.ni = k
- end
+ for k=1,#d do
+ local dk=d[k]
+ if type(dk)=="table" then
+ dk.ni=k
end
+ end
end
-
local function xmltoelement(whatever,root)
- if not whatever then
- return nil
- end
- local element
- if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root) -- beware, not really a root
- else
- element = whatever -- we assume a table
- end
- if element.error then
- return whatever -- string
- end
- if element then
- end
- return element
-end
-
-xml.toelement = xmltoelement
-
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever)=="string" then
+ element=xmlinheritedconvert(whatever,root)
+ else
+ element=whatever
+ end
+ if element.error then
+ return whatever
+ end
+ if element then
+ end
+ return element
+end
+xml.toelement=xmltoelement
local function copiedelement(element,newparent)
- if type(element) == "string" then
- return element
- else
- element = xmlcopy(element).dt
- if newparent and type(element) == "table" then
- element.__p__ = newparent
- end
- return element
+ if type(element)=="string" then
+ return element
+ else
+ element=xmlcopy(element).dt
+ if newparent and type(element)=="table" then
+ element.__p__=newparent
end
+ return element
+ end
end
-
function xml.delete(root,pattern)
- if not pattern or pattern == "" then
- local p = root.__p__
+ if not pattern or pattern=="" then
+ local p=root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d=p.dt
+ remove(d,root.ni)
+ redo_ni(d)
+ end
+ else
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
if p then
- if trace_manipulations then
- report('deleting',"--",c,root)
- end
- local d = p.dt
- remove(d,root.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- else
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
- end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- end
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d=p.dt
+ remove(d,e.ni)
+ redo_ni(d)
end
+ end
end
+ end
end
-
function xml.replace(root,pattern,whatever)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('replacing',pattern,c,e)
- end
- local d = p.dt
- d[e.ni] = copiedelement(element,p)
- redo_ni(d) -- probably not needed
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
end
+ local d=p.dt
+ d[e.ni]=copiedelement(element,p)
+ redo_ni(d)
+ end
end
+ end
end
-
local function wrap(e,wrapper)
- local t = {
- rn = e.rn,
- tg = e.tg,
- ns = e.ns,
- at = e.at,
- dt = e.dt,
- __p__ = e,
- }
- setmetatable(t,getmetatable(e))
- e.rn = wrapper.rn or e.rn or ""
- e.tg = wrapper.tg or e.tg or ""
- e.ns = wrapper.ns or e.ns or ""
- e.at = fastcopy(wrapper.at)
- e.dt = { t }
+ local t={
+ rn=e.rn,
+ tg=e.tg,
+ ns=e.ns,
+ at=e.at,
+ dt=e.dt,
+ __p__=e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn=wrapper.rn or e.rn or ""
+ e.tg=wrapper.tg or e.tg or ""
+ e.ns=wrapper.ns or e.ns or ""
+ e.at=fastcopy(wrapper.at)
+ e.dt={ t }
end
-
function xml.wrap(root,pattern,whatever)
- if whatever then
- local wrapper = xmltoelement(whatever,root)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if trace_manipulations then
- report('wrapping',pattern,c,e)
- end
- wrap(e,wrapper)
- end
+ if whatever then
+ local wrapper=xmltoelement(whatever,root)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
end
- else
- wrap(root,xmltoelement(pattern))
+ wrap(e,wrapper)
+ end
end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
end
-
local function inject_element(root,pattern,whatever,prepend)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function inject_e(e)
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
- end
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- inject_e(collected)
- else
- for c=1,#collected do
- inject_e(collected[c])
- end
- end
-end
-
-local function insert_element(root,pattern,whatever,before) -- todo: element als functie
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function insert_e(e)
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- insert_e(collected)
- else
- for c=1,#collected do
- insert_e(collected[c])
- end
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r=e.__p__
+ local d,k,rri=r.dt,e.ni,r.ri
+ local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be,af
+ local cp=copiedelement(element,e)
+ if prepend then
+ be,af=cp,edt
+ else
+ be,af=edt,cp
+ end
+ local bn=#be
+ for i=1,#af do
+ bn=bn+1
+ be[bn]=af[i]
+ end
+ if rri then
+ r.dt[rri].dt=be
+ else
+ d[k].dt=be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ elseif collected.tg then
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+local function insert_element(root,pattern,whatever,before)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r=e.__p__
+ local d,k=r.dt,e.ni
+ if not before then
+ k=k+1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ elseif collected.tg then
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
end
+ end
end
-
-xml.insert_element = insert_element
-xml.insertafter = insert_element
-xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
-xml.injectafter = inject_element
-xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-
+xml.insert_element=insert_element
+xml.insertafter=insert_element
+xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter=inject_element
+xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
- local collected = xmlapplylpath(xmldata,pattern)
- if collected then
- for c=1,#collected do
- local ek = collected[c]
- local name = nil
- local ekdt = ek.dt
- local ekat = ek.at
- local epdt = ek.__p__.dt
- if not attribute or attribute == "" then
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
- end
- if not name then
- for a in gmatch(attribute or "href","([^|]+)") do
- name = ekat[a]
- if name then break end
- end
- end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- elseif ekat["parse"] == "text" then
- -- for the moment hard coded
- epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
- else
- local xi = xmlinheritedconvert(data,xmldata)
- if not xi then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- else
- if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
- end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
- end
- end
+ pattern=pattern or 'include'
+ loaddata=loaddata or io.loaddata
+ local collected=xmlapplylpath(xmldata,pattern)
+ if collected then
+ for c=1,#collected do
+ local ek=collected[c]
+ local name=nil
+ local ekdt=ek.dt
+ local ekat=ek.at
+ local epdt=ek.__p__.dt
+ if not attribute or attribute=="" then
+ name=(type(ekdt)=="table" and ekdt[1]) or ekdt
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name=ekat[a]
+ if name then break end
+ end
+ end
+ local data=(name and name~="" and loaddata(name)) or ""
+ if data=="" then
+ epdt[ek.ni]=""
+ elseif ekat["parse"]=="text" then
+ epdt[ek.ni]=xml.escaped(data)
+ else
+ local xi=xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni]=""
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni]=xml.body(xi)
end
+ end
end
+ end
end
-
-xml.include = include
-
+xml.include=include
local function stripelement(e,nolines,anywhere)
- local edt = e.dt
- if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- n = n + 1
- t[n] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- n = n + 1
- t[n] = str
- end
- end
- end
- e.dt = t
+ local edt=e.dt
+ if edt then
+ if anywhere then
+ local t,n={},0
+ for e=1,#edt do
+ local str=edt[e]
+ if type(str)~="string" then
+ n=n+1
+ t[n]=str
+ elseif str~="" then
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s*(.-)%s*$","%1")
+ if str~="" then
+ n=n+1
+ t[n]=str
+ end
+ end
+ end
+ e.dt=t
+ else
+ if #edt>0 then
+ local str=edt[1]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt,1)
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
- end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[nedt] = str
- end
- end
- end
- end
- end
- return e -- convenient
-end
-
-xml.stripelement = stripelement
-
-function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
- local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
- if collected then
- for i=1,#collected do
- stripelement(collected[i],nolines,anywhere)
- end
- end
-end
-
-local function renamespace(root, oldspace, newspace) -- fast variant
- local ndt = #root.dt
- for i=1,ndt or 0 do
- local e = root[i]
- if type(e) == "table" then
- if e.ns == oldspace then
- e.ns = newspace
- if e.rn then
- e.rn = newspace
- end
- end
- local edt = e.dt
- if edt then
- renamespace(edt, oldspace, newspace)
- end
- end
- end
-end
-
-xml.renamespace = renamespace
-
-function xml.remaptag(root, pattern, newtg)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].tg = newtg
- end
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"^%s+","")
+ if str=="" then
+ remove(edt,1)
+ else
+ edt[1]=str
+ end
+ end
+ end
+ local nedt=#edt
+ if nedt>0 then
+ local str=edt[nedt]
+ if type(str)~="string" then
+ elseif str=="" then
+ remove(edt)
+ else
+ if nolines then
+ str=gsub(str,"%s+"," ")
+ end
+ str=gsub(str,"%s+$","")
+ if str=="" then
+ remove(edt)
+ else
+ edt[nedt]=str
+ end
+ end
+ end
+ end
+ end
+ return e
+end
+xml.stripelement=stripelement
+function xml.strip(root,pattern,nolines,anywhere)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
+local function renamespace(root,oldspace,newspace)
+ local ndt=#root.dt
+ for i=1,ndt or 0 do
+ local e=root[i]
+ if type(e)=="table" then
+ if e.ns==oldspace then
+ e.ns=newspace
+ if e.rn then
+ e.rn=newspace
+ end
+ end
+ local edt=e.dt
+ if edt then
+ renamespace(edt,oldspace,newspace)
+ end
+ end
+ end
+end
+xml.renamespace=renamespace
+function xml.remaptag(root,pattern,newtg)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg=newtg
end
+ end
end
-
-function xml.remapnamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].ns = newns
- end
+function xml.remapnamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns=newns
end
+ end
end
-
-function xml.checknamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if (not e.rn or e.rn == "") and e.ns == "" then
- e.rn = newns
- end
- end
+function xml.checknamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if (not e.rn or e.rn=="") and e.ns=="" then
+ e.rn=newns
+ end
end
+ end
end
-
-function xml.remapname(root, pattern, newtg, newns, newrn)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- e.tg, e.ns, e.rn = newtg, newns, newrn
- end
+function xml.remapname(root,pattern,newtg,newns,newrn)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ e.tg,e.ns,e.rn=newtg,newns,newrn
end
+ end
end
-
---[[ldx--
-<p>Helper (for q2p).</p>
---ldx]]--
-
function xml.cdatatotext(e)
- local dt = e.dt
- if #dt == 1 then
- local first = dt[1]
- if first.tg == "@cd@" then
- e.dt = first.dt
- end
+ local dt=e.dt
+ if #dt==1 then
+ local first=dt[1]
+ if first.tg=="@cd@" then
+ e.dt=first.dt
+ end
+ else
+ end
+end
+function xml.texttocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(dt)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+function xml.elementtocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(e)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
+local entities=characters and characters.entities or nil
+local builtinentities=xml.builtinentities
+function xml.addentitiesdoctype(root,option)
+ if not entities then
+ require("char-ent")
+ entities=characters.entities
+ end
+ if entities and root and root.tg=="@rt@" and root.statistics then
+ local list={}
+ local hexify=option=="hexadecimal"
+ for k,v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e=entities[k]
+ if not e then
+ e=format("[%s]",k)
+ elseif hexify then
+ e=format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1]=format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt=root.dt
+ local n=dt[1].tg=="@pi@" and 2 or 1
+ if #list>0 then
+ insert(dt,n,{ "\n" })
+ insert(dt,n,{
+ tg="@dt@",
+ dt={ format("Something [\n%s\n] ",concat(list)) },
+ ns="",
+ special=true,
+ })
+ insert(dt,n,{ "\n\n" })
else
- -- maybe option
- end
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.texttocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[1<b>2</b>3]]></x>
-
-function xml.texttocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(dt) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.tocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[<a>1<b>2</b>3</a>]]></x>
-
-function xml.elementtocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(e) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
-xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
-
-local entities = characters and characters.entities or nil
-local builtinentities = xml.builtinentities
-
-function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
- if not entities then
- require("char-ent")
- entities = characters.entities
- end
- if entities and root and root.tg == "@rt@" and root.statistics then
- local list = { }
- local hexify = option == "hexadecimal"
- for k, v in table.sortedhash(root.statistics.entities.names) do
- if not builtinentities[k] then
- local e = entities[k]
- if not e then
- e = format("[%s]",k)
- elseif hexify then
- e = format("&#%05X;",utfbyte(k))
- end
- list[#list+1] = format(" <!ENTITY %s %q >",k,e)
- end
- end
- local dt = root.dt
- local n = dt[1].tg == "@pi@" and 2 or 1
- if #list > 0 then
- insert(dt, n, { "\n" })
- insert(dt, n, {
- tg = "@dt@", -- beware, doctype is unparsed
- dt = { format("Something [\n%s\n] ",concat(list)) },
- ns = "",
- special = true,
- })
- insert(dt, n, { "\n\n" })
- else
- -- insert(dt, n, { table.serialize(root.statistics) })
- end
end
-end
-
--- local str = [==[
--- <?xml version='1.0' standalone='yes' ?>
--- <root>
--- <a>test &nbsp; test &#123; test</a>
--- <b><![CDATA[oeps]]></b>
--- </root>
--- ]==]
---
--- local x = xml.convert(str)
--- xml.addentitiesdoctype(x,"hexadecimal")
--- print(x)
-
---[[ldx--
-<p>Here are a few synonyms.</p>
---ldx]]--
-
-xml.all = xml.each
-xml.insert = xml.insertafter
-xml.inject = xml.injectafter
-xml.after = xml.insertafter
-xml.before = xml.insertbefore
-xml.process = xml.each
-
--- obsolete
-
-xml.obsolete = xml.obsolete or { }
-local obsolete = xml.obsolete
-
-xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
-xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
-xml.delete_element = xml.delete obsolete.delete_element = xml.delete
-xml.replace_element = xml.replace obsolete.replace_element = xml.replacet
-xml.each_element = xml.each obsolete.each_element = xml.each
-xml.process_elements = xml.process obsolete.process_elements = xml.process
-xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
-xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
-xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
-xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
-xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
-xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
-xml.inject_element = xml.inject obsolete.inject_element = xml.inject
-xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
-xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
-xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
-
--- new (probably ok)
-
+ end
+end
+xml.all=xml.each
+xml.insert=xml.insertafter
+xml.inject=xml.injectafter
+xml.after=xml.insertafter
+xml.before=xml.insertbefore
+xml.process=xml.each
+xml.obsolete=xml.obsolete or {}
+local obsolete=xml.obsolete
+xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
+xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
+xml.delete_element=xml.delete obsolete.delete_element=xml.delete
+xml.replace_element=xml.replace obsolete.replace_element=xml.replacet
+xml.each_element=xml.each obsolete.each_element=xml.each
+xml.process_elements=xml.process obsolete.process_elements=xml.process
+xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
+xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
+xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
+xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
+xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
+xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
+xml.inject_element=xml.inject obsolete.inject_element=xml.inject
+xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
+xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
+xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
function xml.cdata(e)
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
end
- return ""
+ end
+ return ""
end
-
function xml.finalizers.xml.cdata(collected)
- if collected then
- local e = collected[1]
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
- end
- end
- return ""
-end
-
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
- tg = "@cm@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- })
-end
-
-function xml.setcdata(e,str) -- also setcomment
- e.dt = { {
- tg = "@cd@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- } }
+ if collected then
+ local e=collected[1]
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+function xml.insertcomment(e,str,n)
+ table.insert(e.dt,n or 1,{
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcdata(e,str)
+ e.dt={ {
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
end
-
--- maybe helpers like this will move to an autoloader
-
function xml.separate(x,pattern)
- local collected = xmlapplylpath(x,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local d = e.dt
- if d == x then
- report_xml("warning: xml.separate changes root")
- x = d
- end
- local t, n = { "\n" }, 1
- local i, nd = 1, #d
- while i <= nd do
- while i <= nd do
- local di = d[i]
- if type(di) == "string" then
- if di == "\n" or find(di,"^%s+$") then -- first test is speedup
- i = i + 1
- else
- d[i] = strip(di)
- break
- end
- else
- break
- end
- end
- if i > nd then
- break
- end
- t[n+1] = "\n"
- t[n+2] = d[i]
- t[n+3] = "\n"
- n = n + 3
- i = i + 1
+ local collected=xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local d=e.dt
+ if d==x then
+ report_xml("warning: xml.separate changes root")
+ x=d
+ end
+ local t,n={ "\n" },1
+ local i,nd=1,#d
+ while i<=nd do
+ while i<=nd do
+ local di=d[i]
+ if type(di)=="string" then
+ if di=="\n" or find(di,"^%s+$") then
+ i=i+1
+ else
+ d[i]=strip(di)
+ break
end
- t[n+1] = "\n"
- setmetatable(t,getmetatable(d))
- e.dt = t
- end
- end
- return x
+ else
+ break
+ end
+ end
+ if i>nd then
+ break
+ end
+ t[n+1]="\n"
+ t[n+2]=d[i]
+ t[n+3]="\n"
+ n=n+3
+ i=i+1
+ end
+ t[n+1]="\n"
+ setmetatable(t,getmetatable(d))
+ e.dt=t
+ end
+ end
+ return x
+end
+local helpers=xml.helpers or {}
+xml.helpers=helpers
+local function normal(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)=="string" and str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+local function recurse(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)~="string" then
+ recurse(str,action,recursive)
+ elseif str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
end
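The lxml-aux code above manipulates a parsed tree in place. One of the usage comments that the stripped version drops (visible in the removed lines near xml.texttocdata) still describes the intent; restated as a small sketch, with the sample document being illustrative only:

    local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
    xml.texttocdata(xml.first(x,"a"))   -- turn the content of <a> into a CDATA node
    print(x)                            --> <x><![CDATA[1<b>2</b>3]]></x>
    xml.delete(x,"a")                   -- remove elements matched by the (schematic) lpath pattern
    print(x)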
@@ -10389,450 +10821,377 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-xml'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
-local concat = table.concat
-local find, lower, upper = string.find, string.lower, string.upper
+-- original size: 10274, stripped down to: 7538
-local xml = xml
-
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmlnewhandlers = xml.newhandlers
-
-local function first(collected) -- wrong ?
- return collected and collected[1]
+if not modules then modules={} end modules ['lxml-xml']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local find,lower,upper=string.find,string.lower,string.upper
+local xml=xml
+local finalizers=xml.finalizers.xml
+local xmlfilter=xml.filter
+local xmltostring=xml.tostring
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmlnewhandlers=xml.newhandlers
+local function first(collected)
+ return collected and collected[1]
end
-
local function last(collected)
- return collected and collected[#collected]
+ return collected and collected[#collected]
end
-
local function all(collected)
- return collected
+ return collected
end
-
--- local function reverse(collected)
--- if collected then
--- local nc = #collected
--- if nc > 0 then
--- local reversed, r = { }, 0
--- for c=nc,1,-1 do
--- r = r + 1
--- reversed[r] = collected[c]
--- end
--- return reversed
--- else
--- return collected
--- end
--- end
--- end
-
-local reverse = table.reversed
-
+local reverse=table.reversed
local function attribute(collected,name)
- if collected and #collected > 0 then
- local at = collected[1].at
- return at and at[name]
- end
+ if collected and #collected>0 then
+ local at=collected[1].at
+ return at and at[name]
+ end
end
-
local function att(id,name)
- local at = id.at
- return at and at[name]
+ local at=id.at
+ return at and at[name]
end
-
local function count(collected)
- return collected and #collected or 0
+ return collected and #collected or 0
end
-
local function position(collected,n)
- if not collected then
- return 0
- end
- local nc = #collected
- if nc == 0 then
- return 0
- end
- n = tonumber(n) or 0
- if n < 0 then
- return collected[nc + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc=#collected
+ if nc==0 then
+ return 0
+ end
+ n=tonumber(n) or 0
+ if n<0 then
+ return collected[nc+n+1]
+ elseif n>0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
end
-
local function match(collected)
- return collected and #collected > 0 and collected[1].mi or 0 -- match
+ return collected and #collected>0 and collected[1].mi or 0
end
-
local function index(collected)
- return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
+ return collected and #collected>0 and collected[1].ni or 0
end
-
local function attributes(collected,arguments)
- if collected and #collected > 0 then
- local at = collected[1].at
- if arguments then
- return at[arguments]
- elseif next(at) then
- return at -- all of them
+ if collected and #collected>0 then
+ local at=collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at
+ end
+ end
+end
+local function chainattribute(collected,arguments)
+ if collected and #collected>0 then
+ local e=collected[1]
+ while e do
+ local at=e.at
+ if at then
+ local a=at[arguments]
+ if a then
+ return a
end
+ else
+ break
+ end
+ e=e.__p__
end
+ end
+ return ""
end
-
-local function chainattribute(collected,arguments) -- todo: optional levels
- if collected and #collected > 0 then
- local e = collected[1]
- while e do
- local at = e.at
- if at then
- local a = at[arguments]
- if a then
- return a
- end
- else
- break -- error
- end
- e = e.__p__
- end
- end
+local function raw(collected)
+ if collected and #collected>0 then
+ local e=collected[1] or collected
+ return e and xmltostring(e) or ""
+ else
return ""
+ end
end
-
-local function raw(collected) -- hybrid (not much different from text so it might go)
- if collected and #collected > 0 then
- local e = collected[1] or collected
- return e and xmltostring(e) or "" -- only first as we cannot concat function
- else
- return ""
- end
-end
-
---
-
-local xmltexthandler = xmlnewhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
- escape = false,
+local xmltexthandler=xmlnewhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+ escape=false,
}
-
local function xmltotext(root)
- local dt = root.dt
- if not dt then
- return ""
- end
- local nt = #dt -- string or table
- if nt == 0 then
- return ""
- elseif nt == 1 and type(dt[1]) == "string" then
- return dt[1] -- no escaping of " ' < > &
- else
- return xmlserialize(root,xmltexthandler) or ""
- end
-end
-
---
-
-local function text(collected) -- hybrid
- if collected then -- no # test here !
- local e = collected[1] or collected -- why fallback to element, how about cdata
- return e and xmltotext(e) or ""
- else
- return ""
- end
+ local dt=root.dt
+ if not dt then
+ return ""
+ end
+ local nt=#dt
+ if nt==0 then
+ return ""
+ elseif nt==1 and type(dt[1])=="string" then
+ return dt[1]
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+local function text(collected)
+ if collected then
+ local e=collected[1] or collected
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
end
-
local function texts(collected)
- if not collected then
- return { } -- why no nil
- end
- local nc = #collected
- if nc == 0 then
- return { } -- why no nil
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
- end
- return t
+ if not collected then
+ return {}
+ end
+ local nc=#collected
+ if nc==0 then
+ return {}
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ if e and e.dt then
+ n=n+1
+ t[n]=e.dt
+ end
+ end
+ return t
end
-
local function tag(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ return c and c.tg
end
-
local function name(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if not c then
- -- sorry
- elseif c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ if not c then
+ elseif c.ns=="" then
+ return c.tg
+ else
+ return c.ns..":"..c.tg
+ end
end
-
local function tags(collected,nonamespace)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace or ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
end
- return t
+ end
+ return t
end
-
local function empty(collected,spacesonly)
- if not collected then
- return true
- end
- local nc = #collected
- if nc == 0 then
- return true
- end
- for c=1,nc do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then
- return false
- elseif spacesonly and not find(edk,"%S") then
- return false
- end
- elseif n > 1 then
- return false
- end
- end
- end
- end
+ if not collected then
return true
-end
-
-finalizers.first = first
-finalizers.last = last
-finalizers.all = all
-finalizers.reverse = reverse
-finalizers.elements = all
-finalizers.default = all
-finalizers.attribute = attribute
-finalizers.att = att
-finalizers.count = count
-finalizers.position = position
-finalizers.match = match
-finalizers.index = index
-finalizers.attributes = attributes
-finalizers.chainattribute = chainattribute
-finalizers.text = text
-finalizers.texts = texts
-finalizers.tag = tag
-finalizers.name = name
-finalizers.tags = tags
-finalizers.empty = empty
-
--- shortcuts -- we could support xmlfilter(id,pattern,first)
-
+ end
+ local nc=#collected
+ if nc==0 then
+ return true
+ end
+ for c=1,nc do
+ local e=collected[c]
+ if e then
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==1 then
+ local edk=edt[1]
+ local typ=type(edk)
+ if typ=="table" then
+ return false
+ elseif edk~="" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n>1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+finalizers.first=first
+finalizers.last=last
+finalizers.all=all
+finalizers.reverse=reverse
+finalizers.elements=all
+finalizers.default=all
+finalizers.attribute=attribute
+finalizers.att=att
+finalizers.count=count
+finalizers.position=position
+finalizers.match=match
+finalizers.index=index
+finalizers.attributes=attributes
+finalizers.chainattribute=chainattribute
+finalizers.text=text
+finalizers.texts=texts
+finalizers.tag=tag
+finalizers.name=name
+finalizers.tags=tags
+finalizers.empty=empty
function xml.first(id,pattern)
- return first(xmlfilter(id,pattern))
+ return first(xmlfilter(id,pattern))
end
-
function xml.last(id,pattern)
- return last(xmlfilter(id,pattern))
+ return last(xmlfilter(id,pattern))
end
-
function xml.count(id,pattern)
- return count(xmlfilter(id,pattern))
+ return count(xmlfilter(id,pattern))
end
-
function xml.attribute(id,pattern,a,default)
- return attribute(xmlfilter(id,pattern),a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
end
-
function xml.raw(id,pattern)
- if pattern then
- return raw(xmlfilter(id,pattern))
- else
- return raw(id)
- end
-end
-
-function xml.text(id,pattern) -- brrr either content or element (when cdata)
- if pattern then
- -- return text(xmlfilter(id,pattern))
- local collected = xmlfilter(id,pattern)
- return collected and #collected > 0 and xmltotext(collected[1]) or ""
- elseif id then
- -- return text(id)
- return xmltotext(id) or ""
- else
- return ""
- end
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+function xml.text(id,pattern)
+ if pattern then
+ local collected=xmlfilter(id,pattern)
+ return collected and #collected>0 and xmltotext(collected[1]) or ""
+ elseif id then
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
end
-
-xml.content = text
-
---
-
-function xml.position(id,pattern,n) -- element
- return position(xmlfilter(id,pattern),n)
+xml.content=text
+function xml.position(id,pattern,n)
+ return position(xmlfilter(id,pattern),n)
end
-
-function xml.match(id,pattern) -- number
- return match(xmlfilter(id,pattern))
+function xml.match(id,pattern)
+ return match(xmlfilter(id,pattern))
end
-
function xml.empty(id,pattern,spacesonly)
- return empty(xmlfilter(id,pattern),spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
-
-xml.all = xml.filter
-xml.index = xml.position
-xml.found = xml.filter
-
--- a nice one:
-
+xml.all=xml.filter
+xml.index=xml.position
+xml.found=xml.filter
local function totable(x)
- local t = { }
- for e in xmlcollected(x[1] or x,"/*") do
- t[e.tg] = xmltostring(e.dt) or ""
- end
- return next(t) and t or nil
-end
-
-xml.table = totable
-finalizers.table = totable
-
+ local t={}
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg]=xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+xml.table=totable
+finalizers.table=totable
local function textonly(e,t)
- if e then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- textonly(e,t)
- else
- t[#t+1] = e
- end
- end
+ if e then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ textonly(e,t)
+ else
+ t[#t+1]=e
end
+ end
end
- return t
+ end
+ return t
end
-
-function xml.textonly(e) -- no pattern
- return concat(textonly(e,{}))
+function xml.textonly(e)
+ return concat(textonly(e,{}))
end
-
---
-
--- local x = xml.convert("<x><a x='+'>1<B>2</B>3</a></x>")
--- xml.filter(x,"**/lowerall()") print(x)
--- xml.filter(x,"**/upperall()") print(x)
-
function finalizers.lowerall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = lower(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[lower(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=lower(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[lower(k)]=v
end
+ e.at=t
+ end
end
+ end
end
-
function finalizers.upperall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = upper(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[upper(k)] = v
- end
- e.at = t
- end
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=upper(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[upper(k)]=v
end
+ e.at=t
+ end
end
+ end
end
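The lowerall and upperall finalizers registered above are easiest to see through the example the original source carried as a comment (also dropped by the stripping); the input document is illustrative:

    local x = xml.convert("<x><a x='+'>1<B>2</B>3</a></x>")
    xml.filter(x,"**/lowerall()") print(x)   -- tags and attribute keys lower-cased in place
    xml.filter(x,"**/upperall()") print(x)   -- all tags and attribute keys upper-cased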
@@ -10840,245 +11199,331 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-ini'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
-local concat = table.concat
-local next, type = next, type
+package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+-- original size: 6351, stripped down to: 4919
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_initialization = logs.reporter("resolvers","initialization")
-
-local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-
--- The code here used to be part of a data-res but for convenience
--- we now split it over multiple files. As this file is now the
--- starting point we introduce resolvers here.
+if not modules then modules={} end modules ['trac-xml']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local formatters=string.formatters
+local reporters=logs.reporters
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmltext=xml.text
+local xmlfirst=xml.first
+local function showhelp(specification,...)
+ local root=xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs=xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories=select("#",...)==0 and true or table.tohash {... }
+ local nofcategories=xml.count(root,"/application/flags/category")
+ local report=specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname=category.at.name or ""
+ if wantedcategories==true or wantedcategories[categoryname] then
+ if nofcategories>1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name=flag.at.name
+ local value=flag.at.value
+ local short=xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title=xmltext(xmlfirst(category,"/title"))
+ if title and title~="" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command=xmltext(xmlfirst(example,"/command"))
+ local comment=xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment=xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+local reporthelp=reporters.help
+local exporthelp=reporters.export
+local function xmlfound(t)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="table" then
+ return false
+ end
+ if type(helpinfo)~="string" then
+ helpinfo="Warning: no helpinfo found."
+ t.helpinfo=helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript=environment.ownscript
+ local helpdata=false
+ if ownscript then
+ local helpfile=file.join(file.pathpart(ownscript),helpinfo)
+ helpdata=io.loaddata(helpfile)
+ if helpdata=="" then
+ helpdata=false
+ end
+ end
+ if not helpdata then
+ local helpfile=resolvers.findfile(helpinfo,"tex")
+ helpdata=helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata~="" then
+ helpinfo=helpdata
+ else
+ helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo=helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods=="" then
+ methods=environment.arguments["exporthelp"]
+ end
+ if not filename or filename=="" then
+ filename=environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters=logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods=="all" then
+ methods=table.keys(exporters)
+ elseif type(methods)=="string" then
+ methods=utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename)~="string" or filename=="" then
+ filename=false
+ elseif file.pathpart(filename)=="" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method=methods[i]
+ local exporter=exporters[method]
+ if exporter then
+ local result=exporter(t,method)
+ if result and result~="" then
+ if filename then
+ local fullname=file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
-resolvers = resolvers or { }
-local resolvers = resolvers
--- We don't want the kpse library to kick in. Also, we want to be able to
--- execute programs. Control over execution is implemented later.
+end -- of closure
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
+do -- create closure to overcome 200 locals limit
-if kpse and kpse.default_texmfcnf then
- local default_texmfcnf = kpse.default_texmfcnf()
- -- looks more like context:
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
- default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
- default_texmfcnf = gsub(default_texmfcnf,"$HOME","home:")
- --
- environment.default_texmfcnf = default_texmfcnf
-end
+package.loaded["data-ini"] = package.loaded["data-ini"] or true
-kpse = { original = kpse }
+-- original size: 7898, stripped down to: 5501
-setmetatable(kpse, {
- __index = function(kp,name)
- report_initialization("fatal error: kpse library is accessed (key: %s)",name)
- os.exit()
- end
+if not modules then modules={} end modules ['data-ini']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
+local next,type=next,type
+local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_initialization=logs.reporter("resolvers","initialization")
+local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
+resolvers=resolvers or {}
+local resolvers=resolvers
+texconfig.kpse_init=false
+texconfig.shell_escape='t'
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
+ local default_texmfcnf=kpse.default_texmfcnf()
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
+ default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
+ environment.default_texmfcnf=default_texmfcnf
+end
+kpse={ original=kpse }
+setmetatable(kpse,{
+ __index=function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
} )
-
--- First we check a couple of environment variables. Some might be
--- set already but we need them later on. We start with the system
--- font path.
-
do
-
- local osfontdir = osgetenv("OSFONTDIR")
-
- if osfontdir and osfontdir ~= "" then
- -- ok
- elseif osname == "windows" then
- ossetenv("OSFONTDIR","c:/windows/fonts//")
- elseif osname == "macosx" then
- ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-
+ local osfontdir=osgetenv("OSFONTDIR")
+ if osfontdir and osfontdir~="" then
+ elseif osname=="windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname=="macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
end
-
--- Next comes the user's home path. We need this as later on we have
--- to replace ~ with its value.
-
do
-
- local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
-
- if not homedir or homedir == "" then
- homedir = char(127) -- we need a value, later we will trigger on it
- end
-
- homedir = file.collapsepath(homedir)
-
- ossetenv("HOME", homedir) -- can be used in unix cnf files
- ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
-
- environment.homedir = homedir
-
+ local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
+ if not homedir or homedir=="" then
+ homedir=char(127)
+ end
+ homedir=file.collapsepath(homedir)
+ ossetenv("HOME",homedir)
+ ossetenv("USERPROFILE",homedir)
+ environment.homedir=homedir
end
-
--- The following code sets the name of the own binary and its
--- path. This is fallback code as we have os.selfdir now.
-
do
-
- local args = environment.originalarguments or arg -- this needs a cleanup
-
- local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
- local ownpath = environment.ownpath or os.selfdir
-
- ownbin = file.collapsepath(ownbin)
- ownpath = file.collapsepath(ownpath)
-
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
- end
- local binary = ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and filedirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- local path = osgetenv("PATH")
- if path then
- for p in gmatch(path,"[^"..io.pathseparator.."]+") do
- local b = filejoin(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- report_initialization("following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- report_initialization("unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
+ local args=environment.originalarguments or arg
+ if not environment.ownmain then
+ environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+ local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath=environment.ownpath or os.selfdir
+ ownbin=file.collapsepath(ownbin)
+ ownpath=file.collapsepath(ownpath)
+ if not ownpath or ownpath=="" or ownpath=="unset" then
+ ownpath=args[-1] or arg[-1]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath=="" then
+ ownpath=args[-0] or arg[-0]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary=ownbin
+ if not ownpath or ownpath=="" then
+ ownpath=ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath=="" then
+ if os.binsuffix~="" then
+ binary=file.replacesuffix(binary,os.binsuffix)
+ end
+ local path=osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b=filejoin(p,binary)
+ if lfs.isfile(b) then
+ local olddir=lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp=lfs.currentdir()
+ if trace_locating and p~=pp then
+ report_initialization("following symlink %a to %a",p,pp)
+ end
+ ownpath=pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path %a",p)
+ end
+ ownpath=p
end
+ break
+ end
end
- if not ownpath or ownpath == "" then
- ownpath = "."
- report_initialization("forcing fallback ownpath .")
- elseif trace_locating then
- report_initialization("using ownpath '%s'",ownpath)
- end
+ end
end
-
- environment.ownbin = ownbin
- environment.ownpath = ownpath
-
+ if not ownpath or ownpath=="" then
+ ownpath="."
+ report_initialization("forcing fallback to ownpath %a",ownpath)
+ elseif trace_locating then
+ report_initialization("using ownpath %a",ownpath)
+ end
+ end
+ environment.ownbin=ownbin
+ environment.ownpath=ownpath
end
-
-resolvers.ownpath = environment.ownpath
-
+resolvers.ownpath=environment.ownpath
function resolvers.getownpath()
- return environment.ownpath
+ return environment.ownpath
end
-
--- The self variables permit us to use only a few (or even no)
--- environment variables.
-
do
-
- local ownpath = environment.ownpath or dir.current()
-
- if ownpath then
- ossetenv('SELFAUTOLOC', file.collapsepath(ownpath))
- ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/.."))
- ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. "/../.."))
- else
- report_initialization("error: unable to locate ownpath")
- os.exit()
- end
-
-end
-
--- The running os:
-
--- todo: check is context sits here os.platform is more trustworthy
--- that the bin check as mtx-update runs from another path
-
-local texos = environment.texos or osgetenv("TEXOS")
-local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
-
-if not texos or texos == "" then
- texos = file.basename(texmfos)
-end
-
-ossetenv('TEXMFOS', texmfos) -- full bin path
-ossetenv('TEXOS', texos) -- partial bin parent
-ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-
-environment.texos = texos
-environment.texmfos = texmfos
-
--- The current root:
-
-local texroot = environment.texroot or osgetenv("TEXROOT")
-
-if not texroot or texroot == "" then
- texroot = osgetenv('SELFAUTOPARENT')
- ossetenv('TEXROOT',texroot)
-end
-
-environment.texroot = file.collapsepath(texroot)
-
--- Tracing. Todo ...
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
+ local ownpath=environment.ownpath or dir.current()
+ if ownpath then
+ ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
+ ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
+ end
+end
+local texos=environment.texos or osgetenv("TEXOS")
+local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
+if not texos or texos=="" then
+ texos=file.basename(texmfos)
+end
+ossetenv('TEXMFOS',texmfos)
+ossetenv('TEXOS',texos)
+ossetenv('SELFAUTOSYSTEM',os.platform)
+environment.texos=texos
+environment.texmfos=texmfos
+local texroot=environment.texroot or osgetenv("TEXROOT")
+if not texroot or texroot=="" then
+ texroot=osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+environment.texroot=file.collapsepath(texroot)
+if profiler then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
end
-
-resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-
--- todo:
-
--- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
--- profiler.start("luatex-profile.log")
--- end
-
--- a forward definition
-
if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+ function resolvers.resolve (s) return s end
+ function resolvers.unresolve(s) return s end
+ function resolvers.repath (s) return s end
end
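
For reference, a minimal plain-Lua sketch of how SELFAUTOLOC, SELFAUTODIR and SELFAUTOPARENT derive from ownpath in the block above. The naive collapse function and the sample path are illustrative stand-ins for file.collapsepath and a real installation, not part of the commit.

-- editorial sketch, not part of the diff
local function collapse(path)
  -- repeatedly drop "segment/.." pairs; a crude stand-in for file.collapsepath
  local n
  repeat
    path, n = path:gsub("/[^/]+/%.%.", "")
  until n == 0
  return path
end

local ownpath = "/opt/texlive/2013/bin/x86_64-linux"  -- hypothetical binary location
print(collapse(ownpath))             -- SELFAUTOLOC    -> /opt/texlive/2013/bin/x86_64-linux
print(collapse(ownpath .. "/.."))    -- SELFAUTODIR    -> /opt/texlive/2013/bin
print(collapse(ownpath .. "/../..")) -- SELFAUTOPARENT -> /opt/texlive/2013
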
@@ -11086,1150 +11531,981 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-exp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
-local concat, sort = table.concat, table.sort
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
-local type, next = type, next
+package.loaded["data-exp"] = package.loaded["data-exp"] or true
-local ostype = os.type
-local collapsepath = file.collapsepath
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_expansions = logs.reporter("resolvers","expansions")
-
-local resolvers = resolvers
-
--- As this bit of code is somewhat special it gets its own module. After
--- all, when working on the main resolver code, I don't want to scroll
--- past this every time. See data-obs.lua for the gsub variant.
+-- original size: 14643, stripped down to: 9517
+if not modules then modules={} end modules ['data-exp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
+local concat,sort=table.concat,table.sort
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local Ct,Cs,Cc,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.P,lpeg.C,lpeg.S
+local type,next=type,next
+local ostype=os.type
+local collapsepath=file.collapsepath
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_expansions=logs.reporter("resolvers","expansions")
+local resolvers=resolvers
local function f_first(a,b)
- local t, n = { }, 0
- for s in gmatch(b,"[^,]+") do
- n = n + 1 ; t[n] = a .. s
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(b,"[^,]+") do
+ n=n+1;t[n]=a..s
+ end
+ return concat(t,",")
end
-
local function f_second(a,b)
- local t, n = { }, 0
- for s in gmatch(a,"[^,]+") do
- n = n + 1 ; t[n] = s .. b
- end
- return concat(t,",")
+ local t,n={},0
+ for s in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=s..b
+ end
+ return concat(t,",")
end
-
--- kpsewhich --expand-braces '{a,b}{c,d}'
--- ac:bc:ad:bd
-
--- old {a,b}{c,d} => ac ad bc bd
---
--- local function f_both(a,b)
--- local t, n = { }, 0
--- for sa in gmatch(a,"[^,]+") do
--- for sb in gmatch(b,"[^,]+") do
--- n = n + 1 ; t[n] = sa .. sb
--- end
--- end
--- return concat(t,",")
--- end
---
--- new {a,b}{c,d} => ac bc ad bd
-
local function f_both(a,b)
- local t, n = { }, 0
- for sb in gmatch(b,"[^,]+") do -- and not sa
- for sa in gmatch(a,"[^,]+") do -- sb
- n = n + 1 ; t[n] = sa .. sb
- end
- end
- return concat(t,",")
-end
-
-local left = P("{")
-local right = P("}")
-local var = P((1 - S("{}" ))^0)
-local set = P((1 - S("{},"))^0)
-local other = P(1)
-
-local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
-local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
-local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
-local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-
-local stripper_1 = lpeg.stripper ("{}@")
-local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-
-local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
- if trace_expansions then
- report_expansions("expanding variable '%s'",str)
- end
- local t, ok, done = newlist or { }, false, false
- local n = #t
- str = lpegmatch(replacer_1,str)
+ local t,n={},0
+ for sb in gmatch(b,"[^,]+") do
+ for sa in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=sa..sb
+ end
+ end
+ return concat(t,",")
+end
+local left=P("{")
+local right=P("}")
+local var=P((1-S("{}" ))^0)
+local set=P((1-S("{},"))^0)
+local other=P(1)
+local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
+local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
+local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
+local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
+local stripper_1=lpeg.stripper ("{}@")
+local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
+local function splitpathexpr(str,newlist,validate)
+ if trace_expansions then
+ report_expansions("expanding variable %a",str)
+ end
+ local t,ok,done=newlist or {},false,false
+ local n=#t
+ str=lpegmatch(replacer_1,str)
+ repeat
+ local old=str
repeat
- local old = str
- repeat
- local old = str
- str = lpegmatch(l_first, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_second,str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_both, str)
- until old == str
- repeat
- local old = str
- str = lpegmatch(l_rest, str)
- until old == str
- until old == str -- or not find(str,"{")
- str = lpegmatch(stripper_1,str)
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then
- n = n + 1 ; t[n] = s
- end
- end
- else
- for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- report_expansions("% 4i: %s",k,t[k])
- end
+ local old=str
+ str=lpegmatch(l_first,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_second,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_both,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_rest,str)
+ until old==str
+ until old==str
+ str=lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s=validate(s)
+ if s then
+ n=n+1
+ t[n]=s
+ end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ n=n+1
+ t[n]=s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
end
- return t
+ end
+ return t
end
-
--- We could make the previous one public.
-
local function validate(s)
- s = collapsepath(s) -- already keeps the //
- return s ~= "" and not find(s,"^!*unset/*$") and s
+ s=collapsepath(s)
+ return s~="" and not find(s,"^!*unset/*$") and s
end
-
-resolvers.validatedpath = validate -- keeps the trailing //
-
+resolvers.validatedpath=validate
function resolvers.expandedpathfromlist(pathlist)
- local newlist = { }
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- return newlist
-end
-
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
-local cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
+ local newlist={}
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ return newlist
+end
+local cleanup=lpeg.replacer {
+ { "!","" },
+ { "\\","/" },
}
-
-function resolvers.cleanpath(str) -- tricky, maybe only simple paths
- local doslashes = (P("\\")/"/" + 1)^0
- local donegation = (P("!") /"" )^0
- local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
- if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return "" -- special case
- else
- return lpegmatch(cleanup,str)
- end
- end
- else
- local dohome = ((P("~")+P("$HOME"))/homedir)^0
- local cleanup = Cs(donegation * dohome * doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
- end
- end
- return resolvers.cleanpath(str)
-end
-
--- print(resolvers.cleanpath(""))
--- print(resolvers.cleanpath("!"))
--- print(resolvers.cleanpath("~"))
--- print(resolvers.cleanpath("~/test"))
--- print(resolvers.cleanpath("!~/test"))
--- print(resolvers.cleanpath("~/test~test"))
-
--- This one strips quotes and funny tokens.
-
-local expandhome = P("~") / "$HOME" -- environment.homedir
-
-local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
-local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = Cs(
- lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+function resolvers.cleanpath(str)
+ local doslashes=(P("\\")/"/"+1)^0
+ local donegation=(P("!")/"" )^0
+ local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return ""
+ else
+ return lpegmatch(cleanup,str)
+ end
+ end
+ else
+ local dohome=((P("~")+P("$HOME"))/homedir)^0
+ local cleanup=Cs(donegation*dohome*doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
+ end
+ return resolvers.cleanpath(str)
+end
+local expandhome=P("~")/"$HOME"
+local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
+local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
+local dostring=(expandhome+1 )^0
+local stripper=Cs(
+ lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
)
-
-function resolvers.checkedvariable(str) -- assumes str is a string
- return type(str) == "string" and lpegmatch(stripper,str) or str
-end
-
--- The path splitter:
-
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
-local cache = { }
-
------ splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
-local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
-
-local backslashswapper = lpeg.replacer("\\","/")
-
-local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
- if str then
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
- found = { }
- local noffound = 0
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- noffound = noffound + 1
- found[noffound] = s
- end
- end
- if trace_expansions then
- report_expansions("splitting path specification '%s'",str)
- for k=1,noffound do
- report_expansions("% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
- end
+function resolvers.checkedvariable(str)
+ return type(str)=="string" and lpegmatch(stripper,str) or str
+end
+local cache={}
+local splitter=lpeg.tsplitat(";")
+local backslashswapper=lpeg.replacer("\\","/")
+local function splitconfigurationpath(str)
+ if str then
+ local found=cache[str]
+ if not found then
+ if str=="" then
+ found={}
+ else
+ local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
+ found={}
+ local noffound=0
+ for i=1,#split do
+ local s=split[i]
+ if not find(s,"^{*unset}*") then
+ noffound=noffound+1
+ found[noffound]=s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification %a",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
end
- return found
+ cache[str]=found
+ end
end
+ return found
+ end
end
-
-resolvers.splitconfigurationpath = splitconfigurationpath
-
+resolvers.splitconfigurationpath=splitconfigurationpath
function resolvers.splitpath(str)
- if type(str) == 'table' then
- return str
- else
- return splitconfigurationpath(str)
- end
+ if type(str)=='table' then
+ return str
+ else
+ return splitconfigurationpath(str)
+ end
end
-
function resolvers.joinpath(str)
- if type(str) == 'table' then
- return file.joinpath(str)
- else
- return str
- end
-end
-
--- The next function scans directories and returns a hash where the
--- entries are either strings or tables.
-
--- starting with . or .. etc or funny char
-
-
-
-
--- a lot of this caching can be stripped away when we have ssd's everywhere
---
--- we could cache all the (sub)paths here if needed
-
-local attributes, directory = lfs.attributes, lfs.dir
-
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-local timer = { }
-local scanned = { }
-local nofscans = 0
-local scancache = { }
-
+ if type(str)=='table' then
+ return file.joinpath(str)
+ else
+ return str
+ end
+end
+local attributes,directory=lfs.attributes,lfs.dir
+local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer={}
+local scanned={}
+local nofscans=0
+local scancache={}
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ n=n+1
+ local f=files[name]
+ if f then
+ if type(f)=='string' then
+ files[name]={ f,path }
+ else
+ f[#f+1]=path
+ end
+ else
+ files[name]=path
+ local lower=lower(name)
+ if name~=lower then
+ files["remap:"..lower]=name
+ r=r+1
+ end
+ end
+ elseif mode=='directory' then
+ m=m+1
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files, n, m, r = scan(files,spec,dirs[i],n,m,r)
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files,n,m,r=scan(files,spec,dirs[i],n,m,r)
end
- scancache[sub(full,1,-2)] = files
- return files, n, m, r
+ end
+ scancache[sub(full,1,-2)]=files
+ return files,n,m,r
end
-
-local fullcache = { }
-
+local fullcache={}
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = fullcache[realpath]
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1] = realpath
- fullcache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
-end
-
-local function simplescan(files,spec,path) -- first match only, no map and such
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if not files[name] then
- -- only first match
- files[name] = path
- end
- elseif mode == 'directory' then
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files,n,m,r=scan({},realpath..'/',"",0,0,0)
+ files.__path__=path
+ files.__files__=n
+ files.__directories__=m
+ files.__remappings__=r
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ fullcache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
+end
+local function simplescan(files,spec,path)
+ local full=(path=="" and spec) or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode=attributes(full..name,'mode')
+ if mode=='file' then
+ if not files[name] then
+ files[name]=path
+ end
+ elseif mode=='directory' then
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path..'/'..name
+ else
+ dirs[nofdirs]=name
end
+ end
end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files = simplescan(files,spec,dirs[i])
- end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files=simplescan(files,spec,dirs[i])
end
- return files
+ end
+ return files
end
-
-local simplecache = { }
-local nofsharedscans = 0
-
+local simplecache={}
+local nofsharedscans=0
function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = simplecache[realpath]
- if not files then
- files = scancache[realpath]
- if files then
- nofsharedscans = nofsharedscans + 1
- end
- end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
- end
- local files = simplescan({ },realpath .. '/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
- if usecache then
- scanned[#scanned+1] = realpath
- simplecache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
+ statistics.starttiming(timer)
+ local realpath=resolvers.resolve(path)
+ if usecache then
+ local files=simplecache[realpath]
+ if not files then
+ files=scancache[realpath]
+ if files then
+ nofsharedscans=nofsharedscans+1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local files=simplescan({},realpath..'/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ simplecache[realpath]=files
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return files
end
-
function resolvers.scandata()
- table.sort(scanned)
- return {
- n = nofscans,
- shared = nofsharedscans,
- time = statistics.elapsedtime(timer),
- paths = scanned,
- }
+ table.sort(scanned)
+ return {
+ n=nofscans,
+ shared=nofsharedscans,
+ time=statistics.elapsedtime(timer),
+ paths=scanned,
+ }
end
-
end -- of closure
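
For reference, the f_both combiner from the data-exp code above can be exercised on its own. This sketch reproduces it without lpeg and shows the element order it produces: {a,b}{c,d} expands to ac,bc,ad,bd, the same order as kpsewhich --expand-braces.

-- editorial sketch, not part of the diff
local function f_both(a, b)
  local t, n = {}, 0
  for sb in b:gmatch("[^,]+") do     -- outer loop over the second brace group
    for sa in a:gmatch("[^,]+") do   -- inner loop over the first brace group
      n = n + 1
      t[n] = sa .. sb
    end
  end
  return table.concat(t, ",")
end

print(f_both("a,b", "c,d")) -- ac,bc,ad,bd
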
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
+package.loaded["data-env"] = package.loaded["data-env"] or true
+
+-- original size: 8762, stripped down to: 6484
-local lower, gsub = string.lower, string.gsub
-
-local resolvers = resolvers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
-
-local formats = allocate()
-local suffixes = allocate()
-local dangerous = allocate()
-local suffixmap = allocate()
-
-resolvers.formats = formats
-resolvers.suffixes = suffixes
-resolvers.dangerous = dangerous
-resolvers.suffixmap = suffixmap
-
-local relations = allocate { -- todo: handlers also here
- core = {
- ofm = { -- will become obsolete
- names = { "ofm", "omega font metric", "omega font metrics" },
- variable = 'OFMFONTS',
- suffixes = { 'ofm', 'tfm' },
- },
- ovf = { -- will become obsolete
- names = { "ovf", "omega virtual font", "omega virtual fonts" },
- variable = 'OVFFONTS',
- suffixes = { 'ovf', 'vf' },
- },
- tfm = {
- names = { "tfm", "tex font metric", "tex font metrics" },
- variable = 'TFMFONTS',
- suffixes = { 'tfm' },
- },
- vf = {
- names = { "vf", "virtual font", "virtual fonts" },
- variable = 'VFFONTS',
- suffixes = { 'vf' },
- },
- otf = {
- names = { "otf", "opentype", "opentype font", "opentype fonts"},
- variable = 'OPENTYPEFONTS',
- suffixes = { 'otf' },
- },
- ttf = {
- names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" },
- variable = 'TTFONTS',
- suffixes = { 'ttf', 'ttc', 'dfont' },
- },
- afm = {
- names = { "afm", "adobe font metric", "adobe font metrics" },
- variable = "AFMFONTS",
- suffixes = { "afm" },
- },
- pfb = {
- names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" },
- variable = 'T1FONTS',
- suffixes = { 'pfb', 'pfa' },
- },
- fea = {
- names = { "fea", "font feature", "font features", "font feature file", "font feature files" },
- variable = 'FONTFEATURES',
- suffixes = { 'fea' },
- },
- cid = {
- names = { "cid", "cid map", "cid maps", "cid file", "cid files" },
- variable = 'FONTCIDMAPS',
- suffixes = { 'cid', 'cidmap' },
- },
- fmt = {
- names = { "fmt", "format", "tex format" },
- variable = 'TEXFORMATS',
- suffixes = { 'fmt' },
- },
- mem = { -- will become obsolete
- names = { 'mem', "metapost format" },
- variable = 'MPMEMS',
- suffixes = { 'mem' },
- },
- mp = {
- names = { "mp" },
- variable = 'MPINPUTS',
- suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
- },
- tex = {
- names = { "tex" },
- variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
- },
- icc = {
- names = { "icc", "icc profile", "icc profiles" },
- variable = 'ICCPROFILES',
- suffixes = { 'icc' },
- },
- texmfscripts = {
- names = { "texmfscript", "texmfscripts", "script", "scripts" },
- variable = 'TEXMFSCRIPTS',
- suffixes = { 'rb', 'pl', 'py' },
- },
- lua = {
- names = { "lua" },
- variable = 'LUAINPUTS',
- suffixes = { 'lua', 'luc', 'tma', 'tmc' },
- },
- lib = {
- names = { "lib" },
- variable = 'CLUAINPUTS',
- suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' },
- },
- bib = {
- names = { 'bib' },
- suffixes = { 'bib' },
- },
- bst = {
- names = { 'bst' },
- suffixes = { 'bst' },
- },
- fontconfig = {
- names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
- variable = 'FONTCONFIG_PATH',
- },
+if not modules then modules={} end modules ['data-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local lower,gsub=string.lower,string.gsub
+local resolvers=resolvers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+local suffixonly=file.suffixonly
+local formats=allocate()
+local suffixes=allocate()
+local dangerous=allocate()
+local suffixmap=allocate()
+resolvers.formats=formats
+resolvers.suffixes=suffixes
+resolvers.dangerous=dangerous
+resolvers.suffixmap=suffixmap
+local luasuffixes=utilities.lua.suffixes
+local relations=allocate {
+ core={
+ ofm={
+ names={ "ofm","omega font metric","omega font metrics" },
+ variable='OFMFONTS',
+ suffixes={ 'ofm','tfm' },
+ },
+ ovf={
+ names={ "ovf","omega virtual font","omega virtual fonts" },
+ variable='OVFFONTS',
+ suffixes={ 'ovf','vf' },
+ },
+ tfm={
+ names={ "tfm","tex font metric","tex font metrics" },
+ variable='TFMFONTS',
+ suffixes={ 'tfm' },
},
- obsolete = {
- enc = {
- names = { "enc", "enc files", "enc file", "encoding files", "encoding file" },
- variable = 'ENCFONTS',
- suffixes = { 'enc' },
- },
- map = {
- names = { "map", "map files", "map file" },
- variable = 'TEXFONTMAPS',
- suffixes = { 'map' },
- },
- lig = {
- names = { "lig files", "lig file", "ligature file", "ligature files" },
- variable = 'LIGFONTS',
- suffixes = { 'lig' },
- },
- opl = {
- names = { "opl" },
- variable = 'OPLFONTS',
- suffixes = { 'opl' },
- },
- ovp = {
- names = { "ovp" },
- variable = 'OVPFONTS',
- suffixes = { 'ovp' },
- },
+ vf={
+ names={ "vf","virtual font","virtual fonts" },
+ variable='VFFONTS',
+ suffixes={ 'vf' },
},
- kpse = { -- subset
- base = {
- names = { 'base', "metafont format" },
- variable = 'MFBASES',
- suffixes = { 'base', 'bas' },
- },
- cmap = {
- names = { 'cmap', 'cmap files', 'cmap file' },
- variable = 'CMAPFONTS',
- suffixes = { 'cmap' },
- },
- cnf = {
- names = { 'cnf' },
- suffixes = { 'cnf' },
- },
- web = {
- names = { 'web' },
- suffixes = { 'web', 'ch' }
- },
- cweb = {
- names = { 'cweb' },
- suffixes = { 'w', 'web', 'ch' },
- },
- gf = {
- names = { 'gf' },
- suffixes = { '<resolution>gf' },
- },
- mf = {
- names = { 'mf' },
- variable = 'MFINPUTS',
- suffixes = { 'mf' },
- },
- mft = {
- names = { 'mft' },
- suffixes = { 'mft' },
- },
- pk = {
- names = { 'pk' },
- suffixes = { '<resolution>pk' },
- },
+ otf={
+ names={ "otf","opentype","opentype font","opentype fonts"},
+ variable='OPENTYPEFONTS',
+ suffixes={ 'otf' },
},
+ ttf={
+ names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
+ variable='TTFONTS',
+ suffixes={ 'ttf','ttc','dfont' },
+ },
+ afm={
+ names={ "afm","adobe font metric","adobe font metrics" },
+ variable="AFMFONTS",
+ suffixes={ "afm" },
+ },
+ pfb={
+ names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
+ variable='T1FONTS',
+ suffixes={ 'pfb','pfa' },
+ },
+ fea={
+ names={ "fea","font feature","font features","font feature file","font feature files" },
+ variable='FONTFEATURES',
+ suffixes={ 'fea' },
+ },
+ cid={
+ names={ "cid","cid map","cid maps","cid file","cid files" },
+ variable='FONTCIDMAPS',
+ suffixes={ 'cid','cidmap' },
+ },
+ fmt={
+ names={ "fmt","format","tex format" },
+ variable='TEXFORMATS',
+ suffixes={ 'fmt' },
+ },
+ mem={
+ names={ 'mem',"metapost format" },
+ variable='MPMEMS',
+ suffixes={ 'mem' },
+ },
+ mp={
+ names={ "mp" },
+ variable='MPINPUTS',
+ suffixes={ 'mp','mpvi','mpiv','mpii' },
+ },
+ tex={
+ names={ "tex" },
+ variable='TEXINPUTS',
+ suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ },
+ icc={
+ names={ "icc","icc profile","icc profiles" },
+ variable='ICCPROFILES',
+ suffixes={ 'icc' },
+ },
+ texmfscripts={
+ names={ "texmfscript","texmfscripts","script","scripts" },
+ variable='TEXMFSCRIPTS',
+ suffixes={ 'rb','pl','py' },
+ },
+ lua={
+ names={ "lua" },
+ variable='LUAINPUTS',
+ suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ },
+ lib={
+ names={ "lib" },
+ variable='CLUAINPUTS',
+ suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
+ },
+ bib={
+ names={ 'bib' },
+ suffixes={ 'bib' },
+ },
+ bst={
+ names={ 'bst' },
+ suffixes={ 'bst' },
+ },
+ fontconfig={
+ names={ 'fontconfig','fontconfig file','fontconfig files' },
+ variable='FONTCONFIG_PATH',
+ },
+ },
+ obsolete={
+ enc={
+ names={ "enc","enc files","enc file","encoding files","encoding file" },
+ variable='ENCFONTS',
+ suffixes={ 'enc' },
+ },
+ map={
+ names={ "map","map files","map file" },
+ variable='TEXFONTMAPS',
+ suffixes={ 'map' },
+ },
+ lig={
+ names={ "lig files","lig file","ligature file","ligature files" },
+ variable='LIGFONTS',
+ suffixes={ 'lig' },
+ },
+ opl={
+ names={ "opl" },
+ variable='OPLFONTS',
+ suffixes={ 'opl' },
+ },
+ ovp={
+ names={ "ovp" },
+ variable='OVPFONTS',
+ suffixes={ 'ovp' },
+ },
+ },
+ kpse={
+ base={
+ names={ 'base',"metafont format" },
+ variable='MFBASES',
+ suffixes={ 'base','bas' },
+ },
+ cmap={
+ names={ 'cmap','cmap files','cmap file' },
+ variable='CMAPFONTS',
+ suffixes={ 'cmap' },
+ },
+ cnf={
+ names={ 'cnf' },
+ suffixes={ 'cnf' },
+ },
+ web={
+ names={ 'web' },
+ suffixes={ 'web','ch' }
+ },
+ cweb={
+ names={ 'cweb' },
+ suffixes={ 'w','web','ch' },
+ },
+ gf={
+ names={ 'gf' },
+ suffixes={ '<resolution>gf' },
+ },
+ mf={
+ names={ 'mf' },
+ variable='MFINPUTS',
+ suffixes={ 'mf' },
+ },
+ mft={
+ names={ 'mft' },
+ suffixes={ 'mft' },
+ },
+ pk={
+ names={ 'pk' },
+ suffixes={ '<resolution>pk' },
+ },
+ },
}
-
-resolvers.relations = relations
-
--- formats: maps a format onto a variable
-
+resolvers.relations=relations
function resolvers.updaterelations()
- for category, categories in next, relations do
- for name, relation in next, categories do
- local rn = relation.names
- local rv = relation.variable
- local rs = relation.suffixes
- if rn and rv then
- for i=1,#rn do
- local rni = lower(gsub(rn[i]," ",""))
- formats[rni] = rv
- if rs then
- suffixes[rni] = rs
- for i=1,#rs do
- local rsi = rs[i]
- suffixmap[rsi] = rni
- end
- end
- end
- end
- if rs then
- end
- end
- end
-end
-
-resolvers.updaterelations() -- push this in the metatable -> newindex
-
+ for category,categories in next,relations do
+ for name,relation in next,categories do
+ local rn=relation.names
+ local rv=relation.variable
+ local rs=relation.suffixes
+ if rn and rv then
+ for i=1,#rn do
+ local rni=lower(gsub(rn[i]," ",""))
+ formats[rni]=rv
+ if rs then
+ suffixes[rni]=rs
+ for i=1,#rs do
+ local rsi=rs[i]
+ suffixmap[rsi]=rni
+ end
+ end
+ end
+ end
+ if rs then
+ end
+ end
+ end
+end
+resolvers.updaterelations()
local function simplified(t,k)
- return k and rawget(t,lower(gsub(k," ",""))) or nil
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
-
-setmetatableindex(formats, simplified)
-setmetatableindex(suffixes, simplified)
-setmetatableindex(suffixmap, simplified)
-
--- A few accessors, mostly for command line tool.
-
+setmetatableindex(formats,simplified)
+setmetatableindex(suffixes,simplified)
+setmetatableindex(suffixmap,simplified)
function resolvers.suffixofformat(str)
- local s = suffixes[str]
- return s and s[1] or ""
+ local s=suffixes[str]
+ return s and s[1] or ""
end
-
function resolvers.suffixofformat(str)
- return suffixes[str] or { }
+ return suffixes[str] or {}
end
-
-for name, format in next, formats do
- dangerous[name] = true -- still needed ?
+for name,format in next,formats do
+ dangerous[name]=true
end
-
--- because vf searching is somewhat dangerous, we want to prevent
--- too liberal searching esp because we do a lookup on the current
--- path anyway; only tex (or any) is safe
-
-dangerous.tex = nil
-
-
--- more helpers
-
+dangerous.tex=nil
function resolvers.formatofvariable(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
-function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+function resolvers.formatofsuffix(str)
+ return suffixmap[suffixonly(str)] or 'tex'
end
-
function resolvers.variableofformat(str)
- return formats[str] or ''
+ return formats[str] or ''
end
-
function resolvers.variableofformatorsuffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = suffixmap[fileextname(str)]
- if v then
- return formats[v]
- end
- return ''
+ local v=formats[str]
+ if v then
+ return v
+ end
+ v=suffixmap[suffixonly(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
-
end -- of closure
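
As an aside, here is a reduced plain-Lua sketch of what updaterelations in the data-env code above builds from the relations table. Only two entries are shown and the lookup helper is simplified, so the names and sample files are illustrative assumptions, not the full table from the commit.

-- editorial sketch, not part of the diff
local formats, suffixes, suffixmap = {}, {}, {}

local relations = {
  tex = { names = { "tex" }, variable = "TEXINPUTS", suffixes = { "tex", "mkvi", "mkiv", "mkii" } },
  lua = { names = { "lua" }, variable = "LUAINPUTS", suffixes = { "lua", "luc" } },
}

for _, relation in pairs(relations) do
  for _, name in ipairs(relation.names) do
    local key = name:lower():gsub(" ", "")   -- normalized format name
    formats[key] = relation.variable         -- format -> environment variable
    suffixes[key] = relation.suffixes        -- format -> list of suffixes
    for _, s in ipairs(relation.suffixes) do
      suffixmap[s] = key                     -- suffix -> format
    end
  end
end

local function formatofsuffix(filename)
  local suffix = filename:match("%.([^%.]+)$")
  return suffix and suffixmap[suffix] or "tex" -- unknown suffixes fall back to 'tex'
end

print(formatofsuffix("foo.mkiv")) -- tex
print(formatofsuffix("foo.luc"))  -- lua
print(formatofsuffix("foo.xyz"))  -- tex (fallback)
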
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.</p>
-
-</code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
-local serialize, serializetofile = table.serialize, table.tofile
-local mkdirs, isdir = dir.mkdirs, lfs.isdir
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-
-local report_caches = logs.reporter("resolvers","caches")
-local report_resolvers = logs.reporter("resolvers","caching")
-
-local resolvers = resolvers
-
--- intermezzo
-
-local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end)
-local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end)
+package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
-local compile = utilities.lua.compile
+-- original size: 14308, stripped down to: 10956
+if not modules then modules={} end modules ['data-tmp']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
+local serialize,serializetofile=table.serialize,table.tofile
+local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
+local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
+local formatters=string.formatters
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local report_caches=logs.reporter("resolvers","caches")
+local report_resolvers=logs.reporter("resolvers","caching")
+local resolvers=resolvers
+local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
+local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
+local compile=utilities.lua.compile
function utilities.lua.compile(luafile,lucfile,cleanup,strip)
- if cleanup == nil then cleanup = directive_cleanup end
- if strip == nil then strip = directive_strip end
- return compile(luafile,lucfile,cleanup,strip)
-end
-
--- end of intermezzo
-
-caches = caches or { }
-local caches = caches
-
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.force = true
-caches.ask = false
-caches.relocate = false
-caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-local writable, readables, usedreadables = nil, { }, { }
-
--- we could use a metatable for writable and readable but not yet
-
+ if cleanup==nil then cleanup=directive_cleanup end
+ if strip==nil then strip=directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
+end
+caches=caches or {}
+local caches=caches
+local luasuffixes=utilities.lua.suffixes
+caches.base=caches.base or "luatex-cache"
+caches.more=caches.more or "context"
+caches.direct=false
+caches.tree=false
+caches.force=true
+caches.ask=false
+caches.relocate=false
+caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+local writable,readables,usedreadables=nil,{},{}
local function identify()
- -- Combining the loops makes it messy. First we check the format cache path
- -- and when the last component is not present we try to create it.
- local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- cachepath = file.collapsepath(cachepath)
- local valid = isdir(cachepath)
- if valid then
- if file.is_readable(cachepath) then
- readables[#readables+1] = cachepath
- if not writable and file.is_writable(cachepath) then
- writable = cachepath
- end
- end
- elseif not writable and caches.force then
- local cacheparent = file.dirname(cachepath)
- if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
- if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- mkdirs(cachepath)
- if isdir(cachepath) and file.is_writable(cachepath) then
- report_caches("created: %s",cachepath)
- writable = cachepath
- readables[#readables+1] = cachepath
- end
- end
- end
- end
- end
- end
- end
- -- As a last resort we check some temporary paths but this time we don't
- -- create them.
- local texmfcaches = caches.defaults
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath = texmfcaches[k]
- cachepath = resolvers.expansion(cachepath) -- was getenv
- if cachepath ~= "" then
- cachepath = resolvers.resolve(cachepath)
- cachepath = resolvers.cleanpath(cachepath)
- local valid = isdir(cachepath)
- if valid and file.is_readable(cachepath) then
- if not writable and file.is_writable(cachepath) then
- readables[#readables+1] = cachepath
- writable = cachepath
- break
- end
- end
- end
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ cachepath=file.collapsepath(cachepath)
+ local valid=isdir(cachepath)
+ if valid then
+ if is_readable(cachepath) then
+ readables[#readables+1]=cachepath
+ if not writable and is_writable(cachepath) then
+ writable=cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent=file.dirname(cachepath)
+ if is_writable(cacheparent) and true then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
+ writable=cachepath
+ readables[#readables+1]=cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local texmfcaches=caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ cachepath=resolvers.expansion(cachepath)
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ local valid=isdir(cachepath)
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
+ readables[#readables+1]=cachepath
+ writable=cachepath
+ break
+ end
end
+ end
end
- -- Some extra checking. If we have no writable or readable path then we simply
- -- quit.
- if not writable then
- report_caches("fatal error: there is no valid writable cache path defined")
- os.exit()
- elseif #readables == 0 then
- report_caches("fatal error: there is no valid readable cache path defined")
- os.exit()
- end
- -- why here
- writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case
- -- moved here
- local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
- if tree then
- caches.tree = tree
- writable = mkdirs(writable,base,more,tree)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more,tree)
- end
- else
- writable = mkdirs(writable,base,more)
- for i=1,#readables do
- readables[i] = file.join(readables[i],base,more)
- end
+ end
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables==0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ writable=dir.expandname(resolvers.cleanpath(writable))
+ local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
+ if tree then
+ caches.tree=tree
+ writable=mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more,tree)
end
- -- end
- if trace_cache then
- for i=1,#readables do
- report_caches("using readable path '%s' (order %s)",readables[i],i)
- end
- report_caches("using writable path '%s'",writable)
+ else
+ writable=mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more)
end
- identify = function()
- return writable, readables
+ end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path %a (order %s)",readables[i],i)
end
- return writable, readables
+ report_caches("using writable path %a",writable)
+ end
+ identify=function()
+ return writable,readables
+ end
+ return writable,readables
end
-
function caches.usedpaths()
- local writable, readables = identify()
- if #readables > 1 then
- local result = { }
- for i=1,#readables do
- local readable = readables[i]
- if usedreadables[i] or readable == writable then
- result[#result+1] = format("readable: '%s' (order %s)",readable,i)
- end
- end
- result[#result+1] = format("writable: '%s'",writable)
- return result
- else
- return writable
+ local writable,readables=identify()
+ if #readables>1 then
+ local result={}
+ for i=1,#readables do
+ local readable=readables[i]
+ if usedreadables[i] or readable==writable then
+ result[#result+1]=formatters["readable: %a (order %s)"](readable,i)
+ end
end
+ result[#result+1]=formatters["writable: %a"](writable)
+ return result
+ else
+ return writable
+ end
end
-
function caches.configfiles()
- return concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
-
function caches.hashed(tree)
- tree = gsub(tree,"\\$","/")
- tree = gsub(tree,"/+$","")
- tree = lower(tree)
- local hash = md5.hex(tree)
- if trace_cache or trace_locating then
- report_caches("hashing tree %s, hash %s",tree,hash)
- end
- return hash
+ tree=gsub(tree,"[\\/]+$","")
+ tree=lower(tree)
+ local hash=md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %a, hash %a",tree,hash)
+ end
+ return hash
end
-
function caches.treehash()
- local tree = caches.configfiles()
- if not tree or tree == "" then
- return false
+ local tree=caches.configfiles()
+ if not tree or tree=="" then
+ return false
+ else
+ return caches.hashed(tree)
+ end
+end
+local r_cache,w_cache={},{}
+local function getreadablepaths(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=r_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done={}
+ for i=1,#readables do
+ done[i]=file.join(readables[i],...)
+ end
else
- return caches.hashed(tree)
+ done=readables
end
+ r_cache[hash]=done
+ end
+ return done
end
-
-local r_cache, w_cache = { }, { } -- normally w in in r but who cares
-
-local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = r_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = { }
- for i=1,#readables do
- done[i] = file.join(readables[i],...)
- end
- else
- done = readables
- end
- r_cache[hash] = done
- end
- return done
-end
-
local function getwritablepath(...)
- local tags = { ... }
- local hash = concat(tags,"/")
- local done = w_cache[hash]
- if not done then
- local writable, readables = identify() -- exit if not found
- if #tags > 0 then
- done = mkdirs(writable,...)
- else
- done = writable
- end
- w_cache[hash] = done
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=w_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done=mkdirs(writable,...)
+ else
+ done=writable
end
- return done
+ w_cache[hash]=done
+ end
+ return done
end
-
-caches.getreadablepaths = getreadablepaths
-caches.getwritablepath = getwritablepath
-
+caches.getreadablepaths=getreadablepaths
+caches.getwritablepath=getwritablepath
function caches.getfirstreadablefile(filename,...)
- local rd = getreadablepaths(...)
- for i=1,#rd do
- local path = rd[i]
- local fullname = file.join(path,filename)
- if file.is_readable(fullname) then
- usedreadables[i] = true
- return fullname, path
- end
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
end
- return caches.setfirstwritablefile(filename,...)
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-
function caches.setfirstwritablefile(filename,...)
- local wr = getwritablepath(...)
- local fullname = file.join(wr,filename)
- return fullname, wr
+ local wr=getwritablepath(...)
+ local fullname=file.join(wr,filename)
+ return fullname,wr
end
-
-function caches.define(category,subcategory) -- for old times sake
- return function()
- return getwritablepath(category,subcategory)
- end
+function caches.define(category,subcategory)
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
-
function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
+ return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
end
-
function caches.loaddata(readables,name)
- if type(readables) == "string" then
- readables = { readables }
- end
- for i=1,#readables do
- local path = readables[i]
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- end
- end
- return false
+ if type(readables)=="string" then
+ readables={ readables }
+ end
+ for i=1,#readables do
+ local path=readables[i]
+ local tmaname,tmcname=caches.setluanames(path,name)
+ local loader=false
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader then
+ loader=loadfile(tmaname)
+ end
+ end
+ if loader then
+ loader=loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-
function caches.is_writable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.is_writable(tmaname)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ return is_writable(tmaname)
end
-
-local saveoptions = { compact = true }
-
+local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
- else
- serializetofile(tmaname,data,true,saveoptions)
- end
- utilities.lua.compile(tmaname,tmcname)
-end
-
--- moved from data-res:
-
-local content_state = { }
-
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ local reduce,simplify=true,true
+ if raw then
+ reduce,simplify=false,false
+ end
+ data.cache_uuid=os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,serialize(data,true,saveoptions))
+ else
+ serializetofile(tmaname,data,true,saveoptions)
+ end
+ utilities.lua.compile(tmaname,tmcname)
+end
+local content_state={}
function caches.contentstate()
- return content_state or { }
+ return content_state or {}
end
-
function caches.loadcontent(cachename,dataname)
- local name = caches.hashed(cachename)
- local full, path = caches.getfirstreadablefile(name ..".lua","trees")
- local filename = file.join(path,name)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content then
- if data.type == dataname then
- if data.version == resolvers.cacheversion then
- content_state[#content_state+1] = data.uuid
- if trace_locating then
- report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
- end
- return data.content
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename)
- end
- else
- report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename)
- end
- elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename)
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
+ if blob then
+ local data=blob()
+ if data and data.content then
+ if data.type==dataname then
+ if data.version==resolvers.cacheversion then
+ content_state[#content_state+1]=data.uuid
+ if trace_locating then
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
end
+ else
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
+ end
elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
+ end
end
-
function caches.collapsecontent(content)
- for k, v in next, content do
- if type(v) == "table" and #v == 1 then
- content[k] = v[1]
- end
+ for k,v in next,content do
+ if type(v)=="table" and #v==1 then
+ content[k]=v[1]
end
+ end
end
-
function caches.savecontent(cachename,dataname,content)
- local name = caches.hashed(cachename)
- local full, path = caches.setfirstwritablefile(name ..".lua","trees")
- local filename = file.join(path,name) -- is full
- local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ local filename=file.join(path,name)
+ local luaname=addsuffix(filename,luasuffixes.lua)
+ local lucname=addsuffix(filename,luasuffixes.luc)
+ if trace_locating then
+ report_resolvers("preparing %a for %a",dataname,cachename)
+ end
+ local data={
+ type=dataname,
+ root=cachename,
+ version=resolvers.cacheversion,
+ date=os.date("%Y-%m-%d"),
+ time=os.date("%H:%M:%S"),
+ content=content,
+ uuid=os.uuid(),
+ }
+ local ok=io.savedata(luaname,serialize(data,true))
+ if ok then
if trace_locating then
- report_resolvers("preparing '%s' for '%s'",dataname,cachename)
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = content,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,serialize(data,true))
- if ok then
- if trace_locating then
- report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
- end
- if utilities.lua.compile(luaname,lucname) then
- if trace_locating then
- report_resolvers("'%s' compiled to '%s'",dataname,lucname)
- end
- return true
- else
- if trace_locating then
- report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("%a compiled to %a",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
+ end
end
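-- Editor's sketch (not part of the patch): the record that caches.savecontent
-- writes and that caches.loadcontent above verifies by type and version before
-- handing back its content field. All concrete values below are made up.
local data = {
  type    = "files",                        -- the dataname passed in
  root    = "/some/texmf/tree",             -- the cachename, a hypothetical tree root
  version = resolvers.cacheversion,         -- "1.0.1"; a mismatch makes loadcontent skip the file
  date    = os.date("%Y-%m-%d"),
  time    = os.date("%H:%M:%S"),
  uuid    = os.uuid(),
  content = { ["context.mkiv"] = "tex/context/base" },  -- basename -> subpath (or list of subpaths)
}
-- savecontent serializes such a record to <hashedname>.lua and, when it can,
-- compiles it to <hashedname>.luc; loadcontent prefers the .luc and falls back
-- to the plain .lua file.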
@@ -12237,1999 +12513,1700 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-met'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find, format = string.find, string.format
-local sequenced = table.sequenced
-local addurlscheme, urlhashed = url.addscheme, url.hashed
-
-local trace_locating = false
-
-trackers.register("resolvers.locating", function(v) trace_methods = v end)
-trackers.register("resolvers.methods", function(v) trace_methods = v end)
-
-
-local report_methods = logs.reporter("resolvers","methods")
-
-local allocate = utilities.storage.allocate
-
-local resolvers = resolvers
+package.loaded["data-met"] = package.loaded["data-met"] or true
-local registered = { }
-
-local function splitmethod(filename) -- todo: filetype in specification
- if not filename then
- return { scheme = "unknown", original = filename }
- end
- if type(filename) == "table" then
- return filename -- already split
- end
- filename = file.collapsepath(filename)
- if not find(filename,"://") then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- end
- local specification = url.hashed(filename)
- if not specification.scheme or specification.scheme == "" then
- return { scheme = "file", path = filename, original = filename, filename = filename }
- else
- return specification
- end
-end
-
-resolvers.splitmethod = splitmethod -- bad name but ok
-
--- the second argument is always analyzed (saves time later on) and the original
--- gets passed as original but also as argument
-
-local function methodhandler(what,first,...) -- filename can be nil or false
- local method = registered[what]
- if method then
- local how, namespace = method.how, method.namespace
- if how == "uri" or how == "url" then
- local specification = splitmethod(first)
- local scheme = specification.scheme
- local resolver = namespace and namespace[scheme]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
- end
- return resolver(specification,...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
- end
- return resolver(specification,...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, no handler",what,how)
- end
- end
- elseif how == "tag" then
- local resolver = namespace and namespace[first]
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first)
- end
- return resolver(...)
- else
- resolver = namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolver: method=%s, how=%s, default",what,how)
- end
- return resolver(...)
- elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, unknown",what,how)
- end
- end
- end
- else
- report_methods("resolver: method=%s, unknown",what)
- end
-end
-
-resolvers.methodhandler = methodhandler
+-- original size: 4915, stripped down to: 3942
+if not modules then modules={} end modules ['data-met']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,format=string.find,string.format
+local sequenced=table.sequenced
+local addurlscheme,urlhashed=url.addscheme,url.hashed
+local trace_locating=false
+trackers.register("resolvers.locating",function(v) trace_methods=v end)
+trackers.register("resolvers.methods",function(v) trace_methods=v end)
+local report_methods=logs.reporter("resolvers","methods")
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registered={}
+local function splitmethod(filename)
+ if not filename then
+ return { scheme="unknown",original=filename }
+ end
+ if type(filename)=="table" then
+ return filename
+ end
+ filename=file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ end
+ local specification=url.hashed(filename)
+ if not specification.scheme or specification.scheme=="" then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ else
+ return specification
+ end
+end
+resolvers.splitmethod=splitmethod
+local function methodhandler(what,first,...)
+ local method=registered[what]
+ if method then
+ local how,namespace=method.how,method.namespace
+ if how=="uri" or how=="url" then
+ local specification=splitmethod(first)
+ local scheme=specification.scheme
+ local resolver=namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
+ end
+ end
+ elseif how=="tag" then
+ local resolver=namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
+ end
+ end
+ end
+ else
+ report_methods("resolving, invalid method %a")
+ end
+end
+resolvers.methodhandler=methodhandler
function resolvers.registermethod(name,namespace,how)
- registered[name] = { how = how or "tag", namespace = namespace }
- namespace["byscheme"] = function(scheme,filename,...)
- if scheme == "file" then
- return methodhandler(name,filename,...)
- else
- return methodhandler(name,addurlscheme(filename,scheme),...)
- end
- end
-end
-
-local concatinators = allocate { notfound = file.join } -- concatinate paths
-local locators = allocate { notfound = function() end } -- locate databases
-local hashers = allocate { notfound = function() end } -- load databases
-local generators = allocate { notfound = function() end } -- generate databases
-
-resolvers.concatinators = concatinators
-resolvers.locators = locators
-resolvers.hashers = hashers
-resolvers.generators = generators
-
-local registermethod = resolvers.registermethod
-
+ registered[name]={ how=how or "tag",namespace=namespace }
+ namespace["byscheme"]=function(scheme,filename,...)
+ if scheme=="file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
+local concatinators=allocate { notfound=file.join }
+local locators=allocate { notfound=function() end }
+local hashers=allocate { notfound=function() end }
+local generators=allocate { notfound=function() end }
+resolvers.concatinators=concatinators
+resolvers.locators=locators
+resolvers.hashers=hashers
+resolvers.generators=generators
+local registermethod=resolvers.registermethod
registermethod("concatinators",concatinators,"tag")
-registermethod("locators", locators, "uri")
-registermethod("hashers", hashers, "uri")
-registermethod("generators", generators, "uri")
+registermethod("locators",locators,"uri")
+registermethod("hashers",hashers,"uri")
+registermethod("generators",generators,"uri")
end -- of closure
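-- Editor's sketch (not part of the patch): how the "uri" dispatch set up in
-- data-met behaves. The finders namespace and its handlers are invented here
-- purely for illustration.
local finders = { }
function finders.file(specification)      -- handler for the plain "file" scheme
  return specification.path
end
function finders.default(specification)   -- fallback when a scheme has no handler
  return specification.original
end
resolvers.registermethod("finders",finders,"uri")
-- registermethod installs finders.byscheme, so a call like
--   finders.byscheme("file","oeps.tex")
-- goes through methodhandler("finders","oeps.tex"), which splits the name,
-- sees scheme "file" and dispatches to finders.file(specification); unknown
-- schemes end up in finders.default (or finders.file when default is absent).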
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use a
--- instance variable. We always have one instance active (sort of global).
-
--- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
-
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type, rawget = next, type, rawget
-local os = os
+package.loaded["data-res"] = package.loaded["data-res"] or true
-local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local filedirname = file.dirname
-local filebasename = file.basename
-local fileextname = file.extname
-local filejoin = file.join
-local collapsepath = file.collapsepath
-local joinpath = file.joinpath
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-
-local report_resolving = logs.reporter("resolvers","resolving")
-
-local resolvers = resolvers
-
-local expandedpathfromlist = resolvers.expandedpathfromlist
-local checkedvariable = resolvers.checkedvariable
-local splitconfigurationpath = resolvers.splitconfigurationpath
-local methodhandler = resolvers.methodhandler
-
-local initializesetter = utilities.setters.initialize
-
-local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
-
-resolvers.cacheversion = '1.0.1'
-resolvers.configbanner = ''
-resolvers.homedir = environment.homedir
-resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
-resolvers.luacnfstate = "unknown"
-
--- The web2c tex binaries as well as kpse have built in paths for the configuration
--- files and there can be a depressing truckload of them. This is actually the weak
--- spot of a distribution. So we don't want:
---
--- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
---
--- but instead use:
---
--- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
---
--- which does not make texlive happy as there is a texmf-local tree one level up
--- (sigh), so we need this. We can assume web2c as mkiv does not run on older
--- texlives anyway.
---
--- texlive:
---
--- selfautodir:
--- selfautoparent:
--- selfautodir:share/texmf-local/web2c
--- selfautodir:share/texmf/web2c
--- selfautodir:texmf-local/web2c
--- selfautodir:texmf/web2c
--- selfautoparent:share/texmf-local/web2c
--- selfautoparent:share/texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf/web2c
---
--- minimals:
---
--- home:texmf/web2c
--- selfautoparent:texmf-local/web2c
--- selfautoparent:texmf-context/web2c
--- selfautoparent:texmf/web2c
+-- original size: 60821, stripped down to: 42503
+if not modules then modules={} end modules ['data-res']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
+local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local next,type,rawget=next,type,rawget
+local os=os
+local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local formatters=string.formatters
+local filedirname=file.dirname
+local filebasename=file.basename
+local suffixonly=file.suffixonly
+local filejoin=file.join
+local collapsepath=file.collapsepath
+local joinpath=file.joinpath
+local allocate=utilities.storage.allocate
+local settings_to_array=utilities.parsers.settings_to_array
+local setmetatableindex=table.setmetatableindex
+local luasuffixes=utilities.lua.suffixes
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_resolving=logs.reporter("resolvers","resolving")
+local resolvers=resolvers
+local expandedpathfromlist=resolvers.expandedpathfromlist
+local checkedvariable=resolvers.checkedvariable
+local splitconfigurationpath=resolvers.splitconfigurationpath
+local methodhandler=resolvers.methodhandler
+local initializesetter=utilities.setters.initialize
+local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
+resolvers.cacheversion='1.0.1'
+resolvers.configbanner=''
+resolvers.homedir=environment.homedir
+resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
+resolvers.luacnfname="texmfcnf.lua"
+resolvers.luacnfstate="unknown"
if environment.default_texmfcnf then
- -- unfortunately we now have quite some overkill in the spec (not so nice on a network)
- resolvers.luacnfspec = environment.default_texmfcnf
+ resolvers.luacnfspec=environment.default_texmfcnf
else
- -- resolvers.luacnfspec = "selfautoparent:texmf{-local,-context,}/web2c"
- resolvers.luacnfspec = "{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
-end
-
-resolvers.luacnfspec = 'home:texmf/web2c;' .. resolvers.luacnfspec
-
--- which (as we want users to use the web2c path) be can be simplified to this:
---
--- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
--- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- else
--- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
--- end
-
-
-
-local unset_variable = "unset"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-
-resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
-
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
-
--- An instance has an environment (coming from the outside, kept raw), variables
--- (coming from the configuration file), and expansions (variables with nested
--- variables replaced). One can push something into the outer environment and
--- its internal copy, but only the later one will be the raw unprefixed variant.
-
+ resolvers.luacnfspec="{selfautoloc:,selfautodir:,selfautoparent:}{,/texmf{-local,}/web2c}"
+end
+resolvers.luacnfspec='home:texmf/web2c;'..resolvers.luacnfspec
+local unset_variable="unset"
+local formats=resolvers.formats
+local suffixes=resolvers.suffixes
+local dangerous=resolvers.dangerous
+local suffixmap=resolvers.suffixmap
+resolvers.defaultsuffixes={ "tex" }
+resolvers.instance=resolvers.instance or nil
+local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
- if instance then
- -- this one will be consulted first when we stay inside
- -- the current environment (prefixes are not resolved here)
- instance.environment[key] = value
- -- we feed back into the environment, and as this is used
- -- by other applications (via os.execute) we need to make
- -- sure that prefixes are resolve
- ossetenv(key,raw and value or resolvers.resolve(value))
- end
+ if instance then
+ instance.environment[key]=value
+ ossetenv(key,raw and value or resolvers.resolve(value))
+ end
end
-
--- Beware we don't want empty here as this one can be called early on
--- and therefore we use rawget.
-
local function getenv(key)
- local value = rawget(instance.environment,key)
- if value and value ~= "" then
- return value
- else
- local e = osgetenv(key)
- return e ~= nil and e ~= "" and checkedvariable(e) or ""
- end
-end
-
-resolvers.getenv = getenv
-resolvers.env = getenv
-
--- We are going to use some metatable trickery where we backtrack from
--- expansion to variable to environment.
-
+ local value=rawget(instance.environment,key)
+ if value and value~="" then
+ return value
+ else
+ local e=osgetenv(key)
+ return e~=nil and e~="" and checkedvariable(e) or ""
+ end
+end
+resolvers.getenv=getenv
+resolvers.env=getenv
local function resolve(k)
- return instance.expansions[k]
-end
-
-local dollarstripper = lpeg.stripper("$")
-local inhibitstripper = P("!")^0 * Cs(P(1)^0)
-local backslashswapper = lpeg.replacer("\\","/")
-
-local somevariable = P("$") / ""
-local somekey = C(R("az","AZ","09","__","--")^1)
-local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
- + P(";") * (P(";") / "")
- + P(1)
-local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
-
-local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
-local variablecleaner = Cs((cleaner + P(1))^0)
-
-local somevariable = R("az","AZ","09","__","--")^1 / resolve
-local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
-local variableresolver = Cs((variable + P(1))^0)
-
+ return instance.expansions[k]
+end
+local dollarstripper=lpeg.stripper("$")
+local inhibitstripper=P("!")^0*Cs(P(1)^0)
+local backslashswapper=lpeg.replacer("\\","/")
+local somevariable=P("$")/""
+local somekey=C(R("az","AZ","09","__","--")^1)
+local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
+local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
+local variablecleaner=Cs((cleaner+P(1))^0)
+local somevariable=R("az","AZ","09","__","--")^1/resolve
+local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
+local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
- return lpegmatch(variableexpander,var) or var
-end
-
-function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
-
- if trace_locating then
- report_resolving("creating instance")
- end
-
- local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
-
- local newinstance = {
- environment = environment,
- variables = variables,
- expansions = expansions,
- order = order,
- files = allocate(),
- setups = allocate(),
- found = allocate(),
- foundintrees = allocate(),
- hashes = allocate(),
- hashed = allocate(),
- specification = allocate(),
- lists = allocate(),
- data = allocate(), -- only for loading
- fakepaths = allocate(),
- remember = true,
- diskcache = true,
- renewcache = false,
- renewtree = false,
- loaderror = false,
- savelists = true,
- pattern = nil, -- lists
- force_suffixes = true,
- }
-
- setmetatableindex(variables,function(t,k)
- local v
- for i=1,#order do
- v = order[i][k]
- if v ~= nil then
- t[k] = v
- return v
- end
- end
- if v == nil then
- v = ""
- end
- t[k] = v
- return v
- end)
-
- setmetatableindex(environment, function(t,k)
- local v = osgetenv(k)
- if v == nil then
- v = variables[k]
- end
- if v ~= nil then
- v = checkedvariable(v) or ""
- end
- v = resolvers.repath(v) -- for taco who has a : separated osfontdir
- t[k] = v
- return v
- end)
-
- setmetatableindex(expansions, function(t,k)
- local v = environment[k]
- if type(v) == "string" then
- v = lpegmatch(variableresolver,v)
- v = lpegmatch(variablecleaner,v)
- end
- t[k] = v
+ return lpegmatch(variableexpander,var) or var
+end
+function resolvers.newinstance()
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+ local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
+ local newinstance={
+ environment=environment,
+ variables=variables,
+ expansions=expansions,
+ order=order,
+ files=allocate(),
+ setups=allocate(),
+ found=allocate(),
+ foundintrees=allocate(),
+ hashes=allocate(),
+ hashed=allocate(),
+ specification=allocate(),
+ lists=allocate(),
+ data=allocate(),
+ fakepaths=allocate(),
+ remember=true,
+ diskcache=true,
+ renewcache=false,
+ renewtree=false,
+ loaderror=false,
+ savelists=true,
+ pattern=nil,
+ force_suffixes=true,
+ }
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v=order[i][k]
+ if v~=nil then
+ t[k]=v
return v
- end)
-
- return newinstance
-
+ end
+ end
+ if v==nil then
+ v=""
+ end
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(environment,function(t,k)
+ local v=osgetenv(k)
+ if v==nil then
+ v=variables[k]
+ end
+ if v~=nil then
+ v=checkedvariable(v) or ""
+ end
+ v=resolvers.repath(v)
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(expansions,function(t,k)
+ local v=environment[k]
+ if type(v)=="string" then
+ v=lpegmatch(variableresolver,v)
+ v=lpegmatch(variablecleaner,v)
+ end
+ t[k]=v
+ return v
+ end)
+ return newinstance
+end
+function resolvers.setinstance(someinstance)
+ instance=someinstance
+ resolvers.instance=someinstance
+ return someinstance
end
-
-function resolvers.setinstance(someinstance) -- only one instance is active
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
-end
-
function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
+ return resolvers.setinstance(resolvers.newinstance())
end
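-- Editor's sketch (not part of the patch): the lookup chain built by the
-- setmetatableindex calls in resolvers.newinstance above.
local inst = resolvers.setinstance(resolvers.newinstance())
-- inst.variables.TEXMF    -- first hit while walking inst.order, else ""
-- inst.environment.TEXMF  -- osgetenv("TEXMF"), falling back to variables.TEXMF
-- inst.expansions.TEXMF   -- environment.TEXMF with $VAR references resolved and cleaned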
-
local function reset_hashes()
- instance.lists = { }
- instance.found = { }
-end
-
-local slash = P("/")
-
-local pathexpressionpattern = Cs (
- Cc("^") * (
- Cc("%") * S(".-")
- + slash^2 * P(-1) / "/.*"
- + slash^2 / "/.-/"
- + (1-slash) * P(-1) * Cc("/")
- + P(1)
- )^1 * Cc("$") -- yes or no $
+ instance.lists={}
+ instance.found={}
+end
+local slash=P("/")
+local pathexpressionpattern=Cs (
+ Cc("^")*(
+ Cc("%")*S(".-")+slash^2*P(-1)/"/.*"
++slash^2/"/[^/]*/*"+(1-slash)*P(-1)*Cc("/")+P(1)
+ )^1*Cc("$")
)
-
-local cache = { }
-
+local cache={}
local function makepathexpression(str)
- if str == "." then
- return "^%./$"
- else
- local c = cache[str]
- if not c then
- c = lpegmatch(pathexpressionpattern,str)
- cache[str] = c
- end
- return c
+ if str=="." then
+ return "^%./$"
+ else
+ local c=cache[str]
+ if not c then
+ c=lpegmatch(pathexpressionpattern,str)
+ cache[str]=c
end
+ return c
+ end
end
-
local function reportcriticalvariables(cnfspec)
- if trace_locating then
- for i=1,#resolvers.criticalvars do
- local k = resolvers.criticalvars[i]
- local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
- report_resolving("variable '%s' set to '%s'",k,v)
- end
- report_resolving()
- if cnfspec then
- if type(cnfspec) == "table" then
- report_resolving("using configuration specification '%s'",concat(cnfspec,","))
- else
- report_resolving("using configuration specification '%s'",cnfspec)
- end
- end
- report_resolving()
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k=resolvers.criticalvars[i]
+ local v=resolvers.getenv(k) or "unknown"
+ report_resolving("variable %a set to %a",k,v)
end
- reportcriticalvariables = function() end
+ report_resolving()
+ if cnfspec then
+ report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
+ end
+ report_resolving()
+ end
+ reportcriticalvariables=function() end
end
-
local function identify_configuration_files()
- local specification = instance.specification
- if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
- if cnfspec == "" then
- cnfspec = resolvers.luacnfspec
- resolvers.luacnfstate = "default"
- else
- resolvers.luacnfstate = "environment"
- end
- reportcriticalvariables(cnfspec)
- local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
- local luacnfname = resolvers.luacnfname
- for i=1,#cnfpaths do
- local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
- local realname = resolvers.resolve(filename)
- if lfs.isfile(realname) then
- specification[#specification+1] = filename
- if trace_locating then
- report_resolving("found configuration file '%s'",realname)
- end
- elseif trace_locating then
- report_resolving("unknown configuration file '%s'",realname)
- end
- end
+ local specification=instance.specification
+ if #specification==0 then
+ local cnfspec=getenv("TEXMFCNF")
+ if cnfspec=="" then
+ cnfspec=resolvers.luacnfspec
+ resolvers.luacnfstate="default"
+ else
+ resolvers.luacnfstate="environment"
+ end
+ reportcriticalvariables(cnfspec)
+ local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname=resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename=collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname=resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1]=filename
if trace_locating then
- report_resolving()
+ report_resolving("found configuration file %a",realname)
end
- elseif trace_locating then
- report_resolving("configuration files already identified")
+ elseif trace_locating then
+ report_resolving("unknown configuration file %a",realname)
+ end
end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
end
-
local function load_configuration_files()
- local specification = instance.specification
- if #specification > 0 then
- local luacnfname = resolvers.luacnfname
- for i=1,#specification do
- local filename = specification[i]
- local pathname = filedirname(filename)
- local filename = filejoin(pathname,luacnfname)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local setups = instance.setups
- local data = blob()
- local parent = data and data.parent
- if parent then
- local filename = filejoin(pathname,parent)
- local realname = resolvers.resolve(filename) -- no shortcut
- local blob = loadfile(realname)
- if blob then
- local parentdata = blob()
- if parentdata then
- report_resolving("loading configuration file '%s'",filename)
- data = table.merged(parentdata,data)
- end
- end
- end
- data = data and data.content
- if data then
- if trace_locating then
- report_resolving("loading configuration file '%s'",filename)
- report_resolving()
- end
- local variables = data.variables or { }
- local warning = false
- for k, v in next, data do
- local variant = type(v)
- if variant == "table" then
- initializesetter(filename,k,v)
- elseif variables[k] == nil then
- if trace_locating and not warning then
- report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
- warning = true
- end
- variables[k] = v
- end
- end
- setups[pathname] = variables
- if resolvers.luacnfstate == "default" then
- -- the following code is not tested
- local cnfspec = variables["TEXMFCNF"]
- if cnfspec then
- if trace_locating then
- report_resolving("reloading configuration due to TEXMF redefinition")
- end
- -- we push the value into the main environment (osenv) so
- -- that it takes precedence over the default one and therefore
- -- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
- -- we now identify and load the specified configuration files
- instance.specification = { }
- identify_configuration_files()
- load_configuration_files()
- -- we prevent further overload of the configuration variable
- resolvers.luacnfstate = "configuration"
- -- we quit the outer loop
- break
- end
- end
-
- else
- if trace_locating then
- report_resolving("skipping configuration file '%s' (no content)",filename)
- end
- setups[pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- report_resolving("skipping configuration file '%s' (no valid format)",filename)
- end
- instance.order[#instance.order+1] = instance.setups[pathname]
- if instance.loaderror then
- break
- end
- end
- elseif trace_locating then
- report_resolving("warning: no lua configuration files found")
- end
+ local specification=instance.specification
+ if #specification>0 then
+ local luacnfname=resolvers.luacnfname
+ for i=1,#specification do
+ local filename=specification[i]
+ local pathname=filedirname(filename)
+ local filename=filejoin(pathname,luacnfname)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local setups=instance.setups
+ local data=blob()
+ local parent=data and data.parent
+ if parent then
+ local filename=filejoin(pathname,parent)
+ local realname=resolvers.resolve(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local parentdata=blob()
+ if parentdata then
+ report_resolving("loading configuration file %a",filename)
+ data=table.merged(parentdata,data)
+ end
+ end
+ end
+ data=data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file %a",filename)
+ report_resolving()
+ end
+ local variables=data.variables or {}
+ local warning=false
+ for k,v in next,data do
+ local variant=type(v)
+ if variant=="table" then
+ initializesetter(filename,k,v)
+ elseif variables[k]==nil then
+ if trace_locating and not warning then
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning=true
+ end
+ variables[k]=v
+ end
+ end
+ setups[pathname]=variables
+ if resolvers.luacnfstate=="default" then
+ local cnfspec=variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ resolvers.setenv("TEXMFCNF",cnfspec)
+ instance.specification={}
+ identify_configuration_files()
+ load_configuration_files()
+ resolvers.luacnfstate="configuration"
+ break
+ end
+ end
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file %a (no content)",filename)
+ end
+ setups[pathname]={}
+ instance.loaderror=true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file %a (no valid format)",filename)
+ end
+ instance.order[#instance.order+1]=instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
+ end
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
+ end
end
-
--- scheme magic ... database loading
-
local function load_file_databases()
- instance.loaderror, instance.files = false, allocate()
- if not instance.renewcache then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- resolvers.hashers.byscheme(hash.type,hash.name)
- if instance.loaderror then break end
- end
+ instance.loaderror,instance.files=false,allocate()
+ if not instance.renewcache then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
end
+ end
end
-
local function locate_file_databases()
- -- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
- if #texmfpaths > 0 then
- for i=1,#texmfpaths do
- local path = collapsepath(texmfpaths[i])
- local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
- if stripped ~= "" then
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
- local spec = resolvers.splitmethod(stripped)
- if runtime and (spec.noscheme or spec.scheme == "file") then
- stripped = "tree:///" .. stripped
- elseif spec.scheme == "cache" or spec.scheme == "file" then
- stripped = spec.path
- end
- if trace_locating then
- if runtime then
- report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
- else
- report_resolving("locating list of '%s' (cached)",path)
- end
- end
- methodhandler('locators',stripped)
- end
+ local texmfpaths=resolvers.expandedpathlist("TEXMF")
+ if #texmfpaths>0 then
+ for i=1,#texmfpaths do
+ local path=collapsepath(texmfpaths[i])
+ path=gsub(path,"/+$","")
+ local stripped=lpegmatch(inhibitstripper,path)
+ if stripped~="" then
+ local runtime=stripped==path
+ path=resolvers.cleanpath(path)
+ local spec=resolvers.splitmethod(stripped)
+ if runtime and (spec.noscheme or spec.scheme=="file") then
+ stripped="tree:///"..stripped
+ elseif spec.scheme=="cache" or spec.scheme=="file" then
+ stripped=spec.path
end
if trace_locating then
- report_resolving()
+ if runtime then
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
+ else
+ report_resolving("locating list of %a (cached)",path)
+ end
end
- elseif trace_locating then
- report_resolving("no texmf paths are defined (using TEXMF)")
- end
-end
-
-local function generate_file_databases()
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- methodhandler('generators',hash.name)
+ methodhandler('locators',stripped)
+ end
end
if trace_locating then
- report_resolving()
+ report_resolving()
end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
+ end
end
-
-local function save_file_databases() -- will become cachers
- for i=1,#instance.hashes do
- local hash = instance.hashes[i]
- local cachename = hash.name
- if hash.cache then
- local content = instance.files[cachename]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",cachename)
- end
- caches.savecontent(cachename,"files",content)
- elseif trace_locating then
- report_resolving("not saving runtime tree '%s'",cachename)
- end
+local function generate_file_databases()
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
+end
+local function save_file_databases()
+ for i=1,#instance.hashes do
+ local hash=instance.hashes[i]
+ local cachename=hash.name
+ if hash.cache then
+ local content=instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree %a",cachename)
end
+ end
end
-
function resolvers.renew(hashname)
- if hashname and hashname ~= "" then
- local expanded = resolvers.expansion(hashname) or ""
- if expanded ~= "" then
- if trace_locating then
- report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
- end
- hashname = expanded
- else
- if trace_locating then
- report_resolving("identifying tree '%s'",hashname)
- end
- end
- local realpath = resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
- if trace_locating then
- report_resolving("using path '%s'",realpath)
- end
- methodhandler('generators',hashname)
- -- could be shared
- local content = instance.files[hashname]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree '%s'",hashname)
- end
- caches.savecontent(hashname,"files",content)
- -- till here
- else
- report_resolving("invalid path '%s'",realpath)
- end
+ if hashname and hashname~="" then
+ local expanded=resolvers.expansion(hashname) or ""
+ if expanded~="" then
+ if trace_locating then
+ report_resolving("identifying tree %a from %a",expanded,hashname)
+ end
+ hashname=expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree %a",hashname)
+ end
+ end
+ local realpath=resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path %a",realpath)
+ end
+ methodhandler('generators',hashname)
+ local content=instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ else
+ report_resolving("invalid path %a",realpath)
end
+ end
end
-
local function load_databases()
- locate_file_databases()
- if instance.diskcache and not instance.renewcache then
- load_file_databases()
- if instance.loaderror then
- generate_file_databases()
- save_file_databases()
- end
- else
- generate_file_databases()
- if instance.renewcache then
- save_file_databases()
- end
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
+ end
end
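-- Editor's outline (not part of the patch) of what load_databases amounts to:
--   locate_file_databases()     -- register a locator for each TEXMF path
--   load_file_databases()       -- try the cached trees when diskcache is set
--   generate_file_databases()   -- rescan when loading failed or renewcache is set
--   save_file_databases()       -- write freshly generated trees back to the cache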
-
function resolvers.appendhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' appended",name)
- end
- insert(instance.hashes, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a appended",name)
end
+ insert(instance.hashes,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
end
-
function resolvers.prependhash(type,name,cache)
- -- safeguard ... tricky as it's actually a bug when seen twice
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash '%s' prepended",name)
- end
- insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
- instance.hashed[name] = cache
- end
-end
-
-function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- reset_hashes()
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a prepended",name)
+ end
+ insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.extendtexmfvariable(specification)
+ local t=resolvers.splitpath(getenv("TEXMF"))
+ insert(t,1,specification)
+ local newspec=concat(t,",")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"]=newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"]=newspec
+ else
+ end
+ reset_hashes()
end
-
function resolvers.splitexpansions()
- local ie = instance.expansions
- for k,v in next, ie do
- local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
- for kk=1,#p do
- local vv = p[kk]
- if vv ~= "" and not h[vv] then
- tn = tn + 1
- t[tn] = vv
- h[vv] = true
- end
- end
- if #t > 1 then
- ie[k] = t
- else
- ie[k] = t[1]
- end
+ local ie=instance.expansions
+ for k,v in next,ie do
+ local t,tn,h,p={},0,{},splitconfigurationpath(v)
+ for kk=1,#p do
+ local vv=p[kk]
+ if vv~="" and not h[vv] then
+ tn=tn+1
+ t[tn]=vv
+ h[vv]=true
+ end
+ end
+ if #t>1 then
+ ie[k]=t
+ else
+ ie[k]=t[1]
end
+ end
end
-
--- end of split/join code
-
--- we used to have 'files' and 'configurations' so therefore the following
--- shared function
-
function resolvers.datastate()
- return caches.contentstate()
+ return caches.contentstate()
end
-
function resolvers.variable(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.variables[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.variables[name]
+ return result~=nil and result or ""
end
-
function resolvers.expansion(name)
- local name = name and lpegmatch(dollarstripper,name)
- local result = name and instance.expansions[name]
- return result ~= nil and result or ""
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.expansions[name]
+ return result~=nil and result or ""
end
-
function resolvers.unexpandedpathlist(str)
- local pth = resolvers.variable(str)
- local lst = resolvers.splitpath(pth)
- return expandedpathfromlist(lst)
+ local pth=resolvers.variable(str)
+ local lst=resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-
function resolvers.unexpandedpath(str)
- return joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
-
-local done = { }
-
+local done={}
function resolvers.resetextrapath()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+ local ep=instance.extra_paths
+ if not ep then
+ ep,done={},{}
+ instance.extra_paths=ep
+ elseif #ep>0 then
+ instance.lists,done={},{}
+ end
end
-
function resolvers.registerextrapath(paths,subpaths)
- local ep = instance.extra_paths or { }
- local oldn = #ep
- local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
- end
+ paths=settings_to_array(paths)
+ subpaths=settings_to_array(subpaths)
+ local ep=instance.extra_paths or {}
+ local oldn=#ep
+ local newn=oldn
+ local nofpaths=#paths
+ local nofsubpaths=#subpaths
+ if nofpaths>0 then
+ if nofsubpaths>0 then
+ for i=1,nofpaths do
+ local p=paths[i]
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=p.."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ else
+ for i=1,nofpaths do
+ local p=paths[i]
+ if not done[p] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(p)
+ done[p]=true
+ end
+ end
+ end
+ elseif nofsubpaths>0 then
+ for i=1,oldn do
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=ep[i].."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=resolvers.cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ end
+ if newn>0 then
+ instance.extra_paths=ep
+ end
+ if newn>oldn then
+ instance.lists={}
+ end
+end
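-- Editor's sketch (not part of the patch): the reworked registerextrapath now
-- takes comma separated sets via settings_to_array; the paths are made up.
resolvers.registerextrapath("/data/project,/data/shared","tex,fonts")
-- adds the cleaned combinations
--   /data/project/tex   /data/project/fonts
--   /data/shared/tex    /data/shared/fonts
-- to instance.extra_paths (the done table filters duplicates) and resets
-- instance.lists so that made_list and expandedpathlist see the new paths.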
+local function made_list(instance,list)
+ local ep=instance.extra_paths
+ if not ep or #ep==0 then
+ return list
+ else
+ local done,new,newn={},{},0
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ if find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
- if not done[ps] then
- newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
- done[ps] = true
- end
- end
+ break
end
+ end
end
- if newn > 0 then
- instance.extra_paths = ep -- register paths
- end
- if newn > oldn then
- instance.lists = { } -- erase the cache
+ for k=1,#ep do
+ local v=ep[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
-end
-
-local function made_list(instance,list)
- local ep = instance.extra_paths
- if not ep or #ep == 0 then
- return list
- else
- local done, new, newn = { }, { }, 0
- -- honour . .. ../.. but only when at the start
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- else
- break
- end
- end
- end
- -- first the extra paths
- for k=1,#ep do
- local v = ep[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- -- next the formal paths
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- end
- end
- return new
+ for k=1,#list do
+ local v=list[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
end
+ return new
+ end
end
-
function resolvers.cleanpathlist(str)
- local t = resolvers.expandedpathlist(str)
- if t then
- for i=1,#t do
- t[i] = collapsepath(resolvers.cleanpath(t[i]))
- end
+ local t=resolvers.expandedpathlist(str)
+ if t then
+ for i=1,#t do
+ t[i]=collapsepath(resolvers.cleanpath(t[i]))
end
- return t
+ end
+ return t
end
-
function resolvers.expandpath(str)
- return joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
-
function resolvers.expandedpathlist(str)
- if not str then
- return { }
- elseif instance.savelists then
- str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
- else
- local lst = resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ if not str then
+ return {}
+ elseif instance.savelists then
+ str=lpegmatch(dollarstripper,str)
+ local lists=instance.lists
+ local lst=lists[str]
+ if not lst then
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst=expandedpathfromlist(l)
+ lists[str]=lst
end
+ return lst
+ else
+ local lst=resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
+ end
end
-
-function resolvers.expandedpathlistfromvariable(str) -- brrr
- str = lpegmatch(dollarstripper,str)
- local tmp = resolvers.variableofformatorsuffix(str)
- return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
+function resolvers.expandedpathlistfromvariable(str)
+ str=lpegmatch(dollarstripper,str)
+ local tmp=resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp~="" and tmp or str)
end
-
function resolvers.expandpathfromvariable(str)
- return joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
-
-function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
--- local ori = resolvers.variable(str)
--- if ori == "" then
- local ori = str
--- end
- local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return joinpath(pth)
+function resolvers.expandbraces(str)
+ local ori=str
+ local pth=expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
end
-
function resolvers.registerfilehash(name,content,someerror)
- if content then
- instance.files[name] = content
- else
- instance.files[name] = { }
- if somerror == true then -- can be unset
- instance.loaderror = someerror
- end
+ if content then
+ instance.files[name]=content
+ else
+ instance.files[name]={}
+ if somerror==true then
+ instance.loaderror=someerror
end
+ end
end
-
local function isreadable(name)
- local readable = lfs.isfile(name) -- not file.is_readable(name) asit can be a dir
- if trace_detail then
- if readable then
- report_resolving("file '%s' is readable",name)
- else
- report_resolving("file '%s' is not readable", name)
- end
+ local readable=lfs.isfile(name)
+ if trace_detail then
+ if readable then
+ report_resolving("file %a is readable",name)
+ else
+ report_resolving("file %a is not readable",name)
end
- return readable
+ end
+ return readable
end
-
--- name
--- name/name
-
local function collect_files(names)
- local filelist, noffiles = { }, 0
- for k=1,#names do
- local fname = names[k]
+ local filelist,noffiles={},0
+ for k=1,#names do
+ local fname=names[k]
+ if trace_detail then
+ report_resolving("checking name %a",fname)
+ end
+ local bname=filebasename(fname)
+ local dname=filedirname(fname)
+ if dname=="" or find(dname,"^%.") then
+ dname=false
+ else
+ dname=gsub(dname,"%*",".*")
+ dname="/"..dname.."$"
+ end
+ local hashes=instance.hashes
+ for h=1,#hashes do
+ local hash=hashes[h]
+ local blobpath=hash.name
+ local files=blobpath and instance.files[blobpath]
+ if files then
if trace_detail then
- report_resolving("checking name '%s'",fname)
- end
- local bname = filebasename(fname)
- local dname = filedirname(fname)
- if dname == "" or find(dname,"^%.") then
- dname = false
- else
-dname = gsub(dname,"*","%.*")
- dname = "/" .. dname .. "$"
- end
- local hashes = instance.hashes
- for h=1,#hashes do
- local hash = hashes[h]
- local blobpath = hash.name
- local files = blobpath and instance.files[blobpath]
- if files then
+ report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ end
+ local blobfile=files[bname]
+ if not blobfile then
+ local rname="remap:"..bname
+ blobfile=files[rname]
+ if blobfile then
+ bname=files[rname]
+ blobfile=files[bname]
+ end
+ end
+ if blobfile then
+ local blobroot=files.__path__ or blobpath
+ if type(blobfile)=='string' then
+ if not dname or find(blobfile,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,blobfile,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ else
+ for kk=1,#blobfile do
+ local vv=blobfile[kk]
+ if not dname or find(vv,dname) then
+ local variant=hash.type
+ local search=filejoin(blobroot,vv,bname)
+ local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
- report_resolving("deep checking '%s' (%s)",blobpath,bname)
- end
- local blobfile = files[bname]
- if not blobfile then
- local rname = "remap:"..bname
- blobfile = files[rname]
- if blobfile then
- bname = files[rname]
- blobfile = files[bname]
- end
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
end
- if blobfile then
- local blobroot = files.__path__ or blobpath
- if type(blobfile) == 'string' then
- if not dname or find(blobfile,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,blobfile,bname)
- local search = filejoin(blobroot,blobfile,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- else
- for kk=1,#blobfile do
- local vv = blobfile[kk]
- if not dname or find(vv,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,vv,bname)
- local search = filejoin(blobroot,vv,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
- end
- end
- end
- elseif trace_locating then
- report_resolving("no match in '%s' (%s)",blobpath,bname)
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
end
+ end
end
+ elseif trace_locating then
+ report_resolving("no match in %a (%s)",blobpath,bname)
+ end
end
- return noffiles > 0 and filelist or nil
+ end
+ return noffiles>0 and filelist or nil
end
-
-local fit = { }
-
+local fit={}
function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
- local foundintrees = instance.foundintrees
- if usedmethod == "direct" and filename == foundname and fit[foundname] then
- -- just an extra lookup after a test on presence
- else
- local t = {
- filename = filename,
- format = format ~= "" and format or nil,
- filetype = filetype ~= "" and filetype or nil,
- usedmethod = usedmethod,
- foundname = foundname,
- }
- fit[foundname] = t
- foundintrees[#foundintrees+1] = t
- end
+ local foundintrees=instance.foundintrees
+ if usedmethod=="direct" and filename==foundname and fit[foundname] then
+ else
+ local t={
+ filename=filename,
+ format=format~="" and format or nil,
+ filetype=filetype~="" and filetype or nil,
+ usedmethod=usedmethod,
+ foundname=foundname,
+ }
+ fit[foundname]=t
+ foundintrees[#foundintrees+1]=t
+ end
end
-
--- split the next one up for readability (but this module needs a cleanup anyway)
-
-local function can_be_dir(name) -- can become local
- local fakepaths = instance.fakepaths
- if not fakepaths[name] then
- if lfs.isdir(name) then
- fakepaths[name] = 1 -- directory
- else
- fakepaths[name] = 2 -- no directory
- end
+local function can_be_dir(name)
+ local fakepaths=instance.fakepaths
+ if not fakepaths[name] then
+ if lfs.isdir(name) then
+ fakepaths[name]=1
+ else
+ fakepaths[name]=2
end
- return fakepaths[name] == 1
+ end
+ return fakepaths[name]==1
end
-
-local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
-
--- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-
+local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
local collect_instance_files
-
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
- -- too tricky as filename can be bla.1.2.3:
- --
- -- if not suffixmap[ext] then
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
- if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
- end
- end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
- end
+ local filetype,wantedfiles,ext='',{},suffixonly(filename)
+ wantedfiles[#wantedfiles+1]=filename
+ if askedformat=="" then
+ if ext=="" or not suffixmap[ext] then
+ local defaultsuffixes=resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname=filename..'.'..defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1]=forcedname
+ filetype=resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype %a",filetype)
end
+ end
else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
- end
- end
- end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
+ filetype=resolvers.formatofsuffix(filename)
+ if trace_locating then
+ report_resolving("using suffix based filetype %a",filetype)
+ end
+ end
+ else
+ if ext=="" or not suffixmap[ext] then
+ local format_suffixes=suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
end
+ end
+ end
+ filetype=askedformat
+ if trace_locating then
+ report_resolving("using given filetype %a",filetype)
end
- return filetype, wantedfiles
+ end
+ return filetype,wantedfiles
end
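-- Editor's sketch (not part of the patch): roughly what find_analyze returns
-- for a few invented names, given the default suffix list { "tex" }.
--   find_analyze("oeps","")       -- -> "tex", { "oeps", "oeps.tex" }
--   find_analyze("oeps.tex","")   -- -> "tex", { "oeps.tex" }
--   find_analyze("oeps","tex")    -- -> "tex", { "oeps", "oeps.tex" }
-- The suffix is now taken with file.suffixonly instead of file.extname.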
-
local function find_direct(filename,allresults)
- if not dangerous[askedformat] and isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- return "direct", { filename }
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file %a found directly",filename)
end
+ return "direct",{ filename }
+ end
end
-
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
- if trace_locating then
- report_resolving("checking wildcard '%s'", filename)
- end
- local method, result = resolvers.findwildcardfiles(filename)
- if result then
- return "wildcard", result
- end
- end
-end
-
-local function find_qualified(filename,allresults) -- this one will be split too
- if not file.is_qualified_path(filename) then
- return
- end
+ if find(filename,'%*') then
if trace_locating then
- report_resolving("checking qualified name '%s'", filename)
- end
- if isreadable(filename) then
- if trace_detail then
- report_resolving("qualified file '%s' found", filename)
- end
- return "qualified", { filename }
- end
+ report_resolving("checking wildcard %a",filename)
+ end
+ local method,result=resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard",result
+ end
+ end
+end
+local function find_qualified(filename,allresults,askedformat,alsostripped)
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name %a",filename)
+ end
+ if isreadable(filename) then
if trace_detail then
- report_resolving("locating qualified file '%s'", filename)
- end
- local forcedname, suffix = "", fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- return "qualified", { forcedname }
- end
- end
- end
- end
- if suffix and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe all
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #resolved == 0 then
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- if #resolved > 0 then
- local result = { }
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1] = rr
- end
- end
- if #result > 0 then
- return "qualified", result
- end
- end
- end
- -- a real wildcard:
- --
- -- local filelist = collect_files({basename})
- -- result = { }
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- if #result > 0 then
- -- return "qualified", result
- -- end
- end
+ report_resolving("qualified file %a found",filename)
+ end
+ return "qualified",{ filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file %a",filename)
+ end
+ local forcedname,suffix="",suffixonly(filename)
+ if suffix=="" then
+ local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s=format_suffixes[i]
+ forcedname=filename.."."..s
+ if isreadable(forcedname) then
+ if trace_locating then
+ report_resolving("no suffix, forcing format filetype %a",s)
+ end
+ return "qualified",{ forcedname }
+ end
+ end
+ end
+ end
+ if alsostripped and suffix and suffix~="" then
+ local basename=filebasename(filename)
+ local pattern=lpegmatch(preparetreepattern,filename)
+ local savedformat=askedformat
+ local format=savedformat or ""
+ if format=="" then
+ askedformat=resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat="othertextfiles"
+ end
+ if basename~=filename then
+ local resolved=collect_instance_files(basename,askedformat,allresults)
+ if #resolved==0 then
+ local lowered=lower(basename)
+ if filename~=lowered then
+ resolved=collect_instance_files(lowered,askedformat,allresults)
+ end
+ end
+ resolvers.format=savedformat
+ if #resolved>0 then
+ local result={}
+ for r=1,#resolved do
+ local rr=resolved[r]
+ if find(rr,pattern) then
+ result[#result+1]=rr
+ end
+ end
+ if #result>0 then
+ return "qualified",result
+ end
+ end
+ end
+ end
end
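The qualified-name fallback above matches the tail of resolved paths against a Lua pattern derived from the given name by preparetreepattern ("." and "-" escaped, "$" appended). A standalone re-creation of that pattern, assuming the lpeg library that LuaTeX ships with:

local lpeg = require("lpeg")
local P, Cs, Cc = lpeg.P, lpeg.Cs, lpeg.Cc
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local pattern = lpeg.match(preparetreepattern,"base/context.mkiv")
print(pattern)                                          --> base/context%.mkiv$
print(("tex/context/base/context.mkiv"):find(pattern))  --> 13  29 (a tail match)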
-
local function check_subpath(fname)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by deep scanning",fname)
- end
- return fname
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found %a by deep scanning",fname)
end
+ return fname
+ end
end
-
local function find_intree(filename,filetype,wantedfiles,allresults)
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- local method = "intree"
- if pathlist and #pathlist > 0 then
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
+ local typespec=resolvers.variableofformat(filetype)
+ local pathlist=resolvers.expandedpathlist(typespec)
+ local method="intree"
+ if pathlist and #pathlist>0 then
+ local filelist=collect_files(wantedfiles)
+ local dirlist={}
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i]=filedirname(filelist[i][3]).."/"
+ end
+ end
+ if trace_detail then
+ report_resolving("checking filename %a",filename)
+ end
+ local resolve=resolvers.resolve
+ local result={}
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local pathname=lpegmatch(inhibitstripper,path)
+ local doscan=path==pathname
+ if not find (pathname,'//$') then
+ doscan=false
+ end
+ local done=false
+ if filelist then
+ local expression=makepathexpression(pathname)
if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- local result = { }
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname,'//$') then
- doscan = false -- we check directly on the path
- end
- local done = false
- -- using file list
- if filelist then -- database
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern %a for path %a",expression,pathname)
+ end
+ for k=1,#filelist do
+ local fl=filelist[k]
+ local f=fl[2]
+ local d=dirlist[k]
+ if find(d,expression) or find(resolve(d),expression) then
+ result[#result+1]=resolve(fl[3])
+ done=true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
+ end
+ else
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
+ end
+ end
+ end
+ if done then
+ method="database"
+ else
+ method="filesystem"
+ pathname=gsub(pathname,"/+$","")
+ pathname=resolve(pathname)
+ local scheme=url.hasscheme(pathname)
+ if not scheme or scheme=="file" then
+ local pname=gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
+ end
+ if not done and doscan then
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
break
+ end
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ end
end
- end
- end
- if done then
- method = "database"
- else
- method = "filesystem" -- bonus, even when !! is specified
- pathname = gsub(pathname,"/+$","")
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- if can_be_dir(pname) then
- -- quick root scan first
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local fname = check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- if not done and doscan then
- -- collect files in path (and cache the result)
- local files = resolvers.simplescanfiles(pname,false,true)
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local subpath = files[w]
- if not subpath or subpath == "" then
- -- rootscan already done
- elseif type(subpath) == "string" then
- local fname = check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- else
- for i=1,#subpath do
- local sp = subpath[i]
- if sp == "" then
- -- roottest already done
- else
- local fname = check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
- end
- end
- end
- if done and not allresults then
- break
- end
- end
- end
- end
- end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
+ if done and not allresults then
+ break
end
+ end
end
+ end
end
- -- todo recursive scanning
- if done and not allresults then
- break
- end
- end
- if #result > 0 then
- return method, result
+ else
+ end
end
+ end
+ if done and not allresults then
+ break
+ end
end
+ if #result>0 then
+ return method,result
+ end
+ end
end
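In find_intree the recursive disk scan (simplescanfiles) is only attempted for path specifications that carry no "!!" prefix (stripped by inhibitstripper, which is defined elsewhere in this file) and that end in "//"; otherwise only the database match and the direct root check are used. A small stand-in sketch of that flag:

local function maydeepscan(spec)
  local pathname = spec:gsub("^!+","")   -- stand-in for inhibitstripper
  local doscan = (pathname == spec) and (pathname:find("//$") ~= nil)
  return pathname, doscan
end
print(maydeepscan("!!/opt/texmf//"))   --> /opt/texmf//   false
print(maydeepscan("/opt/texmf//"))     --> /opt/texmf//   true
print(maydeepscan("/opt/texmf"))       --> /opt/texmf     false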
-
local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
+ end
+ local result={}
+ for k=1,#wantedfiles do
+ local fname=wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename=fname
+ result[#result+1]=filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
+ end
+ if #result>0 then
+ return "onpath",result
+ end
+end
+local function find_otherwise(filename,filetype,wantedfiles,allresults)
+ local filelist=collect_files(wantedfiles)
+ local fl=filelist and filelist[1]
+ if fl then
+ return "otherwise",{ resolvers.resolve(fl[3]) }
+ end
+end
+collect_instance_files=function(filename,askedformat,allresults)
+ askedformat=askedformat or ""
+ filename=collapsepath(filename)
+ if allresults then
+ local filetype,wantedfiles=find_analyze(filename,askedformat)
+ local results={
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true,askedformat) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result,status,done={},{},{}
+ for k,r in next,results do
+ local method,list=r[1],r[2]
+ if method and list then
+ for i=1,#list do
+ local c=collapsepath(list[i])
+ if not done[c] then
+ result[#result+1]=c
+ done[c]=true
+ end
+ status[#status+1]=formatters["%-10s: %s"](method,c)
+ end
+ end
+ end
if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
- end
- local result = { }
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename = fname
- result[#result+1] = filejoin('.',fname)
- if not allresults then
- break
- end
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result,status
+ else
+ local method,result,stamp,filetype,wantedfiles
+ if instance.remember then
+ stamp=formatters["%s--%s"](filename,askedformat)
+ result=stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file %a",filename)
end
+ return result
+ end
end
- if #result > 0 then
- return "onpath", result
- end
-end
-
-local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- return "otherwise", { resolvers.resolve(fl[3]) } -- filename
- end
-end
-
--- we could have a loop over the 6 functions but then we'd have to
--- always analyze
-
-collect_instance_files = function(filename,askedformat,allresults) -- uses nested
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- if allresults then
- -- no need for caching, only used for tracing
- local filetype, wantedfiles = find_analyze(filename,askedformat)
- local results = {
- { find_direct (filename,true) },
- { find_wildcard (filename,true) },
- { find_qualified(filename,true) },
- { find_intree (filename,filetype,wantedfiles,true) },
- { find_onpath (filename,filetype,wantedfiles,true) },
- { find_otherwise(filename,filetype,wantedfiles,true) },
- }
- local result, status, done = { }, { }, { }
- for k, r in next, results do
- local method, list = r[1], r[2]
- if method and list then
- for i=1,#list do
- local c = collapsepath(list[i])
- if not done[c] then
- result[#result+1] = c
- done[c] = true
- end
- status[#status+1] = format("%-10s: %s",method,c)
- end
- end
- end
- if trace_detail then
- report_resolving("lookup status: %s",table.serialize(status,filename))
- end
- return result, status
- else
- local method, result, stamp, filetype, wantedfiles
- if instance.remember then
- stamp = format("%s--%s", filename, askedformat)
- result = stamp and instance.found[stamp]
- if result then
- if trace_locating then
- report_resolving("remembered file '%s'",filename)
- end
- return result
- end
- end
- method, result = find_direct(filename)
+ method,result=find_direct(filename)
+ if not result then
+ method,result=find_wildcard(filename)
+ if not result then
+ method,result=find_qualified(filename,false,askedformat)
if not result then
- method, result = find_wildcard(filename)
+ filetype,wantedfiles=find_analyze(filename,askedformat)
+ method,result=find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_onpath(filename,filetype,wantedfiles)
if not result then
- method, result = find_qualified(filename)
- if not result then
- filetype, wantedfiles = find_analyze(filename,askedformat)
- method, result = find_intree(filename,filetype,wantedfiles)
- if not result then
- method, result = find_onpath(filename,filetype,wantedfiles)
- if not result then
- method, result = find_otherwise(filename,filetype,wantedfiles)
- end
- end
- end
+ method,result=find_otherwise(filename,filetype,wantedfiles)
end
+ end
end
- if result and #result > 0 then
- local foundname = collapsepath(result[1])
- resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
- result = { foundname }
- else
- result = { } -- maybe false
- end
- if stamp then
- if trace_locating then
- report_resolving("remembering file '%s'",filename)
- end
- instance.found[stamp] = result
- end
- return result
+ end
+ end
+ if result and #result>0 then
+ local foundname=collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result={ foundname }
+ else
+ result={}
end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file %a",filename)
+ end
+ instance.found[stamp]=result
+ end
+ return result
+ end
end
-
--- -- -- end of main file search routing -- -- --
-
-
local function findfiles(filename,filetype,allresults)
- local result, status = collect_instance_files(filename,filetype or "",allresults)
- if not result or #result == 0 then
- local lowered = lower(filename)
- if filename ~= lowered then
- result, status = collect_instance_files(lowered,filetype or "",allresults)
- end
+ local result,status=collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result==0 then
+ local lowered=lower(filename)
+ if filename~=lowered then
+ result,status=collect_instance_files(lowered,filetype or "",allresults)
end
- return result or { }, status
+ end
+ return result or {},status
end
-
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ return findfiles(filename,filetype,true)
end
-
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ return findfiles(filename,filetype,false)[1] or ""
end
-
function resolvers.findpath(filename,filetype)
- return filedirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
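A usage sketch of the public lookup entry points defined above, assuming they run inside mtxrun or a ConTeXt Lua session where resolvers.load() has already populated the instance (the file names are only examples):

local one   = resolvers.findfile ("context.mkiv") -- first match, or ""
local all   = resolvers.findfiles("texmf.cnf")    -- table with all matches
local where = resolvers.findpath ("context.mkiv") -- directory of the first match
print(one, #all, where)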
-
local function findgivenfiles(filename,allresults)
- local bname, result = filebasename(filename), { }
- local hashes = instance.hashes
- local noffound = 0
- for k=1,#hashes do
- local hash = hashes[k]
- local files = instance.files[hash.name] or { }
- local blist = files[bname]
- if not blist then
- local rname = "remap:"..bname
- blist = files[rname]
- if blist then
- bname = files[rname]
- blist = files[bname]
- end
+ local bname,result=filebasename(filename),{}
+ local hashes=instance.hashes
+ local noffound=0
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local files=instance.files[hash.name] or {}
+ local blist=files[bname]
+ if not blist then
+ local rname="remap:"..bname
+ blist=files[rname]
+ if blist then
+ bname=files[rname]
+ blist=files[bname]
+ end
+ end
+ if blist then
+ if type(blist)=='string' then
+ local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then
+ break
+ end
end
- if blist then
- if type(blist) == 'string' then
- local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
- end
- end
- end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found~="" then
+ noffound=noffound+1
+ result[noffound]=resolvers.resolve(found)
+ if not allresults then break end
+ end
end
+ end
end
- return result
+ end
+ return result
end
-
function resolvers.findgivenfiles(filename)
- return findgivenfiles(filename,true)
+ return findgivenfiles(filename,true)
end
-
function resolvers.findgivenfile(filename)
- return findgivenfiles(filename,false)[1] or ""
+ return findgivenfiles(filename,false)[1] or ""
end
-
local function doit(path,blist,bname,tag,variant,result,allresults)
- local done = false
- if blist and variant then
- local resolve = resolvers.resolve -- added
- if type(blist) == 'string' then
- -- make function and share code
- if find(lower(blist),path) then
- local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- if find(lower(vv),path) then
- local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- if not allresults then break end
- end
- end
+ local done=false
+ if blist and variant then
+ local resolve=resolvers.resolve
+ if type(blist)=='string' then
+ if find(lower(blist),path) then
+ local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ end
+ else
+ for kk=1,#blist do
+ local vv=blist[kk]
+ if find(lower(vv),path) then
+ local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1]=resolve(full)
+ done=true
+ if not allresults then break end
end
+ end
end
- return done
+ end
+ return done
end
-
-
-local makewildcard = Cs(
- (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
- + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
+local makewildcard=Cs(
+ (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
-
function resolvers.wildcardpattern(pattern)
- return lpegmatch(makewildcard,pattern) or pattern
-end
-
-local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
- result = result or { }
- local base = filebasename(filename)
- local dirn = filedirname(filename)
- local path = lower(lpegmatch(makewildcard,dirn) or dirn)
- local name = lower(lpegmatch(makewildcard,base) or base)
- local files, done = instance.files, false
- if find(name,"%*") then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- for kk, hh in next, files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
- if done and not allresults then break end
- end
- end
- end
- end
- else
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+local function findwildcardfiles(filename,allresults,result)
+ result=result or {}
+ local base=filebasename(filename)
+ local dirn=filedirname(filename)
+ local path=lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name=lower(lpegmatch(makewildcard,base) or base)
+ local files,done=instance.files,false
+ if find(name,"%*") then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ for kk,hh in next,files[hashname] do
+ if not find(kk,"^remap:") then
+ if find(lower(kk),name) then
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
if done and not allresults then break end
+ end
end
+ end
end
- -- we can consider also searching the paths not in the database, but then
- -- we end up with a messy search (all // in all path specs)
- return result
+ else
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname,hashtype=hash.name,hash.type
+ if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
+ if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
+ if done and not allresults then break end
+ end
+ end
+ return result
end
-
function resolvers.findwildcardfiles(filename,result)
- return findwildcardfiles(filename,true,result)
+ return findwildcardfiles(filename,true,result)
end
-
function resolvers.findwildcardfile(filename)
- return findwildcardfiles(filename,false)[1] or ""
+ return findwildcardfiles(filename,false)[1] or ""
end
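The wildcard lookups above rely on makewildcard, which rewrites a shell-like pattern into a Lua string pattern: "*" becomes ".*", "?" becomes ".", literal "." and "-" are escaped, and "\" becomes "/" (names are lowercased before matching). A standalone re-creation, assuming the lpeg library:

local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs
local makewildcard = Cs(
  (P("^")^0 * P("/") * P(-1) + P(-1)) / ".*"
  + (P("^")^0 * P("/") / "")^0
  * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"." + P("\\")/"/" + P(1))^0
)
local luapattern = lpeg.match(makewildcard,"metapost/*.mp")
print(luapattern)                                       --> metapost/.*%.mp
print(("metapost/mfplain.mp"):find(luapattern) ~= nil)  --> true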
-
--- main user functions
-
function resolvers.automount()
- -- implemented later
end
-
function resolvers.load(option)
- statistics.starttiming(instance)
- identify_configuration_files()
- load_configuration_files()
- if option ~= "nofiles" then
- load_databases()
- resolvers.automount()
- end
- statistics.stoptiming(instance)
- local files = instance.files
- return files and next(files) and true
+ statistics.starttiming(instance)
+ identify_configuration_files()
+ load_configuration_files()
+ if option~="nofiles" then
+ load_databases()
+ resolvers.automount()
+ end
+ statistics.stoptiming(instance)
+ local files=instance.files
+ return files and next(files) and true
end
-
function resolvers.loadtime()
- return statistics.elapsedtime(instance)
+ return statistics.elapsedtime(instance)
end
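A minimal bootstrap sketch for the two functions above, assuming the surrounding mtxrun environment (statistics, caches, configuration and database loaders) is in place:

if resolvers.load() then
  print("resolver ready in " .. resolvers.loadtime() .. " seconds")
else
  print("no file database loaded")
end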
-
local function report(str)
+ if trace_locating then
+ report_resolving(str)
+ else
+ print(str)
+ end
+end
+function resolvers.dowithfilesandreport(command,files,...)
+ if files and #files>0 then
if trace_locating then
- report_resolving(str) -- has already verbose
- else
- print(str)
+ report('')
end
-end
-
-function resolvers.dowithfilesandreport(command, files, ...) -- will move
- if files and #files > 0 then
- if trace_locating then
- report('') -- ?
- end
- if type(files) == "string" then
- files = { files }
- end
- for f=1,#files do
- local file = files[f]
- local result = command(file,...)
- if type(result) == 'string' then
- report(result)
- else
- for i=1,#result do
- report(result[i]) -- could be unpack
- end
- end
+ if type(files)=="string" then
+ files={ files }
+ end
+ for f=1,#files do
+ local file=files[f]
+ local result=command(file,...)
+ if type(result)=='string' then
+ report(result)
+ else
+ for i=1,#result do
+ report(result[i])
end
+ end
end
+ end
end
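A usage sketch for the reporting helper above; findfiles returns a table per name, so each result list is printed line by line, while a single string argument is wrapped into a table first (the names are only examples):

resolvers.dowithfilesandreport(resolvers.findfiles, { "texmf.cnf", "context.mkiv" })
resolvers.dowithfilesandreport(resolvers.findfile, "texmf.cnf")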
-
--- obsolete
-
--- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
--- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-
-function resolvers.showpath(str) -- output search path for file type NAME
- return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+function resolvers.showpath(str)
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
-
-function resolvers.registerfile(files, name, path)
- if files[name] then
- if type(files[name]) == 'string' then
- files[name] = { files[name], path }
- else
- files[name] = path
- end
+function resolvers.registerfile(files,name,path)
+ if files[name] then
+ if type(files[name])=='string' then
+ files[name]={ files[name],path }
else
- files[name] = path
+ files[name]=path
end
+ else
+ files[name]=path
+ end
end
-
function resolvers.dowithpath(name,func)
- local pathlist = resolvers.expandedpathlist(name)
- for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
- end
+ local pathlist=resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.cleanpath(pathlist[i]))
+ end
end
-
function resolvers.dowithvariable(name,func)
- func(expandedvariable(name))
+ func(expandedvariable(name))
end
-
function resolvers.locateformat(name)
- local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
- local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
- if fmtname == "" then
- fmtname = resolvers.findfile(barename..".fmt")
- fmtname = resolvers.cleanpath(fmtname)
- end
- if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
- if lfs.isfile(luiname) then
- return barename, luiname
- elseif lfs.isfile(lucname) then
- return barename, lucname
- elseif lfs.isfile(luaname) then
- return barename, luaname
- end
- end
- return nil, nil
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fullname=file.addsuffix(barename,"fmt")
+ local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
+ if fmtname=="" then
+ fmtname=resolvers.findfile(fullname)
+ fmtname=resolvers.cleanpath(fmtname)
+ end
+ if fmtname~="" then
+ local barename=file.removesuffix(fmtname)
+ local luaname=file.addsuffix(barename,luasuffixes.lua)
+ local lucname=file.addsuffix(barename,luasuffixes.luc)
+ local luiname=file.addsuffix(barename,luasuffixes.lui)
+ if lfs.isfile(luiname) then
+ return barename,luiname
+ elseif lfs.isfile(lucname) then
+ return barename,lucname
+ elseif lfs.isfile(luaname) then
+ return barename,luaname
+ end
+ end
+ return nil,nil
end
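A usage sketch for resolvers.locateformat, assuming a run where the caches and environment are set up; "cont-en" is the usual English ConTeXt format name, and the function returns the barename plus the companion lui/luc/lua startup file when one exists:

local barename, stubfile = resolvers.locateformat("cont-en")
if barename then
  print("format:", barename .. ".fmt", "startup file:", stubfile)
else
  print("no cont-en format found")
end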
-
function resolvers.booleanvariable(str,default)
- local b = resolvers.expansion(str)
- if b == "" then
- return default
- else
- b = toboolean(b)
- return (b == nil and default) or b
- end
-end
-
-function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
- local instance = resolvers.instance
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobtype = hash.type
- local blobpath = hash.name
- if blobpath then
- if before then
- before(blobtype,blobpath,pattern)
- end
- local files = instance.files[blobpath]
- local total, checked, done = 0, 0, 0
- if files then
- for k,v in next, files do
- total = total + 1
- if find(k,"^remap:") then
- k = files[k]
- v = k -- files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- checked = checked + 1
- if handle(blobtype,blobpath,v,k) then
- done = done + 1
- end
- else
- checked = checked + #v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done = done + 1
- end
- end
- end
- end
+ local b=resolvers.expansion(str)
+ if b=="" then
+ return default
+ else
+ b=toboolean(b)
+ return (b==nil and default) or b
+ end
+end
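A usage sketch for resolvers.booleanvariable; the variable name is only illustrative, the point is the default that applies when the expansion is empty or not boolean-like:

local purge = resolvers.booleanvariable("PURGECACHE", false)
print("purge cache:", purge)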
+function resolvers.dowithfilesintree(pattern,handle,before,after)
+ local instance=resolvers.instance
+ local hashes=instance.hashes
+ for i=1,#hashes do
+ local hash=hashes[i]
+ local blobtype=hash.type
+ local blobpath=hash.name
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files=instance.files[blobpath]
+ local total,checked,done=0,0,0
+ if files then
+ for k,v in table.sortedhash(files) do
+ total=total+1
+ if find(k,"^remap:") then
+ elseif find(k,pattern) then
+ if type(v)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,v,k) then
+ done=done+1
+ end
+ else
+ checked=checked+#v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done=done+1
end
+ end
end
- if after then
- after(blobtype,blobpath,pattern,total,checked,done)
- end
+ end
end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
end
+ end
end
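A usage sketch for the tree iterator above, assuming a loaded instance; the handle is called once per database entry whose key matches the Lua pattern, and the optional after callback receives per-tree totals:

local n = 0
resolvers.dowithfilesintree("%.mkiv$",
  function(blobtype,blobpath,value,key) -- handle: one call per matching entry
    n = n + 1
    return true                         -- counts towards 'done'
  end,
  nil,                                  -- no 'before' callback
  function(blobtype,blobpath,pattern,total,checked,done)
    print(blobpath,total,checked,done)  -- per-tree summary
  end)
print("matches:", n)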
-
-resolvers.obsolete = resolvers.obsolete or { }
-local obsolete = resolvers.obsolete
-
-resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
-resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
+resolvers.obsolete=resolvers.obsolete or {}
+local obsolete=resolvers.obsolete
+resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
+resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-pre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- It could be interesting to hook the resolver in the file
--- opener so that unresolved prefixes travel around and we
--- get more abstraction.
-
--- As we use this beforehand we will move this up in the chain
--- of loading.
+package.loaded["data-pre"] = package.loaded["data-pre"] or true
+-- original size: 6430, stripped down to: 4219
-local resolvers = resolvers
-local prefixes = utilities.storage.allocate()
-resolvers.prefixes = prefixes
-
-local gsub = string.gsub
-local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
-local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-
--- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
-
-prefixes.environment = function(str)
- return cleanpath(expansion(str))
-end
-
-prefixes.relative = function(str,n) -- lfs.isfile
- if io.exists(str) then
- -- nothing
- elseif io.exists("./" .. str) then
- str = "./" .. str
- else
- local p = "../"
- for i=1,n or 2 do
- if io.exists(p .. str) then
- str = p .. str
- break
- else
- p = p .. "../"
- end
- end
- end
- return cleanpath(str)
-end
-
-prefixes.auto = function(str)
- local fullname = prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname = prefixes.locate(str)
- end
- return fullname
-end
-
-prefixes.locate = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath((fullname ~= "" and fullname) or str)
-end
-
-prefixes.filename = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
-end
-
-prefixes.pathname = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
-end
-
-prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
-end
-
-prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
-end
-
-prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
-end
-
-prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
-end
-
-prefixes.env = prefixes.environment
-prefixes.rel = prefixes.relative
-prefixes.loc = prefixes.locate
-prefixes.kpse = prefixes.locate
-prefixes.full = prefixes.locate
-prefixes.file = prefixes.filename
-prefixes.path = prefixes.pathname
-
+if not modules then modules={} end modules ['data-pre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local getenv=resolvers.getenv
+local P,S,R,C,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.match
+local joinpath,basename,dirname=file.join,file.basename,file.dirname
+local getmetatable,rawset,type=getmetatable,rawset,type
+prefixes.environment=function(str)
+ return cleanpath(expansion(str))
+end
+prefixes.relative=function(str,n)
+ if io.exists(str) then
+ elseif io.exists("./"..str) then
+ str="./"..str
+ else
+ local p="../"
+ for i=1,n or 2 do
+ if io.exists(p..str) then
+ str=p..str
+ break
+ else
+ p=p.."../"
+ end
+ end
+ end
+ return cleanpath(str)
+end
+prefixes.auto=function(str)
+ local fullname=prefixes.relative(str)
+ if not lfs.isfile(fullname) then
+ fullname=prefixes.locate(str)
+ end
+ return fullname
+end
+prefixes.locate=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath((fullname~="" and fullname) or str)
+end
+prefixes.filename=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(basename((fullname~="" and fullname) or str))
+end
+prefixes.pathname=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(dirname((fullname~="" and fullname) or str))
+end
+prefixes.selfautoloc=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
+end
+prefixes.selfautoparent=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
+end
+prefixes.selfautodir=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
+end
+prefixes.home=function(str)
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+local function toppath()
+ local inputstack=resolvers.inputstack
+ if not inputstack then
+ return "."
+ end
+ local pathname=dirname(inputstack[#inputstack] or "")
+ if pathname=="" then
+ return "."
+ else
+ return pathname
+ end
+end
+resolvers.toppath=toppath
+prefixes.toppath=function(str)
+ return cleanpath(joinpath(toppath(),str))
+end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
function resolvers.allprefixes(separator)
- local all = table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i] = all[i] .. ":"
- end
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
end
- return all
+ end
+ return all
end
-
local function _resolve_(method,target)
- local action = prefixes[method]
- if action then
- return action(target)
- else
- return method .. ":" .. target
- end
-end
-
-local resolved, abstract = { }, { }
-
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+local resolved,abstract={},{}
function resolvers.resetresolve(str)
- resolved, abstract = { }, { }
+ resolved,abstract={},{}
end
-
-local function resolve(str) -- use schemes, this one is then for the commandline only
- if type(str) == "table" then
- local t = { }
- for i=1,#str do
- t[i] = resolve(str[i])
- end
- return t
- else
- local res = resolved[str]
- if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
- resolved[str] = res
- abstract[res] = str
- end
- return res
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local function resolve(str)
+ if type(str)=="table" then
+ local t={}
+ for i=1,#str do
+ t[i]=resolve(str[i])
end
+ return t
+ else
+ local res=resolved[str]
+ if not res then
+ res=lpegmatch(pattern,str)
+ resolved[str]=res
+ abstract[res]=str
+ end
+ return res
+ end
end
-
local function unresolve(str)
- return abstract[str] or str
+ return abstract[str] or str
end
-
-resolvers.resolve = resolve
-resolvers.unresolve = unresolve
-
-if os.uname then
-
- for k, v in next, os.uname() do
- if not prefixes[k] then
- prefixes[k] = function() return v end
- end
+resolvers.resolve=resolve
+resolvers.unresolve=unresolve
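A usage sketch of the prefix machinery defined in this module, assuming an mtxrun session with a loaded database; the expanded paths depend on the local installation:

print(resolvers.resolve("home:texmf/tex/context"))   -- prefixes.home: $HOME joined with the rest
print(resolvers.resolve("kpse:texmf.cnf"))           -- prefixes.locate: found file, or the name itself
print(table.concat(resolvers.allprefixes(true)," ")) -- all known prefixes, colon-terminated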
+if type(os.uname)=="function" then
+ for k,v in next,os.uname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
end
-
+ end
end
-
-if os.type == "unix" then
-
- local pattern
-
- local function makepattern(t,k,v)
- local colon = P(":")
- local p
- for k, v in table.sortedpairs(prefixes) do
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
- end
-
- makepattern()
-
- getmetatable(prefixes).__newindex = makepattern
-
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-
-else -- already the default:
-
- function resolvers.repath(str)
- return str
- end
-
+if os.type=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ getmetatable(prefixes).__newindex=makepattern
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
@@ -14237,172 +14214,159 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local methodhandler = resolvers.methodhandler
-local registermethod = resolvers.registermethod
-
-local finders = allocate { helpers = { }, notfound = function() end }
-local openers = allocate { helpers = { }, notfound = function() end }
-local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+package.loaded["data-inp"] = package.loaded["data-inp"] or true
-registermethod("finders", finders, "uri")
-registermethod("openers", openers, "uri")
-registermethod("loaders", loaders, "uri")
+-- original size: 910, stripped down to: 823
-resolvers.finders = finders
-resolvers.openers = openers
-resolvers.loaders = loaders
+if not modules then modules={} end modules ['data-inp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local methodhandler=resolvers.methodhandler
+local registermethod=resolvers.registermethod
+local finders=allocate { helpers={},notfound=function() end }
+local openers=allocate { helpers={},notfound=function() end }
+local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
+registermethod("finders",finders,"uri")
+registermethod("openers",openers,"uri")
+registermethod("loaders",loaders,"uri")
+resolvers.finders=finders
+resolvers.openers=openers
+resolvers.loaders=loaders
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-out'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate = utilities.storage.allocate
-local resolvers = resolvers
-
-local registermethod = resolvers.registermethod
+package.loaded["data-out"] = package.loaded["data-out"] or true
-local savers = allocate { helpers = { } }
+-- original size: 530, stripped down to: 475
-resolvers.savers = savers
-
-registermethod("savers", savers, "uri")
+if not modules then modules={} end modules ['data-out']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registermethod=resolvers.registermethod
+local savers=allocate { helpers={} }
+resolvers.savers=savers
+registermethod("savers",savers,"uri")
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-fil'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["data-fil"] = package.loaded["data-fil"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_files = logs.reporter("resolvers","files")
-
-local resolvers = resolvers
-
-local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
-
-local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+-- original size: 3801, stripped down to: 3231
+if not modules then modules={} end modules ['data-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_files=logs.reporter("resolvers","files")
+local resolvers=resolvers
+local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
+local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
+local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_files("file locator '%s' found as '%s'",name,realname)
- end
- resolvers.appendhash('file',name,true) -- cache
- elseif trace_locating then
- report_files("file locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator %a found as %a",name,realname)
end
+ resolvers.appendhash('file',name,true)
+ elseif trace_locating then
+ report_files("file locator %a not found",name)
+ end
end
-
function hashers.file(specification)
- local name = specification.filename
- local content = caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local name=specification.filename
+ local content=caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
end
-
function generators.file(specification)
- local path = specification.filename
- local content = resolvers.scanfiles(path,false,true) -- scan once
- resolvers.registerfilehash(path,content,true)
+ local path=specification.filename
+ local content=resolvers.scanfiles(path,false,true)
+ resolvers.registerfilehash(path,content,true)
end
-
-concatinators.file = file.join
-
+concatinators.file=file.join
function finders.file(specification,filetype)
- local filename = specification.filename
- local foundname = resolvers.findfile(filename,filetype)
- if foundname and foundname ~= "" then
- if trace_locating then
- report_files("file finder: '%s' found",filename)
- end
- return foundname
- else
- if trace_locating then
- report_files("file finder: %s' not found",filename)
- end
- return finders.notfound()
+ local filename=specification.filename
+ local foundname=resolvers.findfile(filename,filetype)
+ if foundname and foundname~="" then
+ if trace_locating then
+ report_files("file finder: %a found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %a not found",filename)
end
+ return finders.notfound()
+ end
end
-
--- The default textopener will be overloaded later on.
-
function openers.helpers.textopener(tag,filename,f)
- return {
- reader = function() return f:read () end,
- close = function() logs.show_close(filename) return f:close() end,
- }
+ return {
+ reader=function() return f:read () end,
+ close=function() logs.show_close(filename) return f:close() end,
+ }
end
-
function openers.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"r")
- if f then
- if trace_locating then
- report_files("file opener, '%s' opened",filename)
- end
- return openers.helpers.textopener("file",filename,f)
- end
- end
- if trace_locating then
- report_files("file opener, '%s' not found",filename)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener: %a opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
end
- return openers.notfound()
+ end
+ if trace_locating then
+ report_files("file opener: %a not found",filename)
+ end
+ return openers.notfound()
end
-
function loaders.file(specification,filetype)
- local filename = specification.filename
- if filename and filename ~= "" then
- local f = io.open(filename,"rb")
- if f then
- logs.show_load(filename)
- if trace_locating then
- report_files("file loader, '%s' loaded",filename)
- end
- local s = f:read("*a")
- if checkgarbage then
- checkgarbage(#s)
- end
- f:close()
- if s then
- return true, s, #s
- end
- end
- end
- if trace_locating then
- report_files("file loader, '%s' not found",filename)
- end
- return loaders.notfound()
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader: %a loaded",filename)
+ end
+ local s=f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true,s,#s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader: %a not found",filename)
+ end
+ return loaders.notfound()
end
@@ -14410,140 +14374,118 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
+package.loaded["data-con"] = package.loaded["data-con"] or true
-containers = containers or { }
-local containers = containers
-containers.usecache = true
+-- original size: 4940, stripped down to: 3580
-local report_containers = logs.reporter("resolvers","containers")
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
end
+ return s
+ end
end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
end
-
function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
end
- return data
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
end
-
function containers.content(container,name)
- return container.storage[name]
+ return container.storage[name]
end
-
function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
+ return (gsub(lower(name),"[^%w%d]+","-"))
end
@@ -14551,102 +14493,90 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-use'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_mounts = logs.reporter("resolvers","mounts")
-
-local resolvers = resolvers
+package.loaded["data-use"] = package.loaded["data-use"] or true
--- we will make a better format, maybe something xml or just text or lua
-
-resolvers.automounted = resolvers.automounted or { }
+-- original size: 3913, stripped down to: 2998
+if not modules then modules={} end modules ['data-use']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_mounts=logs.reporter("resolvers","mounts")
+local resolvers=resolvers
+resolvers.automounted=resolvers.automounted or {}
function resolvers.automount(usecache)
- local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
- if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = caches.getreadablepaths("mount")
- end
- if mountpaths and #mountpaths > 0 then
- statistics.starttiming(resolvers.instance)
- for k=1,#mountpaths do
- local root = mountpaths[k]
- local f = io.open(root.."/url.tmi")
- if f then
- for line in f:lines() do
- if line then
- if find(line,"^[%%#%-]") then -- or %W
- -- skip
- elseif find(line,"^zip://") then
- if trace_locating then
- report_mounts("mounting %s",line)
- end
- table.insert(resolvers.automounted,line)
- resolvers.usezipfile(line)
- end
- end
- end
- f:close()
- end
+ local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
+ if (not mountpaths or #mountpaths==0) and usecache then
+ mountpaths=caches.getreadablepaths("mount")
+ end
+ if mountpaths and #mountpaths>0 then
+ statistics.starttiming(resolvers.instance)
+ for k=1,#mountpaths do
+ local root=mountpaths[k]
+ local f=io.open(root.."/url.tmi")
+ if f then
+ for line in f:lines() do
+ if line then
+ if find(line,"^[%%#%-]") then
+ elseif find(line,"^zip://") then
+ if trace_locating then
+ report_mounts("mounting %a",line)
+ end
+ table.insert(resolvers.automounted,line)
+ resolvers.usezipfile(line)
+ end
+ end
end
- statistics.stoptiming(resolvers.instance)
- end
-end
-
--- status info
-
-statistics.register("used config file", function() return caches.configfiles() end)
-statistics.register("used cache path", function() return caches.usedpaths() end)
-
--- experiment (code will move)
-
-function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.list().banner
- if formatbanner and enginebanner and sourcefile then
- local luvname = file.replacesuffix(texname,"luv")
- local luvdata = {
- enginebanner = enginebanner,
- formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
- sourcefile = sourcefile,
- }
- io.savedata(luvname,table.serialize(luvdata,true))
- end
+ f:close()
+ end
+ end
+ statistics.stoptiming(resolvers.instance)
+ end
+end
+statistics.register("used config file",function() return caches.configfiles() end)
+statistics.register("used cache path",function() return caches.usedpaths() end)
+function statistics.savefmtstatus(texname,formatbanner,sourcefile)
+ local enginebanner=status.list().banner
+ if formatbanner and enginebanner and sourcefile then
+ local luvname=file.replacesuffix(texname,"luv")
+ local luvdata={
+ enginebanner=enginebanner,
+ formatbanner=formatbanner,
+ sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
+ sourcefile=sourcefile,
+ }
+ io.savedata(luvname,table.serialize(luvdata,true))
+ end
end
-
function statistics.checkfmtstatus(texname)
- local enginebanner = status.list().banner
- if enginebanner and texname then
- local luvname = file.replacesuffix(texname,"luv")
- if lfs.isfile(luvname) then
- local luv = dofile(luvname)
- if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
- local luvbanner = luv.enginebanner or "?"
- if luvbanner ~= enginebanner then
- return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
- end
- local luvhash = luv.sourcehash or "?"
- if luvhash ~= sourcehash then
- return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
- end
- else
- return "invalid status file"
- end
- else
- return "missing status file"
- end
+ local enginebanner=status.list().banner
+ if enginebanner and texname then
+ local luvname=file.replacesuffix(texname,"luv")
+ if lfs.isfile(luvname) then
+ local luv=dofile(luvname)
+ if luv and luv.sourcefile then
+ local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
+ local luvbanner=luv.enginebanner or "?"
+ if luvbanner~=enginebanner then
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
+ end
+ local luvhash=luv.sourcehash or "?"
+ if luvhash~=sourcehash then
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
+ end
+ else
+ return "invalid status file"
+ end
+ else
+ return "missing status file"
end
- return true
+ end
+ return true
end
@@ -14654,263 +14584,235 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-zip'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- partly redone .. needs testing
+package.loaded["data-zip"] = package.loaded["data-zip"] or true
-local format, find, match = string.format, string.find, string.match
+-- original size: 8489, stripped down to: 6757
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_zip = logs.reporter("resolvers","zip")
-
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
-
-local resolvers = resolvers
-
-zip = zip or { }
-local zip = zip
-
-zip.archives = zip.archives or { }
-local archives = zip.archives
-
-zip.registeredfiles = zip.registeredfiles or { }
-local registeredfiles = zip.registeredfiles
-
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- zip.open = i_limiter.protect(zip.open)
- limited = true
- end
- end
+if not modules then modules={} end modules ['data-zip']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,find,match=string.format,string.find,string.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_zip=logs.reporter("resolvers","zip")
+local resolvers=resolvers
+zip=zip or {}
+local zip=zip
+zip.archives=zip.archives or {}
+local archives=zip.archives
+zip.registeredfiles=zip.registeredfiles or {}
+local registeredfiles=zip.registeredfiles
+local limited=false
+directives.register("system.inputmode",function(v)
+ if not limited then
+ local i_limiter=io.i_limiter(v)
+ if i_limiter then
+ zip.open=i_limiter.protect(zip.open)
+ limited=true
+ end
+ end
end)
-
-local function validzip(str) -- todo: use url splitter
- if not find(str,"^zip://") then
- return "zip:///" .. str
- else
- return str
- end
+local function validzip(str)
+ if not find(str,"^zip://") then
+ return "zip:///"..str
+ else
+ return str
+ end
end
-
function zip.openarchive(name)
- if not name or name == "" then
- return nil
- else
- local arch = archives[name]
- if not arch then
- local full = resolvers.findfile(name) or ""
- arch = (full ~= "" and zip.open(full)) or false
- archives[name] = arch
- end
- return arch
+ if not name or name=="" then
+ return nil
+ else
+ local arch=archives[name]
+ if not arch then
+ local full=resolvers.findfile(name) or ""
+ arch=(full~="" and zip.open(full)) or false
+ archives[name]=arch
end
+ return arch
+ end
end
-
function zip.closearchive(name)
- if not name or (name == "" and archives[name]) then
- zip.close(archives[name])
- archives[name] = nil
- end
+ if not name or (name=="" and archives[name]) then
+ zip.close(archives[name])
+ archives[name]=nil
+ end
end
-
function resolvers.locators.zip(specification)
- local archive = specification.filename
- local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
- if trace_locating then
- if zipfile then
- report_zip("locator, archive '%s' found",archive)
- else
- report_zip("locator, archive '%s' not found",archive)
- end
+ local archive=specification.filename
+ local zipfile=archive and archive~="" and zip.openarchive(archive)
+ if trace_locating then
+ if zipfile then
+ report_zip("locator: archive %a found",archive)
+ else
+ report_zip("locator: archive %a not found",archive)
end
+ end
end
-
function resolvers.hashers.zip(specification)
- local archive = specification.filename
- if trace_locating then
- report_zip("loading file '%s'",archive)
- end
- resolvers.usezipfile(specification.original)
+ local archive=specification.filename
+ if trace_locating then
+ report_zip("loading file %a",archive)
+ end
+ resolvers.usezipfile(specification.original)
+end
+function resolvers.concatinators.zip(zipfile,path,name)
+ if not path or path=="" then
+ return format('%s?name=%s',zipfile,name)
+ else
+ return format('%s?name=%s/%s',zipfile,path,name)
+ end
end
-
-function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
- if not path or path == "" then
- return format('%s?name=%s',zipfile,name)
- else
- return format('%s?name=%s/%s',zipfile,path,name)
- end
-end
-
function resolvers.finders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("finder, archive '%s' found",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- dfile = zfile:close()
- if trace_locating then
- report_zip("finder, file '%s' found",queryname)
- end
- return specification.original
- elseif trace_locating then
- report_zip("finder, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("finder, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("finder: archive %a found",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ dfile=zfile:close()
+ if trace_locating then
+ report_zip("finder: file %a found",queryname)
+ end
+ return specification.original
+ elseif trace_locating then
+ report_zip("finder: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("finder: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("finder, '%s' not found",original)
- end
- return resolvers.finders.notfound()
+ end
+ if trace_locating then
+ report_zip("finder: %a not found",original)
+ end
+ return resolvers.finders.notfound()
end
-
function resolvers.openers.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("opener, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- if trace_locating then
- report_zip("opener, file '%s' found",queryname)
- end
- return resolvers.openers.helpers.textopener('zip',original,dfile)
- elseif trace_locating then
- report_zip("opener, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("opener, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+          report_zip("opener: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ if trace_locating then
+ report_zip("opener: file %a found",queryname)
+ end
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
+ elseif trace_locating then
+ report_zip("opener: file %a not found",queryname)
end
+ elseif trace_locating then
+ report_zip("opener: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("opener, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("opener: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
function resolvers.loaders.zip(specification)
- local original = specification.original
- local archive = specification.filename
- if archive then
- local query = url.query(specification.query)
- local queryname = query.name
- if queryname then
- local zfile = zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("loader, archive '%s' opened",archive)
- end
- local dfile = zfile:open(queryname)
- if dfile then
- logs.show_load(original)
- if trace_locating then
- report_zip("loader, file '%s' loaded",original)
- end
- local s = dfile:read("*all")
- dfile:close()
- return true, s, #s
- elseif trace_locating then
- report_zip("loader, file '%s' not found",queryname)
- end
- elseif trace_locating then
- report_zip("loader, unknown archive '%s'",archive)
- end
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("loader: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ logs.show_load(original)
+ if trace_locating then
+            report_zip("loader: file %a loaded",original)
+ end
+ local s=dfile:read("*all")
+ dfile:close()
+ return true,s,#s
+ elseif trace_locating then
+ report_zip("loader: file %a not found",queryname)
end
+ elseif trace_locating then
+        report_zip("loader: unknown archive %a",archive)
+ end
end
- if trace_locating then
- report_zip("loader, '%s' not found",original)
- end
- return resolvers.openers.notfound()
+ end
+ if trace_locating then
+ report_zip("loader: %a not found",original)
+ end
+ return resolvers.openers.notfound()
end
-
--- zip:///somefile.zip
--- zip:///somefile.zip?tree=texmf-local -> mount
-
function resolvers.usezipfile(archive)
- local specification = resolvers.splitmethod(archive) -- to be sure
- local archive = specification.filename
- if archive and not registeredfiles[archive] then
- local z = zip.openarchive(archive)
- if z then
- local instance = resolvers.instance
- local tree = url.query(specification.query).tree or ""
- if trace_locating then
- report_zip("registering, registering archive '%s'",archive)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',archive)
- resolvers.extendtexmfvariable(archive) -- resets hashes too
- registeredfiles[archive] = z
- instance.files[archive] = resolvers.registerzipfile(z,tree)
- statistics.stoptiming(instance)
- elseif trace_locating then
- report_zip("registering, unknown archive '%s'",archive)
- end
+ local specification=resolvers.splitmethod(archive)
+ local archive=specification.filename
+ if archive and not registeredfiles[archive] then
+ local z=zip.openarchive(archive)
+ if z then
+ local instance=resolvers.instance
+ local tree=url.query(specification.query).tree or ""
+ if trace_locating then
+ report_zip("registering: archive %a",archive)
+ end
+ statistics.starttiming(instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive)
+ registeredfiles[archive]=z
+ instance.files[archive]=resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(instance)
elseif trace_locating then
- report_zip("registering, '%s' not found",archive)
+ report_zip("registering: unknown archive %a",archive)
end
+ elseif trace_locating then
+ report_zip("registering: archive %a not found",archive)
+ end
end
-
function resolvers.registerzipfile(z,tree)
- local files, filter = { }, ""
- if tree == "" then
- filter = "^(.+)/(.-)$"
+ local files,filter={},""
+ if tree=="" then
+ filter="^(.+)/(.-)$"
+ else
+ filter=format("^%s/(.+)/(.-)$",tree)
+ end
+ if trace_locating then
+ report_zip("registering: using filter %a",filter)
+ end
+ local register,n=resolvers.registerfile,0
+ for i in z:files() do
+ local path,name=match(i.filename,filter)
+ if path then
+ if name and name~='' then
+ register(files,name,path)
+ n=n+1
+ else
+ end
else
- filter = format("^%s/(.+)/(.-)$",tree)
+ register(files,i.filename,'')
+ n=n+1
end
- if trace_locating then
- report_zip("registering, using filter '%s'",filter)
- end
- local register, n = resolvers.registerfile, 0
- for i in z:files() do
- local path, name = match(i.filename,filter)
- if path then
- if name and name ~= '' then
- register(files, name, path)
- n = n + 1
- else
- -- directory
- end
- else
- register(files, i.filename, '')
- n = n + 1
- end
- end
- report_zip("registering, %s files registered",n)
- return files
+ end
+ report_zip("registering: %s files registered",n)
+ return files
end
@@ -14918,393 +14820,447 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tre'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- \input tree://oeps1/**/oeps.tex
-
-local find, gsub, format = string.find, string.gsub, string.format
+package.loaded["data-tre"] = package.loaded["data-tre"] or true
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_trees = logs.reporter("resolvers","trees")
-
-local resolvers = resolvers
-
-local done, found, notfound = { }, { }, resolvers.finders.notfound
+-- original size: 2508, stripped down to: 2074
+if not modules then modules={} end modules ['data-tre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,gsub,format=string.find,string.gsub,string.format
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_trees=logs.reporter("resolvers","trees")
+local resolvers=resolvers
+local done,found,notfound={},{},resolvers.finders.notfound
function resolvers.finders.tree(specification)
- local spec = specification.filename
- local fnd = found[spec]
- if fnd == nil then
- if spec ~= "" then
- local path, name = file.dirname(spec), file.basename(spec)
- if path == "" then path = "." end
- local hash = done[path]
- if not hash then
- local pattern = path .. "/*" -- we will use the proper splitter
- hash = dir.glob(pattern)
- done[path] = hash
- end
- local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
- for k=1,#hash do
- local v = hash[k]
- if find(v,pattern) then
- found[spec] = v
- return v
- end
- end
- end
- fnd = notfound() -- false
- found[spec] = fnd
- end
- return fnd
+ local spec=specification.filename
+ local fnd=found[spec]
+ if fnd==nil then
+ if spec~="" then
+ local path,name=file.dirname(spec),file.basename(spec)
+ if path=="" then path="." end
+ local hash=done[path]
+ if not hash then
+ local pattern=path.."/*"
+ hash=dir.glob(pattern)
+ done[path]=hash
+ end
+ local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
+ for k=1,#hash do
+ local v=hash[k]
+ if find(v,pattern) then
+ found[spec]=v
+ return v
+ end
+ end
+ end
+ fnd=notfound()
+ found[spec]=fnd
+ end
+ return fnd
end
-
function resolvers.locators.tree(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
- if trace_locating then
- report_trees("locator '%s' found",realname)
- end
- resolvers.appendhash('tree',name,false) -- don't cache
- elseif trace_locating then
- report_trees("locator '%s' not found",name)
+ local name=specification.filename
+ local realname=resolvers.resolve(name)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_trees("locator %a found",realname)
end
+ resolvers.appendhash('tree',name,false)
+ elseif trace_locating then
+ report_trees("locator %a not found",name)
+ end
end
-
function resolvers.hashers.tree(specification)
- local name = specification.filename
- if trace_locating then
- report_trees("analysing '%s'",name)
- end
- resolvers.methodhandler("hashers",name)
-
- resolvers.generators.file(specification)
+ local name=specification.filename
+ if trace_locating then
+ report_trees("analysing %a",name)
+ end
+ resolvers.methodhandler("hashers",name)
+ resolvers.generators.file(specification)
end
-
-resolvers.concatinators.tree = resolvers.concatinators.file
-resolvers.generators.tree = resolvers.generators.file
-resolvers.openers.tree = resolvers.openers.file
-resolvers.loaders.tree = resolvers.loaders.file
+resolvers.concatinators.tree=resolvers.concatinators.file
+resolvers.generators.tree=resolvers.generators.file
+resolvers.openers.tree=resolvers.openers.file
+resolvers.loaders.tree=resolvers.loaders.file
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this one is replaced by data-sch.lua --
-
-local gsub = string.gsub
-
-local resolvers = resolvers
+package.loaded["data-sch"] = package.loaded["data-sch"] or true
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
-
-local cached = { }
-
-local function runcurl(specification)
- local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[original] then
- if not io.exists(cachename) then
- cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
- end
- if io.exists(cachename) then
- cached[original] = cachename
- else
- cached[original] = ""
- end
- end
- return cached[original]
-end
-
--- old code: we could be cleaner using specification (see schemes)
-
-local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
-end
-
-local opener = openers.file
-local loader = loaders.file
-
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
-end
-
-resolvers.curl.install = install
-
-install('http')
-install('https')
-install('ftp')
-
-
-end -- of closure
+-- original size: 6202, stripped down to: 5149
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['data-sch']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_libraries = logs.reporter("resolvers","libraries")
-
-local gsub, insert = string.gsub, table.insert
-local unpack = unpack or table.unpack
-
-local resolvers, package = resolvers, package
-
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local load=load
+local gsub,concat,format=string.gsub,table.concat,string.format
+local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
+local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
+local report_schemes=logs.reporter("resolvers","schemes")
+local http=require("socket.http")
+local ltn12=require("ltn12")
+local resolvers=resolvers
+local schemes=resolvers.schemes or {}
+resolvers.schemes=schemes
+local cleaners={}
+schemes.cleaners=cleaners
+local threshold=24*60*60
+directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
+function cleaners.none(specification)
+ return specification.original
+end
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-"))
+end
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+local cleaner=cleaners.strip
+directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
+function resolvers.schemes.cleanname(specification)
+ local hash=cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %a to %a",specification.original,hash)
+ end
+ return hash
+end
+local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
+local function runcurl(name,cachename)
+ local command="curl --silent --create-dirs --output "..cachename.." "..name
+ os.spawn(command)
+end
+local function fetch(specification)
+ local original=specification.original
+ local scheme=specification.scheme
+ local cleanname=schemes.cleanname(specification)
+ local cachename=caches.setfirstwritablefile(cleanname,"schemes")
+ if not cached[original] then
+ statistics.starttiming(schemes)
+    if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
+ cached[original]=cachename
+ local handler=handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
+ end
+ if io.exists(cachename) then
+ cached[original]=cachename
+ if trace_schemes then
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
+ end
+ else
+ cached[original]=""
+ if trace_schemes then
+ report_schemes("using missing %a, protocol %a",original,scheme)
+ end
end
- return libpaths
-end
-
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+ loaded[scheme]=loaded[scheme]+1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing %a, protocol %a",original,scheme)
end
- return clibpaths
+ reused[scheme]=reused[scheme]+1
+ end
+ return cached[original]
end
-
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
+end
+local opener=openers.file
+local loader=loaders.file
+local function install(scheme,handler,newthreshold)
+ handlers [scheme]=handler
+ loaded [scheme]=0
+ reused [scheme]=0
+ finders [scheme]=finder
+ openers [scheme]=opener
+ loaders [scheme]=loader
+ thresholds[scheme]=newthreshold or threshold
+end
+schemes.install=install
+local function http_handler(specification,cachename)
+ local tempname=cachename..".tmp"
+ local f=io.open(tempname,"wb")
+ local status,message=http.request {
+ url=specification.original,
+ sink=ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
+install('http',http_handler)
+install('https')
+install('ftp')
+statistics.register("scheme handling time",function()
+ local l,r,nl,nr={},{},0,0
+ for k,v in table.sortedhash(loaded) do
+ if v>0 then
+ nl=nl+1
+ l[nl]=k..":"..v
+ end
+ end
+ for k,v in table.sortedhash(reused) do
+ if v>0 then
+ nr=nr+1
+ r[nr]=k..":"..v
+ end
+ end
+ local n=nl+nr
+ if n>0 then
+ l=nl>0 and concat(l) or "none"
+ r=nr>0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes),n,threshold,l,r)
+ else
+ return nil
+ end
+end)
+local httprequest=http.request
+local toquery=url.toquery
+local function fetchstring(url,data)
+ local q=data and toquery(data)
+ if q then
+ url=url.."?"..q
+ end
+ local reply=httprequest(url)
+ return reply
+end
+schemes.fetchstring=fetchstring
+function schemes.fetchtable(url,data)
+ local reply=fetchstring(url,data)
+ if reply then
+ local s=load("return "..reply)
+ if s then
+ return s()
end
- return path
+ end
end
-local p_libpaths, a_libpaths = { }, { }
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
-end
+end -- of closure
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
-end
+do -- create closure to overcome 200 locals limit
--- beware, we need to return a loadfile result !
+package.loaded["data-lua"] = package.loaded["data-lua"] or true
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
-end
+-- original size: 4861, stripped down to: 3693
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
- end
- for i=1,#libformats do
- local format = libformats[i]
- local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
- end
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
+if not modules then modules={} end modules ['data-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers,package=resolvers,package
+local gsub=string.gsub
+local concat=table.concat
+local addsuffix=file.addsuffix
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local libsuffixes={ 'tex','lua' }
+local clibsuffixes={ 'lib' }
+local libformats={ 'TEXINPUTS','LUAINPUTS' }
+local clibformats={ 'CLUAINPUTS' }
+local helpers=package.helpers
+trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
+trackers.register("resolvers.locating",function(v) helpers.trace=v end)
+helpers.report=logs.reporter("resolvers","libraries")
+local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
+local function cleanpath(path)
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+helpers.cleanpath=cleanpath
+local loadedaslib=helpers.loadedaslib
+local loadedbylua=helpers.loadedbylua
+local loadedbypath=helpers.loadedbypath
+local notloaded=helpers.notloaded
+local getlibpaths=package.libpaths
+local getclibpaths=package.clibpaths
+function helpers.libpaths(libhash)
+ local libpaths={}
+ for i=1,#libformats do
+ local paths=resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1]=path
+ libhash[path]=true
+ end
+ end
+ end
+ return libpaths
+end
+function helpers.clibpaths(clibhash)
+ local clibpaths={}
+ for i=1,#clibformats do
+ local paths=resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path=cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1]=path
+ clibhash[path]=true
+ end
+ end
+ end
+ return clibpaths
+end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating %a as %a using formats %a",rawname,name,suffixes)
+ end
+ for i=1,#suffixes do
+ local format=suffixes[i]
+ local resolved=resolvers.findfile(name,format) or ""
+ if trace then
+ report("checking %a using format %a",name,format)
+ end
+ if resolved~="" then
+ if trace then
+ report("lib %a located on %a",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbyformat=loadedbyformat
+local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+helpers.lualibfile=lualibfile
+function helpers.loaded(name)
+ local thename=lualibfile(name)
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix)
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbyformat(luaname,name,libsuffixes,false)
+ if done then
+ return result
+ end
+ local done,result=loadedbyformat(libname,name,clibsuffixes,true)
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
end
-
-resolvers.loadlualib = require
-
--- -- -- --
-
-package.obsolete = package.obsolete or { }
-
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
-
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib=require
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-aux'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find = string.find
-local type, next = type, next
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+package.loaded["data-aux"] = package.loaded["data-aux"] or true
-local resolvers = resolvers
+-- original size: 2394, stripped down to: 2005
-local report_scripts = logs.reporter("resolvers","scripts")
-
-function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
- local scriptpath = "scripts/context/lua"
- newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.cleanpath(oldname)
+if not modules then modules={} end modules ['data-aux']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find=string.find
+local type,next=type,next
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local resolvers=resolvers
+local report_scripts=logs.reporter("resolvers","scripts")
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
+ newname=file.addsuffix(newname,"lua")
+ local oldscript=resolvers.cleanpath(oldname)
+ if trace_locating then
+ report_scripts("to be replaced old script %a",oldscript)
+ end
+ local newscripts=resolvers.findfiles(newname) or {}
+ if #newscripts==0 then
if trace_locating then
- report_scripts("to be replaced old script %s", oldscript)
- end
- local newscripts = resolvers.findfiles(newname) or { }
- if #newscripts == 0 then
+ report_scripts("unable to locate new script")
+ end
+ else
+ for i=1,#newscripts do
+ local newscript=resolvers.cleanpath(newscripts[i])
+ if trace_locating then
+ report_scripts("checking new script %a",newscript)
+ end
+ if oldscript==newscript then
if trace_locating then
- report_scripts("unable to locate new script")
+ report_scripts("old and new script are the same")
end
- else
- for i=1,#newscripts do
- local newscript = resolvers.cleanpath(newscripts[i])
- if trace_locating then
- report_scripts("checking new script %s", newscript)
- end
- if oldscript == newscript then
- if trace_locating then
- report_scripts("old and new script are the same")
- end
- elseif not find(newscript,scriptpath) then
- if trace_locating then
- report_scripts("new script should come from %s",scriptpath)
- end
- elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_locating then
- report_scripts("invalid new script name")
- end
- else
- local newdata = io.loaddata(newscript)
- if newdata then
- if trace_locating then
- report_scripts("old script content replaced by new content")
- end
- io.savedata(oldscript,newdata)
- break
- elseif trace_locating then
- report_scripts("unable to load new script")
- end
- end
+ elseif not find(newscript,scriptpath) then
+ if trace_locating then
+ report_scripts("new script should come from %a",scriptpath)
end
+ elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
+ if trace_locating then
+ report_scripts("invalid new script name")
+ end
+ else
+ local newdata=io.loaddata(newscript)
+ if newdata then
+ if trace_locating then
+ report_scripts("old script content replaced by new content")
+ end
+ io.savedata(oldscript,newdata)
+ break
+ elseif trace_locating then
+ report_scripts("unable to load new script")
+ end
+ end
end
+ end
end
@@ -15312,78 +15268,55 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-tmf'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local resolvers = resolvers
-
-local report_tds = logs.reporter("resolvers","tds")
+package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- = <<
--- ? ??
--- < +=
--- > =+
+-- original size: 2600, stripped down to: 1627
+if not modules then modules={} end modules ['data-tmf']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local report_tds=logs.reporter("resolvers","tds")
function resolvers.load_tree(tree,resolve)
- if type(tree) == "string" and tree ~= "" then
-
- local getenv, setenv = resolvers.getenv, resolvers.setenv
-
- -- later might listen to the raw osenv var as well
- local texos = "texmf-" .. os.platform
-
- local oldroot = environment.texroot
- local newroot = file.collapsepath(tree)
-
- local newtree = file.join(newroot,texos)
- local newpath = file.join(newtree,"bin")
-
- if not lfs.isdir(newtree) then
- report_tds("no '%s' under tree %s",texos,tree)
- os.exit()
- end
- if not lfs.isdir(newpath) then
- report_tds("no '%s/bin' under tree %s",texos,tree)
- os.exit()
- end
-
- local texmfos = newtree
-
- environment.texroot = newroot
- environment.texos = texos
- environment.texmfos = texmfos
-
- -- Beware, we need to obey the relocatable autoparent so we
- -- set TEXMFCNF to its raw value. This is somewhat tricky when
- -- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode or when
- -- --resolve is passed to mtxrun. Maybe we should also set the
- -- local AUTOPARENT etc. although these are alwasy set new.
-
- if resolve then
- -- resolvers.luacnfspec = resolvers.joinpath(resolvers.resolve(resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))))
- resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
- end
-
- setenv('SELFAUTOPARENT', newroot)
- setenv('SELFAUTODIR', newtree)
- setenv('SELFAUTOLOC', newpath)
- setenv('TEXROOT', newroot)
- setenv('TEXOS', texos)
- setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
- setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
-
- report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to PATH",newpath)
- report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
- report_tds()
+ if type(tree)=="string" and tree~="" then
+ local getenv,setenv=resolvers.getenv,resolvers.setenv
+ local texos="texmf-"..os.platform
+ local oldroot=environment.texroot
+ local newroot=file.collapsepath(tree)
+ local newtree=file.join(newroot,texos)
+ local newpath=file.join(newtree,"bin")
+ if not lfs.isdir(newtree) then
+ report_tds("no %a under tree %a",texos,tree)
+ os.exit()
+ end
+ if not lfs.isdir(newpath) then
+ report_tds("no '%s/bin' under tree %a",texos,tree)
+ os.exit()
+ end
+ local texmfos=newtree
+ environment.texroot=newroot
+ environment.texos=texos
+ environment.texmfos=texmfos
+ if resolve then
+ resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
end
+ setenv('SELFAUTOPARENT',newroot)
+ setenv('SELFAUTODIR',newtree)
+ setenv('SELFAUTOLOC',newpath)
+ setenv('TEXROOT',newroot)
+ setenv('TEXOS',texos)
+ setenv('TEXMFOS',texmfos)
+ setenv('TEXMFCNF',resolvers.luacnfspec,true)
+ setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
+ report_tds()
+ end
end
@@ -15391,81 +15324,76 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-lst'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- used in mtxrun, can be loaded later .. todo
-
-local find, concat, upper, format = string.find, table.concat, string.upper, string.format
-local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
+package.loaded["data-lst"] = package.loaded["data-lst"] or true
-resolvers.listers = resolvers.listers or { }
-
-local resolvers = resolvers
-
-local report_lists = logs.reporter("resolvers","lists")
+-- original size: 2654, stripped down to: 2301
+if not modules then modules={} end modules ['data-lst']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
+resolvers.listers=resolvers.listers or {}
+local resolvers=resolvers
+local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
- if type(str) == 'table' then
- return concat(str," | ")
- else
- return str
- end
+ if type(str)=='table' then
+ return concat(str," | ")
+ else
+ return str
+ end
end
-
function resolvers.listers.variables(pattern)
- local instance = resolvers.instance
- local environment = instance.environment
- local variables = instance.variables
- local expansions = instance.expansions
- local pattern = upper(pattern or "")
- local configured = { }
- local order = instance.order
- for i=1,#order do
- for k, v in next, order[i] do
- if v ~= nil and configured[k] == nil then
- configured[k] = v
- end
- end
- end
- local env = fastcopy(environment)
- local var = fastcopy(variables)
- local exp = fastcopy(expansions)
- for key, value in sortedpairs(configured) do
- if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
- report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
- end
- end
- instance.environment = fastcopy(env)
- instance.variables = fastcopy(var)
- instance.expansions = fastcopy(exp)
-end
-
-function resolvers.listers.configurations(report)
- local configurations = resolvers.instance.specification
- local report = report or texio.write_nl
- for i=1,#configurations do
- report(format("file : %s",resolvers.resolve(configurations[i])))
- end
- report("")
- local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
- for i=1,#list do
- local li = resolvers.resolve(list[i])
- if lfs.isdir(li) then
- report(format("path - %s",li))
- else
- report(format("path + %s",li))
- end
+ local instance=resolvers.instance
+ local environment=instance.environment
+ local variables=instance.variables
+ local expansions=instance.expansions
+ local pattern=upper(pattern or "")
+ local configured={}
+ local order=instance.order
+ for i=1,#order do
+ for k,v in next,order[i] do
+ if v~=nil and configured[k]==nil then
+ configured[k]=v
+ end
+ end
+ end
+ local env=fastcopy(environment)
+ local var=fastcopy(variables)
+ local exp=fastcopy(expansions)
+ for key,value in sortedpairs(configured) do
+ if key~="" and (pattern=="" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ end
+ end
+ instance.environment=fastcopy(env)
+ instance.variables=fastcopy(var)
+ instance.expansions=fastcopy(exp)
+end
+local report_resolved=logs.reporter("system","resolved")
+function resolvers.listers.configurations()
+ local configurations=resolvers.instance.specification
+ for i=1,#configurations do
+ report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ end
+ report_resolved("")
+ local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+ for i=1,#list do
+ local li=resolvers.resolve(list[i])
+ if lfs.isdir(li) then
+ report_resolved("path - %s",li)
+ else
+ report_resolved("path + %s",li)
end
+ end
end
@@ -15473,279 +15401,407 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-sta'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+package.loaded["util-lib"] = package.loaded["util-lib"] or true
--- this code is used in the updater
+-- original size: 8911, stripped down to: 4216
+
+if not modules then modules={} end modules ['util-lib']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find=string.gsub,string.find
+local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
+local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
+local loaded=package.loaded
+local report_swiglib=logs.reporter("swiglib")
+local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
+local function requireswiglib(required,version)
+ local library=loaded[required]
+ if library==nil then
+ local required_full=gsub(required,"%.","/")
+ local required_path=pathpart(required_full)
+ local required_base=nameonly(required_full)
+ local required_name=required_base.."."..os.libsuffix
+ local version=type(version)=="string" and version~="" and version or false
+ local function check(locate,...)
+ local found_library=nil
+ if version then
+ local asked_library=joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ if not found_library or found_library==""then
+ asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ else
+ local asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library=locate(asked_library,...)
+ end
+ return found_library and found_library~="" and found_library or false
+ end
+ local found_library=findfile and check(findfile,"lib")
+ if findfiles and not found_library then
+ local asked_library=joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list=findfiles(asked_library,"lib",true)
+ if list and #list>0 then
+ table.sort(list)
+ found_library=list[#list]
+ end
+ end
+ if not found_library then
+ package.extraclibpath(environment.ownpath)
+ local paths=package.clibpaths()
+ for i=1,#paths do
+ local found_library=check(lfs.isfile)
+ if found_library then
+ break
+ end
+ end
+ end
+ if not found_library then
+ if trace_swiglib then
+        report_swiglib("not found: %a",required)
+ end
+ library=false
+ else
+ local path=pathpart(found_library)
+ local base=nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ library=package.loadlib(found_library,"luaopen_"..required_base)
+ if type(library)=="function" then
+ library=library()
+ else
+ library=false
+ end
+ dir.pop()
+ end
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required]=library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+local savedrequire=require
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
+local swiglibs={}
+function swiglib(name,version)
+ local library=swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ report_swiglib("loading %a",name)
+ library=requireswiglib("swiglib."..name,version)
+ swiglibs[name]=library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+statistics.register("used swiglibs",function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
-local gmatch, match = string.gmatch, string.match
-local type = type
-states = states or { }
-local states = states
+end -- of closure
-states.data = states.data or { }
-local data = states.data
+do -- create closure to overcome 200 locals limit
-states.hash = states.hash or { }
-local hash = states.hash
+package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
-states.tag = states.tag or ""
-states.filename = states.filename or ""
+-- original size: 5703, stripped down to: 2507
+if not modules then modules={} end modules ['luat-sta']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gmatch,match=string.gmatch,string.match
+local type=type
+states=states or {}
+local states=states
+states.data=states.data or {}
+local data=states.data
+states.hash=states.hash or {}
+local hash=states.hash
+states.tag=states.tag or ""
+states.filename=states.filename or ""
function states.save(filename,tag)
- tag = tag or states.tag
- filename = file.addsuffix(filename or states.filename,'lus')
- io.savedata(filename,
- "-- generator : luat-sta.lua\n" ..
- "-- state tag : " .. tag .. "\n\n" ..
- table.serialize(data[tag or states.tag] or {},true)
- )
+ tag=tag or states.tag
+ filename=file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
+ )
end
-
function states.load(filename,tag)
- states.filename = filename
- states.tag = tag or "whatever"
- states.filename = file.addsuffix(states.filename,'lus')
- data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
+ states.filename=filename
+ states.tag=tag or "whatever"
+ states.filename=file.addsuffix(states.filename,'lus')
+ data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
end
-
local function set_by_tag(tag,key,value,default,persistent)
- local d, h = data[tag], hash[tag]
- if d then
- if type(d) == "table" then
- local dkey, hkey = key, key
- local pre, post = match(key,"(.+)%.([^%.]+)$")
- if pre and post then
- for k in gmatch(pre,"[^%.]+") do
- local dk = d[k]
- if not dk then
- dk = { }
- d[k] = dk
- elseif type(dk) == "string" then
- -- invalid table, unable to upgrade structure
- -- hope for the best or delete the state file
- break
- end
- d = dk
- end
- dkey, hkey = post, key
- end
- if value == nil then
- value = default
- elseif value == false then
- -- special case
- elseif persistent then
- value = value or d[dkey] or default
- else
- value = value or default
- end
- d[dkey], h[hkey] = value, value
- elseif type(d) == "string" then
- -- weird
- data[tag], hash[tag] = value, value
- end
- end
+ local d,h=data[tag],hash[tag]
+ if d then
+ if type(d)=="table" then
+ local dkey,hkey=key,key
+ local pre,post=match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk=d[k]
+ if not dk then
+ dk={}
+ d[k]=dk
+ elseif type(dk)=="string" then
+ break
+ end
+ d=dk
+ end
+ dkey,hkey=post,key
+ end
+ if value==nil then
+ value=default
+ elseif value==false then
+ elseif persistent then
+ value=value or d[dkey] or default
+ else
+ value=value or default
+ end
+ d[dkey],h[hkey]=value,value
+ elseif type(d)=="string" then
+ data[tag],hash[tag]=value,value
+ end
+ end
end
-
local function get_by_tag(tag,key,default)
- local h = hash[tag]
- if h and h[key] then
- return h[key]
- else
- local d = data[tag]
- if d then
- for k in gmatch(key,"[^%.]+") do
- local dk = d[k]
- if dk ~= nil then
- d = dk
- else
- return default
- end
- end
- if d == false then
- return false
- else
- return d or default
- end
+ local h=hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d=data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk=d[k]
+ if dk~=nil then
+ d=dk
+ else
+ return default
end
+ end
+ if d==false then
+ return false
+ else
+ return d or default
+ end
end
+ end
end
-
-states.set_by_tag = set_by_tag
-states.get_by_tag = get_by_tag
-
+states.set_by_tag=set_by_tag
+states.get_by_tag=get_by_tag
function states.set(key,value,default,persistent)
- set_by_tag(states.tag,key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
end
-
function states.get(key,default)
- return get_by_tag(states.tag,key,default)
+ return get_by_tag(states.tag,key,default)
end
-
-
-
end -- of closure
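set_by_tag above walks a dotted key such as "document.mode" through nested tables, creating intermediate tables on the way, while get_by_tag first consults the flat hash and otherwise walks the data tree. A self-contained sketch of that key handling, not from the patch:

-- Sketch of the dotted-key navigation used by luat-sta above.
local function set_by_key(root, key, value)
  local pre, post = key:match("(.+)%.([^%.]+)$")
  local d = root
  if pre and post then
    for k in pre:gmatch("[^%.]+") do
      local dk = d[k]
      if not dk then
        dk = { }
        d[k] = dk          -- create the intermediate table
      end
      d = dk
    end
    d[post] = value
  else
    d[key] = value         -- no dot: plain assignment
  end
end

local function get_by_key(root, key, default)
  local d = root
  for k in key:gmatch("[^%.]+") do
    if type(d) ~= "table" then return default end
    d = d[k]
    if d == nil then return default end
  end
  return d
end

local data = { }
set_by_key(data, "document.mode", "draft")
print(get_by_key(data, "document.mode", "none"))    --> draft
print(get_by_key(data, "document.title", "none"))   --> none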
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-fmt'] = {
- version = 1.001,
- comment = "companion to mtxrun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
-local format = string.format
-
-local report_format = logs.reporter("resolvers","formats")
-
--- helper for mtxrun
+package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
-local quoted = string.quoted
+-- original size: 5951, stripped down to: 4922
-local function primaryflags() -- not yet ok
- local trackers = environment.argument("trackers")
- local directives = environment.argument("directives")
- local flags = ""
- if trackers and trackers ~= "" then
- flags = flags .. "--trackers=" .. quoted(trackers)
- end
- if directives and directives ~= "" then
- flags = flags .. "--directives=" .. quoted(directives)
- end
- return flags
+if not modules then modules={} end modules ['luat-fmt']={
+ version=1.001,
+ comment="companion to mtxrun",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format=string.format
+local concat=table.concat
+local quoted=string.quoted
+local luasuffixes=utilities.lua.suffixes
+local report_format=logs.reporter("resolvers","formats")
+local function primaryflags()
+ local trackers=environment.argument("trackers")
+ local directives=environment.argument("directives")
+ local flags={}
+ if trackers and trackers~="" then
+ flags={ "--trackers="..quoted(trackers) }
+ end
+ if directives and directives~="" then
+ flags={ "--directives="..quoted(directives) }
+ end
+ if environment.argument("jit") then
+ flags={ "--jiton" }
+ end
+ return concat(flags," ")
end
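primaryflags above collects options for the engine call; note that, as written, each branch assigns a fresh table, so when several of trackers, directives and jit are given only the last one survives in the returned string. A standalone rendering of the same logic (quoting omitted), not from the patch:

-- Sketch of the primaryflags logic in luat-fmt above.
local function primaryflags(arguments)
  local flags = { }
  if arguments.trackers and arguments.trackers ~= "" then
    flags = { "--trackers=" .. arguments.trackers }
  end
  if arguments.directives and arguments.directives ~= "" then
    flags = { "--directives=" .. arguments.directives }   -- replaces, not appends
  end
  if arguments.jit then
    flags = { "--jiton" }
  end
  return table.concat(flags, " ")
end

print(primaryflags { trackers = "resolvers.locating", directives = "system.nostatistics" })
--> --directives=system.nostatistics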
-
function environment.make_format(name)
- -- change to format path (early as we need expanded paths)
- local olddir = lfs.currentdir()
- local path = caches.getwritablepath("formats") or "" -- maybe platform
- if path ~= "" then
- lfs.chdir(path)
- end
- report_format("format path: %s",lfs.currentdir())
- -- check source file
- local texsourcename = file.addsuffix(name,"mkiv")
- local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- if fulltexsourcename == "" then
- texsourcename = file.addsuffix(name,"tex")
- fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- end
- if fulltexsourcename == "" then
- report_format("no tex source file with name: %s (mkiv or tex)",name)
- lfs.chdir(olddir)
- return
- else
- report_format("using tex source file: %s",fulltexsourcename)
- end
- local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
- -- check specification
- local specificationname = file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- if fullspecificationname == "" then
- specificationname = file.join(texsourcepath,"context.lus")
- fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- end
- if fullspecificationname == "" then
- report_format("unknown stub specification: %s",specificationname)
- lfs.chdir(olddir)
- return
- end
- local specificationpath = file.dirname(fullspecificationname)
- -- load specification
- local usedluastub = nil
- local usedlualibs = dofile(fullspecificationname)
- if type(usedlualibs) == "string" then
- usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
- elseif type(usedlualibs) == "table" then
- report_format("using stub specification: %s",fullspecificationname)
- local texbasename = file.basename(name)
- local luastubname = file.addsuffix(texbasename,"lua")
- local lucstubname = file.addsuffix(texbasename,"luc")
- -- pack libraries in stub
- report_format("creating initialization file: %s",luastubname)
- utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
- -- compile stub file (does not save that much as we don't use this stub at startup any more)
- local strip = resolvers.booleanvariable("LUACSTRIP", true)
- if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file: %s",lucstubname)
- usedluastub = lucstubname
- else
- report_format("using uncompiled initialization file: %s",luastubname)
- usedluastub = luastubname
- end
+ local engine=environment.ownmain or "luatex"
+ local olddir=dir.current()
+ local path=caches.getwritablepath("formats",engine) or ""
+ if path~="" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ local texsourcename=file.addsuffix(name,"mkiv")
+ local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename=="" then
+ texsourcename=file.addsuffix(name,"tex")
+ fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename=="" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
+ local specificationname=file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname=="" then
+ specificationname=file.join(texsourcepath,"context.lus")
+ fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname=="" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath=file.dirname(fullspecificationname)
+ local usedluastub=nil
+ local usedlualibs=dofile(fullspecificationname)
+ if type(usedlualibs)=="string" then
+ usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs)=="table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename=file.basename(name)
+ local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub=lucstubname
else
- report_format("invalid stub specification: %s",fullspecificationname)
- lfs.chdir(olddir)
- return
- end
- -- generate format
- local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
- report_format("running command: %s\n",command)
- os.spawn(command)
- -- remove related mem files
- local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
- -- report_format("removing related mplib format with pattern '%s'", pattern)
- local mp = dir.glob(pattern)
- if mp then
- for i=1,#mp do
- local name = mp[i]
- report_format("removing related mplib format %s", file.basename(name))
- os.remove(name)
- end
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub=luastubname
end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
lfs.chdir(olddir)
+ return
+ end
+ local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ local mp=dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name=mp[i]
+ report_format("removing related mplib format %a",file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
end
-
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
- if name and name ~= "" then
- local barename = file.removesuffix(name)
- local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
- if fmtname == "" then
- fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
- end
- fmtname = resolvers.cleanpath(fmtname)
- if fmtname == "" then
- report_format("no format with name: %s",name)
- else
- local barename = file.removesuffix(name) -- expanded name
- local luaname = file.addsuffix(barename,"luc")
- if not lfs.isfile(luaname) then
- luaname = file.addsuffix(barename,"lua")
- end
- if not lfs.isfile(luaname) then
- report_format("using format name: %s",fmtname)
- report_format("no luc/lua with name: %s",barename)
- else
- local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
- report_format("running command: %s",command)
- os.spawn(command)
- end
- end
+ if name and name~="" then
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname=="" then
+ fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname=resolvers.cleanpath(fmtname)
+ if fmtname=="" then
+ report_format("no format with name %a",name)
+ else
+ local barename=file.removesuffix(name)
+ local luaname=file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname=file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
end
+ end
end
end -- of closure
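An illustration, not from the patch, of the two command lines that the rewritten luat-fmt assembles: make_format runs the engine in --ini mode with a Lua initialization stub plus the ConTeXt source, and run_format later loads the generated format together with its luc/lua companion. The engine, file names and flags below are made up:

-- Sketch of the command strings built by make_format and run_format above.
local format = string.format
local function quoted(s) return '"' .. s .. '"' end

local engine = "luatex"            -- environment.ownmain or "luatex"
local flags  = "--jiton"           -- primaryflags()
local stub   = "cont-en.luc"       -- compiled initialization file
local source = "cont-en.mkiv"      -- resolved tex source
local escape = "\\"                -- "\\\\" on unix, "\\" elsewhere

local make = format("%s --ini %s --lua=%s %s %sdump",
  engine, flags, quoted(stub), quoted(source), escape)
print(make)  --> luatex --ini --jiton --lua="cont-en.luc" "cont-en.mkiv" \dump

local run = format("%s %s --fmt=%s --lua=%s %s",
  engine, flags, quoted("cont-en"), quoted(stub), quoted("document.tex"))
print(run)   --> luatex --jiton --fmt="cont-en" --lua="cont-en.luc" "document.tex"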
+
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
+-- skipped libraries : -
+-- original bytes : 658276
+-- stripped bytes : 241564
+
-- end library merge
-own = { } -- not local, might change
+-- We need this hack till luatex is fixed.
+--
+-- for k,v in pairs(arg) do print(k,v) end
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
+end
+
+-- End of hack.
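The hack above makes mtxrun behave the same whether it is run through its stub or directly as "luatex --luaonly mtxrun.lua ...": the argument table is shifted two places so the script name ends up in arg[0]. A small demonstration with a fabricated arg table, not from the patch:

-- Sketch of the arg-shuffling above.
local arg = { [0] = "luatex", "--luaonly", "mtxrun.lua", "--script", "fonts" }

if arg and (arg[0] == "luatex" or arg[0] == "luatex.exe") and arg[1] == "--luaonly" then
  arg[-1] = arg[0]                          -- remember the binary
  arg[0]  = arg[2]                          -- the script becomes argv[0]
  for k = 3, #arg do arg[k-2] = arg[k] end  -- shift remaining arguments down
  arg[#arg] = nil arg[#arg] = nil           -- drop the two stale tail slots
end

print(arg[-1], arg[0], arg[1], arg[2])  --> luatex  mtxrun.lua  --script  fonts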
-own.libs = { -- order can be made better
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = environment and environment.ownpath or ownpath
+
+local ownlibs = { -- order can be made better
+
+ 'l-lua.lua',
+ 'l-lpeg.lua',
+ 'l-function.lua',
'l-string.lua',
'l-table.lua',
- 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -15758,19 +15814,23 @@ own.libs = { -- order can be made better
'l-unicode.lua',
'l-math.lua',
+ 'util-str.lua', -- code might move to l-string
'util-tab.lua',
'util-sto.lua',
- 'util-mrg.lua',
- 'util-lua.lua',
'util-prs.lua',
'util-fmt.lua',
- 'util-deb.lua',
- 'trac-inf.lua',
'trac-set.lua',
'trac-log.lua',
- 'trac-pro.lua',
+ 'trac-inf.lua', -- was before trac-set
+ 'trac-pro.lua', -- not really needed
+ 'util-lua.lua', -- indeed here?
+ 'util-deb.lua',
+
+ 'util-mrg.lua',
+ 'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -15780,6 +15840,8 @@ own.libs = { -- order can be made better
'lxml-aux.lua',
'lxml-xml.lua',
+ 'trac-xml.lua',
+
'data-ini.lua',
'data-exp.lua',
'data-env.lua',
@@ -15796,35 +15858,20 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
'data-lst.lua',
+ 'util-lib.lua', -- swiglib
+
'luat-sta.lua',
'luat-fmt.lua',
-}
-
--- We need this hack till luatex is fixed.
---
--- for k,v in pairs(arg) do print(k,v) end
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
--- End of hack.
-
-local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
-local concat = table.concat
-
-own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
-own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
-local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+}
-own.list = {
+local ownlist = {
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15838,13 +15885,21 @@ own.list = {
owntree .. "/../../../texmf/tex/context/base",
}
-if own.path == "." then table.remove(own.list,1) end
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
local function locate_libs()
- for l=1,#own.libs do
- local lib = own.libs[l]
- for p =1,#own.list do
- local pth = own.list[p]
+ for l=1,#ownlibs do
+ local lib = ownlibs[l]
+ for p =1,#ownlist do
+ local pth = ownlist[p]
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
@@ -15858,8 +15913,8 @@ end
local function load_libs()
local found = locate_libs()
if found then
- for l=1,#own.libs do
- local filename = found .. "/" .. own.libs[l]
+ for l=1,#ownlibs do
+ local filename = found .. "/" .. ownlibs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
@@ -15933,55 +15988,85 @@ if not environment.experiments then environment.experiments = e_experiments end
local instance = resolvers.reset()
local helpinfo = [[
---script run an mtx script (lua prefered method) (--noquotes), no script gives list
---execute run a script or program (texmfstart method) (--noquotes)
---resolve resolve prefixed arguments
---ctxlua run internally (using preloaded libs)
---internal run script using built in libraries (same as --ctxlua)
---locate locate given filename in database (default) or system (--first --all --detail)
-
---autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
---tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
---environment=name use given (tmf) environment file
---path=runpath go to given path before execution
---ifchanged=filename only execute when given file has changed (md checksum)
---iftouched=old,new only execute when given file has changed (time stamp)
-
---makestubs create stubs for (context related) scripts
---removestubs remove stubs (context related) scripts
---stubpath=binpath paths where stubs wil be written
---windows create windows (mswin) stubs
---unix create unix (linux) stubs
-
---verbose give a bit more info
---trackers=list enable given trackers
---progname=str format or backend
-
---edit launch editor with found file
---launch (--all) launch files like manuals, assumes os support
-
---timedrun run a script an time its run
---autogenerate regenerate databases if needed (handy when used to run context in an editor)
-
---usekpse use kpse as fallback (when no mkiv and cache installed, often slower)
---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
-
---prefixes show supported prefixes
-
---generate generate file database
-
---variables show configuration variables
---configurations show configuration order
-
---expand-braces expand complex variable
---expand-path expand variable (resolve paths)
---expand-var expand variable (resolve references)
---show-path show path expansion of ...
---var-value report value of variable
---find-file report file location
---find-path report path of file
-
---pattern=str filter variables
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+ <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+ <flag name="stubpath" value="binpath"><short>paths where stubs wil be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
]]
local application = logs.application {
@@ -16093,7 +16178,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +16190,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +16217,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +16410,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +16469,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +16524,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +16632,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
+
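runners.gethelp above fills the current command name into a user supplied --url template and launches it. A stand-in for that idea, not from the patch; the %command% placeholder and the plain gsub are assumptions, since the real code goes through utilities.templates.replace:

-- Sketch of the gethelp url expansion above (placeholder syntax assumed).
local function gethelp(url, command)
  command = (command or "unknown"):gsub("^%s*\\*(.-)%s*$", "%1")  -- strip spaces and leading backslashes
  return (url:gsub("%%command%%", command))
end

print(gethelp("http://example.org/help/%command%", " \\framed "))
--> http://example.org/help/framed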
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16647,8 +16748,18 @@ else
end
+if e_argument("script") or e_argument("scripts") then
-if e_argument("selfmerge") then
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16678,17 +16789,6 @@ elseif e_argument("ctxlua") or e_argument("internal") then
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif e_argument("script") or e_argument("scripts") then
-
- -- run a script by loading it (using libs), pass args
-
- runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
-
elseif e_argument("execute") then
-- execute script
@@ -16715,6 +16815,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
-- make stubs (depricated)
@@ -16806,7 +16914,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
@@ -16908,6 +17016,23 @@ elseif e_argument("version") then
application.version()
+elseif e_argument("directives") then
+
+ directives.show()
+
+elseif e_argument("trackers") then
+
+ trackers.show()
+
+elseif e_argument("experiments") then
+
+ experiments.show()
+
+elseif e_argument("exporthelp") then
+
+ runners.loadbase()
+ application.export(e_argument("exporthelp"),filename)
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
@@ -16938,7 +17063,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
@@ -16955,4 +17079,4 @@ end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
-os.exit(ok)
+os.exit(ok,true) -- true forces a cleanup in 5.2+
diff --git a/Master/texmf-dist/scripts/context/stubs/unix/pstopdf b/Master/texmf-dist/scripts/context/stubs/unix/pstopdf
new file mode 100644
index 00000000000..116f5f4a32e
--- /dev/null
+++ b/Master/texmf-dist/scripts/context/stubs/unix/pstopdf
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script pstopdf "$@"
diff --git a/Master/texmf-dist/tex/context/base/anch-bar.mkiv b/Master/texmf-dist/tex/context/base/anch-bar.mkiv
index 9f9770fb6a3..c7c6190bef9 100644
--- a/Master/texmf-dist/tex/context/base/anch-bar.mkiv
+++ b/Master/texmf-dist/tex/context/base/anch-bar.mkiv
@@ -58,7 +58,9 @@
\installcommandhandler \??sidebar {sidebar} \??sidebar
\newcount\c_anch_sidebars_n
-\newdimen\c_anch_sidebars_distance
+\newcount\c_anch_sidebars_current % local
+\newdimen\d_anch_sidebars_distance
+\newcount\c_anch_sidebars_level
% \setupMPvariables
% [mpos:sidebar]
@@ -67,24 +69,60 @@
% distance=5pt]
\setupsidebar
- [\c!rulethickness=2pt,
+ [\c!rulethickness=\dimexpr\bodyfontsize/6\relax, % 2pt default
\c!rulecolor=\s!black,
\c!alternative=0,
- \c!topoffset=0pt,
- \c!bottomoffset=0pt,
- \c!distance=.5\bodyfontsize]
+ \c!topoffset=\zeropoint,
+ \c!bottomoffset=\zeropoint,
+ \c!distance=.5\bodyfontsize,
+ \c!level=,
+ \c!leftmargindistance=\zeropoint]
\let\setupsidebars\setupsidebar
\unexpanded\def\startsidebar
{\dosingleempty\anch_sidebars_start}
-\def\anch_sidebars_start[#1]%
+\unexpanded\def\startsidebar
+ {\dodoubleempty\anch_sidebars_start}
+
+\def\anch_sidebars_start[#1][#2]%
{\bgroup
- \def\currentsidebar{#1}%
\dontleavehmode
+ \advance\c_anch_sidebars_level\plusone
\global\advance\c_anch_sidebars_n\plusone
- \advance\c_anch_sidebars_distance\sidebarparameter\c!distance
+ \c_anch_sidebars_current\c_anch_sidebars_n\relax % relax needed
+ \doifassignmentelse{#1}
+ {\edef\currentsidebar{\the\c_anch_sidebars_level}%
+ \checksidebarparent
+ \setupcurrentsidebar[#1]}
+ {\def\currentsidebar{#1}%
+ \setupcurrentsidebar[#2]}%
+ \scratchdistance\sidebarparameter\c!distance\relax
+ \edef\m_level{\sidebarparameter\c!level}%
+ \ifx\m_level\empty
+ \ifnum\c_anch_sidebars_level=\plusone
+ \scratchdimen\sidebarparameter\c!leftmargindistance\relax
+ \ifdim\scratchdimen=\zeropoint
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \else
+ \d_anch_sidebars_distance\scratchdimen
+ \fi
+ \else
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \fi
+ \else
+ \ifnum\m_level=\plusone
+ \scratchdimen\sidebarparameter\c!leftmargindistance\relax
+ \ifdim\scratchdimen=\zeropoint
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \else
+ \d_anch_sidebars_distance\scratchdimen
+ \fi
+ \else
+ \d_anch_sidebars_distance\dimexpr\scratchdimen+\numexpr\m_level-\plusone\relax\dimexpr\scratchdistance\relax\relax
+ \fi
+ \fi
\startpositionoverlay{text-1}%
\normalexpanded{\setMPpositiongraphicrange % maybe expand in definition
{b:sidebar:\the\c_anch_sidebars_n}%
@@ -94,15 +132,17 @@
linewidth=\sidebarparameter\c!rulethickness,
linecolor=\sidebarparameter\c!rulecolor,
alternative=\sidebarparameter\c!alternative,
- topoffset=\sidebarparameter\c!topoffset,
- bottomoffset=\sidebarparameter\c!bottomoffset,
- distance=\the\c_anch_sidebars_distance}}%
+ topoffset=\the\dimexpr\sidebarparameter\c!topoffset,
+ bottomoffset=\the\dimexpr\sidebarparameter\c!bottomoffset,
+ distance=\the\d_anch_sidebars_distance}%
+ }%
\stoppositionoverlay
- \bpos{sidebar:\the\c_anch_sidebars_n}}
+ \bpos{sidebar:\the\c_anch_sidebars_current}%
+ \ignorespaces}
\unexpanded\def\stopsidebar
{\removelastspace
- \epos{sidebar:\the\c_anch_sidebars_n}
+ \epos{sidebar:\the\c_anch_sidebars_current}
\carryoverpar\egroup}
%D Let's keep this nice and simple (okay, we could pass the 6 variables in
@@ -121,49 +161,73 @@
\MPpositiongraphic{mpos:sidebar}{}%
\stopMPpositionmethod
-%D We now reimplement the margin rules handler defined in
-%D \type {core-rul}:
+%D We now reimplement the \MKII\ margin rules handler in a more
+%D modern way.
%D
-%D \setupmarginrules[level=5]
+%D \setupmarginrules
+%D [rulecolor=darkred,
+%D rulethickness=2pt]
%D
-%D \startmarginrule[1]
-%D First we set the level at~5. Next we typeset this first
-%D paragraph as a level~1 one. As expected no rule show up.
-%D \stopmarginrule
+%D \setupmarginrules % sidebar
+%D [2]
+%D [rulecolor=darkblue]
%D
-%D \startmarginrule[5]
-%D The second paragraph is a level~5 one. As we can see here,
-%D the marginal rule gets a width according to its level.
+%D \startmarginrule[1]
+%D \input ward
+%D \startmarginrule[2]
+%D \input ward
+%D \startmarginrule[3]
+%D \input ward
+%D \startmarginrule[level=6,rulecolor=darkgreen]
+%D \input ward
+%D \stopmarginrule
+%D \input ward
+%D \stopmarginrule
+%D \input ward
+%D \stopmarginrule
+%D \input ward
%D \stopmarginrule
%D
-%D \startmarginrule[8]
-%D It will of course be no surprise that this third paragraph
-%D has a even thicker margin rule. This behavior can be
-%D overruled by specifying the width explictly.
-%D \stopmarginrule
+%D Compared to the old mechanism we now can pass settings too.
\definesidebar
[\v!margin]
- [\c!rulethickness=\@@karulethickness,
- \c!distance=\dimexpr\leftmargindistance-\@@karulethickness/2\relax]
+ [\c!leftmargindistance=\dimexpr\leftmargindistance+\sidebarparameter\c!rulethickness/2\relax]
-\definecomplexorsimple\startmarginrule
+\dorecurse{5}{\definesidebar[\v!margin:#1][\v!margin]} % let's be nice and predefine 5 levels
-\def\simplestartmarginrule
- {\complexstartmarginrule[1]}
+\unexpanded\def\setupmarginrule
+ {\dodoubleargument\anch_marginrules_setup}
-\def\complexstartmarginrule[#1]%
- {\bgroup
- \ifnum#1<\@@kalevel\relax
- \let\stopmarginrule\egroup
+\def\anch_marginrules_setup[#1][#2]%
+ {\ifsecondargument
+ \setupsidebar[\v!margin:#1][#2]%
\else
- \def\@@kadefaultwidth{#1}%
- \let\stopmarginrule\dostopmarginrule
- \normalexpanded{\startsidebar[\v!margin]}% why expanded
+ \setupsidebar[\v!margin][#1]%
\fi}
-\def\dostopmarginrule
- {\stopsidebar
- \egroup}
+\let\setupmarginrules\setupmarginrule
+
+\unexpanded\def\startmarginrule
+ {\dosingleempty\anch_marginrules_start}
+
+\unexpanded\def\startmarginrule
+ {\dosingleempty\anch_marginrules_start}
+
+\def\anch_marginrules_start[#1]% pretty inefficient checking
+ {\edef\m_anch_marginrules_kind{#1}%
+ \ifx\m_anch_marginrules_kind\empty
+ \anch_sidebars_start[\v!margin][]%
+ \else
+ \doifassignmentelse\m_anch_marginrules_kind
+ {\anch_sidebars_start[\v!margin][#1]}%
+ {\anch_marginrules_check{#1}%
+ \anch_sidebars_start[\v!margin:#1][\c!level=#1]}%
+ \fi}
+
+\def\anch_marginrules_check#1%
+ {\doifnotcommandhandler\??sidebar{\v!margin:#1}{\definesidebar[\v!margin:#1][\v!margin]}}
+
+\let\stopmarginrule\stopsidebar
\protect \endinput
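A worked example, not from the patch, of the offset idea in the rewritten sidebar code above: each extra sidebar level is pushed one "distance" further out, starting from "leftmargindistance" when that parameter is set; the exact TeX branches for explicit levels differ slightly from this simplification. In Lua, with made-up dimensions in points:

-- Rough sketch of the sidebar offset arithmetic in anch-bar.mkiv above.
local function sidebaroffset(level, leftmargindistance, distance)
  if level <= 1 then
    return leftmargindistance ~= 0 and leftmargindistance or distance
  else
    return leftmargindistance + (level - 1) * distance
  end
end

for level = 1, 3 do
  print(level, sidebaroffset(level, 11.5, 6.0))  -- hypothetical 11.5pt margin, 6pt step
end
--> 1  11.5
--> 2  17.5
--> 3  23.5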
diff --git a/Master/texmf-dist/tex/context/base/anch-bck.mkvi b/Master/texmf-dist/tex/context/base/anch-bck.mkvi
index 8ec056468dd..79e42dc0acc 100644
--- a/Master/texmf-dist/tex/context/base/anch-bck.mkvi
+++ b/Master/texmf-dist/tex/context/base/anch-bck.mkvi
@@ -276,7 +276,7 @@
\kern\textbackgroundskip\nobreak
\fi \fi
\nobreak
- \vskip-\dimexpr\lineheight+\parskip\relax
+ \vskip-\dimexpr\lineheight+\parskip\relax % problem: we loose the hangindent
\nobreak
\endgroup
\begingroup
@@ -298,7 +298,7 @@
\endgraf % new
\textbackgroundparameter\c!after}
-\unexpanded\def\checkpositionoverlays
+\unexpanded\def\checkpositionoverlays % overloads \relax in anch-pgr
{\ifproductionrun
\enabletextarearegistration
\enablehiddenbackground
@@ -307,7 +307,7 @@
\setuptextbackground
[\c!mp=mpos:region:draw,
- \c!method=mpos:region,
+ \c!method=mpos:region, % mpos:regionshape
\c!state=\v!start,
\c!location=\v!text,
\c!leftoffset=\!!zeropoint, % 1em,
@@ -397,6 +397,14 @@
\includeMPgraphic{mpos:region:anchor} ;
\stopMPpositiongraphic
+\startMPpositiongraphic{mpos:regionshape}{fillcolor,filloffset,linecolor,gridcolor,linewidth,gridwidth,gridshift,lineradius,lineoffset}
+ \includeMPgraphic{mpos:region:setup} ;
+ \includeMPgraphic{mpos:region:extra} ;
+ \MPgetmultishapes{\MPvar{self}}{\MPanchorid} ;
+ \includeMPgraphic{\MPvar{mp}} ;
+ \includeMPgraphic{mpos:region:anchor} ;
+\stopMPpositiongraphic
+
\startMPpositionmethod{mpos:region}
\MPpositiongraphic{mpos:region}{}%
\stopMPpositionmethod
diff --git a/Master/texmf-dist/tex/context/base/anch-pgr.lua b/Master/texmf-dist/tex/context/base/anch-pgr.lua
index bf4dcbe0287..278448e3aa7 100644
--- a/Master/texmf-dist/tex/context/base/anch-pgr.lua
+++ b/Master/texmf-dist/tex/context/base/anch-pgr.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['anch-pgr'] = {
-- todo: we need to clean up lists (of previous pages)
+local commands, context = commands, context
+
local format = string.format
local abs = math.abs
local concat, sort = table.concat, table.sort
@@ -15,20 +17,19 @@ local splitter = lpeg.splitat(":")
local lpegmatch = lpeg.match
local jobpositions = job.positions
+local formatters = string.formatters
local report_graphics = logs.reporter("graphics")
-local function point(n)
- return format("%.5fpt",n/65536)
-end
+local f_b_tag = formatters["b:%s"]
+local f_e_tag = formatters["e:%s"]
+local f_p_tag = formatters["p:%s"]
-local function pair(x,y)
- return format("(%.5fpt,%.5fpt)",x/65536,y/65536)
-end
+local f_tag_two = formatters["%s:%s"]
-local function path(t)
- return concat(t,"--") .. "--cycle"
-end
+local f_point = formatters["%p"]
+local f_pair = formatters["(%p,%p)"]
+local f_path = formatters["%--t--cycle"]
local function regionarea(r)
local rx, ry = r.x, r.y
@@ -36,10 +37,10 @@ local function regionarea(r)
local rh = ry + r.h
local rd = ry - r.d
return {
- pair(rx, rh - ry),
- pair(rw, rh - ry),
- pair(rw, rd - ry),
- pair(rx, rd - ry),
+ f_pair(rx, rh - ry),
+ f_pair(rw, rh - ry),
+ f_pair(rw, rd - ry),
+ f_pair(rx, rd - ry),
}
end
@@ -47,34 +48,48 @@ end
local eps = 2
-local function add(t,x,y,last)
+local function add(t,x,y,last,direction)
local n = #t
if n == 0 then
t[n+1] = { x, y }
- elseif n == 1 then
- local tn = t[1]
- if abs(tn[1]-x) <= eps or abs(tn[2]-y) <= eps then
- t[n+1] = { x, y }
- end
else
- local tm = t[n-1]
local tn = t[n]
local lx = tn[1]
local ly = tn[2]
- if abs(lx-tm[1]) <= eps and abs(lx-x) <= eps then
- if abs(ly-y) > eps then
- tn[2] = y
+ if x == lx and y == ly then
+ -- quick skip
+ elseif n == 1 then
+-- if abs(lx-x) <= eps or abs(ly-y) <= eps then
+ if abs(lx-x) > eps or abs(ly-y) > eps then
+ t[n+1] = { x, y }
end
- elseif abs(ly-tm[2]) <= eps and abs(ly-y) <= eps then
- if abs(lx-x) > eps then
- tn[1] = x
+ else
+ local tm = t[n-1]
+ local px = tm[1]
+ local py = tm[2]
+if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
+ -- move back from too much hang
+else
+ if abs(lx-px) <= eps and abs(lx-x) <= eps then
+ if abs(ly-y) > eps then
+ tn[2] = y
+ end
+ elseif abs(ly-py) <= eps and abs(ly-y) <= eps then
+ if abs(lx-x) > eps then
+ tn[1] = x
+ end
+ elseif not last then
+ t[n+1] = { x, y }
end
- elseif not last then
- t[n+1] = { x, y }
+end
end
end
end
+-- local function add(t,x,y,last)
+-- t[#t+1] = { x, y }
+-- end
+
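The add helper above accumulates the left and right shape outlines while merging points that, within an eps of 2 scaled points, merely extend the previous vertical or horizontal segment; the new direction argument additionally drops points that would move back against the hang. A self-contained sketch of just the merge part, not from the patch:

-- Sketch of the point-merging idea in anch-pgr.lua above.
local abs = math.abs
local eps = 2                       -- tolerance, as in the patch

local function add(t, x, y)
  local n = #t
  if n == 0 then
    t[n+1] = { x, y }
  else
    local tn = t[n]
    local lx, ly = tn[1], tn[2]
    if x == lx and y == ly then
      -- identical point: skip
    elseif n == 1 then
      if abs(lx-x) > eps or abs(ly-y) > eps then
        t[n+1] = { x, y }
      end
    else
      local tm = t[n-1]
      local px, py = tm[1], tm[2]
      if abs(lx-px) <= eps and abs(lx-x) <= eps then
        tn[2] = y                   -- extend the vertical segment
      elseif abs(ly-py) <= eps and abs(ly-y) <= eps then
        tn[1] = x                   -- extend the horizontal segment
      else
        t[n+1] = { x, y }
      end
    end
  end
end

local shape = { }
add(shape, 0, 0) add(shape, 0, 100) add(shape, 0, 200) add(shape, 150, 200)
for i = 1, #shape do print(shape[i][1], shape[i][2]) end
--> (0,0) (0,200) (150,200) : the point at (0,100) was merged away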
local function finish(t)
local n = #t
if n > 1 then
@@ -109,105 +124,103 @@ end
-- todo: mark regions and free paragraphs in collected
-local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot)
+local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang)
-- we assume that we only hang per page and not cross pages
-- which makes sense as hanging is only uses in special cases
--
-- we can remove data as soon as a page is done so we could
-- remember per page and discard areas after each shipout
local leftshape, rightshape
--- leftshape = r.leftshape
--- rightshape = r.rightshape
--- if not leftshape then
- leftshape = { { rx, rh } }
- rightshape = { { rw, rh } }
- local paragraphs = r.paragraphs
- local extending = false
- if paragraphs then
- for i=1,#paragraphs do
- local p = paragraphs[i]
- local ha = p.ha
- if ha and ha ~= 0 then
+ leftshape = { { rx, rh } } -- spikes get removed so we can start at the edge
+ rightshape = { { rw, rh } } -- even if we hang next
+ local paragraphs = r.paragraphs
+ local extending = false
+ if paragraphs then
+ for i=1,#paragraphs do
+ local p = paragraphs[i]
+ local ha = p.ha
+ if obeyhang and ha and ha ~= 0 then
+ local py = p.y
+ local ph = p.h
+ local pd = p.d
+ local hi = p.hi
+ local hang = ha * (ph + pd)
+ local py_ph = py + ph
+ -- ha < 0 hi < 0 : right top
+ -- ha < 0 hi > 0 : left top
+ if ha < 0 then
+ if hi < 0 then -- right
+ add(rightshape,rw, py_ph,"up")
+ add(rightshape,rw + hi,py_ph,"up")
+ add(rightshape,rw + hi,py_ph + hang,"up")
+ add(rightshape,rw, py_ph + hang,"up")
+ else
+ -- left
+ add(leftshape,rx,py_ph,"down")
+ add(leftshape,rx + hi,py_ph,"down")
+ add(leftshape,rx + hi,py_ph + hang,"down")
+ add(leftshape,rx,py_ph + hang,"down")
+ end
+ else
+ -- maybe some day
+ end
+ extending = true -- false
+ else -- we need to clip to the next par
+ local ps = p.ps
+ if ps then
local py = p.y
local ph = p.h
local pd = p.d
- local hi = p.hi
- local hang = ha * (ph + pd)
+ local step = ph + pd
+ local size = #ps * step
local py_ph = py + ph
- -- ha < 0 hi < 0 : right top
- -- ha < 0 hi > 0 : left top
- if ha < 0 then
- if hi < 0 then -- right
- add(rightshape,rw , py_ph)
- add(rightshape,rw + hi, py_ph)
- add(rightshape,rw + hi, py_ph + hang)
- add(rightshape,rw , py_ph + hang)
- else
- -- left
- add(leftshape,rx, py_ph)
- add(leftshape,rx + hi, py_ph)
- add(leftshape,rx + hi, py_ph + hang)
- add(leftshape,rx, py_ph + hang)
- end
- end
-extending = false
- else -- we need to clip to the next par
- local ps = p.ps
- if ps then
- local py = p.y
- local ph = p.h
- local pd = p.d
- local step = ph + pd
- local size = #ps * step
- local py_ph = py + ph
- add(leftshape,rx,py_ph)
- add(rightshape,rw,py_ph)
- for i=1,#ps do
- local p = ps[i]
- local l = p[1]
- local w = p[2]
- add(leftshape,rx + l, py_ph)
- add(rightshape,rx + l + w, py_ph)
- py_ph = py_ph - step
- add(leftshape,rx + l, py_ph)
- add(rightshape,rx + l + w, py_ph)
- end
- extending = true
--- add(left,rx,py_ph)
--- add(right,rw,py_ph)
- else
- if extending then
- local py = p.y
- local ph = p.h
- local pd = p.d
- local py_ph = py + ph
- local py_pd = py - pd
- add(leftshape,leftshape[#leftshape][1],py_ph)
- add(rightshape,rightshape[#rightshape][1],py_ph)
- add(leftshape,rx,py_ph)
- add(rightshape,rw,py_ph)
-extending = false
- end
+ add(leftshape,rx,py_ph,"up")
+ add(rightshape,rw,py_ph,"down")
+ for i=1,#ps do
+ local p = ps[i]
+ local l = p[1]
+ local w = p[2]
+ add(leftshape,rx + l, py_ph,"up")
+ add(rightshape,rx + l + w, py_ph,"down")
+ py_ph = py_ph - step
+ add(leftshape,rx + l, py_ph,"up")
+ add(rightshape,rx + l + w, py_ph,"down")
end
+ extending = true
+ elseif extending then
+ local py = p.y
+ local ph = p.h
+ local pd = p.d
+ local py_ph = py + ph
+ local py_pd = py - pd
+ add(leftshape,leftshape[#leftshape][1],py_ph,"up")
+ add(rightshape,rightshape[#rightshape][1],py_ph,"down")
+ add(leftshape,rx,py_ph,"up") -- shouldn't this be py_pd
+ add(rightshape,rw,py_ph,"down") -- shouldn't this be py_pd
+ extending = false
end
end
end
- -- we can have a simple variant when no paragraphs
- if extending then
- -- not ok
- leftshape[#leftshape][2] = rd
- rightshape[#rightshape][2] = rw
- else
- add(leftshape,rx,rd)
- add(rightshape,rw,rd)
- end
--- r.leftshape = leftshape
--- r.rightshape = rightshape
--- end
+ end
+ -- we can have a simple variant when no paragraphs
+ if extending then
+ -- not ok
+ leftshape[#leftshape][2] = rd
+ rightshape[#rightshape][2] = rw
+ else
+ add(leftshape,rx,rd,"up")
+ add(rightshape,rw,rd,"down")
+ end
return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot)
end
-local function singlepart(b,e,r,left,right)
+-- local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang)
+-- local leftshape = { { rx, rh }, { rx, rd } }
+-- local rightshape = { { rw, rh }, { rw, rd } }
+-- return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot)
+-- end
+
+local function singlepart(b,e,r,left,right,obeyhang)
local bx, by = b.x, b.y
local ex, ey = e.x, e.y
local rx, ry = r.x, r.y
@@ -231,14 +244,14 @@ local function singlepart(b,e,r,left,right)
local area
if by == ey then
area = {
- pair(bx,bh-ry),
- pair(ex,eh-ry),
- pair(ex,ed-ry),
- pair(bx,bd-ry),
+ f_pair(bx,bh-ry),
+ f_pair(ex,eh-ry),
+ f_pair(ex,ed-ry),
+ f_pair(bx,bd-ry),
}
else
area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh,obeyhang)
add(area,bx,bh-ry)
for i=1,#rightshapes do
local ri = rightshapes[i]
@@ -255,7 +268,7 @@ local function singlepart(b,e,r,left,right)
finish(area)
for i=1,#area do
local a = area[i]
- area[i] = pair(a[1],a[2])
+ area[i] = f_pair(a[1],a[2])
end
end
return {
@@ -265,7 +278,7 @@ local function singlepart(b,e,r,left,right)
}
end
-local function firstpart(b,r,left,right)
+local function firstpart(b,r,left,right,obeyhang)
local bx, by = b.x, b.y
local rx, ry = r.x, r.y
local rw = rx + r.w
@@ -278,7 +291,7 @@ local function firstpart(b,r,left,right)
local bh = by + b.h
local bd = by - b.d
local area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd,obeyhang)
add(area,bx,bh-ry)
for i=1,#rightshapes do
local ri = rightshapes[i]
@@ -293,7 +306,7 @@ local function firstpart(b,r,left,right)
finish(area)
for i=1,#area do
local a = area[i]
- area[i] = pair(a[1],a[2])
+ area[i] = f_pair(a[1],a[2])
end
return {
location = "first",
@@ -302,7 +315,7 @@ local function firstpart(b,r,left,right)
}
end
-local function middlepart(r,left,right)
+local function middlepart(r,left,right,obeyhang)
local rx, ry = r.x, r.y
local rw = rx + r.w
local rh = ry + r.h
@@ -312,7 +325,7 @@ local function middlepart(r,left,right)
rw = rw - right
end
local area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd,obeyhang)
for i=#leftshapes,1,-1 do
local li = leftshapes[i]
add(area,li[1],li[2]-ry)
@@ -324,7 +337,7 @@ local function middlepart(r,left,right)
finish(area)
for i=1,#area do
local a = area[i]
- area[i] = pair(a[1],a[2])
+ area[i] = f_pair(a[1],a[2])
end
return {
location = "middle",
@@ -333,7 +346,7 @@ local function middlepart(r,left,right)
}
end
-local function lastpart(e,r,left,right)
+local function lastpart(e,r,left,right,obeyhang)
local ex, ey = e.x, e.y
local rx, ry = r.x, r.y
local rw = rx + r.w
@@ -347,7 +360,7 @@ local function lastpart(e,r,left,right)
local ed = ey - e.d
local area = { }
-- two cases: till end and halfway e line
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh,obeyhang)
for i=1,#rightshapes do
local ri = rightshapes[i]
add(area,ri[1],ri[2]-ry)
@@ -361,7 +374,7 @@ local function lastpart(e,r,left,right)
finish(area)
for i=1,#area do
local a = area[i]
- area[i] = pair(a[1],a[2])
+ area[i] = f_pair(a[1],a[2])
end
return {
location = "last",
@@ -375,24 +388,24 @@ local backgrounds = { }
graphics.backgrounds = backgrounds
-local function calculatemultipar(tag)
+local function calculatemultipar(tag,obeyhang)
local collected = jobpositions.collected
- local b = collected[format("b:%s",tag)]
- local e = collected[format("e:%s",tag)]
+ local b = collected[f_b_tag(tag)]
+ local e = collected[f_e_tag(tag)]
if not b or not e then
- report_graphics("invalid tag '%s'",tag)
+ report_graphics("invalid tag %a",tag)
return { }
end
local br = b.r
local er = e.r
if not br or not er then
- report_graphics("invalid region for '%s'",tag)
+ report_graphics("invalid region for %a",tag)
return { }
end
local btag, bindex = lpegmatch(splitter,br)
local etag, eindex = lpegmatch(splitter,er)
if not bindex or not eindex or btag ~= etag then
- report_graphics("invalid indices for '%s'",tag)
+ report_graphics("invalid indices for %a",tag)
return { }
end
local bindex = tonumber(bindex)
@@ -420,7 +433,7 @@ local function calculatemultipar(tag)
--
local bn = b.n
if bn then
- local bp = collected[format("p:%s",bn)]
+ local bp = collected[f_p_tag(bn)]
if bp then
left = left + bp.ls
right = right + bp.rs
@@ -429,35 +442,35 @@ local function calculatemultipar(tag)
--
if bindex == eindex then
return {
- list = { [b.p] = { singlepart(b,e,collected[br],left,right) } },
+ list = { [b.p] = { singlepart(b,e,collected[br],left,right,obeyhang) } },
bpos = b,
epos = e,
}
else
local list = {
- [b.p] = { firstpart(b,collected[br],left,right) },
+ [b.p] = { firstpart(b,collected[br],left,right,obeyhang) },
}
for i=bindex+1,eindex-1 do
- br = format("%s:%s",btag,i)
+ br = f_tag_two(btag,i)
local r = collected[br]
if not r then
- report_graphics("invalid middle for '%s'",br)
+ report_graphics("invalid middle for %a",br)
else
local p = r.p
local pp = list[p]
if pp then
- pp[#pp+1] = middlepart(r,left,right)
+ pp[#pp+1] = middlepart(r,left,right,obeyhang)
else
- list[p] = { middlepart(r,left,right) }
+ list[p] = { middlepart(r,left,right,obeyhang) }
end
end
end
local p = e.p
local pp = list[p]
if pp then
- pp[#pp+1] = lastpart(e,collected[er],left,right)
+ pp[#pp+1] = lastpart(e,collected[er],left,right,obeyhang)
else
- list[p] = { lastpart(e,collected[er],left,right) }
+ list[p] = { lastpart(e,collected[er],left,right,obeyhang) }
end
return {
list = list,
@@ -511,36 +524,41 @@ local multilocs = {
-- if unknown context_abck : input mp-abck.mpiv ; fi ;
-local template_a = [[
+local f_template_a = [[
path multiregs[], multipars[], multibox ;
string multikind[] ;
numeric multilocs[], nofmultipars ;
nofmultipars := %s ;
-multibox := unitsquare xyscaled %s ;
+multibox := unitsquare xyscaled (%p,%p) ;
numeric par_strut_height, par_strut_depth, par_line_height ;
-par_strut_height := %s ;
-par_strut_depth := %s ;
-par_line_height := %s ;
+par_strut_height := %p ;
+par_strut_depth := %p ;
+par_line_height := %p ;
]]
-local template_b = [[
+local f_template_b = [[
multilocs[%s] := %s ;
multikind[%s] := "%s" ;
-multipars[%s] := (%s) shifted - %s ;
+multipars[%s] := (%--t--cycle) shifted - (%p,%p) ;
]]
-local template_c = [[
-multiregs[%s] := (%s) shifted - %s ;
+local f_template_c = [[
+multiregs[%s] := (%--t--cycle) shifted - %s ;
]]
-local template_d = [[
+local f_template_d = [[
setbounds currentpicture to multibox ;
]]
-function backgrounds.fetchmultipar(n,anchor,page)
+f_template_a = formatters[f_template_a]
+f_template_b = formatters[f_template_b]
+f_template_c = formatters[f_template_c]
+f_template_d = formatters[f_template_d]
+
+function backgrounds.fetchmultipar(n,anchor,page,obeyhang)
local data = pbg[n]
if not data then
- data = calculatemultipar(n)
+ data = calculatemultipar(n,obeyhang)
pbg[n] = data -- can be replaced by register
-- register(data.list,n,anchor)
end
@@ -550,60 +568,66 @@ function backgrounds.fetchmultipar(n,anchor,page)
local pagedata = list[page]
if pagedata then
local nofmultipars = #pagedata
- -- report_graphics("fetching '%s' at page %s using anchor '%s' containing %s multipars",n,page,anchor,nofmultipars)
+ -- report_graphics("fetching %a at page %s using anchor %a containing %s multipars",n,page,anchor,nofmultipars)
local a = jobpositions.collected[anchor]
if not a then
- report_graphics("missing anchor '%s'",anchor)
+ report_graphics("missing anchor %a",anchor)
else
local trace = false
local x, y, w, h, d = a.x, a.y, a.w, a.h, a.d
local bpos = data.bpos
local bh, bd = bpos.h, bpos.d
- local result = { format(template_a,nofmultipars,pair(w,h+d),point(bh),point(bd),point(bh+bd)) }
+ local result = { f_template_a(nofmultipars,w,h+d,bh,bd,bh+bd) }
for i=1,nofmultipars do
local region = pagedata[i]
- result[#result+1] = format(template_b,
+ result[#result+1] = f_template_b(
i, multilocs[region.location],
i, region.location,
- i, path(region.area), pair(x,y-region.region.y))
+ i, region.area, x, y-region.region.y)
if trace then
- result[#result+1] = format(template_c,
- i, path(regionarea(region.region)), offset)
+ result[#result+1] = f_template_c(i, regionarea(region.region), offset)
end
end
data[page] = nil
- result[#result+1] = template_d
+ result[#result+1] = f_template_d()
result = concat(result,"\n")
return result
end
end
end
end
- return format(template_a,0,"origin",0,0,0)
+ return f_template_a(0,"origin",0,0,0)
end
-backgrounds.point = point
-backgrounds.pair = pair
-backgrounds.path = path
+backgrounds.point = f_point
+backgrounds.pair = f_pair
+backgrounds.path = f_path
function commands.fetchmultipar(n,anchor,page)
context(backgrounds.fetchmultipar(n,anchor,page))
end
-local template_a = [[
+function commands.fetchmultishape(n,anchor,page)
+ context(backgrounds.fetchmultipar(n,anchor,page,true))
+end
+
+local f_template_a = [[
path posboxes[], posregions[] ;
numeric pospages[] ;
numeric nofposboxes ;
nofposboxes := %s ;
-%s ;
+%t ;
]]
-local template_b = [[
+local f_template_b = [[
pospages[%s] := %s ;
-posboxes[%s] := %s--%s--%s--%s--cycle ;
-posregions[%s] := %s--%s--%s--%s--cycle ;
+posboxes[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ;
+posregions[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ;
]]
+f_template_a = formatters[f_template_a]
+f_template_b = formatters[f_template_b]
+
function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: anchor, page
local collected = jobpositions.collected
if type(tags) == "string" then
@@ -625,10 +649,10 @@ function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: an
local ch = cy + c.h
local cd = cy - c.d
nofboxes = nofboxes + 1
- list[nofboxes] = format(template_b,
+ list[nofboxes] = f_template_b(
nofboxes,c.p,
- nofboxes,pair(cx,ch),pair(cw,ch),pair(cw,cd),pair(cx,cd),
- nofboxes,pair(0,rh),pair(rw,rh),pair(rw,rd),pair(0,rd)
+ nofboxes,cx,ch,cw,ch,cw,cd,cx,cd,
+ nofboxes,0,rh,rw,rh,rw,rd,0,rd
)
end
end
@@ -636,16 +660,15 @@ function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: an
print("\n missing",tag)
end
end
- -- print(format(template_a,nofboxes,concat(list)))
- context(template_a,nofboxes,concat(list))
+ context(f_template_a(nofboxes,list))
end
local doifelse = commands.doifelse
-function commands.doifelsemultipar(n,page)
+function commands.doifelsemultipar(n,page,obeyhang)
local data = pbg[n]
if not data then
- data = calculatemultipar(n)
+ data = calculatemultipar(n,obeyhang)
pbg[n] = data
end
if page then
diff --git a/Master/texmf-dist/tex/context/base/anch-pgr.mkiv b/Master/texmf-dist/tex/context/base/anch-pgr.mkiv
index a417d26e3eb..01ef25dc4fc 100644
--- a/Master/texmf-dist/tex/context/base/anch-pgr.mkiv
+++ b/Master/texmf-dist/tex/context/base/anch-pgr.mkiv
@@ -33,7 +33,7 @@
\installcorenamespace{positionaction}
\installcorenamespace{positioncleanup}
-\unexpanded\def\dosetpositionaction#1%
+\unexpanded\def\anch_positions_set_action#1%
{\expandafter\gdef\csname\??positionaction#1\endcsname} % nicely gobbles spaces
\unexpanded\def\doifpositionaction#1%
@@ -111,7 +111,7 @@
{\begingroup
\edef\currentpositionanchor
{\ifx\currentpositionoverlay\empty#3\else\currentpositionoverlay::\MPanchoridentifier\fi}%
- \normalexpanded{\dosetpositionaction{\currentpositionanchor}{\noexpand\getvalue{\??positioncleanup\currentpositionanchor}}}%
+ \normalexpanded{\anch_positions_set_action{\currentpositionanchor}{\noexpand\getvalue{\??positioncleanup\currentpositionanchor}}}%
\let#1\relax
\ifcsname\??positioncleanup\currentpositionanchor\endcsname
\setxvalue{\??positioncleanup\currentpositionanchor}%
@@ -140,9 +140,6 @@
\fi
\fi\fi}
-% \def\anch_positions_register_page_indeed#1%
-% {\setbox#1\hbox{\hpos\pageanchor{\box#1}}}
-
\def\anch_positions_register_page_indeed#1% maybe like text
{\ifvbox#1\setbox#1\hbox{\box#1}\fi
\anch_make_page_box{#1}}
@@ -218,7 +215,6 @@
%D \typebuffer[graphic]
\def\MPanchoridentifier{mpa} % {mp-anchor}
-%def\MPoverlayposprefix{MO::} % not used
%D The rest of the definitions concerning such overlays may
%D look complicated,
@@ -237,8 +233,8 @@
\def\textbackgroundoverlay#1{\v!text#1}
\def\MPanchornumber {\the\realpageno}
-\def\positionoverlay % the test prevents too many redundant positions
- {\ifpositioning % in (not used) text* position layers
+\unexpanded\def\positionoverlay % the test prevents too many redundant positions
+ {\ifpositioning % in (not used) text* position layers
\expandafter\anch_positions_overlay_indeed
\else % also \iftrialtypesetting test here?
\expandafter\gobbleoneargument
@@ -269,7 +265,7 @@
\box\scratchbox
\vfill}}
-\def\positionregionoverlay % shares regions
+\unexpanded\def\positionregionoverlay % shares regions
{\ifpositioning
\expandafter\anch_positions_region_overlay_indeed
\else % also \iftrialtypesetting test here?
@@ -308,9 +304,15 @@
\expandafter\anch_positions_overlay_start_yes
\fi}
+\let\stoppositionoverlay\relax
+
\def\anch_positions_overlay_start_nop#1\stoppositionoverlay
{}
+\ifdefined\checkpositionoverlays \else \let\checkpositionoverlays\relax \fi
+
+\let\currentpositionoverlay\empty
+
\def\anch_positions_overlay_start_yes#1%
{\checkpositionoverlays
\edef\currentpositionoverlay{#1}}
@@ -318,9 +320,11 @@
\unexpanded\def\stoppositionoverlay
{\let\currentpositionoverlay\empty}
+% needs checking if still needed
+%
% \def\resetpositionoverlay#1%
-% {\dosetpositionaction{#1::\MPanchoridentifier::}{}}
-
+% {\anch_positions_set_action{#1::\MPanchoridentifier::}{}}
+%
% \def\handlepositionboxes#1#2#3%
% {\handlepositionaction\dohandlepositionboxes\with{#1}{#2}{#3}\on{#2}}
%
@@ -332,15 +336,15 @@
% \appendtoks
% \let\dohandlepositionboxes\doinsertpositionboxes % was handle ?
% \to \everyinsertpositionaction
-
-\def\docleanpositionboxes#1#2#3% pos tag setups
- {\ifnum\MPp{#1}<\realpageno \else
- \noexpand \dohandlepositionboxes{#1}{#2}{#3}% reinsert
- \fi}
-
-\appendtoks
- \let\dohandlepositionboxes\docleanpositionboxes
-\to \everycleanpositionaction
+%
+% \def\docleanpositionboxes#1#2#3% pos tag setups
+% {\ifnum\MPp{#1}<\realpageno \else
+% \noexpand \dohandlepositionboxes{#1}{#2}{#3}% reinsert
+% \fi}
+%
+% \appendtoks
+% \let\dohandlepositionboxes\docleanpositionboxes
+% \to \everycleanpositionaction
%D A position graphic is a normal (non||reused) \METAPOST\
%D graphic, used immediately, with zero dimensions, so that a
@@ -350,7 +354,7 @@
\installcorenamespace{positionmethod}
%installcorenamespace{graphicvariable}
-\newbox\positiongraphicbox
+\newbox\b_anch_positions_graphic
\def\startMPpositiongraphic % id setups
{\dodoublegroupempty\anch_positions_meta_graphic_start}
@@ -361,11 +365,11 @@
\let\stopMPpositiongraphic\relax
\def\anch_positions_meta_graphic_prepare
- {\ifcsname\??gv\currentmpvariableclass:self\endcsname \else
- \letvalue{\??gv\currentmpvariableclass:self}\currentposition
+ {\ifcsname\??graphicvariable\currentmpvariableclass:self\endcsname \else
+ \letvalue{\??graphicvariable\currentmpvariableclass:self}\currentposition
\fi
- \ifcsname\??gv\currentmpvariableclass:from\endcsname \else
- \letvalue{\??gv\currentmpvariableclass:from}\currentposition
+ \ifcsname\??graphicvariable\currentmpvariableclass:from\endcsname \else
+ \letvalue{\??graphicvariable\currentmpvariableclass:from}\currentposition
\fi}
\def\anch_positions_meta_graphic_use#1#2#3%
@@ -375,7 +379,7 @@
\startMPcode#3\stopMPcode
\endgroup}
-\def\MPpositiongraphic
+\unexpanded\def\MPpositiongraphic
{\dodoublegroupempty\anch_positions_meta_graphic_direct}
\def\anch_positions_meta_graphic_direct#1% tag setups
@@ -402,9 +406,9 @@
\anch_positions_meta_graphic_prepare
\obeyMPboxorigin % do we also set the size ? when needed this must be done in mp ... might change
\def\anch_positions_meta_graphic_direct{\anch_positions_meta_graphic_nested{#3}}% takes two extra arguments
- \setbox\positiongraphicbox\hbox{\ignorespaces\csname#1#2\endcsname\removelastspace}%
- \smashbox\positiongraphicbox
- \box\positiongraphicbox
+ \setbox\b_anch_positions_graphic\hbox{\ignorespaces\csname#1#2\endcsname\removelastspace}%
+ \smashbox\b_anch_positions_graphic
+ \box\b_anch_positions_graphic
\endgroup}
\def\anch_positions_meta_graphic_nested#1#2#3% nesting used in prikkels / pascal (might go away)
@@ -422,77 +426,77 @@
%D Simple one position graphics.
-\def\setMPpositiongraphic
- {\dotriplegroupempty\dosetMPpositiongraphic}
+\unexpanded\def\setMPpositiongraphic
+ {\dotriplegroupempty\anch_positions_meta_graphic_set}
-\def\dosetMPpositiongraphic#1#2#3% pos tag vars
+\def\anch_positions_meta_graphic_set#1#2#3% pos tag vars
{\ifx\currentpositionoverlay\empty
- \dosetpositionaction{#1}{\MPpositiongraphic{#2}{#3}}%
+ \anch_positions_set_action{#1}{\MPpositiongraphic{#2}{#3}}%
\else % silly can be one
- \handlepositiongraphics{#1}{#2}{#3}%
+ \anch_positions_meta_graphic_handle{#1}{#2}{#3}%
\fi}
-\def\handlepositiongraphics#1#2#3% combine with boxes
- {\handlepositionaction\dohandleMPpositiongraphic\with{#1}{#2}{#3}\on{#2}}
+\def\anch_positions_meta_graphic_handle#1#2#3% combine with boxes
+ {\handlepositionaction\anch_positions_meta_graphic_handle_indeed\with{#1}{#2}{#3}\on{#2}}
-\def\doinsertMPpositiongraphic#1#2#3% pos tag setups
+\def\anch_positions_meta_graphic_insert#1#2#3% pos tag setups
{\ifnum\MPp{#1}=\realpageno\relax % extra saveguard
\def\currentposition{#1}\MPpositiongraphic{#2}{#3}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphic\doinsertMPpositiongraphic
+ \let\anch_positions_meta_graphic_handle_indeed\anch_positions_meta_graphic_insert
\to \everyinsertpositionaction
-\def\docleanMPpositiongraphic#1#2#3% pos tag setups
+\def\anch_positions_meta_graphic_cleanup#1#2#3% pos tag setups
{\ifnum\MPp{#1}<\realpageno \else
- \noexpand\dohandleMPpositiongraphic{#1}{#2}{#3}%
+ \noexpand\anch_positions_meta_graphic_handle_indeed{#1}{#2}{#3}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphic\docleanMPpositiongraphic
+ \let\anch_positions_meta_graphic_handle_indeed\anch_positions_meta_graphic_cleanup
\to \everycleanpositionaction
%D Graphics that span two positions (beware, does not cross pages).
\unexpanded\def\setMPpositiongraphicrange
- {\doquadruplegroupempty\dosetMPpositiongraphicrange}
+ {\doquadruplegroupempty\anch_positions_meta_graphic_set_range}
-\def\dosetMPpositiongraphicrange#1#2#3#4% bpos epos tag vars
+\def\anch_positions_meta_graphic_set_range#1#2#3#4% bpos epos tag vars
{\ifx\currentpositionoverlay\empty
- \dosetpositionaction{#1}{\MPpositiongraphic{#3}{#4}}%
+ \anch_positions_set_action{#1}{\MPpositiongraphic{#3}{#4}}%
\else
- \handlepositiongraphicsrange{#1}{#2}{#3}{#4}%
+ \anch_positions_meta_graphic_handle_range{#1}{#2}{#3}{#4}%
\fi}
-\def\handlepositiongraphicsrange#1#2#3#4%
- {\handlepositionaction\dohandleMPpositiongraphicrange\with{#1}{#2}{#3}{#4}\on{#2}}
+\def\anch_positions_meta_graphic_handle_range#1#2#3#4%
+ {\handlepositionaction\anch_positions_meta_graphic_handle_range_indeed\with{#1}{#2}{#3}{#4}\on{#2}}
-\def\doinsertMPpositiongraphicrange#1#2#3#4% pos pos tag setups
+\def\anch_positions_meta_graphic_insert_range#1#2#3#4% pos pos tag setups
{\ctxcommand{doifelserangeonpage("#1","#2",\number\realpageno)}%
- {%\writestatus{YES}{#1/#2 => #3}%
- \def\currentposition{#1}%
+ {\def\currentposition{#1}%
\MPpositiongraphic{#3}{#4}}%
- {}}%\writestatus{NOP}{#1/#2 = #3}}}
+ {}}
\appendtoks
- \let\dohandleMPpositiongraphicrange\doinsertMPpositiongraphicrange
+ \let\anch_positions_meta_graphic_handle_range_indeed\anch_positions_meta_graphic_insert_range
\to \everyinsertpositionaction
-\def\docleanMPpositiongraphicrange#1#2#3#4% pos tag setups
+\def\anch_positions_meta_graphic_cleanup_range#1#2#3#4% pos tag setups
{\ifnum\MPp{#2}<\realpageno \else
- \noexpand \dohandleMPpositiongraphicrange{#1}{#2}{#3}{#4}%
+ \noexpand \anch_positions_meta_graphic_handle_range_indeed{#1}{#2}{#3}{#4}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphicrange\docleanMPpositiongraphicrange
+ \let\anch_positions_meta_graphic_handle_range_indeed\anch_positions_meta_graphic_cleanup_range
\to \everycleanpositionaction
-\let\dohandleMPpositiongraphicrange\gobblefourarguments
+\let\anch_positions_meta_graphic_handle_range_indeed\gobblefourarguments
% Helpers:
-\def\MPgetposboxes #1#2{\ctxcommand{fetchposboxes("#1","#2",\the\realpageno)}}
-\def\MPgetmultipars#1#2{\ctxcommand{fetchmultipar("#1","#2",\the\realpageno)}}
+\def\MPgetposboxes #1#2{\ctxcommand{fetchposboxes("#1","#2",\the\realpageno)}}
+\def\MPgetmultipars #1#2{\ctxcommand{fetchmultipar("#1","#2",\the\realpageno)}}
+\def\MPgetmultishapes#1#2{\ctxcommand{fetchmultishape("#1","#2",\the\realpageno)}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/anch-pos.lua b/Master/texmf-dist/tex/context/base/anch-pos.lua
index 5d01a6e466d..2697cecf4e8 100644
--- a/Master/texmf-dist/tex/context/base/anch-pos.lua
+++ b/Master/texmf-dist/tex/context/base/anch-pos.lua
@@ -17,8 +17,10 @@ more efficient.</p>
-- maybe replace texsp by our own converter (stay at the lua end)
-- eventually mp will have large numbers so we can use sp there too
+local commands, context = commands, context
+
local tostring, next, rawget, setmetatable = tostring, next, rawget, setmetatable
-local concat, sort = table.concat, table.sort
+local sort = table.sort
local format, gmatch, match = string.format, string.gmatch, string.match
local rawget = rawget
local lpegmatch = lpeg.match
@@ -39,6 +41,7 @@ local v_column = variables.column
local pt = number.dimenfactors.pt
local pts = number.pts
+local formatters = string.formatters
local collected = allocate()
local tobesaved = allocate()
@@ -71,6 +74,23 @@ local default = { -- not r and paragraphs etc
}
}
+local f_b_tag = formatters["b:%s"]
+local f_e_tag = formatters["e:%s"]
+local f_p_tag = formatters["p:%s"]
+local f_w_tag = formatters["w:%s"]
+
+local f_b_column = formatters["_plib_.b_col(%q)"]
+local f_e_column = formatters["_plib_.e_col()"]
+
+local f_enhance = formatters["_plib_.enhance(%q)"]
+local f_region = formatters["region:%s"]
+
+local f_b_region = formatters["_plib_.b_region(%q)"]
+local f_e_region = formatters["_plib_.e_region(%s)"]
+
+local f_tag_three = formatters["%s:%s:%s"]
+local f_tag_two = formatters["%s:%s"]
+
local function sorter(a,b)
return a.y > b.y
end
@@ -275,13 +295,13 @@ function commands.bcolumn(tag,register)
insert(columns,tag)
column = tag
if register then
- context(new_latelua(format("_plib_.b_col(%q)",tag)))
+ context(new_latelua(f_b_column(tag)))
end
end
function commands.ecolumn(register)
if register then
- context(new_latelua("_plib_.e_col()"))
+ context(new_latelua(f_e_column()))
end
remove(columns)
column = columns[#columns]
@@ -311,7 +331,7 @@ end
function jobpositions.markregionbox(n,tag,correct)
if not tag or tag == "" then
nofregions = nofregions + 1
- tag = format("region:%s",nofregions)
+ tag = f_region(nofregions)
end
local box = texbox[n]
local w = box.width
@@ -325,8 +345,8 @@ function jobpositions.markregionbox(n,tag,correct)
h = h ~= 0 and h or nil,
d = d ~= 0 and d or nil,
}
- local push = new_latelua(format("_plib_.b_region(%q)",tag))
- local pop = new_latelua(format("_plib_.e_region(%s)",tostring(correct)))
+ local push = new_latelua(f_b_region(tag))
+ local pop = new_latelua(f_e_region(tostring(correct))) -- todo: check if tostring is needed with formatter
-- maybe we should construct a hbox first (needs experimenting) so that we can avoid some at the tex end
local head = box.list
if head then
@@ -348,7 +368,7 @@ end
function commands.pos(name,t)
tobesaved[name] = t
- context(new_latelua(format("_plib_.enhance(%q)",name)))
+ context(new_latelua(f_enhance(name)))
end
local nofparagraphs = 0
@@ -391,9 +411,9 @@ function commands.parpos() -- todo: relate to localpar (so this is an intermedia
if parshape and #parshape > 0 then
t.ps = parshape
end
- local tag = format("p:%s",nofparagraphs)
+ local tag = f_p_tag(nofparagraphs)
tobesaved[tag] = t
- context(new_latelua(format("_plib_.enhance(%q)",tag)))
+ context(new_latelua(f_enhance(tag)))
end
function commands.posxy(name) -- can node.write be used here?
@@ -405,7 +425,7 @@ function commands.posxy(name) -- can node.write be used here?
y = true,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(format("_plib_.enhance(%q)",name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.poswhd(name,w,h,d)
@@ -420,7 +440,7 @@ function commands.poswhd(name,w,h,d)
d = d,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(format("_plib_.enhance(%q)",name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.posplus(name,w,h,d,extra)
@@ -436,7 +456,7 @@ function commands.posplus(name,w,h,d,extra)
n = nofparagraphs > 0 and nofparagraphs or nil,
e = extra,
}
- context(new_latelua(format("_plib_.enhance(%q)",name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.posstrut(name,w,h,d)
@@ -451,12 +471,12 @@ function commands.posstrut(name,w,h,d)
d = strutbox.depth,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(format("_plib_.enhance(%q)",name)))
+ context(new_latelua(f_enhance(name)))
end
function jobpositions.getreserved(tag,n)
if tag == v_column then
- local fulltag = format("%s:%s:%s",tag,texcount.realpageno,n or 1)
+ local fulltag = f_tag_three(tag,texcount.realpageno,n or 1)
local data = collected[fulltag]
if data then
return data, fulltag
@@ -464,7 +484,7 @@ function jobpositions.getreserved(tag,n)
tag = v_text
end
if tag == v_text then
- local fulltag = format("%s:%s",tag,texcount.realpageno)
+ local fulltag = f_tag_two(tag,texcount.realpageno)
return collected[fulltag] or false, fulltag
end
return collected[tag] or false, tag
@@ -885,7 +905,7 @@ end
local function MPpardata(n)
local t = collected[n]
if not t then
- local tag = format("p:%s",n)
+ local tag = f_p_tag(n)
t = collected[tag]
end
if t then
@@ -905,10 +925,10 @@ end
commands.MPpardata = MPpardata
function commands.MPposset(id) -- special helper, used in backgrounds
- local b = format("b:%s",id)
- local e = format("e:%s",id)
- local w = format("w:%s",id)
- local p = format("p:%s",jobpositions.n(b))
+ local b = f_b_tag(id)
+ local e = f_e_tag(id)
+ local w = f_w_tag(id)
+ local p = f_p_tag(jobpositions.n(b))
MPpos(b) context(",") MPpos(e) context(",") MPpos(w) context(",") MPpos(p) context(",") MPpardata(p)
end
@@ -969,35 +989,35 @@ function commands.MPxywhd(id)
end
end
--- is testcase already defined? if so, then local
+local doif, doifelse = commands.doif, commands.doifelse
function commands.doifpositionelse(name)
- commands.doifelse(collected[name])
+ doifelse(collected[name])
end
function commands.doifposition(name)
- commands.doif(collected[name])
+ doif(collected[name])
end
function commands.doifpositiononpage(name,page) -- probably always realpageno
local c = collected[name]
- commands.testcase(c and c.p == page)
+ doifelse(c and c.p == page)
end
function commands.doifoverlappingelse(one,two,overlappingmargin)
- commands.testcase(overlapping(one,two,overlappingmargin))
+ doifelse(overlapping(one,two,overlappingmargin))
end
function commands.doifpositionsonsamepageelse(list,page)
- commands.testcase(onsamepage(list))
+ doifelse(onsamepage(list))
end
function commands.doifpositionsonthispageelse(list)
- commands.testcase(onsamepage(list,tostring(tex.count.realpageno)))
+ doifelse(onsamepage(list,tostring(tex.count.realpageno)))
end
function commands.doifelsepositionsused()
- commands.testcase(next(collected))
+ doifelse(next(collected))
end
commands.markcolumnbox = jobpositions.markcolumnbox
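The doif/doifelse rewrites above apply the same micro-optimisation as the new local commands, context = commands, context line at the top of the file: resolve a global table field once and keep the result in a local. A condensed restatement of the idiom, assuming the module-local collected table as in this file:

    local doif, doifelse = commands.doif, commands.doifelse   -- resolved once at load time

    function commands.doifposition(name)
        doif(collected[name])    -- per-call cost is now a local access plus the call itself
    end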
diff --git a/Master/texmf-dist/tex/context/base/anch-pos.mkiv b/Master/texmf-dist/tex/context/base/anch-pos.mkiv
index 2e13eeba2cc..7ecaa296ed8 100644
--- a/Master/texmf-dist/tex/context/base/anch-pos.mkiv
+++ b/Master/texmf-dist/tex/context/base/anch-pos.mkiv
@@ -361,9 +361,9 @@
\startcolor[blue]%
\llap{\infofont\number\c_anch_positions_paragraph}%
\vrule
- \!!width 4\onepoint
- \!!height2\onepoint
- \!!depth 2\onepoint
+ \s!width 4\onepoint
+ \s!height2\onepoint
+ \s!depth 2\onepoint
\stopcolor
\hss}}
@@ -377,7 +377,7 @@
{\smashedhbox
{#1{\infofont#2#3}%
\kern-\onepoint
- \vrule\!!width2\onepoint\!!height\halfapoint\!!depth\halfapoint}}
+ \vrule\s!width2\onepoint\s!height\halfapoint\s!depth\halfapoint}}
\unexpanded\def\anch_positions_trace_left_indeed
{\anch_positions_trace\llap\darkmagenta{\currentposition>}}
diff --git a/Master/texmf-dist/tex/context/base/anch-tab.mkiv b/Master/texmf-dist/tex/context/base/anch-tab.mkiv
index 4bdaa2ef984..a70f63e24bf 100644
--- a/Master/texmf-dist/tex/context/base/anch-tab.mkiv
+++ b/Master/texmf-dist/tex/context/base/anch-tab.mkiv
@@ -13,6 +13,9 @@
\writestatus{loading}{ConTeXt Anchoring Macros / Table Extensions}
+%D This is just a playground and functionality might change or even
+%D disappear in favour of better solutions.
+
\unprotect
\newcount\c_anch_tabs
@@ -57,23 +60,22 @@
\global\advance\c_anch_tabs\plusone
\to \everytabulate
-%D Beware, the following code is somewhat weird and experimental
-%D and might be dropped or become a loadable module.
+%D Beware, the following code is somewhat weird and experimental and might be
+%D dropped or become a loadable module.
%D \macros
%D {GFC, GTC, GSC}
%D
-%D The next macros extend tables and tabulation with
-%D backgrounds and position related features. Areas are
-%D specified with symbolic names, and symbolic references to
-%D the graphics involved. Each table has its own namespace.
+%D The next macros extend tables and tabulation with backgrounds and position
+%D related features. Areas are specified with symbolic names, and symbolic
+%D references to the graphics involved. Each table has its own namespace.
\newconditional\tablehaspositions
-\newcount\noftabpositions
-\newtoks \posXCtoks
+\newcount \noftabpositions
+\newtoks \posXCtoks
-\def\dotablebpos{\bpos}
-\def\dotableepos{\epos}
+\def\anch_tabulate_bpos{\bpos}
+\def\anch_tabulate_epos{\epos}
\installcorenamespace{positiontables}
@@ -85,74 +87,80 @@
\let\tabulatepos\tablepos
-\def\dodododoGSC[#1:#2]%
- {\remappositionframed{#2}{\tbPOSprefix#1}%
- \dotablebpos{\tbPOSprefix#1}%
- \doglobal\appendtoks\@EA\dotableepos\@EA{\tbPOSprefix#1}\to\posXCtoks}
+\unexpanded\def\tbXC {\dosingleempty\anch_tables_XC }
+\unexpanded\def\tbGSC{\dosingleempty\anch_tables_GSC}
+\unexpanded\def\tbGFC{\dosingleempty\anch_tables_GFC}
+\unexpanded\def\tbGTC{\dosingleempty\anch_tables_GTC}
-\def\dododoGSC[#1:#2:#3]%
- {\doglobal\appendtoks\dodododoGSC[#1:#2]\to\posXCtoks\NC}
+\def\anch_table_check_state
+ {\iftrialtypesetting
+ \global\settrue\tablehaspositions
+ \firstargumentfalse
+ \fi}
-\def\dodoGSC[#1]%
- {\def\docommand##1{\dododoGSC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_XC [#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_XC [#1]\else\expandafter\fi\NC}
+\def\anch_tables_GSC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GSC[#1]\else\expandafter\NC\fi}
+\def\anch_tables_GFC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GFC[#1]\else\expandafter\NC\fi}
+\def\anch_tables_GTC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GTC[#1]\else\expandafter\NC\fi}
-\def\dodododoGFC[#1:#2:#3]%
- {\remappositionframed{#2}{\tbPOSprefix#1}%
- \dotablebpos{\tbPOSprefix#1}}
+\def\anch_tables_indeed_XC[#1]%
+ {{\let\NC\relax\processcommalist[#1]\anch_tables_step_XC}}
-\def\dododoGFC[#1]%
- {\def\docommand##1{\dodododoGFC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_step_XC#1%
+ {\anch_tables_step_indeed_XC[#1]}
-\def\dodoGFC[#1]%
- {\doglobal\appendtoks\dododoGFC[#1]\to\posXCtoks\NC}
+\def\anch_tables_step_indeed_XC[#1#2]%
+ {\if#1>\anch_tables_indeed_GFC [#2:#2]\else
+ \if#1+\anch_tables_indeed_GFC [#2:#2]\else
+ \if#1<\anch_tables_indeed_GTC [#2:#2]\else
+ \if#1-\anch_tables_indeed_GTC [#2:#2]\else
+ \if#1=\anch_tables_indeed_GSC [#2:#2]\else
+ \anch_tables_indeed_GSC[#1#2:#1#2]\fi\fi\fi\fi\fi}
-\def\dododododoGTC[#1:#2]%
- {\dotableepos{\tbPOSprefix#1}}
+\def\anch_tables_indeed_GSC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GSC}
-\def\dodododoGTC[#1]%
- {\def\docommand##1{\dododododoGTC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_step_GSC#1%
+ {\anch_tables_append_GSC[#1:#1]}
-\def\dododoGTC[#1]%
- {\doglobal\appendtoks\dodododoGTC[#1]\to\posXCtoks}
+\def\anch_tables_append_GSC[#1:#2:#3]%
+ {\doglobal\appendtoks\anch_tables_process_GSC[#1:#2]\to\posXCtoks\NC}
-\def\dodoGTC[#1]%
- {\doglobal\appendtoks\dododoGTC[#1]\to\posXCtoks\NC}
+\def\anch_tables_process_GSC[#1:#2]%
+ {\remappositionframed{#2}{\tbPOSprefix#1}%
+ \anch_tabulate_bpos{\tbPOSprefix#1}%
+ \doglobal\appendtoks\@EA\anch_tabulate_epos\@EA{\tbPOSprefix#1}\to\posXCtoks}
-\def\dodododoXC[#1#2]%
- {\if#1>\dodoGFC [#2:#2]\else
- \if#1+\dodoGFC [#2:#2]\else
- \if#1<\dodoGTC [#2:#2]\else
- \if#1-\dodoGTC [#2:#2]\else
- \if#1=\dodoGSC [#2:#2]\else
- \dodoGSC[#1#2:#1#2]\fi\fi\fi\fi\fi}
+\def\anch_tables_indeed_GFC[#1]%
+ {\doglobal\appendtoks\anch_tables_delayed_GFC[#1]\to\posXCtoks\NC}
-\def\dododoXC#1%
- {\dodododoXC[#1]}
+\def\anch_tables_delayed_GFC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GFC}
-\def\dodoXC[#1]%
- {{\let\NC\relax\processcommalist[#1]\dododoXC}}
+\def\anch_tables_step_GFC#1%
+ {\anch_tables_process_GFC[#1:#1]}
-\def\@@checktablepositionstate
- {\iftrialtypesetting
- \global\settrue\tablehaspositions
- \firstargumentfalse
- \fi}
+\def\anch_tables_process_GFC[#1:#2:#3]%
+ {\remappositionframed{#2}{\tbPOSprefix#1}%
+ \anch_tabulate_bpos{\tbPOSprefix#1}}
+
+\def\anch_tables_indeed_GTC[#1]%
+ {\doglobal\appendtoks\anch_tables_delayed_GTC[#1]\to\posXCtoks\NC}
-\def\doGSC[#1]{\@@checktablepositionstate\iffirstargument\dodoGSC[#1]\else\expandafter\NC\fi}
-\def\doGFC[#1]{\@@checktablepositionstate\iffirstargument\dodoGFC[#1]\else\expandafter\NC\fi}
-\def\doGTC[#1]{\@@checktablepositionstate\iffirstargument\dodoGTC[#1]\else\expandafter\NC\fi}
-\def\doXC [#1]{\@@checktablepositionstate\iffirstargument\dodoXC [#1]\else\expandafter\fi\NC}
+\def\anch_tables_delayed_GTC[#1]%
+ {\doglobal\appendtoks\anch_tables_process_GTC[#1]\to\posXCtoks}
-\def\tbGSC{\dosingleempty\doGSC}
-\def\tbGFC{\dosingleempty\doGFC}
-\def\tbGTC{\dosingleempty\doGTC}
-\def\tbXC {\dosingleempty\doXC }
+\def\anch_tables_process_GTC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GTC}
-%D The amount of code to support tables and tabulation is
-%D rather minimalistic.
+\def\anch_tables_step_GTC#1%
+ {\anch_tables_step_indeed_GTC[#1:#1]}
+
+\def\anch_tables_step_indeed_GTC[#1:#2]%
+ {\anch_tabulate_epos{\tbPOSprefix#1}}
+
+%D The amount of code to support tables and tabulation is rather
+%D minimalistic.
\let\tabulatepos\tablepos
@@ -177,7 +185,7 @@
\def\tabulateEQpos
{\setbox\scratchbox\hbox{\tabulateEQ}%
\hbox to \wd\scratchbox{\hss\kern\zeropoint\tabulatepos\hss}%
- \hskip-\wd\scratchbox
+ \kern-\wd\scratchbox
\box\scratchbox}
\appendtoks
@@ -187,31 +195,30 @@
% We need to handle paragraphs as well.
-\let\doflushtabulateepos\relax
+\let\anch_tabulate_flush_epos\relax
-\def\dotabulatebpos
+\def\anch_tabulate_bpos_indeed
{\bpos}
-\def\dotabulateepos#1%
+\def\anch_tabulate_epos_indeed#1%
{\ifvoid\b_tabl_tabulate_current\c_tabl_tabulate_column
\epos{#1}%
- \glet\doflushtabulateepos\relax
+ \glet\anch_tabulate_flush_epos\relax
\else
- \gdef\doflushtabulateepos{\epos{#1}}%
+ \gdef\anch_tabulate_flush_epos{\epos{#1}}%
\fi}
\def\flushtabulatesplitbox
{\box\b_tabl_tabulate
- \iftrialtypesetting\else\ifconditional\tablehaspositions\doflushtabulateepos\fi\fi}
+ \iftrialtypesetting\else\ifconditional\tablehaspositions\anch_tabulate_flush_epos\fi\fi}
\appendtoks
- \let\dotablebpos\dotabulatebpos % ?
- \let\dotableepos\dotabulateepos % ?
- \glet\doflushtabulateepos\relax
+ \let\anch_tabulate_bpos\anch_tabulate_bpos_indeed % ?
+ \let\anch_tabulate_epos\anch_tabulate_epos_indeed % ?
+ \glet\anch_tabulate_flush_epos\relax
\to \everytabulate
-%D In order to prevent potential clashes with abbreviations,
-%D postpone the mapping.
+%D In order to prevent potential clashes with abbreviations, postpone the mapping.
\appendtoks
\let\GSC\tbGSC
@@ -223,8 +230,8 @@
%D \macros
%D {definepositionframed}
%D
-%D The next example show how to provide backgrounds to table
-%D cells. First we define some framed backgrounds.
+%D The next example shows how to provide backgrounds to table cells. First we define
+%D some framed backgrounds.
%D
%D \startbuffer
%D \definepositionframed[x][background=color,backgroundcolor=red]
@@ -244,8 +251,7 @@
%D \stoptabulate
%D \stopbuffer
%D
-%D The table itself defines three areas (a, b and~c) using
-%D these frames.
+%D The table itself defines three areas (a, b and~c) using these frames.
%D
%D \typebuffer
%D % \getbuffer
@@ -270,9 +276,8 @@
\let\XC \tbXC
\to \everytable
-%D In the previous example, we could have provided an overlay to
-%D the framed definition. A more direct approach is demonstrated
-%D below:
+%D In the previous example, we could have provided an overlay to the framed definition.
+%D A more direct approach is demonstrated below:
%D
%D \startbuffer
%D \def\cw#1{\color[white]{#1}}
@@ -291,8 +296,7 @@
%D
%D \typebuffer \getbuffer
%D
-%D The definition of the table looks about the same as the
-%D previous one:
+%D The definition of the table looks about the same as the previous one:
%D
%D \startbuffer
%D \starttable[|c|c|c|]
@@ -347,22 +351,26 @@
% \definepositionframed[y][background=color,fillcolor=green]
% \definepositionframed[z][background=color,fillcolor=blue]
-\def\remappositionframed#1#2% from to
+\unexpanded\def\remappositionframed#1#2% from to
{\copyposition{b:#1}{b:#2}%
\copyposition{e:#1}{e:#2}%
- \dosetpositionaction{b:#2}{\dopositionaction{b:#1}}}
+ \anch_positions_set_action{b:#2}{\dopositionaction{b:#1}}}
\unexpanded\def\definepositionframed
- {\dodoubleargument\dodefinepositionframed}
+ {\dodoubleargument\anch_framed_define}
-\def\dodefinepositionframed[#1][#2]%
- {\dosetpositionaction{b:#1}{\dopositionframed[#1][#2]}}
+\def\anch_framed_define[#1][#2]%
+ {\anch_positions_set_action{b:#1}{\anch_framed_handle[#1][#2]}}
+
+\unexpanded\def\anch_framed_handle
+ {\bgroup
+ \anch_framed_indeed}
\unexpanded\def\positionframed
{\bgroup
- \dodoubleempty\dopositionframed}
+ \dodoubleempty\anch_framed_indeed}
-\def\dopositionframed[#1][#2]%
+\def\anch_framed_indeed[#1][#2]%
{\setbox\scratchbox\hbox
{\scratchwidth \dimexpr\MPx{e:#1}-\MPx{b:#1}\relax
\scratchdepth \dimexpr\MPy{b:#1}-\MPy{e:#1}+\MPd{e:#1}\relax
diff --git a/Master/texmf-dist/tex/context/base/attr-col.lua b/Master/texmf-dist/tex/context/base/attr-col.lua
index c592d1dc1cb..7c6b7909ba2 100644
--- a/Master/texmf-dist/tex/context/base/attr-col.lua
+++ b/Master/texmf-dist/tex/context/base/attr-col.lua
@@ -15,8 +15,11 @@ if not modules then modules = { } end modules ['attr-col'] = {
local type = type
local format = string.format
local concat = table.concat
+local min, max, floor = math.min, math.max, math.floor
-local attributes, nodes = attributes, nodes
+local attributes, nodes, utilities, logs, backends, storage = attributes, nodes, utilities, logs, backends, storage
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -31,11 +34,14 @@ local report_transparencies = logs.reporter("transparencies","support")
-- nb: attributes: color etc is much slower than normal (marks + literals) but ...
-- nb. too many "0 g"s
-local states = attributes.states
-local tasks = nodes.tasks
-local nodeinjections = backends.nodeinjections
-local registrations = backends.registrations
-local unsetvalue = attributes.unsetvalue
+local states = attributes.states
+local tasks = nodes.tasks
+local nodeinjections = backends.nodeinjections
+local registrations = backends.registrations
+local unsetvalue = attributes.unsetvalue
+
+local registerstorage = storage.register
+local formatters = string.formatters
-- We can distinguish between rules and glyphs but it's not worth the trouble. A
-- first implementation did that and while it saves a bit for glyphs and rules, it
@@ -63,7 +69,7 @@ local unsetvalue = attributes.unsetvalue
-- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")"
-- end
--
--- storage.register("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated
+-- registerstorage("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated
--
-- We assume that only processcolors are defined in the format.
@@ -83,15 +89,23 @@ colors.default = 1
colors.main = nil
colors.triggering = true
colors.supported = true
+colors.model = "all"
+
+local data = colors.data
+local values = colors.values
+local registered = colors.registered
-storage.register("attributes/colors/values", colors.values, "attributes.colors.values")
-storage.register("attributes/colors/registered", colors.registered, "attributes.colors.registered")
+local numbers = attributes.numbers
+local list = attributes.list
-local templates = {
- rgb = "r:%s:%s:%s",
- cmyk = "c:%s:%s:%s:%s",
- gray = "s:%s",
- spot = "p:%s:%s:%s:%s"
+registerstorage("attributes/colors/values", values, "attributes.colors.values")
+registerstorage("attributes/colors/registered", registered, "attributes.colors.registered")
+
+local f_colors = {
+ rgb = formatters["r:%s:%s:%s"],
+ cmyk = formatters["c:%s:%s:%s:%s"],
+ gray = formatters["s:%s"],
+ spot = formatters["p:%s:%s:%s:%s"],
}
local models = {
@@ -104,17 +118,6 @@ local models = {
cmyk = 4,
}
-colors.model = "all"
-
-local data = colors.data
-local values = colors.values
-local registered = colors.registered
-
-local numbers = attributes.numbers
-local list = attributes.list
-
-local min, max, floor = math.min, math.max, math.floor
-
local function rgbtocmyk(r,g,b) -- we could reduce
return 1-r, 1-g, 1-b, 0
end
@@ -125,9 +128,9 @@ end
local function rgbtogray(r,g,b)
if colors.weightgray then
- return .30*r+.59*g+.11*b
+ return .30*r + .59*g + .11*b
else
- return r/3+g/3+b/3
+ return r/3 + g/3 + b/3
end
end
@@ -135,6 +138,17 @@ local function cmyktogray(c,m,y,k)
return rgbtogray(cmyktorgb(c,m,y,k))
end
+-- not critical so not needed:
+--
+-- local function cmyktogray(c,m,y,k)
+-- local r, g, b = 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
+-- if colors.weightgray then
+-- return .30*r + .59*g + .11*b
+-- else
+-- return r/3 + g/3 + b/3
+-- end
+-- end
+
-- http://en.wikipedia.org/wiki/HSI_color_space
-- http://nl.wikipedia.org/wiki/HSV_(kleurruimte)
@@ -271,7 +285,7 @@ local function reviver(data,n)
if not v then
local gray = graycolor(0)
d = { gray, gray, gray, gray }
- report_attributes("unable to revive color %s",n or "?")
+ report_attributes("unable to revive color %a",n)
else
local model = colors.forcedmodel(v[1])
if model == 2 then
@@ -309,7 +323,7 @@ function colors.setmodel(name,weightgray)
end
function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often)
- local stamp = format(templates[colorspace],...)
+ local stamp = f_colors[colorspace](...)
local color = registered[stamp]
if not color then
color = #values + 1
@@ -363,15 +377,15 @@ transparencies.triggering = true
transparencies.attribute = a_transparency
transparencies.supported = true
-storage.register("attributes/transparencies/registered", transparencies.registered, "attributes.transparencies.registered")
-storage.register("attributes/transparencies/values", transparencies.values, "attributes.transparencies.values")
+local registered = transparencies.registered -- we could use a 2 dimensional table instead
+local data = transparencies.data
+local values = transparencies.values
+local f_transparency = formatters["%s:%s"]
-local registered = transparencies.registered -- we could use a 2 dimensional table instead
-local data = transparencies.data
-local values = transparencies.values
-local template = "%s:%s"
+registerstorage("attributes/transparencies/registered", registered, "attributes.transparencies.registered")
+registerstorage("attributes/transparencies/values", values, "attributes.transparencies.values")
-local function inject_transparency (...)
+local function inject_transparency(...)
inject_transparency = nodeinjections.transparency
return inject_transparency(...)
end
@@ -386,7 +400,7 @@ function transparencies.register(name,a,t,force) -- name is irrelevant here (can
-- but then we'd end up with transparencies resources even if we
-- would not use transparencies (but define them only). This is
-- somewhat messy.
- local stamp = format(template,a,t)
+ local stamp = f_transparency(a,t)
local n = registered[stamp]
if not n then
n = #values + 1
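Color and transparency registration above share one stamp-and-lookup idiom: a formatter builds a canonical string key from the requested values, and the registered table maps that key to a slot in values, so identical requests reuse a single attribute value. A condensed sketch of transparencies.register, reusing the f_transparency formatter and the registered/values tables introduced in this patch:

    local function register_once(a,t)
        local stamp = f_transparency(a,t)    -- e.g. "1:0.5"
        local n = registered[stamp]
        if not n then                        -- first time this combination shows up
            n = #values + 1
            values[n] = { a, t }
            registered[stamp] = n
        end
        return n                             -- stable index, safe to store in an attribute
    end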
diff --git a/Master/texmf-dist/tex/context/base/attr-eff.lua b/Master/texmf-dist/tex/context/base/attr-eff.lua
index 023d1c51b53..4dce5419abc 100644
--- a/Master/texmf-dist/tex/context/base/attr-eff.lua
+++ b/Master/texmf-dist/tex/context/base/attr-eff.lua
@@ -6,9 +6,9 @@ if not modules then modules = { } end modules ['attr-eff'] = {
license = "see context related readme files"
}
-local format = string.format
-
-local attributes, nodes = attributes, nodes
+local commands, interfaces = commands, interfaces
+local attributes, nodes, backends, utilities = attributes, nodes, backends, utilities
+local tex = tex
local states = attributes.states
local tasks = nodes.tasks
@@ -16,6 +16,10 @@ local nodeinjections = backends.nodeinjections
local settexattribute = tex.setattribute
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
+
+local variables = interfaces.variables
+local v_normal = variables.normal
attributes.effects = attributes.effects or { }
local effects = attributes.effects
@@ -27,14 +31,14 @@ effects.values = effects.values or { }
effects.registered = effects.registered or { }
effects.attribute = a_effect
-storage.register("attributes/effects/registered", effects.registered, "attributes.effects.registered")
-storage.register("attributes/effects/values", effects.values, "attributes.effects.values")
+local data = effects.data
+local registered = effects.registered
+local values = effects.values
-local template = "%s:%s:%s"
+local f_stamp = formatters["%s:%s:%s"]
-local data = effects.data
-local registered = effects.registered
-local values = effects.values
+storage.register("attributes/effects/registered", registered, "attributes.effects.registered")
+storage.register("attributes/effects/values", values, "attributes.effects.values")
-- valid effects: normal inner outer both hidden (stretch,rulethickness,effect)
@@ -66,12 +70,22 @@ effects.handler = nodes.installattributehandler {
processor = states.process,
}
-local function register(effect,stretch,rulethickness)
- local stamp = format(template,effect,stretch,rulethickness)
+local function register(specification)
+ local alternative, stretch, rulethickness
+ if specification then
+ alternative = specification.alternative or v_normal
+ stretch = specification.stretch or 0
+ rulethickness = specification.rulethickness or 0
+ else
+ alternative = v_normal
+ stretch = 0
+ rulethickness = 0
+ end
+ local stamp = f_stamp(alternative,stretch,rulethickness)
local n = registered[stamp]
if not n then
n = #values + 1
- values[n] = { effect, stretch, rulethickness }
+ values[n] = { alternative, stretch, rulethickness }
registered[stamp] = n
end
return n
@@ -88,10 +102,10 @@ effects.enable = enable
local enabled = false
-function commands.triggereffect(effect,stretch,rulethickness)
+function commands.triggereffect(specification)
if not enabled then
enable()
enabled = true
end
- settexattribute(a_effect,register(effect,stretch,rulethickness))
+ settexattribute(a_effect,register(specification))
end
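The register helper now takes one specification table instead of three positional arguments, falling back to v_normal/0/0 when nothing is passed. A hedged caller-side sketch, with the field names from the code above and illustrative values; dimensions arrive from the TeX end as scaled points:

    commands.triggereffect {
        alternative   = "inner",
        stretch       = 0,
        rulethickness = 16384,   -- 0.25pt expressed in scaled points
    }
    commands.triggereffect()     -- no specification: normal / 0 / 0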
diff --git a/Master/texmf-dist/tex/context/base/attr-eff.mkiv b/Master/texmf-dist/tex/context/base/attr-eff.mkiv
index c5c94537c14..43f575a7a69 100644
--- a/Master/texmf-dist/tex/context/base/attr-eff.mkiv
+++ b/Master/texmf-dist/tex/context/base/attr-eff.mkiv
@@ -17,38 +17,39 @@
\unprotect
-\gdef\dotriggereffect#1#2#3%
- {\ctxcommand{triggereffect('#1',#2,\number\dimexpr#3\relax)}}
+\installcorenamespace{effect}
-\unexpanded\def\setupeffect
- {\dodoubleargument\dosetupeffect}
+\installcommandhandler \??effect {effect} \??effect
-\def\dosetupeffect[#1][#2]%
- {\getparameters[\??et#1][#2]}
+\setupeffect
+ [\c!method=\v!none,
+ \c!stretch=\zerocount,
+ \c!rulethickness=\zeropoint,
+ \c!alternative=\v!normal]
-\unexpanded\def\defineeffect
- {\dodoubleargument\dodefineeffect}
-
-\def\dodefineeffect[#1][#2]%
- {\getparameters[\??et#1][\c!method=\v!none,\c!stretch=0,\c!rulethickness=\zeropoint,\c!alternative=\v!normal,#2]%
- \doif{\getvalue{\??et#1\c!method}}\v!command
- {\setugvalue{\e!start#1}{\starteffect[#1]}%
- \setugvalue{\e!stop #1}{\stopeffect}}}
-
-% yes or no grouped
+\appendtoks
+ \edef\p_method{\effectparameter\c!method}%
+ \ifx\p_method\v!method
+ \setuxvalue{\e!start\currenteffect}{\starteffect[#1]}%
+ \setuxvalue{\e!stop \currenteffect}{\stopeffect}%
+ \fi
+\to \everydefineeffect
\unexpanded\def\starteffect[#1]%
- {\dotriggereffect
- {\csname\??et#1\c!alternative \endcsname}%
- {\csname\??et#1\c!stretch \endcsname}%
- {\csname\??et#1\c!rulethickness\endcsname}}
+ {\ctxcommand{triggereffect{
+ alternative = "\namedeffectparameter{#1}\c!alternative",
+ stretch = \number\namedeffectparameter{#1}\c!stretch,
+ rulethickness = \number\dimexpr\namedeffectparameter{#1}\c!rulethickness\relax
+ }}}
\unexpanded\def\stopeffect % can be special
- {\dotriggereffect\v!normal0\zeropoint}
+ {\ctxcommand{triggereffect()}} % v!normal 0 0
\unexpanded\def\effect[#1]%
{\groupedcommand{\starteffect[#1]}{\stopeffect}}
+% yes or no grouped
+
\defineeffect [\v!inner] [\c!alternative=\v!inner,\c!rulethickness=.25pt]
\defineeffect [\v!outer] [\c!alternative=\v!outer,\c!rulethickness=.25pt]
\defineeffect [\v!both] [\c!alternative=\v!both, \c!rulethickness=.25pt]
diff --git a/Master/texmf-dist/tex/context/base/attr-ini.lua b/Master/texmf-dist/tex/context/base/attr-ini.lua
index f3714fcb4d9..206a86d79b5 100644
--- a/Master/texmf-dist/tex/context/base/attr-ini.lua
+++ b/Master/texmf-dist/tex/context/base/attr-ini.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['attr-ini'] = {
license = "see context related readme files"
}
+local commands, context, nodes, storage = commands, context, nodes, storage
+
local next, type = next, type
--[[ldx--
@@ -13,11 +15,10 @@ local next, type = next, type
symbolic names later on.</p>
--ldx]]--
-attributes = attributes or { }
-
-local attributes, nodes = attributes, nodes
+attributes = attributes or { }
+local attributes = attributes
--- todo: local and then gobals ... first loaded anyway
+local sharedstorage = storage.shared
attributes.names = attributes.names or { }
attributes.numbers = attributes.numbers or { }
@@ -26,15 +27,14 @@ attributes.states = attributes.states or { }
attributes.handlers = attributes.handlers or { }
attributes.unsetvalue = -0x7FFFFFFF
-local names, numbers, list = attributes.names, attributes.numbers, attributes.list
+local names = attributes.names
+local numbers = attributes.numbers
+local list = attributes.list
storage.register("attributes/names", names, "attributes.names")
storage.register("attributes/numbers", numbers, "attributes.numbers")
storage.register("attributes/list", list, "attributes.list")
-names [0] = "fontdynamic"
-numbers["fontdynamic"] = 0
-
function attributes.define(name,number) -- at the tex end
if not numbers[name] then
numbers[name] = number
@@ -44,12 +44,18 @@ function attributes.define(name,number) -- at the tex end
end
--[[ldx--
+<p>We reserve this one as we really want it to be always set (faster).</p>
+--ldx]]--
+
+names[0], numbers["fontdynamic"] = "fontdynamic", 0
+
+--[[ldx--
<p>We can use the attributes in the range 127-255 (outside user space). These
are only used when no attribute is set at the \TEX\ end which normally
happens in <l n='context'/>.</p>
--ldx]]--
-storage.shared.attributes_last_private = storage.shared.attributes_last_private or 127
+sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127
-- to be considered (so that we can use an array access):
--
@@ -57,10 +63,10 @@ storage.shared.attributes_last_private = storage.shared.attributes_last_private
--
-- setmetatable(private, {
-- __index = function(t,name)
--- local number = storage.shared.attributes_last_private or 127
+-- local number = sharedstorage.attributes_last_private
-- if number < 1023 then -- tex.count.minallocatedattribute - 1
-- number = number + 1
--- storage.shared.attributes_last_private = number
+-- sharedstorage.attributes_last_private = number
-- end
-- numbers[name], names[number], list[number] = number, name, { }
-- private[name] = number
@@ -74,12 +80,12 @@ storage.shared.attributes_last_private = storage.shared.attributes_last_private
function attributes.private(name) -- at the lua end (hidden from user)
local number = numbers[name]
if not number then
- local last = storage.shared.attributes_last_private or 127
+ local last = sharedstorage.attributes_last_private
if last < 1023 then -- tex.count.minallocatedattribute - 1
last = last + 1
- storage.shared.attributes_last_private = last
+ sharedstorage.attributes_last_private = last
else
- report_attribute("no more room for private attributes") -- fatal
+ report_attribute("no more room for private attributes")
os.exit()
end
number = last
@@ -88,7 +94,7 @@ function attributes.private(name) -- at the lua end (hidden from user)
return number
end
--- new (actually a tracer)
+-- tracers
local report_attribute = logs.reporter("attributes")
@@ -99,7 +105,7 @@ local function showlist(what,list)
while a do
local number, value = a.number, a.value
i = i + 1
- report_attribute("%s %2i: attribute %3i, value %4i, name %s",tostring(what),i,number,value,names[number] or '?')
+ report_attribute("%S %2i: attribute %3i, value %4i, name %a",what,i,number,value,names[number])
a = a.next
end
end
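The attributes.private change keeps its counter in storage.shared (aliased as sharedstorage), so successive allocations simply continue from the last number handed out; numbers above 127 stay outside user space and are reused by name. A minimal usage sketch with an illustrative attribute name:

    local a_mystate = attributes.private("mystate")   -- allocates a number on first call ...
    local again     = attributes.private("mystate")   -- ... and returns that same number later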
diff --git a/Master/texmf-dist/tex/context/base/attr-ini.mkiv b/Master/texmf-dist/tex/context/base/attr-ini.mkiv
index a1550b4be98..ac210c98e5a 100644
--- a/Master/texmf-dist/tex/context/base/attr-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/attr-ini.mkiv
@@ -14,8 +14,8 @@
\writestatus{loading}{ConTeXt Attribute Macros / Initialization}
%D Although it's still somewhat experimental, here we introduce code
-%D related to attributes. Housekeeping will move completely to Lua
-%D and \newattribute will go away.
+%D related to attributes. Housekeeping will move completely to \LUA\
+%D and \type {\newattribute} will go away.
\unprotect
@@ -27,14 +27,16 @@
\unexpanded\def\pushattribute#1%
{\global\advance\csname\??attributestack\string#1\endcsname\plusone
- \global\expandafter\mathchardef\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname\attribute#1}
+ \expandafter\xdef\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname{\number\attribute#1}}
\unexpanded\def\popattribute#1%
- {\attribute#1\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname
+ {\attribute#1\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname\relax
\global\advance\csname\??attributestack\string#1\endcsname\minusone}
\unexpanded\def\installattributestack#1%
- {\expandafter\newcount\csname\??attributestack\string#1\endcsname}
+ {\ifcsname\??attributestack\string#1\endcsname \else
+ \expandafter\newcount\csname\??attributestack\string#1\endcsname
+ \fi}
\newtoks \attributesresetlist
@@ -83,12 +85,13 @@
%D For the moment we put this here (later it will move to where it's used):
\definesystemattribute [state]
+\definesystemattribute [color] [public]
+\definesystemattribute [colormodel] [public,global]
\definesystemattribute [skip]
\definesystemattribute [penalty]
-\definesystemattribute [colormodel] [public,global]
-\definesystemattribute [color] [public]
\definesystemattribute [transparency] [public]
\definesystemattribute [background] [public]
+\definesystemattribute [alignbackground] [public]
\definesystemattribute [colorintent] [public]
\definesystemattribute [negative] [public]
\definesystemattribute [effect] [public]
diff --git a/Master/texmf-dist/tex/context/base/attr-lay.lua b/Master/texmf-dist/tex/context/base/attr-lay.lua
index bcdc541f7b6..4bcc70b0ca1 100644
--- a/Master/texmf-dist/tex/context/base/attr-lay.lua
+++ b/Master/texmf-dist/tex/context/base/attr-lay.lua
@@ -10,12 +10,18 @@ if not modules then modules = { } end modules ['attr-lay'] = {
-- but when we need it stacked layers might show up too; the next function based
-- approach can be replaced by static (metatable driven) resolvers
+-- maybe use backends.registrations here too
+
local type = type
-local format = string.format
local insert, remove = table.insert, table.remove
+local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
+
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
local report_viewerlayers = logs.reporter("viewerlayers")
@@ -26,8 +32,6 @@ local report_viewerlayers = logs.reporter("viewerlayers")
-- nb. too many "0 g"s
-- nb: more local tables
-local attributes, nodes = attributes, nodes
-
attributes.viewerlayers = attributes.viewerlayers or { }
local viewerlayers = attributes.viewerlayers
@@ -57,22 +61,42 @@ local texgetattribute = tex.getattribute
local texsettokenlist = tex.settoks
local unsetvalue = attributes.unsetvalue
-storage.register("attributes/viewerlayers/registered", viewerlayers.registered, "attributes.viewerlayers.registered")
-storage.register("attributes/viewerlayers/values", viewerlayers.values, "attributes.viewerlayers.values")
-storage.register("attributes/viewerlayers/scopes", viewerlayers.scopes, "attributes.viewerlayers.scopes")
+local nodepool = nodes.pool
+
+local data = viewerlayers.data
+local values = viewerlayers.values
+local listwise = viewerlayers.listwise
+local registered = viewerlayers.registered
+local scopes = viewerlayers.scopes
+
+local f_stamp = formatters["%s"]
-local data = viewerlayers.data
-local values = viewerlayers.values
-local listwise = viewerlayers.listwise
-local registered = viewerlayers.registered
-local scopes = viewerlayers.scopes
-local template = "%s"
+storage.register("attributes/viewerlayers/registered", registered, "attributes.viewerlayers.registered")
+storage.register("attributes/viewerlayers/values", values, "attributes.viewerlayers.values")
+storage.register("attributes/viewerlayers/scopes", scopes, "attributes.viewerlayers.scopes")
+
+local layerstacker = utilities.stacker.new("layers") -- experiment
+
+layerstacker.mode = "stack"
+layerstacker.unset = attributes.unsetvalue
+
+viewerlayers.resolve_begin = layerstacker.resolve_begin
+viewerlayers.resolve_step = layerstacker.resolve_step
+viewerlayers.resolve_end = layerstacker.resolve_end
+
+function commands.cleanuplayers()
+ layerstacker.clean()
+ -- todo
+end
-- stacked
+local function startlayer(...) startlayer = nodeinjections.startlayer return startlayer(...) end
+local function stoplayer (...) stoplayer = nodeinjections.stoplayer return stoplayer (...) end
+
local function extender(viewerlayers,key)
if viewerlayers.supported and key == "none" then
- local d = nodeinjections.stoplayer()
+ local d = stoplayer()
viewerlayers.none = d
return d
end
@@ -82,17 +106,23 @@ local function reviver(data,n)
if viewerlayers.supported then
local v = values[n]
if v then
- local d = nodeinjections.startlayer(v)
+ local d = startlayer(v)
data[n] = d
return d
else
- report_viewerlayers("error, unknown reference '%s'",tostring(n))
+ report_viewerlayers("error: unknown reference %a",tostring(n))
end
end
end
-setmetatableindex(viewerlayers, extender)
-setmetatableindex(viewerlayers.data, reviver)
+setmetatableindex(viewerlayers,extender)
+setmetatableindex(viewerlayers.data,reviver)
+
+-- !!!! TEST CODE !!!!
+
+layerstacker.start = function(...) local f = nodeinjections.startstackedlayer layerstacker.start = f return f(...) end
+layerstacker.stop = function(...) local f = nodeinjections.stopstackedlayer layerstacker.stop = f return f(...) end
+layerstacker.change = function(...) local f = nodeinjections.changestackedlayer layerstacker.change = f return f(...) end
local function initializer(...)
return states.initialize(...)
@@ -103,7 +133,8 @@ attributes.viewerlayers.handler = nodes.installattributehandler {
namespace = viewerlayers,
initializer = initializer,
finalizer = states.finalize,
- processor = states.stacked,
+ -- processor = states.stacked,
+ processor = states.stacker,
}
local stack, enabled, global = { }, false, false
@@ -132,7 +163,7 @@ local function register(name,lw) -- if not inimode redefine data[n] in first cal
if not enabled then
viewerlayers.enable(true)
end
- local stamp = format(template,name)
+ local stamp = f_stamp(name)
local n = registered[stamp]
if not n then
n = #values + 1
@@ -149,12 +180,16 @@ function viewerlayers.setfeatures(hasorder)
viewerlayers.hasorder = hasorder
end
+local usestacker = true -- new, experimental
+
function viewerlayers.start(name)
--- if not enabled then
--- viewerlayers.enable(true)
--- end
- insert(stack,texgetattribute(a_viewerlayer))
- local a = register(name) or unsetvalue
+ local a
+ if usestacker then
+ a = layerstacker.push(register(name) or unsetvalue)
+ else
+ insert(stack,texgetattribute(a_viewerlayer))
+ a = register(name) or unsetvalue
+ end
if global or scopes[name] == v_global then
scopes[a] = v_global -- messy but we don't know the attributes yet
texsetattribute("global",a_viewerlayer,a)
@@ -165,14 +200,21 @@ function viewerlayers.start(name)
end
function viewerlayers.stop()
- local a = remove(stack)
- if a >= 0 then
+ local a
+ if usestacker then
+ a = layerstacker.pop()
+ else
+ a = remove(stack)
+ end
+ if not a then
+ -- error
+ elseif a >= 0 then
if global or scopes[a] == v_global then
texsetattribute("global",a_viewerlayer,a)
else
texsetattribute(a_viewerlayer,a)
end
- texsettokenlist("currentviewerlayertoks",values[a])
+ texsettokenlist("currentviewerlayertoks",values[a] or "")
else
if global or scopes[a] == v_global then
texsetattribute("global",a_viewerlayer,unsetvalue)
@@ -197,9 +239,9 @@ function viewerlayers.define(settings)
end
end
-commands.defineviewerlayer = viewerlayers.define
-commands.startviewerlayer = viewerlayers.start
-commands.stopviewerlayer = viewerlayers.stop
+commands.defineviewerlayer = viewerlayers.define
+commands.startviewerlayer = viewerlayers.start
+commands.stopviewerlayer = viewerlayers.stop
function commands.definedviewerlayer(settings)
viewerlayers.define(settings)
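The startlayer/stoplayer helpers above are self-replacing stubs: the first call looks up the real backend injection and rebinds the local, so every later call goes straight through, presumably because nodeinjections.startlayer is not yet bound to a backend when this file is loaded. The idiom in isolation:

    local function startlayer(...)
        startlayer = nodeinjections.startlayer   -- resolve the injector once, on first use
        return startlayer(...)                   -- then forward this very call
    end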
diff --git a/Master/texmf-dist/tex/context/base/attr-lay.mkiv b/Master/texmf-dist/tex/context/base/attr-lay.mkiv
index 67cd204e2f6..9c9c3318ef1 100644
--- a/Master/texmf-dist/tex/context/base/attr-lay.mkiv
+++ b/Master/texmf-dist/tex/context/base/attr-lay.mkiv
@@ -13,51 +13,51 @@
\writestatus{loading}{ConTeXt Attribute Macros / Viewerlayers}
+%D Currently there is a limitation in mixed inline usage. This has to do with the fact
+%D that we have a stacked model but cannot determine where to revert to (as we can
+%D have AABBCCAA ranges). Maybe I'll solve that one day. It only affects nested inline
+%D layers and these make not much sense anyway. We'd have to store the complete nesting
+%D stack in the attribute in order to be able to cross pages and that demands a new
+%D mechanism.
+
\unprotect
\registerctxluafile{attr-lay}{1.001}
% needs to work over stopitemize grouping etc
-% \def\registerviewerlayer#1#2% global !
-% {\setxvalue{\??ql:#1}{\global\attribute\viewerlayerattribute\ctxlua{tex.write(attributes.viewerlayers.register('#2'))} }}
-%
-% \setevalue{\??ql:}{\global\attribute\viewerlayerattribute\attributeunsetvalue}
+\installcorenamespace{viewerlayer}
+
+\installcommandhandler \??viewerlayer {viewerlayer} \??viewerlayer
-\getparameters
- [\??lr]
+\setupviewerlayer
[\c!state=\v!start,
\c!title=,
\c!printable=\v!yes,
\c!scope=\v!local, % maybe global but needs checking with layout
\c!method=\v!none]
-\def\defineviewerlayer
- {\dodoubleargument\dodefineviewerlayer}
-
-\def\dodefineviewerlayer[#1][#2]% document wide properties
- {\begingroup
- \getparameters[\??lr][#2]%
- \ctxcommand{defineviewerlayer{
- tag = "#1",
- title = "\@@lrtitle",
- visible = "\@@lrstate",
- editable = "\v!yes",
- printable = "\@@lrprintable",
- scope = "\@@lrscope"
- }}%
- \doif\@@lrmethod\v!command
- {\setugvalue{\e!start#1}{\startviewerlayer[#1]}%
- \setugvalue{\e!stop #1}{\stopviewerlayer }}%
- \endgroup}
-
-\unexpanded\def\startviewerlayer[#1]{\ctxcommand{startviewerlayer("#1")}} % not grouped
-\unexpanded\def\stopviewerlayer {\ctxcommand{stopviewerlayer()}} % not grouped
+\appendtoks
+ \ctxcommand{defineviewerlayer{
+ tag = "\currentviewerlayer",
+ title = "\viewerlayerparameter\c!title",
+ visible = "\viewerlayerparameter\c!state",
+ editable = "\v!yes",
+ printable = "\viewerlayerparameter\c!printable",
+ scope = "\viewerlayerparameter\c!scope"
+ }}%
+ \doif{\viewerlayerparameter\c!method}\v!command
+ {\setuxvalue{\e!start#1}{\startviewerlayer[\currentviewerlayer]}%
+ \setuxvalue{\e!stop #1}{\stopviewerlayer}}%
+\to \everydefineviewerlayer
+
+\unexpanded\def\startviewerlayer[#1]{\ctxcommand{startviewerlayer("#1")}} % not grouped
+\unexpanded\def\stopviewerlayer {\ctxcommand{stopviewerlayer()}} % not grouped
\unexpanded\def\viewerlayer [#1]{\groupedcommand{\startviewerlayer[#1]}{\stopviewerlayer}} % grouped
% some day we will keep this at the lua end as the info is only needed there
-\let\currentviewerlayer\empty \newtoks\currentviewerlayertoks % soon we can set macros at the lua end
+\newtoks\currentviewerlayertoks % soon we can set macros at the lua end
\def\currentviewerlayer{\the\currentviewerlayertoks}
@@ -67,7 +67,9 @@
% layout components are implemented rather directly (speed)
-\def\doinitializelayoutcomponent#1%
+\installcorenamespace{layoutcomponentattribute}
+
+\def\attr_layoutcomponent_initialize#1%
{\edef\layoutcomponentboxattribute{\ctxcommand{definedviewerlayer{%
tag = "#1",
title = utilities.strings.nice("#1"), % only here as in steps we have step:<number>
@@ -76,24 +78,28 @@
printable = "\v!yes"
}}}%
\edef\layoutcomponentboxattribute{attr \viewerlayerattribute \layoutcomponentboxattribute\relax}%
- \expandafter\glet\csname\??lr:a:#1\endcsname\layoutcomponentboxattribute}
+ \expandafter\glet\csname\??layoutcomponentattribute#1\endcsname\layoutcomponentboxattribute}
-\def\dosetlayoutcomponentattribute#1% make this faster
- {\expandafter\let\expandafter\layoutcomponentboxattribute\csname\??lr:a:#1\endcsname
+\def\attr_layoutcomponent_set#1% make this faster
+ {\expandafter\let\expandafter\layoutcomponentboxattribute\csname\??layoutcomponentattribute#1\endcsname
\ifx\layoutcomponentboxattribute\relax
- \doinitializelayoutcomponent{#1}% get rid of { }
+ \attr_layoutcomponent_initialize{#1}% get rid of { }
\fi}
-\def\doresetlayoutcomponentattribute
+\def\attr_layoutcomponent_reset
{\let\layoutcomponentboxattribute\empty}
\let\setlayoutcomponentattribute \gobbleoneargument
\let\resetlayoutcomponentattribute\relax
\let\layoutcomponentboxattribute \empty
-\def\showlayoutcomponents
- {\ctxlua{attributes.viewerlayers.enable()}% will go
- \let\setlayoutcomponentattribute \dosetlayoutcomponentattribute
- \let\resetlayoutcomponentattribute\doresetlayoutcomponentattribute}
+\unexpanded\def\showlayoutcomponents
+ {%\ctxlua{attributes.viewerlayers.enable()}% automatic
+ \let\setlayoutcomponentattribute \attr_layoutcomponent_set
+ \let\resetlayoutcomponentattribute\attr_layoutcomponent_reset}
+
+\appendtoks
+ \ctxcommand{cleanuplayers()}%
+\to \everyshipout
\protect \endinput
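The \everydefineviewerlayer hook above hands a single settings table to the Lua end; roughly the following call reaches commands.defineviewerlayer (a sketch with illustrative values, using the defaults set by \setupviewerlayer):

    commands.defineviewerlayer {
        tag       = "mylayer",
        title     = "",
        visible   = "start",
        editable  = "yes",
        printable = "yes",
        scope     = "local",
    }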
diff --git a/Master/texmf-dist/tex/context/base/attr-mkr.lua b/Master/texmf-dist/tex/context/base/attr-mkr.lua
new file mode 100644
index 00000000000..976598fa013
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/attr-mkr.lua
@@ -0,0 +1,26 @@
+if not modules then modules = { } end modules ['attr-mkr'] = {
+ version = 1.001,
+ comment = "companion to attr-mkr.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local markers = nodes.markers or { }
+nodes.markers = markers
+
+local cache = { }
+local numbers = attributes.numbers
+local a_unknown = attributes.private("marker:unknown")
+
+table.setmetatableindex(cache,function(t,k)
+ local k = "marker:" .. k
+ local v = numbers[k] or a_unknown
+ t[k] = v
+ return v
+end)
+
+function markers.get(n,name)
+ local a = cache[name]
+ return a and n[a] or nil
+end
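
For orientation, a minimal usage sketch of the marker lookup defined above; it is an illustration only, not part of the patch. The marker name "myindex", the traversal and the callback context are hypothetical and assume that \definemarker[myindex] (see the companion attr-mkr.mkiv below) has registered the "marker:myindex" attribute at the TeX end.

-- hypothetical sketch: reading a marker from a node list
local getmarker = nodes.markers.get

local function show_markers(head)
    for n in node.traverse(head) do
        local v = getmarker(n,"myindex") -- nil when unset or when the marker is unknown
        if v then
            texio.write_nl(("marker value %s"):format(v))
        end
    end
end
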
diff --git a/Master/texmf-dist/tex/context/base/attr-mkr.mkiv b/Master/texmf-dist/tex/context/base/attr-mkr.mkiv
new file mode 100644
index 00000000000..c8818ebae7e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/attr-mkr.mkiv
@@ -0,0 +1,25 @@
+%D \module
+%D [ file=attr-mkr,
+%D version=2013.01.09,
+%D title=\CONTEXT\ Attribute Macros,
+%D subtitle=Markers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Attribute Macros / Markers}
+
+\unprotect
+
+\registerctxluafile{attr-mkr}{1.001}
+
+\unexpanded\def\definemarker [#1]{\defineattribute[\s!marker:#1]}
+\unexpanded\def\setmarker [#1]#2[#3]{\dosetattribute{\s!marker:#1}{#3}}
+\unexpanded\def\resetmarker   [#1]{\doresetattribute{\s!marker:#1}}
+ \def\boxmarker #1#2{attr \numexpr\dogetattributeid{\s!marker:#1}\numexpr \numexpr#2\relax}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/attr-neg.lua b/Master/texmf-dist/tex/context/base/attr-neg.lua
index 4d89cb49b64..c32cec956b2 100644
--- a/Master/texmf-dist/tex/context/base/attr-neg.lua
+++ b/Master/texmf-dist/tex/context/base/attr-neg.lua
@@ -11,8 +11,9 @@ if not modules then modules = { } end modules ['attr-neg'] = {
local format = string.format
-
-local attributes, nodes = attributes, nodes
+local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
local states = attributes.states
local tasks = nodes.tasks
diff --git a/Master/texmf-dist/tex/context/base/attr-neg.mkiv b/Master/texmf-dist/tex/context/base/attr-neg.mkiv
index 0fc5070f1bd..102b220baca 100644
--- a/Master/texmf-dist/tex/context/base/attr-neg.mkiv
+++ b/Master/texmf-dist/tex/context/base/attr-neg.mkiv
@@ -19,12 +19,12 @@
% positive and negative are preregistered
-\def\dotriggernegative#1{\ctxcommand{triggernegative('#1')}}
+\unexpanded\def\startnegative{\attr_trigger_negative\v!negative}
+\unexpanded\def\stopnegative {\attr_trigger_negative\v!positive}
-\unexpanded\def\startnegative{\dotriggernegative\v!negative}
-\unexpanded\def\stopnegative {\dotriggernegative\v!positive}
+\unexpanded\def\startpositive{\attr_trigger_negative\v!positive}
+\unexpanded\def\stoppositive {\attr_trigger_negative\v!negative}
-\unexpanded\def\startpositive{\dotriggernegative\v!positive}
-\unexpanded\def\stoppositive {\dotriggernegative\v!negative}
+\def\attr_trigger_negative#1{\ctxcommand{triggernegative('#1')}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/back-exp.lua b/Master/texmf-dist/tex/context/base/back-exp.lua
index 3ce40a6c58b..4d219a18b80 100644
--- a/Master/texmf-dist/tex/context/base/back-exp.lua
+++ b/Master/texmf-dist/tex/context/base/back-exp.lua
@@ -18,15 +18,18 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- We can optimize the code ... currently the overhead is some 10% for xml + html so
-- there is no hurry.
+-- todo: move critical formatters out of functions
+-- todo: delay loading (apart from basic tag stuff)
+
local next, type = next, type
local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find
+local validstring = string.valid
local lpegmatch = lpeg.match
-local utfchar, utfbyte, utfsub, utfgsub = utf.char, utf.byte, utf.sub, utf.gsub
+local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values
local insert, remove = table.insert, table.remove
-local topoints = number.topoints
-local utfvalues = string.utfvalues
local fromunicode16 = fonts.mappings.fromunicode16
local sortedhash = table.sortedhash
+local formatters = string.formatters
local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end)
local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end)
@@ -88,15 +91,12 @@ local a_reference = attributes.private('reference')
local a_textblock = attributes.private("textblock")
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
local traverse_id = node.traverse_id
local traverse_nodes = node.traverse
local slide_nodelist = node.slide
local texattribute = tex.attribute
local texdimen = tex.dimen
local texcount = tex.count
-local unsetvalue = attributes.unsetvalue
local locate_node = nodes.locate
local references = structures.references
@@ -154,6 +154,8 @@ local somespace = { [0x20] = true, [" "] = true } -- for testing
local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "quot;" }
+local entityremapper = utf.remapper(entities)
+
local alignmapping = {
flushright = "right",
middle = "center",
@@ -180,7 +182,7 @@ end)
setmetatableindex(specialspaces, function(t,k)
local v = utfchar(k)
t[k] = v
- entities[v] = format("&#x%X;",k)
+ entities[v] = formatters["&#x%X;"](k)
somespace[k] = true
somespace[v] = true
return v
@@ -229,7 +231,7 @@ end)
local function attribute(key,value)
if value and value ~= "" then
- return format(' %s="%s"',key,gsub(value,".",attribentities))
+ return formatters[' %s="%s"'](key,gsub(value,".",attribentities))
else
return ""
end
@@ -258,17 +260,6 @@ local function hashlistdata()
end
end
---~ local spaces = { } -- watch how we also moved the -1 in depth-1 to the creator
-
---~ setmetatableindex(spaces, function(t,k)
---~ if not k then
---~ return ""
---~ end
---~ local s = rep(" ",k-1)
---~ t[k] = s
---~ return s
---~ end)
-
local spaces = utilities.strings.newrepeater(" ",-1)
function structurestags.setattributehash(fulltag,key,value) -- public hash
@@ -300,7 +291,7 @@ local usedstyles = { }
local documenttemplate = [[
document {
- font-size : %s !important ;
+ font-size : %s !important ;
max-width : %s !important ;
text-align : %s !important ;
hyphens : %s !important ;
@@ -354,7 +345,7 @@ local function allusedstyles(xmlfile)
for detail, data in sortedhash(details) do
local s = fontspecification(data.style)
local c = colorspecification(data.color)
- result[#result+1] = format(styletemplate,element,detail,
+ result[#result+1] = formatters[styletemplate](element,detail,
s.style or "inherit",
s.variant or "inherit",
s.weight or "inherit",
@@ -382,11 +373,11 @@ local function allusedimages(xmlfile)
for element, details in sortedhash(usedimages) do
for detail, data in sortedhash(details) do
local name = data.name
- if file.extname(name) == "pdf" then
+ if file.suffix(name) == "pdf" then
-- temp hack .. we will have a remapper
name = file.replacesuffix(name,"svg")
end
- result[#result+1] = format(imagetemplate,element,detail,name,data.width,data.height)
+ result[#result+1] = formatters[imagetemplate](element,detail,name,data.width,data.height)
end
end
return concat(result,"\n\n")
@@ -397,7 +388,7 @@ local function uniqueusedimages()
for element, details in next, usedimages do
for detail, data in next, details do
local name = data.name
- if file.extname(name) == "pdf" then
+ if file.suffix(name) == "pdf" then
unique[file.replacesuffix(name,"svg")] = name
else
unique[name] = name
@@ -475,7 +466,7 @@ function extras.document(result,element,detail,n,fulltag,di)
local key = fields[i]
local value = identity[key]
if value and value ~= "" then
- result[#result+1] = format(" %s=%q",key,value)
+ result[#result+1] = formatters[" %s=%q"](key,value)
end
end
end
@@ -514,7 +505,7 @@ end
function extras.synonym(result,element,detail,n,fulltag,di)
local tag = synonyms[fulltag]
if tag then
- result[#result+1] = format(" tag='%s'",tag)
+ result[#result+1] = formatters[" tag='%s'"](tag)
end
end
@@ -527,7 +518,7 @@ end
function extras.sorting(result,element,detail,n,fulltag,di)
local tag = sortings[fulltag]
if tag then
- result[#result+1] = format(" tag='%s'",tag)
+ result[#result+1] = formatters[" tag='%s'"](tag)
end
end
@@ -575,14 +566,14 @@ end
function extras.description(result,element,detail,n,fulltag,di)
local id = linked[fulltag]
if id then
- result[#result+1] = format(" insert='%s'",id) -- maybe just fulltag
+ result[#result+1] = formatters[" insert='%s'"](id) -- maybe just fulltag
end
end
function extras.descriptionsymbol(result,element,detail,n,fulltag,di)
local id = linked[fulltag]
if id then
- result[#result+1] = format(" insert='%s'",id)
+ result[#result+1] = formatters[" insert='%s'"](id)
end
end
@@ -602,9 +593,9 @@ function extras.image(result,element,detail,n,fulltag,di)
if data then
result[#result+1] = attribute("name",data.name)
if tonumber(data.page) > 1 then
- result[#result+1] = format(" page='%s'",data.page)
+ result[#result+1] = formatters[" page='%s'"](data.page)
end
- result[#result+1] = format(" id='%s' width='%s' height='%s'",fulltag,data.width,data.height)
+ result[#result+1] = formatters[" id='%s' width='%s' height='%s'"](fulltag,data.width,data.height)
end
end
@@ -620,7 +611,7 @@ end
function extras.combination(result,element,detail,n,fulltag,di)
local data = combinations[fulltag]
if data then
- result[#result+1] = format(" nx='%s' ny='%s'",data.nx,data.ny)
+ result[#result+1] = formatters[" nx='%s' ny='%s'"](data.nx,data.ny)
end
end
@@ -693,7 +684,7 @@ end
function specials.internal(result,var)
local internal = references.checkedurl(var.operation)
if internal then
- result[#result+1] = format(" location='aut:%s'",internal)
+ result[#result+1] = formatters[" location='aut:%s'"](internal)
end
end
@@ -705,9 +696,9 @@ local function adddestination(result,references) -- todo: specials -> exporters
if reference and reference ~= "" then
local prefix = references.prefix
if prefix and prefix ~= "" then
- result[#result+1] = format(" prefix='%s'",prefix)
+ result[#result+1] = formatters[" prefix='%s'"](prefix)
end
- result[#result+1] = format(" destination='%s'",reference)
+ result[#result+1] = formatters[" destination='%s'"](reference)
for i=1,#references do
local r = references[i]
local e = evaluators[r.kind]
@@ -725,13 +716,13 @@ local function addreference(result,references)
if reference and reference ~= "" then
local prefix = references.prefix
if prefix and prefix ~= "" then
- result[#result+1] = format(" prefix='%s'",prefix)
+ result[#result+1] = formatters[" prefix='%s'"](prefix)
end
- result[#result+1] = format(" reference='%s'",reference)
+ result[#result+1] = formatters[" reference='%s'"](reference)
end
local internal = references.internal
if internal and internal ~= "" then
- result[#result+1] = format(" location='aut:%s'",internal)
+ result[#result+1] = formatters[" location='aut:%s'"](internal)
end
end
end
@@ -817,6 +808,24 @@ end
-- maybe delay __i__ till we need it
+local apply_function = {
+ {
+ element = "mo",
+ -- comment = "apply function",
+ -- data = { utfchar(0x2061) },
+ data = { "&#x2061;" },
+ nature = "mixed",
+ }
+}
+
+local functioncontent = { }
+
+setmetatableindex(functioncontent,function(t,k)
+ local v = { { content = k } }
+ t[k] = v
+ return v
+end)
+
local function checkmath(root) -- we can provide utf.toentities as an option
local data = root.data
if data then
@@ -963,18 +972,10 @@ local function checkmath(root) -- we can provide utf.toentities as an option
i = collapse(di,i,data,ndata,detail,"mi")
local tag = functions[category]
if tag then
- di.data = { tag }
+ di.data = functioncontent[tag]
end
if apply then
- di.after = {
- {
- element = "mo",
- -- comment = "apply function",
- -- data = { utfchar(0x2061) },
- data = { "&#x2061;" },
- nature = "mixed",
- }
- }
+ di.after = apply_function
elseif automathapply then -- make function
local following
if i <= ndata then
@@ -991,15 +992,7 @@ local function checkmath(root) -- we can provide utf.toentities as an option
if following then
local tg = following.tg
if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition
- di.after = {
- {
- element = "mo",
- -- comment = "apply function",
- -- data = { utfchar(0x2061) },
- data = { "&#x2061;" },
- nature = "mixed",
- }
- }
+ di.after = apply_function
end
end
end
@@ -1162,11 +1155,11 @@ function extras.tablecell(result,element,detail,n,fulltag,di)
if hash then
local v = hash.columns
if v and v > 1 then
- result[#result+1] = format(" columns='%s'",v)
+ result[#result+1] = formatters[" columns='%s'"](v)
end
local v = hash.rows
if v and v > 1 then
- result[#result+1] = format(" rows='%s'",v)
+ result[#result+1] = formatters[" rows='%s'"](v)
end
local v = hash.align
if not v or v == 0 then
@@ -1239,19 +1232,19 @@ local function emptytag(result,element,nature,depth,di) -- currently only break
local a = di.attributes -- we might add detail etc
if a then -- happens seldom
if linedone then
- result[#result+1] = format("%s<%s",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["%w<%s"](depth,namespaced[element])
else
- result[#result+1] = format("\n%s<%s",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["\n%w<%s"](depth,namespaced[element])
end
for k, v in next, a do
- result[#result+1] = format(" %s=%q",k,v)
+ result[#result+1] = formatters[" %s=%q"](k,v)
end
result[#result+1] = "/>\n"
else
if linedone then
- result[#result+1] = format("%s<%s/>\n",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["%w<%s/>\n"](depth,namespaced[element])
else
- result[#result+1] = format("\n%s<%s/>\n",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["\n%w<%s/>\n"](depth,namespaced[element])
end
end
linedone = false
@@ -1267,57 +1260,57 @@ local function begintag(result,element,nature,depth,di,skip)
linedone = false
inlinedepth = inlinedepth + 1
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->",comment)
+ result[#result+1] = formatters["<!-- %s -->"](comment)
end
elseif nature == "mixed" then
if inlinedepth > 0 then
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->",comment)
+ result[#result+1] = formatters["<!-- %s -->"](comment)
end
elseif linedone then
result[#result+1] = spaces[depth]
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->",comment)
+ result[#result+1] = formatters["<!-- %s -->"](comment)
end
else
- result[#result+1] = format("\n%s",spaces[depth])
+ result[#result+1] = formatters["\n%w"](depth)
linedone = false
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->\n%s",comment,spaces[depth])
+ result[#result+1] = formatters["<!-- %s -->\n%w"](comment,depth)
end
end
inlinedepth = inlinedepth + 1
else
if inlinedepth > 0 then
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->",comment)
+ result[#result+1] = formatters["<!-- %s -->"](comment)
end
elseif linedone then
result[#result+1] = spaces[depth]
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->",comment)
+ result[#result+1] = formatters["<!-- %s -->"](comment)
end
else
- result[#result+1] = format("\n%s",spaces[depth]) -- can introduced extra line in mixed+mixed (filtered later on)
+            result[#result+1] = formatters["\n%w"](depth) -- can introduce an extra line in mixed+mixed (filtered later on)
linedone = false
if show_comment and comment then
- result[#result+1] = format("<!-- %s -->\n%s",comment,spaces[depth])
+ result[#result+1] = formatters["<!-- %s -->\n%w"](comment,depth)
end
end
end
if skip == "comment" then
if show_comment then
- result[#result+1] = format("<!-- begin %s -->",namespaced[element])
+ result[#result+1] = formatters["<!-- begin %s -->"](namespaced[element])
end
elseif skip then
-- ignore
else
- result[#result+1] = format("<%s",namespaced[element])
+ result[#result+1] = formatters["<%s"](namespaced[element])
if detail then
- result[#result+1] = format(" detail=%q",detail)
+ result[#result+1] = formatters[" detail=%q"](detail)
end
if indexing and n then
- result[#result+1] = format(" n=%q",n)
+ result[#result+1] = formatters[" n=%q"](n)
end
local extra = extras[element]
if extra then
@@ -1326,13 +1319,13 @@ local function begintag(result,element,nature,depth,di,skip)
local u = userdata[fulltag]
if u then
for k, v in next, u do
- result[#result+1] = format(" %s=%q",k,v)
+ result[#result+1] = formatters[" %s=%q"](k,v)
end
end
local a = di.attributes
if a then
for k, v in next, a do
- result[#result+1] = format(" %s=%q",k,v)
+ result[#result+1] = formatters[" %s=%q"](k,v)
end
end
result[#result+1] = ">"
@@ -1349,12 +1342,12 @@ local function begintag(result,element,nature,depth,di,skip)
result[#result+1] = "\n"
linedone = true
end
- result[#result+1] = format("%s<metadata>\n",spaces[depth])
+ result[#result+1] = formatters["%w<metadata>\n"](depth)
for k, v in table.sortedpairs(metadata) do
- v = utfgsub(v,".",entities)
- result[#result+1] = format("%s<metavariable name=%q>%s</metavariable>\n",spaces[depth+1],k,v)
+ v = entityremapper(v)
+ result[#result+1] = formatters["%w<metavariable name=%q>%s</metavariable>\n"](depth+1,k,v)
end
- result[#result+1] = format("%s</metadata>\n",spaces[depth])
+ result[#result+1] = formatters["%w</metadata>\n"](depth)
end
end
@@ -1366,35 +1359,35 @@ local function endtag(result,element,nature,depth,skip)
end
if skip == "comment" then
if show_comment then
- result[#result+1] = format("%s<!-- end %s -->\n",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["%w<!-- end %s -->\n"](depth,namespaced[element])
end
elseif skip then
-- ignore
else
- result[#result+1] = format("%s</%s>\n",spaces[depth],namespaced[element])
+ result[#result+1] = formatters["%w</%s>\n"](depth,namespaced[element])
end
linedone = true
else
if skip == "comment" then
if show_comment then
- result[#result+1] = format("<!-- end %s -->",namespaced[element])
+ result[#result+1] = formatters["<!-- end %s -->"](namespaced[element])
end
elseif skip then
-- ignore
else
- result[#result+1] = format("</%s>",namespaced[element])
+ result[#result+1] = formatters["</%s>"](namespaced[element])
end
end
else
inlinedepth = inlinedepth - 1
if skip == "comment" then
if show_comment then
- result[#result+1] = format("<!-- end %s -->",namespaced[element])
+ result[#result+1] = formatters["<!-- end %s -->"](namespaced[element])
end
elseif skip then
-- ignore
else
- result[#result+1] = format("</%s>",namespaced[element])
+ result[#result+1] = formatters["</%s>"](namespaced[element])
end
linedone = false
end
@@ -1405,22 +1398,22 @@ local function flushtree(result,data,nature,depth)
local nofdata = #data
for i=1,nofdata do
local di = data[i]
- if not di then
+ if not di then -- hm, di can be string
-- whatever
elseif di.content then
-- already has breaks
- local content = utfgsub(di.content,".",entities)
+ local content = entityremapper(di.content)
if i == nofdata and sub(content,-1) == "\n" then -- move check
-- can be an end of line in par but can also be the last line
if trace_spacing then
- result[#result+1] = format("<c n='%s'>%s</c>",di.parnumber or 0,sub(content,1,-2))
+ result[#result+1] = formatters["<c n='%s'>%s</c>"](di.parnumber or 0,sub(content,1,-2))
else
result[#result+1] = sub(content,1,-2)
end
result[#result+1] = " "
else
if trace_spacing then
- result[#result+1] = format("<c n='%s'>%s</c>",di.parnumber or 0,content)
+ result[#result+1] = formatters["<c n='%s'>%s</c>"](di.parnumber or 0,content)
else
result[#result+1] = content
end
@@ -1428,7 +1421,9 @@ local function flushtree(result,data,nature,depth)
linedone = false
elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
local element = di.element
- if element == "break" then -- or element == "pagebreak"
+ if not element then
+ -- skip
+ elseif element == "break" then -- or element == "pagebreak"
emptytag(result,element,nature,depth,di)
elseif element == "" or di.skip == "ignore" then
-- skip
@@ -1443,9 +1438,9 @@ local function flushtree(result,data,nature,depth)
end
begintag(result,element,natu,depth,di,skip)
flushtree(result,di.data,natu,depth)
--- if sub(result[#result],-1) == " " and natu ~= "inline" then
--- result[#result] = sub(result[#result],1,-2)
--- end
+ -- if sub(result[#result],-1) == " " and natu ~= "inline" then
+ -- result[#result] = sub(result[#result],1,-2)
+ -- end
endtag(result,element,natu,depth,skip)
if di.after then
flushtree(result,di.after,nature,depth)
@@ -1662,9 +1657,9 @@ local function push(fulltag,depth)
treestack[currentdepth] = tree
if trace_export then
if detail and detail ~= "" then
- report_export("%s<%s trigger='%s' paragraph='%s' index='%s' detail='%s'>",spaces[currentdepth-1],fulltag,currentattribute or 0,currentparagraph or 0,#treedata,detail)
+ report_export("%w<%s trigger=%a paragraph=%a index=%a detail=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata,detail)
else
- report_export("%s<%s trigger='%s' paragraph='%s' index='%s'>",spaces[currentdepth-1],fulltag,currentattribute or 0,currentparagraph or 0,#treedata)
+ report_export("%w<%s trigger=%a paragraph=%a index=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata)
end
end
tree = t
@@ -1686,7 +1681,7 @@ local function pop()
currentdepth = currentdepth - 1
if trace_export then
if top then
- report_export("%s</%s>",spaces[currentdepth],top)
+ report_export("%w</%s>",currentdepth,top)
else
report_export("</%s>",top)
end
@@ -1696,7 +1691,7 @@ end
local function continueexport()
if nofcurrentcontent > 0 then
if trace_export then
- report_export("%s<!-- injecting pagebreak space -->",spaces[currentdepth])
+ report_export("%w<!-- injecting pagebreak space -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " " -- pagebreak
@@ -1712,7 +1707,7 @@ local function pushentry(current)
local newdepth = #current
local olddepth = currentdepth
if trace_export then
- report_export("%s<!-- moving from depth %s to %s (%s) -->",spaces[currentdepth],olddepth,newdepth,current[newdepth])
+ report_export("%w<!-- moving from depth %s to %s (%s) -->",currentdepth,olddepth,newdepth,current[newdepth])
end
if olddepth <= 0 then
for i=1,newdepth do
@@ -1751,28 +1746,28 @@ local function pushentry(current)
pop()
end
elseif trace_export then
- report_export("%s<!-- staying at depth %s (%s) -->",spaces[currentdepth],newdepth,nesting[newdepth] or "?")
+ report_export("%w<!-- staying at depth %s (%s) -->",currentdepth,newdepth,nesting[newdepth] or "?")
end
end
return olddepth, newdepth
end
end
-local function pushcontent(addbreak)
+local function pushcontent(currentparagraph,newparagraph)
if nofcurrentcontent > 0 then
- if addbreak then
+ if currentparagraph then
if currentcontent[nofcurrentcontent] == "\n" then
if trace_export then
- report_export("%s<!-- removing newline -->",spaces[currentdepth])
+ report_export("%w<!-- removing newline -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent - 1
end
end
local content = concat(currentcontent,"",1,nofcurrentcontent)
if content == "" then
- -- omit; when addbreak we could push, remove spaces, pop
- elseif somespace[content] and addbreak then
- -- omit; when addbreak we could push, remove spaces, pop
+ -- omit; when currentparagraph we could push, remove spaces, pop
+ elseif somespace[content] and currentparagraph then
+ -- omit; when currentparagraph we could push, remove spaces, pop
else
local olddepth, newdepth
local list = taglist[currentattribute]
@@ -1783,9 +1778,9 @@ local function pushcontent(addbreak)
local nd = #td
td[nd+1] = { parnumber = currentparagraph, content = content }
if trace_export then
- report_export("%s<!-- start content with length %s -->",spaces[currentdepth],#content)
- report_export("%s%s",spaces[currentdepth],(gsub(content,"\n","\\n")))
- report_export("%s<!-- stop content -->",spaces[currentdepth])
+ report_export("%w<!-- start content with length %s -->",currentdepth,#content)
+ report_export("%w%s",currentdepth,(gsub(content,"\n","\\n")))
+ report_export("%w<!-- stop content -->",currentdepth)
end
if olddepth then
for i=newdepth-1,olddepth,-1 do
@@ -1795,22 +1790,22 @@ local function pushcontent(addbreak)
end
nofcurrentcontent = 0
end
- if addbreak then
+ if currentparagraph then
pushentry(makebreaklist(currentnesting))
if trace_export then
- report_export("%s<!-- break added due to %s -->",spaces[currentdepth],addbreak)
+                report_export("%w<!-- break added between paragraphs %a and %a -->",currentdepth,currentparagraph,newparagraph)
end
end
end
local function finishexport()
if trace_export then
- report_export("%s<!-- start finalizing -->",spaces[currentdepth])
+ report_export("%w<!-- start finalizing -->",currentdepth)
end
if nofcurrentcontent > 0 then
if somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
- report_export("%s<!-- removing space -->",spaces[currentdepth])
+ report_export("%w<!-- removing space -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent - 1
end
@@ -1821,32 +1816,22 @@ local function finishexport()
end
currentcontent = { } -- we're nice and do a cleanup
if trace_export then
- report_export("%s<!-- stop finalizing -->",spaces[currentdepth])
+ report_export("%w<!-- stop finalizing -->",currentdepth)
end
end
-- whatsit_code localpar_code
-local function tracedchar(c)
- if c == 0x20 then
- return "[space]"
- elseif c == 0 then
- return "[signal]"
- else
- return utfchar(c)
- end
-end
-
local function collectresults(head,list) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
local id = n.id -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = has_attribute(n,a_tagged)
+ local at = n[a_tagged]
if not at then
-- we need to tag the pagebody stuff as being valid skippable
--
- -- report_export("skipping character: 0x%05X %s (no attribute)",n.char,utfchar(n.char))
+ -- report_export("skipping character: %C (no attribute)",n.char)
else
-- we could add tonunicodes for ligatures (todo)
local components = n.components
@@ -1856,55 +1841,52 @@ local function collectresults(head,list) -- is last used (we also have currentat
local c = n.char
if last ~= at then
local tl = taglist[at]
--- if trace_export then
--- report_export("%s<!-- processing glyph %s (tag %s) -->",spaces[currentdepth],utfchar(c),at)
--- end
pushcontent()
currentnesting = tl
- currentparagraph = has_attribute(n,a_taggedpar)
+ currentparagraph = n[a_taggedpar]
currentattribute = at
last = at
pushentry(currentnesting)
- if trace_export then
- report_export("%s<!-- processing glyph %s (tag %s) -->",spaces[currentdepth],tracedchar(c),at)
- end
+ if trace_export then
+ report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,at)
+ end
-- We need to intercept this here; maybe I will also move this
-- to a regular setter at the tex end.
- local r = has_attribute(n,a_reference)
+ local r = n[a_reference]
if r then
referencehash[tl[#tl]] = r -- fulltag
end
--
elseif last then
- local ap = has_attribute(n,a_taggedpar)
+ local ap = n[a_taggedpar]
if ap ~= currentparagraph then
- pushcontent(format("new paragraph (%s -> %s)",tostring(currentparagraph),tostring(ap)))
+ pushcontent(currentparagraph,ap)
pushentry(currentnesting)
currentattribute = last
currentparagraph = ap
end
if trace_export then
- report_export("%s<!-- processing glyph %s (tag %s) -->",spaces[currentdepth],tracedchar(c),last)
+                        report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,last)
end
else
if trace_export then
- report_export("%s<!-- processing glyph %s (tag %s) -->",spaces[currentdepth],tracedchar(c),at)
+                        report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,at)
end
end
- local s = has_attribute(n,a_exportstatus)
+ local s = n[a_exportstatus]
if s then
c = s
end
if c == 0 then
if trace_export then
- report_export("%s<!-- skipping last glyph -->",spaces[currentdepth])
+ report_export("%w<!-- skipping last glyph -->",currentdepth)
end
elseif c == 0x20 then
- local a = has_attribute(n,a_characters)
+ local a = n[a_characters]
nofcurrentcontent = nofcurrentcontent + 1
if a then
if trace_export then
- report_export("%s<!-- turning last space into special space U+%05X -->",spaces[currentdepth],a)
+ report_export("%w<!-- turning last space into special space %U -->",currentdepth,a)
end
currentcontent[nofcurrentcontent] = specialspaces[a] -- special space
else
@@ -1934,24 +1916,6 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
end
end
- elseif id == hlist_code or id == vlist_code then
- local ai = has_attribute(n,a_image)
- if ai then
- local at = has_attribute(n,a_tagged)
- if nofcurrentcontent > 0 then
- pushcontent()
- pushentry(currentnesting) -- ??
- end
- pushentry(taglist[at]) -- has an index, todo: flag empty element
- if trace_export then
- report_export("%s<!-- processing image (tag %s)",spaces[currentdepth],last)
- end
- last = nil
- currentparagraph = nil
- else
- -- we need to determine an end-of-line
- collectresults(n.list,n)
- end
elseif id == disc_code then -- probably too late
if keephyphens then
local pre = n.pre
@@ -1963,40 +1927,40 @@ local function collectresults(head,list) -- is last used (we also have currentat
collectresults(n.replace,nil)
elseif id == glue_code then
-- we need to distinguish between hskips and vskips
- local ca = has_attribute(n,a_characters)
+ local ca = n[a_characters]
if ca == 0 then
-- skip this one ... already converted special character (node-acc)
elseif ca then
- local a = has_attribute(n,a_tagged)
+ local a = n[a_tagged]
if a then
local c = specialspaces[ca]
if last ~= a then
local tl = taglist[a]
if trace_export then
- report_export("%s<!-- processing space glyph U+%05X (tag %s) case 1 -->",spaces[currentdepth],ca,a)
+ report_export("%w<!-- processing space glyph %U tagged %a case 1 -->",currentdepth,ca,a)
end
pushcontent()
currentnesting = tl
- currentparagraph = has_attribute(n,a_taggedpar)
+ currentparagraph = n[a_taggedpar]
currentattribute = a
last = a
pushentry(currentnesting)
-- no reference check (see above)
elseif last then
- local ap = has_attribute(n,a_taggedpar)
+ local ap = n[a_taggedpar]
if ap ~= currentparagraph then
- pushcontent(format("new paragraph (%s -> %s)",tostring(currentparagraph),tostring(ap)))
+ pushcontent(currentparagraph,ap)
pushentry(currentnesting)
currentattribute = last
currentparagraph = ap
end
if trace_export then
- report_export("%s<!-- processing space glyph U+%05X (tag %s) case 2 -->",spaces[currentdepth],ca,last)
+ report_export("%w<!-- processing space glyph %U tagged %a case 2 -->",currentdepth,ca,last)
end
end
-- if somespace[currentcontent[nofcurrentcontent]] then
-- if trace_export then
- -- report_export("%s<!-- removing space -->",spaces[currentdepth])
+ -- report_export("%w<!-- removing space -->",currentdepth)
-- end
-- nofcurrentcontent = nofcurrentcontent - 1
-- end
@@ -2008,21 +1972,21 @@ local function collectresults(head,list) -- is last used (we also have currentat
if subtype == userskip_code then
if n.spec.width > threshold then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = has_attribute(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
- report_export("%s<!-- injecting spacing 5a -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 5a -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " "
elseif a then
-- e.g LOGO<space>LOGO
if trace_export then
- report_export("%s<!-- processing glue > threshold (tag %s => %s) -->",spaces[currentdepth],last,a)
+ report_export("%w<!-- processing glue > threshold tagged %s becomes %s -->",currentdepth,last,a)
end
pushcontent()
if trace_export then
- report_export("%s<!-- injecting spacing 5b -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 5b -->",currentdepth)
end
last = a
nofcurrentcontent = nofcurrentcontent + 1
@@ -2035,16 +1999,16 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif subtype == spaceskip_code or subtype == xspaceskip_code then
if not somespace[currentcontent[nofcurrentcontent]] then
- local a = has_attribute(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
- report_export("%s<!-- injecting spacing 7 (stay in element) -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " "
else
if trace_export then
- report_export("%s<!-- injecting spacing 7 (end of element) -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 7 (end of element) -->",currentdepth)
end
last = a
pushcontent()
@@ -2064,16 +2028,16 @@ local function collectresults(head,list) -- is last used (we also have currentat
nofcurrentcontent = nofcurrentcontent - 1
end
elseif not somespace[r] then
- local a = has_attribute(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
- report_export("%s<!-- injecting spacing 1 (end of line, stay in element) -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " "
else
if trace_export then
- report_export("%s<!-- injecting spacing 1 (end of line, end of element) -->",spaces[currentdepth])
+ report_export("%w<!-- injecting spacing 1 (end of line, end of element) -->",currentdepth)
end
last = a
pushcontent()
@@ -2091,6 +2055,24 @@ local function collectresults(head,list) -- is last used (we also have currentat
return
end
end
+ elseif id == hlist_code or id == vlist_code then
+ local ai = n[a_image]
+ if ai then
+ local at = n[a_tagged]
+ if nofcurrentcontent > 0 then
+ pushcontent()
+ pushentry(currentnesting) -- ??
+ end
+ pushentry(taglist[at]) -- has an index, todo: flag empty element
+ if trace_export then
+                report_export("%w<!-- processing image tagged %a -->",currentdepth,last)
+ end
+ last = nil
+ currentparagraph = nil
+ else
+ -- we need to determine an end-of-line
+ collectresults(n.list,n)
+ end
elseif id == kern_code then
local kern = n.kern
if kern > 0 then
@@ -2100,11 +2082,11 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
if kern > limit then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = has_attribute(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if not somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
- report_export("%s<!-- injecting spacing 8 (%s) -->",spaces[currentdepth],topoints(kern,true))
+ report_export("%w<!-- injecting spacing 8 (kern %p) -->",currentdepth,kern)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " "
@@ -2112,12 +2094,12 @@ local function collectresults(head,list) -- is last used (we also have currentat
elseif a then
-- e.g LOGO<space>LOGO
if trace_export then
- report_export("%s<!-- processing kern, threshold %s, tag %s => %s -->",spaces[currentdepth],topoints(limit,true),last,a)
+ report_export("%w<!-- processing kern, threshold %p, tag %s => %s -->",currentdepth,limit,last,a)
end
last = a
pushcontent()
if trace_export then
- report_export("%s<!-- injecting spacing 9 (%s) -->",spaces[currentdepth],topoints(kern,true))
+ report_export("%w<!-- injecting spacing 9 (kern %p) -->",currentdepth,kern)
end
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = " "
@@ -2136,13 +2118,13 @@ end
function nodes.handlers.export(head) -- hooks into the page builder
starttiming(treehash)
if trace_export then
- report_export("%s<!-- start flushing page -->",spaces[currentdepth])
+ report_export("%w<!-- start flushing page -->",currentdepth)
end
-- continueexport()
restart = true
collectresults(head)
if trace_export then
- report_export("%s<!-- stop flushing page -->",spaces[currentdepth])
+ report_export("%w<!-- stop flushing page -->",currentdepth)
end
stoptiming(treehash)
return head, true
@@ -2153,9 +2135,9 @@ function builders.paragraphs.tag(head)
for n in traverse_id(hlist_code,head) do
local subtype = n.subtype
if subtype == line_code then
- set_attribute(n,a_textblock,noftextblocks)
+ n[a_textblock] = noftextblocks
elseif subtype == glue_code or subtype == kern_code then
- set_attribute(n,a_textblock,0)
+ n[a_textblock] = 0
end
end
return false
@@ -2192,7 +2174,7 @@ local function allusedstylesheets(xmlfile,cssfiles,files)
cssfile = file.addsuffix(cssfile,"css")
end
files[#files+1] = cssfile
- report_export("adding css reference '%s",cssfile)
+ report_export("adding css reference '%s'",cssfile)
result[#result+1] = format(csspreamble,cssfile)
end
return concat(result)
@@ -2216,14 +2198,14 @@ local displaymapping = {
local function allusedelements(xmlfile)
local result = { format("/* template for file %s */",xmlfile) }
- for element, details in table.sortedhash(used) do
+ for element, details in sortedhash(used) do
result[#result+1] = format("/* category: %s */",element)
- for detail, nature in table.sortedhash(details) do
+ for detail, nature in sortedhash(details) do
local d = displaymapping[nature or "display"] or "block"
if detail == "" then
- result[#result+1] = format(e_template,element,d)
+ result[#result+1] = formatters[e_template](element,d)
else
- result[#result+1] = format(d_template,element,detail,d)
+ result[#result+1] = formatters[d_template](element,detail,d)
end
end
end
@@ -2340,16 +2322,16 @@ local function stopexport(v)
--
files = table.unique(files)
--
- report_export("saving xml data in '%s",xmlfile)
+ report_export("saving xml data in %a",xmlfile)
io.savedata(xmlfile,results)
--
- report_export("saving css image definitions in '%s",imagefilename)
+ report_export("saving css image definitions in %a",imagefilename)
io.savedata(imagefilename,allusedimages(xmlfile))
--
- report_export("saving css style definitions in '%s",stylefilename)
+ report_export("saving css style definitions in %a",stylefilename)
io.savedata(stylefilename,allusedstyles(xmlfile))
--
- report_export("saving css template in '%s",templatefilename)
+ report_export("saving css template in %a",templatefilename)
io.savedata(templatefilename,allusedelements(xmlfile))
--
if xhtmlfile then
@@ -2359,17 +2341,26 @@ local function stopexport(v)
xhtmlfile = file.addsuffix(xhtmlfile,"xhtml")
end
files[#files+1] = xhtmlfile
- report_export("saving xhtml variant in '%s",xhtmlfile)
+ report_export("saving xhtml variant in %a",xhtmlfile)
local xmltree = cleanxhtmltree(xml.convert(results))
xml.save(xmltree,xhtmlfile)
+ -- looking at identity is somewhat redundant as we also inherit from interaction
+ -- at the tex end
+ local identity = interactions.general.getidentity()
local specification = {
name = file.removesuffix(v),
identifier = os.uuid(),
images = uniqueusedimages(),
root = xhtmlfile,
files = files,
+ language = languagenames[tex.count.mainlanguagenumber],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
}
- report_export("saving specification in '%s' (mtxrun --script epub --make %s)",specificationfilename,specificationfilename)
+ report_export("saving specification in %a (mtxrun --script epub --make %s)",specificationfilename,specificationfilename)
io.savedata(specificationfilename,table.serialize(specification,true))
end
stoptiming(treehash)
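
Most of the mechanical changes in this file replace string.format calls with precompiled string.formatters templates. A minimal sketch of that idiom, assuming ConTeXt's string helpers from util-str.lua are loaded (stock Lua has no string.formatters); the template and values are made up for illustration:

-- a formatter is compiled on first access, cached, and then reused
local formatters  = string.formatters        -- ConTeXt extension, not plain Lua
local f_attribute = formatters[" %s=%q"]

local result = { }
result[#result+1] = f_attribute("detail","chapter")
result[#result+1] = f_attribute("n","3")
print(table.concat(result)) --  detail="chapter" n="3"

-- directives such as %w (indentation), %a (typed argument), %C (character),
-- %U (unicode) and %p (points) seen above are ConTeXt specific additions
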
diff --git a/Master/texmf-dist/tex/context/base/back-exp.mkiv b/Master/texmf-dist/tex/context/base/back-exp.mkiv
index 871d08b1991..9e65633d4b0 100644
--- a/Master/texmf-dist/tex/context/base/back-exp.mkiv
+++ b/Master/texmf-dist/tex/context/base/back-exp.mkiv
@@ -132,6 +132,11 @@
[\c!align=\number\raggedstatus,
\c!bodyfont=\bodyfontsize,
\c!width=\textwidth,
+ \c!title={\directinteractionparameter\c!title},
+ \c!subtitle={\directinteractionparameter\c!subtitle},
+ \c!author={\directinteractionparameter\c!author},
+ % \c!firstpage=, % imagename
+ % \c!lastpage=, % imagename
\c!hyphen=\v!no]
\def\dosynchronizeexport
@@ -141,13 +146,22 @@
bodyfont = \number\dimexpr\exportparameter\c!bodyfont,
width = \number\dimexpr\exportparameter\c!width,
hyphen = "\exportparameter\c!hyphen",
+ title = \!!bs\exportparameter\c!title\!!es,
+ subtitle = \!!bs\exportparameter\c!subtitle\!!es,
+ author = \!!bs\exportparameter\c!author\!!es,
+ firstpage = "\exportparameter\c!firstpage",
+ lastpage = "\exportparameter\c!lastpage",
}}}
\appendtoks
- \doifsomething{\backendparameter\c!export}{\dosynchronizeexport}%
+ \doifsomething{\backendparameter\c!export}\dosynchronizeexport
\to \everystarttext
\appendtoks
+ \doifsomething{\backendparameter\c!export}\dosynchronizeexport % in case it is done inside \starttext
+\to \everysetupdocument
+
+\appendtoks
\doifsomething{\backendparameter\c!xhtml}
{\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}%
\doifsomething{\backendparameter\c!css}
@@ -155,11 +169,11 @@
\to \everysetupbackend
\appendtoks
- \doifsomething{\backendparameter\c!export}
- {\setuptagging
- [\c!state=\v!start]%
- \enabledirectives
- [backend.export=\backendparameter\c!export]}%
+ \doifelsenothing{\backendparameter\c!export}
+ {\resetsystemmode\v!export}
+ {\setuptagging[\c!state=\v!start]%
+ \enabledirectives[backend.export=\backendparameter\c!export]%
+ \setsystemmode\v!export}%
\to \everysetupbackend
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/back-ini.lua b/Master/texmf-dist/tex/context/base/back-ini.lua
index 0c02e201a68..bdd931abd33 100644
--- a/Master/texmf-dist/tex/context/base/back-ini.lua
+++ b/Master/texmf-dist/tex/context/base/back-ini.lua
@@ -59,7 +59,11 @@ function backends.install(what)
local backend = backends[what]
if backend then
if trace_backend then
- report_backend("initializing backend %s (%s)",what,backend.comment or "no comment")
+ if backend.comment then
+ report_backend("initializing backend %a, %a",what,backend.comment)
+ else
+ report_backend("initializing backend %a",what)
+ end
end
backends.current = what
for category, default in next, defaults do
@@ -68,7 +72,7 @@ function backends.install(what)
setmetatableindex(target, plugin)
end
elseif trace_backend then
- report_backend("no backend named %s",what)
+ report_backend("no backend named %a",what)
end
end
end
@@ -93,10 +97,10 @@ tables.vfspecials = allocate {
stopslant = comment,
}
--- -- experimental code --
+-- experimental code --
-function commands.pdfrotation(a)
+function commands.pdfrotation(a) -- somewhat weird here
local s, c = sind(a), cosd(a)
- context("%s %s %s %s",c,s,-s,c)
+ context("%0.6f %0.6f %0.6f %0.6f",c,s,-s,c)
end
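
The switch from %s to %0.6f in commands.pdfrotation keeps the rotation matrix entries short and free of precision noise in the PDF content stream. A standalone illustration in plain Lua; the 30 degree angle is arbitrary and the real code uses ConTeXt's sind/cosd helpers:

-- why fixed precision is used for the pdf rotation matrix
local a = 30
local c = math.cos(math.rad(a))
local s = math.sin(math.rad(a))

print(string.format("%s %s %s %s",c,s,-s,c))
-- 0.86602540378444 0.5 -0.5 0.86602540378444   (tostring noise)
print(string.format("%0.6f %0.6f %0.6f %0.6f",c,s,-s,c))
-- 0.866025 0.500000 -0.500000 0.866025         (compact and predictable)
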
diff --git a/Master/texmf-dist/tex/context/base/back-ini.mkiv b/Master/texmf-dist/tex/context/base/back-ini.mkiv
index 8ece8f6e13b..fc8759c14db 100644
--- a/Master/texmf-dist/tex/context/base/back-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/back-ini.mkiv
@@ -61,13 +61,13 @@
\let \dotransformnextbox\gobblesixarguments % and pass last box
%D \macros
-%D {doovalbox}
+%D {back_ovalbox}
%D
%D When we look at the implementation, this is a complicated
%D one. There are eight arguments.
%D
%D \starttyping
-%D \doovalbox {w} {h} {d} {linewidth} {radius} {stroke} {fill} {variant}
+%D \back_ovalbox {w} {h} {d} {linewidth} {radius} {stroke} {fill} {variant}
%D \stoptyping
%D
%D This command has to return a \type{\vbox} which can be used
@@ -75,7 +75,7 @@
%D degrees, the stroke and fill are~\type{1} (true) or~\type{0}
%D (false).
-\let \doovalbox \gobbleeightarguments
+\let\back_ovalbox \gobbleeightarguments
%D \macros
%D {dostartclipping,dostopclipping}
diff --git a/Master/texmf-dist/tex/context/base/back-pdf.mkiv b/Master/texmf-dist/tex/context/base/back-pdf.mkiv
index 9e441e224a6..1cf7a3703d2 100644
--- a/Master/texmf-dist/tex/context/base/back-pdf.mkiv
+++ b/Master/texmf-dist/tex/context/base/back-pdf.mkiv
@@ -68,16 +68,17 @@
{\ctxcommand{setxmpfile("\backendparameter{xmpfile}")}}%
\to \everysetupbackend
+% \doifsomething{\backendparameter\c!format} .. at the lua end
+
\appendtoks
- \doifsomething{\backendparameter\c!format}
- {\ctxcommand{setformat {
+ \ctxcommand{setformat {
format = "\backendparameter\c!format",
level = "\backendparameter\c!level",
option = "\backendparameter\c!option",
profile = "\backendparameter\c!profile",
intent = "\backendparameter\c!intent",
file = "\backendparameter\c!file",
- }}}%
+ }}%
\to \everysetupbackend
%D For the moment we keep these.
@@ -140,7 +141,7 @@
%
% function lpdf.rotationcm(a)
% local s, c = sind(a), cosd(a)
-% return format("%s %s %s %s 0 0 cm",c,s,-s,c)
+% return format("%f %f %f %f 0 0 cm",c,s,-s,c)
% end
%
% \def\dostartmirroring{\pdfliteral{-1 0 0 1 0 0 cm}}
@@ -186,7 +187,7 @@
\unexpanded\def\dodotransformnextbox#1#2#3%
{\hbox
- {\hskip#1\onebasepoint
+ {\kern#1\onebasepoint
\raise#2\onebasepoint\hbox
{\pdfsave
\pdfsetmatrix{#3}% 0 0 (no #5 #6 yet)
@@ -220,12 +221,12 @@
\newbox\objectbox
-\def\dostartobject#1#2#3#4#5%
+\unexpanded\def\dostartobject#1#2#3#4#5% needs to be \unexpanded
{\bgroup
\setbox\objectbox\vbox\bgroup
\def\back_object_stop{\egroup\back_object_register{#1}{#2}}}
-\def\dostopobject
+\unexpanded\def\dostopobject % needs to be \unexpanded
{\back_object_stop
\egroup}
@@ -237,9 +238,6 @@
\immediate\pdfxform resources {\pdfbackendcurrentresources}\objectbox
\dosetobjectreference{#1}{#2}{\the\pdflastxform}}
-\def\doresetobjects
- {}
-
\let\m_back_object_reference\empty
\def\doinsertobject#1#2%
@@ -263,7 +261,7 @@
% for the moment here
%D \macros
-%D {doovalbox}
+%D {back_ovalbox}
%D
%D Drawing frames with round corners is inherited from the
%D main module.
@@ -276,7 +274,7 @@
% \def\back_oval_calculate#1#2#3%
% {\PointsToBigPoints{\dimexpr#2+#3\relax}#1}
-\unexpanded\def\doovalbox#1#2#3#4#5#6#7#8%
+\unexpanded\def\back_ovalbox#1#2#3#4#5#6#7#8%
{\forcecolorhack
\bgroup
% \scratchdimen#4%
@@ -306,16 +304,16 @@
\PointsToBigPoints{\dimexpr #2-\scratchdimen}\yymax
\PointsToBigPoints{\dimexpr-#3+\scratchdimen}\yymin
%
- \edef\dostroke{#6}%
- \edef\dofill{#7}%
- \edef\mode{\number#8 \space}%
+ \edef\dostroke{\number#6}%
+ \edef\dofill{\number#7}%
+ \edef\mode{\number#8}%
% no \ifcase, else \relax in pdfcode
\setbox\scratchbox\hbox
{\ifnum\dostroke\dofill>\zerocount
\pdfliteral
{q
\stroke\space w
- \ifcase\mode
+ \ifcase\mode\space
\xxmin\space \ymin \space m
\xxmax\space \ymin \space l
\xmax \space \ymin \space \xmax \space \yymin\space y
@@ -495,7 +493,7 @@
\xmin \space \ymin \space \xmin \space \yymin\space y
\or % 28
\fi
- \ifnum\mode>8
+ \ifnum\mode>8\space
S
\else
\ifnum\dostroke=\plusone S \fi
diff --git a/Master/texmf-dist/tex/context/base/back-swf.mkiv b/Master/texmf-dist/tex/context/base/back-swf.mkiv
index 1d225b550ea..09745e0f93f 100644
--- a/Master/texmf-dist/tex/context/base/back-swf.mkiv
+++ b/Master/texmf-dist/tex/context/base/back-swf.mkiv
@@ -11,12 +11,12 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% This is only a placeholder that demonstrates the usage of swf
-% resources.
-
-%D \starttyping
-%D \starttext
+%D This is only a placeholder that demonstrates the usage of swf resources.
+%D There is no need to include this file in the format. The module was
+%D tested by Luigi and Willi, and based on their suggestions the functionality
+%D was improved.
%D
+%D \starttyping
%D \enabletrackers[graphics.locating]
%D \enabletrackers[backend.swf]
%D
@@ -41,8 +41,6 @@
%D \stopTEXpage
%D \stoptyping
-\stoptext
-
\endinput
\starttext
@@ -72,8 +70,6 @@
},
\stopluaparameterset
-\starttext
-
% preview=swf:myset:display:1
% controls=swf:myset:controls:1
% resources=swf:myset:resources:1
diff --git a/Master/texmf-dist/tex/context/base/back-u3d.mkiv b/Master/texmf-dist/tex/context/base/back-u3d.mkiv
index dfe8a90c2a6..89d26ee41e8 100644
--- a/Master/texmf-dist/tex/context/base/back-u3d.mkiv
+++ b/Master/texmf-dist/tex/context/base/back-u3d.mkiv
@@ -11,10 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% This is only a placeholder that demonstrates the usage of u3d
-% resources. The user interface is rather messy an might be
-% improved. The files and setup is derived from an example by
-% Michael Vidiassov.
+%D This is only a placeholder that demonstrates the usage of u3d resources. The
+%D user interface is rather messy and might be improved. The files and setup are
+%D derived from an example by Michael Vidiassov.
\endinput
diff --git a/Master/texmf-dist/tex/context/base/bibl-bib.lua b/Master/texmf-dist/tex/context/base/bibl-bib.lua
index 444f7e9bcc4..c86a0c0c263 100644
--- a/Master/texmf-dist/tex/context/base/bibl-bib.lua
+++ b/Master/texmf-dist/tex/context/base/bibl-bib.lua
@@ -12,7 +12,6 @@ bibtex files and converts them to xml so that we can access the content
in a convenient way. Actually handling the data takes place elsewhere.</p>
--ldx]]--
-local utf = unicode.utf8
local lower, format, gsub, concat = string.lower, string.format, string.gsub, table.concat
local next = next
local utfchar = utf.char
@@ -22,6 +21,7 @@ local variables = interfaces and interfaces.variables
local settings_to_hash = utilities.parsers.settings_to_hash
local finalizers = xml.finalizers.tex
local xmlfilter, xmltext, getid = xml.filter, xml.text, lxml.getid
+local formatters = string.formatters
local P, R, S, C, Cc, Cs, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
@@ -144,9 +144,9 @@ function bibtex.load(session,filename)
if filename ~= "" then
local data = io.loaddata(filename) or ""
if data == "" then
- report_xml("empty file '%s', no conversion to xml",filename)
+ report_xml("empty file %a, no conversion to xml",filename)
elseif trace_bibxml then
- report_xml("converting file '%s' to xml",filename)
+ report_xml("converting file %a to xml",filename)
end
bibtex.convert(session,data)
end
@@ -163,7 +163,7 @@ function bibtex.new()
}
end
-local escaped_pattern = lpegpatterns.xml.escaped
+local p_escaped = lpegpatterns.xml.escaped
local ihatethis = {
f = "\\f",
@@ -203,17 +203,16 @@ function bibtex.toxml(session,options)
local convert = options.convert -- todo: interface
local strip = options.strip -- todo: interface
local entries = session.entries
- r = r + 1 ; result[r] = format("<?xml version='1.0' standalone='yes'?>")
- r = r + 1 ; result[r] = format("<bibtex>")
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
for id, categories in next, session.data do
id = lower(gsub(id,"^@",""))
for name, entry in next, categories do
if not entries or entries[name] then
- r = r + 1 ; result[r] = format("<entry tag='%s' category='%s'>",lower(name),id)
+ r = r + 1 ; result[r] = formatters["<entry tag='%s' category='%s'>"](lower(name),id)
for key, value in next, entry do
- value = gsub(value,"\\(.)",ihatethis)
- value = lpegmatch(escaped_pattern,value)
-
+ value = gsub(value,"\\(.)",ihatethis) -- this really needs checking
+ value = lpegmatch(p_escaped,value)
if value ~= "" then
if convert then
value = textoutf(value,true)
@@ -223,14 +222,14 @@ function bibtex.toxml(session,options)
-- kind of hackery ... bibtex databases are quite unportable
value = lpegmatch(filter,value) or value
end
- r = r + 1 ; result[r] = format(" <field name='%s'>%s</field>",key,value)
+ r = r + 1 ; result[r] = formatters[" <field name='%s'>%s</field>"](key,value)
end
end
- r = r + 1 ; result[r] = format("</entry>")
+ r = r + 1 ; result[r] = "</entry>"
end
end
end
- r = r + 1 ; result[r] = format("</bibtex>")
+ r = r + 1 ; result[r] = "</bibtex>"
result = concat(result,"\n")
-- alternatively we could use lxml.convert
session.xml = xml.convert(result, {
diff --git a/Master/texmf-dist/tex/context/base/bibl-bib.mkiv b/Master/texmf-dist/tex/context/base/bibl-bib.mkiv
index 56007d21c1f..d9010294d55 100644
--- a/Master/texmf-dist/tex/context/base/bibl-bib.mkiv
+++ b/Master/texmf-dist/tex/context/base/bibl-bib.mkiv
@@ -340,14 +340,13 @@
% todo : lang en language
% todo : directions
-
% variables
-\newcount\bibtexblock \bibtexblock\plusone
+\ifdefined\bibtexblock \else \newcount\bibtexblock \fi \bibtexblock\plusone
+\ifdefined\bibtexcounter \else \newcount\bibtexcounter \fi
\newtoks \everysetupbibtexpublications
\newtoks \everysetupbibtexcitations
-\newcount\bibtexcounter
\def\bibtexrefprefix{\number\bibtexblock:}
@@ -626,7 +625,7 @@
\doifelse{\bibtexpublicationsparameter\c!method}\v!local
{\ctxlua{bibtex.hacks.reset(1)}}% function can take method
{\ctxlua{bibtex.hacks.reset(2)}}%
- \doplacestructurelist
+ \strc_lists_place_current
{\currentbibtexsession}
{\currentbibtexcriterium}
{\namedlistparameter\currentbibtexsession\c!number}%
diff --git a/Master/texmf-dist/tex/context/base/bibl-tra.lua b/Master/texmf-dist/tex/context/base/bibl-tra.lua
index 6341898eef5..6a70160230e 100644
--- a/Master/texmf-dist/tex/context/base/bibl-tra.lua
+++ b/Master/texmf-dist/tex/context/base/bibl-tra.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['bibl-bib'] = {
+if not modules then modules = { } end modules ['bibl-tra'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -33,6 +33,12 @@ local template = utilities.strings.striplong([[
\bibdata{%s}
]])
+local bibtexbin = environment.arguments.mlbibtex and "mlbibcontext" or "bibtex"
+
+directives.register("publications.usemlbibtex", function(v)
+ bibtexbin = v and "mlbibcontext" or "bibtex"
+end)
+
function hacks.process(settings)
local style = settings.style or ""
local database = settings.database or ""
@@ -41,16 +47,16 @@ function hacks.process(settings)
interfaces.showmessage("publications",3)
io.savedata(file.addsuffix(jobname,"aux"),format(template,style,database))
if trace_bibtex then
- report_tex("processing bibtex file '%s'",jobname)
+ report_tex("processing bibtex file %a using %a",jobname,bibtexbin)
end
- os.execute(format("bibtex %s",jobname))
+ os.execute(format("%s %q",bibtexbin,jobname))
-- purge 'm
end
end
function hacks.register(str)
if trace_bibtex then
- report_tex("registering bibtex entry '%s'",str)
+ report_tex("registering bibtex entry %a",str)
end
registered[#registered+1] = str
ordered[str] = #registered
@@ -115,7 +121,7 @@ function hacks.registerplaced(str)
end
function hacks.doifalreadyplaced(str)
- commands.testcase(used[str])
+ commands.doifelse(used[str])
end
-- we ask for <n>:tag but when we can't find it we go back
diff --git a/Master/texmf-dist/tex/context/base/bibl-tra.mkiv b/Master/texmf-dist/tex/context/base/bibl-tra.mkiv
index 3531703adc9..511212a9822 100644
--- a/Master/texmf-dist/tex/context/base/bibl-tra.mkiv
+++ b/Master/texmf-dist/tex/context/base/bibl-tra.mkiv
@@ -194,31 +194,32 @@
\installstructurelistprocessor{pubs:userdata}%
{\ctxlua{bibtex.hacks.add(structures.lists.uservalue("\currentlist",\currentlistindex,"bibref"),\currentlistindex)}}
-\newcount\bibtexblock \bibtexblock\plusone
+\ifdefined\bibtexblock \else \newcount\bibtexblock \fi \bibtexblock\plusone
+\ifdefined\bibtexcounter \else \newcount\bibtexcounter \fi
%D \macros{bibdoif,bibdoifnot,bibdoifelse}
%D
%D Here are a few small helpers that are used a lot in all the typesetting commands
%D (\type{\bibinsert...}) we will encounter later.
-\long\def\bibdoifelse#1%
- {\@EA\def\@EA\!!stringa\@EA{#1}%
+\unexpanded\def\bibdoifelse#1%
+ {\expandafter\def\expandafter\!!stringa\expandafter{#1}%
\ifx\!!stringa\empty
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
-\long\def\bibdoifnot#1%
- {\@EA\def\@EA\!!stringa\@EA{#1}%
+\unexpanded\def\bibdoifnot#1%
+ {\expandafter\def\expandafter\!!stringa\expandafter{#1}%
\ifx\!!stringa\empty
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
-\long\def\bibdoif#1%
- {\@EA\def\@EA\!!stringa\@EA{#1}%
+\unexpanded\def\bibdoif#1%
+ {\expandafter\def\expandafter\!!stringa\expandafter{#1}%
\ifx\!!stringa\empty
\expandafter\gobbleoneargument
\else
@@ -248,12 +249,12 @@
\unexpanded\def\setupbibtex
{\dosingleempty\dosetupbibtex}
-\def\dosetupbibtex[#1]%
+\unexpanded\def\dosetupbibtex[#1]%
{\let\@@pbdatabase\empty
\getparameters[\??pb][#1]%
\the\everysetupbibtex}
-\def\installbibtexsorter#1#2%
+\unexpanded\def\installbibtexsorter#1#2%
{\setvalue{\??pb:\c!sort:#1}{#2}}
\installbibtexsorter\v!no {no}
@@ -284,7 +285,7 @@
\unexpanded\def\setuppublications
{\dosingleargument\dosetuppublications}
-\def\dosetuppublications[#1]%
+\unexpanded\def\dosetuppublications[#1]%
{\getparameters[\??pb][\c!alternative=,#1]%
\doifsomething\@@pbalternative
{\readsysfile{bibl-\@@pbalternative.tex}
@@ -330,7 +331,7 @@
\s!unknown=>\edef\@@citedefault{\@@pbrefcommand}]%
\to \everysetuppublications
-\def\bibleftnumber#1{#1\hfill~}
+\unexpanded\def\bibleftnumber#1{#1\hfill~}
%D \macros{usepublications}
%D
@@ -339,19 +340,22 @@
%D because similar numbers can be confusing. So, for the moment this is not
%D supported in \MKIV. (So no: see reference [3-5,9] in "some other document")
-\def\usepublications[#1]%
+\unexpanded\def\usepublications[#1]%
{\processcommalist[#1]\dousepublications}
-\def\dousepublications#1%
+\unexpanded\def\dousepublications#1%
{\doonlyonce{#1.\f!bibextension}{\dodousepublications{#1}}}
-\def\dodousepublications#1% brr, this par stuff
+\unexpanded\def\dodousepublications#1% brr, this par stuff
{\let\@@savedpar\par
\let\par\ignorespaces
\ifhmode\kern\zeropoint\fi
+ \pushcatcodetable
+ \setcatcodetable\ctxcatcodes
\readfile{#1.\f!bibextension}
{\showmessage\m!publications{4}{#1.\f!bibextension}}
{\showmessage\m!publications{2}{#1.\f!bibextension}}%
+ \popcatcodetable
\ifhmode\removeunwantedspaces\fi
\let\par\@@savedpar}
@@ -373,7 +377,7 @@
\unexpanded\def\setuppublicationlist
{\dosingleempty\dosetuppublicationlist}
-\def\dosetuppublicationlist[#1]%
+\unexpanded\def\dosetuppublicationlist[#1]%
{\getparameters[\??pv:l:][#1]%
\setuplist[pubs][\c!samplesize={AA99},\c!alternative=a,\c!interaction=,\c!pagenumber=\v!no,#1,\c!command=]}
@@ -415,14 +419,14 @@
\newtoks\initializebibdefinitions % we need to prevent clashes
-\def\simplebibdef#1% hh: funny expansion ?
- {\@EA\long\@EA\def\csname bib@#1\endcsname##1%
+\unexpanded\def\simplebibdef#1% hh: funny expansion ?
+ {\expandafter\def\csname bib@#1\endcsname##1%
{\setvalue{\??pb @#1}{##1}\ignorespaces}%
\expandafter \appendtoks
\expandafter\let\csname insert#1\expandafter\endcsname\csname bibinsert#1\endcsname
\to \initializebibdefinitions
- \@EA\unexpanded\@EA\def\csname bibinsert#1\endcsname##1##2##3%
- {\@EA\bibdoifelse\@EA{\csname\??pb @#1\endcsname}{##1\csname\??pb @#1\endcsname##2}{##3}}}
+ \expandafter\unexpanded\expandafter\def\csname bibinsert#1\endcsname##1##2##3%
+ {\expandafter\bibdoifelse\expandafter{\csname\??pb @#1\endcsname}{##1\csname\??pb @#1\endcsname##2}{##3}}}
\def\bibcommandlist
{abstract, annotate, arttitle, assignee, bibnumber, bibtype, biburl, chapter, city,
@@ -433,13 +437,13 @@
\processcommacommand[\bibcommandlist]\simplebibdef
-\def\bibinsertdoi#1#2#3% let's see how this fails
+\unexpanded\def\bibinsertdoi#1#2#3% let's see how this fails
{\bibdoifelse{\@@pb@doi}{#1\expanded{\bibgotoDOI{\@@pb@thekey}{\@@pb@doi}}#2}{#3}}
-\def\bibinsertbiburl#1#2#3% let's see how this fails
+\unexpanded\def\bibinsertbiburl#1#2#3% let's see how this fails
{\bibdoifelse{\@@pb@biburl}{#1\expanded{\bibgotoURL{\@@pb@thekey}{\@@pb@biburl}}#2}{#3}}
-\def\bibinsertmonth#1#2#3%
+\unexpanded\def\bibinsertmonth#1#2#3%
{\bibdoifelse\@@pb@month
{#1\doifnumberelse\@@pb@month
{\doifconversiondefinedelse\@@pbmonthconversion
@@ -452,7 +456,7 @@
\let\inserttype\bibinsertbibtype % for backward compat.
\to\initializebibdefinitions
-\def\newbibfield[#1]%
+\unexpanded\def\newbibfield[#1]%
{\simplebibdef{#1}%
\edef\bibcommandlist{\bibcommandlist,#1}}
@@ -473,7 +477,7 @@
% todo: instead of \getvalue{bla@num} in specs we should do
% \bibentrynum{bla} so that we can create a better namespace
-%D All of these \type{\@EA}'s and \type{\csnames} make this code
+%D All of these \type{\expandafter}'s and \type{\csnames} make this code
%D look far more complex than it really is. For example, the argument
%D \type{author} defines the macro \type{\bib@author} to do two
%D things: increment the counter \type{\author@num} (let's say to 2)
@@ -483,23 +487,23 @@
%D \specialbibinsert{author}{\author@num}{<before>}{<after>}{<not>}
%D \stoptyping
-\def\docomplexbibdef#1%
+\unexpanded\def\docomplexbibdef#1%
{\dodoubleempty\dodocomplexbibdef[#1]}
-\def\dodocomplexbibdef[#1][#2]#3%
+\unexpanded\def\dodocomplexbibdef[#1][#2]#3%
{\doquadrupleempty\dododocomplexbibdef[#1][#2][#3]}
-\def\dododocomplexbibdef[#1][#2][#3][#4]#5#6%
- {\@EA\increment\csname#1@num\endcsname % todo: bib in name
+\unexpanded\def\dododocomplexbibdef[#1][#2][#3][#4]#5#6%
+ {\expandafter\increment\csname#1@num\endcsname % todo: bib in name
\setevalue{\??pb @#1\csname#1@num\endcsname}{{#3}{#5}{#6}{#4}{#2}}\ignorespaces}
-\def\complexbibdef#1%
- {\@EA\newcounter\csname #1@num\endcsname
- \@EA\def\csname bib@#1\endcsname{\docomplexbibdef{#1}}%
+\unexpanded\def\complexbibdef#1%
+ {\expandafter\newcounter\csname #1@num\endcsname
+ \expandafter\def\csname bib@#1\endcsname{\docomplexbibdef{#1}}%
\expandafter \appendtoks
\expandafter\let\csname insert#1s\expandafter\endcsname\csname bibinsert#1s\endcsname
\to \initializebibdefinitions
- \@EA\def\csname bibinsert#1s\endcsname##1##2##3{\specialbibinsert{#1}{\csname #1@num\endcsname}{##1}{\unskip ##2}{##3}}}
+ \expandafter\def\csname bibinsert#1s\endcsname##1##2##3{\specialbibinsert{#1}{\csname #1@num\endcsname}{##1}{\unskip ##2}{##3}}}
\processcommalist[author,artauthor,editor]\complexbibdef
@@ -510,7 +514,7 @@
\newcount\etaldisplaycounter
\newcount\todocounter
-\def\specialbibinsert#1#2#3#4#5%
+\unexpanded\def\specialbibinsert#1#2#3#4#5%
{\bgroup
\ifnum#2>\zerocount
\etallimitcounter =0\bibalternative{#1etallimit}\relax
@@ -528,7 +532,7 @@
% find the current author list
\let\templist\empty
\dorecurse{#2}
- {\scratchtoks\@EA\@EA\@EA{\csname\??pb @#1\recurselevel\endcsname}%
+ {\scratchtoks\doubleexpandafter{\csname\??pb @#1\recurselevel\endcsname}%
\edef\templist{\ifx\templist\empty\else\templist,\fi\the\scratchtoks}}%
#3\publicationlistparameter\c!command{#1}{\todocounter}{\templist}#4\relax
\else
@@ -547,7 +551,7 @@
%D \#2 = number of items to be typeset
%D \#3 = commacommand containing authors
-\def\doprocessauthoritem#1#2#3%
+\unexpanded\def\doprocessauthoritem#1#2#3%
{\advance\scratchcounter\plusone
\ifnum\numexpr\scratchcounter-\plusone\relax<#2\relax
\publicationlistparameter{#1}#3%
@@ -566,7 +570,7 @@
\fi\fi
\fi}
-\def\dospecialbibinsert#1#2#3%
+\unexpanded\def\dospecialbibinsert#1#2#3%
{\getcommacommandsize[#3]%
\scratchcounter\zerocount
\processcommacommand[#3]{\doprocessauthoritem{#1}{#2}}}
@@ -588,25 +592,25 @@
%D \type{#5} junior
%D \stoplines
-\def\normalauthor#1#2#3#4#5%
+\unexpanded\def\normalauthor#1#2#3#4#5%
{\bibdoif{#1}{#1\bibalternative\c!firstnamesep}%
\bibdoif{#2}{#2\bibalternative\c!vonsep}%
#3%
\bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}}
-\def\normalshortauthor#1#2#3#4#5%
+\unexpanded\def\normalshortauthor#1#2#3#4#5%
{\bibdoif{#4}{#4\bibalternative\c!firstnamesep}%
\bibdoif{#2}{#2\bibalternative\c!vonsep}%
#3%
\bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}}
-\def\invertedauthor#1#2#3#4#5%
+\unexpanded\def\invertedauthor#1#2#3#4#5%
{\bibdoif{#2}{#2\bibalternative\c!vonsep}%
#3%
\bibdoif{#5}{\bibalternative\c!juniorsep#5}%
\bibdoif{#1}{\bibalternative\c!surnamesep#1\unskip}}
-\def\invertedshortauthor#1#2#3#4#5%
+\unexpanded\def\invertedshortauthor#1#2#3#4#5%
{\bibdoif{#2}{#2\bibalternative\c!vonsep}%
#3%
\bibdoif{#5}{\bibalternative\c!juniorsep#5}%
@@ -617,16 +621,16 @@
%D These are used in \type{\typesetapublication} to do
%D initializations and cleanups.
-\def\clearbibitem#1{\setvalue{\??pb @#1}{}}%
+\unexpanded\def\clearbibitem#1{\setvalue{\??pb @#1}{}}%
-\def\clearbibitemtwo#1% is this reset really needed? after all we reset the counter and we are local
+\unexpanded\def\clearbibitemtwo#1% is this reset really needed? after all we reset the counter and we are local
{%\dofastrecurse\plusone{\csname#1@num\endcsname}\plusone{\expandafter\let\csname\??pb @#1\recurselevel\undefined}%
\letvalue{#1@num}\!!zerocount}
-\def\bibitemdefs#1%
- {\@EA\let\csname#1\expandafter\endcsname\csname bib@#1\endcsname}
+\unexpanded\def\bibitemdefs#1%
+ {\expandafter\let\csname#1\expandafter\endcsname\csname bib@#1\endcsname}
-\def\presetbibvariables % make a fast resetter (toks)
+\unexpanded\def\presetbibvariables % make a fast resetter (toks)
{\processcommacommand[\bibcommandlist,crossref]\clearbibitem
\processcommalist [artauthor,author,editor]\clearbibitemtwo
\processcommacommand[\bibcommandlist]\bibitemdefs
@@ -637,8 +641,6 @@
%D We are coming to the end of this module, to the macros that
%D do typesetting and read the \type{bbl} file.
-\newcount\bibtexcounter
-
%D Just a \type{\dosingleempty} is the most friendly
%D of doing this: there need not even be an argument
%D to \type{\startpublication}. Of course, then there
@@ -677,7 +679,7 @@
% hm, we can store at the lua end ...
-\def\dostartpublication[#1]%
+\unexpanded\def\dostartpublication[#1]%
{\begingroup
\doifassignmentelse{#1}%
{\getparameters[\??pb][k=\s!unknown,t=article,n=,s=,a=,y=,o=,u=,#1]}%
@@ -686,7 +688,7 @@
\catcode\commentasciicode\othercatcode
\dodostartpublication}
-\def\dodostartpublication#1\stoppublication
+\unexpanded\def\dodostartpublication#1\stoppublication
{\setxvalue{pbd:\@@pbk}##1{\noexpand\ifcase##1\noexpand\or
\@@pbk\noexpand\or
\@@pba\noexpand\or
@@ -710,7 +712,7 @@
\def\bibgetvaru#1{\csname pbd:#1\endcsname\pluseight}
\def\bibgetvard#1{\csname pbd:#1\endcsname\plusnine }
-\def\doifbibreferencefoundelse#1%
+\unexpanded\def\doifbibreferencefoundelse#1%
{\preloadbiblist
\doifdefinedelse{pbd:#1}
\firstoftwoarguments
@@ -726,10 +728,10 @@
% used in bib self
-\def\bib@crossref#1% called via \csname \endcsname
+\unexpanded\def\bib@crossref#1% called via \csname \endcsname
{\setvalue{\??pb @crossref}{#1}\ignorespaces}
-\def\bibinsertcrossref#1#2#3%
+\unexpanded\def\bibinsertcrossref#1#2#3%
{\bibdoifelse\@@pb@crossref{#1\cite[\@@pb@crossref]#2}{#3}}
\let\insertcrossref\gobblethreearguments
@@ -741,7 +743,7 @@
%D item (like the 'short' key). For this, the ID of the current
%D item is passed in the implict parameter \type{\currentpublicationkey}
-\def\doprocessbibtexentry#1{\typesetapublication{#1}}
+\unexpanded\def\doprocessbibtexentry#1{\typesetapublication{#1}}
\unexpanded\def\typesetpubslist
{\begingroup
@@ -755,7 +757,7 @@
\ctxlua{bibtex.hacks.filterall()}}
{\doif{\listparameter\c!criterium}\v!cite
{\setuplist[pubs][\c!criterium=\v!here]}%
- \doplacestructurelist
+ \strc_lists_place_current
{pubs}%
{\listparameter\c!criterium}%
{\listparameter\c!number}%
@@ -814,7 +816,7 @@
\unexpanded\def\completepublications
{\dosingleempty\docompletepublications}
-\def\docompletepublications[#1]%
+\unexpanded\def\docompletepublications[#1]%
{\begingroup
\setuplist[pubs][#1]%
\edef\currentbibtexsessiontitle{\publicationlistparameter\c!title}%
@@ -834,13 +836,13 @@
\unexpanded\def\placepublications
{\dosingleempty\doplacepublications}
-\def\doplacepublications[#1]%
+\unexpanded\def\doplacepublications[#1]%
{\begingroup
\setuplist[pubs][#1]%
\dodoplacepublications
\endgroup}
-\def\dodoplacepublications
+\unexpanded\def\dodoplacepublications
{\determinelistcharacteristics[pubs]%
\initializepubslist
\doifnot{\namedlistparameter{pubs}\c!option}\v!continue
@@ -870,7 +872,7 @@
% we'll define proper handlers later
-\def\doplacepublicationindeed#1%
+\unexpanded\def\doplacepublicationindeed#1%
{\doifbibreferencefoundelse{#1}
{\global\advance\bibtexcounter\plusone
\def\currentpublicationkey{#1}%
@@ -882,11 +884,11 @@
\strc_lists_apply_renderingsetup}
{}} % invalid
-\def\bibtexpubtext
+\unexpanded\def\bibtexpubtext
{\expanded{\reference[\bibrefprefix\currentpublicationkey]{\number\bibtexcounter}}%
\strut\dotypesetapublication\currentpublicationkey\strut}
-\def\dotypesetapublication#1%
+\unexpanded\def\dotypesetapublication#1%
{\bgroup
\the\initializebibdefinitions % NEW
\def\@@currentalternative{:l:}%
@@ -907,9 +909,9 @@
%D This is the result of bibtex's `language' field.
-\def\setbiblanguage#1#2{\setvalue{\??pb\s!language#1}{#2}}
+\unexpanded\def\setbiblanguage#1#2{\setvalue{\??pb\s!language#1}{#2}}
-\def\lang#1%
+\unexpanded\def\lang#1%
{\edef\biblanguage{#1}%
\ifcsname\??pb\s!language#1\endcsname
\language[\getvalue{\??pb\s!language#1}]%
@@ -927,13 +929,13 @@
\unexpanded\def\cite
{\strictdoifnextoptionalelse\dodocite\dobibref}
-\def\dobibref#1%
+\unexpanded\def\dobibref#1%
{\docite[#1][]}
-\def\dodocite[#1]%
+\unexpanded\def\dodocite[#1]%
{\strictdoifnextoptionalelse{\docite[#1]}{\docite[#1][]}}
-\def\docite[#1][#2]%
+\unexpanded\def\docite[#1][#2]%
{\begingroup
\doifelsenothing{#2}\secondargumentfalse\secondargumenttrue
\ifsecondargument
@@ -943,7 +945,7 @@
\fi
\endgroup}
-\def\dowhatevercite#1#2%
+\unexpanded\def\dowhatevercite#1#2%
{\processcommalist[#2]\docitation
\setupinteraction[\c!style=]%
\doifassignmentelse
@@ -971,7 +973,7 @@
\doifelsevalue{@@pv\@@currentalternative\c!compress}\v!no\bibcitecompressfalse\bibcitecompresstrue
\getvalue{bib\@@currentalternative ref}[#2]}
-\def\donumberedcite#1%
+\unexpanded\def\donumberedcite#1%
{\processcommalist[#1]\docitation
\setupinteraction[\c!style=]%
\edef\@@currentalternative{\@@citedefault}%
@@ -980,14 +982,14 @@
%D \macros{nocite}
-\def\nocite[#1]%
+\unexpanded\def\nocite[#1]%
{\processcommalist[#1]\docitation}
%D \macros{setupcite}
\unexpanded\def\setupcite{\dodoubleempty\dosetupcite}
-\def\dosetupcite[#1][#2]%
+\unexpanded\def\dosetupcite[#1][#2]%
{\ifsecondargument
\def\dodosetupcite##1{\getparameters[\??pv##1][#2]}%
\processcommalist[#1]\dodosetupcite
@@ -997,7 +999,7 @@
%D Low-level stuff
-\def\getcitedata#1[#2]#3[#4]#5to#6%
+\unexpanded\def\getcitedata#1[#2]#3[#4]#5to#6%
{\bgroup
\dofetchapublication{#4}%
\doifdefinedelse{\??pb @bib#2}%
@@ -1005,12 +1007,12 @@
{\xdef#6{\getvalue{\??pb @#2}}}%
\egroup}
-\def\dofetchapublication#1%
+\unexpanded\def\dofetchapublication#1%
{\def\currentpublicationkey{#1}%
\presetbibvariables
\ignorespaces\bibgetvard{#1}}
-\def\docitation#1%
+\unexpanded\def\docitation#1%
{\iftrialtypesetting \else
\expanded{\writedatatolist[pubs][bibref=#1]}%
\fi}
@@ -1044,7 +1046,16 @@
%D Delegate this to \LUA.
-\def\ixbibauthoryear#1#2#3#4%
+% \let\ixlastcommand \relax
+% \let\ixsecondcomman \relax
+% \let\ixfirstcommand \relax
+% \let\thebibauthors \empty
+% \let\thebibyears \empty
+% \let\authorcount \!!zerocount
+
+\let\currentbibauthor\empty
+
+\unexpanded\def\ixbibauthoryear#1#2#3#4%
{\bgroup
\gdef\ixlastcommand {#4}%
\gdef\ixsecondcommand{#3}%
@@ -1066,17 +1077,17 @@
%D This macro only has to make sure that the lists
%D \type{\thebibauthors} and \type{\thebibyears} are printed.
-\def\dobibauthoryear
+\unexpanded\def\dobibauthoryear
{\scratchcounter\zerocount
\getcommacommandsize[\thebibauthors]%
\edef\authorcount{\commalistsize}%
- \@EA\processcommalist\@EA[\thebibauthors]\dodobibauthoryear}
+ \expandafter\processcommalist\expandafter[\thebibauthors]\dodobibauthoryear}
-\def\dodobibauthoryear#1%
+\unexpanded\def\dodobibauthoryear#1%
{\advance\scratchcounter\plusone
\edef\wantednumber{\the\scratchcounter}%
\getfromcommacommand[\thebibyears][\wantednumber]%
- \@EA\def\@EA\currentbibyear\@EA{\commalistelement}%
+ \expandafter\def\expandafter\currentbibyear\expandafter{\commalistelement}%
\setcurrentbibauthor{#1}%
\ifnum\scratchcounter=\plusone
\ixfirstcommand
@@ -1086,25 +1097,25 @@
\ixsecondcommand
\fi\fi}
-\def\setcurrentbibauthor#1%
- {\getcommacommandsize[#1]%
- \ifcase\commalistsize
- % anonymous?
- \let\currentbibauthor\empty
- \or
- \def\currentbibauthor{#1}%
- \or
- \expanded{\docurrentbibauthor#1}%
- \else
- \handlemultiplebibauthors{\commalistsize}{#1}%
- \fi}
+\unexpanded\def\setcurrentbibauthor#1% sensitive for empty entries but I don't want to touch this
+ {\getcommacommandsize[#1]%
+ \ifcase\commalistsize
+ % anonymous?
+ \let\currentbibauthor\empty
+ \or
+ \def\currentbibauthor{#1}%
+ \or
+ \expanded{\docurrentbibauthor#1}%
+ \else
+ \handlemultiplebibauthors{\commalistsize}{#1}%
+ \fi}
\newcount\citescratchcounter
-\def\handlemultiplebibauthors#1#2%
+\unexpanded\def\handlemultiplebibauthors#1#2%
{\citescratchcounter\zerocount
\let\currentbibauthor\empty
- \def\bibprocessauthoritem##1%
+ \unexpanded\def\bibprocessauthoritem##1%
{\advance\citescratchcounter\plusone
\ifnum \citescratchcounter=#1\relax
\edef\currentbibauthor{\currentbibauthor##1}%
@@ -1122,10 +1133,10 @@
%D This discovery of authoretallimit is not the best one,
%D but it will do for now.
-\def\docurrentbibauthor#1,#2%
+\unexpanded\def\docurrentbibauthor#1,#2%
{\doifemptyelse{#2}
{\def\currentbibauthor{#1\bibalternative{otherstext}}}
- {\@EA\ifx\csname\??pv\@@currentalternative authoretallimit\endcsname\relax
+ {\expandafter\ifx\csname\??pv\@@currentalternative authoretallimit\endcsname\relax
\edef\currentbibauthor{#1\bibalternative{andtext}#2}%
\else
\edef\currentbibauthor{#1%
@@ -1137,7 +1148,7 @@
%D edef, and the \type{\robustdoifinsetelse} doesn't listen to
%D \type{\doglobal }
-\def\robustaddtocommalist#1#2% {item} \cs
+\unexpanded\def\robustaddtocommalist#1#2% {item} \cs
{\robustdoifinsetelse{#1}#2\resetglobal
{\dodoglobal\xdef#2{\ifx#2\empty\else#2,\fi#1}}}
@@ -1146,15 +1157,15 @@
%D Now we get to the macros that fill the two lists.
%D The `simple' one really is quite simple.
-\def\donormalbibauthoryear#1%
+\unexpanded\def\donormalbibauthoryear#1%
{\def\myauthor{Xxxxxxxxxx}%
\def\myyear{0000}%
\doifbibreferencefoundelse{#1}
{\def\myauthor{{\bibgetvara{#1}}}%
\def\myyear {\bibgetvary{#1}}}%
{}%
- \@EA\doglobal\@EA\appendtocommalist\@EA{\myauthor}\thebibauthors
- \@EA\doglobal\@EA\appendtocommalist\@EA{\myyear }\thebibyears}
+ \expandafter\doglobal\expandafter\appendtocommalist\expandafter{\myauthor}\thebibauthors
+ \expandafter\doglobal\expandafter\appendtocommalist\expandafter{\myyear }\thebibyears}
%D \macros{docompressbibauthoryear}
%D
@@ -1162,7 +1173,7 @@
%D the reference is not found or the reference does not contain
%D author data. No questions marks o.s.s. (to be fixed later)
-\def\docompressbibauthoryear#1%
+\unexpanded\def\docompressbibauthoryear#1%
{\def\myauthor{Xxxxxxxxxx}%
\def\myyear {0000}%
\doifbibreferencefoundelse{#1}
@@ -1185,14 +1196,14 @@
%D \type{\bibitemwanted} are needed later to insert the year
%D information in the correct item of \type{\thebibyears}
-\def\checkifmyauthoralreadyexists
+\unexpanded\def\checkifmyauthoralreadyexists
{\doifemptyelsevalue{thebibauthors}
{\global\bibitemwanted \plusone
\global\bibitemcounter\plusone
\xdef\thebibauthors{{\myauthor}}}
{% the next weirdness is because according to \getcommalistsize,
% the length of \type{[{{},{}}]} is 2.
- \@EA\getcommalistsize\@EA[\thebibauthors,]%
+ \expandafter\getcommalistsize\expandafter[\thebibauthors,]%
\global\bibitemcounter\numexpr\commalistsize+\minusone\relax
\global\bibitemwanted \zerocount
\processcommacommand[\thebibauthors]\docomparemyauthor}}
@@ -1204,7 +1215,7 @@
%D the counters will stay at their present values and everything
%D will be setup properly to insert the year info.
-\def\docomparemyauthor#1%
+\unexpanded\def\docomparemyauthor#1%
{\global\advance\bibitemwanted \plusone
\def\mytempc{#1}%
\ifx\mytempc\myauthor
@@ -1212,12 +1223,12 @@
\else\ifnum\bibitemwanted=\bibitemcounter\relax
\global\advance\bibitemwanted \plusone
\global\bibitemcounter\bibitemwanted\relax
- \@EA\doglobal\@EA\robustaddtocommalist\@EA{{\myauthor}}\thebibauthors
+ \expandafter\doglobal\expandafter\robustaddtocommalist\expandafter{{\myauthor}}\thebibauthors
\fi\fi}
%D This macro should be clear now.
-\def\findmatchingyear
+\unexpanded\def\findmatchingyear
{\edef\wantednumber{\the\bibitemwanted}%
\getfromcommacommand[\thebibyears][\wantednumber]%
\ifx\commalistelement\empty
@@ -1234,9 +1245,9 @@
%D Beware, we can have cites without reference match.
-\def\gotobiblink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
-\def\atbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
-\def\inbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
+\unexpanded\def\gotobiblink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
+\unexpanded\def\atbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
+\unexpanded\def\inbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
%D \macros{bibauthoryearref,bibauthoryearsref,bibauthorref,bibyearref}
%D
@@ -1244,35 +1255,29 @@
%D \type{\ixbibauthoryearref} stores the data in the macros
%D \type{\currentbibauthor} and \type{\currentbibyear}.
-\def\ifbibinteractionelse
+\unexpanded\def\doifbibinteractionelse
{\iflocation
\edef\test{\bibalternative\c!interaction}%
\ifx\test\v!stop
- \@EA\@EA\@EA\secondoftwoarguments
+ \doubleexpandafter\secondoftwoarguments
\else
- \@EA\@EA\@EA\firstoftwoarguments
+ \doubleexpandafter\firstoftwoarguments
\fi
\else
- \@EA\secondoftwoarguments
+ \expandafter\secondoftwoarguments
\fi}
-\def\ifbibinteractionelse
- {\iflocation
- \doifelse{\bibalternative\c!interaction}\v!stop
- {\@EA\secondoftwoarguments}
- {\@EA\firstoftwoarguments}%
- \else
- \@EA\secondoftwoarguments
- \fi}
+% \unexpanded\def\bibmaybeinteractive#1#2%
+% {\ifbibcitecompress
+% #2%
+% \else
+% \doifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}%
+% \fi}
-\def\bibmaybeinteractive#1#2%
- {\ifbibcitecompress
- #2%
- \else
- \ifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}%
- \fi}
+\unexpanded\def\bibmaybeinteractive#1#2%
+ {\doifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}}
-\def\bibauthoryearref[#1]%
+\unexpanded\def\bibauthoryearref[#1]%
{\ixbibauthoryear{#1}%
{\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween
\bibalternative\v!left{\currentbibyear}\bibalternative\v!right}}
@@ -1283,7 +1288,7 @@
\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween
\bibalternative\v!left {\currentbibyear}\bibalternative\v!right}}}
-\def\bibauthoryearsref[#1]%
+\unexpanded\def\bibauthoryearsref[#1]%
{\bibalternative\v!left
\ixbibauthoryear{#1}
{\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}}
@@ -1293,7 +1298,7 @@
\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}}%
\bibalternative\v!right}
-\def\bibauthorref[#1]%
+\unexpanded\def\bibauthorref[#1]%
{\bibalternative\v!left
\ixbibauthoryear{#1}%
{\bibmaybeinteractive{#1}{{\currentbibauthor}}}
@@ -1301,7 +1306,7 @@
{\bibalternative\c!lastpubsep\bibmaybeinteractive{#1}{{\currentbibauthor}}}%
\bibalternative\v!right}
-\def\bibyearref[#1]%
+\unexpanded\def\bibyearref[#1]%
{\bibalternative\v!left
\ixbibauthoryear{#1}%
{\bibmaybeinteractive{#1}{{\currentbibyear}}}
@@ -1318,109 +1323,109 @@
\newconditional\firstbibrefsep
-\def\bibresetrefsep
+\unexpanded\def\bibresetrefsep
{\settrue\firstbibrefsep}
-\def\bibinsertrefsep
+\unexpanded\def\bibinsertrefsep
{\ifconditional\firstbibrefsep
\setfalse\firstbibrefsep
\else
\bibalternative\c!pubsep
\fi}
-\def\bibshortref[#1]%
+\unexpanded\def\bibshortref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibshortref
\bibalternative\v!right}
-\def\dobibshortref#1%
+\unexpanded\def\dobibshortref#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\gotobiblink{\bibgetvars{#1}}[#1]}
{}}
-\def\bibserialref[#1]%
+\unexpanded\def\bibserialref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibserialref
\bibalternative\v!right}
-\def\dobibserialref#1%
+\unexpanded\def\dobibserialref#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\gotobiblink{\bibgetvarn{#1}}[#1]}
{}}
-\def\bibkeyref[#1]%
+\unexpanded\def\bibkeyref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibkeyref
\bibalternative\v!right}
-\def\dobibkeyref#1%
+\unexpanded\def\dobibkeyref#1%
{\bibinsertrefsep
\gotobiblink{#1}[#1]}
-\def\bibgotoDOI#1#2%
- {\ifbibinteractionelse
+\unexpanded\def\bibgotoDOI#1#2%
+ {\doifbibinteractionelse
{\useURL[bibfooDoi#1][#2]%
\useURL[bibfoo#1][http://dx.doi.org/#2]%
\goto{\url[bibfooDoi#1]}[url(bibfoo#1)]}
{\hyphenatedurl{#2}}}
-\def\bibdoiref[#1]%
+\unexpanded\def\bibdoiref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibdoiref
\bibalternative\v!right}
-\def\dobibdoiref#1%
+\unexpanded\def\dobibdoiref#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\expanded{\bibgotoDOI{#1}{\bibgetvaro{#1}}}}
{}}
-\def\biburlref[#1]%
+\unexpanded\def\biburlref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobiburlref
\bibalternative\v!right}
-\def\bibgotoURL#1#2%
- {\ifbibinteractionelse
+\unexpanded\def\bibgotoURL#1#2%
+ {\doifbibinteractionelse
{\useURL[bibfoo#1][#2]\goto{\url[bibfoo#1]}[url(bibfoo#1)]}
{\hyphenatedurl{#2}}}
-\def\dobiburlref#1%
+\unexpanded\def\dobiburlref#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\expanded{\bibgotoURL{#1}{\bibgetvaru{#1}}}}
{}}
-\def\bibtyperef[#1]%
+\unexpanded\def\bibtyperef[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibtyperef
\bibalternative\v!right}
-\def\dobibtyperef#1%
+\unexpanded\def\dobibtyperef#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\gotobiblink{\bibgetvart{#1}}[#1]}
{}}
-\def\bibpageref[#1]%
+\unexpanded\def\bibpageref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibpageref
\bibalternative\v!right}
-\def\dobibpageref#1%
+\unexpanded\def\dobibpageref#1%
{\bibinsertrefsep
- \ifbibinteractionelse
+ \doifbibinteractionelse
{\atbiblink[#1]}
{{\referencingfalse\at[#1]}}}
-\def\bibdataref[#1]%
+\unexpanded\def\bibdataref[#1]%
{\bibalternative\v!left
\bibresetrefsep\processcommalist[#1]\dobibdata
\bibalternative\v!right}
-\def\dobibdata#1%
+\unexpanded\def\dobibdata#1%
{\bibinsertrefsep
\doifbibreferencefoundelse{#1}
{\dotypesetapublication{#1}}
@@ -1430,7 +1435,7 @@
%D \macros{bibnumref}
-\def\bibnumref[#1]%
+\unexpanded\def\bibnumref[#1]%
{\begingroup
\bibalternative\v!left
\penalty\plustenthousand
@@ -1438,7 +1443,7 @@
\bibalternative\v!right
\endgroup}
-\def\dowithbibtexnumrefconnector#1#2%
+\unexpanded\def\dowithbibtexnumrefconnector#1#2%
{\ifnum#1>\plusone
\ifnum#2>\plusone
\ifnum#2=#1\relax
@@ -1449,12 +1454,12 @@
\fi
\fi}
-\def\dowithbibtexnumref#1#2#3#4#5% n, i, prefix block ref
+\unexpanded\def\dowithbibtexnumref#1#2#3#4#5% n, i, prefix block ref
{\dowithbibtexnumrefconnector{#1}{#2}%
\def\bibrefprefix{#4:}%
\inbiblink[#5]}
-\def\dowithbibtexnumrefrange#1#2#3#4#5#6#7% n, i, prefix block ref
+\unexpanded\def\dowithbibtexnumrefrange#1#2#3#4#5#6#7% n, i, prefix block ref
{\dowithbibtexnumrefconnector{#1}{#2}%
\def\bibrefprefix{#4:}%
\inbiblink[#5]%
@@ -1465,27 +1470,24 @@
%D By request from Sanjoy. This makes it easier to implement
%D \type{\citeasnoun}.
-\def\bibauthornumref[#1]%
+\unexpanded\def\bibauthornumref[#1]%
{\getcommalistsize[#1]%
\global\bibitemcounter\commalistsize
\bibresetrefsep
\processcommalist[#1]\dobibauthornumref}
-\def\dobibauthornumref#1%
- {\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
- {\begingroup
- \bibgetvara{#1}%
- \bibalternative\c!inbetween
- \setuppublications[\c!refcommand=num]%
- \cite[#1]%
- \endgroup}
- {}}
+\unexpanded\def\dobibauthornumref#1%
+ {\bibinsertrefsep
+ \doifbibreferencefoundelse{#1}
+ {\begingroup
+ \cite[\c!left=,\c!right=,\c!alternative=\v!author][#1]%
+ \bibalternative\c!inbetween
+ \cite[num][#1]%
+ \endgroup}
+ {}}
%D And some defaults are loaded from bibl-apa:
-\def\c!monthconversion{monthconversion} % todo
-
\setuppublications
[\c!monthconversion=,
\c!alternative=apa,
@@ -1495,11 +1497,13 @@
\c!refcommand=num,
\c!numbercommand=\bibleftnumber]
-\def\preloadbiblist
+\unexpanded\def\preloadbiblist
{\globallet\preloadbiblist\relax
\dousepublications\jobname}
% \appendtoks \preloadbiblist \to \everysetuppublications
% \appendtoks \preloadbiblist \to \everystarttext
+\let\ifbibinteractionelse\doifbibinteractionelse
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/blob-ini.lua b/Master/texmf-dist/tex/context/base/blob-ini.lua
index b97485b1b93..4debaf94c5c 100644
--- a/Master/texmf-dist/tex/context/base/blob-ini.lua
+++ b/Master/texmf-dist/tex/context/base/blob-ini.lua
@@ -74,7 +74,7 @@ function blobs.dispose(t)
end
end
-function blobs.append(t,str) -- will be link nodes.link
+function blobs.append(t,str) -- compare concat and link
local typ = type(str)
local dummy = nil
if typ == "number" then
diff --git a/Master/texmf-dist/tex/context/base/buff-ini.lua b/Master/texmf-dist/tex/context/base/buff-ini.lua
index 7098679ca17..475d23efe84 100644
--- a/Master/texmf-dist/tex/context/base/buff-ini.lua
+++ b/Master/texmf-dist/tex/context/base/buff-ini.lua
@@ -13,21 +13,27 @@ local trace_visualize = false trackers.register("buffers.visualize", function(v
local report_buffers = logs.reporter("buffers","usage")
local report_grabbing = logs.reporter("buffers","grabbing")
+local context, commands = context, commands
+
local concat = table.concat
-local type, next = type, next
-local sub, format, match, find = string.sub, string.format, string.match, string.find
-local count, splitlines = string.count, string.splitlines
+local type, next, load = type, next, load
+local sub, format = string.sub, string.format
+local splitlines, validstring = string.splitlines, string.valid
+local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match
-local variables = interfaces.variables
+local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
+local formatters = string.formatters
+
+local v_yes = variables.yes
-local ctxcatcodes = tex.ctxcatcodes
-local txtcatcodes = tex.txtcatcodes
+local catcodenumbers = catcodes.numbers
-buffers = { }
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local txtcatcodes = catcodenumbers.txtcatcodes
+buffers = buffers or { }
local buffers = buffers
-local context = context
local cache = { }
@@ -84,6 +90,44 @@ local function collectcontent(names,separator) -- no print
end
end
+local function loadcontent(names) -- no print
+ if type(names) == "string" then
+ names = settings_to_array(names)
+ end
+ local nnames = #names
+ local ok = false
+ if nnames == 0 then
+ ok = load(getcontent("")) -- default buffer
+ elseif nnames == 1 then
+ ok = load(getcontent(names[1]))
+ else
+ -- lua 5.2 chunked load
+ local i = 0
+ ok = load(function()
+ while true do
+ i = i + 1
+ if i > nnames then
+ return nil
+ end
+ local c = getcontent(names[i])
+ if c == "" then
+ -- would trigger end of load
+ else
+ return c
+ end
+ end
+ end)
+ end
+ if ok then
+ return ok()
+ elseif nnames == 0 then
+ report_buffers("invalid lua code in default buffer")
+ else
+ report_buffers("invalid lua code in buffer %a",concat(names,","))
+ end
+end
+
+
buffers.raw = getcontent
buffers.erase = erase
buffers.assign = assign
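The new loadcontent above compiles one or more named buffers as a single Lua chunk. As the comment in the hunk notes, it relies on load accepting a reader function (Lua 5.2 style): the reader returns successive string fragments and nil when the chunk is complete, which is also why empty buffer contents are skipped, since an empty string would end the chunk early. A standalone illustration with made-up fragments:

    local chunks = {
        "local a = 1\n",
        "local b = 2\n",
        "return a + b\n",
    }

    local i = 0
    local ok, err = load(function()
        i = i + 1
        return chunks[i]  -- nil after the last fragment ends the chunk
    end)

    if ok then
        print(ok())  -- 3
    else
        print("invalid lua code: " .. err)
    end
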
@@ -92,22 +136,24 @@ buffers.exists = exists
buffers.getcontent = getcontent
buffers.getlines = getlines
buffers.collectcontent = collectcontent
+buffers.loadcontent = loadcontent
-- the context interface
commands.erasebuffer = erase
commands.assignbuffer = assign
-local P, patterns, lpegmatch = lpeg.P, lpeg.patterns, lpeg.match
+local anything = patterns.anything
+local alwaysmatched = patterns.alwaysmatched
local function countnesting(b,e)
local n
local g = P(b) / function() n = n + 1 end
+ P(e) / function() n = n - 1 end
- + patterns.anything
- local p = patterns.alwaysmatched / function() n = 0 end
+ + anything
+ local p = alwaysmatched / function() n = 0 end
* g^0
- * patterns.alwaysmatched / function() return n end
+ * alwaysmatched / function() return n end
return p
end
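countnesting, visible in the hunk above, builds an lpeg pattern whose function captures keep a running balance of begin and end tokens. A standalone sketch, assuming patterns.anything and patterns.alwaysmatched are the plain P(1) and P(true) they are localized to here:

    local lpeg = require("lpeg")
    local P = lpeg.P

    local function countnesting(b, e)
        local n
        local g = P(b) / function() n = n + 1 end
                + P(e) / function() n = n - 1 end
                + P(1)                               -- anything else
        local p = P(true) / function() n = 0 end     -- reset at the start
                * g^0
                * P(true) / function() return n end  -- report the balance
        return p
    end

    local counter = countnesting("{", "}")
    print(lpeg.match(counter, "{a{b}c"))  -- 1, one brace left open
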
@@ -123,6 +169,67 @@ local continue = false
-- An \n is unlikely to show up as \r is the endlinechar but \n is more generic
-- for us.
+-- This fits the way we fetch verbatim: the indentation before the sentinel
+-- determines the stripping.
+
+-- str = [[
+-- test test test test test test test
+-- test test test test test test test
+-- test test test test test test test
+--
+-- test test test test test test test
+-- test test test test test test test
+-- test test test test test test test
+-- ]]
+
+-- local function undent(str)
+-- local margin = match(str,"[\n\r]( +)[\n\r]*$") or ""
+-- local indent = #margin
+-- if indent > 0 then
+-- local lines = splitlines(str)
+-- local ok = true
+-- local pattern = "^" .. margin
+-- for i=1,#lines do
+-- local l = lines[i]
+-- if find(l,pattern) then
+-- lines[i] = sub(l,indent+1)
+-- else
+-- ok = false
+-- break
+-- end
+-- end
+-- if ok then
+-- return concat(lines,"\n")
+-- end
+-- end
+-- return str
+-- end
+
+-- how about tabs
+
+local getmargin = (Cs(P(" ")^1)*P(-1)+1)^1
+local eol = patterns.eol
+local whatever = (P(1)-eol)^0 * eol^1
+
+local strippers = { }
+
+local function undent(str) -- new version, needs testing
+ local margin = lpegmatch(getmargin,str)
+ if type(margin) ~= "string" then
+ return str
+ end
+ local indent = #margin
+ if indent == 0 then
+ return str
+ end
+ local stripper = strippers[indent]
+ if not stripper then
+ stripper = Cs((P(margin)/"" * whatever + eol^1)^1)
+ strippers[indent] = stripper
+ end
+ return lpegmatch(stripper,str) or str
+end
+
function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe move \\ to call
local dn = getcontent(name)
if dn == "" then
@@ -150,47 +257,27 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe
else
if continue then
dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
+ elseif dn == "" then
+ dn = sub(bufferdata,2,-2)
else
- if dn == "" then
- dn = sub(bufferdata,2,-2)
- else
- dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
- end
+ dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
end
local last = sub(dn,-1)
if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
dn = sub(dn,1,-2)
end
if autoundent then
- local margin = match(dn,"[\n\r]( +)[\n\r]*$") or ""
- local indent = #margin
- if indent > 0 then
- local lines = splitlines(dn)
- local ok = true
- local pattern = "^" .. margin
- for i=1,#lines do
- local l = lines[i]
- if find(l,pattern) then
- lines[i] = sub(l,indent+1)
- else
- ok = false
- break
- end
- end
- if ok then
- dn = concat(lines,"\n")
- end
- end
+ dn = undent(dn)
end
end
assign(name,dn,catcodes)
- commands.testcase(more)
+ commands.doifelse(more)
end
-- The optional prefix hack is there for the typesetbuffer feature and
-- in mkii we needed that (this hidden feature is used in a manual).
-local function prepared(name,list) -- list is optional
+local function prepared(name,list,prefix) -- list is optional
if not list or list == "" then
list = name
end
@@ -201,7 +288,12 @@ local function prepared(name,list) -- list is optional
if content == "" then
content = "empty buffer"
end
- return tex.jobname .. "-" .. name .. ".tmp", content
+ if prefix then
+ local name = file.addsuffix(name,"tmp")
+ return tex.jobname .. "-" .. name, content
+ else
+ return name, content
+ end
end
local capsule = "\\starttext\n%s\n\\stoptext\n"
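prepared now only mangles the buffer name into jobname-name.tmp when the new prefix argument is set; the mkiv side later in this commit passes prefix=yes by default, while an explicit file name goes through untouched. A rough sketch of that naming rule, where jobname stands in for tex.jobname and addsuffix imitates what ConTeXt's file.addsuffix is assumed to do (add the suffix only when none is present):

    local jobname = "myjob"  -- stands in for tex.jobname

    local function addsuffix(name, suffix)
        return name:find("%.") and name or (name .. "." .. suffix)
    end

    local function savename(name, prefix)
        if prefix then
            return jobname .. "-" .. addsuffix(name, "tmp")
        else
            return name
        end
    end

    print(savename("temp", true))       -- myjob-temp.tmp
    print(savename("temp.log", false))  -- temp.log
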
@@ -215,29 +307,29 @@ function commands.runbuffer(name,list,encapsulate)
local data = io.loaddata(name)
if data ~= content then
if trace_run then
- report_buffers("changes in '%s', processing forced",name)
+ report_buffers("changes in %a, processing forced",name)
end
io.savedata(name,content)
os.execute(format(command,name))
elseif trace_run then
- report_buffers("no changes in '%s', not processed",name)
+ report_buffers("no changes in %a, not processed",name)
end
end
-function commands.savebuffer(list,name) -- name is optional
- local name, content = prepared(name,list)
+function commands.savebuffer(list,name,prefix) -- name is optional
+ local name, content = prepared(name,list,prefix==v_yes)
io.savedata(name,content)
end
function commands.getbuffer(name)
local str = getcontent(name)
if str ~= "" then
- context.viafile(str)
+ context.viafile(str,formatters["buffer.%s"](validstring(name,"noname")))
end
end
function commands.getbuffermkvi(name) -- rather direct !
- context.viafile(resolvers.macros.preprocessed(getcontent(name)))
+ context.viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname")))
end
function commands.gettexbuffer(name)
@@ -255,17 +347,10 @@ function commands.gettexbuffer(name)
end
end
-function commands.getbufferctxlua(name)
- local ok = loadstring(getcontent(name))
- if ok then
- ok()
- else
- report_buffers("invalid lua code in buffer '%s'",name)
- end
-end
+commands.getbufferctxlua = loadcontent
function commands.doifelsebuffer(name)
- commands.testcase(exists(name))
+ commands.doifelse(exists(name))
end
-- This only used for mp buffers and is a kludge. Don't change the
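Throughout this commit the reporting calls drop hand-written quotes around %s in favour of %a, the formatter directive from ConTeXt's string.formatters that presents its argument in a readable way (strings quoted, other values serialized). The snippet below is only a toy imitation of that idea, not the real util-str implementation:

    -- toy stand-in for %a: strings come out quoted, everything else via tostring
    local function autoformat(fmt, ...)
        local args, i = { ... }, 0
        return (fmt:gsub("%%a", function()
            i = i + 1
            local v = args[i]
            return type(v) == "string" and ("'" .. v .. "'") or tostring(v)
        end))
    end

    print(autoformat("processing bibtex file %a using %a", "myjob", "mlbibcontext"))
    -- processing bibtex file 'myjob' using 'mlbibcontext'
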
diff --git a/Master/texmf-dist/tex/context/base/buff-ini.mkii b/Master/texmf-dist/tex/context/base/buff-ini.mkii
index 40baaec9354..7fdb89e45cd 100644
--- a/Master/texmf-dist/tex/context/base/buff-ini.mkii
+++ b/Master/texmf-dist/tex/context/base/buff-ini.mkii
@@ -126,8 +126,8 @@
\let\processnextbufferline\processnextbufferlineA
\fi}
{\letbeundefined{#4}% \letvalue{#4}=\relax % \undefined
- \@EA\defconvertedargument\@EA\beginofblock\@EA{\csname#3\endcsname}% we could use defconvertedcommand here (no \@EA)
- \@EA\defconvertedargument\@EA\endofblock \@EA{\csname#4\endcsname}% we could use defconvertedcommand here (no \@EA)
+ \expandafter\defconvertedargument\expandafter\beginofblock\expandafter{\csname#3\endcsname}% we could use defconvertedcommand here (no \expandafter)
+ \expandafter\defconvertedargument\expandafter\endofblock \expandafter{\csname#4\endcsname}% we could use defconvertedcommand here (no \expandafter)
\ifcase\buffernestmode
\let\processnextbufferline\processnextbufferlineB
\or
@@ -314,17 +314,17 @@
\ifnum\currentbufferparagraph>\zerocount
\expandafter\dostartbufferparagraph
\else
- \expandafter\gobbleoneargument
+ \expandafter\gobblebufferparagraph
\fi}
\def\filterbufferparagraph#1#2%
{\advance\currentbufferparagraph \plusone
\ifcase\currentbufferparagraph
- \@EA\gobblebufferparagraph
+ \expandafter\gobblebufferparagraph
\else
\doifinsetelse{\the\currentbufferparagraph}{#2}
- {\@EA\dostartbufferparagraph}
- {\@EA\fakebufferparagraph}%
+ {\expandafter\dostartbufferparagraph}
+ {\expandafter\fakebufferparagraph}%
\fi
{#1}}
diff --git a/Master/texmf-dist/tex/context/base/buff-ini.mkiv b/Master/texmf-dist/tex/context/base/buff-ini.mkiv
index 239a274c0a8..7616a1deec9 100644
--- a/Master/texmf-dist/tex/context/base/buff-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/buff-ini.mkiv
@@ -34,9 +34,9 @@
\setuvalue{\e!start\v!buffer}%
{\begingroup % (3)
\obeylines
- \dosingleempty\buff_start}
+ \dodoubleempty\buff_start}
-\def\buff_start[#1]%
+\def\buff_start[#1][#2]%
{\buff_start_indeed{}{#1}{\e!start\v!buffer}{\e!stop\v!buffer}}
\def\buff_start_indeed#1#2#3#4% \donothing needed !
@@ -163,13 +163,28 @@
% x
% \stopbuffer
%
-% \savebuffer[x][temp.log]
+% \savebuffer[x] [temp] % gets name: jobname-temp.tmp
+% \savebufferinfile[x][temp.log] % gets name: temp.log
+
+\installcorenamespace{savebuffer}
+
+\installsetuponlycommandhandler \??savebuffer {savebuffer}
+
+\setupsavebuffer
+ [\c!list=,
+ \c!file=,
+ \c!prefix=\v!yes]
\unexpanded\def\savebuffer
{\dodoubleempty\buff_save}
\def\buff_save[#1][#2]%
- {\ctxcommand{savebuffer("#1","#2")}}
+ {\begingroup
+ \doifassignmentelse{#1}
+ {\setupcurrentsavebuffer[#1]}%
+ {\setupcurrentsavebuffer[\c!list={#1},\c!file=#2]}%
+ \ctxcommand{savebuffer("\directsavebufferparameter\c!list","\directsavebufferparameter\c!file","\directsavebufferparameter\c!prefix")}%
+ \endgroup}
%D Experimental: no expansion of commands in buffer!
@@ -199,4 +214,28 @@
\def\getbufferdata[#1]{\buff_get_stored_indeed{#1}}
+%D This is a weird one, moved from cont-new. Do we really need it? If not
+%D it will go away.
+
+\bgroup \permitcircumflexescape
+
+\obeylines % don't remove %'s !
+
+\gdef\collapsedspace#1%
+ {\ifx#1^^M%
+ \expandafter\collapsedspace
+ \else
+ \space
+ \expandafter#1%
+ \fi}
+
+\unexpanded\gdef\collapsespaces
+ {\prependtoksonce\relax\to\everyeof%
+ \ignorelines%
+ \ignoretabs%
+ \let\obeyedspace\collapsedspace%
+ \obeyspaces}
+
+\egroup
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/buff-par.lua b/Master/texmf-dist/tex/context/base/buff-par.lua
index 2015b0bc0a4..2c1cd40e9d1 100644
--- a/Master/texmf-dist/tex/context/base/buff-par.lua
+++ b/Master/texmf-dist/tex/context/base/buff-par.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['buff-ini'] = {
+if not modules then modules = { } end modules ['buff-par'] = {
version = 1.001,
comment = "companion to buff-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,21 +6,26 @@ if not modules then modules = { } end modules ['buff-ini'] = {
license = "see context related readme files"
}
-local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end)
-
-local report_parallel = logs.reporter("buffers","parallel")
+local context, commands = context, commands
local insert, remove, find, gmatch = table.insert, table.remove, string.find, string.gmatch
local strip, format = string.strip, string.format
-local variables = interfaces.variables
+local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end)
+
+local report_parallel = logs.reporter("buffers","parallel")
+
+local variables = interfaces.variables
+
+local parallel = buffers.parallel or { }
+buffers.parallel = parallel
-buffers.parallel = { } local parallel = buffers.parallel
+local settings_to_array = utilities.parsers.settings_to_array
-local data = { }
+local data = { }
function parallel.define(category,tags)
- local tags = utilities.parsers.settings_to_array(tags)
+ local tags = settings_to_array(tags)
local entries = { }
data[category] = {
tags = tags,
@@ -38,7 +43,7 @@ function parallel.reset(category,tags)
if not tags or tags == "" or tags == variables.all then
tags = table.keys(entries)
else
- tags = utilities.parsers.settings_to_array(tags)
+ tags = settings_to_array(tags)
end
for i=1,#tags do
entries[tags[i]] = {
@@ -76,7 +81,7 @@ function parallel.save(category,tag,content)
end
-- maybe no strip
-- use lpeg
- if find(content,"^%s*%[") then
+ if find(content,"%s*%[") then
local done = false
for label, content in gmatch(content,"%s*%[(.-)%]%s*([^%[]+)") do
if done then
@@ -86,7 +91,7 @@ function parallel.save(category,tag,content)
done = true
end
if trace_parallel and label ~= "" then
- report_parallel("reference found: category '%s', tag '%s', label '%s'",category,tag,label)
+ report_parallel("reference found of category %a, tag %a, label %a",category,tag,label)
end
line.label = label
line.content = strip(content)
@@ -175,5 +180,5 @@ commands.placeparallel = parallel.place
commands.resetparallel = parallel.reset
function commands.doifelseparallel(category,tags)
- commands.testcase(parallel.hassomecontent(category,tags))
+ commands.doifelse(parallel.hassomecontent(category,tags))
end
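parallel.save above accepts an optional [label] in front of each entry, and this patch drops the ^ anchor from the find test, so the label branch also triggers when the first [label] is not at the very start of the content. A standalone run of the same gmatch pattern; string.strip is a ConTeXt helper, re-implemented here:

    local find, gmatch = string.find, string.gmatch

    -- stand-in for ConTeXt's string.strip
    local function strip(s)
        return (s:gsub("^%s+", ""):gsub("%s+$", ""))
    end

    local content = "\n[first] line one of the first entry\n"
                 .. "line two of the first entry\n"
                 .. "[second] the second entry\n"

    if find(content, "%s*%[") then
        for label, text in gmatch(content, "%s*%[(.-)%]%s*([^%[]+)") do
            print(label, strip(text))
        end
    end
    -- first pass prints label "first" with its two lines,
    -- second pass prints label "second" with its single line
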
diff --git a/Master/texmf-dist/tex/context/base/buff-par.mkiv b/Master/texmf-dist/tex/context/base/buff-par.mkiv
deleted file mode 100644
index 7d35676bdec..00000000000
--- a/Master/texmf-dist/tex/context/base/buff-par.mkiv
+++ /dev/null
@@ -1,151 +0,0 @@
-%D \module
-%D [ file=buff-par,
-%D version=2010.12.05,
-%D title=\CONTEXT\ Buffer Macros,
-%D subtitle=Parallel,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Buffer Macros / Parallel}
-
-\registerctxluafile{buff-par}{1.001}
-
-%D This module is developped for Thomas Schmitz as part of
-%D a project.
-
-\unprotect
-
-\letvalue{\??px:}\empty
-
-\def\parallelparameter#1%
- {\csname\??px:%
- \ifcsname\??px:\currentparallel:\currentparallelinstance:#1\endcsname
- \currentparallel:\currentparallelinstance:#1%
- \else\ifcsname\??px:\currentparallel:#1\endcsname
- \currentparallel:#1%
- \else\ifcsname\??px:#1\endcsname
- #1%
- \fi\fi\fi
- \endcsname}
-
-\unexpanded\def\defineparallel
- {\dodoubleargument\dodefineparallel}
-
-\def\dodefineparallel[#1][#2]%
- {\ctxcommand{defineparallel("#1","#2")}%
- \processcommalist[#2]\dododefineparallel
- \setuvalue{\e!start#1}{\dostartparallelset{#1}}%
- \setuvalue{\e!stop #1}{\dostopparallelset}}
-
-\def\dododefineparallel#1%
- {\definebuffer[#1]%%
- \setuvalue{\e!stop#1}{\dowithparallel{#1}}}
-
-\def\dostartparallelset#1%
- {\def\currentparallel{#1}%
- \ctxcommand{nextparallel("\currentparallel")}}
-
-\def\dostopparallelset#1%
- {}
-
-\def\dowithparallel#1% defined moet ook aan de lua kant kunnen
- {\ctxcommand{saveparallel("\currentparallel","#1",buffers.raw("\thedefinedbuffer{#1}"))}}
-
-\unexpanded\def\placeparallel
- {\dotripleempty\doplaceparallel}
-
-\def\doplaceparallel[#1][#2][#3]%
- {\begingroup
- \def\currentparallel{#1}%
- \ctxcommand{placeparallel("\currentparallel","#2","#3")}%
- \endgroup}
-
-% was: \parallelparameter\c!command}
-
-\def\doflushparallel#1#2#3#4#5% {instance}{status}{line}{label}{content}
- {\begingroup
- \def\currentparallelinstance{#1}%
- \def\currentparallelnumber {#2}%
- \def\currentparallelline {#3}%
- \def\currentparallellabel {#4}%
- \def\currentparallelcontent {#5}%
- \ifcase#2\relax
- \expandafter\noflushparalleldefault
- \or
- \expandafter\doflushparalleldefault
- \fi
- \endgroup}
-
-\def\noflushparalleldefault{}
-\def\doflushparalleldefault{\directsetup{\parallelparameter\c!setups}}
-
-\startsetups parallel:place:default
- \hangafter\plusone
- \hangindent4em
- \dontleavehmode
- \hbox to 3em \bgroup
- \hss
- \bf
- \doifsomething \currentparallellabel {
- \textreference[\currentparallellabel]{\currentparallelline}
- }
- \currentparallelline
- \quad
- \egroup
- \currentparallelcontent
- \par
-\stopsetups
-
-\unexpanded\def\setupparallel
- {\dotripleempty\dosetupparallel}
-
-\def\dosetupparallel[#1][#2][#3]%
- {\ifthirdargument
- \getparameters[\??px:#1:#2:][#3]%
- \else\ifsecondargument
- \getparameters[\??px:#1:][#2]%
- \else
- \getparameters[\??px:][#1]% maybe no : here
- \fi\fi}
-
-\setupparallel
-% [\c!command=\doflushparalleldefault]
- [\c!setups=parallel:place:default]
-
-\def\doifelseparallel#1#2%
- {\cldcontext{commands.doifelseparallel("#1","#2")}}
-
-\def\resetparallel
- {\dodoubleempty\doresetparallel}
-
-\def\resetparallel[#1][#2]%
- {\ctxcommand{resetparallel("#1","#2"))}}
-
-% default
-
-% \def\doflushparalleldefault#1#2#3#4% todo: setups instead
-% {\ifcase#1\or
-% \begingroup
-% \hangafter1
-% \hangindent4em
-% \dontleavehmode
-% \hbox to 3em{\hss\bf\doifsomething{#3}{\textreference[#3]{#2}}#2\quad}#4\par
-% \endgroup
-% \fi}
-
-\protect \endinput
-
-% \def\dododefineparallel#1%
-% {\setuvalue{\e!stop #1}{}%
-% \setuvalue{\e!start#1}{\dostartparallel{#1}}}
-%
-% \def\dostartparallel#1%
-% {\grabuntil{\e!stop#1}{\dododostartparallel{#1}}}
-%
-% \def\dododostartparallel#1#2%
-% {\ctxcommand{saveparallel("\currentparallel","#1",\!!bs\detokenize{#2}\!!es)}}
diff --git a/Master/texmf-dist/tex/context/base/buff-par.mkvi b/Master/texmf-dist/tex/context/base/buff-par.mkvi
new file mode 100644
index 00000000000..404fa8ef30e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/buff-par.mkvi
@@ -0,0 +1,131 @@
+%D \module
+%D [ file=buff-par,
+%D version=2010.12.05,
+%D title=\CONTEXT\ Buffer Macros,
+%D subtitle=Parallel,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Buffer Macros / Parallel}
+
+\registerctxluafile{buff-par}{1.001}
+
+%D This module is developed for Thomas Schmitz as part of a project. There is
+%D no documentation yet.
+%D
+%D \starttyping
+%D \defineparallel[main][one,two]
+%D
+%D \startmain
+%D \startone
+%D first 1
+%D [reference] first 2
+%D first 3
+%D \stopone
+%D \starttwo
+%D second 1
+%D \stoptwo
+%D \stopmain
+%D
+%D \placeparallel[main][one,two][criterium=all]
+
+%D criterium=all start=<number> n=<number>
+
+\unprotect
+
+\installcorenamespace{parallel}
+
+\installcommandhandler \??parallel {parallel} \??parallel
+
+\setupparallel
+ [\c!setups=parallel:place:default]
+
+\let\buff_parallel_define_saved\defineparallel
+
+\unexpanded\def\defineparallel
+ {\dodoubleargument\buff_parallel_define}
+
+\def\buff_parallel_define[#name][#instances]%
+ {\buff_parallel_define_saved[#name]
+ \ctxcommand{defineparallel("#name","#instances")}%
+ \processcommalist[#instances]\buff_parallel_define_instance
+ \setuevalue{\e!start#name}{\buff_parallel_start{#name}}%
+ \setuevalue{\e!stop #name}{\buff_parallel_stop}}
+
+\def\buff_parallel_define_instance#instance%
+ {\normalexpanded{\buff_parallel_define_saved[#instance][\currentparallel]}%
+ \definebuffer[#instance]%
+ \setuevalue{\e!stop#instance}{\buff_parallel_save{#instance}}}
+
+\unexpanded\def\buff_parallel_start#name%
+ {\pushmacro\currentparallel
+ \edef\currentparallel{#name}%
+ \ctxcommand{nextparallel("\currentparallel")}}
+
+\unexpanded\def\buff_parallel_stop
+ {\popmacro\currentparallel}
+
+\unexpanded\def\buff_parallel_save#instance% defined moet ook aan de lua kant kunnen
+ {\ctxcommand{saveparallel("\currentparallel","#instance",buffers.raw("\thedefinedbuffer{#instance}"))}}
+
+\unexpanded\def\placeparallel
+ {\dotripleempty\buff_parallel_place}
+
+\def\buff_parallel_place[#name][#instance][#settings]%
+ {\begingroup
+ \edef\currentparallel{#name}%
+ \ctxcommand{placeparallel("\currentparallel","#instance","#settings")}% -- todo: pass options as k/v
+ \endgroup}
+
+\def\doflushparallel#instance#status#line#label#content% called at lua end
+ {\begingroup
+ \def\currentparallelinstance{#instance}%
+ \def\currentparallelnumber {#status}%
+ \def\currentparallelline {#line}%
+ \def\currentparallellabel {#label}%
+ \def\currentparallelcontent {#content}%
+ \ifcase#status\relax
+ \expandafter\buff_parallel_flush_nop
+ \or
+ \expandafter\buff_parallel_flush_yes
+ \fi
+ \endgroup}
+
+\def\buff_parallel_flush_nop
+ {}
+
+\def\buff_parallel_flush_yes
+ {\directsetup{\namedparallelparameter\currentparallelinstance\c!setups}}
+
+\unexpanded\def\doifelseparallel#name#instance%
+ {\ctxcommand{doifelseparallel("#name","#instance")}}
+
+\unexpanded\def\resetparallel
+ {\dodoubleempty\buff_parallel_reset}
+
+\def\buff_parallel_reset[#name][#instance]%
+ {\ctxcommand{resetparallel("#name","#instance")}}
+
+\startsetups parallel:place:default
+ \hangafter\plusone
+ \hangindent4em
+ \dontleavehmode
+ \hbox to 3em \bgroup
+ \hss
+ \bf
+ \doifsomething \currentparallellabel {
+ \textreference[\currentparallellabel]{\currentparallelline}
+ }
+ \currentparallelline
+ \quad
+ \egroup
+ \currentparallelcontent
+ \par
+\stopsetups
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/buff-ver.lua b/Master/texmf-dist/tex/context/base/buff-ver.lua
index 004d8985896..e327a59dd9d 100644
--- a/Master/texmf-dist/tex/context/base/buff-ver.lua
+++ b/Master/texmf-dist/tex/context/base/buff-ver.lua
@@ -18,6 +18,8 @@ local concat = table.concat
local C, P, R, S, V, Carg, Cc, Cs = lpeg.C, lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Carg, lpeg.Cc, lpeg.Cs
local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg
+local context, commands = context, commands
+
local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end)
local report_visualizers = logs.reporter("buffers","visualizers")
@@ -30,7 +32,6 @@ visualizers.specifications = specifications
local tabtospace = utilities.strings.tabtospace
local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
-local verbatim = context.verbatim
local variables = interfaces.variables
local findfile = resolvers.findfile
local addsuffix = file.addsuffix
@@ -52,6 +53,7 @@ local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline
local dodisplayverbatimstart = context.dodisplayverbatimstart
local dodisplayverbatimstop = context.dodisplayverbatimstop
+local verbatim = context.verbatim
local doverbatimspace = context.doverbatimspace
local CargOne = Carg(1)
@@ -162,7 +164,7 @@ function visualizers.newgrammar(name,t)
g = g and g.grammar
if g then
if trace_visualize then
- report_visualizers("cloning grammar '%s'",name)
+ report_visualizers("cloning grammar %a",name)
end
for k,v in next, g do
if not t[k] then
@@ -181,12 +183,12 @@ local function getvisualizer(method,nature)
local m = specifications[method] or specifications.default
if nature then
if trace_visualize then
- report_visualizers("getting visualizer '%s' with nature '%s'",method,nature)
+ report_visualizers("getting visualizer %a with nature %a",method,nature)
end
return m and (m[nature] or m.parser) or nil
else
if trace_visualize then
- report_visualizers("getting visualizer '%s'",method)
+ report_visualizers("getting visualizer %a",method)
end
return m and m.parser or nil
end
@@ -196,7 +198,7 @@ local fallback = context.verbatim
local function makepattern(visualizer,replacement,pattern)
if not pattern then
- report_visualizers("error in visualizer: %s",replacement)
+ report_visualizers("error in visualizer %a",replacement)
return patterns.alwaystrue
else
if type(visualizer) == "table" and type(replacement) == "string" then
@@ -235,11 +237,11 @@ function visualizers.load(name)
end
if texname == "" or luaname == "" then
if trace_visualize then
- report_visualizers("unknown visualizer '%s'",name)
+ report_visualizers("unknown visualizer %a",name)
end
else
if trace_visualize then
- report_visualizers("loading visualizer '%s'",name)
+ report_visualizers("loading visualizer %a",name)
end
lua.registercode(luaname)
context.input(texname)
@@ -253,7 +255,7 @@ end
function visualizers.register(name,specification)
name = lower(name)
if trace_visualize then
- report_visualizers("registering visualizer '%s'",name)
+ report_visualizers("registering visualizer %a",name)
end
specifications[name] = specification
local parser, handler = specification.parser, specification.handler
@@ -356,7 +358,7 @@ function visualizers.registerescapepattern(name,befores,afters,normalmethod,esca
local after = afters[i]
local processor = processors[i]
if trace_visualize then
- report_visualizers("registering escape pattern, name: '%s', index: '%s', before: '%s', after: '%s', processor: '%s'",
+ report_visualizers("registering escape pattern, name %a, index %a, before %a, after %a, processor %a",
name,i,before,after,processor or "default")
end
before = P(before) * space_pattern
@@ -392,7 +394,7 @@ function visualizers.registerescapeline(name,befores,normalmethod,escapemethod,p
local before = befores[i]
local processor = processors[i]
if trace_visualize then
- report_visualizers("registering escape line pattern, name: '%s', before: '%s', after: <<newline>>",name,before)
+ report_visualizers("registering escape line pattern, name %a, before %a, after <<newline>>",name,before)
end
before = P(before) * space_pattern
after = space_pattern * P("\n")
@@ -422,7 +424,7 @@ function visualizers.registerescapecommand(name,token,normalmethod,escapecommand
local escapepattern = escapepatterns[name]
if not escapepattern then
if trace_visualize then
- report_visualizers("registering escape token, name: '%s', token: '%s'",name,token)
+ report_visualizers("registering escape token, name %a, token %a",name,token)
end
token = P(token)
local notoken = hack((1 - token)^1)
@@ -491,12 +493,12 @@ local function visualize(content,settings) -- maybe also method in settings
local n = m and m[nature]
if n then
if trace_visualize then
- report_visualizers("visualize using method '%s' and nature '%s'",method,nature)
+ report_visualizers("visualize using method %a and nature %a",method,nature)
end
n(content,settings)
else
if trace_visualize then
- report_visualizers("visualize using method '%s'",method)
+ report_visualizers("visualize using method %a",method)
end
fallback(content,1,settings)
end
@@ -506,9 +508,12 @@ end
visualizers.visualize = visualize
visualizers.getvisualizer = getvisualizer
+local fallbacks = { } table.setmetatableindex(fallbacks,function(t,k) local v = { nature = k } t[k] = v return v end)
+
local function checkedsettings(settings,nature)
if not settings then
- return { nature = nature }
+ -- let's avoid dummy tables as much as possible
+ return fallbacks[nature]
else
if not settings.nature then
settings.nature = nature
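checkedsettings above now hands out one shared dummy table per nature instead of allocating a fresh { nature = nature } on every call. table.setmetatableindex is a ConTeXt convenience; a plain __index metamethod gives the same lazy cache, sketched here:

    local fallbacks = setmetatable({ }, {
        __index = function(t, k)
            local v = { nature = k }  -- created once per nature, then cached
            t[k] = v
            return v
        end
    })

    local function checkedsettings(settings, nature)
        if not settings then
            return fallbacks[nature]  -- shared dummy table, no new allocation
        end
        if not settings.nature then
            settings.nature = nature
        end
        return settings
    end

    print(checkedsettings(nil, "display") == checkedsettings(nil, "display"))  -- true
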
@@ -688,19 +693,19 @@ local getlines = buffers.getlines
-- interface
function commands.doifelsevisualizer(name)
- commands.testcase(specifications[lower(name)])
+ commands.doifelse(specifications[lower(name)])
end
commands.loadvisualizer = visualizers.load
---~ local decodecomment = resolvers.macros.decodecomment -- experiment
+-- local decodecomment = resolvers.macros.decodecomment -- experiment
function commands.typebuffer(settings)
local lines = getlines(settings.name)
if lines then
local content, m = filter(lines,settings)
if content and content ~= "" then
---~ content = decodecomment(content)
+ -- content = decodecomment(content)
content = dotabs(content,settings)
visualize(content,checkedsettings(settings,"display"))
end
@@ -732,7 +737,7 @@ local strip = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") + 1)^0) --
function commands.typestring(settings)
local content = settings.data
if content and content ~= "" then
- content = lpegmatch(strip,content) -- can be an option, btu needed in e.g. tabulate
+ content = #content > 1 and lpegmatch(strip,content) or content -- can be an option, but needed in e.g. tabulate
-- content = decodecomment(content)
-- content = dotabs(content,settings)
visualize(content,checkedsettings(settings,"inline"))
@@ -750,7 +755,7 @@ function commands.typefile(settings)
str = regimes.translate(str,regime)
end
if str and str~= "" then
---~ content = decodecomment(content)
+ -- content = decodecomment(content)
local lines = splitlines(str)
local content, m = filter(lines,settings)
if content and content ~= "" then
diff --git a/Master/texmf-dist/tex/context/base/buff-ver.mkiv b/Master/texmf-dist/tex/context/base/buff-ver.mkiv
index 5336c4458bd..430834a30d8 100644
--- a/Master/texmf-dist/tex/context/base/buff-ver.mkiv
+++ b/Master/texmf-dist/tex/context/base/buff-ver.mkiv
@@ -11,6 +11,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% check after-first and before-last breaks
+
\writestatus{loading}{ConTeXt Buffer Macros / Verbatim}
\registerctxluafile{buff-ver}{1.001}
@@ -70,21 +72,22 @@
\csname\??typingspace\typeparameter\c!space\endcsname
\relax\the\everyinitializeverbatim\relax}
+\unexpanded\def\doinitializeverbatim % for use elsewhere .. temp hack (see lxml-ini)
+ {\buff_verbatim_initialize_type_one
+ \buff_verbatim_initialize_type_two}
+
\let\buff_verbatim_set_line_margin\relax
\def\buff_verbatim_set_line_margin_indeed
- {\getpagestatus
- \hskip\ifrightpage\typingparameter\c!oddmargin\else\typingparameter\c!evenmargin\fi\relax}
+ {\hskip\doifoddpageelse{\typingparameter\c!oddmargin}{\typingparameter\c!evenmargin}\relax}
\def\buff_verbatim_check_margins
{\scratchskip\typingparameter\c!oddmargin\relax
- \ifzeropt\scratchskip
- \else
+ \ifzeropt\scratchskip \else
\let\buff_verbatim_set_line_margin\buff_verbatim_set_line_margin_indeed
\fi
\scratchskip\typingparameter\c!evenmargin\relax
- \ifzeropt\scratchskip
- \else
+ \ifzeropt\scratchskip \else
\let\buff_verbatim_set_line_margin\buff_verbatim_set_line_margin_indeed
\fi
\ifx\buff_verbatim_set_line_margin\relax
@@ -331,7 +334,7 @@
\unexpanded\def\specialfixedspace {\kern\interwordspace\relax}
\unexpanded\def\specialobeyedspace {\hskip\interwordspace\relax} % better than spaceskip
-\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\!!plus.125\interwordstretch\relax} % more but not less
+\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordstretch\relax} % more but not less
\unexpanded\def\specialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}
\unexpanded\def\obeyhyphens
@@ -369,7 +372,7 @@
\let\inlinemathmarker \textdollar
\def\displaymathmarker{\textdollar\textdollar}
-\def\buff_verbatim_special_type#1#2%
+\def\buff_verbatim_special_type#1#2#% # gobbles spaces
{\dontleavehmode\bgroup
\buff_verbatim_initialize_type_one
\catcode\leftbraceasciicode \begingroupcatcode
@@ -390,6 +393,8 @@
\unexpanded\def\astype{\bgroup\usetypestyleandcolor\c!style\c!color\let\nexttoken}
+\unexpanded\def\asciistr#1{\dontleavehmode{\verbatimfont\detokenize{#1}}} % use in some old styles
+
%D The basic display verbatim commands are defined in an indirect way. As we
%D will see, they are a specific case of a more general mechanism.
@@ -641,9 +646,9 @@
\c!text=\v!no,
\c!style=\tt,
\c!indentnext=\v!yes,
- \c!margin=\!!zeropoint,
- \c!evenmargin=\!!zeropoint,
- \c!oddmargin=\!!zeropoint,
+ \c!margin=\zeropoint,
+ \c!evenmargin=\zeropoint,
+ \c!oddmargin=\zeropoint,
\c!blank=\v!line,
%\c!escape=, % yes | no | {START,STOP} | default when yes: {BTEX,ETEX}
\c!numbering=\v!no,
@@ -836,4 +841,9 @@
% \usevisualizerstyleandcolor\c!style\c!color
% \let\next}
+\appendtoks
+ \def\type#1{\letterbackslash\checkedstrippedcsname#1}% or maybe detokenize
+ \def\tex #1{\letterbackslash#1}%
+\to \everysimplifycommands
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/catc-ctx.mkiv b/Master/texmf-dist/tex/context/base/catc-ctx.mkiv
index bd5c16d69a3..ddade7f52c3 100644
--- a/Master/texmf-dist/tex/context/base/catc-ctx.mkiv
+++ b/Master/texmf-dist/tex/context/base/catc-ctx.mkiv
@@ -128,11 +128,18 @@
% for the moment here:
-\def\starttexcode
+\normalprotected\def\starttexcode
{\pushcatcodetable
\catcodetable\prtcatcodes}
-\def\stoptexcode
+\normalprotected\def\stoptexcode
+ {\popcatcodetable}
+
+\normalprotected\def\startcontextcode
+ {\pushcatcodetable
+ \catcodetable\ctxcatcodes}
+
+\normalprotected\def\stopcontextcode
{\popcatcodetable}
\endinput
diff --git a/Master/texmf-dist/tex/context/base/catc-def.mkiv b/Master/texmf-dist/tex/context/base/catc-def.mkiv
index 26e8cb11ea2..cfbaed171e4 100644
--- a/Master/texmf-dist/tex/context/base/catc-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/catc-def.mkiv
@@ -123,7 +123,7 @@
%D shortcuts to their character representation.
\chardef \^ = \circumflexasciicode
-\chardef \_ = \underscoreasciicode % but way too wide in lm, so ... until that's fixed:
+\chardef \_ = \underscoreasciicode
\chardef \& = \ampersandasciicode
\chardef \% = \commentasciicode
\chardef \# = \hashasciicode
@@ -133,8 +133,9 @@
\chardef \\ = \backslashasciicode
\chardef \| = \barasciicode
-%def\_{\leavevmode \kern.06em \vbox{\hrule width.3em}}
-\def\_{\dontleavehmode \kern.06em \vbox{\hrule width.3em}} % this will become a \chardef
+% way too wide in lm, so one can also use:
+%
+% \def\_{\dontleavehmode \kern.06em \vbox{\hrule width.3em}} % this will become a \chardef
%D From now on we can use the protection mechanisms.
diff --git a/Master/texmf-dist/tex/context/base/catc-ini.lua b/Master/texmf-dist/tex/context/base/catc-ini.lua
index b2c793a6a98..d4f9b65af07 100644
--- a/Master/texmf-dist/tex/context/base/catc-ini.lua
+++ b/Master/texmf-dist/tex/context/base/catc-ini.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['catc-ini'] = {
license = "see context related readme files"
}
--- todo: everywhere replace tex.ctxcatcodes -> catcodes.numbers.ctxcatcodes
-
catcodes = catcodes or { }
catcodes.numbers = catcodes.numbers or { }
catcodes.names = catcodes.names or { }
@@ -15,26 +13,29 @@ catcodes.names = catcodes.names or { }
storage.register("catcodes/numbers", catcodes.numbers, "catcodes.numbers")
storage.register("catcodes/names", catcodes.names, "catcodes.names")
+local numbers = catcodes.numbers
+local names = catcodes.names
+
-- this only happens at initime
function catcodes.register(name,number)
- catcodes.numbers[name] = number
- local cnn = catcodes.names[number]
+ numbers[name] = number
+ local cnn = names[number]
if cnn then
cnn[#cnn+1] = name
else
- catcodes.names[number] = { name }
+ names[number] = { name }
end
- tex[name] = number
+ tex[name] = number -- downward compatible
end
-- this only happens at runtime
-for k, v in next, catcodes.numbers do
- tex[k] = v
+for k, v in next, numbers do
+ tex[k] = v -- downward compatible
end
-- nasty
-table.setmetatableindex(catcodes.numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
-table.setmetatableindex(catcodes.names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
+table.setmetatableindex(numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
+table.setmetatableindex(names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
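The two setmetatableindex calls above give catcodes.numbers and catcodes.names an identity fallback: an unregistered numeric key resolves to itself in numbers, an unregistered string key resolves to itself in names, and the result is cached so the metamethod fires only once per key. A rough standalone sketch in plain Lua (the registered value 1 is purely illustrative):

    local numbers = setmetatable({ }, {
        __index = function(t, k)
            if type(k) == "number" then
                t[k] = k          -- cache the identity mapping
                return k
            end
        end,
    })

    numbers["ctxcatcodes"] = 1    -- a registration (the value is made up here)
    print(numbers["ctxcatcodes"]) -- 1
    print(numbers[7])             -- 7, identity fallback for raw numbers
    print(numbers["unknown"])     -- nil, unknown names are not invented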
diff --git a/Master/texmf-dist/tex/context/base/catc-ini.mkiv b/Master/texmf-dist/tex/context/base/catc-ini.mkiv
index 26c3ceee990..791ce31c467 100644
--- a/Master/texmf-dist/tex/context/base/catc-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/catc-ini.mkiv
@@ -87,6 +87,17 @@
\xdef\outputnewlinechar{^^J}%
\endgroup}
+%D We predefine some prefixes ahead of syst-aux and mult-sys. We reserve 8 slots
+%D for catcodes.
+
+\def\??catcodelet {1>>} % let : \let
+\def\??catcodedef {2>>} % def : \def
+\def\??catcodeued {3>>} % ued : \unexpanded\def
+\def\??catcodeget {4>>} % \meaning
+
+\def\??catcodetablet{5>>}
+\def\??catcodetablen{6>>}
+
\newcount\c_syst_catcodes_n \c_syst_catcodes_n\zerocount % 0 = signal, so advance before allocate
\newcount\c_syst_catcodes_a
\newcount\c_syst_catcodes_b
@@ -94,7 +105,7 @@
\normalprotected\def\newcatcodetable#1% we could move the cctdefcounter to lua
{\global\advance\c_syst_catcodes_n\plusone
- \expandafter\xdef\csname\??qm:n:\number\c_syst_catcodes_n\endcsname{\string#1}% logging
+ \expandafter\xdef\csname\??catcodetablen\number\c_syst_catcodes_n\endcsname{\string#1}% logging
\newconstant#1%
#1\c_syst_catcodes_n
\ctxlua{catcodes.register("\expandafter\gobbleoneargument\string#1",\number#1)}}
@@ -151,12 +162,7 @@
\setnewconstant\c_syst_catcodes_hack\tildeasciicode
%D Once a catcode is assigned, the next assignments will happen
-%D faster. We predefine some prefixes ahead of mult-sys.
-
-\def\??ql{@@ql} % let : \let
-\def\??qd{@@qd} % def : \def
-\def\??qu{@@qu} % ued : \unexpanded\def
-\def\??qm{@@qm} % \meaning
+%D faster.
\def\letcatcodecommand{\afterassignment\syst_catcodes_let_a\c_syst_catcodes_a}
\def\defcatcodecommand{\afterassignment\syst_catcodes_def_a\c_syst_catcodes_a}
@@ -167,46 +173,46 @@
\def\syst_catcodes_ued_a{\afterassignment\syst_catcodes_ued_b\c_syst_catcodes_b}
\def\syst_catcodes_let_b % each time
- {\ifcsname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_let_c
\fi}
\def\syst_catcodes_def_b % each time
- {\ifcsname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_def_c
\fi}
\def\syst_catcodes_ued_b % each time
- {\ifcsname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_ued_c
\fi}
\def\syst_catcodes_let_c % only first time
- {\expandafter\gdef\csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname\expandafter
- {\expandafter\let\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}%
+ {\expandafter\gdef\csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname\expandafter
+ {\expandafter\let\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}%
\syst_catcodes_reinstate_unexpanded
- \csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\syst_catcodes_def_c % only first time (we could use \normalexpanded here)
- {\expandafter\gdef\csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\expandafter\gdef\csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\expandafter##\expandafter1\expandafter
- {\expandafter\def\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
+ {\expandafter\def\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
\syst_catcodes_reinstate_normal
- \csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\syst_catcodes_ued_c % only first time
- {\expandafter\gdef\csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\expandafter\gdef\csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\expandafter##\expandafter1\expandafter
- {\expandafter\normalprotected\expandafter\def\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
+ {\expandafter\normalprotected\expandafter\def\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
\syst_catcodes_reinstate_unexpanded
- \csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\reinstatecatcodecommand{\afterassignment\syst_catcodes_reinstate_normal\c_syst_catcodes_b}
@@ -227,8 +233,8 @@
\newconstant\defaultcatcodetable
\def\catcodecommand#1%
- {\csname\??qm:\number
- \ifcsname\??qm:\number\currentcatcodetable:\number#1\endcsname
+ {\csname\??catcodeget\number
+ \ifcsname\??catcodeget\number\currentcatcodetable:\number#1\endcsname
\currentcatcodetable \else \defaultcatcodetable
\fi
:\number#1\endcsname}
@@ -251,13 +257,13 @@
\normalprotected\def\pushcatcodetable
{\advance\c_syst_catcodes_level\plusone
\syst_catcodes_trace_push
- \expandafter\chardef\csname\??qm:t:\number\c_syst_catcodes_level\endcsname\currentcatcodetable}
+ \expandafter\chardef\csname\??catcodetablet\number\c_syst_catcodes_level\endcsname\currentcatcodetable}
\normalprotected\def\popcatcodetable
{\ifcase\c_syst_catcodes_level
\syst_catcodes_trace_nesting_error
\else
- \expandafter\catcodetable\csname\??qm:t:\number\c_syst_catcodes_level\endcsname
+ \expandafter\catcodetable\csname\??catcodetablet\number\c_syst_catcodes_level\endcsname
\syst_catcodes_trace_pop
\advance\c_syst_catcodes_level\minusone
\fi}
@@ -269,7 +275,7 @@
\normalprotected\def\restorecatcodes % takes previous level
{\ifnum\c_syst_catcodes_level>\plusone
- \expandafter\catcodetable\csname\??qm:t:\number\numexpr\c_syst_catcodes_level-1\relax\endcsname
+ \expandafter\catcodetable\csname\??catcodetablet\number\numexpr\c_syst_catcodes_level-1\relax\endcsname
\fi}
% \newtoks\everycatcodetable
@@ -294,14 +300,14 @@
\def\syst_catcodes_prev
{\ifnum\numexpr\c_syst_catcodes_level-1\relax>\zerocount
- \csname\??qm:n:\number\csname\??qm:t:\number\numexpr\c_syst_catcodes_level-1\relax\endcsname\endcsname
+ \csname\??catcodetablen\number\csname\??catcodetablet\number\numexpr\c_syst_catcodes_level-1\relax\endcsname\endcsname
\else
-%
\fi}
\def\catcodetablename
{\ifnum\currentcatcodetable>\zerocount
- \csname\??qm:n:\number\currentcatcodetable\endcsname
+ \csname\??catcodetablen\number\currentcatcodetable\endcsname
\else
-%
\fi}
diff --git a/Master/texmf-dist/tex/context/base/char-cjk.lua b/Master/texmf-dist/tex/context/base/char-cjk.lua
index b077f4a3e0e..3d7de142333 100644
--- a/Master/texmf-dist/tex/context/base/char-cjk.lua
+++ b/Master/texmf-dist/tex/context/base/char-cjk.lua
@@ -12,7 +12,8 @@ local floor = math.floor
local format = string.format
local utfchar = utf.char
-local ranges = characters.ranges
+local ranges = characters.ranges
+local allocate = utilities.storage.allocate
-- Hangul Syllable
@@ -209,7 +210,7 @@ local remapped = { -- this might be merged into char-def.lua
[0x11C2] = 0x314E, -- H
}
-characters.hangul = {
+characters.hangul = allocate {
decomposed = decomposed,
description = description,
leadconsonant = leadconsonant,
@@ -226,24 +227,6 @@ local hangul_syllable_basetable = {
linebreak = "h2",
}
---~ local hangul_syllable_metatable = {
---~ __index = function(t,k)
---~ local u = t.unicodeslot
---~ if k == "fscode" then
---~ -- no need to cache this as we normally use fscodes
---~ return leadconsonant(u)
---~ elseif k == "shcode" then
---~ return { decomposed(u) }
---~ elseif k == "specials" then
---~ return { "char", decomposed(u) }
---~ elseif k == "description" then
---~ return description(u)
---~ else
---~ return hangul_syllable_basetable[k]
---~ end
---~ end
---~ }
-
local hangul_syllable_metatable = {
__index = function(t,k)
local u = t.unicodeslot
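The surviving hangul_syllable_metatable (the commented duplicate above is dropped) derives fields such as the description and decomposition from unicodeslot on first access instead of storing them. A rough illustration in plain Lua, with description and decomposed as placeholder stand-ins for the real Hangul helpers:

    local function description(u) return ("HANGUL SYLLABLE U+%04X"):format(u) end
    local function decomposed(u)  return u, u + 1 end -- placeholder, not the real algorithm

    local hangul_syllable_metatable = {
        __index = function(t, k)
            local u = t.unicodeslot
            if k == "description" then
                return description(u)
            elseif k == "shcode" then
                return { decomposed(u) }
            end
        end,
    }

    local char = setmetatable({ unicodeslot = 0xAC00 }, hangul_syllable_metatable)
    print(char.description) -- computed on demand, never stored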
diff --git a/Master/texmf-dist/tex/context/base/char-def.lua b/Master/texmf-dist/tex/context/base/char-def.lua
index 804468c2db4..8580fb6990b 100644
--- a/Master/texmf-dist/tex/context/base/char-def.lua
+++ b/Master/texmf-dist/tex/context/base/char-def.lua
@@ -3,15 +3,19 @@ if not modules then modules = { } end modules ['char-def'] = {
comment = "companion to char-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
--[[
-The first version of this table was generated from unicode tables
-but after that was mostly updated manual using data present in
-ConTeXt and elsewhere. I did my best to make this table as complete
-as needed for proper use in ConTeXt MkIV. All errors are mine. If
-you find an error or ommision, just let me know.
+The first version of this table was generated from unicode tables but after that was
+mostly updated manually using data present in ConTeXt and elsewhere. I did my best to
+make this table as complete as needed for proper use in ConTeXt MkIV. All errors are
+mine. If you find an error or omission, just let me know. This file is updated every
+now and then using mtx-unicode where we check against the latest unicode txt files
+(normally once per year around the tex live code freeze) and checked afterwards for
+differences. We could save some bytes by sharing variant tables but it's not worth
+the trouble. Some additional data is kept in other files.
]]--
characters = characters or { }
@@ -310,6 +314,10 @@ characters.data={
direction="et",
linebreak="al",
unicodeslot=0x0023,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="dollar",
@@ -406,15 +414,6 @@ characters.data={
direction="cs",
linebreak="is",
mathclass="punctuation",
---mathspec={
--- {
--- class="punctuation",
--- },
--- {
--- class="ord",
--- name="mathcomma",
--- },
---},
unicodeslot=0x002C,
},
{
@@ -424,6 +423,8 @@ characters.data={
description="HYPHEN-MINUS",
direction="es",
linebreak="hy",
+ mathextensible="h",
+ mathfiller="relfill",
mathsymbol=0x2212,
unicodeslot=0x002D,
},
@@ -431,18 +432,13 @@ characters.data={
adobename="period",
category="po",
cjkwd="na",
+ comment="class needed for autopunctuation",
contextname="textperiod",
description="FULL STOP",
direction="cs",
linebreak="is",
- mathclass="ord",
+ mathclass="punctuation",
mathspec={
--- {
--- class="ord",
--- },
--- {
--- class="punctuation",
--- },
{
class="punctuation",
name="ldotp",
@@ -454,12 +450,21 @@ characters.data={
adobename="slash",
category="po",
cjkwd="na",
+ comment="mathsymbol=0x2044",
contextname="textslash",
description="SOLIDUS",
direction="cs",
linebreak="sy",
- mathclass="ord",
- mathsymbol=0x2044,
+ mathspec={
+ {
+ class="middle",
+ unicode=0x2044,
+ },
+ {
+ class="ordinary",
+ unicode=0x2044,
+ },
+ },
unicodeslot=0x002F,
},
{
@@ -471,6 +476,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0030,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="one",
@@ -481,6 +490,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0031,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="two",
@@ -491,6 +504,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0032,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="three",
@@ -501,6 +518,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0033,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="four",
@@ -511,6 +532,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0034,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="five",
@@ -521,6 +546,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0035,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="six",
@@ -531,6 +560,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0036,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="seven",
@@ -541,6 +574,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0037,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="eight",
@@ -551,6 +588,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0038,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="nine",
@@ -561,6 +602,10 @@ characters.data={
linebreak="nu",
mathclass="number",
unicodeslot=0x0039,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
{
adobename="colon",
@@ -603,15 +648,17 @@ characters.data={
description="EQUALS SIGN",
direction="on",
linebreak="al",
+ mathextensible="h",
+ mathfiller="equalfill",
mathspec={
- {
- class="relation",
- name="eq",
- },
- {
- class="relation",
- name="Relbar",
- },
+ {
+ class="relation",
+ name="eq",
+ },
+ {
+ class="relation",
+ name="Relbar",
+ },
},
unicodeslot=0x003D,
},
@@ -979,7 +1026,7 @@ characters.data={
description="CIRCUMFLEX ACCENT",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
unicodeslot=0x005E,
},
{
@@ -1000,7 +1047,7 @@ characters.data={
description="GRAVE ACCENT",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
mathname="grave",
unicodeslot=0x0060,
},
@@ -1684,7 +1731,7 @@ characters.data={
description="DIAERESIS",
direction="on",
linebreak="ai",
- mathclass="accent",
+ mathclass="topaccent",
mathname="ddot",
specials={ "compat", 0x0020, 0x0308 },
unicodeslot=0x00A8,
@@ -1729,11 +1776,11 @@ characters.data={
linebreak="al",
mathspec={
{
- class="ord",
+ class="ordinary",
name="lnot",
},
{
- class="ord",
+ class="ordinary",
name="neg",
},
},
@@ -1767,7 +1814,7 @@ characters.data={
description="MACRON",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
mathname="bar",
specials={ "compat", 0x0020, 0x0304 },
unicodeslot=0x00AF,
@@ -1824,7 +1871,7 @@ characters.data={
description="ACUTE ACCENT",
direction="on",
linebreak="bb",
- mathclass="accent",
+ mathclass="topaccent",
mathname="acute",
specials={ "compat", 0x0020, 0x0301 },
unicodeslot=0x00B4,
@@ -2538,7 +2585,7 @@ characters.data={
description="LATIN SMALL LETTER ETH",
direction="l",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="eth",
uccode=0x00D0,
unicodeslot=0x00F0,
@@ -3205,8 +3252,6 @@ characters.data={
description="LATIN SMALL LETTER H WITH STROKE",
direction="l",
linebreak="al",
- mathclass="ord",
- mathname="hbar",
shcode=0x0068,
uccode=0x0126,
unicodeslot=0x0127,
@@ -7479,7 +7524,7 @@ characters.data={
description="MODIFIER LETTER CIRCUMFLEX ACCENT",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
mathname="hat",
specials={ "compat", 0x0020, 0x0302 },
unicodeslot=0x02C6,
@@ -7493,7 +7538,7 @@ characters.data={
description="CARON",
direction="on",
linebreak="ai",
- mathclass="accent",
+ mathclass="topaccent",
mathname="check",
specials={ "compat", 0x0020, 0x030C },
unicodeslot=0x02C7,
@@ -7639,7 +7684,7 @@ characters.data={
description="BREVE",
direction="on",
linebreak="ai",
- mathclass="accent",
+ mathclass="topaccent",
mathname="breve",
specials={ "compat", 0x0020, 0x0306 },
unicodeslot=0x02D8,
@@ -7652,7 +7697,7 @@ characters.data={
description="DOT ABOVE",
direction="on",
linebreak="ai",
- mathclass="accent",
+ mathclass="topaccent",
mathname="dot",
specials={ "compat", 0x0020, 0x0307 },
unicodeslot=0x02D9,
@@ -7665,7 +7710,7 @@ characters.data={
description="RING ABOVE",
direction="on",
linebreak="ai",
- mathclass="accent",
+ mathclass="topaccent",
mathname="mathring",
specials={ "compat", 0x0020, 0x030A },
unicodeslot=0x02DA,
@@ -7688,7 +7733,7 @@ characters.data={
description="SMALL TILDE",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
mathname="tilde",
specials={ "compat", 0x0020, 0x0303 },
unicodeslot=0x02DC,
@@ -7982,6 +8027,8 @@ characters.data={
description="COMBINING CIRCUMFLEX ACCENT",
direction="nsm",
linebreak="cm",
+ mathclass="topaccent",
+ mathname="widehat",
mathstretch="h",
unicodeslot=0x0302,
},
@@ -7992,6 +8039,8 @@ characters.data={
description="COMBINING TILDE",
direction="nsm",
linebreak="cm",
+ mathclass="topaccent",
+ mathname="widetilde",
mathstretch="h",
unicodeslot=0x0303,
},
@@ -10252,7 +10301,7 @@ characters.data={
description="GREEK KAPPA SYMBOL",
direction="l",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="varkappa",
specials={ "compat", 0x03BA },
uccode=0x039A,
@@ -14174,6 +14223,13 @@ characters.data={
linebreak="ba",
unicodeslot=0x058A,
},
+ [0x058F]={
+ category="sc",
+ description="ARMENIAN DRAM SIGN",
+ direction="et",
+ linebreak="pr",
+ unicodeslot=0x058F,
+ },
[0x0591]={
adobename="etnahtalefthebrew",
category="mn",
@@ -14615,7 +14671,7 @@ characters.data={
contextname="hebrewAlef",
description="HEBREW LETTER ALEF",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D0,
},
[0x05D1]={
@@ -14624,7 +14680,7 @@ characters.data={
contextname="hebrewBet",
description="HEBREW LETTER BET",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D1,
},
[0x05D2]={
@@ -14633,7 +14689,7 @@ characters.data={
contextname="hebrewGimel",
description="HEBREW LETTER GIMEL",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D2,
},
[0x05D3]={
@@ -14642,7 +14698,7 @@ characters.data={
contextname="hebrewDalet",
description="HEBREW LETTER DALET",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D3,
},
[0x05D4]={
@@ -14651,7 +14707,7 @@ characters.data={
contextname="hebrewHe",
description="HEBREW LETTER HE",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D4,
},
[0x05D5]={
@@ -14660,7 +14716,7 @@ characters.data={
contextname="hebrewVav",
description="HEBREW LETTER VAV",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D5,
},
[0x05D6]={
@@ -14669,7 +14725,7 @@ characters.data={
contextname="hebrewZayin",
description="HEBREW LETTER ZAYIN",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D6,
},
[0x05D7]={
@@ -14678,7 +14734,7 @@ characters.data={
contextname="hebrewHet",
description="HEBREW LETTER HET",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D7,
},
[0x05D8]={
@@ -14687,7 +14743,7 @@ characters.data={
contextname="hebrewTet",
description="HEBREW LETTER TET",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D8,
},
[0x05D9]={
@@ -14696,7 +14752,7 @@ characters.data={
contextname="hebrewYod",
description="HEBREW LETTER YOD",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05D9,
},
[0x05DA]={
@@ -14705,7 +14761,7 @@ characters.data={
contextname="hebrewKaffinal",
description="HEBREW LETTER FINAL KAF",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DA,
},
[0x05DB]={
@@ -14714,7 +14770,7 @@ characters.data={
contextname="hebrewKaf",
description="HEBREW LETTER KAF",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DB,
},
[0x05DC]={
@@ -14723,7 +14779,7 @@ characters.data={
contextname="hebrewLamed",
description="HEBREW LETTER LAMED",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DC,
},
[0x05DD]={
@@ -14732,7 +14788,7 @@ characters.data={
contextname="hebrewMemfinal",
description="HEBREW LETTER FINAL MEM",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DD,
},
[0x05DE]={
@@ -14741,7 +14797,7 @@ characters.data={
contextname="hebrewMem",
description="HEBREW LETTER MEM",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DE,
},
[0x05DF]={
@@ -14750,7 +14806,7 @@ characters.data={
contextname="hebrewNunfinal",
description="HEBREW LETTER FINAL NUN",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05DF,
},
[0x05E0]={
@@ -14759,7 +14815,7 @@ characters.data={
contextname="hebrewNun",
description="HEBREW LETTER NUN",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E0,
},
[0x05E1]={
@@ -14768,7 +14824,7 @@ characters.data={
contextname="hebrewSamekh",
description="HEBREW LETTER SAMEKH",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E1,
},
[0x05E2]={
@@ -14777,7 +14833,7 @@ characters.data={
contextname="hebrewAyin",
description="HEBREW LETTER AYIN",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E2,
},
[0x05E3]={
@@ -14786,7 +14842,7 @@ characters.data={
contextname="hebrewPefinal",
description="HEBREW LETTER FINAL PE",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E3,
},
[0x05E4]={
@@ -14795,7 +14851,7 @@ characters.data={
contextname="hebrewPe",
description="HEBREW LETTER PE",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E4,
},
[0x05E5]={
@@ -14804,7 +14860,7 @@ characters.data={
contextname="hebrewTsadifinal",
description="HEBREW LETTER FINAL TSADI",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E5,
},
[0x05E6]={
@@ -14813,7 +14869,7 @@ characters.data={
contextname="hebrewTsadi",
description="HEBREW LETTER TSADI",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E6,
},
[0x05E7]={
@@ -14822,7 +14878,7 @@ characters.data={
contextname="hebrewQof",
description="HEBREW LETTER QOF",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E7,
},
[0x05E8]={
@@ -14831,7 +14887,7 @@ characters.data={
contextname="hebrewResh",
description="HEBREW LETTER RESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E8,
},
[0x05E9]={
@@ -14840,7 +14896,7 @@ characters.data={
contextname="hebrewShin",
description="HEBREW LETTER SHIN",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05E9,
},
[0x05EA]={
@@ -14849,7 +14905,7 @@ characters.data={
contextname="hebrewTav",
description="HEBREW LETTER TAV",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05EA,
},
[0x05F0]={
@@ -14857,7 +14913,7 @@ characters.data={
category="lo",
description="HEBREW LIGATURE YIDDISH DOUBLE VAV",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05F0,
},
[0x05F1]={
@@ -14865,7 +14921,7 @@ characters.data={
category="lo",
description="HEBREW LIGATURE YIDDISH VAV YOD",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05F1,
},
[0x05F2]={
@@ -14873,7 +14929,7 @@ characters.data={
category="lo",
description="HEBREW LIGATURE YIDDISH DOUBLE YOD",
direction="r",
- linebreak="al",
+ linebreak="hl",
unicodeslot=0x05F2,
},
[0x05F3]={
@@ -14893,6 +14949,7 @@ characters.data={
unicodeslot=0x05F4,
},
[0x0600]={
+ arabic="u",
category="cf",
description="ARABIC NUMBER SIGN",
direction="an",
@@ -14901,6 +14958,7 @@ characters.data={
visible="yes",
},
[0x0601]={
+ arabic="u",
category="cf",
description="ARABIC SIGN SANAH",
direction="an",
@@ -14909,6 +14967,7 @@ characters.data={
visible="yes",
},
[0x0602]={
+ arabic="u",
category="cf",
description="ARABIC FOOTNOTE MARKER",
direction="an",
@@ -14917,6 +14976,7 @@ characters.data={
visible="yes",
},
[0x0603]={
+ arabic="u",
category="cf",
description="ARABIC SIGN SAFHA",
direction="an",
@@ -14924,6 +14984,14 @@ characters.data={
unicodeslot=0x0603,
visible="yes",
},
+ [0x0604]={
+ arabic="u",
+ category="cf",
+ description="ARABIC SIGN SAMVAT",
+ direction="an",
+ linebreak="al",
+ unicodeslot=0x0604,
+ },
[0x0606]={
category="sm",
description="ARABIC-INDIC CUBE ROOT",
@@ -14939,6 +15007,7 @@ characters.data={
unicodeslot=0x0607,
},
[0x0608]={
+ arabic="u",
category="sm",
description="ARABIC RAY",
direction="al",
@@ -14960,6 +15029,7 @@ characters.data={
unicodeslot=0x060A,
},
[0x060B]={
+ arabic="u",
category="sc",
description="AFGHANI SIGN",
direction="al",
@@ -15096,6 +15166,7 @@ characters.data={
unicodeslot=0x061F,
},
[0x0620]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KASHMIRI YEH",
direction="al",
@@ -15104,6 +15175,7 @@ characters.data={
},
[0x0621]={
adobename="hamzasukunarabic",
+ arabic="u",
category="lo",
description="ARABIC LETTER HAMZA",
direction="al",
@@ -15112,6 +15184,7 @@ characters.data={
},
[0x0622]={
adobename="alefmaddaabovearabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH MADDA ABOVE",
direction="al",
@@ -15122,6 +15195,7 @@ characters.data={
},
[0x0623]={
adobename="alefhamzaabovearabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH HAMZA ABOVE",
direction="al",
@@ -15132,6 +15206,7 @@ characters.data={
},
[0x0624]={
adobename="wawhamzaabovearabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH HAMZA ABOVE",
direction="al",
@@ -15142,6 +15217,7 @@ characters.data={
},
[0x0625]={
adobename="alefhamzabelowarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH HAMZA BELOW",
direction="al",
@@ -15152,6 +15228,7 @@ characters.data={
},
[0x0626]={
adobename="yehhamzaabovearabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH HAMZA ABOVE",
direction="al",
@@ -15162,6 +15239,7 @@ characters.data={
},
[0x0627]={
adobename="alefarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF",
direction="al",
@@ -15170,6 +15248,7 @@ characters.data={
},
[0x0628]={
adobename="beharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH",
direction="al",
@@ -15178,6 +15257,7 @@ characters.data={
},
[0x0629]={
adobename="tehmarbutaarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER TEH MARBUTA",
direction="al",
@@ -15186,6 +15266,7 @@ characters.data={
},
[0x062A]={
adobename="teharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER TEH",
direction="al",
@@ -15194,6 +15275,7 @@ characters.data={
},
[0x062B]={
adobename="theharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER THEH",
direction="al",
@@ -15202,6 +15284,7 @@ characters.data={
},
[0x062C]={
adobename="jeemarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER JEEM",
direction="al",
@@ -15210,6 +15293,7 @@ characters.data={
},
[0x062D]={
adobename="haharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH",
direction="al",
@@ -15218,6 +15302,7 @@ characters.data={
},
[0x062E]={
adobename="khaharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER KHAH",
direction="al",
@@ -15226,6 +15311,7 @@ characters.data={
},
[0x062F]={
adobename="dalarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL",
direction="al",
@@ -15234,6 +15320,7 @@ characters.data={
},
[0x0630]={
adobename="thalarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER THAL",
direction="al",
@@ -15242,6 +15329,7 @@ characters.data={
},
[0x0631]={
adobename="rehyehaleflamarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER REH",
direction="al",
@@ -15250,6 +15338,7 @@ characters.data={
},
[0x0632]={
adobename="zainarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER ZAIN",
direction="al",
@@ -15258,6 +15347,7 @@ characters.data={
},
[0x0633]={
adobename="seenarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN",
direction="al",
@@ -15266,6 +15356,7 @@ characters.data={
},
[0x0634]={
adobename="sheenarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER SHEEN",
direction="al",
@@ -15274,6 +15365,7 @@ characters.data={
},
[0x0635]={
adobename="sadarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER SAD",
direction="al",
@@ -15282,6 +15374,7 @@ characters.data={
},
[0x0636]={
adobename="dadarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER DAD",
direction="al",
@@ -15290,6 +15383,7 @@ characters.data={
},
[0x0637]={
adobename="taharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER TAH",
direction="al",
@@ -15298,6 +15392,7 @@ characters.data={
},
[0x0638]={
adobename="zaharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER ZAH",
direction="al",
@@ -15306,6 +15401,7 @@ characters.data={
},
[0x0639]={
adobename="ainarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER AIN",
direction="al",
@@ -15314,6 +15410,7 @@ characters.data={
},
[0x063A]={
adobename="ghainarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER GHAIN",
direction="al",
@@ -15321,6 +15418,7 @@ characters.data={
unicodeslot=0x063A,
},
[0x063B]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH TWO DOTS ABOVE",
direction="al",
@@ -15328,6 +15426,7 @@ characters.data={
unicodeslot=0x063B,
},
[0x063C]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS BELOW",
direction="al",
@@ -15335,6 +15434,7 @@ characters.data={
unicodeslot=0x063C,
},
[0x063D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH INVERTED V",
direction="al",
@@ -15342,6 +15442,7 @@ characters.data={
unicodeslot=0x063D,
},
[0x063E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH TWO DOTS ABOVE",
direction="al",
@@ -15349,6 +15450,7 @@ characters.data={
unicodeslot=0x063E,
},
[0x063F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE",
direction="al",
@@ -15357,6 +15459,7 @@ characters.data={
},
[0x0640]={
adobename="tatweelarabic",
+ arabic="c",
category="lm",
description="ARABIC TATWEEL",
direction="al",
@@ -15365,6 +15468,7 @@ characters.data={
},
[0x0641]={
adobename="feharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH",
direction="al",
@@ -15373,6 +15477,7 @@ characters.data={
},
[0x0642]={
adobename="qafarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER QAF",
direction="al",
@@ -15381,6 +15486,7 @@ characters.data={
},
[0x0643]={
adobename="kafarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER KAF",
direction="al",
@@ -15389,6 +15495,7 @@ characters.data={
},
[0x0644]={
adobename="lamarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM",
direction="al",
@@ -15397,6 +15504,7 @@ characters.data={
},
[0x0645]={
adobename="meemarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER MEEM",
direction="al",
@@ -15405,6 +15513,7 @@ characters.data={
},
[0x0646]={
adobename="noonarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON",
direction="al",
@@ -15413,6 +15522,7 @@ characters.data={
},
[0x0647]={
adobename="heharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER HEH",
direction="al",
@@ -15421,6 +15531,7 @@ characters.data={
},
[0x0648]={
adobename="wawarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW",
direction="al",
@@ -15429,6 +15540,7 @@ characters.data={
},
[0x0649]={
adobename="alefmaksuraarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER ALEF MAKSURA",
direction="al",
@@ -15437,6 +15549,7 @@ characters.data={
},
[0x064A]={
adobename="yeharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH",
direction="al",
@@ -15711,6 +15824,7 @@ characters.data={
unicodeslot=0x066D,
},
[0x066E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS BEH",
direction="al",
@@ -15718,6 +15832,7 @@ characters.data={
unicodeslot=0x066E,
},
[0x066F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS QAF",
direction="al",
@@ -15732,6 +15847,7 @@ characters.data={
unicodeslot=0x0670,
},
[0x0671]={
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WASLA",
direction="al",
@@ -15739,6 +15855,7 @@ characters.data={
unicodeslot=0x0671,
},
[0x0672]={
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH WAVY HAMZA ABOVE",
direction="al",
@@ -15747,6 +15864,7 @@ characters.data={
unicodeslot=0x0672,
},
[0x0673]={
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH WAVY HAMZA BELOW",
direction="al",
@@ -15755,6 +15873,7 @@ characters.data={
unicodeslot=0x0673,
},
[0x0674]={
+ arabic="u",
category="lo",
description="ARABIC LETTER HIGH HAMZA",
direction="al",
@@ -15762,6 +15881,7 @@ characters.data={
unicodeslot=0x0674,
},
[0x0675]={
+ arabic="r",
category="lo",
description="ARABIC LETTER HIGH HAMZA ALEF",
direction="al",
@@ -15770,6 +15890,7 @@ characters.data={
unicodeslot=0x0675,
},
[0x0676]={
+ arabic="r",
category="lo",
description="ARABIC LETTER HIGH HAMZA WAW",
direction="al",
@@ -15778,6 +15899,7 @@ characters.data={
unicodeslot=0x0676,
},
[0x0677]={
+ arabic="r",
category="lo",
description="ARABIC LETTER U WITH HAMZA ABOVE",
direction="al",
@@ -15787,6 +15909,7 @@ characters.data={
unicodeslot=0x0677,
},
[0x0678]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HIGH HAMZA YEH",
direction="al",
@@ -15796,6 +15919,7 @@ characters.data={
},
[0x0679]={
adobename="tteharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER TTEH",
direction="al",
@@ -15803,6 +15927,7 @@ characters.data={
unicodeslot=0x0679,
},
[0x067A]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TTEHEH",
direction="al",
@@ -15810,6 +15935,7 @@ characters.data={
unicodeslot=0x067A,
},
[0x067B]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEEH",
direction="al",
@@ -15817,6 +15943,7 @@ characters.data={
unicodeslot=0x067B,
},
[0x067C]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TEH WITH RING",
direction="al",
@@ -15825,6 +15952,7 @@ characters.data={
unicodeslot=0x067C,
},
[0x067D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TEH WITH THREE DOTS ABOVE DOWNWARDS",
direction="al",
@@ -15834,6 +15962,7 @@ characters.data={
},
[0x067E]={
adobename="peharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER PEH",
direction="al",
@@ -15841,6 +15970,7 @@ characters.data={
unicodeslot=0x067E,
},
[0x067F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TEHEH",
direction="al",
@@ -15848,6 +15978,7 @@ characters.data={
unicodeslot=0x067F,
},
[0x0680]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEHEH",
direction="al",
@@ -15855,6 +15986,7 @@ characters.data={
unicodeslot=0x0680,
},
[0x0681]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH HAMZA ABOVE",
direction="al",
@@ -15863,6 +15995,7 @@ characters.data={
unicodeslot=0x0681,
},
[0x0682]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH TWO DOTS VERTICAL ABOVE",
direction="al",
@@ -15871,6 +16004,7 @@ characters.data={
unicodeslot=0x0682,
},
[0x0683]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NYEH",
direction="al",
@@ -15878,6 +16012,7 @@ characters.data={
unicodeslot=0x0683,
},
[0x0684]={
+ arabic="d",
category="lo",
description="ARABIC LETTER DYEH",
direction="al",
@@ -15885,6 +16020,7 @@ characters.data={
unicodeslot=0x0684,
},
[0x0685]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH THREE DOTS ABOVE",
direction="al",
@@ -15894,6 +16030,7 @@ characters.data={
},
[0x0686]={
adobename="tcheharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER TCHEH",
direction="al",
@@ -15901,6 +16038,7 @@ characters.data={
unicodeslot=0x0686,
},
[0x0687]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TCHEHEH",
direction="al",
@@ -15909,6 +16047,7 @@ characters.data={
},
[0x0688]={
adobename="ddalarabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER DDAL",
direction="al",
@@ -15916,6 +16055,7 @@ characters.data={
unicodeslot=0x0688,
},
[0x0689]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH RING",
direction="al",
@@ -15924,6 +16064,7 @@ characters.data={
unicodeslot=0x0689,
},
[0x068A]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH DOT BELOW",
direction="al",
@@ -15932,6 +16073,7 @@ characters.data={
unicodeslot=0x068A,
},
[0x068B]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH DOT BELOW AND SMALL TAH",
direction="al",
@@ -15940,6 +16082,7 @@ characters.data={
unicodeslot=0x068B,
},
[0x068C]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAHAL",
direction="al",
@@ -15947,6 +16090,7 @@ characters.data={
unicodeslot=0x068C,
},
[0x068D]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DDAHAL",
direction="al",
@@ -15954,6 +16098,7 @@ characters.data={
unicodeslot=0x068D,
},
[0x068E]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DUL",
direction="al",
@@ -15961,6 +16106,7 @@ characters.data={
unicodeslot=0x068E,
},
[0x068F]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH THREE DOTS ABOVE DOWNWARDS",
direction="al",
@@ -15969,6 +16115,7 @@ characters.data={
unicodeslot=0x068F,
},
[0x0690]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH FOUR DOTS ABOVE",
direction="al",
@@ -15978,6 +16125,7 @@ characters.data={
},
[0x0691]={
adobename="rreharabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER RREH",
direction="al",
@@ -15985,6 +16133,7 @@ characters.data={
unicodeslot=0x0691,
},
[0x0692]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL V",
direction="al",
@@ -15993,6 +16142,7 @@ characters.data={
unicodeslot=0x0692,
},
[0x0693]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH RING",
direction="al",
@@ -16001,6 +16151,7 @@ characters.data={
unicodeslot=0x0693,
},
[0x0694]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH DOT BELOW",
direction="al",
@@ -16009,6 +16160,7 @@ characters.data={
unicodeslot=0x0694,
},
[0x0695]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL V BELOW",
direction="al",
@@ -16017,6 +16169,7 @@ characters.data={
unicodeslot=0x0695,
},
[0x0696]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH DOT BELOW AND DOT ABOVE",
direction="al",
@@ -16025,6 +16178,7 @@ characters.data={
unicodeslot=0x0696,
},
[0x0697]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH TWO DOTS ABOVE",
direction="al",
@@ -16034,6 +16188,7 @@ characters.data={
},
[0x0698]={
adobename="jeharabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER JEH",
direction="al",
@@ -16041,6 +16196,7 @@ characters.data={
unicodeslot=0x0698,
},
[0x0699]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH FOUR DOTS ABOVE",
direction="al",
@@ -16049,6 +16205,7 @@ characters.data={
unicodeslot=0x0699,
},
[0x069A]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH DOT BELOW AND DOT ABOVE",
direction="al",
@@ -16057,6 +16214,7 @@ characters.data={
unicodeslot=0x069A,
},
[0x069B]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH THREE DOTS BELOW",
direction="al",
@@ -16065,6 +16223,7 @@ characters.data={
unicodeslot=0x069B,
},
[0x069C]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH THREE DOTS BELOW AND THREE DOTS ABOVE",
direction="al",
@@ -16073,6 +16232,7 @@ characters.data={
unicodeslot=0x069C,
},
[0x069D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SAD WITH TWO DOTS BELOW",
direction="al",
@@ -16081,6 +16241,7 @@ characters.data={
unicodeslot=0x069D,
},
[0x069E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SAD WITH THREE DOTS ABOVE",
direction="al",
@@ -16089,6 +16250,7 @@ characters.data={
unicodeslot=0x069E,
},
[0x069F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TAH WITH THREE DOTS ABOVE",
direction="al",
@@ -16097,6 +16259,7 @@ characters.data={
unicodeslot=0x069F,
},
[0x06A0]={
+ arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH THREE DOTS ABOVE",
direction="al",
@@ -16105,6 +16268,7 @@ characters.data={
unicodeslot=0x06A0,
},
[0x06A1]={
+ arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS FEH",
direction="al",
@@ -16112,6 +16276,7 @@ characters.data={
unicodeslot=0x06A1,
},
[0x06A2]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH DOT MOVED BELOW",
direction="al",
@@ -16120,6 +16285,7 @@ characters.data={
unicodeslot=0x06A2,
},
[0x06A3]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH DOT BELOW",
direction="al",
@@ -16129,6 +16295,7 @@ characters.data={
},
[0x06A4]={
adobename="veharabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER VEH",
direction="al",
@@ -16136,6 +16303,7 @@ characters.data={
unicodeslot=0x06A4,
},
[0x06A5]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH THREE DOTS BELOW",
direction="al",
@@ -16144,6 +16312,7 @@ characters.data={
unicodeslot=0x06A5,
},
[0x06A6]={
+ arabic="d",
category="lo",
description="ARABIC LETTER PEHEH",
direction="al",
@@ -16151,6 +16320,7 @@ characters.data={
unicodeslot=0x06A6,
},
[0x06A7]={
+ arabic="d",
category="lo",
description="ARABIC LETTER QAF WITH DOT ABOVE",
direction="al",
@@ -16159,6 +16329,7 @@ characters.data={
unicodeslot=0x06A7,
},
[0x06A8]={
+ arabic="d",
category="lo",
description="ARABIC LETTER QAF WITH THREE DOTS ABOVE",
direction="al",
@@ -16167,6 +16338,7 @@ characters.data={
unicodeslot=0x06A8,
},
[0x06A9]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH",
direction="al",
@@ -16174,6 +16346,7 @@ characters.data={
unicodeslot=0x06A9,
},
[0x06AA]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SWASH KAF",
direction="al",
@@ -16181,6 +16354,7 @@ characters.data={
unicodeslot=0x06AA,
},
[0x06AB]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH RING",
direction="al",
@@ -16189,6 +16363,7 @@ characters.data={
unicodeslot=0x06AB,
},
[0x06AC]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH DOT ABOVE",
direction="al",
@@ -16197,6 +16372,7 @@ characters.data={
unicodeslot=0x06AC,
},
[0x06AD]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NG",
direction="al",
@@ -16204,6 +16380,7 @@ characters.data={
unicodeslot=0x06AD,
},
[0x06AE]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH THREE DOTS BELOW",
direction="al",
@@ -16213,6 +16390,7 @@ characters.data={
},
[0x06AF]={
adobename="gafarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER GAF",
direction="al",
@@ -16220,6 +16398,7 @@ characters.data={
unicodeslot=0x06AF,
},
[0x06B0]={
+ arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH RING",
direction="al",
@@ -16228,6 +16407,7 @@ characters.data={
unicodeslot=0x06B0,
},
[0x06B1]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NGOEH",
direction="al",
@@ -16235,6 +16415,7 @@ characters.data={
unicodeslot=0x06B1,
},
[0x06B2]={
+ arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH TWO DOTS BELOW",
direction="al",
@@ -16243,6 +16424,7 @@ characters.data={
unicodeslot=0x06B2,
},
[0x06B3]={
+ arabic="d",
category="lo",
description="ARABIC LETTER GUEH",
direction="al",
@@ -16250,6 +16432,7 @@ characters.data={
unicodeslot=0x06B3,
},
[0x06B4]={
+ arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH THREE DOTS ABOVE",
direction="al",
@@ -16258,6 +16441,7 @@ characters.data={
unicodeslot=0x06B4,
},
[0x06B5]={
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH SMALL V",
direction="al",
@@ -16266,6 +16450,7 @@ characters.data={
unicodeslot=0x06B5,
},
[0x06B6]={
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH DOT ABOVE",
direction="al",
@@ -16274,6 +16459,7 @@ characters.data={
unicodeslot=0x06B6,
},
[0x06B7]={
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH THREE DOTS ABOVE",
direction="al",
@@ -16282,6 +16468,7 @@ characters.data={
unicodeslot=0x06B7,
},
[0x06B8]={
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH THREE DOTS BELOW",
direction="al",
@@ -16290,6 +16477,7 @@ characters.data={
unicodeslot=0x06B8,
},
[0x06B9]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH DOT BELOW",
direction="al",
@@ -16299,6 +16487,7 @@ characters.data={
},
[0x06BA]={
adobename="noonghunnaarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON GHUNNA",
direction="al",
@@ -16306,6 +16495,7 @@ characters.data={
unicodeslot=0x06BA,
},
[0x06BB]={
+ arabic="d",
category="lo",
description="ARABIC LETTER RNOON",
direction="al",
@@ -16313,6 +16503,7 @@ characters.data={
unicodeslot=0x06BB,
},
[0x06BC]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH RING",
direction="al",
@@ -16321,6 +16512,7 @@ characters.data={
unicodeslot=0x06BC,
},
[0x06BD]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH THREE DOTS ABOVE",
direction="al",
@@ -16329,6 +16521,7 @@ characters.data={
unicodeslot=0x06BD,
},
[0x06BE]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HEH DOACHASHMEE",
direction="al",
@@ -16336,6 +16529,7 @@ characters.data={
unicodeslot=0x06BE,
},
[0x06BF]={
+ arabic="d",
category="lo",
description="ARABIC LETTER TCHEH WITH DOT ABOVE",
direction="al",
@@ -16344,6 +16538,7 @@ characters.data={
unicodeslot=0x06BF,
},
[0x06C0]={
+ arabic="r",
category="lo",
description="ARABIC LETTER HEH WITH YEH ABOVE",
direction="al",
@@ -16354,6 +16549,7 @@ characters.data={
},
[0x06C1]={
adobename="hehaltonearabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER HEH GOAL",
direction="al",
@@ -16361,6 +16557,7 @@ characters.data={
unicodeslot=0x06C1,
},
[0x06C2]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HEH GOAL WITH HAMZA ABOVE",
direction="al",
@@ -16369,6 +16566,7 @@ characters.data={
unicodeslot=0x06C2,
},
[0x06C3]={
+ arabic="r",
category="lo",
description="ARABIC LETTER TEH MARBUTA GOAL",
direction="al",
@@ -16376,6 +16574,7 @@ characters.data={
unicodeslot=0x06C3,
},
[0x06C4]={
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH RING",
direction="al",
@@ -16384,6 +16583,7 @@ characters.data={
unicodeslot=0x06C4,
},
[0x06C5]={
+ arabic="r",
category="lo",
description="ARABIC LETTER KIRGHIZ OE",
direction="al",
@@ -16391,6 +16591,7 @@ characters.data={
unicodeslot=0x06C5,
},
[0x06C6]={
+ arabic="r",
category="lo",
description="ARABIC LETTER OE",
direction="al",
@@ -16398,6 +16599,7 @@ characters.data={
unicodeslot=0x06C6,
},
[0x06C7]={
+ arabic="r",
category="lo",
description="ARABIC LETTER U",
direction="al",
@@ -16405,6 +16607,7 @@ characters.data={
unicodeslot=0x06C7,
},
[0x06C8]={
+ arabic="r",
category="lo",
description="ARABIC LETTER YU",
direction="al",
@@ -16412,6 +16615,7 @@ characters.data={
unicodeslot=0x06C8,
},
[0x06C9]={
+ arabic="r",
category="lo",
description="ARABIC LETTER KIRGHIZ YU",
direction="al",
@@ -16419,6 +16623,7 @@ characters.data={
unicodeslot=0x06C9,
},
[0x06CA]={
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH TWO DOTS ABOVE",
direction="al",
@@ -16427,6 +16632,7 @@ characters.data={
unicodeslot=0x06CA,
},
[0x06CB]={
+ arabic="r",
category="lo",
description="ARABIC LETTER VE",
direction="al",
@@ -16434,6 +16640,7 @@ characters.data={
unicodeslot=0x06CB,
},
[0x06CC]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH",
direction="al",
@@ -16441,6 +16648,7 @@ characters.data={
unicodeslot=0x06CC,
},
[0x06CD]={
+ arabic="r",
category="lo",
description="ARABIC LETTER YEH WITH TAIL",
direction="al",
@@ -16449,6 +16657,7 @@ characters.data={
unicodeslot=0x06CD,
},
[0x06CE]={
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH SMALL V",
direction="al",
@@ -16457,6 +16666,7 @@ characters.data={
unicodeslot=0x06CE,
},
[0x06CF]={
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH DOT ABOVE",
direction="al",
@@ -16465,6 +16675,7 @@ characters.data={
unicodeslot=0x06CF,
},
[0x06D0]={
+ arabic="d",
category="lo",
description="ARABIC LETTER E",
direction="al",
@@ -16473,6 +16684,7 @@ characters.data={
},
[0x06D1]={
adobename="yehthreedotsbelowarabic",
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH THREE DOTS BELOW",
direction="al",
@@ -16482,6 +16694,7 @@ characters.data={
},
[0x06D2]={
adobename="yehbarreearabic",
+ arabic="r",
category="lo",
description="ARABIC LETTER YEH BARREE",
direction="al",
@@ -16489,6 +16702,7 @@ characters.data={
unicodeslot=0x06D2,
},
[0x06D3]={
+ arabic="r",
category="lo",
description="ARABIC LETTER YEH BARREE WITH HAMZA ABOVE",
direction="al",
@@ -16505,6 +16719,7 @@ characters.data={
},
[0x06D5]={
adobename="afii57534",
+ arabic="r",
category="lo",
description="ARABIC LETTER AE",
direction="al",
@@ -16561,6 +16776,7 @@ characters.data={
unicodeslot=0x06DC,
},
[0x06DD]={
+ arabic="u",
category="cf",
description="ARABIC END OF AYAH",
direction="an",
@@ -16681,6 +16897,7 @@ characters.data={
unicodeslot=0x06ED,
},
[0x06EE]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH INVERTED V",
direction="al",
@@ -16689,6 +16906,7 @@ characters.data={
unicodeslot=0x06EE,
},
[0x06EF]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH INVERTED V",
direction="al",
@@ -16777,6 +16995,7 @@ characters.data={
unicodeslot=0x06F9,
},
[0x06FA]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SHEEN WITH DOT BELOW",
direction="al",
@@ -16785,6 +17004,7 @@ characters.data={
unicodeslot=0x06FA,
},
[0x06FB]={
+ arabic="d",
category="lo",
description="ARABIC LETTER DAD WITH DOT BELOW",
direction="al",
@@ -16793,6 +17013,7 @@ characters.data={
unicodeslot=0x06FB,
},
[0x06FC]={
+ arabic="d",
category="lo",
description="ARABIC LETTER GHAIN WITH DOT BELOW",
direction="al",
@@ -16815,6 +17036,7 @@ characters.data={
unicodeslot=0x06FE,
},
[0x06FF]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HEH WITH INVERTED V",
direction="al",
@@ -16923,12 +17145,13 @@ characters.data={
[0x070F]={
category="cf",
description="SYRIAC ABBREVIATION MARK",
- direction="an",
+ direction="al",
linebreak="al",
unicodeslot=0x070F,
visible="yes",
},
[0x0710]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER ALAPH",
direction="al",
@@ -16943,6 +17166,7 @@ characters.data={
unicodeslot=0x0711,
},
[0x0712]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER BETH",
direction="al",
@@ -16950,6 +17174,7 @@ characters.data={
unicodeslot=0x0712,
},
[0x0713]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER GAMAL",
direction="al",
@@ -16957,6 +17182,7 @@ characters.data={
unicodeslot=0x0713,
},
[0x0714]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER GAMAL GARSHUNI",
direction="al",
@@ -16964,6 +17190,7 @@ characters.data={
unicodeslot=0x0714,
},
[0x0715]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER DALATH",
direction="al",
@@ -16971,6 +17198,7 @@ characters.data={
unicodeslot=0x0715,
},
[0x0716]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER DOTLESS DALATH RISH",
direction="al",
@@ -16978,6 +17206,7 @@ characters.data={
unicodeslot=0x0716,
},
[0x0717]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER HE",
direction="al",
@@ -16985,6 +17214,7 @@ characters.data={
unicodeslot=0x0717,
},
[0x0718]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER WAW",
direction="al",
@@ -16992,6 +17222,7 @@ characters.data={
unicodeslot=0x0718,
},
[0x0719]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER ZAIN",
direction="al",
@@ -16999,6 +17230,7 @@ characters.data={
unicodeslot=0x0719,
},
[0x071A]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER HETH",
direction="al",
@@ -17006,6 +17238,7 @@ characters.data={
unicodeslot=0x071A,
},
[0x071B]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER TETH",
direction="al",
@@ -17013,6 +17246,7 @@ characters.data={
unicodeslot=0x071B,
},
[0x071C]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER TETH GARSHUNI",
direction="al",
@@ -17020,6 +17254,7 @@ characters.data={
unicodeslot=0x071C,
},
[0x071D]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER YUDH",
direction="al",
@@ -17027,6 +17262,7 @@ characters.data={
unicodeslot=0x071D,
},
[0x071E]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER YUDH HE",
direction="al",
@@ -17034,6 +17270,7 @@ characters.data={
unicodeslot=0x071E,
},
[0x071F]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER KAPH",
direction="al",
@@ -17041,6 +17278,7 @@ characters.data={
unicodeslot=0x071F,
},
[0x0720]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER LAMADH",
direction="al",
@@ -17048,6 +17286,7 @@ characters.data={
unicodeslot=0x0720,
},
[0x0721]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER MIM",
direction="al",
@@ -17055,6 +17294,7 @@ characters.data={
unicodeslot=0x0721,
},
[0x0722]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER NUN",
direction="al",
@@ -17062,6 +17302,7 @@ characters.data={
unicodeslot=0x0722,
},
[0x0723]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER SEMKATH",
direction="al",
@@ -17069,6 +17310,7 @@ characters.data={
unicodeslot=0x0723,
},
[0x0724]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER FINAL SEMKATH",
direction="al",
@@ -17076,6 +17318,7 @@ characters.data={
unicodeslot=0x0724,
},
[0x0725]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER E",
direction="al",
@@ -17083,6 +17326,7 @@ characters.data={
unicodeslot=0x0725,
},
[0x0726]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER PE",
direction="al",
@@ -17090,6 +17334,7 @@ characters.data={
unicodeslot=0x0726,
},
[0x0727]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER REVERSED PE",
direction="al",
@@ -17097,6 +17342,7 @@ characters.data={
unicodeslot=0x0727,
},
[0x0728]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER SADHE",
direction="al",
@@ -17104,6 +17350,7 @@ characters.data={
unicodeslot=0x0728,
},
[0x0729]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER QAPH",
direction="al",
@@ -17111,6 +17358,7 @@ characters.data={
unicodeslot=0x0729,
},
[0x072A]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER RISH",
direction="al",
@@ -17118,6 +17366,7 @@ characters.data={
unicodeslot=0x072A,
},
[0x072B]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER SHIN",
direction="al",
@@ -17125,6 +17374,7 @@ characters.data={
unicodeslot=0x072B,
},
[0x072C]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER TAW",
direction="al",
@@ -17132,6 +17382,7 @@ characters.data={
unicodeslot=0x072C,
},
[0x072D]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER PERSIAN BHETH",
direction="al",
@@ -17139,6 +17390,7 @@ characters.data={
unicodeslot=0x072D,
},
[0x072E]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER PERSIAN GHAMAL",
direction="al",
@@ -17146,6 +17398,7 @@ characters.data={
unicodeslot=0x072E,
},
[0x072F]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER PERSIAN DHALATH",
direction="al",
@@ -17342,6 +17595,7 @@ characters.data={
unicodeslot=0x074A,
},
[0x074D]={
+ arabic="r",
category="lo",
description="SYRIAC LETTER SOGDIAN ZHAIN",
direction="al",
@@ -17349,6 +17603,7 @@ characters.data={
unicodeslot=0x074D,
},
[0x074E]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER SOGDIAN KHAPH",
direction="al",
@@ -17356,6 +17611,7 @@ characters.data={
unicodeslot=0x074E,
},
[0x074F]={
+ arabic="d",
category="lo",
description="SYRIAC LETTER SOGDIAN FE",
direction="al",
@@ -17363,6 +17619,7 @@ characters.data={
unicodeslot=0x074F,
},
[0x0750]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW",
direction="al",
@@ -17371,6 +17628,7 @@ characters.data={
unicodeslot=0x0750,
},
[0x0751]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH DOT BELOW AND THREE DOTS ABOVE",
direction="al",
@@ -17379,6 +17637,7 @@ characters.data={
unicodeslot=0x0751,
},
[0x0752]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
@@ -17387,6 +17646,7 @@ characters.data={
unicodeslot=0x0752,
},
[0x0753]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS POINTING UPWARDS BELOW AND TWO DOTS ABOVE",
direction="al",
@@ -17395,6 +17655,7 @@ characters.data={
unicodeslot=0x0753,
},
[0x0754]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH TWO DOTS BELOW AND DOT ABOVE",
direction="al",
@@ -17403,6 +17664,7 @@ characters.data={
unicodeslot=0x0754,
},
[0x0755]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH INVERTED SMALL V BELOW",
direction="al",
@@ -17411,6 +17673,7 @@ characters.data={
unicodeslot=0x0755,
},
[0x0756]={
+ arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH SMALL V",
direction="al",
@@ -17419,6 +17682,7 @@ characters.data={
unicodeslot=0x0756,
},
[0x0757]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH TWO DOTS ABOVE",
direction="al",
@@ -17427,6 +17691,7 @@ characters.data={
unicodeslot=0x0757,
},
[0x0758]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
@@ -17435,6 +17700,7 @@ characters.data={
unicodeslot=0x0758,
},
[0x0759]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH TWO DOTS VERTICALLY BELOW AND SMALL TAH",
direction="al",
@@ -17443,6 +17709,7 @@ characters.data={
unicodeslot=0x0759,
},
[0x075A]={
+ arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH INVERTED SMALL V BELOW",
direction="al",
@@ -17451,6 +17718,7 @@ characters.data={
unicodeslot=0x075A,
},
[0x075B]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH STROKE",
direction="al",
@@ -17459,6 +17727,7 @@ characters.data={
unicodeslot=0x075B,
},
[0x075C]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH FOUR DOTS ABOVE",
direction="al",
@@ -17467,6 +17736,7 @@ characters.data={
unicodeslot=0x075C,
},
[0x075D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH TWO DOTS ABOVE",
direction="al",
@@ -17475,6 +17745,7 @@ characters.data={
unicodeslot=0x075D,
},
[0x075E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH THREE DOTS POINTING DOWNWARDS ABOVE",
direction="al",
@@ -17483,6 +17754,7 @@ characters.data={
unicodeslot=0x075E,
},
[0x075F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
@@ -17491,6 +17763,7 @@ characters.data={
unicodeslot=0x075F,
},
[0x0760]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH TWO DOTS BELOW",
direction="al",
@@ -17499,6 +17772,7 @@ characters.data={
unicodeslot=0x0760,
},
[0x0761]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
@@ -17507,6 +17781,7 @@ characters.data={
unicodeslot=0x0761,
},
[0x0762]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH DOT ABOVE",
direction="al",
@@ -17515,6 +17790,7 @@ characters.data={
unicodeslot=0x0762,
},
[0x0763]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS ABOVE",
direction="al",
@@ -17523,6 +17799,7 @@ characters.data={
unicodeslot=0x0763,
},
[0x0764]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
@@ -17531,6 +17808,7 @@ characters.data={
unicodeslot=0x0764,
},
[0x0765]={
+ arabic="d",
category="lo",
description="ARABIC LETTER MEEM WITH DOT ABOVE",
direction="al",
@@ -17539,6 +17817,7 @@ characters.data={
unicodeslot=0x0765,
},
[0x0766]={
+ arabic="d",
category="lo",
description="ARABIC LETTER MEEM WITH DOT BELOW",
direction="al",
@@ -17547,6 +17826,7 @@ characters.data={
unicodeslot=0x0766,
},
[0x0767]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH TWO DOTS BELOW",
direction="al",
@@ -17555,6 +17835,7 @@ characters.data={
unicodeslot=0x0767,
},
[0x0768]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH SMALL TAH",
direction="al",
@@ -17563,6 +17844,7 @@ characters.data={
unicodeslot=0x0768,
},
[0x0769]={
+ arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH SMALL V",
direction="al",
@@ -17571,6 +17853,7 @@ characters.data={
unicodeslot=0x0769,
},
[0x076A]={
+ arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH BAR",
direction="al",
@@ -17579,6 +17862,7 @@ characters.data={
unicodeslot=0x076A,
},
[0x076B]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
@@ -17587,6 +17871,7 @@ characters.data={
unicodeslot=0x076B,
},
[0x076C]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH HAMZA ABOVE",
direction="al",
@@ -17595,6 +17880,7 @@ characters.data={
unicodeslot=0x076C,
},
[0x076D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
@@ -17603,6 +17889,7 @@ characters.data={
unicodeslot=0x076D,
},
[0x076E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH BELOW",
direction="al",
@@ -17610,6 +17897,7 @@ characters.data={
unicodeslot=0x076E,
},
[0x076F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
@@ -17617,6 +17905,7 @@ characters.data={
unicodeslot=0x076F,
},
[0x0770]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
@@ -17624,6 +17913,7 @@ characters.data={
unicodeslot=0x0770,
},
[0x0771]={
+ arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
@@ -17631,6 +17921,7 @@ characters.data={
unicodeslot=0x0771,
},
[0x0772]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH ABOVE",
direction="al",
@@ -17638,6 +17929,7 @@ characters.data={
unicodeslot=0x0772,
},
[0x0773]={
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
@@ -17645,6 +17937,7 @@ characters.data={
unicodeslot=0x0773,
},
[0x0774]={
+ arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
@@ -17652,6 +17945,7 @@ characters.data={
unicodeslot=0x0774,
},
[0x0775]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
@@ -17659,6 +17953,7 @@ characters.data={
unicodeslot=0x0775,
},
[0x0776]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
@@ -17666,6 +17961,7 @@ characters.data={
unicodeslot=0x0776,
},
[0x0777]={
+ arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT FOUR BELOW",
direction="al",
@@ -17673,6 +17969,7 @@ characters.data={
unicodeslot=0x0777,
},
[0x0778]={
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
@@ -17680,6 +17977,7 @@ characters.data={
unicodeslot=0x0778,
},
[0x0779]={
+ arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
@@ -17687,6 +17985,7 @@ characters.data={
unicodeslot=0x0779,
},
[0x077A]={
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH BARREE WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
@@ -17694,6 +17993,7 @@ characters.data={
unicodeslot=0x077A,
},
[0x077B]={
+ arabic="d",
category="lo",
description="ARABIC LETTER YEH BARREE WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
@@ -17701,6 +18001,7 @@ characters.data={
unicodeslot=0x077B,
},
[0x077C]={
+ arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH EXTENDED ARABIC-INDIC DIGIT FOUR BELOW",
direction="al",
@@ -17708,6 +18009,7 @@ characters.data={
unicodeslot=0x077C,
},
[0x077D]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH EXTENDED ARABIC-INDIC DIGIT FOUR ABOVE",
direction="al",
@@ -17715,6 +18017,7 @@ characters.data={
unicodeslot=0x077D,
},
[0x077E]={
+ arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH INVERTED V",
direction="al",
@@ -17722,6 +18025,7 @@ characters.data={
unicodeslot=0x077E,
},
[0x077F]={
+ arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH TWO DOTS ABOVE",
direction="al",
@@ -18149,6 +18453,7 @@ characters.data={
unicodeslot=0x07C9,
},
[0x07CA]={
+ arabic="d",
category="lo",
description="NKO LETTER A",
direction="r",
@@ -18156,6 +18461,7 @@ characters.data={
unicodeslot=0x07CA,
},
[0x07CB]={
+ arabic="d",
category="lo",
description="NKO LETTER EE",
direction="r",
@@ -18163,6 +18469,7 @@ characters.data={
unicodeslot=0x07CB,
},
[0x07CC]={
+ arabic="d",
category="lo",
description="NKO LETTER I",
direction="r",
@@ -18170,6 +18477,7 @@ characters.data={
unicodeslot=0x07CC,
},
[0x07CD]={
+ arabic="d",
category="lo",
description="NKO LETTER E",
direction="r",
@@ -18177,6 +18485,7 @@ characters.data={
unicodeslot=0x07CD,
},
[0x07CE]={
+ arabic="d",
category="lo",
description="NKO LETTER U",
direction="r",
@@ -18184,6 +18493,7 @@ characters.data={
unicodeslot=0x07CE,
},
[0x07CF]={
+ arabic="d",
category="lo",
description="NKO LETTER OO",
direction="r",
@@ -18191,6 +18501,7 @@ characters.data={
unicodeslot=0x07CF,
},
[0x07D0]={
+ arabic="d",
category="lo",
description="NKO LETTER O",
direction="r",
@@ -18198,6 +18509,7 @@ characters.data={
unicodeslot=0x07D0,
},
[0x07D1]={
+ arabic="d",
category="lo",
description="NKO LETTER DAGBASINNA",
direction="r",
@@ -18205,6 +18517,7 @@ characters.data={
unicodeslot=0x07D1,
},
[0x07D2]={
+ arabic="d",
category="lo",
description="NKO LETTER N",
direction="r",
@@ -18212,6 +18525,7 @@ characters.data={
unicodeslot=0x07D2,
},
[0x07D3]={
+ arabic="d",
category="lo",
description="NKO LETTER BA",
direction="r",
@@ -18219,6 +18533,7 @@ characters.data={
unicodeslot=0x07D3,
},
[0x07D4]={
+ arabic="d",
category="lo",
description="NKO LETTER PA",
direction="r",
@@ -18226,6 +18541,7 @@ characters.data={
unicodeslot=0x07D4,
},
[0x07D5]={
+ arabic="d",
category="lo",
description="NKO LETTER TA",
direction="r",
@@ -18233,6 +18549,7 @@ characters.data={
unicodeslot=0x07D5,
},
[0x07D6]={
+ arabic="d",
category="lo",
description="NKO LETTER JA",
direction="r",
@@ -18240,6 +18557,7 @@ characters.data={
unicodeslot=0x07D6,
},
[0x07D7]={
+ arabic="d",
category="lo",
description="NKO LETTER CHA",
direction="r",
@@ -18247,6 +18565,7 @@ characters.data={
unicodeslot=0x07D7,
},
[0x07D8]={
+ arabic="d",
category="lo",
description="NKO LETTER DA",
direction="r",
@@ -18254,6 +18573,7 @@ characters.data={
unicodeslot=0x07D8,
},
[0x07D9]={
+ arabic="d",
category="lo",
description="NKO LETTER RA",
direction="r",
@@ -18261,6 +18581,7 @@ characters.data={
unicodeslot=0x07D9,
},
[0x07DA]={
+ arabic="d",
category="lo",
description="NKO LETTER RRA",
direction="r",
@@ -18268,6 +18589,7 @@ characters.data={
unicodeslot=0x07DA,
},
[0x07DB]={
+ arabic="d",
category="lo",
description="NKO LETTER SA",
direction="r",
@@ -18275,6 +18597,7 @@ characters.data={
unicodeslot=0x07DB,
},
[0x07DC]={
+ arabic="d",
category="lo",
description="NKO LETTER GBA",
direction="r",
@@ -18282,6 +18605,7 @@ characters.data={
unicodeslot=0x07DC,
},
[0x07DD]={
+ arabic="d",
category="lo",
description="NKO LETTER FA",
direction="r",
@@ -18289,6 +18613,7 @@ characters.data={
unicodeslot=0x07DD,
},
[0x07DE]={
+ arabic="d",
category="lo",
description="NKO LETTER KA",
direction="r",
@@ -18296,6 +18621,7 @@ characters.data={
unicodeslot=0x07DE,
},
[0x07DF]={
+ arabic="d",
category="lo",
description="NKO LETTER LA",
direction="r",
@@ -18303,6 +18629,7 @@ characters.data={
unicodeslot=0x07DF,
},
[0x07E0]={
+ arabic="d",
category="lo",
description="NKO LETTER NA WOLOSO",
direction="r",
@@ -18310,6 +18637,7 @@ characters.data={
unicodeslot=0x07E0,
},
[0x07E1]={
+ arabic="d",
category="lo",
description="NKO LETTER MA",
direction="r",
@@ -18317,6 +18645,7 @@ characters.data={
unicodeslot=0x07E1,
},
[0x07E2]={
+ arabic="d",
category="lo",
description="NKO LETTER NYA",
direction="r",
@@ -18324,6 +18653,7 @@ characters.data={
unicodeslot=0x07E2,
},
[0x07E3]={
+ arabic="d",
category="lo",
description="NKO LETTER NA",
direction="r",
@@ -18331,6 +18661,7 @@ characters.data={
unicodeslot=0x07E3,
},
[0x07E4]={
+ arabic="d",
category="lo",
description="NKO LETTER HA",
direction="r",
@@ -18338,6 +18669,7 @@ characters.data={
unicodeslot=0x07E4,
},
[0x07E5]={
+ arabic="d",
category="lo",
description="NKO LETTER WA",
direction="r",
@@ -18345,6 +18677,7 @@ characters.data={
unicodeslot=0x07E5,
},
[0x07E6]={
+ arabic="d",
category="lo",
description="NKO LETTER YA",
direction="r",
@@ -18352,6 +18685,7 @@ characters.data={
unicodeslot=0x07E6,
},
[0x07E7]={
+ arabic="d",
category="lo",
description="NKO LETTER NYA WOLOSO",
direction="r",
@@ -18359,6 +18693,7 @@ characters.data={
unicodeslot=0x07E7,
},
[0x07E8]={
+ arabic="d",
category="lo",
description="NKO LETTER JONA JA",
direction="r",
@@ -18366,6 +18701,7 @@ characters.data={
unicodeslot=0x07E8,
},
[0x07E9]={
+ arabic="d",
category="lo",
description="NKO LETTER JONA CHA",
direction="r",
@@ -18373,6 +18709,7 @@ characters.data={
unicodeslot=0x07E9,
},
[0x07EA]={
+ arabic="d",
category="lo",
description="NKO LETTER JONA RA",
direction="r",
@@ -18485,6 +18822,7 @@ characters.data={
unicodeslot=0x07F9,
},
[0x07FA]={
+ arabic="c",
category="lm",
description="NKO LAJANYALAN",
direction="r",
@@ -18919,6 +19257,7 @@ characters.data={
unicodeslot=0x083E,
},
[0x0840]={
+ arabic="r",
category="lo",
description="MANDAIC LETTER HALQA",
direction="r",
@@ -18926,6 +19265,7 @@ characters.data={
unicodeslot=0x0840,
},
[0x0841]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AB",
direction="r",
@@ -18933,6 +19273,7 @@ characters.data={
unicodeslot=0x0841,
},
[0x0842]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AG",
direction="r",
@@ -18940,6 +19281,7 @@ characters.data={
unicodeslot=0x0842,
},
[0x0843]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AD",
direction="r",
@@ -18947,6 +19289,7 @@ characters.data={
unicodeslot=0x0843,
},
[0x0844]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AH",
direction="r",
@@ -18954,6 +19297,7 @@ characters.data={
unicodeslot=0x0844,
},
[0x0845]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER USHENNA",
direction="r",
@@ -18961,6 +19305,7 @@ characters.data={
unicodeslot=0x0845,
},
[0x0846]={
+ arabic="r",
category="lo",
description="MANDAIC LETTER AZ",
direction="r",
@@ -18968,6 +19313,7 @@ characters.data={
unicodeslot=0x0846,
},
[0x0847]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER IT",
direction="r",
@@ -18975,6 +19321,7 @@ characters.data={
unicodeslot=0x0847,
},
[0x0848]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER ATT",
direction="r",
@@ -18982,6 +19329,7 @@ characters.data={
unicodeslot=0x0848,
},
[0x0849]={
+ arabic="r",
category="lo",
description="MANDAIC LETTER AKSA",
direction="r",
@@ -18989,6 +19337,7 @@ characters.data={
unicodeslot=0x0849,
},
[0x084A]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AK",
direction="r",
@@ -18996,6 +19345,7 @@ characters.data={
unicodeslot=0x084A,
},
[0x084B]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AL",
direction="r",
@@ -19003,6 +19353,7 @@ characters.data={
unicodeslot=0x084B,
},
[0x084C]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AM",
direction="r",
@@ -19010,6 +19361,7 @@ characters.data={
unicodeslot=0x084C,
},
[0x084D]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AN",
direction="r",
@@ -19017,6 +19369,7 @@ characters.data={
unicodeslot=0x084D,
},
[0x084E]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AS",
direction="r",
@@ -19024,6 +19377,7 @@ characters.data={
unicodeslot=0x084E,
},
[0x084F]={
+ arabic="r",
category="lo",
description="MANDAIC LETTER IN",
direction="r",
@@ -19031,6 +19385,7 @@ characters.data={
unicodeslot=0x084F,
},
[0x0850]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AP",
direction="r",
@@ -19038,6 +19393,7 @@ characters.data={
unicodeslot=0x0850,
},
[0x0851]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER ASZ",
direction="r",
@@ -19045,6 +19401,7 @@ characters.data={
unicodeslot=0x0851,
},
[0x0852]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AQ",
direction="r",
@@ -19052,6 +19409,7 @@ characters.data={
unicodeslot=0x0852,
},
[0x0853]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AR",
direction="r",
@@ -19059,6 +19417,7 @@ characters.data={
unicodeslot=0x0853,
},
[0x0854]={
+ arabic="r",
category="lo",
description="MANDAIC LETTER ASH",
direction="r",
@@ -19066,6 +19425,7 @@ characters.data={
unicodeslot=0x0854,
},
[0x0855]={
+ arabic="d",
category="lo",
description="MANDAIC LETTER AT",
direction="r",
@@ -19073,6 +19433,7 @@ characters.data={
unicodeslot=0x0855,
},
[0x0856]={
+ arabic="u",
category="lo",
description="MANDAIC LETTER DUSHENNA",
direction="r",
@@ -19080,6 +19441,7 @@ characters.data={
unicodeslot=0x0856,
},
[0x0857]={
+ arabic="u",
category="lo",
description="MANDAIC LETTER KAD",
direction="r",
@@ -19087,6 +19449,7 @@ characters.data={
unicodeslot=0x0857,
},
[0x0858]={
+ arabic="u",
category="lo",
description="MANDAIC LETTER AIN",
direction="r",
@@ -19121,6 +19484,291 @@ characters.data={
linebreak="al",
unicodeslot=0x085E,
},
+ [0x08A0]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER BEH WITH SMALL V BELOW",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A0,
+ },
+ [0x08A2]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER JEEM WITH TWO DOTS ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A2,
+ },
+ [0x08A3]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER TAH WITH TWO DOTS ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A3,
+ },
+ [0x08A4]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER FEH WITH DOT BELOW AND THREE DOTS ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A4,
+ },
+ [0x08A5]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER QAF WITH DOT BELOW",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A5,
+ },
+ [0x08A6]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER LAM WITH DOUBLE BAR",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A6,
+ },
+ [0x08A7]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER MEEM WITH THREE DOTS ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A7,
+ },
+ [0x08A8]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER YEH WITH TWO DOTS BELOW AND HAMZA ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A8,
+ },
+ [0x08A9]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER YEH WITH TWO DOTS BELOW AND DOT ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08A9,
+ },
+ [0x08AA]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER REH WITH LOOP",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08AA,
+ },
+ [0x08AB]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER WAW WITH DOT WITHIN",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08AB,
+ },
+ [0x08AC]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER ROHINGYA YEH",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x08AC,
+ },
+ [0x08E4]={
+ category="mn",
+ description="ARABIC CURLY FATHA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E4,
+ },
+ [0x08E5]={
+ category="mn",
+ description="ARABIC CURLY DAMMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E5,
+ },
+ [0x08E6]={
+ category="mn",
+ description="ARABIC CURLY KASRA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E6,
+ },
+ [0x08E7]={
+ category="mn",
+ description="ARABIC CURLY FATHATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E7,
+ },
+ [0x08E8]={
+ category="mn",
+ description="ARABIC CURLY DAMMATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E8,
+ },
+ [0x08E9]={
+ category="mn",
+ description="ARABIC CURLY KASRATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08E9,
+ },
+ [0x08EA]={
+ category="mn",
+ description="ARABIC TONE ONE DOT ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08EA,
+ },
+ [0x08EB]={
+ category="mn",
+ description="ARABIC TONE TWO DOTS ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08EB,
+ },
+ [0x08EC]={
+ category="mn",
+ description="ARABIC TONE LOOP ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08EC,
+ },
+ [0x08ED]={
+ category="mn",
+ description="ARABIC TONE ONE DOT BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08ED,
+ },
+ [0x08EE]={
+ category="mn",
+ description="ARABIC TONE TWO DOTS BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08EE,
+ },
+ [0x08EF]={
+ category="mn",
+ description="ARABIC TONE LOOP BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08EF,
+ },
+ [0x08F0]={
+ category="mn",
+ description="ARABIC OPEN FATHATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F0,
+ },
+ [0x08F1]={
+ category="mn",
+ description="ARABIC OPEN DAMMATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F1,
+ },
+ [0x08F2]={
+ category="mn",
+ description="ARABIC OPEN KASRATAN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F2,
+ },
+ [0x08F3]={
+ category="mn",
+ description="ARABIC SMALL HIGH WAW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F3,
+ },
+ [0x08F4]={
+ category="mn",
+ description="ARABIC FATHA WITH RING",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F4,
+ },
+ [0x08F5]={
+ category="mn",
+ description="ARABIC FATHA WITH DOT ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F5,
+ },
+ [0x08F6]={
+ category="mn",
+ description="ARABIC KASRA WITH DOT BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F6,
+ },
+ [0x08F7]={
+ category="mn",
+ description="ARABIC LEFT ARROWHEAD ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F7,
+ },
+ [0x08F8]={
+ category="mn",
+ description="ARABIC RIGHT ARROWHEAD ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F8,
+ },
+ [0x08F9]={
+ category="mn",
+ description="ARABIC LEFT ARROWHEAD BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08F9,
+ },
+ [0x08FA]={
+ category="mn",
+ description="ARABIC RIGHT ARROWHEAD BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08FA,
+ },
+ [0x08FB]={
+ category="mn",
+ description="ARABIC DOUBLE RIGHT ARROWHEAD ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08FB,
+ },
+ [0x08FC]={
+ category="mn",
+ description="ARABIC DOUBLE RIGHT ARROWHEAD ABOVE WITH DOT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08FC,
+ },
+ [0x08FD]={
+ category="mn",
+ description="ARABIC RIGHT ARROWHEAD ABOVE WITH DOT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08FD,
+ },
+ [0x08FE]={
+ category="mn",
+ description="ARABIC DAMMA WITH DOT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x08FE,
+ },
[0x0900]={
category="mn",
description="DEVANAGARI SIGN INVERTED CANDRABINDU",
@@ -22149,6 +22797,13 @@ characters.data={
linebreak="nu",
unicodeslot=0x0AEF,
},
+ [0x0AF0]={
+ category="po",
+ description="GUJARATI ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x0AF0,
+ },
[0x0AF1]={
category="sc",
description="GUJARATI RUPEE SIGN",
@@ -26966,6 +27621,20 @@ characters.data={
specials={ "compat", 0x0EAB, 0x0EA1 },
unicodeslot=0x0EDD,
},
+ [0x0EDE]={
+ category="lo",
+ description="LAO LETTER KHMU GO",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0x0EDE,
+ },
+ [0x0EDF]={
+ category="lo",
+ description="LAO LETTER KHMU NYO",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0x0EDF,
+ },
[0x0F00]={
category="lo",
description="TIBETAN SYLLABLE OM",
@@ -29892,6 +30561,20 @@ characters.data={
linebreak="al",
unicodeslot=0x10C5,
},
+ [0x10C7]={
+ category="lu",
+ description="GEORGIAN CAPITAL LETTER YN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10C7,
+ },
+ [0x10CD]={
+ category="lu",
+ description="GEORGIAN CAPITAL LETTER AEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10CD,
+ },
[0x10D0]={
category="lo",
description="GEORGIAN LETTER AN",
@@ -30208,6 +30891,27 @@ characters.data={
specials={ "super", 0x10DC },
unicodeslot=0x10FC,
},
+ [0x10FD]={
+ category="lo",
+ description="GEORGIAN LETTER AEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10FD,
+ },
+ [0x10FE]={
+ category="lo",
+ description="GEORGIAN LETTER HARD SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10FE,
+ },
+ [0x10FF]={
+ category="lo",
+ description="GEORGIAN LETTER LABIAL SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10FF,
+ },
[0x1100]={
category="lo",
cjkwd="w",
@@ -31447,7 +32151,6 @@ characters.data={
},
[0x11A3]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG A-EU",
direction="l",
linebreak="jv",
@@ -31455,7 +32158,6 @@ characters.data={
},
[0x11A4]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YA-U",
direction="l",
linebreak="jv",
@@ -31463,7 +32165,6 @@ characters.data={
},
[0x11A5]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YEO-YA",
direction="l",
linebreak="jv",
@@ -31471,7 +32172,6 @@ characters.data={
},
[0x11A6]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG O-YA",
direction="l",
linebreak="jv",
@@ -31479,7 +32179,6 @@ characters.data={
},
[0x11A7]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG O-YAE",
direction="l",
linebreak="jv",
@@ -32061,7 +32760,6 @@ characters.data={
},
[0x11FA]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KIYEOK-NIEUN",
direction="l",
linebreak="jt",
@@ -32069,7 +32767,6 @@ characters.data={
},
[0x11FB]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KIYEOK-PIEUP",
direction="l",
linebreak="jt",
@@ -32077,7 +32774,6 @@ characters.data={
},
[0x11FC]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KIYEOK-CHIEUCH",
direction="l",
linebreak="jt",
@@ -32085,7 +32781,6 @@ characters.data={
},
[0x11FD]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KIYEOK-KHIEUKH",
direction="l",
linebreak="jt",
@@ -32093,7 +32788,6 @@ characters.data={
},
[0x11FE]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KIYEOK-HIEUH",
direction="l",
linebreak="jt",
@@ -32101,7 +32795,6 @@ characters.data={
},
[0x11FF]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGNIEUN",
direction="l",
linebreak="jt",
@@ -41576,14 +42269,14 @@ characters.data={
[0x17B4]={
category="cf",
description="KHMER VOWEL INHERENT AQ",
- direction="l",
+ direction="nsm",
linebreak="sa",
unicodeslot=0x17B4,
},
[0x17B5]={
category="cf",
description="KHMER VOWEL INHERENT AA",
- direction="l",
+ direction="nsm",
linebreak="sa",
unicodeslot=0x17B5,
},
@@ -47499,6 +48192,27 @@ characters.data={
linebreak="cm",
unicodeslot=0x1BAA,
},
+ [0x1BAB]={
+ category="mn",
+ description="SUNDANESE SIGN VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1BAB,
+ },
+ [0x1BAC]={
+ category="mc",
+ description="SUNDANESE CONSONANT SIGN PASANGAN MA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1BAC,
+ },
+ [0x1BAD]={
+ category="mc",
+ description="SUNDANESE CONSONANT SIGN PASANGAN WA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1BAD,
+ },
[0x1BAE]={
category="lo",
description="SUNDANESE LETTER KHA",
@@ -47583,6 +48297,48 @@ characters.data={
linebreak="nu",
unicodeslot=0x1BB9,
},
+ [0x1BBA]={
+ category="lo",
+ description="SUNDANESE AVAGRAHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBA,
+ },
+ [0x1BBB]={
+ category="lo",
+ description="SUNDANESE LETTER REU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBB,
+ },
+ [0x1BBC]={
+ category="lo",
+ description="SUNDANESE LETTER LEU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBC,
+ },
+ [0x1BBD]={
+ category="lo",
+ description="SUNDANESE LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBD,
+ },
+ [0x1BBE]={
+ category="lo",
+ description="SUNDANESE LETTER FINAL K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBE,
+ },
+ [0x1BBF]={
+ category="lo",
+ description="SUNDANESE LETTER FINAL M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BBF,
+ },
[0x1BC0]={
category="lo",
description="BATAK LETTER A",
@@ -48829,6 +49585,62 @@ characters.data={
linebreak="ba",
unicodeslot=0x1C7F,
},
+ [0x1CC0]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU SURYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC0,
+ },
+ [0x1CC1]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU PANGLONG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC1,
+ },
+ [0x1CC2]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU PURNAMA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC2,
+ },
+ [0x1CC3]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU CAKRA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC3,
+ },
+ [0x1CC4]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU LEU SATANGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC4,
+ },
+ [0x1CC5]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU KA SATANGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC5,
+ },
+ [0x1CC6]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU DA SATANGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC6,
+ },
+ [0x1CC7]={
+ category="po",
+ description="SUNDANESE PUNCTUATION BINDU BA SATANGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CC7,
+ },
[0x1CD0]={
category="mn",
description="VEDIC TONE KARSHANA",
@@ -49074,6 +49886,34 @@ characters.data={
linebreak="cm",
unicodeslot=0x1CF2,
},
+ [0x1CF3]={
+ category="mc",
+ description="VEDIC SIGN ROTATED ARDHAVISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1CF3,
+ },
+ [0x1CF4]={
+ category="mn",
+ description="VEDIC TONE CANDRA ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1CF4,
+ },
+ [0x1CF5]={
+ category="lo",
+ description="VEDIC SIGN JIHVAMULIYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CF5,
+ },
+ [0x1CF6]={
+ category="lo",
+ description="VEDIC SIGN UPADHMANIYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1CF6,
+ },
[0x1D00]={
category="ll",
description="LATIN LETTER SMALL CAPITAL A",
@@ -56332,6 +57172,7 @@ characters.data={
},
[0x200C]={
adobename="zerowidthnonjoiner",
+ arabic="u",
category="cf",
contextname="zwnj",
description="ZERO WIDTH NON-JOINER",
@@ -56341,6 +57182,7 @@ characters.data={
},
[0x200D]={
adobename="afii301",
+ arabic="c",
category="cf",
contextname="zwj",
description="ZERO WIDTH JOINER",
@@ -56624,7 +57466,7 @@ characters.data={
{
class="inner",
name="dots",
- }
+ },
},
specials={ "compat", 0x002E, 0x002E, 0x002E },
unicodeslot=0x2026,
@@ -56817,6 +57659,10 @@ characters.data={
linebreak="ns",
specials={ "compat", 0x0021, 0x0021 },
unicodeslot=0x203C,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x203D]={
category="po",
@@ -56832,6 +57678,16 @@ characters.data={
description="OVERLINE",
direction="on",
linebreak="al",
+ mathspec={
+ {
+ class="topaccent",
+ name="overbar",
+ },
+ {
+ class="botaccent",
+ name="underbar",
+ },
+ },
specials={ "compat", 0x0020, 0x0305 },
unicodeslot=0x203E,
},
@@ -56880,7 +57736,7 @@ characters.data={
linebreak="is",
mathspec={
{
- class="ord",
+ class="ordinary",
name="slash",
},
{
@@ -56929,6 +57785,10 @@ characters.data={
linebreak="ns",
specials={ "compat", 0x0021, 0x003F },
unicodeslot=0x2049,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x204A]={
category="po",
@@ -57244,6 +58104,8 @@ characters.data={
description="SUPERSCRIPT PLUS SIGN",
direction="es",
linebreak="al",
+ mathclass="binary",
+ mathname="positivesign",
specials={ "super", 0x002B },
unicodeslot=0x207A,
},
@@ -57252,6 +58114,8 @@ characters.data={
description="SUPERSCRIPT MINUS",
direction="es",
linebreak="al",
+ mathclass="binary",
+ mathname="negativesign",
specials={ "super", 0x2212 },
unicodeslot=0x207B,
},
@@ -57732,6 +58596,13 @@ characters.data={
linebreak="pr",
unicodeslot=0x20B9,
},
+ [0x20BA]={
+ category="sc",
+ description="TURKISH LIRA SIGN",
+ direction="et",
+ linebreak="pr",
+ unicodeslot=0x20BA,
+ },
[0x20D0]={
category="mn",
description="COMBINING LEFT HARPOON ABOVE",
@@ -57786,7 +58657,7 @@ characters.data={
description="COMBINING RIGHT ARROW ABOVE",
direction="nsm",
linebreak="cm",
- mathclass="accent",
+ mathclass="topaccent",
mathname="vec",
unicodeslot=0x20D7,
},
@@ -57816,7 +58687,7 @@ characters.data={
description="COMBINING THREE DOTS ABOVE",
direction="nsm",
linebreak="cm",
- mathclass="accent",
+ mathclass="topaccent",
mathname="dddot",
unicodeslot=0x20DB,
},
@@ -57930,7 +58801,7 @@ characters.data={
description="COMBINING WIDE BRIDGE ABOVE",
direction="nsm",
linebreak="cm",
- mathclass="accent",
+ mathclass="topaccent",
unicodeslot=0x20E9,
},
[0x20EA]={
@@ -58118,8 +58989,16 @@ characters.data={
description="PLANCK CONSTANT OVER TWO PI",
direction="l",
linebreak="al",
- mathclass="variable",
- mathname="hslash",
+ mathspec={
+ {
+ class="variable",
+ name="hslash",
+ },
+ {
+ class="ordinary",
+ name="hbar",
+ },
+ },
specials={ "font", 0x0127 },
unicodeslot=0x210F,
},
@@ -58445,7 +59324,7 @@ characters.data={
direction="l",
lccode=0x214E,
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="Finv",
unicodeslot=0x2132,
},
@@ -58513,6 +59392,10 @@ characters.data={
linebreak="al",
specials={ "font", 0x0069 },
unicodeslot=0x2139,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x213A]={
category="so",
@@ -58574,7 +59457,7 @@ characters.data={
description="TURNED SANS-SERIF CAPITAL G",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="Game",
unicodeslot=0x2141,
},
@@ -58584,7 +59467,7 @@ characters.data={
description="TURNED SANS-SERIF CAPITAL L",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
unicodeslot=0x2142,
},
[0x2143]={
@@ -58593,7 +59476,7 @@ characters.data={
description="REVERSED SANS-SERIF CAPITAL L",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
unicodeslot=0x2143,
},
[0x2144]={
@@ -58602,7 +59485,7 @@ characters.data={
description="TURNED SANS-SERIF CAPITAL Y",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
unicodeslot=0x2144,
},
[0x2145]={
@@ -58660,7 +59543,7 @@ characters.data={
description="PROPERTY LINE",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="PropertyLine",
unicodeslot=0x214A,
},
@@ -58669,7 +59552,7 @@ characters.data={
description="TURNED AMPERSAND",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mathname="upand",
unicodeslot=0x214B,
},
@@ -59301,6 +60184,7 @@ characters.data={
description="LEFTWARDS ARROW",
direction="on",
linebreak="ai",
+ mathextensible="l",
mathfiller="leftarrowfill",
mathspec={
{
@@ -59331,6 +60215,7 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="u",
mathname="uparrow",
unicodeslot=0x2191,
},
@@ -59341,6 +60226,7 @@ characters.data={
description="RIGHTWARDS ARROW",
direction="on",
linebreak="ai",
+ mathextensible="r",
mathfiller="rightarrowfill",
mathspec={
{
@@ -59371,6 +60257,7 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="d",
mathname="downarrow",
unicodeslot=0x2193,
},
@@ -59382,8 +60269,13 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="h",
mathname="leftrightarrow",
unicodeslot=0x2194,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2195]={
adobename="arrowupdn",
@@ -59393,8 +60285,13 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="v",
mathname="updownarrow",
unicodeslot=0x2195,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2196]={
adobename="arrowupleft",
@@ -59406,6 +60303,10 @@ characters.data={
mathclass="relation",
mathname="nwarrow",
unicodeslot=0x2196,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2197]={
adobename="arrowupright",
@@ -59417,6 +60318,10 @@ characters.data={
mathclass="relation",
mathname="nearrow",
unicodeslot=0x2197,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2198]={
adobename="arrowdownright",
@@ -59428,6 +60333,10 @@ characters.data={
mathclass="relation",
mathname="searrow",
unicodeslot=0x2198,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2199]={
adobename="arrowdownleft",
@@ -59439,6 +60348,10 @@ characters.data={
mathclass="relation",
mathname="swarrow",
unicodeslot=0x2199,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x219A]={
category="sm",
@@ -59446,6 +60359,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="nleftarrow",
specials={ "char", 0x2190, 0x0338 },
unicodeslot=0x219A,
@@ -59456,6 +60370,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="nrightarrow",
specials={ "char", 0x2192, 0x0338 },
unicodeslot=0x219B,
@@ -59466,6 +60381,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftwavearrow",
unicodeslot=0x219C,
},
@@ -59475,6 +60391,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightwavearrow",
unicodeslot=0x219D,
},
@@ -59484,6 +60401,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
+ mathfiller="twoheadleftarrowfill",
mathname="twoheadleftarrow",
unicodeslot=0x219E,
},
@@ -59493,6 +60412,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="twoheaduparrow",
unicodeslot=0x219F,
},
@@ -59502,6 +60422,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="twoheadrightarrowfill",
mathname="twoheadrightarrow",
unicodeslot=0x21A0,
},
@@ -59511,6 +60433,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="twoheaddownarrow",
unicodeslot=0x21A1,
},
@@ -59520,6 +60443,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftarrowtail",
unicodeslot=0x21A2,
},
@@ -59529,6 +60453,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightarrowtail",
unicodeslot=0x21A3,
},
@@ -59538,6 +60463,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="mapsfrom",
unicodeslot=0x21A4,
},
@@ -59547,6 +60473,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="mapsup",
unicodeslot=0x21A5,
},
@@ -59556,6 +60483,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="mapstofill",
mathname="mapsto",
unicodeslot=0x21A6,
},
@@ -59565,6 +60494,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="mapsdown",
unicodeslot=0x21A7,
},
@@ -59574,7 +60504,8 @@ characters.data={
description="UP DOWN ARROW WITH BASE",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="v",
mathname="updownarrowbar",
unicodeslot=0x21A8,
},
@@ -59584,8 +60515,14 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
+ mathfiller="hookleftarrowfill",
mathname="hookleftarrow",
unicodeslot=0x21A9,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x21AA]={
category="so",
@@ -59593,8 +60530,14 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="hookrightarrowfill",
mathname="hookrightarrow",
unicodeslot=0x21AA,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x21AB]={
category="so",
@@ -59602,6 +60545,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="looparrowleft",
unicodeslot=0x21AB,
},
@@ -59611,6 +60555,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="looparrowright",
unicodeslot=0x21AC,
},
@@ -59620,6 +60565,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="leftrightsquigarrow",
unicodeslot=0x21AD,
},
@@ -59629,6 +60575,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="nleftrightarrow",
specials={ "char", 0x2194, 0x0338 },
unicodeslot=0x21AE,
@@ -59639,6 +60586,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="downzigzagarrow",
unicodeslot=0x21AF,
},
@@ -59648,6 +60596,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="Lsh",
unicodeslot=0x21B0,
},
@@ -59657,6 +60606,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="Rsh",
unicodeslot=0x21B1,
},
@@ -59666,6 +60616,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="Ldsh",
unicodeslot=0x21B2,
},
@@ -59675,6 +60626,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="Rdsh",
unicodeslot=0x21B3,
},
@@ -59683,7 +60635,8 @@ characters.data={
description="RIGHTWARDS ARROW WITH CORNER DOWNWARDS",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="m",
mathname="linefeed",
unicodeslot=0x21B4,
},
@@ -59694,7 +60647,8 @@ characters.data={
description="DOWNWARDS ARROW WITH CORNER LEFTWARDS",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="m",
mathname="carriagereturn",
unicodeslot=0x21B5,
},
@@ -59733,6 +60687,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="barleftarrowrightarrowbar",
unicodeslot=0x21B9,
},
@@ -59777,6 +60732,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
+ mathfiller="leftharpoonupfill",
mathname="leftharpoonup",
unicodeslot=0x21BC,
},
@@ -59786,6 +60743,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
+ mathfiller="leftharpoondownfill",
mathname="leftharpoondown",
unicodeslot=0x21BD,
},
@@ -59822,6 +60781,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="rightharpoonupfill",
mathname="rightharpoonup",
unicodeslot=0x21C0,
},
@@ -59831,6 +60792,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="rightharpoondownfill",
mathname="rightharpoondown",
unicodeslot=0x21C1,
},
@@ -59859,6 +60822,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="rightoverleftarrowfill",
mathname="rightleftarrows",
unicodeslot=0x21C4,
},
@@ -59869,6 +60834,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="updownarrows",
unicodeslot=0x21C5,
},
@@ -59879,6 +60845,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="lefgtoverrightarrowfill",
mathname="leftrightarrows",
unicodeslot=0x21C6,
},
@@ -59888,6 +60856,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftleftarrows",
unicodeslot=0x21C7,
},
@@ -59897,6 +60866,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="upuparrows",
unicodeslot=0x21C8,
},
@@ -59906,6 +60876,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightrightarrows",
unicodeslot=0x21C9,
},
@@ -59915,6 +60886,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="downdownarrows",
unicodeslot=0x21CA,
},
@@ -59924,6 +60896,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="leftrightharpoonsfill",
mathname="leftrightharpoons",
unicodeslot=0x21CB,
},
@@ -59933,6 +60907,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="rightleftharpoonsfill",
mathname="rightleftharpoons",
unicodeslot=0x21CC,
},
@@ -59943,6 +60919,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="nLeftarrow",
specials={ "char", 0x21D0, 0x0338 },
unicodeslot=0x21CD,
@@ -59953,6 +60930,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="nLeftrightarrow",
specials={ "char", 0x21D4, 0x0338 },
unicodeslot=0x21CE,
@@ -59964,6 +60942,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="nRightarrow",
specials={ "char", 0x21D2, 0x0338 },
unicodeslot=0x21CF,
@@ -59975,6 +60954,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="Leftarrow",
unicodeslot=0x21D0,
},
@@ -59985,6 +60965,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="Uparrow",
unicodeslot=0x21D1,
},
@@ -59995,8 +60976,17 @@ characters.data={
description="RIGHTWARDS DOUBLE ARROW",
direction="on",
linebreak="ai",
- mathclass="relation",
- mathname="Rightarrow",
+ mathextensible="r",
+ mathspec={
+ {
+ class="relation",
+ name="Rightarrow",
+ },
+ {
+ class="relation",
+ name="imply",
+ },
+ },
unicodeslot=0x21D2,
},
[0x21D3]={
@@ -60006,6 +60996,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="Downarrow",
unicodeslot=0x21D3,
},
@@ -60017,6 +61008,7 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="h",
mathname="Leftrightarrow",
unicodeslot=0x21D4,
},
@@ -60026,6 +61018,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="v",
mathname="Updownarrow",
unicodeslot=0x21D5,
},
@@ -60071,6 +61064,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="Lleftarrow",
unicodeslot=0x21DA,
},
@@ -60080,6 +61074,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="Rrightarrow",
unicodeslot=0x21DB,
},
@@ -60089,6 +61084,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftsquigarrow",
unicodeslot=0x21DC,
},
@@ -60116,6 +61112,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="nHuparrow",
unicodeslot=0x21DE,
},
@@ -60126,6 +61123,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="nHdownarrow",
unicodeslot=0x21DF,
},
@@ -60136,6 +61134,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftdasharrow",
unicodeslot=0x21E0,
},
@@ -60146,6 +61145,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="updasharrow",
unicodeslot=0x21E1,
},
@@ -60174,6 +61174,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="downdasharrow",
unicodeslot=0x21E3,
},
@@ -60184,6 +61185,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="barleftarrow",
unicodeslot=0x21E4,
},
@@ -60194,6 +61196,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightarrowbar",
unicodeslot=0x21E5,
},
@@ -60203,7 +61206,8 @@ characters.data={
description="LEFTWARDS WHITE ARROW",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="l",
mathname="leftwhitearrow",
unicodeslot=0x21E6,
},
@@ -60214,7 +61218,8 @@ characters.data={
description="UPWARDS WHITE ARROW",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="u",
mathname="upwhitearrow",
unicodeslot=0x21E7,
},
@@ -60224,7 +61229,8 @@ characters.data={
description="RIGHTWARDS WHITE ARROW",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="r",
mathname="rightwhitearrow",
unicodeslot=0x21E8,
},
@@ -60234,7 +61240,8 @@ characters.data={
description="DOWNWARDS WHITE ARROW",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="d",
mathname="downwhitearrow",
unicodeslot=0x21E9,
},
@@ -60251,7 +61258,8 @@ characters.data={
description="UPWARDS WHITE ARROW ON PEDESTAL",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
+ mathextensible="u",
mathname="whitearrowupfrombar",
unicodeslot=0x21EB,
},
@@ -60317,6 +61325,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="circleonrightarrow",
unicodeslot=0x21F4,
},
@@ -60326,6 +61335,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="m",
mathname="downuparrows",
unicodeslot=0x21F5,
},
@@ -60335,6 +61345,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightthreearrows",
unicodeslot=0x21F6,
},
@@ -60344,6 +61355,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="nvleftarrow",
unicodeslot=0x21F7,
},
@@ -60353,6 +61365,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="nvrightarrow",
unicodeslot=0x21F8,
},
@@ -60362,6 +61375,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="nvleftrightarrow",
unicodeslot=0x21F9,
},
@@ -60371,6 +61385,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="nVleftarrow",
unicodeslot=0x21FA,
},
@@ -60380,6 +61395,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="nVrightarrow",
unicodeslot=0x21FB,
},
@@ -60389,6 +61405,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="nVleftrightarrow",
unicodeslot=0x21FC,
},
@@ -60398,6 +61415,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="leftarrowtriangle",
unicodeslot=0x21FD,
},
@@ -60407,6 +61425,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="rightarrowtriangle",
unicodeslot=0x21FE,
},
@@ -60416,6 +61435,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
mathname="leftrightarrowtriangle",
unicodeslot=0x21FF,
},
@@ -60426,7 +61446,7 @@ characters.data={
description="FOR ALL",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="forall",
unicodeslot=0x2200,
},
@@ -60435,7 +61455,7 @@ characters.data={
description="COMPLEMENT",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="complement",
unicodeslot=0x2201,
},
@@ -60457,7 +61477,7 @@ characters.data={
description="THERE EXISTS",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="exists",
unicodeslot=0x2203,
},
@@ -60466,7 +61486,7 @@ characters.data={
description="THERE DOES NOT EXIST",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="nexists",
specials={ "char", 0x2203, 0x0338 },
unicodeslot=0x2204,
@@ -60627,11 +61647,9 @@ characters.data={
[0x2212]={
adobename="minus",
category="sm",
---comment="the mathclass is needed for mathsymbol remapping",
description="MINUS SIGN",
direction="es",
linebreak="pr",
---mathclass="binary",
mathspec={
{
class="binary",
@@ -60667,7 +61685,7 @@ characters.data={
adobename="divisionslash",
category="sm",
cjkwd="a",
- comment="mathclass=ord,mathname=diagup",
+ comment="mathclass=ordinary,mathname=diagup",
description="DIVISION SLASH",
direction="on",
linebreak="ai",
@@ -60676,7 +61694,7 @@ characters.data={
},
[0x2216]={
category="sm",
- comment="mathclass=ord,mathname=diagdown",
+ comment="mathclass=ordinary,mathname=diagdown",
description="SET MINUS",
direction="on",
linebreak="al",
@@ -60771,7 +61789,7 @@ characters.data={
description="RIGHT ANGLE",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="rightangle",
unicodeslot=0x221F,
},
@@ -60782,7 +61800,7 @@ characters.data={
description="ANGLE",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="angle",
unicodeslot=0x2220,
},
@@ -60791,7 +61809,7 @@ characters.data={
description="MEASURED ANGLE",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="measuredangle",
unicodeslot=0x2221,
},
@@ -60800,7 +61818,7 @@ characters.data={
description="SPHERICAL ANGLE",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="sphericalangle",
unicodeslot=0x2222,
},
@@ -60887,11 +61905,11 @@ characters.data={
linebreak="ai",
mathspec={
{
- class="bin",
+ class="binary",
name="vee",
},
{
- class="bin",
+ class="binary",
name="lor",
},
},
@@ -60932,15 +61950,16 @@ characters.data={
description="INTEGRAL",
direction="on",
linebreak="ai",
+ mathclass="limop",
mathspec={
{
- class="nothing",
- name="intop",
- },
- {
class="limop",
name="int",
},
+ {
+ class="nothing",
+ name="intop",
+ },
},
unicodeslot=0x222B,
},
@@ -60951,15 +61970,16 @@ characters.data={
description="DOUBLE INTEGRAL",
direction="on",
linebreak="ai",
+ mathclass="limop",
mathspec={
{
- class="nothing",
- name="iintop",
- },
- {
class="limop",
name="iint",
},
+ {
+ class="nothing",
+ name="iintop",
+ },
},
specials={ "compat", 0x222B, 0x222B },
unicodeslot=0x222C,
@@ -60969,15 +61989,16 @@ characters.data={
description="TRIPLE INTEGRAL",
direction="on",
linebreak="al",
+ mathclass="limop",
mathspec={
{
- class="nothing",
- name="iiintop",
- },
- {
class="limop",
name="iiint",
},
+ {
+ class="nothing",
+ name="iiintop",
+ },
},
specials={ "compat", 0x222B, 0x222B, 0x222B },
unicodeslot=0x222D,
@@ -61507,6 +62528,7 @@ characters.data={
direction="on",
linebreak="ai",
mathclass="relation",
+ mathextensible="h",
mathname="equiv",
unicodeslot=0x2261,
},
@@ -61517,6 +62539,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="triplerelfill",
mathname="nequiv",
specials={ "char", 0x2261, 0x0338 },
unicodeslot=0x2262,
@@ -61526,6 +62550,8 @@ characters.data={
description="STRICTLY EQUIVALENT TO",
direction="on",
linebreak="al",
+ mathclass="relation",
+ mathextensible="h",
unicodeslot=0x2263,
},
[0x2264]={
@@ -62402,7 +63428,7 @@ characters.data={
description="NORMAL SUBGROUP OF",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mirror=0x22B3,
unicodeslot=0x22B2,
},
@@ -62412,7 +63438,7 @@ characters.data={
description="CONTAINS AS NORMAL SUBGROUP",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mirror=0x22B2,
unicodeslot=0x22B3,
},
@@ -63174,7 +64200,7 @@ characters.data={
description="DIAMETER SIGN",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="varnothing",
unicodeslot=0x2300,
},
@@ -63235,7 +64261,7 @@ characters.data={
description="LEFT CEILING",
direction="on",
linebreak="al",
- mathspec = {
+ mathspec={
{
class="open",
name="lceil",
@@ -63243,7 +64269,7 @@ characters.data={
{
class="open",
name="lceiling",
- }
+ },
},
mirror=0x2309,
unicodeslot=0x2308,
@@ -63253,7 +64279,7 @@ characters.data={
description="RIGHT CEILING",
direction="on",
linebreak="al",
- mathspec = {
+ mathspec={
{
class="close",
name="rceil",
@@ -63261,7 +64287,7 @@ characters.data={
{
class="close",
name="rceiling",
- }
+ },
},
mirror=0x2308,
unicodeslot=0x2309,
@@ -63392,15 +64418,23 @@ characters.data={
category="so",
description="WATCH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x231A,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x231B]={
category="so",
description="HOURGLASS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x231B,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x231C]={
category="so",
@@ -64821,7 +65855,7 @@ characters.data={
description="TOP TORTOISE SHELL BRACKET",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="topaccent",
unicodeslot=0x23E0,
},
[0x23E1]={
@@ -64829,7 +65863,7 @@ characters.data={
description="BOTTOM TORTOISE SHELL BRACKET",
direction="on",
linebreak="al",
- mathclass="accent",
+ mathclass="botaccent",
unicodeslot=0x23E1,
},
[0x23E2]={
@@ -64934,28 +65968,28 @@ characters.data={
category="so",
description="ALARM CLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x23F0,
},
[0x23F1]={
category="so",
description="STOPWATCH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x23F1,
},
[0x23F2]={
category="so",
description="TIMER CLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x23F2,
},
[0x23F3]={
category="so",
description="HOURGLASS WITH FLOWING SAND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x23F3,
},
[0x2400]={
@@ -66311,6 +67345,10 @@ characters.data={
linebreak="ai",
specials={ "circle", 0x004D },
unicodeslot=0x24C2,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x24C3]={
adobename="Ncircle",
@@ -66364,7 +67402,7 @@ characters.data={
direction="l",
lccode=0x24E1,
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="circledR",
specials={ "circle", 0x0052 },
unicodeslot=0x24C7,
@@ -66377,7 +67415,7 @@ characters.data={
direction="l",
lccode=0x24E2,
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="circledS",
specials={ "circle", 0x0053 },
unicodeslot=0x24C8,
@@ -68228,7 +69266,7 @@ characters.data={
description="BLACK SQUARE",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="blacksquare",
unicodeslot=0x25A0,
},
@@ -68241,11 +69279,11 @@ characters.data={
linebreak="ai",
mathspec={
{
- class="ord",
+ class="ordinary",
name="square",
},
{
- class="ord",
+ class="ordinary",
name="Box",
},
},
@@ -68256,7 +69294,7 @@ characters.data={
description="WHITE SQUARE WITH ROUNDED CORNERS",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
unicodeslot=0x25A2,
},
[0x25A3]={
@@ -68329,6 +69367,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25AA,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25AB]={
adobename="whitesmallsquare",
@@ -68337,6 +69379,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25AB,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25AC]={
adobename="filledrect",
@@ -68388,6 +69434,8 @@ characters.data={
description="BLACK UP-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangle",
unicodeslot=0x25B2,
},
[0x25B3]={
@@ -68399,7 +69447,7 @@ characters.data={
linebreak="ai",
mathspec={
{
- class="ord",
+ class="ordinary",
name="triangle",
},
{
@@ -68432,9 +69480,13 @@ characters.data={
description="BLACK RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="blacktriangleright",
unicodeslot=0x25B6,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25B7]={
adobename="whiterightpointingtriangle",
@@ -68443,7 +69495,7 @@ characters.data={
description="WHITE RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="triangleright",
unicodeslot=0x25B7,
},
@@ -68452,8 +69504,6 @@ characters.data={
description="BLACK RIGHT-POINTING SMALL TRIANGLE",
direction="on",
linebreak="al",
- mathclass="bin",
- mathname="blacktriangleleft",
unicodeslot=0x25B8,
},
[0x25B9]={
@@ -68486,6 +69536,8 @@ characters.data={
description="BLACK DOWN-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangledown",
unicodeslot=0x25BC,
},
[0x25BD]={
@@ -68495,8 +69547,16 @@ characters.data={
description="WHITE DOWN-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="binary",
- mathname="bigtriangledown",
+ mathspec={
+ {
+ class="binary",
+ name="triangledown",
+ },
+ {
+ class="binary",
+ name="bigtriangledown",
+ },
+ },
unicodeslot=0x25BD,
},
[0x25BE]={
@@ -68521,7 +69581,13 @@ characters.data={
description="BLACK LEFT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangleleft",
unicodeslot=0x25C0,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25C1]={
adobename="whiteleftpointingtriangle",
@@ -68530,7 +69596,7 @@ characters.data={
description="WHITE LEFT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="triangleleft",
unicodeslot=0x25C1,
},
@@ -68605,7 +69671,7 @@ characters.data={
description="LOZENGE",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="lozenge",
unicodeslot=0x25CA,
},
@@ -68977,6 +70043,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25FB,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25FC]={
category="sm",
@@ -68984,6 +70054,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25FC,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25FD]={
category="sm",
@@ -68991,6 +70065,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25FD,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25FE]={
category="sm",
@@ -68998,6 +70076,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x25FE,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x25FF]={
category="sm",
@@ -69010,28 +70092,36 @@ characters.data={
category="so",
description="BLACK SUN WITH RAYS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2600,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2601]={
category="so",
description="CLOUD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2601,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2602]={
category="so",
description="UMBRELLA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2602,
},
[0x2603]={
category="so",
description="SNOWMAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2603,
},
[0x2604]={
@@ -69048,7 +70138,7 @@ characters.data={
description="BLACK STAR",
direction="on",
linebreak="ai",
- mathclass="ord",
+ mathclass="ordinary",
mathname="bigstar",
unicodeslot=0x2605,
},
@@ -69119,6 +70209,10 @@ characters.data={
direction="on",
linebreak="ai",
unicodeslot=0x260E,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x260F]={
adobename="whitetelephone",
@@ -69142,6 +70236,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2611,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2612]={
category="so",
@@ -69162,16 +70260,24 @@ characters.data={
cjkwd="a",
description="UMBRELLA WITH RAIN DROPS",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x2614,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2615]={
category="so",
cjkwd="a",
description="HOT BEVERAGE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x2615,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2616]={
category="so",
@@ -69191,7 +70297,7 @@ characters.data={
category="so",
description="SHAMROCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2618,
},
[0x2619]={
@@ -69205,14 +70311,14 @@ characters.data={
category="so",
description="BLACK LEFT POINTING INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x261A,
},
[0x261B]={
category="so",
description="BLACK RIGHT POINTING INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x261B,
},
[0x261C]={
@@ -69221,7 +70327,7 @@ characters.data={
cjkwd="a",
description="WHITE LEFT POINTING INDEX",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x261C,
},
[0x261D]={
@@ -69229,8 +70335,12 @@ characters.data={
category="so",
description="WHITE UP POINTING INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x261D,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x261E]={
adobename="pointingindexrightwhite",
@@ -69238,7 +70348,7 @@ characters.data={
cjkwd="a",
description="WHITE RIGHT POINTING INDEX",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x261E,
},
[0x261F]={
@@ -69246,7 +70356,7 @@ characters.data={
category="so",
description="WHITE DOWN POINTING INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x261F,
},
[0x2620]={
@@ -69429,7 +70539,7 @@ characters.data={
category="so",
description="WHITE FROWNING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2639,
},
[0x263A]={
@@ -69437,15 +70547,19 @@ characters.data={
category="so",
description="WHITE SMILING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x263A,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x263B]={
adobename="invsmileface",
category="so",
description="BLACK SMILING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x263B,
},
[0x263C]={
@@ -69544,6 +70658,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2648,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2649]={
category="so",
@@ -69551,6 +70669,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2649,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264A]={
category="so",
@@ -69558,6 +70680,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264A,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264B]={
category="so",
@@ -69565,6 +70691,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264B,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264C]={
category="so",
@@ -69572,6 +70702,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264C,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264D]={
category="so",
@@ -69579,6 +70713,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264D,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264E]={
category="so",
@@ -69586,6 +70724,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264E,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x264F]={
category="so",
@@ -69593,6 +70735,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x264F,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2650]={
category="so",
@@ -69600,6 +70746,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2650,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2651]={
category="so",
@@ -69607,6 +70757,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2651,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2652]={
category="so",
@@ -69614,6 +70768,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2652,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2653]={
category="so",
@@ -69621,6 +70779,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2653,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2654]={
category="so",
@@ -69716,6 +70878,10 @@ characters.data={
mathclass="default",
mathname="spadesuit",
unicodeslot=0x2660,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2661]={
adobename="heartsuitwhite",
@@ -69748,6 +70914,10 @@ characters.data={
mathclass="default",
mathname="clubsuit",
unicodeslot=0x2663,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2664]={
adobename="spadesuitwhite",
@@ -69766,6 +70936,10 @@ characters.data={
direction="on",
linebreak="ai",
unicodeslot=0x2665,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2666]={
adobename="diamond",
@@ -69773,9 +70947,13 @@ characters.data={
description="BLACK DIAMOND SUIT",
direction="on",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="blacklozenge",
unicodeslot=0x2666,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2667]={
adobename="clubsuitwhite",
@@ -69792,8 +70970,12 @@ characters.data={
cjkwd="a",
description="HOT SPRINGS",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x2668,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2669]={
adobename="quarternote",
@@ -69944,6 +71126,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x267B,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x267C]={
category="so",
@@ -69970,8 +71156,12 @@ characters.data={
category="so",
description="WHEELCHAIR SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x267F,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2680]={
category="so",
@@ -70112,6 +71302,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2693,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2694]={
category="so",
@@ -70205,6 +71399,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x26A0,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26A1]={
category="so",
@@ -70212,6 +71410,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x26A1,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26A2]={
category="so",
@@ -70275,6 +71477,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x26AA,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26AB]={
category="so",
@@ -70282,6 +71488,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x26AB,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26AC]={
category="so",
@@ -70406,51 +71616,59 @@ characters.data={
category="so",
description="SOCCER BALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x26BD,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26BE]={
category="so",
cjkwd="a",
description="BASEBALL",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26BE,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26BF]={
category="so",
cjkwd="a",
description="SQUARED KEY",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26BF,
},
[0x26C0]={
category="so",
description="WHITE DRAUGHTS MAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x26C0,
},
[0x26C1]={
category="so",
description="WHITE DRAUGHTS KING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x26C1,
},
[0x26C2]={
category="so",
description="BLACK DRAUGHTS MAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x26C2,
},
[0x26C3]={
category="so",
description="BLACK DRAUGHTS KING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x26C3,
},
[0x26C4]={
@@ -70458,23 +71676,31 @@ characters.data={
cjkwd="a",
description="SNOWMAN WITHOUT SNOW",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26C4,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26C5]={
category="so",
cjkwd="a",
description="SUN BEHIND CLOUD",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26C5,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26C6]={
category="so",
cjkwd="a",
description="RAIN",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26C6,
},
[0x26C7]={
@@ -70482,7 +71708,7 @@ characters.data={
cjkwd="a",
description="BLACK SNOWMAN",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26C7,
},
[0x26C8]={
@@ -70490,7 +71716,7 @@ characters.data={
cjkwd="a",
description="THUNDER CLOUD AND RAIN",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26C8,
},
[0x26C9]={
@@ -70530,7 +71756,7 @@ characters.data={
cjkwd="a",
description="DISABLED CAR",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26CD,
},
[0x26CE]={
@@ -70545,7 +71771,7 @@ characters.data={
cjkwd="a",
description="PICK",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26CF,
},
[0x26D0]={
@@ -70553,7 +71779,7 @@ characters.data={
cjkwd="a",
description="CAR SLIDING",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D0,
},
[0x26D1]={
@@ -70561,7 +71787,7 @@ characters.data={
cjkwd="a",
description="HELMET WITH WHITE CROSS",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D1,
},
[0x26D2]={
@@ -70577,7 +71803,7 @@ characters.data={
cjkwd="a",
description="CHAINS",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D3,
},
[0x26D4]={
@@ -70585,8 +71811,12 @@ characters.data={
cjkwd="a",
description="NO ENTRY",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D4,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26D5]={
category="so",
@@ -70617,7 +71847,7 @@ characters.data={
cjkwd="a",
description="BLACK LEFT LANE MERGE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D8,
},
[0x26D9]={
@@ -70625,7 +71855,7 @@ characters.data={
cjkwd="a",
description="WHITE LEFT LANE MERGE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26D9,
},
[0x26DA]={
@@ -70649,7 +71879,7 @@ characters.data={
cjkwd="a",
description="LEFT CLOSED ENTRY",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26DC,
},
[0x26DD]={
@@ -70673,7 +71903,7 @@ characters.data={
cjkwd="a",
description="BLACK TRUCK",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26DF,
},
[0x26E0]={
@@ -70681,7 +71911,7 @@ characters.data={
cjkwd="a",
description="RESTRICTED LEFT ENTRY-1",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26E0,
},
[0x26E1]={
@@ -70689,7 +71919,7 @@ characters.data={
cjkwd="a",
description="RESTRICTED LEFT ENTRY-2",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26E1,
},
[0x26E2]={
@@ -70756,8 +71986,12 @@ characters.data={
cjkwd="a",
description="CHURCH",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26EA,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26EB]={
category="so",
@@ -70812,7 +72046,7 @@ characters.data={
cjkwd="a",
description="UMBRELLA ON GROUND",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F1,
},
[0x26F2]={
@@ -70820,23 +72054,31 @@ characters.data={
cjkwd="a",
description="FOUNTAIN",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F2,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26F3]={
category="so",
cjkwd="a",
description="FLAG IN HOLE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F3,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26F4]={
category="so",
cjkwd="a",
description="FERRY",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F4,
},
[0x26F5]={
@@ -70844,8 +72086,12 @@ characters.data={
cjkwd="a",
description="SAILBOAT",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F5,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26F6]={
category="so",
@@ -70860,7 +72106,7 @@ characters.data={
cjkwd="a",
description="SKIER",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F7,
},
[0x26F8]={
@@ -70868,7 +72114,7 @@ characters.data={
cjkwd="a",
description="ICE SKATE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F8,
},
[0x26F9]={
@@ -70876,7 +72122,7 @@ characters.data={
cjkwd="a",
description="PERSON WITH BALL",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26F9,
},
[0x26FA]={
@@ -70884,8 +72130,12 @@ characters.data={
cjkwd="a",
description="TENT",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26FA,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26FB]={
category="so",
@@ -70908,15 +72158,19 @@ characters.data={
cjkwd="a",
description="FUEL PUMP",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26FD,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x26FE]={
category="so",
cjkwd="a",
description="CUP ON BLACK SQUARE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26FE,
},
[0x26FF]={
@@ -70924,35 +72178,39 @@ characters.data={
cjkwd="a",
description="WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE",
direction="on",
- linebreak="ai",
+ linebreak="id",
unicodeslot=0x26FF,
},
[0x2701]={
category="so",
description="UPPER BLADE SCISSORS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2701,
},
[0x2702]={
category="so",
description="BLACK SCISSORS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2702,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2703]={
category="so",
description="LOWER BLADE SCISSORS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2703,
},
[0x2704]={
category="so",
description="WHITE SCISSORS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2704,
},
[0x2705]={
@@ -70980,42 +72238,54 @@ characters.data={
category="so",
description="AIRPLANE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2708,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2709]={
category="so",
description="ENVELOPE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2709,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x270A]={
category="so",
description="RAISED FIST",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x270A,
},
[0x270B]={
category="so",
description="RAISED HAND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x270B,
},
[0x270C]={
category="so",
description="VICTORY HAND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x270C,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x270D]={
category="so",
description="WRITING HAND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x270D,
},
[0x270E]={
@@ -71031,6 +72301,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x270F,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2710]={
category="so",
@@ -71052,6 +72326,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2712,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2713]={
adobename="checkmark",
@@ -71069,6 +72347,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2714,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2715]={
category="so",
@@ -71083,6 +72365,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2716,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2717]={
category="so",
@@ -71288,6 +72574,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2733,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2734]={
category="so",
@@ -71295,6 +72585,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2734,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2735]={
category="so",
@@ -71408,6 +72702,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2744,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2745]={
category="so",
@@ -71429,6 +72727,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2747,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2748]={
category="so",
@@ -71542,6 +72844,10 @@ characters.data={
direction="on",
linebreak="ai",
unicodeslot=0x2757,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2758]={
category="so",
@@ -71633,6 +72939,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2764,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2765]={
category="so",
@@ -72094,6 +73404,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x27A1,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x27A2]={
category="so",
@@ -72388,6 +73702,15 @@ characters.data={
linebreak="al",
unicodeslot=0x27CA,
},
+ [0x27CB]={
+ category="sm",
+ comment="check math properties",
+ description="MATHEMATICAL RISING DIAGONAL",
+ direction="on",
+ linebreak="al",
+ mirror=0x27CD,
+ unicodeslot=0x27CB,
+ },
[0x27CC]={
category="sm",
description="LONG DIVISION",
@@ -72395,6 +73718,15 @@ characters.data={
linebreak="al",
unicodeslot=0x27CC,
},
+ [0x27CD]={
+ category="sm",
+ comment="check math properties",
+ description="MATHEMATICAL FALLING DIAGONAL",
+ direction="on",
+ linebreak="al",
+ mirror=0x27CB,
+ unicodeslot=0x27CD,
+ },
[0x27CE]={
category="sm",
description="SQUARED LOGICAL AND",
@@ -72680,6 +74012,7 @@ characters.data={
description="UPWARDS QUADRUPLE ARROW",
direction="on",
linebreak="al",
+ mathextensible="u",
unicodeslot=0x27F0,
},
[0x27F1]={
@@ -72687,6 +74020,7 @@ characters.data={
description="DOWNWARDS QUADRUPLE ARROW",
direction="on",
linebreak="al",
+ mathextensible="v",
unicodeslot=0x27F1,
},
[0x27F2]={
@@ -72708,6 +74042,7 @@ characters.data={
description="RIGHT ARROW WITH CIRCLED PLUS",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x27F4,
},
[0x27F5]={
@@ -72716,6 +74051,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="longleftarrow",
unicodeslot=0x27F5,
},
@@ -72725,6 +74061,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="longrightarrow",
unicodeslot=0x27F6,
},
@@ -72734,6 +74071,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="leftrightarrowfill",
mathname="longleftrightarrow",
unicodeslot=0x27F7,
},
@@ -72743,6 +74082,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
+ mathfiller="Leftarrowfill",
mathname="Longleftarrow",
unicodeslot=0x27F8,
},
@@ -72752,6 +74093,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
+ mathfiller="Rightarrowfill",
mathname="Longrightarrow",
unicodeslot=0x27F9,
},
@@ -72761,6 +74104,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="h",
+ mathfiller="Leftrightarrowfill",
mathname="Longleftrightarrow",
unicodeslot=0x27FA,
},
@@ -72770,6 +74115,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="longmapsfrom",
unicodeslot=0x27FB,
},
@@ -72779,6 +74125,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="longmapsto",
unicodeslot=0x27FC,
},
@@ -72788,6 +74135,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="Longmapsfrom",
unicodeslot=0x27FD,
},
@@ -72797,6 +74145,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="Longmapsto",
unicodeslot=0x27FE,
},
@@ -72806,6 +74155,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="longrightsquigarrow",
unicodeslot=0x27FF,
},
@@ -74606,6 +75956,7 @@ characters.data={
description="RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2900,
},
[0x2901]={
@@ -74613,6 +75964,7 @@ characters.data={
description="RIGHTWARDS TWO-HEADED ARROW WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2901,
},
[0x2902]={
@@ -74620,6 +75972,7 @@ characters.data={
description="LEFTWARDS DOUBLE ARROW WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2902,
},
[0x2903]={
@@ -74627,6 +75980,7 @@ characters.data={
description="RIGHTWARDS DOUBLE ARROW WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2903,
},
[0x2904]={
@@ -74634,6 +75988,7 @@ characters.data={
description="LEFT RIGHT DOUBLE ARROW WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="h",
unicodeslot=0x2904,
},
[0x2905]={
@@ -74641,6 +75996,7 @@ characters.data={
description="RIGHTWARDS TWO-HEADED ARROW FROM BAR",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2905,
},
[0x2906]={
@@ -74649,6 +76005,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="Mapsfrom",
unicodeslot=0x2906,
},
@@ -74658,6 +76015,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="Mapsto",
unicodeslot=0x2907,
},
@@ -74666,6 +76024,7 @@ characters.data={
description="DOWNWARDS ARROW WITH HORIZONTAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="d",
unicodeslot=0x2908,
},
[0x2909]={
@@ -74673,6 +76032,7 @@ characters.data={
description="UPWARDS ARROW WITH HORIZONTAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="u",
unicodeslot=0x2909,
},
[0x290A]={
@@ -74681,6 +76041,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="u",
mathname="Uuparrow",
unicodeslot=0x290A,
},
@@ -74690,6 +76051,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="d",
mathname="Ddownarrow",
unicodeslot=0x290B,
},
@@ -74699,6 +76061,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="l",
mathname="dashedleftarrow",
unicodeslot=0x290C,
},
@@ -74708,6 +76071,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="dashedrightarrow",
unicodeslot=0x290D,
},
@@ -74716,6 +76080,7 @@ characters.data={
description="LEFTWARDS TRIPLE DASH ARROW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x290E,
},
[0x290F]={
@@ -74723,6 +76088,7 @@ characters.data={
description="RIGHTWARDS TRIPLE DASH ARROW",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x290F,
},
[0x2910]={
@@ -74730,6 +76096,7 @@ characters.data={
description="RIGHTWARDS TWO-HEADED TRIPLE DASH ARROW",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2910,
},
[0x2911]={
@@ -74738,6 +76105,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="dottedrightarrow",
unicodeslot=0x2911,
},
@@ -74746,6 +76114,7 @@ characters.data={
description="UPWARDS ARROW TO BAR",
direction="on",
linebreak="al",
+ mathextensible="u",
unicodeslot=0x2912,
},
[0x2913]={
@@ -74753,6 +76122,7 @@ characters.data={
description="DOWNWARDS ARROW TO BAR",
direction="on",
linebreak="al",
+ mathextensible="d",
unicodeslot=0x2913,
},
[0x2914]={
@@ -74760,6 +76130,7 @@ characters.data={
description="RIGHTWARDS ARROW WITH TAIL WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2914,
},
[0x2915]={
@@ -74767,6 +76138,7 @@ characters.data={
description="RIGHTWARDS ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2915,
},
[0x2916]={
@@ -74775,6 +76147,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
mathname="twoheadrightarrowtail",
unicodeslot=0x2916,
},
@@ -74784,6 +76157,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="relation",
+ mathextensible="r",
unicodeslot=0x2917,
},
[0x2918]={
@@ -74791,6 +76165,7 @@ characters.data={
description="RIGHTWARDS TWO-HEADED ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2918,
},
[0x2919]={
@@ -74798,6 +76173,7 @@ characters.data={
description="LEFTWARDS ARROW-TAIL",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2919,
},
[0x291A]={
@@ -74805,6 +76181,7 @@ characters.data={
description="RIGHTWARDS ARROW-TAIL",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x291A,
},
[0x291B]={
@@ -74812,6 +76189,7 @@ characters.data={
description="LEFTWARDS DOUBLE ARROW-TAIL",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x291B,
},
[0x291C]={
@@ -74819,6 +76197,7 @@ characters.data={
description="RIGHTWARDS DOUBLE ARROW-TAIL",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x291C,
},
[0x291D]={
@@ -74826,6 +76205,7 @@ characters.data={
description="LEFTWARDS ARROW TO BLACK DIAMOND",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x291D,
},
[0x291E]={
@@ -74833,6 +76213,7 @@ characters.data={
description="RIGHTWARDS ARROW TO BLACK DIAMOND",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x291E,
},
[0x291F]={
@@ -74840,6 +76221,7 @@ characters.data={
description="LEFTWARDS ARROW FROM BAR TO BLACK DIAMOND",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x291F,
},
[0x2920]={
@@ -74847,6 +76229,7 @@ characters.data={
description="RIGHTWARDS ARROW FROM BAR TO BLACK DIAMOND",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2920,
},
[0x2921]={
@@ -74992,6 +76375,7 @@ characters.data={
description="WAVE ARROW POINTING DIRECTLY RIGHT",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2933,
},
[0x2934]={
@@ -74999,20 +76383,31 @@ characters.data={
description="ARROW POINTING RIGHTWARDS THEN CURVING UPWARDS",
direction="on",
linebreak="al",
+ mathextensible="m",
unicodeslot=0x2934,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2935]={
category="sm",
description="ARROW POINTING RIGHTWARDS THEN CURVING DOWNWARDS",
direction="on",
linebreak="al",
+ mathextensible="m",
unicodeslot=0x2935,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2936]={
category="sm",
description="ARROW POINTING DOWNWARDS THEN CURVING LEFTWARDS",
direction="on",
linebreak="al",
+ mathextensible="m",
unicodeslot=0x2936,
},
[0x2937]={
@@ -75020,6 +76415,7 @@ characters.data={
description="ARROW POINTING DOWNWARDS THEN CURVING RIGHTWARDS",
direction="on",
linebreak="al",
+ mathextensible="m",
unicodeslot=0x2937,
},
[0x2938]={
@@ -75027,6 +76423,7 @@ characters.data={
description="RIGHT-SIDE ARC CLOCKWISE ARROW",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2938,
},
[0x2939]={
@@ -75034,6 +76431,7 @@ characters.data={
description="LEFT-SIDE ARC ANTICLOCKWISE ARROW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2939,
},
[0x293A]={
@@ -75069,6 +76467,7 @@ characters.data={
description="LOWER RIGHT SEMICIRCULAR CLOCKWISE ARROW",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x293E,
},
[0x293F]={
@@ -75076,6 +76475,7 @@ characters.data={
description="LOWER LEFT SEMICIRCULAR ANTICLOCKWISE ARROW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x293F,
},
[0x2940]={
@@ -75118,6 +76518,7 @@ characters.data={
description="RIGHTWARDS ARROW WITH PLUS BELOW",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2945,
},
[0x2946]={
@@ -75125,6 +76526,7 @@ characters.data={
description="LEFTWARDS ARROW WITH PLUS BELOW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2946,
},
[0x2947]={
@@ -75146,6 +76548,7 @@ characters.data={
description="UPWARDS TWO-HEADED ARROW FROM SMALL CIRCLE",
direction="on",
linebreak="al",
+ mathextensible="u",
unicodeslot=0x2949,
},
[0x294A]={
@@ -75419,6 +76822,7 @@ characters.data={
description="RIGHT DOUBLE ARROW WITH ROUNDED HEAD",
direction="on",
linebreak="al",
+ mathextensible="r",
unicodeslot=0x2970,
},
[0x2971]={
@@ -76312,6 +77716,7 @@ characters.data={
description="BLACK DIAMOND WITH DOWN ARROW",
direction="on",
linebreak="al",
+ mathextensible="d",
unicodeslot=0x29EA,
},
[0x29EB]={
@@ -76326,6 +77731,7 @@ characters.data={
description="WHITE CIRCLE WITH DOWN ARROW",
direction="on",
linebreak="al",
+ mathextensible="d",
unicodeslot=0x29EC,
},
[0x29ED]={
@@ -76333,6 +77739,7 @@ characters.data={
description="BLACK CIRCLE WITH DOWN ARROW",
direction="on",
linebreak="al",
+ mathextensible="d",
unicodeslot=0x29ED,
},
[0x29EE]={
@@ -76649,6 +78056,7 @@ characters.data={
description="INTEGRAL WITH LEFTWARDS ARROW WITH HOOK",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2A17,
},
[0x2A18]={
@@ -78482,6 +79890,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B05,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B06]={
category="so",
@@ -78489,6 +79901,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B06,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B07]={
category="so",
@@ -78496,6 +79912,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B07,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B08]={
category="so",
@@ -78636,6 +80056,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B1B,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B1C]={
category="so",
@@ -78643,6 +80067,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B1C,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B1D]={
category="so",
@@ -78782,6 +80210,7 @@ characters.data={
description="LEFT ARROW WITH SMALL CIRCLE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B30,
},
[0x2B31]={
@@ -78789,6 +80218,7 @@ characters.data={
description="THREE LEFTWARDS ARROWS",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B31,
},
[0x2B32]={
@@ -78796,6 +80226,7 @@ characters.data={
description="LEFT ARROW WITH CIRCLED PLUS",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B32,
},
[0x2B33]={
@@ -78803,6 +80234,7 @@ characters.data={
description="LONG LEFTWARDS SQUIGGLE ARROW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B33,
},
[0x2B34]={
@@ -78810,6 +80242,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B34,
},
[0x2B35]={
@@ -78817,6 +80250,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B35,
},
[0x2B36]={
@@ -78824,6 +80258,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW FROM BAR",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B36,
},
[0x2B37]={
@@ -78831,6 +80266,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED TRIPLE DASH ARROW",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B37,
},
[0x2B38]={
@@ -78838,6 +80274,7 @@ characters.data={
description="LEFTWARDS ARROW WITH DOTTED STEM",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B38,
},
[0x2B39]={
@@ -78845,6 +80282,7 @@ characters.data={
description="LEFTWARDS ARROW WITH TAIL WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B39,
},
[0x2B3A]={
@@ -78852,6 +80290,7 @@ characters.data={
description="LEFTWARDS ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B3A,
},
[0x2B3B]={
@@ -78859,6 +80298,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW WITH TAIL",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B3B,
},
[0x2B3C]={
@@ -78866,6 +80306,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW WITH TAIL WITH VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B3C,
},
[0x2B3D]={
@@ -78873,6 +80314,7 @@ characters.data={
description="LEFTWARDS TWO-HEADED ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B3D,
},
[0x2B3E]={
@@ -78887,6 +80329,7 @@ characters.data={
description="WAVE ARROW POINTING DIRECTLY LEFT",
direction="on",
linebreak="al",
+ mathextensible="l",
unicodeslot=0x2B3F,
},
[0x2B40]={
@@ -78986,6 +80429,10 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2B50,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B51]={
category="so",
@@ -79022,6 +80469,10 @@ characters.data={
direction="on",
linebreak="ai",
unicodeslot=0x2B55,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x2B56]={
category="so",
@@ -80960,6 +82411,20 @@ characters.data={
linebreak="cm",
unicodeslot=0x2CF1,
},
+ [0x2CF2]={
+ category="lu",
+ description="COPTIC CAPITAL LETTER BOHAIRIC KHEI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2CF2,
+ },
+ [0x2CF3]={
+ category="ll",
+ description="COPTIC SMALL LETTER BOHAIRIC KHEI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2CF3,
+ },
[0x2CF9]={
category="po",
description="COPTIC OLD NUBIAN FULL STOP",
@@ -81313,6 +82778,20 @@ characters.data={
uccode=0x10C5,
unicodeslot=0x2D25,
},
+ [0x2D27]={
+ category="ll",
+ description="GEORGIAN SMALL LETTER YN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2D27,
+ },
+ [0x2D2D]={
+ category="ll",
+ description="GEORGIAN SMALL LETTER AEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2D2D,
+ },
[0x2D30]={
category="lo",
description="TIFINAGH LETTER YA",
@@ -81616,7 +83095,6 @@ characters.data={
},
[0x2D5B]={
category="lo",
- cjkwd="w",
description="TIFINAGH LETTER YASH",
direction="l",
linebreak="al",
@@ -81624,7 +83102,6 @@ characters.data={
},
[0x2D5C]={
category="lo",
- cjkwd="w",
description="TIFINAGH LETTER YAT",
direction="l",
linebreak="al",
@@ -81669,14 +83146,14 @@ characters.data={
category="lo",
description="TIFINAGH LETTER YAY",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2D62,
},
[0x2D63]={
category="lo",
description="TIFINAGH LETTER YAZ",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2D63,
},
[0x2D64]={
@@ -81693,6 +83170,20 @@ characters.data={
linebreak="al",
unicodeslot=0x2D65,
},
+ [0x2D66]={
+ category="lo",
+ description="TIFINAGH LETTER YE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2D66,
+ },
+ [0x2D67]={
+ category="lo",
+ description="TIFINAGH LETTER YO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x2D67,
+ },
[0x2D6F]={
category="lm",
description="TIFINAGH MODIFIER LETTER LABIALIZATION MARK",
@@ -81908,14 +83399,14 @@ characters.data={
category="lo",
description="ETHIOPIC SYLLABLE SSEE",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2DA4,
},
[0x2DA5]={
category="lo",
description="ETHIOPIC SYLLABLE SSE",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2DA5,
},
[0x2DA6]={
@@ -82004,6 +83495,7 @@ characters.data={
},
[0x2DB4]={
category="lo",
+ cjkwd="w",
description="ETHIOPIC SYLLABLE ZZEE",
direction="l",
linebreak="al",
@@ -82011,6 +83503,7 @@ characters.data={
},
[0x2DB5]={
category="lo",
+ cjkwd="w",
description="ETHIOPIC SYLLABLE ZZE",
direction="l",
linebreak="al",
@@ -82048,14 +83541,14 @@ characters.data={
category="lo",
description="ETHIOPIC SYLLABLE CCHAA",
direction="l",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2DBB,
},
[0x2DBC]={
category="lo",
description="ETHIOPIC SYLLABLE CCHEE",
direction="l",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x2DBC,
},
[0x2DBD]={
@@ -82424,6 +83917,7 @@ characters.data={
},
[0x2DF6]={
category="mn",
+ cjkwd="w",
description="COMBINING CYRILLIC LETTER A",
direction="nsm",
linebreak="cm",
@@ -82431,6 +83925,7 @@ characters.data={
},
[0x2DF7]={
category="mn",
+ cjkwd="w",
description="COMBINING CYRILLIC LETTER IE",
direction="nsm",
linebreak="cm",
@@ -82475,14 +83970,14 @@ characters.data={
category="mn",
description="COMBINING CYRILLIC LETTER LITTLE YUS",
direction="nsm",
- linebreak="cm",
+ linebreak="id",
unicodeslot=0x2DFD,
},
[0x2DFE]={
category="mn",
description="COMBINING CYRILLIC LETTER BIG YUS",
direction="nsm",
- linebreak="cm",
+ linebreak="id",
unicodeslot=0x2DFE,
},
[0x2DFF]={
@@ -82862,6 +84357,76 @@ characters.data={
linebreak="ba",
unicodeslot=0x2E31,
},
+ [0x2E32]={
+ category="po",
+ description="TURNED COMMA",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E32,
+ },
+ [0x2E33]={
+ category="po",
+ description="RAISED DOT",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E33,
+ },
+ [0x2E34]={
+ category="po",
+ description="RAISED COMMA",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E34,
+ },
+ [0x2E35]={
+ category="po",
+ description="TURNED SEMICOLON",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E35,
+ },
+ [0x2E36]={
+ category="po",
+ description="DAGGER WITH LEFT GUARD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E36,
+ },
+ [0x2E37]={
+ category="po",
+ description="DAGGER WITH RIGHT GUARD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E37,
+ },
+ [0x2E38]={
+ category="po",
+ description="TURNED DAGGER",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E38,
+ },
+ [0x2E39]={
+ category="po",
+ description="TOP HALF SECTION SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E39,
+ },
+ [0x2E3A]={
+ category="pd",
+ description="TWO-EM DASH",
+ direction="on",
+ linebreak="b2",
+ unicodeslot=0x2E3A,
+ },
+ [0x2E3B]={
+ category="pd",
+ description="THREE-EM DASH",
+ direction="on",
+ linebreak="b2",
+ unicodeslot=0x2E3B,
+ },
[0x2E80]={
category="so",
cjkwd="w",
@@ -86237,7 +87802,7 @@ characters.data={
category="mn",
cjkwd="w",
description="HANGUL SINGLE DOT TONE MARK",
- direction="nsm",
+ direction="l",
linebreak="cm",
unicodeslot=0x302E,
},
@@ -86245,7 +87810,7 @@ characters.data={
category="mn",
cjkwd="w",
description="HANGUL DOUBLE DOT TONE MARK",
- direction="nsm",
+ direction="l",
linebreak="cm",
unicodeslot=0x302F,
},
@@ -86365,6 +87930,10 @@ characters.data={
direction="on",
linebreak="id",
unicodeslot=0x303D,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x303E]={
category="so",
@@ -86388,7 +87957,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL A",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3041,
},
[0x3042]={
@@ -86406,7 +87975,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL I",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3043,
},
[0x3044]={
@@ -86424,7 +87993,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL U",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3045,
},
[0x3046]={
@@ -86442,7 +88011,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL E",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3047,
},
[0x3048]={
@@ -86460,7 +88029,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL O",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3049,
},
[0x304A]={
@@ -86706,7 +88275,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL TU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3063,
},
[0x3064]={
@@ -87007,7 +88576,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL YA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3083,
},
[0x3084]={
@@ -87025,7 +88594,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL YU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3085,
},
[0x3086]={
@@ -87043,7 +88612,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL YO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3087,
},
[0x3088]={
@@ -87106,7 +88675,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL WA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x308E,
},
[0x308F]={
@@ -87169,7 +88738,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL KA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3095,
},
[0x3096]={
@@ -87177,7 +88746,7 @@ characters.data={
cjkwd="w",
description="HIRAGANA LETTER SMALL KE",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x3096,
},
[0x3099]={
@@ -87258,7 +88827,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL A",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30A1,
},
[0x30A2]={
@@ -87276,7 +88845,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL I",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30A3,
},
[0x30A4]={
@@ -87294,7 +88863,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL U",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30A5,
},
[0x30A6]={
@@ -87312,7 +88881,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL E",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30A7,
},
[0x30A8]={
@@ -87330,7 +88899,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL O",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30A9,
},
[0x30AA]={
@@ -87576,7 +89145,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL TU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30C3,
},
[0x30C4]={
@@ -87877,7 +89446,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL YA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30E3,
},
[0x30E4]={
@@ -87895,7 +89464,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL YU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30E5,
},
[0x30E6]={
@@ -87913,7 +89482,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL YO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30E7,
},
[0x30E8]={
@@ -87976,7 +89545,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL WA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30EE,
},
[0x30EF]={
@@ -88040,7 +89609,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL KA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30F5,
},
[0x30F6]={
@@ -88049,7 +89618,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL KE",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30F6,
},
[0x30F7]={
@@ -88107,7 +89676,7 @@ characters.data={
cjkwd="w",
description="KATAKANA-HIRAGANA PROLONGED SOUND MARK",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x30FC,
},
[0x30FD]={
@@ -90094,7 +91663,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL KU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F0,
},
[0x31F1]={
@@ -90102,7 +91671,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL SI",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F1,
},
[0x31F2]={
@@ -90110,7 +91679,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL SU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F2,
},
[0x31F3]={
@@ -90118,7 +91687,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL TO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F3,
},
[0x31F4]={
@@ -90126,7 +91695,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL NU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F4,
},
[0x31F5]={
@@ -90134,7 +91703,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL HA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F5,
},
[0x31F6]={
@@ -90142,7 +91711,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL HI",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F6,
},
[0x31F7]={
@@ -90150,7 +91719,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL HU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F7,
},
[0x31F8]={
@@ -90158,7 +91727,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL HE",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F8,
},
[0x31F9]={
@@ -90166,7 +91735,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL HO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31F9,
},
[0x31FA]={
@@ -90174,7 +91743,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL MU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FA,
},
[0x31FB]={
@@ -90182,7 +91751,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL RA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FB,
},
[0x31FC]={
@@ -90190,7 +91759,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL RI",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FC,
},
[0x31FD]={
@@ -90198,7 +91767,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL RU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FD,
},
[0x31FE]={
@@ -90206,7 +91775,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL RE",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FE,
},
[0x31FF]={
@@ -90214,7 +91783,7 @@ characters.data={
cjkwd="w",
description="KATAKANA LETTER SMALL RO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
unicodeslot=0x31FF,
},
[0x3200]={
@@ -91668,6 +93237,10 @@ characters.data={
linebreak="id",
specials={ "circle", 0x795D },
unicodeslot=0x3297,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x3298]={
adobename="ideographiclaborcircle",
@@ -91688,6 +93261,10 @@ characters.data={
linebreak="id",
specials={ "circle", 0x79D8 },
unicodeslot=0x3299,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x329A]={
category="so",
@@ -108042,6 +109619,62 @@ characters.data={
linebreak="al",
unicodeslot=0xA673,
},
+ [0xA674]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER UKRAINIAN IE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA674,
+ },
+ [0xA675]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER I",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA675,
+ },
+ [0xA676]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER YI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA676,
+ },
+ [0xA677]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA677,
+ },
+ [0xA678]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER HARD SIGN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA678,
+ },
+ [0xA679]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER YERU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA679,
+ },
+ [0xA67A]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER SOFT SIGN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA67A,
+ },
+ [0xA67B]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER OMEGA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA67B,
+ },
[0xA67C]={
category="mn",
description="COMBINING CYRILLIC KAVYKA",
@@ -108238,6 +109871,13 @@ characters.data={
linebreak="al",
unicodeslot=0xA697,
},
+ [0xA69F]={
+ category="mn",
+ description="COMBINING CYRILLIC LETTER IOTIFIED E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xA69F,
+ },
[0xA6A0]={
category="lo",
description="BAMUM LETTER A",
@@ -109870,6 +111510,20 @@ characters.data={
linebreak="al",
unicodeslot=0xA791,
},
+ [0xA792]={
+ category="lu",
+ description="LATIN CAPITAL LETTER C WITH BAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA792,
+ },
+ [0xA793]={
+ category="ll",
+ description="LATIN SMALL LETTER C WITH BAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA793,
+ },
[0xA7A0]={
category="lu",
description="LATIN CAPITAL LETTER G WITH OBLIQUE STROKE",
@@ -109940,6 +111594,29 @@ characters.data={
linebreak="al",
unicodeslot=0xA7A9,
},
+ [0xA7AA]={
+ category="lu",
+ description="LATIN CAPITAL LETTER H WITH HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7AA,
+ },
+ [0xA7F8]={
+ category="lm",
+ description="MODIFIER LETTER CAPITAL H WITH STROKE",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0x0126 },
+ unicodeslot=0xA7F8,
+ },
+ [0xA7F9]={
+ category="lm",
+ description="MODIFIER LETTER SMALL LIGATURE OE",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0x0153 },
+ unicodeslot=0xA7F9,
+ },
[0xA7FA]={
category="ll",
description="LATIN LETTER SMALL CAPITAL TURNED M",
@@ -114278,6 +115955,167 @@ characters.data={
linebreak="sa",
unicodeslot=0xAADF,
},
+ [0xAAE0]={
+ category="lo",
+ description="MEETEI MAYEK LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE0,
+ },
+ [0xAAE1]={
+ category="lo",
+ description="MEETEI MAYEK LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE1,
+ },
+ [0xAAE2]={
+ category="lo",
+ description="MEETEI MAYEK LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE2,
+ },
+ [0xAAE3]={
+ category="lo",
+ description="MEETEI MAYEK LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE3,
+ },
+ [0xAAE4]={
+ category="lo",
+ description="MEETEI MAYEK LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE4,
+ },
+ [0xAAE5]={
+ category="lo",
+ description="MEETEI MAYEK LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE5,
+ },
+ [0xAAE6]={
+ category="lo",
+ description="MEETEI MAYEK LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE6,
+ },
+ [0xAAE7]={
+ category="lo",
+ description="MEETEI MAYEK LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE7,
+ },
+ [0xAAE8]={
+ category="lo",
+ description="MEETEI MAYEK LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE8,
+ },
+ [0xAAE9]={
+ category="lo",
+ description="MEETEI MAYEK LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAE9,
+ },
+ [0xAAEA]={
+ category="lo",
+ description="MEETEI MAYEK LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAEA,
+ },
+ [0xAAEB]={
+ category="mc",
+ description="MEETEI MAYEK VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0xAAEB,
+ },
+ [0xAAEC]={
+ category="mn",
+ description="MEETEI MAYEK VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xAAEC,
+ },
+ [0xAAED]={
+ category="mn",
+ description="MEETEI MAYEK VOWEL SIGN AAI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xAAED,
+ },
+ [0xAAEE]={
+ category="mc",
+ description="MEETEI MAYEK VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0xAAEE,
+ },
+ [0xAAEF]={
+ category="mc",
+ description="MEETEI MAYEK VOWEL SIGN AAU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0xAAEF,
+ },
+ [0xAAF0]={
+ category="po",
+ description="MEETEI MAYEK CHEIKHAN",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0xAAF0,
+ },
+ [0xAAF1]={
+ category="po",
+ description="MEETEI MAYEK AHANG KHUDAM",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0xAAF1,
+ },
+ [0xAAF2]={
+ category="lo",
+ description="MEETEI MAYEK ANJI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAF2,
+ },
+ [0xAAF3]={
+ category="lm",
+ description="MEETEI MAYEK SYLLABLE REPETITION MARK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAF3,
+ },
+ [0xAAF4]={
+ category="lm",
+ description="MEETEI MAYEK WORD REPETITION MARK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAAF4,
+ },
+ [0xAAF5]={
+ category="mc",
+ description="MEETEI MAYEK VOWEL SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0xAAF5,
+ },
+ [0xAAF6]={
+ category="mn",
+ description="MEETEI MAYEK VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xAAF6,
+ },
[0xAB01]={
category="lo",
description="ETHIOPIC SYLLABLE TTHU",
@@ -114896,7 +116734,6 @@ characters.data={
},
[0xD7B0]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG O-YEO",
direction="l",
linebreak="jv",
@@ -114904,7 +116741,6 @@ characters.data={
},
[0xD7B1]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG O-O-I",
direction="l",
linebreak="jv",
@@ -114912,7 +116748,6 @@ characters.data={
},
[0xD7B2]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YO-A",
direction="l",
linebreak="jv",
@@ -114920,7 +116755,6 @@ characters.data={
},
[0xD7B3]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YO-AE",
direction="l",
linebreak="jv",
@@ -114928,7 +116762,6 @@ characters.data={
},
[0xD7B4]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YO-EO",
direction="l",
linebreak="jv",
@@ -114936,7 +116769,6 @@ characters.data={
},
[0xD7B5]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG U-YEO",
direction="l",
linebreak="jv",
@@ -114944,7 +116776,6 @@ characters.data={
},
[0xD7B6]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG U-I-I",
direction="l",
linebreak="jv",
@@ -114952,7 +116783,6 @@ characters.data={
},
[0xD7B7]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YU-AE",
direction="l",
linebreak="jv",
@@ -114960,7 +116790,6 @@ characters.data={
},
[0xD7B8]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG YU-O",
direction="l",
linebreak="jv",
@@ -114968,7 +116797,6 @@ characters.data={
},
[0xD7B9]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG EU-A",
direction="l",
linebreak="jv",
@@ -114976,7 +116804,6 @@ characters.data={
},
[0xD7BA]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG EU-EO",
direction="l",
linebreak="jv",
@@ -114984,7 +116811,6 @@ characters.data={
},
[0xD7BB]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG EU-E",
direction="l",
linebreak="jv",
@@ -114992,7 +116818,6 @@ characters.data={
},
[0xD7BC]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG EU-O",
direction="l",
linebreak="jv",
@@ -115000,7 +116825,6 @@ characters.data={
},
[0xD7BD]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YA-O",
direction="l",
linebreak="jv",
@@ -115008,7 +116832,6 @@ characters.data={
},
[0xD7BE]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YAE",
direction="l",
linebreak="jv",
@@ -115016,7 +116839,6 @@ characters.data={
},
[0xD7BF]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YEO",
direction="l",
linebreak="jv",
@@ -115024,7 +116846,6 @@ characters.data={
},
[0xD7C0]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YE",
direction="l",
linebreak="jv",
@@ -115032,7 +116853,6 @@ characters.data={
},
[0xD7C1]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-O-I",
direction="l",
linebreak="jv",
@@ -115040,7 +116860,6 @@ characters.data={
},
[0xD7C2]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YO",
direction="l",
linebreak="jv",
@@ -115048,7 +116867,6 @@ characters.data={
},
[0xD7C3]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-YU",
direction="l",
linebreak="jv",
@@ -115056,7 +116874,6 @@ characters.data={
},
[0xD7C4]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG I-I",
direction="l",
linebreak="jv",
@@ -115064,7 +116881,6 @@ characters.data={
},
[0xD7C5]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG ARAEA-A",
direction="l",
linebreak="jv",
@@ -115072,7 +116888,6 @@ characters.data={
},
[0xD7C6]={
category="lo",
- cjkwd="w",
description="HANGUL JUNGSEONG ARAEA-E",
direction="l",
linebreak="jv",
@@ -115080,7 +116895,6 @@ characters.data={
},
[0xD7CB]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG NIEUN-RIEUL",
direction="l",
linebreak="jt",
@@ -115088,7 +116902,6 @@ characters.data={
},
[0xD7CC]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG NIEUN-CHIEUCH",
direction="l",
linebreak="jt",
@@ -115096,7 +116909,6 @@ characters.data={
},
[0xD7CD]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGTIKEUT",
direction="l",
linebreak="jt",
@@ -115104,7 +116916,6 @@ characters.data={
},
[0xD7CE]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGTIKEUT-PIEUP",
direction="l",
linebreak="jt",
@@ -115112,7 +116923,6 @@ characters.data={
},
[0xD7CF]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-PIEUP",
direction="l",
linebreak="jt",
@@ -115120,7 +116930,6 @@ characters.data={
},
[0xD7D0]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-SIOS",
direction="l",
linebreak="jt",
@@ -115128,7 +116937,6 @@ characters.data={
},
[0xD7D1]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-SIOS-KIYEOK",
direction="l",
linebreak="jt",
@@ -115136,7 +116944,6 @@ characters.data={
},
[0xD7D2]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-CIEUC",
direction="l",
linebreak="jt",
@@ -115144,7 +116951,6 @@ characters.data={
},
[0xD7D3]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-CHIEUCH",
direction="l",
linebreak="jt",
@@ -115152,7 +116958,6 @@ characters.data={
},
[0xD7D4]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG TIKEUT-THIEUTH",
direction="l",
linebreak="jt",
@@ -115160,7 +116965,6 @@ characters.data={
},
[0xD7D5]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-SSANGKIYEOK",
direction="l",
linebreak="jt",
@@ -115168,7 +116972,6 @@ characters.data={
},
[0xD7D6]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-KIYEOK-HIEUH",
direction="l",
linebreak="jt",
@@ -115176,7 +116979,6 @@ characters.data={
},
[0xD7D7]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGRIEUL-KHIEUKH",
direction="l",
linebreak="jt",
@@ -115184,7 +116986,6 @@ characters.data={
},
[0xD7D8]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-MIEUM-HIEUH",
direction="l",
linebreak="jt",
@@ -115192,7 +116993,6 @@ characters.data={
},
[0xD7D9]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-PIEUP-TIKEUT",
direction="l",
linebreak="jt",
@@ -115200,7 +117000,6 @@ characters.data={
},
[0xD7DA]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-PIEUP-PHIEUPH",
direction="l",
linebreak="jt",
@@ -115208,7 +117007,6 @@ characters.data={
},
[0xD7DB]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-YESIEUNG",
direction="l",
linebreak="jt",
@@ -115216,7 +117014,6 @@ characters.data={
},
[0xD7DC]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG RIEUL-YEORINHIEUH-HIEUH",
direction="l",
linebreak="jt",
@@ -115224,7 +117021,6 @@ characters.data={
},
[0xD7DD]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG KAPYEOUNRIEUL",
direction="l",
linebreak="jt",
@@ -115232,7 +117028,6 @@ characters.data={
},
[0xD7DE]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG MIEUM-NIEUN",
direction="l",
linebreak="jt",
@@ -115240,7 +117035,6 @@ characters.data={
},
[0xD7DF]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG MIEUM-SSANGNIEUN",
direction="l",
linebreak="jt",
@@ -115248,7 +117042,6 @@ characters.data={
},
[0xD7E0]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGMIEUM",
direction="l",
linebreak="jt",
@@ -115256,7 +117049,6 @@ characters.data={
},
[0xD7E1]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG MIEUM-PIEUP-SIOS",
direction="l",
linebreak="jt",
@@ -115264,7 +117056,6 @@ characters.data={
},
[0xD7E2]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG MIEUM-CIEUC",
direction="l",
linebreak="jt",
@@ -115272,7 +117063,6 @@ characters.data={
},
[0xD7E3]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-TIKEUT",
direction="l",
linebreak="jt",
@@ -115280,7 +117070,6 @@ characters.data={
},
[0xD7E4]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-RIEUL-PHIEUPH",
direction="l",
linebreak="jt",
@@ -115288,7 +117077,6 @@ characters.data={
},
[0xD7E5]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-MIEUM",
direction="l",
linebreak="jt",
@@ -115296,7 +117084,6 @@ characters.data={
},
[0xD7E6]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGPIEUP",
direction="l",
linebreak="jt",
@@ -115304,7 +117091,6 @@ characters.data={
},
[0xD7E7]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-SIOS-TIKEUT",
direction="l",
linebreak="jt",
@@ -115312,7 +117098,6 @@ characters.data={
},
[0xD7E8]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-CIEUC",
direction="l",
linebreak="jt",
@@ -115320,7 +117105,6 @@ characters.data={
},
[0xD7E9]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PIEUP-CHIEUCH",
direction="l",
linebreak="jt",
@@ -115328,7 +117112,6 @@ characters.data={
},
[0xD7EA]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-MIEUM",
direction="l",
linebreak="jt",
@@ -115336,7 +117119,6 @@ characters.data={
},
[0xD7EB]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-KAPYEOUNPIEUP",
direction="l",
linebreak="jt",
@@ -115344,7 +117126,6 @@ characters.data={
},
[0xD7EC]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGSIOS-KIYEOK",
direction="l",
linebreak="jt",
@@ -115352,7 +117133,6 @@ characters.data={
},
[0xD7ED]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGSIOS-TIKEUT",
direction="l",
linebreak="jt",
@@ -115360,7 +117140,6 @@ characters.data={
},
[0xD7EE]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-PANSIOS",
direction="l",
linebreak="jt",
@@ -115368,7 +117147,6 @@ characters.data={
},
[0xD7EF]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-CIEUC",
direction="l",
linebreak="jt",
@@ -115376,7 +117154,6 @@ characters.data={
},
[0xD7F0]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-CHIEUCH",
direction="l",
linebreak="jt",
@@ -115384,7 +117161,6 @@ characters.data={
},
[0xD7F1]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-THIEUTH",
direction="l",
linebreak="jt",
@@ -115392,7 +117168,6 @@ characters.data={
},
[0xD7F2]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SIOS-HIEUH",
direction="l",
linebreak="jt",
@@ -115400,7 +117175,6 @@ characters.data={
},
[0xD7F3]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PANSIOS-PIEUP",
direction="l",
linebreak="jt",
@@ -115408,7 +117182,6 @@ characters.data={
},
[0xD7F4]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PANSIOS-KAPYEOUNPIEUP",
direction="l",
linebreak="jt",
@@ -115416,7 +117189,6 @@ characters.data={
},
[0xD7F5]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG YESIEUNG-MIEUM",
direction="l",
linebreak="jt",
@@ -115424,7 +117196,6 @@ characters.data={
},
[0xD7F6]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG YESIEUNG-HIEUH",
direction="l",
linebreak="jt",
@@ -115432,7 +117203,6 @@ characters.data={
},
[0xD7F7]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG CIEUC-PIEUP",
direction="l",
linebreak="jt",
@@ -115440,7 +117210,6 @@ characters.data={
},
[0xD7F8]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG CIEUC-SSANGPIEUP",
direction="l",
linebreak="jt",
@@ -115448,7 +117217,6 @@ characters.data={
},
[0xD7F9]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG SSANGCIEUC",
direction="l",
linebreak="jt",
@@ -115456,7 +117224,6 @@ characters.data={
},
[0xD7FA]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PHIEUPH-SIOS",
direction="l",
linebreak="jt",
@@ -115464,7 +117231,6 @@ characters.data={
},
[0xD7FB]={
category="lo",
- cjkwd="w",
description="HANGUL JONGSEONG PHIEUPH-THIEUTH",
direction="l",
linebreak="jt",
@@ -118233,6 +119999,24 @@ characters.data={
specials={ "char", 0x9DB4 },
unicodeslot=0xFA2D,
},
+ [0xFA2E]={
+ category="lo",
+ cjkwd="w",
+ description="CJK COMPATIBILITY IDEOGRAPH-FA2E",
+ direction="l",
+ linebreak="id",
+ specials={ "char", 0x90DE },
+ unicodeslot=0xFA2E,
+ },
+ [0xFA2F]={
+ category="lo",
+ cjkwd="w",
+ description="CJK COMPATIBILITY IDEOGRAPH-FA2F",
+ direction="l",
+ linebreak="id",
+ specials={ "char", 0x96B7 },
+ unicodeslot=0xFA2F,
+ },
[0xFA30]={
category="lo",
cjkwd="w",
@@ -119861,7 +121645,7 @@ characters.data={
category="lo",
description="HEBREW LETTER YOD WITH HIRIQ",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D9,
specials={ "char", 0x05D9, 0x05B4 },
unicodeslot=0xFB1D,
@@ -119878,7 +121662,7 @@ characters.data={
category="lo",
description="HEBREW LIGATURE YIDDISH YOD YOD PATAH",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "char", 0x05F2, 0x05B7 },
unicodeslot=0xFB1F,
},
@@ -119887,7 +121671,7 @@ characters.data={
category="lo",
description="HEBREW LETTER ALTERNATIVE AYIN",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05E2 },
unicodeslot=0xFB20,
},
@@ -119895,7 +121679,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE ALEF",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05D0 },
unicodeslot=0xFB21,
},
@@ -119903,7 +121687,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE DALET",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05D3 },
unicodeslot=0xFB22,
},
@@ -119911,7 +121695,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE HE",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05D4 },
unicodeslot=0xFB23,
},
@@ -119919,7 +121703,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE KAF",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05DB },
unicodeslot=0xFB24,
},
@@ -119927,7 +121711,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE LAMED",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05DC },
unicodeslot=0xFB25,
},
@@ -119935,7 +121719,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE FINAL MEM",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05DD },
unicodeslot=0xFB26,
},
@@ -119943,7 +121727,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE RESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05E8 },
unicodeslot=0xFB27,
},
@@ -119951,7 +121735,7 @@ characters.data={
category="lo",
description="HEBREW LETTER WIDE TAV",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "font", 0x05EA },
unicodeslot=0xFB28,
},
@@ -119968,7 +121752,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SHIN WITH SHIN DOT",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E9,
specials={ "char", 0x05E9, 0x05C1 },
unicodeslot=0xFB2A,
@@ -119978,7 +121762,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SHIN WITH SIN DOT",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E9,
specials={ "char", 0x05E9, 0x05C2 },
unicodeslot=0xFB2B,
@@ -119988,7 +121772,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SHIN WITH DAGESH AND SHIN DOT",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E9,
specials={ "char", 0xFB49, 0x05C1 },
unicodeslot=0xFB2C,
@@ -119998,7 +121782,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SHIN WITH DAGESH AND SIN DOT",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E9,
specials={ "char", 0xFB49, 0x05C2 },
unicodeslot=0xFB2D,
@@ -120008,7 +121792,7 @@ characters.data={
category="lo",
description="HEBREW LETTER ALEF WITH PATAH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D0,
specials={ "char", 0x05D0, 0x05B7 },
unicodeslot=0xFB2E,
@@ -120018,7 +121802,7 @@ characters.data={
category="lo",
description="HEBREW LETTER ALEF WITH QAMATS",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D0,
specials={ "char", 0x05D0, 0x05B8 },
unicodeslot=0xFB2F,
@@ -120028,7 +121812,7 @@ characters.data={
category="lo",
description="HEBREW LETTER ALEF WITH MAPIQ",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D0,
specials={ "char", 0x05D0, 0x05BC },
unicodeslot=0xFB30,
@@ -120038,7 +121822,7 @@ characters.data={
category="lo",
description="HEBREW LETTER BET WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D1,
specials={ "char", 0x05D1, 0x05BC },
unicodeslot=0xFB31,
@@ -120048,7 +121832,7 @@ characters.data={
category="lo",
description="HEBREW LETTER GIMEL WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D2,
specials={ "char", 0x05D2, 0x05BC },
unicodeslot=0xFB32,
@@ -120058,7 +121842,7 @@ characters.data={
category="lo",
description="HEBREW LETTER DALET WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D3,
specials={ "char", 0x05D3, 0x05BC },
unicodeslot=0xFB33,
@@ -120068,7 +121852,7 @@ characters.data={
category="lo",
description="HEBREW LETTER HE WITH MAPIQ",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D4,
specials={ "char", 0x05D4, 0x05BC },
unicodeslot=0xFB34,
@@ -120078,7 +121862,7 @@ characters.data={
category="lo",
description="HEBREW LETTER VAV WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D5,
specials={ "char", 0x05D5, 0x05BC },
unicodeslot=0xFB35,
@@ -120088,7 +121872,7 @@ characters.data={
category="lo",
description="HEBREW LETTER ZAYIN WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D6,
specials={ "char", 0x05D6, 0x05BC },
unicodeslot=0xFB36,
@@ -120098,7 +121882,7 @@ characters.data={
category="lo",
description="HEBREW LETTER TET WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D8,
specials={ "char", 0x05D8, 0x05BC },
unicodeslot=0xFB38,
@@ -120108,7 +121892,7 @@ characters.data={
category="lo",
description="HEBREW LETTER YOD WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D9,
specials={ "char", 0x05D9, 0x05BC },
unicodeslot=0xFB39,
@@ -120118,7 +121902,7 @@ characters.data={
category="lo",
description="HEBREW LETTER FINAL KAF WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "char", 0x05DA, 0x05BC },
unicodeslot=0xFB3A,
},
@@ -120127,7 +121911,7 @@ characters.data={
category="lo",
description="HEBREW LETTER KAF WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05DB,
specials={ "char", 0x05DB, 0x05BC },
unicodeslot=0xFB3B,
@@ -120137,7 +121921,7 @@ characters.data={
category="lo",
description="HEBREW LETTER LAMED WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05DC,
specials={ "char", 0x05DC, 0x05BC },
unicodeslot=0xFB3C,
@@ -120147,7 +121931,7 @@ characters.data={
category="lo",
description="HEBREW LETTER MEM WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05DE,
specials={ "char", 0x05DE, 0x05BC },
unicodeslot=0xFB3E,
@@ -120157,7 +121941,7 @@ characters.data={
category="lo",
description="HEBREW LETTER NUN WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E0,
specials={ "char", 0x05E0, 0x05BC },
unicodeslot=0xFB40,
@@ -120167,7 +121951,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SAMEKH WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E1,
specials={ "char", 0x05E1, 0x05BC },
unicodeslot=0xFB41,
@@ -120177,7 +121961,7 @@ characters.data={
category="lo",
description="HEBREW LETTER FINAL PE WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "char", 0x05E3, 0x05BC },
unicodeslot=0xFB43,
},
@@ -120186,7 +121970,7 @@ characters.data={
category="lo",
description="HEBREW LETTER PE WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E4,
specials={ "char", 0x05E4, 0x05BC },
unicodeslot=0xFB44,
@@ -120196,7 +121980,7 @@ characters.data={
category="lo",
description="HEBREW LETTER TSADI WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E6,
specials={ "char", 0x05E6, 0x05BC },
unicodeslot=0xFB46,
@@ -120206,7 +121990,7 @@ characters.data={
category="lo",
description="HEBREW LETTER QOF WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E7,
specials={ "char", 0x05E7, 0x05BC },
unicodeslot=0xFB47,
@@ -120216,7 +122000,7 @@ characters.data={
category="lo",
description="HEBREW LETTER RESH WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E8,
specials={ "char", 0x05E8, 0x05BC },
unicodeslot=0xFB48,
@@ -120226,7 +122010,7 @@ characters.data={
category="lo",
description="HEBREW LETTER SHIN WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E9,
specials={ "char", 0x05E9, 0x05BC },
unicodeslot=0xFB49,
@@ -120236,7 +122020,7 @@ characters.data={
category="lo",
description="HEBREW LETTER TAV WITH DAGESH",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05EA,
specials={ "char", 0x05EA, 0x05BC },
unicodeslot=0xFB4A,
@@ -120246,7 +122030,7 @@ characters.data={
category="lo",
description="HEBREW LETTER VAV WITH HOLAM",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D5,
specials={ "char", 0x05D5, 0x05B9 },
unicodeslot=0xFB4B,
@@ -120256,7 +122040,7 @@ characters.data={
category="lo",
description="HEBREW LETTER BET WITH RAFE",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05D1,
specials={ "char", 0x05D1, 0x05BF },
unicodeslot=0xFB4C,
@@ -120266,7 +122050,7 @@ characters.data={
category="lo",
description="HEBREW LETTER KAF WITH RAFE",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05DB,
specials={ "char", 0x05DB, 0x05BF },
unicodeslot=0xFB4D,
@@ -120276,7 +122060,7 @@ characters.data={
category="lo",
description="HEBREW LETTER PE WITH RAFE",
direction="r",
- linebreak="al",
+ linebreak="hl",
shcode=0x05E4,
specials={ "char", 0x05E4, 0x05BF },
unicodeslot=0xFB4E,
@@ -120286,7 +122070,7 @@ characters.data={
category="lo",
description="HEBREW LIGATURE ALEF LAMED",
direction="r",
- linebreak="al",
+ linebreak="hl",
specials={ "compat", 0x05D0, 0x05DC },
unicodeslot=0xFB4F,
},
@@ -128386,7 +130170,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL A",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30A1 },
unicodeslot=0xFF67,
},
@@ -128396,7 +130180,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL I",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30A3 },
unicodeslot=0xFF68,
},
@@ -128406,7 +130190,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL U",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30A5 },
unicodeslot=0xFF69,
},
@@ -128416,7 +130200,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL E",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30A7 },
unicodeslot=0xFF6A,
},
@@ -128426,7 +130210,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL O",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30A9 },
unicodeslot=0xFF6B,
},
@@ -128436,7 +130220,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL YA",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30E3 },
unicodeslot=0xFF6C,
},
@@ -128446,7 +130230,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL YU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30E5 },
unicodeslot=0xFF6D,
},
@@ -128456,7 +130240,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL YO",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30E7 },
unicodeslot=0xFF6E,
},
@@ -128466,7 +130250,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA LETTER SMALL TU",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30C3 },
unicodeslot=0xFF6F,
},
@@ -128476,7 +130260,7 @@ characters.data={
cjkwd="h",
description="HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK",
direction="l",
- linebreak="ns",
+ linebreak="cj",
specials={ "narrow", 0x30FC },
unicodeslot=0xFF70,
},
@@ -136192,6 +137976,412 @@ characters.data={
linebreak="al",
unicodeslot=0x1093F,
},
+ [0x10980]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER A",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10980,
+ },
+ [0x10981]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER E",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10981,
+ },
+ [0x10982]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER I",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10982,
+ },
+ [0x10983]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER O",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10983,
+ },
+ [0x10984]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER YA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10984,
+ },
+ [0x10985]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER WA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10985,
+ },
+ [0x10986]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER BA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10986,
+ },
+ [0x10987]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER BA-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10987,
+ },
+ [0x10988]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER PA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10988,
+ },
+ [0x10989]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER MA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10989,
+ },
+ [0x1098A]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER NA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098A,
+ },
+ [0x1098B]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER NA-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098B,
+ },
+ [0x1098C]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER NE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098C,
+ },
+ [0x1098D]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER NE-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098D,
+ },
+ [0x1098E]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER RA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098E,
+ },
+ [0x1098F]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER RA-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1098F,
+ },
+ [0x10990]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER LA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10990,
+ },
+ [0x10991]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER KHA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10991,
+ },
+ [0x10992]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER HHA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10992,
+ },
+ [0x10993]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER SA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10993,
+ },
+ [0x10994]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER SA-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10994,
+ },
+ [0x10995]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER SE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10995,
+ },
+ [0x10996]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER KA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10996,
+ },
+ [0x10997]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER QA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10997,
+ },
+ [0x10998]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER TA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10998,
+ },
+ [0x10999]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER TA-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10999,
+ },
+ [0x1099A]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER TE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099A,
+ },
+ [0x1099B]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER TE-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099B,
+ },
+ [0x1099C]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER TO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099C,
+ },
+ [0x1099D]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC LETTER DA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099D,
+ },
+ [0x1099E]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC SYMBOL VIDJ",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099E,
+ },
+ [0x1099F]={
+ category="lo",
+ description="MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1099F,
+ },
+ [0x109A0]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER A",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A0,
+ },
+ [0x109A1]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER E",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A1,
+ },
+ [0x109A2]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER I",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A2,
+ },
+ [0x109A3]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER O",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A3,
+ },
+ [0x109A4]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER YA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A4,
+ },
+ [0x109A5]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER WA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A5,
+ },
+ [0x109A6]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER BA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A6,
+ },
+ [0x109A7]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER PA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A7,
+ },
+ [0x109A8]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER MA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A8,
+ },
+ [0x109A9]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER NA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109A9,
+ },
+ [0x109AA]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER NE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AA,
+ },
+ [0x109AB]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER RA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AB,
+ },
+ [0x109AC]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER LA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AC,
+ },
+ [0x109AD]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER KHA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AD,
+ },
+ [0x109AE]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER HHA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AE,
+ },
+ [0x109AF]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER SA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109AF,
+ },
+ [0x109B0]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER ARCHAIC SA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B0,
+ },
+ [0x109B1]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER SE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B1,
+ },
+ [0x109B2]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER KA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B2,
+ },
+ [0x109B3]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER QA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B3,
+ },
+ [0x109B4]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER TA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B4,
+ },
+ [0x109B5]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER TE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B5,
+ },
+ [0x109B6]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER TO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B6,
+ },
+ [0x109B7]={
+ category="lo",
+ description="MEROITIC CURSIVE LETTER DA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109B7,
+ },
+ [0x109BE]={
+ category="lo",
+ description="MEROITIC CURSIVE LOGOGRAM RMT",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109BE,
+ },
+ [0x109BF]={
+ category="lo",
+ description="MEROITIC CURSIVE LOGOGRAM IMN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x109BF,
+ },
[0x10A00]={
category="lo",
description="KHAROSHTHI LETTER A",
@@ -139646,6 +141836,1765 @@ characters.data={
linebreak="ba",
unicodeslot=0x110C1,
},
+ [0x110D0]={
+ category="lo",
+ description="SORA SOMPENG LETTER SAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D0,
+ },
+ [0x110D1]={
+ category="lo",
+ description="SORA SOMPENG LETTER TAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D1,
+ },
+ [0x110D2]={
+ category="lo",
+ description="SORA SOMPENG LETTER BAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D2,
+ },
+ [0x110D3]={
+ category="lo",
+ description="SORA SOMPENG LETTER CAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D3,
+ },
+ [0x110D4]={
+ category="lo",
+ description="SORA SOMPENG LETTER DAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D4,
+ },
+ [0x110D5]={
+ category="lo",
+ description="SORA SOMPENG LETTER GAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D5,
+ },
+ [0x110D6]={
+ category="lo",
+ description="SORA SOMPENG LETTER MAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D6,
+ },
+ [0x110D7]={
+ category="lo",
+ description="SORA SOMPENG LETTER NGAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D7,
+ },
+ [0x110D8]={
+ category="lo",
+ description="SORA SOMPENG LETTER LAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D8,
+ },
+ [0x110D9]={
+ category="lo",
+ description="SORA SOMPENG LETTER NAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110D9,
+ },
+ [0x110DA]={
+ category="lo",
+ description="SORA SOMPENG LETTER VAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DA,
+ },
+ [0x110DB]={
+ category="lo",
+ description="SORA SOMPENG LETTER PAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DB,
+ },
+ [0x110DC]={
+ category="lo",
+ description="SORA SOMPENG LETTER YAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DC,
+ },
+ [0x110DD]={
+ category="lo",
+ description="SORA SOMPENG LETTER RAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DD,
+ },
+ [0x110DE]={
+ category="lo",
+ description="SORA SOMPENG LETTER HAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DE,
+ },
+ [0x110DF]={
+ category="lo",
+ description="SORA SOMPENG LETTER KAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110DF,
+ },
+ [0x110E0]={
+ category="lo",
+ description="SORA SOMPENG LETTER JAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E0,
+ },
+ [0x110E1]={
+ category="lo",
+ description="SORA SOMPENG LETTER NYAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E1,
+ },
+ [0x110E2]={
+ category="lo",
+ description="SORA SOMPENG LETTER AH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E2,
+ },
+ [0x110E3]={
+ category="lo",
+ description="SORA SOMPENG LETTER EEH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E3,
+ },
+ [0x110E4]={
+ category="lo",
+ description="SORA SOMPENG LETTER IH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E4,
+ },
+ [0x110E5]={
+ category="lo",
+ description="SORA SOMPENG LETTER UH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E5,
+ },
+ [0x110E6]={
+ category="lo",
+ description="SORA SOMPENG LETTER OH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E6,
+ },
+ [0x110E7]={
+ category="lo",
+ description="SORA SOMPENG LETTER EH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E7,
+ },
+ [0x110E8]={
+ category="lo",
+ description="SORA SOMPENG LETTER MAE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x110E8,
+ },
+ [0x110F0]={
+ category="nd",
+ description="SORA SOMPENG DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F0,
+ },
+ [0x110F1]={
+ category="nd",
+ description="SORA SOMPENG DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F1,
+ },
+ [0x110F2]={
+ category="nd",
+ description="SORA SOMPENG DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F2,
+ },
+ [0x110F3]={
+ category="nd",
+ description="SORA SOMPENG DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F3,
+ },
+ [0x110F4]={
+ category="nd",
+ description="SORA SOMPENG DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F4,
+ },
+ [0x110F5]={
+ category="nd",
+ description="SORA SOMPENG DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F5,
+ },
+ [0x110F6]={
+ category="nd",
+ description="SORA SOMPENG DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F6,
+ },
+ [0x110F7]={
+ category="nd",
+ description="SORA SOMPENG DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F7,
+ },
+ [0x110F8]={
+ category="nd",
+ description="SORA SOMPENG DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F8,
+ },
+ [0x110F9]={
+ category="nd",
+ description="SORA SOMPENG DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x110F9,
+ },
+ [0x11100]={
+ category="mn",
+ description="CHAKMA SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11100,
+ },
+ [0x11101]={
+ category="mn",
+ description="CHAKMA SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11101,
+ },
+ [0x11102]={
+ category="mn",
+ description="CHAKMA SIGN VISARGA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11102,
+ },
+ [0x11103]={
+ category="lo",
+ description="CHAKMA LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11103,
+ },
+ [0x11104]={
+ category="lo",
+ description="CHAKMA LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11104,
+ },
+ [0x11105]={
+ category="lo",
+ description="CHAKMA LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11105,
+ },
+ [0x11106]={
+ category="lo",
+ description="CHAKMA LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11106,
+ },
+ [0x11107]={
+ category="lo",
+ description="CHAKMA LETTER KAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11107,
+ },
+ [0x11108]={
+ category="lo",
+ description="CHAKMA LETTER KHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11108,
+ },
+ [0x11109]={
+ category="lo",
+ description="CHAKMA LETTER GAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11109,
+ },
+ [0x1110A]={
+ category="lo",
+ description="CHAKMA LETTER GHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110A,
+ },
+ [0x1110B]={
+ category="lo",
+ description="CHAKMA LETTER NGAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110B,
+ },
+ [0x1110C]={
+ category="lo",
+ description="CHAKMA LETTER CAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110C,
+ },
+ [0x1110D]={
+ category="lo",
+ description="CHAKMA LETTER CHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110D,
+ },
+ [0x1110E]={
+ category="lo",
+ description="CHAKMA LETTER JAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110E,
+ },
+ [0x1110F]={
+ category="lo",
+ description="CHAKMA LETTER JHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1110F,
+ },
+ [0x11110]={
+ category="lo",
+ description="CHAKMA LETTER NYAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11110,
+ },
+ [0x11111]={
+ category="lo",
+ description="CHAKMA LETTER TTAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11111,
+ },
+ [0x11112]={
+ category="lo",
+ description="CHAKMA LETTER TTHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11112,
+ },
+ [0x11113]={
+ category="lo",
+ description="CHAKMA LETTER DDAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11113,
+ },
+ [0x11114]={
+ category="lo",
+ description="CHAKMA LETTER DDHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11114,
+ },
+ [0x11115]={
+ category="lo",
+ description="CHAKMA LETTER NNAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11115,
+ },
+ [0x11116]={
+ category="lo",
+ description="CHAKMA LETTER TAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11116,
+ },
+ [0x11117]={
+ category="lo",
+ description="CHAKMA LETTER THAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11117,
+ },
+ [0x11118]={
+ category="lo",
+ description="CHAKMA LETTER DAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11118,
+ },
+ [0x11119]={
+ category="lo",
+ description="CHAKMA LETTER DHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11119,
+ },
+ [0x1111A]={
+ category="lo",
+ description="CHAKMA LETTER NAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111A,
+ },
+ [0x1111B]={
+ category="lo",
+ description="CHAKMA LETTER PAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111B,
+ },
+ [0x1111C]={
+ category="lo",
+ description="CHAKMA LETTER PHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111C,
+ },
+ [0x1111D]={
+ category="lo",
+ description="CHAKMA LETTER BAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111D,
+ },
+ [0x1111E]={
+ category="lo",
+ description="CHAKMA LETTER BHAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111E,
+ },
+ [0x1111F]={
+ category="lo",
+ description="CHAKMA LETTER MAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1111F,
+ },
+ [0x11120]={
+ category="lo",
+ description="CHAKMA LETTER YYAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11120,
+ },
+ [0x11121]={
+ category="lo",
+ description="CHAKMA LETTER YAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11121,
+ },
+ [0x11122]={
+ category="lo",
+ description="CHAKMA LETTER RAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11122,
+ },
+ [0x11123]={
+ category="lo",
+ description="CHAKMA LETTER LAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11123,
+ },
+ [0x11124]={
+ category="lo",
+ description="CHAKMA LETTER WAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11124,
+ },
+ [0x11125]={
+ category="lo",
+ description="CHAKMA LETTER SAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11125,
+ },
+ [0x11126]={
+ category="lo",
+ description="CHAKMA LETTER HAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11126,
+ },
+ [0x11127]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN A",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11127,
+ },
+ [0x11128]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN I",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11128,
+ },
+ [0x11129]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN II",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11129,
+ },
+ [0x1112A]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1112A,
+ },
+ [0x1112B]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1112B,
+ },
+ [0x1112C]={
+ category="mc",
+ description="CHAKMA VOWEL SIGN E",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1112C,
+ },
+ [0x1112D]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1112D,
+ },
+ [0x1112E]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN O",
+ direction="nsm",
+ linebreak="cm",
+ specials={ "char", 0x11131, 0x11127 },
+ unicodeslot=0x1112E,
+ },
+ [0x1112F]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN AU",
+ direction="nsm",
+ linebreak="cm",
+ specials={ "char", 0x11132, 0x11127 },
+ unicodeslot=0x1112F,
+ },
+ [0x11130]={
+ category="mn",
+ description="CHAKMA VOWEL SIGN OI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11130,
+ },
+ [0x11131]={
+ category="mn",
+ description="CHAKMA O MARK",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11131,
+ },
+ [0x11132]={
+ category="mn",
+ description="CHAKMA AU MARK",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11132,
+ },
+ [0x11133]={
+ category="mn",
+ description="CHAKMA VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11133,
+ },
+ [0x11134]={
+ category="mn",
+ description="CHAKMA MAAYYAA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11134,
+ },
+ [0x11136]={
+ category="nd",
+ description="CHAKMA DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11136,
+ },
+ [0x11137]={
+ category="nd",
+ description="CHAKMA DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11137,
+ },
+ [0x11138]={
+ category="nd",
+ description="CHAKMA DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11138,
+ },
+ [0x11139]={
+ category="nd",
+ description="CHAKMA DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11139,
+ },
+ [0x1113A]={
+ category="nd",
+ description="CHAKMA DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113A,
+ },
+ [0x1113B]={
+ category="nd",
+ description="CHAKMA DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113B,
+ },
+ [0x1113C]={
+ category="nd",
+ description="CHAKMA DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113C,
+ },
+ [0x1113D]={
+ category="nd",
+ description="CHAKMA DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113D,
+ },
+ [0x1113E]={
+ category="nd",
+ description="CHAKMA DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113E,
+ },
+ [0x1113F]={
+ category="nd",
+ description="CHAKMA DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x1113F,
+ },
+ [0x11140]={
+ category="po",
+ description="CHAKMA SECTION MARK",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11140,
+ },
+ [0x11141]={
+ category="po",
+ description="CHAKMA DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11141,
+ },
+ [0x11142]={
+ category="po",
+ description="CHAKMA DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11142,
+ },
+ [0x11143]={
+ category="po",
+ description="CHAKMA QUESTION MARK",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11143,
+ },
+ [0x11180]={
+ category="mn",
+ description="SHARADA SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11180,
+ },
+ [0x11181]={
+ category="mn",
+ description="SHARADA SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11181,
+ },
+ [0x11182]={
+ category="mc",
+ description="SHARADA SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11182,
+ },
+ [0x11183]={
+ category="lo",
+ description="SHARADA LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11183,
+ },
+ [0x11184]={
+ category="lo",
+ description="SHARADA LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11184,
+ },
+ [0x11185]={
+ category="lo",
+ description="SHARADA LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11185,
+ },
+ [0x11186]={
+ category="lo",
+ description="SHARADA LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11186,
+ },
+ [0x11187]={
+ category="lo",
+ description="SHARADA LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11187,
+ },
+ [0x11188]={
+ category="lo",
+ description="SHARADA LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11188,
+ },
+ [0x11189]={
+ category="lo",
+ description="SHARADA LETTER VOCALIC R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11189,
+ },
+ [0x1118A]={
+ category="lo",
+ description="SHARADA LETTER VOCALIC RR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118A,
+ },
+ [0x1118B]={
+ category="lo",
+ description="SHARADA LETTER VOCALIC L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118B,
+ },
+ [0x1118C]={
+ category="lo",
+ description="SHARADA LETTER VOCALIC LL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118C,
+ },
+ [0x1118D]={
+ category="lo",
+ description="SHARADA LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118D,
+ },
+ [0x1118E]={
+ category="lo",
+ description="SHARADA LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118E,
+ },
+ [0x1118F]={
+ category="lo",
+ description="SHARADA LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1118F,
+ },
+ [0x11190]={
+ category="lo",
+ description="SHARADA LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11190,
+ },
+ [0x11191]={
+ category="lo",
+ description="SHARADA LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11191,
+ },
+ [0x11192]={
+ category="lo",
+ description="SHARADA LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11192,
+ },
+ [0x11193]={
+ category="lo",
+ description="SHARADA LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11193,
+ },
+ [0x11194]={
+ category="lo",
+ description="SHARADA LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11194,
+ },
+ [0x11195]={
+ category="lo",
+ description="SHARADA LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11195,
+ },
+ [0x11196]={
+ category="lo",
+ description="SHARADA LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11196,
+ },
+ [0x11197]={
+ category="lo",
+ description="SHARADA LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11197,
+ },
+ [0x11198]={
+ category="lo",
+ description="SHARADA LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11198,
+ },
+ [0x11199]={
+ category="lo",
+ description="SHARADA LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11199,
+ },
+ [0x1119A]={
+ category="lo",
+ description="SHARADA LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119A,
+ },
+ [0x1119B]={
+ category="lo",
+ description="SHARADA LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119B,
+ },
+ [0x1119C]={
+ category="lo",
+ description="SHARADA LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119C,
+ },
+ [0x1119D]={
+ category="lo",
+ description="SHARADA LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119D,
+ },
+ [0x1119E]={
+ category="lo",
+ description="SHARADA LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119E,
+ },
+ [0x1119F]={
+ category="lo",
+ description="SHARADA LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1119F,
+ },
+ [0x111A0]={
+ category="lo",
+ description="SHARADA LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A0,
+ },
+ [0x111A1]={
+ category="lo",
+ description="SHARADA LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A1,
+ },
+ [0x111A2]={
+ category="lo",
+ description="SHARADA LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A2,
+ },
+ [0x111A3]={
+ category="lo",
+ description="SHARADA LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A3,
+ },
+ [0x111A4]={
+ category="lo",
+ description="SHARADA LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A4,
+ },
+ [0x111A5]={
+ category="lo",
+ description="SHARADA LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A5,
+ },
+ [0x111A6]={
+ category="lo",
+ description="SHARADA LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A6,
+ },
+ [0x111A7]={
+ category="lo",
+ description="SHARADA LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A7,
+ },
+ [0x111A8]={
+ category="lo",
+ description="SHARADA LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A8,
+ },
+ [0x111A9]={
+ category="lo",
+ description="SHARADA LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111A9,
+ },
+ [0x111AA]={
+ category="lo",
+ description="SHARADA LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AA,
+ },
+ [0x111AB]={
+ category="lo",
+ description="SHARADA LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AB,
+ },
+ [0x111AC]={
+ category="lo",
+ description="SHARADA LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AC,
+ },
+ [0x111AD]={
+ category="lo",
+ description="SHARADA LETTER LLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AD,
+ },
+ [0x111AE]={
+ category="lo",
+ description="SHARADA LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AE,
+ },
+ [0x111AF]={
+ category="lo",
+ description="SHARADA LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111AF,
+ },
+ [0x111B0]={
+ category="lo",
+ description="SHARADA LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111B0,
+ },
+ [0x111B1]={
+ category="lo",
+ description="SHARADA LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111B1,
+ },
+ [0x111B2]={
+ category="lo",
+ description="SHARADA LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111B2,
+ },
+ [0x111B3]={
+ category="mc",
+ description="SHARADA VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x111B3,
+ },
+ [0x111B4]={
+ category="mc",
+ description="SHARADA VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x111B4,
+ },
+ [0x111B5]={
+ category="mc",
+ description="SHARADA VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x111B5,
+ },
+ [0x111B6]={
+ category="mn",
+ description="SHARADA VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111B6,
+ },
+ [0x111B7]={
+ category="mn",
+ description="SHARADA VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111B7,
+ },
+ [0x111B8]={
+ category="mn",
+ description="SHARADA VOWEL SIGN VOCALIC R",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111B8,
+ },
+ [0x111B9]={
+ category="mn",
+ description="SHARADA VOWEL SIGN VOCALIC RR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111B9,
+ },
+ [0x111BA]={
+ category="mn",
+ description="SHARADA VOWEL SIGN VOCALIC L",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111BA,
+ },
+ [0x111BB]={
+ category="mn",
+ description="SHARADA VOWEL SIGN VOCALIC LL",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111BB,
+ },
+ [0x111BC]={
+ category="mn",
+ description="SHARADA VOWEL SIGN E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111BC,
+ },
+ [0x111BD]={
+ category="mn",
+ description="SHARADA VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111BD,
+ },
+ [0x111BE]={
+ category="mn",
+ description="SHARADA VOWEL SIGN O",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x111BE,
+ },
+ [0x111BF]={
+ category="mc",
+ description="SHARADA VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x111BF,
+ },
+ [0x111C0]={
+ category="mc",
+ description="SHARADA SIGN VIRAMA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x111C0,
+ },
+ [0x111C1]={
+ category="lo",
+ description="SHARADA SIGN AVAGRAHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111C1,
+ },
+ [0x111C2]={
+ category="lo",
+ description="SHARADA SIGN JIHVAMULIYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111C2,
+ },
+ [0x111C3]={
+ category="lo",
+ description="SHARADA SIGN UPADHMANIYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111C3,
+ },
+ [0x111C4]={
+ category="lo",
+ description="SHARADA OM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111C4,
+ },
+ [0x111C5]={
+ category="po",
+ description="SHARADA DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x111C5,
+ },
+ [0x111C6]={
+ category="po",
+ description="SHARADA DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x111C6,
+ },
+ [0x111C7]={
+ category="po",
+ description="SHARADA ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111C7,
+ },
+ [0x111C8]={
+ category="po",
+ description="SHARADA SEPARATOR",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x111C8,
+ },
+ [0x111D0]={
+ category="nd",
+ description="SHARADA DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D0,
+ },
+ [0x111D1]={
+ category="nd",
+ description="SHARADA DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D1,
+ },
+ [0x111D2]={
+ category="nd",
+ description="SHARADA DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D2,
+ },
+ [0x111D3]={
+ category="nd",
+ description="SHARADA DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D3,
+ },
+ [0x111D4]={
+ category="nd",
+ description="SHARADA DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D4,
+ },
+ [0x111D5]={
+ category="nd",
+ description="SHARADA DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D5,
+ },
+ [0x111D6]={
+ category="nd",
+ description="SHARADA DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D6,
+ },
+ [0x111D7]={
+ category="nd",
+ description="SHARADA DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D7,
+ },
+ [0x111D8]={
+ category="nd",
+ description="SHARADA DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D8,
+ },
+ [0x111D9]={
+ category="nd",
+ description="SHARADA DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x111D9,
+ },
+ [0x11680]={
+ category="lo",
+ description="TAKRI LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11680,
+ },
+ [0x11681]={
+ category="lo",
+ description="TAKRI LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11681,
+ },
+ [0x11682]={
+ category="lo",
+ description="TAKRI LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11682,
+ },
+ [0x11683]={
+ category="lo",
+ description="TAKRI LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11683,
+ },
+ [0x11684]={
+ category="lo",
+ description="TAKRI LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11684,
+ },
+ [0x11685]={
+ category="lo",
+ description="TAKRI LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11685,
+ },
+ [0x11686]={
+ category="lo",
+ description="TAKRI LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11686,
+ },
+ [0x11687]={
+ category="lo",
+ description="TAKRI LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11687,
+ },
+ [0x11688]={
+ category="lo",
+ description="TAKRI LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11688,
+ },
+ [0x11689]={
+ category="lo",
+ description="TAKRI LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11689,
+ },
+ [0x1168A]={
+ category="lo",
+ description="TAKRI LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168A,
+ },
+ [0x1168B]={
+ category="lo",
+ description="TAKRI LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168B,
+ },
+ [0x1168C]={
+ category="lo",
+ description="TAKRI LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168C,
+ },
+ [0x1168D]={
+ category="lo",
+ description="TAKRI LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168D,
+ },
+ [0x1168E]={
+ category="lo",
+ description="TAKRI LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168E,
+ },
+ [0x1168F]={
+ category="lo",
+ description="TAKRI LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1168F,
+ },
+ [0x11690]={
+ category="lo",
+ description="TAKRI LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11690,
+ },
+ [0x11691]={
+ category="lo",
+ description="TAKRI LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11691,
+ },
+ [0x11692]={
+ category="lo",
+ description="TAKRI LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11692,
+ },
+ [0x11693]={
+ category="lo",
+ description="TAKRI LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11693,
+ },
+ [0x11694]={
+ category="lo",
+ description="TAKRI LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11694,
+ },
+ [0x11695]={
+ category="lo",
+ description="TAKRI LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11695,
+ },
+ [0x11696]={
+ category="lo",
+ description="TAKRI LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11696,
+ },
+ [0x11697]={
+ category="lo",
+ description="TAKRI LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11697,
+ },
+ [0x11698]={
+ category="lo",
+ description="TAKRI LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11698,
+ },
+ [0x11699]={
+ category="lo",
+ description="TAKRI LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11699,
+ },
+ [0x1169A]={
+ category="lo",
+ description="TAKRI LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169A,
+ },
+ [0x1169B]={
+ category="lo",
+ description="TAKRI LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169B,
+ },
+ [0x1169C]={
+ category="lo",
+ description="TAKRI LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169C,
+ },
+ [0x1169D]={
+ category="lo",
+ description="TAKRI LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169D,
+ },
+ [0x1169E]={
+ category="lo",
+ description="TAKRI LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169E,
+ },
+ [0x1169F]={
+ category="lo",
+ description="TAKRI LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1169F,
+ },
+ [0x116A0]={
+ category="lo",
+ description="TAKRI LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A0,
+ },
+ [0x116A1]={
+ category="lo",
+ description="TAKRI LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A1,
+ },
+ [0x116A2]={
+ category="lo",
+ description="TAKRI LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A2,
+ },
+ [0x116A3]={
+ category="lo",
+ description="TAKRI LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A3,
+ },
+ [0x116A4]={
+ category="lo",
+ description="TAKRI LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A4,
+ },
+ [0x116A5]={
+ category="lo",
+ description="TAKRI LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A5,
+ },
+ [0x116A6]={
+ category="lo",
+ description="TAKRI LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A6,
+ },
+ [0x116A7]={
+ category="lo",
+ description="TAKRI LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A7,
+ },
+ [0x116A8]={
+ category="lo",
+ description="TAKRI LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A8,
+ },
+ [0x116A9]={
+ category="lo",
+ description="TAKRI LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116A9,
+ },
+ [0x116AA]={
+ category="lo",
+ description="TAKRI LETTER RRA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x116AA,
+ },
+ [0x116AB]={
+ category="mn",
+ description="TAKRI SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116AB,
+ },
+ [0x116AC]={
+ category="mc",
+ description="TAKRI SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x116AC,
+ },
+ [0x116AD]={
+ category="mn",
+ description="TAKRI VOWEL SIGN AA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116AD,
+ },
+ [0x116AE]={
+ category="mc",
+ description="TAKRI VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x116AE,
+ },
+ [0x116AF]={
+ category="mc",
+ description="TAKRI VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x116AF,
+ },
+ [0x116B0]={
+ category="mn",
+ description="TAKRI VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B0,
+ },
+ [0x116B1]={
+ category="mn",
+ description="TAKRI VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B1,
+ },
+ [0x116B2]={
+ category="mn",
+ description="TAKRI VOWEL SIGN E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B2,
+ },
+ [0x116B3]={
+ category="mn",
+ description="TAKRI VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B3,
+ },
+ [0x116B4]={
+ category="mn",
+ description="TAKRI VOWEL SIGN O",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B4,
+ },
+ [0x116B5]={
+ category="mn",
+ description="TAKRI VOWEL SIGN AU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B5,
+ },
+ [0x116B6]={
+ category="mc",
+ description="TAKRI SIGN VIRAMA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x116B6,
+ },
+ [0x116B7]={
+ category="mn",
+ description="TAKRI SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x116B7,
+ },
+ [0x116C0]={
+ category="nd",
+ description="TAKRI DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C0,
+ },
+ [0x116C1]={
+ category="nd",
+ description="TAKRI DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C1,
+ },
+ [0x116C2]={
+ category="nd",
+ description="TAKRI DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C2,
+ },
+ [0x116C3]={
+ category="nd",
+ description="TAKRI DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C3,
+ },
+ [0x116C4]={
+ category="nd",
+ description="TAKRI DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C4,
+ },
+ [0x116C5]={
+ category="nd",
+ description="TAKRI DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C5,
+ },
+ [0x116C6]={
+ category="nd",
+ description="TAKRI DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C6,
+ },
+ [0x116C7]={
+ category="nd",
+ description="TAKRI DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C7,
+ },
+ [0x116C8]={
+ category="nd",
+ description="TAKRI DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C8,
+ },
+ [0x116C9]={
+ category="nd",
+ description="TAKRI DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x116C9,
+ },
[0x12000]={
category="lo",
description="CUNEIFORM SIGN A",
@@ -140022,7 +143971,7 @@ characters.data={
description="CUNEIFORM SIGN ARAD TIMES KUR",
direction="l",
linebreak="al",
- mathclass="ord",
+ mathclass="ordinary",
mathname="backprime",
unicodeslot=0x12035,
},
@@ -158002,6 +161951,937 @@ characters.data={
linebreak="al",
unicodeslot=0x16A38,
},
+ [0x16F00]={
+ category="lo",
+ description="MIAO LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F00,
+ },
+ [0x16F01]={
+ category="lo",
+ description="MIAO LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F01,
+ },
+ [0x16F02]={
+ category="lo",
+ description="MIAO LETTER YI PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F02,
+ },
+ [0x16F03]={
+ category="lo",
+ description="MIAO LETTER PLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F03,
+ },
+ [0x16F04]={
+ category="lo",
+ description="MIAO LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F04,
+ },
+ [0x16F05]={
+ category="lo",
+ description="MIAO LETTER MHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F05,
+ },
+ [0x16F06]={
+ category="lo",
+ description="MIAO LETTER ARCHAIC MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F06,
+ },
+ [0x16F07]={
+ category="lo",
+ description="MIAO LETTER FA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F07,
+ },
+ [0x16F08]={
+ category="lo",
+ description="MIAO LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F08,
+ },
+ [0x16F09]={
+ category="lo",
+ description="MIAO LETTER VFA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F09,
+ },
+ [0x16F0A]={
+ category="lo",
+ description="MIAO LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0A,
+ },
+ [0x16F0B]={
+ category="lo",
+ description="MIAO LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0B,
+ },
+ [0x16F0C]={
+ category="lo",
+ description="MIAO LETTER YI TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0C,
+ },
+ [0x16F0D]={
+ category="lo",
+ description="MIAO LETTER YI TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0D,
+ },
+ [0x16F0E]={
+ category="lo",
+ description="MIAO LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0E,
+ },
+ [0x16F0F]={
+ category="lo",
+ description="MIAO LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F0F,
+ },
+ [0x16F10]={
+ category="lo",
+ description="MIAO LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F10,
+ },
+ [0x16F11]={
+ category="lo",
+ description="MIAO LETTER NHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F11,
+ },
+ [0x16F12]={
+ category="lo",
+ description="MIAO LETTER YI NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F12,
+ },
+ [0x16F13]={
+ category="lo",
+ description="MIAO LETTER ARCHAIC NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F13,
+ },
+ [0x16F14]={
+ category="lo",
+ description="MIAO LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F14,
+ },
+ [0x16F15]={
+ category="lo",
+ description="MIAO LETTER NNHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F15,
+ },
+ [0x16F16]={
+ category="lo",
+ description="MIAO LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F16,
+ },
+ [0x16F17]={
+ category="lo",
+ description="MIAO LETTER LYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F17,
+ },
+ [0x16F18]={
+ category="lo",
+ description="MIAO LETTER LHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F18,
+ },
+ [0x16F19]={
+ category="lo",
+ description="MIAO LETTER LHYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F19,
+ },
+ [0x16F1A]={
+ category="lo",
+ description="MIAO LETTER TLHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1A,
+ },
+ [0x16F1B]={
+ category="lo",
+ description="MIAO LETTER DLHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1B,
+ },
+ [0x16F1C]={
+ category="lo",
+ description="MIAO LETTER TLHYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1C,
+ },
+ [0x16F1D]={
+ category="lo",
+ description="MIAO LETTER DLHYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1D,
+ },
+ [0x16F1E]={
+ category="lo",
+ description="MIAO LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1E,
+ },
+ [0x16F1F]={
+ category="lo",
+ description="MIAO LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1F,
+ },
+ [0x16F20]={
+ category="lo",
+ description="MIAO LETTER YI KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F20,
+ },
+ [0x16F21]={
+ category="lo",
+ description="MIAO LETTER QA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F21,
+ },
+ [0x16F22]={
+ category="lo",
+ description="MIAO LETTER QGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F22,
+ },
+ [0x16F23]={
+ category="lo",
+ description="MIAO LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F23,
+ },
+ [0x16F24]={
+ category="lo",
+ description="MIAO LETTER NGHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F24,
+ },
+ [0x16F25]={
+ category="lo",
+ description="MIAO LETTER ARCHAIC NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F25,
+ },
+ [0x16F26]={
+ category="lo",
+ description="MIAO LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F26,
+ },
+ [0x16F27]={
+ category="lo",
+ description="MIAO LETTER XA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F27,
+ },
+ [0x16F28]={
+ category="lo",
+ description="MIAO LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F28,
+ },
+ [0x16F29]={
+ category="lo",
+ description="MIAO LETTER GHHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F29,
+ },
+ [0x16F2A]={
+ category="lo",
+ description="MIAO LETTER TSSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2A,
+ },
+ [0x16F2B]={
+ category="lo",
+ description="MIAO LETTER DZZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2B,
+ },
+ [0x16F2C]={
+ category="lo",
+ description="MIAO LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2C,
+ },
+ [0x16F2D]={
+ category="lo",
+ description="MIAO LETTER NYHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2D,
+ },
+ [0x16F2E]={
+ category="lo",
+ description="MIAO LETTER TSHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2E,
+ },
+ [0x16F2F]={
+ category="lo",
+ description="MIAO LETTER DZHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2F,
+ },
+ [0x16F30]={
+ category="lo",
+ description="MIAO LETTER YI TSHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F30,
+ },
+ [0x16F31]={
+ category="lo",
+ description="MIAO LETTER YI DZHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F31,
+ },
+ [0x16F32]={
+ category="lo",
+ description="MIAO LETTER REFORMED TSHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F32,
+ },
+ [0x16F33]={
+ category="lo",
+ description="MIAO LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F33,
+ },
+ [0x16F34]={
+ category="lo",
+ description="MIAO LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F34,
+ },
+ [0x16F35]={
+ category="lo",
+ description="MIAO LETTER ZHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F35,
+ },
+ [0x16F36]={
+ category="lo",
+ description="MIAO LETTER ZSHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F36,
+ },
+ [0x16F37]={
+ category="lo",
+ description="MIAO LETTER TSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F37,
+ },
+ [0x16F38]={
+ category="lo",
+ description="MIAO LETTER DZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F38,
+ },
+ [0x16F39]={
+ category="lo",
+ description="MIAO LETTER YI TSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F39,
+ },
+ [0x16F3A]={
+ category="lo",
+ description="MIAO LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3A,
+ },
+ [0x16F3B]={
+ category="lo",
+ description="MIAO LETTER ZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3B,
+ },
+ [0x16F3C]={
+ category="lo",
+ description="MIAO LETTER ZSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3C,
+ },
+ [0x16F3D]={
+ category="lo",
+ description="MIAO LETTER ZZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3D,
+ },
+ [0x16F3E]={
+ category="lo",
+ description="MIAO LETTER ZZSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3E,
+ },
+ [0x16F3F]={
+ category="lo",
+ description="MIAO LETTER ARCHAIC ZZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3F,
+ },
+ [0x16F40]={
+ category="lo",
+ description="MIAO LETTER ZZYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F40,
+ },
+ [0x16F41]={
+ category="lo",
+ description="MIAO LETTER ZZSYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F41,
+ },
+ [0x16F42]={
+ category="lo",
+ description="MIAO LETTER WA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F42,
+ },
+ [0x16F43]={
+ category="lo",
+ description="MIAO LETTER AH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F43,
+ },
+ [0x16F44]={
+ category="lo",
+ description="MIAO LETTER HHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F44,
+ },
+ [0x16F50]={
+ category="lo",
+ description="MIAO LETTER NASALIZATION",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F50,
+ },
+ [0x16F51]={
+ category="mc",
+ description="MIAO SIGN ASPIRATION",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F51,
+ },
+ [0x16F52]={
+ category="mc",
+ description="MIAO SIGN REFORMED VOICING",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F52,
+ },
+ [0x16F53]={
+ category="mc",
+ description="MIAO SIGN REFORMED ASPIRATION",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F53,
+ },
+ [0x16F54]={
+ category="mc",
+ description="MIAO VOWEL SIGN A",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F54,
+ },
+ [0x16F55]={
+ category="mc",
+ description="MIAO VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F55,
+ },
+ [0x16F56]={
+ category="mc",
+ description="MIAO VOWEL SIGN AHH",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F56,
+ },
+ [0x16F57]={
+ category="mc",
+ description="MIAO VOWEL SIGN AN",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F57,
+ },
+ [0x16F58]={
+ category="mc",
+ description="MIAO VOWEL SIGN ANG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F58,
+ },
+ [0x16F59]={
+ category="mc",
+ description="MIAO VOWEL SIGN O",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F59,
+ },
+ [0x16F5A]={
+ category="mc",
+ description="MIAO VOWEL SIGN OO",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5A,
+ },
+ [0x16F5B]={
+ category="mc",
+ description="MIAO VOWEL SIGN WO",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5B,
+ },
+ [0x16F5C]={
+ category="mc",
+ description="MIAO VOWEL SIGN W",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5C,
+ },
+ [0x16F5D]={
+ category="mc",
+ description="MIAO VOWEL SIGN E",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5D,
+ },
+ [0x16F5E]={
+ category="mc",
+ description="MIAO VOWEL SIGN EN",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5E,
+ },
+ [0x16F5F]={
+ category="mc",
+ description="MIAO VOWEL SIGN ENG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F5F,
+ },
+ [0x16F60]={
+ category="mc",
+ description="MIAO VOWEL SIGN OEY",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F60,
+ },
+ [0x16F61]={
+ category="mc",
+ description="MIAO VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F61,
+ },
+ [0x16F62]={
+ category="mc",
+ description="MIAO VOWEL SIGN IA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F62,
+ },
+ [0x16F63]={
+ category="mc",
+ description="MIAO VOWEL SIGN IAN",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F63,
+ },
+ [0x16F64]={
+ category="mc",
+ description="MIAO VOWEL SIGN IANG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F64,
+ },
+ [0x16F65]={
+ category="mc",
+ description="MIAO VOWEL SIGN IO",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F65,
+ },
+ [0x16F66]={
+ category="mc",
+ description="MIAO VOWEL SIGN IE",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F66,
+ },
+ [0x16F67]={
+ category="mc",
+ description="MIAO VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F67,
+ },
+ [0x16F68]={
+ category="mc",
+ description="MIAO VOWEL SIGN IU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F68,
+ },
+ [0x16F69]={
+ category="mc",
+ description="MIAO VOWEL SIGN ING",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F69,
+ },
+ [0x16F6A]={
+ category="mc",
+ description="MIAO VOWEL SIGN U",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6A,
+ },
+ [0x16F6B]={
+ category="mc",
+ description="MIAO VOWEL SIGN UA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6B,
+ },
+ [0x16F6C]={
+ category="mc",
+ description="MIAO VOWEL SIGN UAN",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6C,
+ },
+ [0x16F6D]={
+ category="mc",
+ description="MIAO VOWEL SIGN UANG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6D,
+ },
+ [0x16F6E]={
+ category="mc",
+ description="MIAO VOWEL SIGN UU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6E,
+ },
+ [0x16F6F]={
+ category="mc",
+ description="MIAO VOWEL SIGN UEI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F6F,
+ },
+ [0x16F70]={
+ category="mc",
+ description="MIAO VOWEL SIGN UNG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F70,
+ },
+ [0x16F71]={
+ category="mc",
+ description="MIAO VOWEL SIGN Y",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F71,
+ },
+ [0x16F72]={
+ category="mc",
+ description="MIAO VOWEL SIGN YI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F72,
+ },
+ [0x16F73]={
+ category="mc",
+ description="MIAO VOWEL SIGN AE",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F73,
+ },
+ [0x16F74]={
+ category="mc",
+ description="MIAO VOWEL SIGN AEE",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F74,
+ },
+ [0x16F75]={
+ category="mc",
+ description="MIAO VOWEL SIGN ERR",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F75,
+ },
+ [0x16F76]={
+ category="mc",
+ description="MIAO VOWEL SIGN ROUNDED ERR",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F76,
+ },
+ [0x16F77]={
+ category="mc",
+ description="MIAO VOWEL SIGN ER",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F77,
+ },
+ [0x16F78]={
+ category="mc",
+ description="MIAO VOWEL SIGN ROUNDED ER",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F78,
+ },
+ [0x16F79]={
+ category="mc",
+ description="MIAO VOWEL SIGN AI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F79,
+ },
+ [0x16F7A]={
+ category="mc",
+ description="MIAO VOWEL SIGN EI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F7A,
+ },
+ [0x16F7B]={
+ category="mc",
+ description="MIAO VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F7B,
+ },
+ [0x16F7C]={
+ category="mc",
+ description="MIAO VOWEL SIGN OU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F7C,
+ },
+ [0x16F7D]={
+ category="mc",
+ description="MIAO VOWEL SIGN N",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F7D,
+ },
+ [0x16F7E]={
+ category="mc",
+ description="MIAO VOWEL SIGN NG",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x16F7E,
+ },
+ [0x16F8F]={
+ category="mn",
+ description="MIAO TONE RIGHT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16F8F,
+ },
+ [0x16F90]={
+ category="mn",
+ description="MIAO TONE TOP RIGHT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16F90,
+ },
+ [0x16F91]={
+ category="mn",
+ description="MIAO TONE ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16F91,
+ },
+ [0x16F92]={
+ category="mn",
+ description="MIAO TONE BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16F92,
+ },
+ [0x16F93]={
+ category="lm",
+ description="MIAO LETTER TONE-2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F93,
+ },
+ [0x16F94]={
+ category="lm",
+ description="MIAO LETTER TONE-3",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F94,
+ },
+ [0x16F95]={
+ category="lm",
+ description="MIAO LETTER TONE-4",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F95,
+ },
+ [0x16F96]={
+ category="lm",
+ description="MIAO LETTER TONE-5",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F96,
+ },
+ [0x16F97]={
+ category="lm",
+ description="MIAO LETTER TONE-6",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F97,
+ },
+ [0x16F98]={
+ category="lm",
+ description="MIAO LETTER TONE-7",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F98,
+ },
+ [0x16F99]={
+ category="lm",
+ description="MIAO LETTER TONE-8",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F99,
+ },
+ [0x16F9A]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-1",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9A,
+ },
+ [0x16F9B]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9B,
+ },
+ [0x16F9C]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-4",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9C,
+ },
+ [0x16F9D]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-5",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9D,
+ },
+ [0x16F9E]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-6",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9E,
+ },
+ [0x16F9F]={
+ category="lm",
+ description="MIAO LETTER REFORMED TONE-8",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F9F,
+ },
[0x1B000]={
category="lo",
cjkwd="w",
@@ -168920,11 +173800,11 @@ characters.data={
},
[0x1D6FB]={
category="sm",
+ comment="mathname='nabla'",
description="MATHEMATICAL ITALIC NABLA",
direction="l",
linebreak="al",
mathclass="default",
- mathname="nabla",
specials={ "font", 0x2207 },
unicodeslot=0x1D6FB,
visual="it",
@@ -171227,1425 +176107,2714 @@ characters.data={
specials={ "font", 0x0039 },
unicodeslot=0x1D7FF,
},
+ [0x1EE00]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL ALEF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0627 },
+ unicodeslot=0x1EE00,
+ },
+ [0x1EE01]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0628 },
+ unicodeslot=0x1EE01,
+ },
+ [0x1EE02]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EE02,
+ },
+ [0x1EE03]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062F },
+ unicodeslot=0x1EE03,
+ },
+ [0x1EE05]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL WAW",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0648 },
+ unicodeslot=0x1EE05,
+ },
+ [0x1EE06]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL ZAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0632 },
+ unicodeslot=0x1EE06,
+ },
+ [0x1EE07]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EE07,
+ },
+ [0x1EE08]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0637 },
+ unicodeslot=0x1EE08,
+ },
+ [0x1EE09]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EE09,
+ },
+ [0x1EE0A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL KAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0643 },
+ unicodeslot=0x1EE0A,
+ },
+ [0x1EE0B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LAM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0644 },
+ unicodeslot=0x1EE0B,
+ },
+ [0x1EE0C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL MEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0645 },
+ unicodeslot=0x1EE0C,
+ },
+ [0x1EE0D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EE0D,
+ },
+ [0x1EE0E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EE0E,
+ },
+ [0x1EE0F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EE0F,
+ },
+ [0x1EE10]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0641 },
+ unicodeslot=0x1EE10,
+ },
+ [0x1EE11]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EE11,
+ },
+ [0x1EE12]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EE12,
+ },
+ [0x1EE13]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL REH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0631 },
+ unicodeslot=0x1EE13,
+ },
+ [0x1EE14]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EE14,
+ },
+ [0x1EE15]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062A },
+ unicodeslot=0x1EE15,
+ },
+ [0x1EE16]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL THEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062B },
+ unicodeslot=0x1EE16,
+ },
+ [0x1EE17]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EE17,
+ },
+ [0x1EE18]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL THAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0630 },
+ unicodeslot=0x1EE18,
+ },
+ [0x1EE19]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EE19,
+ },
+ [0x1EE1A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL ZAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0638 },
+ unicodeslot=0x1EE1A,
+ },
+ [0x1EE1B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EE1B,
+ },
+ [0x1EE1C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOTLESS BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x066E },
+ unicodeslot=0x1EE1C,
+ },
+ [0x1EE1D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOTLESS NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x06BA },
+ unicodeslot=0x1EE1D,
+ },
+ [0x1EE1E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOTLESS FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x06A1 },
+ unicodeslot=0x1EE1E,
+ },
+ [0x1EE1F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOTLESS QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x066F },
+ unicodeslot=0x1EE1F,
+ },
+ [0x1EE21]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0628 },
+ unicodeslot=0x1EE21,
+ },
+ [0x1EE22]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EE22,
+ },
+ [0x1EE24]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL HEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0647 },
+ unicodeslot=0x1EE24,
+ },
+ [0x1EE27]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EE27,
+ },
+ [0x1EE29]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EE29,
+ },
+ [0x1EE2A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL KAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0643 },
+ unicodeslot=0x1EE2A,
+ },
+ [0x1EE2B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL LAM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0644 },
+ unicodeslot=0x1EE2B,
+ },
+ [0x1EE2C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL MEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0645 },
+ unicodeslot=0x1EE2C,
+ },
+ [0x1EE2D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EE2D,
+ },
+ [0x1EE2E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EE2E,
+ },
+ [0x1EE2F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EE2F,
+ },
+ [0x1EE30]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0641 },
+ unicodeslot=0x1EE30,
+ },
+ [0x1EE31]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EE31,
+ },
+ [0x1EE32]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EE32,
+ },
+ [0x1EE34]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EE34,
+ },
+ [0x1EE35]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL TEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062A },
+ unicodeslot=0x1EE35,
+ },
+ [0x1EE36]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL THEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062B },
+ unicodeslot=0x1EE36,
+ },
+ [0x1EE37]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EE37,
+ },
+ [0x1EE39]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EE39,
+ },
+ [0x1EE3B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL INITIAL GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EE3B,
+ },
+ [0x1EE42]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EE42,
+ },
+ [0x1EE47]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EE47,
+ },
+ [0x1EE49]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EE49,
+ },
+ [0x1EE4B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED LAM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0644 },
+ unicodeslot=0x1EE4B,
+ },
+ [0x1EE4D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EE4D,
+ },
+ [0x1EE4E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EE4E,
+ },
+ [0x1EE4F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EE4F,
+ },
+ [0x1EE51]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EE51,
+ },
+ [0x1EE52]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EE52,
+ },
+ [0x1EE54]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EE54,
+ },
+ [0x1EE57]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EE57,
+ },
+ [0x1EE59]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EE59,
+ },
+ [0x1EE5B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EE5B,
+ },
+ [0x1EE5D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED DOTLESS NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x06BA },
+ unicodeslot=0x1EE5D,
+ },
+ [0x1EE5F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL TAILED DOTLESS QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x066F },
+ unicodeslot=0x1EE5F,
+ },
+ [0x1EE61]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0628 },
+ unicodeslot=0x1EE61,
+ },
+ [0x1EE62]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EE62,
+ },
+ [0x1EE64]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED HEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0647 },
+ unicodeslot=0x1EE64,
+ },
+ [0x1EE67]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EE67,
+ },
+ [0x1EE68]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED TAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0637 },
+ unicodeslot=0x1EE68,
+ },
+ [0x1EE69]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EE69,
+ },
+ [0x1EE6A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED KAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0643 },
+ unicodeslot=0x1EE6A,
+ },
+ [0x1EE6C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED MEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0645 },
+ unicodeslot=0x1EE6C,
+ },
+ [0x1EE6D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EE6D,
+ },
+ [0x1EE6E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EE6E,
+ },
+ [0x1EE6F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EE6F,
+ },
+ [0x1EE70]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0641 },
+ unicodeslot=0x1EE70,
+ },
+ [0x1EE71]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EE71,
+ },
+ [0x1EE72]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EE72,
+ },
+ [0x1EE74]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EE74,
+ },
+ [0x1EE75]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED TEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062A },
+ unicodeslot=0x1EE75,
+ },
+ [0x1EE76]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED THEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062B },
+ unicodeslot=0x1EE76,
+ },
+ [0x1EE77]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EE77,
+ },
+ [0x1EE79]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EE79,
+ },
+ [0x1EE7A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED ZAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0638 },
+ unicodeslot=0x1EE7A,
+ },
+ [0x1EE7B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EE7B,
+ },
+ [0x1EE7C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED DOTLESS BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x066E },
+ unicodeslot=0x1EE7C,
+ },
+ [0x1EE7E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL STRETCHED DOTLESS FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x06A1 },
+ unicodeslot=0x1EE7E,
+ },
+ [0x1EE80]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED ALEF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0627 },
+ unicodeslot=0x1EE80,
+ },
+ [0x1EE81]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0628 },
+ unicodeslot=0x1EE81,
+ },
+ [0x1EE82]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EE82,
+ },
+ [0x1EE83]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED DAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062F },
+ unicodeslot=0x1EE83,
+ },
+ [0x1EE84]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED HEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0647 },
+ unicodeslot=0x1EE84,
+ },
+ [0x1EE85]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED WAW",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0648 },
+ unicodeslot=0x1EE85,
+ },
+ [0x1EE86]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED ZAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0632 },
+ unicodeslot=0x1EE86,
+ },
+ [0x1EE87]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EE87,
+ },
+ [0x1EE88]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED TAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0637 },
+ unicodeslot=0x1EE88,
+ },
+ [0x1EE89]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EE89,
+ },
+ [0x1EE8B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED LAM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0644 },
+ unicodeslot=0x1EE8B,
+ },
+ [0x1EE8C]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED MEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0645 },
+ unicodeslot=0x1EE8C,
+ },
+ [0x1EE8D]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EE8D,
+ },
+ [0x1EE8E]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EE8E,
+ },
+ [0x1EE8F]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EE8F,
+ },
+ [0x1EE90]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0641 },
+ unicodeslot=0x1EE90,
+ },
+ [0x1EE91]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EE91,
+ },
+ [0x1EE92]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EE92,
+ },
+ [0x1EE93]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED REH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0631 },
+ unicodeslot=0x1EE93,
+ },
+ [0x1EE94]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EE94,
+ },
+ [0x1EE95]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED TEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062A },
+ unicodeslot=0x1EE95,
+ },
+ [0x1EE96]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED THEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062B },
+ unicodeslot=0x1EE96,
+ },
+ [0x1EE97]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EE97,
+ },
+ [0x1EE98]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED THAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0630 },
+ unicodeslot=0x1EE98,
+ },
+ [0x1EE99]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EE99,
+ },
+ [0x1EE9A]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED ZAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0638 },
+ unicodeslot=0x1EE9A,
+ },
+ [0x1EE9B]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL LOOPED GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EE9B,
+ },
+ [0x1EEA1]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK BEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0628 },
+ unicodeslot=0x1EEA1,
+ },
+ [0x1EEA2]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062C },
+ unicodeslot=0x1EEA2,
+ },
+ [0x1EEA3]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK DAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062F },
+ unicodeslot=0x1EEA3,
+ },
+ [0x1EEA5]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK WAW",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0648 },
+ unicodeslot=0x1EEA5,
+ },
+ [0x1EEA6]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0632 },
+ unicodeslot=0x1EEA6,
+ },
+ [0x1EEA7]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK HAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062D },
+ unicodeslot=0x1EEA7,
+ },
+ [0x1EEA8]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK TAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0637 },
+ unicodeslot=0x1EEA8,
+ },
+ [0x1EEA9]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK YEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x064A },
+ unicodeslot=0x1EEA9,
+ },
+ [0x1EEAB]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK LAM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0644 },
+ unicodeslot=0x1EEAB,
+ },
+ [0x1EEAC]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0645 },
+ unicodeslot=0x1EEAC,
+ },
+ [0x1EEAD]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK NOON",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0646 },
+ unicodeslot=0x1EEAD,
+ },
+ [0x1EEAE]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0633 },
+ unicodeslot=0x1EEAE,
+ },
+ [0x1EEAF]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK AIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0639 },
+ unicodeslot=0x1EEAF,
+ },
+ [0x1EEB0]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK FEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0641 },
+ unicodeslot=0x1EEB0,
+ },
+ [0x1EEB1]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK SAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0635 },
+ unicodeslot=0x1EEB1,
+ },
+ [0x1EEB2]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK QAF",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0642 },
+ unicodeslot=0x1EEB2,
+ },
+ [0x1EEB3]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK REH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0631 },
+ unicodeslot=0x1EEB3,
+ },
+ [0x1EEB4]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0634 },
+ unicodeslot=0x1EEB4,
+ },
+ [0x1EEB5]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK TEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062A },
+ unicodeslot=0x1EEB5,
+ },
+ [0x1EEB6]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK THEH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062B },
+ unicodeslot=0x1EEB6,
+ },
+ [0x1EEB7]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x062E },
+ unicodeslot=0x1EEB7,
+ },
+ [0x1EEB8]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK THAL",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0630 },
+ unicodeslot=0x1EEB8,
+ },
+ [0x1EEB9]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK DAD",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0636 },
+ unicodeslot=0x1EEB9,
+ },
+ [0x1EEBA]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x0638 },
+ unicodeslot=0x1EEBA,
+ },
+ [0x1EEBB]={
+ category="lo",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN",
+ direction="al",
+ linebreak="al",
+ specials={ "font", 0x063A },
+ unicodeslot=0x1EEBB,
+ },
+ [0x1EEF0]={
+ category="sm",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1EEF0,
+ },
+ [0x1EEF1]={
+ category="sm",
+ comment="check math properties",
+ description="ARABIC MATHEMATICAL OPERATOR HAH WITH DAL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1EEF1,
+ },
[0x1F000]={
category="so",
description="MAHJONG TILE EAST WIND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F000,
},
[0x1F001]={
category="so",
description="MAHJONG TILE SOUTH WIND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F001,
},
[0x1F002]={
category="so",
description="MAHJONG TILE WEST WIND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F002,
},
[0x1F003]={
category="so",
description="MAHJONG TILE NORTH WIND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F003,
},
[0x1F004]={
category="so",
description="MAHJONG TILE RED DRAGON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F004,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x1F005]={
category="so",
description="MAHJONG TILE GREEN DRAGON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F005,
},
[0x1F006]={
category="so",
description="MAHJONG TILE WHITE DRAGON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F006,
},
[0x1F007]={
category="so",
description="MAHJONG TILE ONE OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F007,
},
[0x1F008]={
category="so",
description="MAHJONG TILE TWO OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F008,
},
[0x1F009]={
category="so",
description="MAHJONG TILE THREE OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F009,
},
[0x1F00A]={
category="so",
description="MAHJONG TILE FOUR OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00A,
},
[0x1F00B]={
category="so",
description="MAHJONG TILE FIVE OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00B,
},
[0x1F00C]={
category="so",
description="MAHJONG TILE SIX OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00C,
},
[0x1F00D]={
category="so",
description="MAHJONG TILE SEVEN OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00D,
},
[0x1F00E]={
category="so",
description="MAHJONG TILE EIGHT OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00E,
},
[0x1F00F]={
category="so",
description="MAHJONG TILE NINE OF CHARACTERS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F00F,
},
[0x1F010]={
category="so",
description="MAHJONG TILE ONE OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F010,
},
[0x1F011]={
category="so",
description="MAHJONG TILE TWO OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F011,
},
[0x1F012]={
category="so",
description="MAHJONG TILE THREE OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F012,
},
[0x1F013]={
category="so",
description="MAHJONG TILE FOUR OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F013,
},
[0x1F014]={
category="so",
description="MAHJONG TILE FIVE OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F014,
},
[0x1F015]={
category="so",
description="MAHJONG TILE SIX OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F015,
},
[0x1F016]={
category="so",
description="MAHJONG TILE SEVEN OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F016,
},
[0x1F017]={
category="so",
description="MAHJONG TILE EIGHT OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F017,
},
[0x1F018]={
category="so",
description="MAHJONG TILE NINE OF BAMBOOS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F018,
},
[0x1F019]={
category="so",
description="MAHJONG TILE ONE OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F019,
},
[0x1F01A]={
category="so",
description="MAHJONG TILE TWO OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01A,
},
[0x1F01B]={
category="so",
description="MAHJONG TILE THREE OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01B,
},
[0x1F01C]={
category="so",
description="MAHJONG TILE FOUR OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01C,
},
[0x1F01D]={
category="so",
description="MAHJONG TILE FIVE OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01D,
},
[0x1F01E]={
category="so",
description="MAHJONG TILE SIX OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01E,
},
[0x1F01F]={
category="so",
description="MAHJONG TILE SEVEN OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F01F,
},
[0x1F020]={
category="so",
description="MAHJONG TILE EIGHT OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F020,
},
[0x1F021]={
category="so",
description="MAHJONG TILE NINE OF CIRCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F021,
},
[0x1F022]={
category="so",
description="MAHJONG TILE PLUM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F022,
},
[0x1F023]={
category="so",
description="MAHJONG TILE ORCHID",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F023,
},
[0x1F024]={
category="so",
description="MAHJONG TILE BAMBOO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F024,
},
[0x1F025]={
category="so",
description="MAHJONG TILE CHRYSANTHEMUM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F025,
},
[0x1F026]={
category="so",
description="MAHJONG TILE SPRING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F026,
},
[0x1F027]={
category="so",
description="MAHJONG TILE SUMMER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F027,
},
[0x1F028]={
category="so",
description="MAHJONG TILE AUTUMN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F028,
},
[0x1F029]={
category="so",
description="MAHJONG TILE WINTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F029,
},
[0x1F02A]={
category="so",
description="MAHJONG TILE JOKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F02A,
},
[0x1F02B]={
category="so",
description="MAHJONG TILE BACK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F02B,
},
[0x1F030]={
category="so",
description="DOMINO TILE HORIZONTAL BACK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F030,
},
[0x1F031]={
category="so",
description="DOMINO TILE HORIZONTAL-00-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F031,
},
[0x1F032]={
category="so",
description="DOMINO TILE HORIZONTAL-00-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F032,
},
[0x1F033]={
category="so",
description="DOMINO TILE HORIZONTAL-00-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F033,
},
[0x1F034]={
category="so",
description="DOMINO TILE HORIZONTAL-00-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F034,
},
[0x1F035]={
category="so",
description="DOMINO TILE HORIZONTAL-00-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F035,
},
[0x1F036]={
category="so",
description="DOMINO TILE HORIZONTAL-00-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F036,
},
[0x1F037]={
category="so",
description="DOMINO TILE HORIZONTAL-00-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F037,
},
[0x1F038]={
category="so",
description="DOMINO TILE HORIZONTAL-01-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F038,
},
[0x1F039]={
category="so",
description="DOMINO TILE HORIZONTAL-01-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F039,
},
[0x1F03A]={
category="so",
description="DOMINO TILE HORIZONTAL-01-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03A,
},
[0x1F03B]={
category="so",
description="DOMINO TILE HORIZONTAL-01-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03B,
},
[0x1F03C]={
category="so",
description="DOMINO TILE HORIZONTAL-01-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03C,
},
[0x1F03D]={
category="so",
description="DOMINO TILE HORIZONTAL-01-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03D,
},
[0x1F03E]={
category="so",
description="DOMINO TILE HORIZONTAL-01-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03E,
},
[0x1F03F]={
category="so",
description="DOMINO TILE HORIZONTAL-02-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F03F,
},
[0x1F040]={
category="so",
description="DOMINO TILE HORIZONTAL-02-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F040,
},
[0x1F041]={
category="so",
description="DOMINO TILE HORIZONTAL-02-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F041,
},
[0x1F042]={
category="so",
description="DOMINO TILE HORIZONTAL-02-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F042,
},
[0x1F043]={
category="so",
description="DOMINO TILE HORIZONTAL-02-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F043,
},
[0x1F044]={
category="so",
description="DOMINO TILE HORIZONTAL-02-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F044,
},
[0x1F045]={
category="so",
description="DOMINO TILE HORIZONTAL-02-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F045,
},
[0x1F046]={
category="so",
description="DOMINO TILE HORIZONTAL-03-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F046,
},
[0x1F047]={
category="so",
description="DOMINO TILE HORIZONTAL-03-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F047,
},
[0x1F048]={
category="so",
description="DOMINO TILE HORIZONTAL-03-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F048,
},
[0x1F049]={
category="so",
description="DOMINO TILE HORIZONTAL-03-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F049,
},
[0x1F04A]={
category="so",
description="DOMINO TILE HORIZONTAL-03-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04A,
},
[0x1F04B]={
category="so",
description="DOMINO TILE HORIZONTAL-03-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04B,
},
[0x1F04C]={
category="so",
description="DOMINO TILE HORIZONTAL-03-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04C,
},
[0x1F04D]={
category="so",
description="DOMINO TILE HORIZONTAL-04-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04D,
},
[0x1F04E]={
category="so",
description="DOMINO TILE HORIZONTAL-04-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04E,
},
[0x1F04F]={
category="so",
description="DOMINO TILE HORIZONTAL-04-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F04F,
},
[0x1F050]={
category="so",
description="DOMINO TILE HORIZONTAL-04-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F050,
},
[0x1F051]={
category="so",
description="DOMINO TILE HORIZONTAL-04-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F051,
},
[0x1F052]={
category="so",
description="DOMINO TILE HORIZONTAL-04-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F052,
},
[0x1F053]={
category="so",
description="DOMINO TILE HORIZONTAL-04-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F053,
},
[0x1F054]={
category="so",
description="DOMINO TILE HORIZONTAL-05-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F054,
},
[0x1F055]={
category="so",
description="DOMINO TILE HORIZONTAL-05-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F055,
},
[0x1F056]={
category="so",
description="DOMINO TILE HORIZONTAL-05-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F056,
},
[0x1F057]={
category="so",
description="DOMINO TILE HORIZONTAL-05-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F057,
},
[0x1F058]={
category="so",
description="DOMINO TILE HORIZONTAL-05-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F058,
},
[0x1F059]={
category="so",
description="DOMINO TILE HORIZONTAL-05-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F059,
},
[0x1F05A]={
category="so",
description="DOMINO TILE HORIZONTAL-05-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05A,
},
[0x1F05B]={
category="so",
description="DOMINO TILE HORIZONTAL-06-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05B,
},
[0x1F05C]={
category="so",
description="DOMINO TILE HORIZONTAL-06-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05C,
},
[0x1F05D]={
category="so",
description="DOMINO TILE HORIZONTAL-06-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05D,
},
[0x1F05E]={
category="so",
description="DOMINO TILE HORIZONTAL-06-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05E,
},
[0x1F05F]={
category="so",
description="DOMINO TILE HORIZONTAL-06-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F05F,
},
[0x1F060]={
category="so",
description="DOMINO TILE HORIZONTAL-06-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F060,
},
[0x1F061]={
category="so",
description="DOMINO TILE HORIZONTAL-06-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F061,
},
[0x1F062]={
category="so",
description="DOMINO TILE VERTICAL BACK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F062,
},
[0x1F063]={
category="so",
description="DOMINO TILE VERTICAL-00-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F063,
},
[0x1F064]={
category="so",
description="DOMINO TILE VERTICAL-00-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F064,
},
[0x1F065]={
category="so",
description="DOMINO TILE VERTICAL-00-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F065,
},
[0x1F066]={
category="so",
description="DOMINO TILE VERTICAL-00-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F066,
},
[0x1F067]={
category="so",
description="DOMINO TILE VERTICAL-00-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F067,
},
[0x1F068]={
category="so",
description="DOMINO TILE VERTICAL-00-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F068,
},
[0x1F069]={
category="so",
description="DOMINO TILE VERTICAL-00-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F069,
},
[0x1F06A]={
category="so",
description="DOMINO TILE VERTICAL-01-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06A,
},
[0x1F06B]={
category="so",
description="DOMINO TILE VERTICAL-01-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06B,
},
[0x1F06C]={
category="so",
description="DOMINO TILE VERTICAL-01-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06C,
},
[0x1F06D]={
category="so",
description="DOMINO TILE VERTICAL-01-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06D,
},
[0x1F06E]={
category="so",
description="DOMINO TILE VERTICAL-01-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06E,
},
[0x1F06F]={
category="so",
description="DOMINO TILE VERTICAL-01-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F06F,
},
[0x1F070]={
category="so",
description="DOMINO TILE VERTICAL-01-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F070,
},
[0x1F071]={
category="so",
description="DOMINO TILE VERTICAL-02-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F071,
},
[0x1F072]={
category="so",
description="DOMINO TILE VERTICAL-02-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F072,
},
[0x1F073]={
category="so",
description="DOMINO TILE VERTICAL-02-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F073,
},
[0x1F074]={
category="so",
description="DOMINO TILE VERTICAL-02-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F074,
},
[0x1F075]={
category="so",
description="DOMINO TILE VERTICAL-02-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F075,
},
[0x1F076]={
category="so",
description="DOMINO TILE VERTICAL-02-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F076,
},
[0x1F077]={
category="so",
description="DOMINO TILE VERTICAL-02-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F077,
},
[0x1F078]={
category="so",
description="DOMINO TILE VERTICAL-03-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F078,
},
[0x1F079]={
category="so",
description="DOMINO TILE VERTICAL-03-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F079,
},
[0x1F07A]={
category="so",
description="DOMINO TILE VERTICAL-03-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07A,
},
[0x1F07B]={
category="so",
description="DOMINO TILE VERTICAL-03-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07B,
},
[0x1F07C]={
category="so",
description="DOMINO TILE VERTICAL-03-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07C,
},
[0x1F07D]={
category="so",
description="DOMINO TILE VERTICAL-03-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07D,
},
[0x1F07E]={
category="so",
description="DOMINO TILE VERTICAL-03-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07E,
},
[0x1F07F]={
category="so",
description="DOMINO TILE VERTICAL-04-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F07F,
},
[0x1F080]={
category="so",
description="DOMINO TILE VERTICAL-04-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F080,
},
[0x1F081]={
category="so",
description="DOMINO TILE VERTICAL-04-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F081,
},
[0x1F082]={
category="so",
description="DOMINO TILE VERTICAL-04-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F082,
},
[0x1F083]={
category="so",
description="DOMINO TILE VERTICAL-04-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F083,
},
[0x1F084]={
category="so",
description="DOMINO TILE VERTICAL-04-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F084,
},
[0x1F085]={
category="so",
description="DOMINO TILE VERTICAL-04-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F085,
},
[0x1F086]={
category="so",
description="DOMINO TILE VERTICAL-05-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F086,
},
[0x1F087]={
category="so",
description="DOMINO TILE VERTICAL-05-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F087,
},
[0x1F088]={
category="so",
description="DOMINO TILE VERTICAL-05-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F088,
},
[0x1F089]={
category="so",
description="DOMINO TILE VERTICAL-05-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F089,
},
[0x1F08A]={
category="so",
description="DOMINO TILE VERTICAL-05-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08A,
},
[0x1F08B]={
category="so",
description="DOMINO TILE VERTICAL-05-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08B,
},
[0x1F08C]={
category="so",
description="DOMINO TILE VERTICAL-05-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08C,
},
[0x1F08D]={
category="so",
description="DOMINO TILE VERTICAL-06-00",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08D,
},
[0x1F08E]={
category="so",
description="DOMINO TILE VERTICAL-06-01",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08E,
},
[0x1F08F]={
category="so",
description="DOMINO TILE VERTICAL-06-02",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F08F,
},
[0x1F090]={
category="so",
description="DOMINO TILE VERTICAL-06-03",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F090,
},
[0x1F091]={
category="so",
description="DOMINO TILE VERTICAL-06-04",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F091,
},
[0x1F092]={
category="so",
description="DOMINO TILE VERTICAL-06-05",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F092,
},
[0x1F093]={
category="so",
description="DOMINO TILE VERTICAL-06-06",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F093,
},
[0x1F0A0]={
category="so",
description="PLAYING CARD BACK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A0,
},
[0x1F0A1]={
category="so",
description="PLAYING CARD ACE OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A1,
},
[0x1F0A2]={
category="so",
description="PLAYING CARD TWO OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A2,
},
[0x1F0A3]={
category="so",
description="PLAYING CARD THREE OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A3,
},
[0x1F0A4]={
category="so",
description="PLAYING CARD FOUR OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A4,
},
[0x1F0A5]={
category="so",
description="PLAYING CARD FIVE OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A5,
},
[0x1F0A6]={
category="so",
description="PLAYING CARD SIX OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A6,
},
[0x1F0A7]={
category="so",
description="PLAYING CARD SEVEN OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A7,
},
[0x1F0A8]={
category="so",
description="PLAYING CARD EIGHT OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A8,
},
[0x1F0A9]={
category="so",
description="PLAYING CARD NINE OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0A9,
},
[0x1F0AA]={
category="so",
description="PLAYING CARD TEN OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0AA,
},
[0x1F0AB]={
category="so",
description="PLAYING CARD JACK OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0AB,
},
[0x1F0AC]={
category="so",
description="PLAYING CARD KNIGHT OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0AC,
},
[0x1F0AD]={
category="so",
description="PLAYING CARD QUEEN OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0AD,
},
[0x1F0AE]={
category="so",
description="PLAYING CARD KING OF SPADES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0AE,
},
[0x1F0B1]={
category="so",
description="PLAYING CARD ACE OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B1,
},
[0x1F0B2]={
category="so",
description="PLAYING CARD TWO OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B2,
},
[0x1F0B3]={
category="so",
description="PLAYING CARD THREE OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B3,
},
[0x1F0B4]={
category="so",
description="PLAYING CARD FOUR OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B4,
},
[0x1F0B5]={
category="so",
description="PLAYING CARD FIVE OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B5,
},
[0x1F0B6]={
category="so",
description="PLAYING CARD SIX OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B6,
},
[0x1F0B7]={
category="so",
description="PLAYING CARD SEVEN OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B7,
},
[0x1F0B8]={
category="so",
description="PLAYING CARD EIGHT OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B8,
},
[0x1F0B9]={
category="so",
description="PLAYING CARD NINE OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0B9,
},
[0x1F0BA]={
category="so",
description="PLAYING CARD TEN OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0BA,
},
[0x1F0BB]={
category="so",
description="PLAYING CARD JACK OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0BB,
},
[0x1F0BC]={
category="so",
description="PLAYING CARD KNIGHT OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0BC,
},
[0x1F0BD]={
category="so",
description="PLAYING CARD QUEEN OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0BD,
},
[0x1F0BE]={
category="so",
description="PLAYING CARD KING OF HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0BE,
},
[0x1F0C1]={
category="so",
description="PLAYING CARD ACE OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C1,
},
[0x1F0C2]={
category="so",
description="PLAYING CARD TWO OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C2,
},
[0x1F0C3]={
category="so",
description="PLAYING CARD THREE OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C3,
},
[0x1F0C4]={
category="so",
description="PLAYING CARD FOUR OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C4,
},
[0x1F0C5]={
category="so",
description="PLAYING CARD FIVE OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C5,
},
[0x1F0C6]={
category="so",
description="PLAYING CARD SIX OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C6,
},
[0x1F0C7]={
category="so",
description="PLAYING CARD SEVEN OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C7,
},
[0x1F0C8]={
category="so",
description="PLAYING CARD EIGHT OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C8,
},
[0x1F0C9]={
category="so",
description="PLAYING CARD NINE OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0C9,
},
[0x1F0CA]={
category="so",
description="PLAYING CARD TEN OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CA,
},
[0x1F0CB]={
category="so",
description="PLAYING CARD JACK OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CB,
},
[0x1F0CC]={
category="so",
description="PLAYING CARD KNIGHT OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CC,
},
[0x1F0CD]={
category="so",
description="PLAYING CARD QUEEN OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CD,
},
[0x1F0CE]={
category="so",
description="PLAYING CARD KING OF DIAMONDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CE,
},
[0x1F0CF]={
category="so",
description="PLAYING CARD BLACK JOKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0CF,
},
[0x1F0D1]={
category="so",
description="PLAYING CARD ACE OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D1,
},
[0x1F0D2]={
category="so",
description="PLAYING CARD TWO OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D2,
},
[0x1F0D3]={
category="so",
description="PLAYING CARD THREE OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D3,
},
[0x1F0D4]={
category="so",
description="PLAYING CARD FOUR OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D4,
},
[0x1F0D5]={
category="so",
description="PLAYING CARD FIVE OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D5,
},
[0x1F0D6]={
category="so",
description="PLAYING CARD SIX OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D6,
},
[0x1F0D7]={
category="so",
description="PLAYING CARD SEVEN OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D7,
},
[0x1F0D8]={
category="so",
description="PLAYING CARD EIGHT OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D8,
},
[0x1F0D9]={
category="so",
description="PLAYING CARD NINE OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0D9,
},
[0x1F0DA]={
category="so",
description="PLAYING CARD TEN OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DA,
},
[0x1F0DB]={
category="so",
description="PLAYING CARD JACK OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DB,
},
[0x1F0DC]={
category="so",
description="PLAYING CARD KNIGHT OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DC,
},
[0x1F0DD]={
category="so",
description="PLAYING CARD QUEEN OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DD,
},
[0x1F0DE]={
category="so",
description="PLAYING CARD KING OF CLUBS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DE,
},
[0x1F0DF]={
category="so",
description="PLAYING CARD WHITE JOKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F0DF,
},
[0x1F100]={
@@ -173521,6 +179690,22 @@ characters.data={
linebreak="ai",
unicodeslot=0x1F169,
},
+ [0x1F16A]={
+ category="so",
+ description="RAISED MC SIGN",
+ direction="on",
+ linebreak="al",
+ specials={ "super", 0x004D, 0x0043 },
+ unicodeslot=0x1F16A,
+ },
+ [0x1F16B]={
+ category="so",
+ description="RAISED MD SIGN",
+ direction="on",
+ linebreak="al",
+ specials={ "super", 0x004D, 0x0044 },
+ unicodeslot=0x1F16B,
+ },
[0x1F170]={
category="so",
cjkwd="a",
@@ -173648,6 +179833,10 @@ characters.data={
direction="l",
linebreak="ai",
unicodeslot=0x1F17F,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x1F180]={
category="so",
@@ -173870,182 +180059,182 @@ characters.data={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER A",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1E6,
},
[0x1F1E7]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER B",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1E7,
},
[0x1F1E8]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER C",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1E8,
},
[0x1F1E9]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER D",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1E9,
},
[0x1F1EA]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER E",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1EA,
},
[0x1F1EB]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER F",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1EB,
},
[0x1F1EC]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER G",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1EC,
},
[0x1F1ED]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER H",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1ED,
},
[0x1F1EE]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER I",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1EE,
},
[0x1F1EF]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER J",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1EF,
},
[0x1F1F0]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER K",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F0,
},
[0x1F1F1]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER L",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F1,
},
[0x1F1F2]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER M",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F2,
},
[0x1F1F3]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER N",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F3,
},
[0x1F1F4]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER O",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F4,
},
[0x1F1F5]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER P",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F5,
},
[0x1F1F6]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER Q",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F6,
},
[0x1F1F7]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER R",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F7,
},
[0x1F1F8]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER S",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F8,
},
[0x1F1F9]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER T",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1F9,
},
[0x1F1FA]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER U",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FA,
},
[0x1F1FB]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER V",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FB,
},
[0x1F1FC]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER W",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FC,
},
[0x1F1FD]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER X",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FD,
},
[0x1F1FE]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER Y",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FE,
},
[0x1F1FF]={
category="so",
description="REGIONAL INDICATOR SYMBOL LETTER Z",
direction="l",
- linebreak="al",
+ linebreak="ri",
unicodeslot=0x1F1FF,
},
[0x1F200]={
@@ -174173,6 +180362,10 @@ characters.data={
linebreak="id",
specials={ "square", 0x7121 },
unicodeslot=0x1F21A,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x1F21B]={
category="so",
@@ -174362,6 +180555,10 @@ characters.data={
linebreak="id",
specials={ "square", 0x6307 },
unicodeslot=0x1F22F,
+ variants={
+ [0xFE0E]="text style",
+ [0xFE0F]="emoji style",
+ },
},
[0x1F230]={
category="so",
@@ -174565,1050 +180762,1050 @@ characters.data={
category="so",
description="CYCLONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F300,
},
[0x1F301]={
category="so",
description="FOGGY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F301,
},
[0x1F302]={
category="so",
description="CLOSED UMBRELLA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F302,
},
[0x1F303]={
category="so",
description="NIGHT WITH STARS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F303,
},
[0x1F304]={
category="so",
description="SUNRISE OVER MOUNTAINS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F304,
},
[0x1F305]={
category="so",
description="SUNRISE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F305,
},
[0x1F306]={
category="so",
description="CITYSCAPE AT DUSK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F306,
},
[0x1F307]={
category="so",
description="SUNSET OVER BUILDINGS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F307,
},
[0x1F308]={
category="so",
description="RAINBOW",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F308,
},
[0x1F309]={
category="so",
description="BRIDGE AT NIGHT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F309,
},
[0x1F30A]={
category="so",
description="WATER WAVE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30A,
},
[0x1F30B]={
category="so",
description="VOLCANO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30B,
},
[0x1F30C]={
category="so",
description="MILKY WAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30C,
},
[0x1F30D]={
category="so",
description="EARTH GLOBE EUROPE-AFRICA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30D,
},
[0x1F30E]={
category="so",
description="EARTH GLOBE AMERICAS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30E,
},
[0x1F30F]={
category="so",
description="EARTH GLOBE ASIA-AUSTRALIA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F30F,
},
[0x1F310]={
category="so",
description="GLOBE WITH MERIDIANS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F310,
},
[0x1F311]={
category="so",
description="NEW MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F311,
},
[0x1F312]={
category="so",
description="WAXING CRESCENT MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F312,
},
[0x1F313]={
category="so",
description="FIRST QUARTER MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F313,
},
[0x1F314]={
category="so",
description="WAXING GIBBOUS MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F314,
},
[0x1F315]={
category="so",
description="FULL MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F315,
},
[0x1F316]={
category="so",
description="WANING GIBBOUS MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F316,
},
[0x1F317]={
category="so",
description="LAST QUARTER MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F317,
},
[0x1F318]={
category="so",
description="WANING CRESCENT MOON SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F318,
},
[0x1F319]={
category="so",
description="CRESCENT MOON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F319,
},
[0x1F31A]={
category="so",
description="NEW MOON WITH FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31A,
},
[0x1F31B]={
category="so",
description="FIRST QUARTER MOON WITH FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31B,
},
[0x1F31C]={
category="so",
description="LAST QUARTER MOON WITH FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31C,
},
[0x1F31D]={
category="so",
description="FULL MOON WITH FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31D,
},
[0x1F31E]={
category="so",
description="SUN WITH FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31E,
},
[0x1F31F]={
category="so",
description="GLOWING STAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F31F,
},
[0x1F320]={
category="so",
description="SHOOTING STAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F320,
},
[0x1F330]={
category="so",
description="CHESTNUT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F330,
},
[0x1F331]={
category="so",
description="SEEDLING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F331,
},
[0x1F332]={
category="so",
description="EVERGREEN TREE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F332,
},
[0x1F333]={
category="so",
description="DECIDUOUS TREE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F333,
},
[0x1F334]={
category="so",
description="PALM TREE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F334,
},
[0x1F335]={
category="so",
description="CACTUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F335,
},
[0x1F337]={
category="so",
description="TULIP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F337,
},
[0x1F338]={
category="so",
description="CHERRY BLOSSOM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F338,
},
[0x1F339]={
category="so",
description="ROSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F339,
},
[0x1F33A]={
category="so",
description="HIBISCUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33A,
},
[0x1F33B]={
category="so",
description="SUNFLOWER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33B,
},
[0x1F33C]={
category="so",
description="BLOSSOM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33C,
},
[0x1F33D]={
category="so",
description="EAR OF MAIZE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33D,
},
[0x1F33E]={
category="so",
description="EAR OF RICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33E,
},
[0x1F33F]={
category="so",
description="HERB",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F33F,
},
[0x1F340]={
category="so",
description="FOUR LEAF CLOVER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F340,
},
[0x1F341]={
category="so",
description="MAPLE LEAF",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F341,
},
[0x1F342]={
category="so",
description="FALLEN LEAF",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F342,
},
[0x1F343]={
category="so",
description="LEAF FLUTTERING IN WIND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F343,
},
[0x1F344]={
category="so",
description="MUSHROOM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F344,
},
[0x1F345]={
category="so",
description="TOMATO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F345,
},
[0x1F346]={
category="so",
description="AUBERGINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F346,
},
[0x1F347]={
category="so",
description="GRAPES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F347,
},
[0x1F348]={
category="so",
description="MELON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F348,
},
[0x1F349]={
category="so",
description="WATERMELON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F349,
},
[0x1F34A]={
category="so",
description="TANGERINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34A,
},
[0x1F34B]={
category="so",
description="LEMON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34B,
},
[0x1F34C]={
category="so",
description="BANANA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34C,
},
[0x1F34D]={
category="so",
description="PINEAPPLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34D,
},
[0x1F34E]={
category="so",
description="RED APPLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34E,
},
[0x1F34F]={
category="so",
description="GREEN APPLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F34F,
},
[0x1F350]={
category="so",
description="PEAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F350,
},
[0x1F351]={
category="so",
description="PEACH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F351,
},
[0x1F352]={
category="so",
description="CHERRIES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F352,
},
[0x1F353]={
category="so",
description="STRAWBERRY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F353,
},
[0x1F354]={
category="so",
description="HAMBURGER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F354,
},
[0x1F355]={
category="so",
description="SLICE OF PIZZA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F355,
},
[0x1F356]={
category="so",
description="MEAT ON BONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F356,
},
[0x1F357]={
category="so",
description="POULTRY LEG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F357,
},
[0x1F358]={
category="so",
description="RICE CRACKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F358,
},
[0x1F359]={
category="so",
description="RICE BALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F359,
},
[0x1F35A]={
category="so",
description="COOKED RICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35A,
},
[0x1F35B]={
category="so",
description="CURRY AND RICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35B,
},
[0x1F35C]={
category="so",
description="STEAMING BOWL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35C,
},
[0x1F35D]={
category="so",
description="SPAGHETTI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35D,
},
[0x1F35E]={
category="so",
description="BREAD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35E,
},
[0x1F35F]={
category="so",
description="FRENCH FRIES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F35F,
},
[0x1F360]={
category="so",
description="ROASTED SWEET POTATO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F360,
},
[0x1F361]={
category="so",
description="DANGO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F361,
},
[0x1F362]={
category="so",
description="ODEN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F362,
},
[0x1F363]={
category="so",
description="SUSHI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F363,
},
[0x1F364]={
category="so",
description="FRIED SHRIMP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F364,
},
[0x1F365]={
category="so",
description="FISH CAKE WITH SWIRL DESIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F365,
},
[0x1F366]={
category="so",
description="SOFT ICE CREAM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F366,
},
[0x1F367]={
category="so",
description="SHAVED ICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F367,
},
[0x1F368]={
category="so",
description="ICE CREAM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F368,
},
[0x1F369]={
category="so",
description="DOUGHNUT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F369,
},
[0x1F36A]={
category="so",
description="COOKIE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36A,
},
[0x1F36B]={
category="so",
description="CHOCOLATE BAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36B,
},
[0x1F36C]={
category="so",
description="CANDY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36C,
},
[0x1F36D]={
category="so",
description="LOLLIPOP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36D,
},
[0x1F36E]={
category="so",
description="CUSTARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36E,
},
[0x1F36F]={
category="so",
description="HONEY POT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F36F,
},
[0x1F370]={
category="so",
description="SHORTCAKE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F370,
},
[0x1F371]={
category="so",
description="BENTO BOX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F371,
},
[0x1F372]={
category="so",
description="POT OF FOOD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F372,
},
[0x1F373]={
category="so",
description="COOKING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F373,
},
[0x1F374]={
category="so",
description="FORK AND KNIFE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F374,
},
[0x1F375]={
category="so",
description="TEACUP WITHOUT HANDLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F375,
},
[0x1F376]={
category="so",
description="SAKE BOTTLE AND CUP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F376,
},
[0x1F377]={
category="so",
description="WINE GLASS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F377,
},
[0x1F378]={
category="so",
description="COCKTAIL GLASS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F378,
},
[0x1F379]={
category="so",
description="TROPICAL DRINK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F379,
},
[0x1F37A]={
category="so",
description="BEER MUG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F37A,
},
[0x1F37B]={
category="so",
description="CLINKING BEER MUGS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F37B,
},
[0x1F37C]={
category="so",
description="BABY BOTTLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F37C,
},
[0x1F380]={
category="so",
description="RIBBON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F380,
},
[0x1F381]={
category="so",
description="WRAPPED PRESENT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F381,
},
[0x1F382]={
category="so",
description="BIRTHDAY CAKE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F382,
},
[0x1F383]={
category="so",
description="JACK-O-LANTERN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F383,
},
[0x1F384]={
category="so",
description="CHRISTMAS TREE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F384,
},
[0x1F385]={
category="so",
description="FATHER CHRISTMAS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F385,
},
[0x1F386]={
category="so",
description="FIREWORKS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F386,
},
[0x1F387]={
category="so",
description="FIREWORK SPARKLER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F387,
},
[0x1F388]={
category="so",
description="BALLOON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F388,
},
[0x1F389]={
category="so",
description="PARTY POPPER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F389,
},
[0x1F38A]={
category="so",
description="CONFETTI BALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38A,
},
[0x1F38B]={
category="so",
description="TANABATA TREE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38B,
},
[0x1F38C]={
category="so",
description="CROSSED FLAGS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38C,
},
[0x1F38D]={
category="so",
description="PINE DECORATION",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38D,
},
[0x1F38E]={
category="so",
description="JAPANESE DOLLS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38E,
},
[0x1F38F]={
category="so",
description="CARP STREAMER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F38F,
},
[0x1F390]={
category="so",
description="WIND CHIME",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F390,
},
[0x1F391]={
category="so",
description="MOON VIEWING CEREMONY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F391,
},
[0x1F392]={
category="so",
description="SCHOOL SATCHEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F392,
},
[0x1F393]={
category="so",
description="GRADUATION CAP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F393,
},
[0x1F3A0]={
category="so",
description="CAROUSEL HORSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A0,
},
[0x1F3A1]={
category="so",
description="FERRIS WHEEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A1,
},
[0x1F3A2]={
category="so",
description="ROLLER COASTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A2,
},
[0x1F3A3]={
category="so",
description="FISHING POLE AND FISH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A3,
},
[0x1F3A4]={
category="so",
description="MICROPHONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A4,
},
[0x1F3A5]={
category="so",
description="MOVIE CAMERA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A5,
},
[0x1F3A6]={
category="so",
description="CINEMA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A6,
},
[0x1F3A7]={
category="so",
description="HEADPHONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A7,
},
[0x1F3A8]={
category="so",
description="ARTIST PALETTE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A8,
},
[0x1F3A9]={
category="so",
description="TOP HAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3A9,
},
[0x1F3AA]={
category="so",
description="CIRCUS TENT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AA,
},
[0x1F3AB]={
category="so",
description="TICKET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AB,
},
[0x1F3AC]={
category="so",
description="CLAPPER BOARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AC,
},
[0x1F3AD]={
category="so",
description="PERFORMING ARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AD,
},
[0x1F3AE]={
category="so",
description="VIDEO GAME",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AE,
},
[0x1F3AF]={
category="so",
description="DIRECT HIT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3AF,
},
[0x1F3B0]={
category="so",
description="SLOT MACHINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B0,
},
[0x1F3B1]={
category="so",
description="BILLIARDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B1,
},
[0x1F3B2]={
category="so",
description="GAME DIE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B2,
},
[0x1F3B3]={
category="so",
description="BOWLING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B3,
},
[0x1F3B4]={
category="so",
description="FLOWER PLAYING CARDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B4,
},
[0x1F3B5]={
@@ -175629,35 +181826,35 @@ characters.data={
category="so",
description="SAXOPHONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B7,
},
[0x1F3B8]={
category="so",
description="GUITAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B8,
},
[0x1F3B9]={
category="so",
description="MUSICAL KEYBOARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3B9,
},
[0x1F3BA]={
category="so",
description="TRUMPET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3BA,
},
[0x1F3BB]={
category="so",
description="VIOLIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3BB,
},
[0x1F3BC]={
@@ -175671,1316 +181868,1316 @@ characters.data={
category="so",
description="RUNNING SHIRT WITH SASH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3BD,
},
[0x1F3BE]={
category="so",
description="TENNIS RACQUET AND BALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3BE,
},
[0x1F3BF]={
category="so",
description="SKI AND SKI BOOT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3BF,
},
[0x1F3C0]={
category="so",
description="BASKETBALL AND HOOP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C0,
},
[0x1F3C1]={
category="so",
description="CHEQUERED FLAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C1,
},
[0x1F3C2]={
category="so",
description="SNOWBOARDER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C2,
},
[0x1F3C3]={
category="so",
description="RUNNER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C3,
},
[0x1F3C4]={
category="so",
description="SURFER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C4,
},
[0x1F3C6]={
category="so",
description="TROPHY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C6,
},
[0x1F3C7]={
category="so",
description="HORSE RACING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C7,
},
[0x1F3C8]={
category="so",
description="AMERICAN FOOTBALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C8,
},
[0x1F3C9]={
category="so",
description="RUGBY FOOTBALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3C9,
},
[0x1F3CA]={
category="so",
description="SWIMMER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3CA,
},
[0x1F3E0]={
category="so",
description="HOUSE BUILDING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E0,
},
[0x1F3E1]={
category="so",
description="HOUSE WITH GARDEN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E1,
},
[0x1F3E2]={
category="so",
description="OFFICE BUILDING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E2,
},
[0x1F3E3]={
category="so",
description="JAPANESE POST OFFICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E3,
},
[0x1F3E4]={
category="so",
description="EUROPEAN POST OFFICE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E4,
},
[0x1F3E5]={
category="so",
description="HOSPITAL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E5,
},
[0x1F3E6]={
category="so",
description="BANK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E6,
},
[0x1F3E7]={
category="so",
description="AUTOMATED TELLER MACHINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E7,
},
[0x1F3E8]={
category="so",
description="HOTEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E8,
},
[0x1F3E9]={
category="so",
description="LOVE HOTEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3E9,
},
[0x1F3EA]={
category="so",
description="CONVENIENCE STORE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3EA,
},
[0x1F3EB]={
category="so",
description="SCHOOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3EB,
},
[0x1F3EC]={
category="so",
description="DEPARTMENT STORE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3EC,
},
[0x1F3ED]={
category="so",
description="FACTORY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3ED,
},
[0x1F3EE]={
category="so",
description="IZAKAYA LANTERN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3EE,
},
[0x1F3EF]={
category="so",
description="JAPANESE CASTLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3EF,
},
[0x1F3F0]={
category="so",
description="EUROPEAN CASTLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F3F0,
},
[0x1F400]={
category="so",
description="RAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F400,
},
[0x1F401]={
category="so",
description="MOUSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F401,
},
[0x1F402]={
category="so",
description="OX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F402,
},
[0x1F403]={
category="so",
description="WATER BUFFALO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F403,
},
[0x1F404]={
category="so",
description="COW",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F404,
},
[0x1F405]={
category="so",
description="TIGER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F405,
},
[0x1F406]={
category="so",
description="LEOPARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F406,
},
[0x1F407]={
category="so",
description="RABBIT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F407,
},
[0x1F408]={
category="so",
description="CAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F408,
},
[0x1F409]={
category="so",
description="DRAGON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F409,
},
[0x1F40A]={
category="so",
description="CROCODILE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40A,
},
[0x1F40B]={
category="so",
description="WHALE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40B,
},
[0x1F40C]={
category="so",
description="SNAIL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40C,
},
[0x1F40D]={
category="so",
description="SNAKE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40D,
},
[0x1F40E]={
category="so",
description="HORSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40E,
},
[0x1F40F]={
category="so",
description="RAM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F40F,
},
[0x1F410]={
category="so",
description="GOAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F410,
},
[0x1F411]={
category="so",
description="SHEEP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F411,
},
[0x1F412]={
category="so",
description="MONKEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F412,
},
[0x1F413]={
category="so",
description="ROOSTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F413,
},
[0x1F414]={
category="so",
description="CHICKEN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F414,
},
[0x1F415]={
category="so",
description="DOG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F415,
},
[0x1F416]={
category="so",
description="PIG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F416,
},
[0x1F417]={
category="so",
description="BOAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F417,
},
[0x1F418]={
category="so",
description="ELEPHANT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F418,
},
[0x1F419]={
category="so",
description="OCTOPUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F419,
},
[0x1F41A]={
category="so",
description="SPIRAL SHELL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41A,
},
[0x1F41B]={
category="so",
description="BUG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41B,
},
[0x1F41C]={
category="so",
description="ANT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41C,
},
[0x1F41D]={
category="so",
description="HONEYBEE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41D,
},
[0x1F41E]={
category="so",
description="LADY BEETLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41E,
},
[0x1F41F]={
category="so",
description="FISH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F41F,
},
[0x1F420]={
category="so",
description="TROPICAL FISH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F420,
},
[0x1F421]={
category="so",
description="BLOWFISH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F421,
},
[0x1F422]={
category="so",
description="TURTLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F422,
},
[0x1F423]={
category="so",
description="HATCHING CHICK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F423,
},
[0x1F424]={
category="so",
description="BABY CHICK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F424,
},
[0x1F425]={
category="so",
description="FRONT-FACING BABY CHICK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F425,
},
[0x1F426]={
category="so",
description="BIRD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F426,
},
[0x1F427]={
category="so",
description="PENGUIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F427,
},
[0x1F428]={
category="so",
description="KOALA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F428,
},
[0x1F429]={
category="so",
description="POODLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F429,
},
[0x1F42A]={
category="so",
description="DROMEDARY CAMEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42A,
},
[0x1F42B]={
category="so",
description="BACTRIAN CAMEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42B,
},
[0x1F42C]={
category="so",
description="DOLPHIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42C,
},
[0x1F42D]={
category="so",
description="MOUSE FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42D,
},
[0x1F42E]={
category="so",
description="COW FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42E,
},
[0x1F42F]={
category="so",
description="TIGER FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F42F,
},
[0x1F430]={
category="so",
description="RABBIT FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F430,
},
[0x1F431]={
category="so",
description="CAT FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F431,
},
[0x1F432]={
category="so",
description="DRAGON FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F432,
},
[0x1F433]={
category="so",
description="SPOUTING WHALE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F433,
},
[0x1F434]={
category="so",
description="HORSE FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F434,
},
[0x1F435]={
category="so",
description="MONKEY FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F435,
},
[0x1F436]={
category="so",
description="DOG FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F436,
},
[0x1F437]={
category="so",
description="PIG FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F437,
},
[0x1F438]={
category="so",
description="FROG FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F438,
},
[0x1F439]={
category="so",
description="HAMSTER FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F439,
},
[0x1F43A]={
category="so",
description="WOLF FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F43A,
},
[0x1F43B]={
category="so",
description="BEAR FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F43B,
},
[0x1F43C]={
category="so",
description="PANDA FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F43C,
},
[0x1F43D]={
category="so",
description="PIG NOSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F43D,
},
[0x1F43E]={
category="so",
description="PAW PRINTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F43E,
},
[0x1F440]={
category="so",
description="EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F440,
},
[0x1F442]={
category="so",
description="EAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F442,
},
[0x1F443]={
category="so",
description="NOSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F443,
},
[0x1F444]={
category="so",
description="MOUTH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F444,
},
[0x1F445]={
category="so",
description="TONGUE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F445,
},
[0x1F446]={
category="so",
description="WHITE UP POINTING BACKHAND INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F446,
},
[0x1F447]={
category="so",
description="WHITE DOWN POINTING BACKHAND INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F447,
},
[0x1F448]={
category="so",
description="WHITE LEFT POINTING BACKHAND INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F448,
},
[0x1F449]={
category="so",
description="WHITE RIGHT POINTING BACKHAND INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F449,
},
[0x1F44A]={
category="so",
description="FISTED HAND SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44A,
},
[0x1F44B]={
category="so",
description="WAVING HAND SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44B,
},
[0x1F44C]={
category="so",
description="OK HAND SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44C,
},
[0x1F44D]={
category="so",
description="THUMBS UP SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44D,
},
[0x1F44E]={
category="so",
description="THUMBS DOWN SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44E,
},
[0x1F44F]={
category="so",
description="CLAPPING HANDS SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F44F,
},
[0x1F450]={
category="so",
description="OPEN HANDS SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F450,
},
[0x1F451]={
category="so",
description="CROWN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F451,
},
[0x1F452]={
category="so",
description="WOMANS HAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F452,
},
[0x1F453]={
category="so",
description="EYEGLASSES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F453,
},
[0x1F454]={
category="so",
description="NECKTIE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F454,
},
[0x1F455]={
category="so",
description="T-SHIRT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F455,
},
[0x1F456]={
category="so",
description="JEANS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F456,
},
[0x1F457]={
category="so",
description="DRESS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F457,
},
[0x1F458]={
category="so",
description="KIMONO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F458,
},
[0x1F459]={
category="so",
description="BIKINI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F459,
},
[0x1F45A]={
category="so",
description="WOMANS CLOTHES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45A,
},
[0x1F45B]={
category="so",
description="PURSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45B,
},
[0x1F45C]={
category="so",
description="HANDBAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45C,
},
[0x1F45D]={
category="so",
description="POUCH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45D,
},
[0x1F45E]={
category="so",
description="MANS SHOE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45E,
},
[0x1F45F]={
category="so",
description="ATHLETIC SHOE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F45F,
},
[0x1F460]={
category="so",
description="HIGH-HEELED SHOE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F460,
},
[0x1F461]={
category="so",
description="WOMANS SANDAL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F461,
},
[0x1F462]={
category="so",
description="WOMANS BOOTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F462,
},
[0x1F463]={
category="so",
description="FOOTPRINTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F463,
},
[0x1F464]={
category="so",
description="BUST IN SILHOUETTE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F464,
},
[0x1F465]={
category="so",
description="BUSTS IN SILHOUETTE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F465,
},
[0x1F466]={
category="so",
description="BOY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F466,
},
[0x1F467]={
category="so",
description="GIRL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F467,
},
[0x1F468]={
category="so",
description="MAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F468,
},
[0x1F469]={
category="so",
description="WOMAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F469,
},
[0x1F46A]={
category="so",
description="FAMILY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46A,
},
[0x1F46B]={
category="so",
description="MAN AND WOMAN HOLDING HANDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46B,
},
[0x1F46C]={
category="so",
description="TWO MEN HOLDING HANDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46C,
},
[0x1F46D]={
category="so",
description="TWO WOMEN HOLDING HANDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46D,
},
[0x1F46E]={
category="so",
description="POLICE OFFICER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46E,
},
[0x1F46F]={
category="so",
description="WOMAN WITH BUNNY EARS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F46F,
},
[0x1F470]={
category="so",
description="BRIDE WITH VEIL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F470,
},
[0x1F471]={
category="so",
description="PERSON WITH BLOND HAIR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F471,
},
[0x1F472]={
category="so",
description="MAN WITH GUA PI MAO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F472,
},
[0x1F473]={
category="so",
description="MAN WITH TURBAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F473,
},
[0x1F474]={
category="so",
description="OLDER MAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F474,
},
[0x1F475]={
category="so",
description="OLDER WOMAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F475,
},
[0x1F476]={
category="so",
description="BABY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F476,
},
[0x1F477]={
category="so",
description="CONSTRUCTION WORKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F477,
},
[0x1F478]={
category="so",
description="PRINCESS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F478,
},
[0x1F479]={
category="so",
description="JAPANESE OGRE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F479,
},
[0x1F47A]={
category="so",
description="JAPANESE GOBLIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47A,
},
[0x1F47B]={
category="so",
description="GHOST",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47B,
},
[0x1F47C]={
category="so",
description="BABY ANGEL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47C,
},
[0x1F47D]={
category="so",
description="EXTRATERRESTRIAL ALIEN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47D,
},
[0x1F47E]={
category="so",
description="ALIEN MONSTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47E,
},
[0x1F47F]={
category="so",
description="IMP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F47F,
},
[0x1F480]={
category="so",
description="SKULL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F480,
},
[0x1F481]={
category="so",
description="INFORMATION DESK PERSON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F481,
},
[0x1F482]={
category="so",
description="GUARDSMAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F482,
},
[0x1F483]={
category="so",
description="DANCER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F483,
},
[0x1F484]={
category="so",
description="LIPSTICK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F484,
},
[0x1F485]={
category="so",
description="NAIL POLISH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F485,
},
[0x1F486]={
category="so",
description="FACE MASSAGE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F486,
},
[0x1F487]={
category="so",
description="HAIRCUT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F487,
},
[0x1F488]={
category="so",
description="BARBER POLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F488,
},
[0x1F489]={
category="so",
description="SYRINGE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F489,
},
[0x1F48A]={
category="so",
description="PILL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F48A,
},
[0x1F48B]={
category="so",
description="KISS MARK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F48B,
},
[0x1F48C]={
category="so",
description="LOVE LETTER",
- direction="l",
- linebreak="al",
+ direction="on",
+ linebreak="id",
unicodeslot=0x1F48C,
},
[0x1F48D]={
category="so",
description="RING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F48D,
},
[0x1F48E]={
category="so",
description="GEM STONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F48E,
},
[0x1F48F]={
category="so",
description="KISS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F48F,
},
[0x1F490]={
category="so",
description="BOUQUET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F490,
},
[0x1F491]={
category="so",
description="COUPLE WITH HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F491,
},
[0x1F492]={
category="so",
description="WEDDING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F492,
},
[0x1F493]={
category="so",
description="BEATING HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F493,
},
[0x1F494]={
category="so",
description="BROKEN HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F494,
},
[0x1F495]={
category="so",
description="TWO HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F495,
},
[0x1F496]={
category="so",
description="SPARKLING HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F496,
},
[0x1F497]={
category="so",
description="GROWING HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F497,
},
[0x1F498]={
category="so",
description="HEART WITH ARROW",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F498,
},
[0x1F499]={
category="so",
description="BLUE HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F499,
},
[0x1F49A]={
category="so",
description="GREEN HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49A,
},
[0x1F49B]={
category="so",
description="YELLOW HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49B,
},
[0x1F49C]={
category="so",
description="PURPLE HEART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49C,
},
[0x1F49D]={
category="so",
description="HEART WITH RIBBON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49D,
},
[0x1F49E]={
category="so",
description="REVOLVING HEARTS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49E,
},
[0x1F49F]={
category="so",
description="HEART DECORATION",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F49F,
},
[0x1F4A0]={
@@ -176994,7 +183191,7 @@ characters.data={
category="so",
description="ELECTRIC LIGHT BULB",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A1,
},
[0x1F4A2]={
@@ -177008,7 +183205,7 @@ characters.data={
category="so",
description="BOMB",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A3,
},
[0x1F4A4]={
@@ -177022,70 +183219,70 @@ characters.data={
category="so",
description="COLLISION SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A5,
},
[0x1F4A6]={
category="so",
description="SPLASHING SWEAT SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A6,
},
[0x1F4A7]={
category="so",
description="DROPLET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A7,
},
[0x1F4A8]={
category="so",
description="DASH SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A8,
},
[0x1F4A9]={
category="so",
description="PILE OF POO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4A9,
},
[0x1F4AA]={
category="so",
description="FLEXED BICEPS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4AA,
},
[0x1F4AB]={
category="so",
description="DIZZY SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4AB,
},
[0x1F4AC]={
category="so",
description="SPEECH BALLOON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4AC,
},
[0x1F4AD]={
category="so",
description="THOUGHT BALLOON",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4AD,
},
[0x1F4AE]={
category="so",
description="WHITE FLOWER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4AE,
},
[0x1F4AF]={
@@ -177099,7 +183296,7 @@ characters.data={
category="so",
description="MONEY BAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B0,
},
[0x1F4B1]={
@@ -177120,511 +183317,511 @@ characters.data={
category="so",
description="CREDIT CARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B3,
},
[0x1F4B4]={
category="so",
description="BANKNOTE WITH YEN SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B4,
},
[0x1F4B5]={
category="so",
description="BANKNOTE WITH DOLLAR SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B5,
},
[0x1F4B6]={
category="so",
description="BANKNOTE WITH EURO SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B6,
},
[0x1F4B7]={
category="so",
description="BANKNOTE WITH POUND SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B7,
},
[0x1F4B8]={
category="so",
description="MONEY WITH WINGS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B8,
},
[0x1F4B9]={
category="so",
description="CHART WITH UPWARDS TREND AND YEN SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4B9,
},
[0x1F4BA]={
category="so",
description="SEAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BA,
},
[0x1F4BB]={
category="so",
description="PERSONAL COMPUTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BB,
},
[0x1F4BC]={
category="so",
description="BRIEFCASE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BC,
},
[0x1F4BD]={
category="so",
description="MINIDISC",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BD,
},
[0x1F4BE]={
category="so",
description="FLOPPY DISK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BE,
},
[0x1F4BF]={
category="so",
description="OPTICAL DISC",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4BF,
},
[0x1F4C0]={
category="so",
description="DVD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C0,
},
[0x1F4C1]={
category="so",
description="FILE FOLDER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C1,
},
[0x1F4C2]={
category="so",
description="OPEN FILE FOLDER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C2,
},
[0x1F4C3]={
category="so",
description="PAGE WITH CURL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C3,
},
[0x1F4C4]={
category="so",
description="PAGE FACING UP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C4,
},
[0x1F4C5]={
category="so",
description="CALENDAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C5,
},
[0x1F4C6]={
category="so",
description="TEAR-OFF CALENDAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C6,
},
[0x1F4C7]={
category="so",
description="CARD INDEX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C7,
},
[0x1F4C8]={
category="so",
description="CHART WITH UPWARDS TREND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C8,
},
[0x1F4C9]={
category="so",
description="CHART WITH DOWNWARDS TREND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4C9,
},
[0x1F4CA]={
category="so",
description="BAR CHART",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CA,
},
[0x1F4CB]={
category="so",
description="CLIPBOARD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CB,
},
[0x1F4CC]={
category="so",
description="PUSHPIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CC,
},
[0x1F4CD]={
category="so",
description="ROUND PUSHPIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CD,
},
[0x1F4CE]={
category="so",
description="PAPERCLIP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CE,
},
[0x1F4CF]={
category="so",
description="STRAIGHT RULER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4CF,
},
[0x1F4D0]={
category="so",
description="TRIANGULAR RULER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D0,
},
[0x1F4D1]={
category="so",
description="BOOKMARK TABS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D1,
},
[0x1F4D2]={
category="so",
description="LEDGER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D2,
},
[0x1F4D3]={
category="so",
description="NOTEBOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D3,
},
[0x1F4D4]={
category="so",
description="NOTEBOOK WITH DECORATIVE COVER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D4,
},
[0x1F4D5]={
category="so",
description="CLOSED BOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D5,
},
[0x1F4D6]={
category="so",
description="OPEN BOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D6,
},
[0x1F4D7]={
category="so",
description="GREEN BOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D7,
},
[0x1F4D8]={
category="so",
description="BLUE BOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D8,
},
[0x1F4D9]={
category="so",
description="ORANGE BOOK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4D9,
},
[0x1F4DA]={
category="so",
description="BOOKS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DA,
},
[0x1F4DB]={
category="so",
description="NAME BADGE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DB,
},
[0x1F4DC]={
category="so",
description="SCROLL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DC,
},
[0x1F4DD]={
category="so",
description="MEMO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DD,
},
[0x1F4DE]={
category="so",
description="TELEPHONE RECEIVER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DE,
},
[0x1F4DF]={
category="so",
description="PAGER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4DF,
},
[0x1F4E0]={
category="so",
description="FAX MACHINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E0,
},
[0x1F4E1]={
category="so",
description="SATELLITE ANTENNA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E1,
},
[0x1F4E2]={
category="so",
description="PUBLIC ADDRESS LOUDSPEAKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E2,
},
[0x1F4E3]={
category="so",
description="CHEERING MEGAPHONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E3,
},
[0x1F4E4]={
category="so",
description="OUTBOX TRAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E4,
},
[0x1F4E5]={
category="so",
description="INBOX TRAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E5,
},
[0x1F4E6]={
category="so",
description="PACKAGE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E6,
},
[0x1F4E7]={
category="so",
description="E-MAIL SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E7,
},
[0x1F4E8]={
category="so",
description="INCOMING ENVELOPE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E8,
},
[0x1F4E9]={
category="so",
description="ENVELOPE WITH DOWNWARDS ARROW ABOVE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4E9,
},
[0x1F4EA]={
category="so",
description="CLOSED MAILBOX WITH LOWERED FLAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4EA,
},
[0x1F4EB]={
category="so",
description="CLOSED MAILBOX WITH RAISED FLAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4EB,
},
[0x1F4EC]={
category="so",
description="OPEN MAILBOX WITH RAISED FLAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4EC,
},
[0x1F4ED]={
category="so",
description="OPEN MAILBOX WITH LOWERED FLAG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4ED,
},
[0x1F4EE]={
category="so",
description="POSTBOX",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4EE,
},
[0x1F4EF]={
category="so",
description="POSTAL HORN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4EF,
},
[0x1F4F0]={
category="so",
description="NEWSPAPER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F0,
},
[0x1F4F1]={
category="so",
description="MOBILE PHONE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F1,
},
[0x1F4F2]={
category="so",
description="MOBILE PHONE WITH RIGHTWARDS ARROW AT LEFT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F2,
},
[0x1F4F3]={
category="so",
description="VIBRATION MODE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F3,
},
[0x1F4F4]={
category="so",
description="MOBILE PHONE OFF",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F4,
},
[0x1F4F5]={
category="so",
description="NO MOBILE PHONES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F5,
},
[0x1F4F6]={
category="so",
description="ANTENNA WITH BARS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F6,
},
[0x1F4F7]={
category="so",
description="CAMERA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F7,
},
[0x1F4F9]={
category="so",
description="VIDEO CAMERA",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4F9,
},
[0x1F4FA]={
category="so",
description="TELEVISION",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4FA,
},
[0x1F4FB]={
category="so",
description="RADIO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4FB,
},
[0x1F4FC]={
category="so",
description="VIDEOCASSETTE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F4FC,
},
[0x1F500]={
@@ -177680,112 +183877,112 @@ characters.data={
category="so",
description="SPEAKER WITH CANCELLATION STROKE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F507,
},
[0x1F508]={
category="so",
description="SPEAKER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F508,
},
[0x1F509]={
category="so",
description="SPEAKER WITH ONE SOUND WAVE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F509,
},
[0x1F50A]={
category="so",
description="SPEAKER WITH THREE SOUND WAVES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50A,
},
[0x1F50B]={
category="so",
description="BATTERY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50B,
},
[0x1F50C]={
category="so",
description="ELECTRIC PLUG",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50C,
},
[0x1F50D]={
category="so",
description="LEFT-POINTING MAGNIFYING GLASS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50D,
},
[0x1F50E]={
category="so",
description="RIGHT-POINTING MAGNIFYING GLASS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50E,
},
[0x1F50F]={
category="so",
description="LOCK WITH INK PEN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F50F,
},
[0x1F510]={
category="so",
description="CLOSED LOCK WITH KEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F510,
},
[0x1F511]={
category="so",
description="KEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F511,
},
[0x1F512]={
category="so",
description="LOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F512,
},
[0x1F513]={
category="so",
description="OPEN LOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F513,
},
[0x1F514]={
category="so",
description="BELL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F514,
},
[0x1F515]={
category="so",
description="BELL WITH CANCELLATION STROKE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F515,
},
[0x1F516]={
category="so",
description="BOOKMARK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F516,
},
[0x1F517]={
@@ -177882,7 +184079,7 @@ characters.data={
[0x1F524]={
category="so",
description="INPUT SYMBOL FOR LATIN LETTERS",
- direction="l",
+ direction="on",
linebreak="al",
unicodeslot=0x1F524,
},
@@ -177890,91 +184087,91 @@ characters.data={
category="so",
description="FIRE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F525,
},
[0x1F526]={
category="so",
description="ELECTRIC TORCH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F526,
},
[0x1F527]={
category="so",
description="WRENCH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F527,
},
[0x1F528]={
category="so",
description="HAMMER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F528,
},
[0x1F529]={
category="so",
description="NUT AND BOLT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F529,
},
[0x1F52A]={
category="so",
description="HOCHO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52A,
},
[0x1F52B]={
category="so",
description="PISTOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52B,
},
[0x1F52C]={
category="so",
description="MICROSCOPE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52C,
},
[0x1F52D]={
category="so",
description="TELESCOPE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52D,
},
[0x1F52E]={
category="so",
description="CRYSTAL BALL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52E,
},
[0x1F52F]={
category="so",
description="SIX POINTED STAR WITH MIDDLE DOT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F52F,
},
[0x1F530]={
category="so",
description="JAPANESE SYMBOL FOR BEGINNER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F530,
},
[0x1F531]={
category="so",
description="TRIDENT EMBLEM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F531,
},
[0x1F532]={
@@ -178061,1138 +184258,1257 @@ characters.data={
linebreak="al",
unicodeslot=0x1F53D,
},
+ [0x1F540]={
+ category="so",
+ description="CIRCLED CROSS POMMEE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F540,
+ },
+ [0x1F541]={
+ category="so",
+ description="CROSS POMMEE WITH HALF-CIRCLE BELOW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F541,
+ },
+ [0x1F542]={
+ category="so",
+ description="CROSS POMMEE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F542,
+ },
+ [0x1F543]={
+ category="so",
+ description="NOTCHED LEFT SEMICIRCLE WITH THREE DOTS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F543,
+ },
[0x1F550]={
category="so",
description="CLOCK FACE ONE OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F550,
},
[0x1F551]={
category="so",
description="CLOCK FACE TWO OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F551,
},
[0x1F552]={
category="so",
description="CLOCK FACE THREE OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F552,
},
[0x1F553]={
category="so",
description="CLOCK FACE FOUR OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F553,
},
[0x1F554]={
category="so",
description="CLOCK FACE FIVE OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F554,
},
[0x1F555]={
category="so",
description="CLOCK FACE SIX OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F555,
},
[0x1F556]={
category="so",
description="CLOCK FACE SEVEN OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F556,
},
[0x1F557]={
category="so",
description="CLOCK FACE EIGHT OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F557,
},
[0x1F558]={
category="so",
description="CLOCK FACE NINE OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F558,
},
[0x1F559]={
category="so",
description="CLOCK FACE TEN OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F559,
},
[0x1F55A]={
category="so",
description="CLOCK FACE ELEVEN OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55A,
},
[0x1F55B]={
category="so",
description="CLOCK FACE TWELVE OCLOCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55B,
},
[0x1F55C]={
category="so",
description="CLOCK FACE ONE-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55C,
},
[0x1F55D]={
category="so",
description="CLOCK FACE TWO-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55D,
},
[0x1F55E]={
category="so",
description="CLOCK FACE THREE-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55E,
},
[0x1F55F]={
category="so",
description="CLOCK FACE FOUR-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F55F,
},
[0x1F560]={
category="so",
description="CLOCK FACE FIVE-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F560,
},
[0x1F561]={
category="so",
description="CLOCK FACE SIX-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F561,
},
[0x1F562]={
category="so",
description="CLOCK FACE SEVEN-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F562,
},
[0x1F563]={
category="so",
description="CLOCK FACE EIGHT-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F563,
},
[0x1F564]={
category="so",
description="CLOCK FACE NINE-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F564,
},
[0x1F565]={
category="so",
description="CLOCK FACE TEN-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F565,
},
[0x1F566]={
category="so",
description="CLOCK FACE ELEVEN-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F566,
},
[0x1F567]={
category="so",
description="CLOCK FACE TWELVE-THIRTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F567,
},
[0x1F5FB]={
category="so",
description="MOUNT FUJI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F5FB,
},
[0x1F5FC]={
category="so",
description="TOKYO TOWER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F5FC,
},
[0x1F5FD]={
category="so",
description="STATUE OF LIBERTY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F5FD,
},
[0x1F5FE]={
category="so",
description="SILHOUETTE OF JAPAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F5FE,
},
[0x1F5FF]={
category="so",
description="MOYAI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F5FF,
},
+ [0x1F600]={
+ category="so",
+ description="GRINNING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F600,
+ },
[0x1F601]={
category="so",
description="GRINNING FACE WITH SMILING EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F601,
},
[0x1F602]={
category="so",
description="FACE WITH TEARS OF JOY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F602,
},
[0x1F603]={
category="so",
description="SMILING FACE WITH OPEN MOUTH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F603,
},
[0x1F604]={
category="so",
description="SMILING FACE WITH OPEN MOUTH AND SMILING EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F604,
},
[0x1F605]={
category="so",
description="SMILING FACE WITH OPEN MOUTH AND COLD SWEAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F605,
},
[0x1F606]={
category="so",
description="SMILING FACE WITH OPEN MOUTH AND TIGHTLY-CLOSED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F606,
},
[0x1F607]={
category="so",
description="SMILING FACE WITH HALO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F607,
},
[0x1F608]={
category="so",
description="SMILING FACE WITH HORNS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F608,
},
[0x1F609]={
category="so",
description="WINKING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F609,
},
[0x1F60A]={
category="so",
description="SMILING FACE WITH SMILING EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60A,
},
[0x1F60B]={
category="so",
description="FACE SAVOURING DELICIOUS FOOD",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60B,
},
[0x1F60C]={
category="so",
description="RELIEVED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60C,
},
[0x1F60D]={
category="so",
description="SMILING FACE WITH HEART-SHAPED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60D,
},
[0x1F60E]={
category="so",
description="SMILING FACE WITH SUNGLASSES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60E,
},
[0x1F60F]={
category="so",
description="SMIRKING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F60F,
},
[0x1F610]={
category="so",
description="NEUTRAL FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F610,
},
+ [0x1F611]={
+ category="so",
+ description="EXPRESSIONLESS FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F611,
+ },
[0x1F612]={
category="so",
description="UNAMUSED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F612,
},
[0x1F613]={
category="so",
description="FACE WITH COLD SWEAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F613,
},
[0x1F614]={
category="so",
description="PENSIVE FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F614,
},
+ [0x1F615]={
+ category="so",
+ description="CONFUSED FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F615,
+ },
[0x1F616]={
category="so",
description="CONFOUNDED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F616,
},
+ [0x1F617]={
+ category="so",
+ description="KISSING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F617,
+ },
[0x1F618]={
category="so",
description="FACE THROWING A KISS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F618,
},
+ [0x1F619]={
+ category="so",
+ description="KISSING FACE WITH SMILING EYES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F619,
+ },
[0x1F61A]={
category="so",
description="KISSING FACE WITH CLOSED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F61A,
},
+ [0x1F61B]={
+ category="so",
+ description="FACE WITH STUCK-OUT TONGUE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F61B,
+ },
[0x1F61C]={
category="so",
description="FACE WITH STUCK-OUT TONGUE AND WINKING EYE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F61C,
},
[0x1F61D]={
category="so",
description="FACE WITH STUCK-OUT TONGUE AND TIGHTLY-CLOSED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F61D,
},
[0x1F61E]={
category="so",
description="DISAPPOINTED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F61E,
},
+ [0x1F61F]={
+ category="so",
+ description="WORRIED FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F61F,
+ },
[0x1F620]={
category="so",
description="ANGRY FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F620,
},
[0x1F621]={
category="so",
description="POUTING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F621,
},
[0x1F622]={
category="so",
description="CRYING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F622,
},
[0x1F623]={
category="so",
description="PERSEVERING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F623,
},
[0x1F624]={
category="so",
description="FACE WITH LOOK OF TRIUMPH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F624,
},
[0x1F625]={
category="so",
description="DISAPPOINTED BUT RELIEVED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F625,
},
+ [0x1F626]={
+ category="so",
+ description="FROWNING FACE WITH OPEN MOUTH",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F626,
+ },
+ [0x1F627]={
+ category="so",
+ description="ANGUISHED FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F627,
+ },
[0x1F628]={
category="so",
description="FEARFUL FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F628,
},
[0x1F629]={
category="so",
description="WEARY FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F629,
},
[0x1F62A]={
category="so",
description="SLEEPY FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F62A,
},
[0x1F62B]={
category="so",
description="TIRED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F62B,
},
+ [0x1F62C]={
+ category="so",
+ description="GRIMACING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F62C,
+ },
[0x1F62D]={
category="so",
description="LOUDLY CRYING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F62D,
},
+ [0x1F62E]={
+ category="so",
+ description="FACE WITH OPEN MOUTH",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F62E,
+ },
+ [0x1F62F]={
+ category="so",
+ description="HUSHED FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F62F,
+ },
[0x1F630]={
category="so",
description="FACE WITH OPEN MOUTH AND COLD SWEAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F630,
},
[0x1F631]={
category="so",
description="FACE SCREAMING IN FEAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F631,
},
[0x1F632]={
category="so",
description="ASTONISHED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F632,
},
[0x1F633]={
category="so",
description="FLUSHED FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F633,
},
+ [0x1F634]={
+ category="so",
+ description="SLEEPING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F634,
+ },
[0x1F635]={
category="so",
description="DIZZY FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F635,
},
[0x1F636]={
category="so",
description="FACE WITHOUT MOUTH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F636,
},
[0x1F637]={
category="so",
description="FACE WITH MEDICAL MASK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F637,
},
[0x1F638]={
category="so",
description="GRINNING CAT FACE WITH SMILING EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F638,
},
[0x1F639]={
category="so",
description="CAT FACE WITH TEARS OF JOY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F639,
},
[0x1F63A]={
category="so",
description="SMILING CAT FACE WITH OPEN MOUTH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63A,
},
[0x1F63B]={
category="so",
description="SMILING CAT FACE WITH HEART-SHAPED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63B,
},
[0x1F63C]={
category="so",
description="CAT FACE WITH WRY SMILE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63C,
},
[0x1F63D]={
category="so",
description="KISSING CAT FACE WITH CLOSED EYES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63D,
},
[0x1F63E]={
category="so",
description="POUTING CAT FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63E,
},
[0x1F63F]={
category="so",
description="CRYING CAT FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F63F,
},
[0x1F640]={
category="so",
description="WEARY CAT FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F640,
},
[0x1F645]={
category="so",
description="FACE WITH NO GOOD GESTURE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F645,
},
[0x1F646]={
category="so",
description="FACE WITH OK GESTURE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F646,
},
[0x1F647]={
category="so",
description="PERSON BOWING DEEPLY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F647,
},
[0x1F648]={
category="so",
description="SEE-NO-EVIL MONKEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F648,
},
[0x1F649]={
category="so",
description="HEAR-NO-EVIL MONKEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F649,
},
[0x1F64A]={
category="so",
description="SPEAK-NO-EVIL MONKEY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64A,
},
[0x1F64B]={
category="so",
description="HAPPY PERSON RAISING ONE HAND",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64B,
},
[0x1F64C]={
category="so",
description="PERSON RAISING BOTH HANDS IN CELEBRATION",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64C,
},
[0x1F64D]={
category="so",
description="PERSON FROWNING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64D,
},
[0x1F64E]={
category="so",
description="PERSON WITH POUTING FACE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64E,
},
[0x1F64F]={
category="so",
description="PERSON WITH FOLDED HANDS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F64F,
},
[0x1F680]={
category="so",
description="ROCKET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F680,
},
[0x1F681]={
category="so",
description="HELICOPTER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F681,
},
[0x1F682]={
category="so",
description="STEAM LOCOMOTIVE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F682,
},
[0x1F683]={
category="so",
description="RAILWAY CAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F683,
},
[0x1F684]={
category="so",
description="HIGH-SPEED TRAIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F684,
},
[0x1F685]={
category="so",
description="HIGH-SPEED TRAIN WITH BULLET NOSE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F685,
},
[0x1F686]={
category="so",
description="TRAIN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F686,
},
[0x1F687]={
category="so",
description="METRO",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F687,
},
[0x1F688]={
category="so",
description="LIGHT RAIL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F688,
},
[0x1F689]={
category="so",
description="STATION",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F689,
},
[0x1F68A]={
category="so",
description="TRAM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68A,
},
[0x1F68B]={
category="so",
description="TRAM CAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68B,
},
[0x1F68C]={
category="so",
description="BUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68C,
},
[0x1F68D]={
category="so",
description="ONCOMING BUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68D,
},
[0x1F68E]={
category="so",
description="TROLLEYBUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68E,
},
[0x1F68F]={
category="so",
description="BUS STOP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F68F,
},
[0x1F690]={
category="so",
description="MINIBUS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F690,
},
[0x1F691]={
category="so",
description="AMBULANCE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F691,
},
[0x1F692]={
category="so",
description="FIRE ENGINE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F692,
},
[0x1F693]={
category="so",
description="POLICE CAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F693,
},
[0x1F694]={
category="so",
description="ONCOMING POLICE CAR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F694,
},
[0x1F695]={
category="so",
description="TAXI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F695,
},
[0x1F696]={
category="so",
description="ONCOMING TAXI",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F696,
},
[0x1F697]={
category="so",
description="AUTOMOBILE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F697,
},
[0x1F698]={
category="so",
description="ONCOMING AUTOMOBILE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F698,
},
[0x1F699]={
category="so",
description="RECREATIONAL VEHICLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F699,
},
[0x1F69A]={
category="so",
description="DELIVERY TRUCK",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69A,
},
[0x1F69B]={
category="so",
description="ARTICULATED LORRY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69B,
},
[0x1F69C]={
category="so",
description="TRACTOR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69C,
},
[0x1F69D]={
category="so",
description="MONORAIL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69D,
},
[0x1F69E]={
category="so",
description="MOUNTAIN RAILWAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69E,
},
[0x1F69F]={
category="so",
description="SUSPENSION RAILWAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F69F,
},
[0x1F6A0]={
category="so",
description="MOUNTAIN CABLEWAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A0,
},
[0x1F6A1]={
category="so",
description="AERIAL TRAMWAY",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A1,
},
[0x1F6A2]={
category="so",
description="SHIP",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A2,
},
[0x1F6A3]={
category="so",
description="ROWBOAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A3,
},
[0x1F6A4]={
category="so",
description="SPEEDBOAT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A4,
},
[0x1F6A5]={
category="so",
description="HORIZONTAL TRAFFIC LIGHT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A5,
},
[0x1F6A6]={
category="so",
description="VERTICAL TRAFFIC LIGHT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A6,
},
[0x1F6A7]={
category="so",
description="CONSTRUCTION SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A7,
},
[0x1F6A8]={
category="so",
description="POLICE CARS REVOLVING LIGHT",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A8,
},
[0x1F6A9]={
category="so",
description="TRIANGULAR FLAG ON POST",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6A9,
},
[0x1F6AA]={
category="so",
description="DOOR",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AA,
},
[0x1F6AB]={
category="so",
description="NO ENTRY SIGN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AB,
},
[0x1F6AC]={
category="so",
description="SMOKING SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AC,
},
[0x1F6AD]={
category="so",
description="NO SMOKING SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AD,
},
[0x1F6AE]={
category="so",
description="PUT LITTER IN ITS PLACE SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AE,
},
[0x1F6AF]={
category="so",
description="DO NOT LITTER SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6AF,
},
[0x1F6B0]={
category="so",
description="POTABLE WATER SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B0,
},
[0x1F6B1]={
category="so",
description="NON-POTABLE WATER SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B1,
},
[0x1F6B2]={
category="so",
description="BICYCLE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B2,
},
[0x1F6B3]={
category="so",
description="NO BICYCLES",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B3,
},
[0x1F6B4]={
category="so",
description="BICYCLIST",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B4,
},
[0x1F6B5]={
category="so",
description="MOUNTAIN BICYCLIST",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B5,
},
[0x1F6B6]={
category="so",
description="PEDESTRIAN",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B6,
},
[0x1F6B7]={
category="so",
description="NO PEDESTRIANS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B7,
},
[0x1F6B8]={
category="so",
description="CHILDREN CROSSING",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B8,
},
[0x1F6B9]={
category="so",
description="MENS SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6B9,
},
[0x1F6BA]={
category="so",
description="WOMENS SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BA,
},
[0x1F6BB]={
category="so",
description="RESTROOM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BB,
},
[0x1F6BC]={
category="so",
description="BABY SYMBOL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BC,
},
[0x1F6BD]={
category="so",
description="TOILET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BD,
},
[0x1F6BE]={
category="so",
description="WATER CLOSET",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BE,
},
[0x1F6BF]={
category="so",
description="SHOWER",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6BF,
},
[0x1F6C0]={
category="so",
description="BATH",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C0,
},
[0x1F6C1]={
category="so",
description="BATHTUB",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C1,
},
[0x1F6C2]={
category="so",
description="PASSPORT CONTROL",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C2,
},
[0x1F6C3]={
category="so",
description="CUSTOMS",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C3,
},
[0x1F6C4]={
category="so",
description="BAGGAGE CLAIM",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C4,
},
[0x1F6C5]={
category="so",
description="LEFT LUGGAGE",
direction="on",
- linebreak="al",
+ linebreak="id",
unicodeslot=0x1F6C5,
},
[0x1F700]={
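-- Editor's note: an illustrative sketch, not part of the patch. Assuming
-- characters.data has been loaded from char-def.lua (the modules further down
-- read it the same way), the emoticon entries above can be inspected directly;
-- the patch flips their linebreak class from "al" (alphabetic) to "id"
-- (ideographic), so they now break like CJK ideographs.
local d = characters.data[0x1F601]
print(d.description) -- GRINNING FACE WITH SMILING EYES
print(d.linebreak)   -- id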
diff --git a/Master/texmf-dist/tex/context/base/char-enc.lua b/Master/texmf-dist/tex/context/base/char-enc.lua
index ef6805e54f3..048837eec18 100644
--- a/Master/texmf-dist/tex/context/base/char-enc.lua
+++ b/Master/texmf-dist/tex/context/base/char-enc.lua
@@ -1,9 +1,10 @@
-if not modules then modules = { } end modules ['char-syn'] = {
+if not modules then modules = { } end modules ['char-enc'] = {
version = 1.001,
comment = "companion to char-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
+ -- dataonly = true,
}
-- Thanks to tex4ht for these mappings.
@@ -144,25 +145,25 @@ characters.synonyms = allocate { -- afm mess
Yen = 0x00A5,
}
---~ if not characters.enccodes then
---~
---~ local enccodes = { } characters.enccodes = enccodes
---~
---~ for unicode, data in next, characters.data do
---~ local encname = data.adobename or data.contextname
---~ if encname then
---~ enccodes[encname] = unicode
---~ end
---~ end
---~
---~ for name, unicode in next, characters.synonyms do
---~ if not enccodes[name] then enccodes[name] = unicode end
---~ end
---~
---~
---~ end
---~
---~ storage.register("characters.enccodes", characters.enccodes, "characters.enccodes")
+-- if not characters.enccodes then
+--
+-- local enccodes = { } characters.enccodes = enccodes
+--
+-- for unicode, data in next, characters.data do
+-- local encname = data.adobename or data.contextname
+-- if encname then
+-- enccodes[encname] = unicode
+-- end
+-- end
+--
+-- for name, unicode in next, characters.synonyms do
+-- if not enccodes[name] then enccodes[name] = unicode end
+-- end
+--
+--
+-- end
+--
+-- storage.register("characters.enccodes", characters.enccodes, "characters.enccodes")
-- As this table is seldom used, we can delay its definition. Beware, this means
-- that table.print would not work on this file unless it is accessed once. This
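-- Editor's note: an illustrative sketch, not part of the patch, of the delayed
-- definition the comment above refers to: instead of building
-- characters.enccodes eagerly (the commented-out loops), the table can be
-- filled on first access via an __index metamethod. The helper below is an
-- assumption, not the module's actual code.
local enccodes = setmetatable({ }, { __index = function(t, name)
    for unicode, chardata in next, characters.data do
        local encname = chardata.adobename or chardata.contextname
        if encname then
            rawset(t, encname, unicode)
        end
    end
    for synonym, unicode in next, characters.synonyms do
        if rawget(t, synonym) == nil then
            rawset(t, synonym, unicode)
        end
    end
    setmetatable(t, nil)   -- built once, afterwards a plain table
    return rawget(t, name) -- may be nil for unknown names
end })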
diff --git a/Master/texmf-dist/tex/context/base/char-ent.lua b/Master/texmf-dist/tex/context/base/char-ent.lua
index d2ac22bbf2c..58ee9472ca3 100644
--- a/Master/texmf-dist/tex/context/base/char-ent.lua
+++ b/Master/texmf-dist/tex/context/base/char-ent.lua
@@ -1,15 +1,14 @@
-if not modules then modules = { } end modules ['math-ent'] = {
+if not modules then modules = { } end modules ['char-ent'] = {
version = 1.001,
comment = "companion to math-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "derived from the mathml 2.0 specification",
+ dataonly = true,
}
-- http://www.w3.org/2003/entities/2007/w3centities-f.ent
-- http://www.w3.org/2003/entities/2007/htmlmathml-f.ent
--- this might go into char-def
-
local entities = utilities.storage.allocate {
["AElig"] = "Æ", -- U+000C6
["AMP"] = "&", -- U+00026
@@ -568,7 +567,8 @@ local entities = utilities.storage.allocate {
["Ugr"] = "Υ", -- U+003A5
["Ugrave"] = "Ù", -- U+000D9
["Umacr"] = "Ū", -- U+0016A
- ["UnderBar"] = "_", -- U+0005F
+ -- ["UnderBar"] = "_", -- U+0005F
+ ["UnderBar"] = "‾", -- U+0203E
["UnderBrace"] = "⏟", -- U+023DF
["UnderBracket"] = "⎵", -- U+023B5
["UnderParenthesis"] = "⏝", -- U+023DD
diff --git a/Master/texmf-dist/tex/context/base/char-ini.lua b/Master/texmf-dist/tex/context/base/char-ini.lua
index 244b8d2a70e..b75f5eda758 100644
--- a/Master/texmf-dist/tex/context/base/char-ini.lua
+++ b/Master/texmf-dist/tex/context/base/char-ini.lua
@@ -10,26 +10,17 @@ if not modules then modules = { } end modules ['char-ini'] = {
-- we can remove the tag range starting at 0xE0000 (special applications)
-local tex = tex
-local utf = unicode.utf8
-
-local utfchar, utfbyte, utfvalues = utf.char, utf.byte, string.utfvalues
-local ustring = unicode.ustring
+local utfchar, utfbyte, utfvalues, ustring = utf.char, utf.byte, utf.values, utf.ustring
local concat, unpack, tohash = table.concat, table.unpack, table.tohash
local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset
local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.gmatch
-local P, R, lpegmatch = lpeg.P, lpeg.R, lpeg.match
+local P, R, Cs, lpegmatch, patterns = lpeg.P, lpeg.R, lpeg.Cs, lpeg.match, lpeg.patterns
+
+local utf8byte = patterns.utf8byte
+local utf8char = patterns.utf8char
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
-local texsetlccode = tex.setlccode
-local texsetuccode = tex.setuccode
-local texsetsfcode = tex.setsfcode
-local texsetcatcode = tex.setcatcode
-
-local contextsprint = context.sprint
-local ctxcatcodes = tex.ctxcatcodes
-local texcatcodes = tex.texcatcodes
local setmetatableindex = table.setmetatableindex
@@ -48,7 +39,6 @@ loaded!</p>
characters = characters or { }
local characters = characters
-
local data = characters.data
if data then
@@ -64,7 +54,7 @@ end
local pattern = (P("0x") + P("U+")) * ((R("09","AF")^1 * P(-1)) / function(s) return tonumber(s,16) end)
-lpeg.patterns.chartonumber = pattern
+patterns.chartonumber = pattern
local function chartonumber(k)
if type(k) == "string" then
@@ -136,185 +126,259 @@ setmetatableindex(data, function(t,k)
end)
local blocks = allocate {
- ["aegeannumbers"] = { first = 0x10100, last = 0x1013F, description = "Aegean Numbers" },
- ["alphabeticpresentationforms"] = { first = 0x0FB00, last = 0x0FB4F, description = "Alphabetic Presentation Forms" },
- ["ancientgreekmusicalnotation"] = { first = 0x1D200, last = 0x1D24F, description = "Ancient Greek Musical Notation" },
- ["ancientgreeknumbers"] = { first = 0x10140, last = 0x1018F, description = "Ancient Greek Numbers" },
- ["ancientsymbols"] = { first = 0x10190, last = 0x101CF, description = "Ancient Symbols" },
- ["arabic"] = { first = 0x00600, last = 0x006FF, description = "Arabic" },
- ["arabicpresentationformsa"] = { first = 0x0FB50, last = 0x0FDFF, description = "Arabic Presentation Forms-A" },
- ["arabicpresentationformsb"] = { first = 0x0FE70, last = 0x0FEFF, description = "Arabic Presentation Forms-B" },
- ["arabicsupplement"] = { first = 0x00750, last = 0x0077F, description = "Arabic Supplement" },
- ["armenian"] = { first = 0x00530, last = 0x0058F, description = "Armenian" },
- ["arrows"] = { first = 0x02190, last = 0x021FF, description = "Arrows" },
- ["balinese"] = { first = 0x01B00, last = 0x01B7F, description = "Balinese" },
- ["basiclatin"] = { first = 0x00000, last = 0x0007F, description = "Basic Latin" },
- ["bengali"] = { first = 0x00980, last = 0x009FF, description = "Bengali" },
- ["blockelements"] = { first = 0x02580, last = 0x0259F, description = "Block Elements" },
- ["bopomofo"] = { first = 0x03100, last = 0x0312F, description = "Bopomofo" },
- ["bopomofoextended"] = { first = 0x031A0, last = 0x031BF, description = "Bopomofo Extended" },
- ["boxdrawing"] = { first = 0x02500, last = 0x0257F, description = "Box Drawing" },
- ["braillepatterns"] = { first = 0x02800, last = 0x028FF, description = "Braille Patterns" },
- ["buginese"] = { first = 0x01A00, last = 0x01A1F, description = "Buginese" },
- ["buhid"] = { first = 0x01740, last = 0x0175F, description = "Buhid" },
- ["byzantinemusicalsymbols"] = { first = 0x1D000, last = 0x1D0FF, description = "Byzantine Musical Symbols" },
- ["carian"] = { first = 0x102A0, last = 0x102DF, description = "Carian" },
- ["cham"] = { first = 0x0AA00, last = 0x0AA5F, description = "Cham" },
- ["cherokee"] = { first = 0x013A0, last = 0x013FF, description = "Cherokee" },
- ["cjkcompatibility"] = { first = 0x03300, last = 0x033FF, description = "CJK Compatibility" },
- ["cjkcompatibilityforms"] = { first = 0x0FE30, last = 0x0FE4F, description = "CJK Compatibility Forms" },
- ["cjkcompatibilityideographs"] = { first = 0x0F900, last = 0x0FAFF, description = "CJK Compatibility Ideographs" },
- ["cjkcompatibilityideographssupplement"] = { first = 0x2F800, last = 0x2FA1F, description = "CJK Compatibility Ideographs Supplement" },
- ["cjkradicalssupplement"] = { first = 0x02E80, last = 0x02EFF, description = "CJK Radicals Supplement" },
- ["cjkstrokes"] = { first = 0x031C0, last = 0x031EF, description = "CJK Strokes" },
- ["cjksymbolsandpunctuation"] = { first = 0x03000, last = 0x0303F, description = "CJK Symbols and Punctuation" },
- ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, description = "CJK Unified Ideographs" },
- ["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, description = "CJK Unified Ideographs Extension A" },
- ["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, description = "CJK Unified Ideographs Extension B" },
- ["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" },
- ["combiningdiacriticalmarksforsymbols"] = { first = 0x020D0, last = 0x020FF, description = "Combining Diacritical Marks for Symbols" },
- ["combiningdiacriticalmarkssupplement"] = { first = 0x01DC0, last = 0x01DFF, description = "Combining Diacritical Marks Supplement" },
- ["combininghalfmarks"] = { first = 0x0FE20, last = 0x0FE2F, description = "Combining Half Marks" },
- ["controlpictures"] = { first = 0x02400, last = 0x0243F, description = "Control Pictures" },
- ["coptic"] = { first = 0x02C80, last = 0x02CFF, description = "Coptic" },
- ["countingrodnumerals"] = { first = 0x1D360, last = 0x1D37F, description = "Counting Rod Numerals" },
- ["cuneiform"] = { first = 0x12000, last = 0x123FF, description = "Cuneiform" },
- ["cuneiformnumbersandpunctuation"] = { first = 0x12400, last = 0x1247F, description = "Cuneiform Numbers and Punctuation" },
- ["currencysymbols"] = { first = 0x020A0, last = 0x020CF, description = "Currency Symbols" },
- ["cypriotsyllabary"] = { first = 0x10800, last = 0x1083F, description = "Cypriot Syllabary" },
- ["cyrillic"] = { first = 0x00400, last = 0x004FF, description = "Cyrillic" },
- ["cyrillicextendeda"] = { first = 0x02DE0, last = 0x02DFF, description = "Cyrillic Extended-A" },
- ["cyrillicextendedb"] = { first = 0x0A640, last = 0x0A69F, description = "Cyrillic Extended-B" },
- ["cyrillicsupplement"] = { first = 0x00500, last = 0x0052F, description = "Cyrillic Supplement" },
- ["deseret"] = { first = 0x10400, last = 0x1044F, description = "Deseret" },
- ["devanagari"] = { first = 0x00900, last = 0x0097F, description = "Devanagari" },
- ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" },
- ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" },
- ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" },
- ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" },
- ["ethiopic"] = { first = 0x01200, last = 0x0137F, description = "Ethiopic" },
- ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, description = "Ethiopic Extended" },
- ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, description = "Ethiopic Supplement" },
- ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" },
- ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" },
- ["georgian"] = { first = 0x010A0, last = 0x010FF, description = "Georgian" },
- ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, description = "Georgian Supplement" },
- ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, description = "Glagolitic" },
- ["gothic"] = { first = 0x10330, last = 0x1034F, description = "Gothic" },
- ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, description = "Greek and Coptic" },
- ["greekextended"] = { first = 0x01F00, last = 0x01FFF, description = "Greek Extended" },
- ["gujarati"] = { first = 0x00A80, last = 0x00AFF, description = "Gujarati" },
- ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, description = "Gurmukhi" },
- ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" },
- ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, description = "Hangul Compatibility Jamo" },
- ["hanguljamo"] = { first = 0x01100, last = 0x011FF, description = "Hangul Jamo" },
- ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, description = "Hangul Syllables" },
- ["hanunoo"] = { first = 0x01720, last = 0x0173F, description = "Hanunoo" },
- ["hebrew"] = { first = 0x00590, last = 0x005FF, description = "Hebrew" },
- ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" },
- ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" },
- ["hiragana"] = { first = 0x03040, last = 0x0309F, description = "Hiragana" },
- ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" },
- ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" },
- ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" },
- ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" },
- ["kannada"] = { first = 0x00C80, last = 0x00CFF, description = "Kannada" },
- ["katakana"] = { first = 0x030A0, last = 0x030FF, description = "Katakana" },
- ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, description = "Katakana Phonetic Extensions" },
- ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" },
- ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, description = "Kharoshthi" },
- ["khmer"] = { first = 0x01780, last = 0x017FF, description = "Khmer" },
- ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, description = "Khmer Symbols" },
- ["lao"] = { first = 0x00E80, last = 0x00EFF, description = "Lao" },
- ["latinextendeda"] = { first = 0x00100, last = 0x0017F, description = "Latin Extended-A" },
- ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, description = "Latin Extended Additional" },
- ["latinextendedb"] = { first = 0x00180, last = 0x0024F, description = "Latin Extended-B" },
- ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, description = "Latin Extended-C" },
- ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, description = "Latin Extended-D" },
- ["latinsupplement"] = { first = 0x00080, last = 0x000FF, description = "Latin-1 Supplement" },
- ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" },
- ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" },
- ["limbu"] = { first = 0x01900, last = 0x0194F, description = "Limbu" },
- ["linearbideograms"] = { first = 0x10080, last = 0x100FF, description = "Linear B Ideograms" },
- ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, description = "Linear B Syllabary" },
- ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" },
- ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" },
- ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" },
- ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" },
- ["malayalam"] = { first = 0x00D00, last = 0x00D7F, description = "Malayalam" },
- ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, description = "Mathematical Alphanumeric Symbols" },
- ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, description = "Mathematical Operators" },
- ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, description = "Miscellaneous Mathematical Symbols-A" },
- ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, description = "Miscellaneous Mathematical Symbols-B" },
- ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, description = "Miscellaneous Symbols" },
- ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, description = "Miscellaneous Symbols and Arrows" },
- ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, description = "Miscellaneous Technical" },
- ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" },
- ["mongolian"] = { first = 0x01800, last = 0x018AF, description = "Mongolian" },
- ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, description = "Musical Symbols" },
- ["myanmar"] = { first = 0x01000, last = 0x0109F, description = "Myanmar" },
- ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" },
- ["nko"] = { first = 0x007C0, last = 0x007FF, description = "NKo" },
- ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" },
- ["ogham"] = { first = 0x01680, last = 0x0169F, description = "Ogham" },
- ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" },
- ["olditalic"] = { first = 0x10300, last = 0x1032F, description = "Old Italic" },
- ["oldpersian"] = { first = 0x103A0, last = 0x103DF, description = "Old Persian" },
- ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" },
- ["oriya"] = { first = 0x00B00, last = 0x00B7F, description = "Oriya" },
- ["osmanya"] = { first = 0x10480, last = 0x104AF, description = "Osmanya" },
- ["phagspa"] = { first = 0x0A840, last = 0x0A87F, description = "Phags-pa" },
- ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" },
- ["phoenician"] = { first = 0x10900, last = 0x1091F, description = "Phoenician" },
- ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" },
- ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" },
- ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" },
- ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" },
- ["runic"] = { first = 0x016A0, last = 0x016FF, description = "Runic" },
- ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" },
- ["shavian"] = { first = 0x10450, last = 0x1047F, description = "Shavian" },
- ["sinhala"] = { first = 0x00D80, last = 0x00DFF, description = "Sinhala" },
- ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" },
- ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" },
- ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" },
- ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" },
- ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" },
- ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, description = "Supplemental Arrows-A" },
- ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, description = "Supplemental Arrows-B" },
- ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, description = "Supplemental Mathematical Operators" },
- ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" },
- ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" },
- ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF,description = "Supplementary Private Use Area-B" },
- ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, description = "Syloti Nagri" },
- ["syriac"] = { first = 0x00700, last = 0x0074F, description = "Syriac" },
- ["tagalog"] = { first = 0x01700, last = 0x0171F, description = "Tagalog" },
- ["tagbanwa"] = { first = 0x01760, last = 0x0177F, description = "Tagbanwa" },
- ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" },
- ["taile"] = { first = 0x01950, last = 0x0197F, description = "Tai Le" },
- ["taixuanjingsymbols"] = { first = 0x1D300, last = 0x1D35F, description = "Tai Xuan Jing Symbols" },
- ["tamil"] = { first = 0x00B80, last = 0x00BFF, description = "Tamil" },
- ["telugu"] = { first = 0x00C00, last = 0x00C7F, description = "Telugu" },
- ["thaana"] = { first = 0x00780, last = 0x007BF, description = "Thaana" },
- ["thai"] = { first = 0x00E00, last = 0x00E7F, description = "Thai" },
- ["tibetan"] = { first = 0x00F00, last = 0x00FFF, description = "Tibetan" },
- ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, description = "Tifinagh" },
- ["ugaritic"] = { first = 0x10380, last = 0x1039F, description = "Ugaritic" },
- ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, description = "Unified Canadian Aboriginal Syllabics" },
- ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" },
- ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" },
- ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" },
- ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" },
- ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, description = "Yijing Hexagram Symbols" },
- ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, description = "Yi Radicals" },
- ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, description = "Yi Syllables" },
+ ["aegeannumbers"] = { first = 0x10100, last = 0x1013F, description = "Aegean Numbers" },
+ ["alchemicalsymbols"] = { first = 0x1F700, last = 0x1F77F, description = "Alchemical Symbols" },
+ ["alphabeticpresentationforms"] = { first = 0x0FB00, last = 0x0FB4F, otf="latn", description = "Alphabetic Presentation Forms" },
+ ["ancientgreekmusicalnotation"] = { first = 0x1D200, last = 0x1D24F, otf="grek", description = "Ancient Greek Musical Notation" },
+ ["ancientgreeknumbers"] = { first = 0x10140, last = 0x1018F, otf="grek", description = "Ancient Greek Numbers" },
+ ["ancientsymbols"] = { first = 0x10190, last = 0x101CF, otf="grek", description = "Ancient Symbols" },
+ ["arabic"] = { first = 0x00600, last = 0x006FF, otf="arab", description = "Arabic" },
+ ["arabicextendeda"] = { first = 0x008A0, last = 0x008FF, description = "Arabic Extended-A" },
+ ["arabicmathematicalalphabeticsymbols"] = { first = 0x1EE00, last = 0x1EEFF, description = "Arabic Mathematical Alphabetic Symbols" },
+ ["arabicpresentationformsa"] = { first = 0x0FB50, last = 0x0FDFF, otf="arab", description = "Arabic Presentation Forms-A" },
+ ["arabicpresentationformsb"] = { first = 0x0FE70, last = 0x0FEFF, otf="arab", description = "Arabic Presentation Forms-B" },
+ ["arabicsupplement"] = { first = 0x00750, last = 0x0077F, otf="arab", description = "Arabic Supplement" },
+ ["armenian"] = { first = 0x00530, last = 0x0058F, otf="armn", description = "Armenian" },
+ ["arrows"] = { first = 0x02190, last = 0x021FF, description = "Arrows" },
+ ["avestan"] = { first = 0x10B00, last = 0x10B3F, description = "Avestan" },
+ ["balinese"] = { first = 0x01B00, last = 0x01B7F, otf="bali", description = "Balinese" },
+ ["bamum"] = { first = 0x0A6A0, last = 0x0A6FF, description = "Bamum" },
+ ["bamumsupplement"] = { first = 0x16800, last = 0x16A3F, description = "Bamum Supplement" },
+ ["basiclatin"] = { first = 0x00000, last = 0x0007F, otf="latn", description = "Basic Latin" },
+ ["batak"] = { first = 0x01BC0, last = 0x01BFF, description = "Batak" },
+ ["bengali"] = { first = 0x00980, last = 0x009FF, otf="beng", description = "Bengali" },
+ ["blockelements"] = { first = 0x02580, last = 0x0259F, otf="bopo", description = "Block Elements" },
+ ["bopomofo"] = { first = 0x03100, last = 0x0312F, otf="bopo", description = "Bopomofo" },
+ ["bopomofoextended"] = { first = 0x031A0, last = 0x031BF, otf="bopo", description = "Bopomofo Extended" },
+ ["boxdrawing"] = { first = 0x02500, last = 0x0257F, description = "Box Drawing" },
+ ["brahmi"] = { first = 0x11000, last = 0x1107F, description = "Brahmi" },
+ ["braillepatterns"] = { first = 0x02800, last = 0x028FF, otf="brai", description = "Braille Patterns" },
+ ["buginese"] = { first = 0x01A00, last = 0x01A1F, otf="bugi", description = "Buginese" },
+ ["buhid"] = { first = 0x01740, last = 0x0175F, otf="buhd", description = "Buhid" },
+ ["byzantinemusicalsymbols"] = { first = 0x1D000, last = 0x1D0FF, otf="byzm", description = "Byzantine Musical Symbols" },
+ ["commonindicnumberforms"] = { first = 0x0A830, last = 0x0A83F, description = "Common Indic Number Forms" },
+ ["carian"] = { first = 0x102A0, last = 0x102DF, description = "Carian" },
+ ["cham"] = { first = 0x0AA00, last = 0x0AA5F, description = "Cham" },
+ ["cherokee"] = { first = 0x013A0, last = 0x013FF, otf="cher", description = "Cherokee" },
+ ["cjkcompatibility"] = { first = 0x03300, last = 0x033FF, otf="hang", description = "CJK Compatibility" },
+ ["cjkcompatibilityforms"] = { first = 0x0FE30, last = 0x0FE4F, otf="hang", description = "CJK Compatibility Forms" },
+ ["cjkcompatibilityideographs"] = { first = 0x0F900, last = 0x0FAFF, otf="hang", description = "CJK Compatibility Ideographs" },
+ ["cjkcompatibilityideographssupplement"] = { first = 0x2F800, last = 0x2FA1F, otf="hang", description = "CJK Compatibility Ideographs Supplement" },
+ ["cjkradicalssupplement"] = { first = 0x02E80, last = 0x02EFF, otf="hang", description = "CJK Radicals Supplement" },
+ ["cjkstrokes"] = { first = 0x031C0, last = 0x031EF, otf="hang", description = "CJK Strokes" },
+ ["cjksymbolsandpunctuation"] = { first = 0x03000, last = 0x0303F, otf="hang", description = "CJK Symbols and Punctuation" },
+ ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs" },
+ ["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, otf="hang", description = "CJK Unified Ideographs Extension A" },
+ ["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, otf="hang", description = "CJK Unified Ideographs Extension B" },
+ ["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" },
+ ["combiningdiacriticalmarksforsymbols"] = { first = 0x020D0, last = 0x020FF, description = "Combining Diacritical Marks for Symbols" },
+ ["combiningdiacriticalmarkssupplement"] = { first = 0x01DC0, last = 0x01DFF, description = "Combining Diacritical Marks Supplement" },
+ ["combininghalfmarks"] = { first = 0x0FE20, last = 0x0FE2F, description = "Combining Half Marks" },
+ ["controlpictures"] = { first = 0x02400, last = 0x0243F, description = "Control Pictures" },
+ ["coptic"] = { first = 0x02C80, last = 0x02CFF, otf="copt", description = "Coptic" },
+ ["countingrodnumerals"] = { first = 0x1D360, last = 0x1D37F, description = "Counting Rod Numerals" },
+ ["cuneiform"] = { first = 0x12000, last = 0x123FF, otf="xsux", description = "Cuneiform" },
+ ["cuneiformnumbersandpunctuation"] = { first = 0x12400, last = 0x1247F, otf="xsux", description = "Cuneiform Numbers and Punctuation" },
+ ["currencysymbols"] = { first = 0x020A0, last = 0x020CF, description = "Currency Symbols" },
+ ["cypriotsyllabary"] = { first = 0x10800, last = 0x1083F, otf="cprt", description = "Cypriot Syllabary" },
+ ["cyrillic"] = { first = 0x00400, last = 0x004FF, otf="cyrl", description = "Cyrillic" },
+ ["cyrillicextendeda"] = { first = 0x02DE0, last = 0x02DFF, otf="cyrl", description = "Cyrillic Extended-A" },
+ ["cyrillicextendedb"] = { first = 0x0A640, last = 0x0A69F, otf="cyrl", description = "Cyrillic Extended-B" },
+ ["cyrillicsupplement"] = { first = 0x00500, last = 0x0052F, otf="cyrl", description = "Cyrillic Supplement" },
+ ["deseret"] = { first = 0x10400, last = 0x1044F, otf="dsrt", description = "Deseret" },
+ ["devanagari"] = { first = 0x00900, last = 0x0097F, otf="deva", description = "Devanagari" },
+ ["devanagariextended"] = { first = 0x0A8E0, last = 0x0A8FF, description = "Devanagari Extended" },
+ ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" },
+ ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" },
+ ["egyptianhieroglyphs"] = { first = 0x13000, last = 0x1342F, description = "Egyptian Hieroglyphs" },
+ ["emoticons"] = { first = 0x1F600, last = 0x1F64F, description = "Emoticons" },
+ ["enclosedalphanumericsupplement"] = { first = 0x1F100, last = 0x1F1FF, description = "Enclosed Alphanumeric Supplement" },
+ ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" },
+ ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" },
+ ["enclosedideographicsupplement"] = { first = 0x1F200, last = 0x1F2FF, description = "Enclosed Ideographic Supplement" },
+ ["ethiopic"] = { first = 0x01200, last = 0x0137F, otf="ethi", description = "Ethiopic" },
+ ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, otf="ethi", description = "Ethiopic Extended" },
+ ["ethiopicextendeda"] = { first = 0x0AB00, last = 0x0AB2F, description = "Ethiopic Extended-A" },
+ ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, otf="ethi", description = "Ethiopic Supplement" },
+ ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" },
+ ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" },
+ ["georgian"] = { first = 0x010A0, last = 0x010FF, otf="geor", description = "Georgian" },
+ ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, otf="geor", description = "Georgian Supplement" },
+ ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, otf="glag", description = "Glagolitic" },
+ ["gothic"] = { first = 0x10330, last = 0x1034F, otf="goth", description = "Gothic" },
+ ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, otf="grek", description = "Greek and Coptic" },
+ ["greekextended"] = { first = 0x01F00, last = 0x01FFF, otf="grek", description = "Greek Extended" },
+ ["gujarati"] = { first = 0x00A80, last = 0x00AFF, otf="gujr", description = "Gujarati" },
+ ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, otf="guru", description = "Gurmukhi" },
+ ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" },
+ ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, otf="jamo", description = "Hangul Compatibility Jamo" },
+ ["hanguljamo"] = { first = 0x01100, last = 0x011FF, otf="jamo", description = "Hangul Jamo" },
+ ["hanguljamoextendeda"] = { first = 0x0A960, last = 0x0A97F, description = "Hangul Jamo Extended-A" },
+ ["hanguljamoextendedb"] = { first = 0x0D7B0, last = 0x0D7FF, description = "Hangul Jamo Extended-B" },
+ ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, otf="hang", description = "Hangul Syllables" },
+ ["hanunoo"] = { first = 0x01720, last = 0x0173F, otf="hano", description = "Hanunoo" },
+ ["hebrew"] = { first = 0x00590, last = 0x005FF, otf="hebr", description = "Hebrew" },
+ ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" },
+ ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" },
+ ["hiragana"] = { first = 0x03040, last = 0x0309F, otf="kana", description = "Hiragana" },
+ ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" },
+ ["imperialaramaic"] = { first = 0x10840, last = 0x1085F, description = "Imperial Aramaic" },
+ ["inscriptionalpahlavi"] = { first = 0x10B60, last = 0x10B7F, description = "Inscriptional Pahlavi" },
+ ["inscriptionalparthian"] = { first = 0x10B40, last = 0x10B5F, description = "Inscriptional Parthian" },
+ ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" },
+ ["javanese"] = { first = 0x0A980, last = 0x0A9DF, description = "Javanese" },
+ ["kaithi"] = { first = 0x11080, last = 0x110CF, description = "Kaithi" },
+ ["kanasupplement"] = { first = 0x1B000, last = 0x1B0FF, description = "Kana Supplement" },
+ ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" },
+ ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" },
+ ["kannada"] = { first = 0x00C80, last = 0x00CFF, otf="knda", description = "Kannada" },
+ ["katakana"] = { first = 0x030A0, last = 0x030FF, otf="kana", description = "Katakana" },
+ ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, otf="kana", description = "Katakana Phonetic Extensions" },
+ ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" },
+ ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, otf="khar", description = "Kharoshthi" },
+ ["khmer"] = { first = 0x01780, last = 0x017FF, otf="khmr", description = "Khmer" },
+ ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, otf="khmr", description = "Khmer Symbols" },
+ ["lao"] = { first = 0x00E80, last = 0x00EFF, otf="lao", description = "Lao" },
+ ["latinextendeda"] = { first = 0x00100, last = 0x0017F, otf="latn", description = "Latin Extended-A" },
+ ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, otf="latn", description = "Latin Extended Additional" },
+ ["latinextendedb"] = { first = 0x00180, last = 0x0024F, otf="latn", description = "Latin Extended-B" },
+ ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, otf="latn", description = "Latin Extended-C" },
+ ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, otf="latn", description = "Latin Extended-D" },
+ ["latinsupplement"] = { first = 0x00080, last = 0x000FF, otf="latn", description = "Latin-1 Supplement" },
+ ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" },
+ ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" },
+ ["limbu"] = { first = 0x01900, last = 0x0194F, otf="limb", description = "Limbu" },
+ ["linearbideograms"] = { first = 0x10080, last = 0x100FF, otf="linb", description = "Linear B Ideograms" },
+ ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, otf="linb", description = "Linear B Syllabary" },
+ ["lisu"] = { first = 0x0A4D0, last = 0x0A4FF, description = "Lisu" },
+ ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" },
+ ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" },
+ ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" },
+ ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" },
+ ["malayalam"] = { first = 0x00D00, last = 0x00D7F, otf="mlym", description = "Malayalam" },
+ ["mandiac"] = { first = 0x00840, last = 0x0085F, otf="mand", description = "Mandaic" },
+ ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, description = "Mathematical Alphanumeric Symbols" },
+ ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, description = "Mathematical Operators" },
+ ["meeteimayek"] = { first = 0x0ABC0, last = 0x0ABFF, description = "Meetei Mayek" },
+ ["meeteimayekextensions"] = { first = 0x0AAE0, last = 0x0AAFF, description = "Meetei Mayek Extensions" },
+ ["meroiticcursive"] = { first = 0x109A0, last = 0x109FF, description = "Meroitic Cursive" },
+ ["meroitichieroglyphs"] = { first = 0x10980, last = 0x1099F, description = "Meroitic Hieroglyphs" },
+ ["miao"] = { first = 0x16F00, last = 0x16F9F, description = "Miao" },
+ ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, description = "Miscellaneous Mathematical Symbols-A" },
+ ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, description = "Miscellaneous Mathematical Symbols-B" },
+ ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, description = "Miscellaneous Symbols" },
+ ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, description = "Miscellaneous Symbols and Arrows" },
+ ["miscellaneoussymbolsandpictographs"] = { first = 0x1F300, last = 0x1F5FF, description = "Miscellaneous Symbols And Pictographs" },
+ ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, description = "Miscellaneous Technical" },
+ ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" },
+ ["mongolian"] = { first = 0x01800, last = 0x018AF, otf="mong", description = "Mongolian" },
+ ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, otf="musc", description = "Musical Symbols" },
+ ["myanmar"] = { first = 0x01000, last = 0x0109F, otf="mymr", description = "Myanmar" },
+ ["myanmarextendeda"] = { first = 0x0AA60, last = 0x0AA7F, description = "Myanmar Extended-A" },
+ ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" },
+ ["nko"] = { first = 0x007C0, last = 0x007FF, otf="nko", description = "NKo" },
+ ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" },
+ ["ogham"] = { first = 0x01680, last = 0x0169F, otf="ogam", description = "Ogham" },
+ ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" },
+ ["olditalic"] = { first = 0x10300, last = 0x1032F, otf="ital", description = "Old Italic" },
+ ["oldpersian"] = { first = 0x103A0, last = 0x103DF, otf="xpeo", description = "Old Persian" },
+ ["oldsoutharabian"] = { first = 0x10A60, last = 0x10A7F, description = "Old South Arabian" },
+ ["odlturkic"] = { first = 0x10C00, last = 0x10C4F, description = "Old Turkic" },
+ ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" },
+ ["oriya"] = { first = 0x00B00, last = 0x00B7F, otf="orya", description = "Oriya" },
+ ["osmanya"] = { first = 0x10480, last = 0x104AF, otf="osma", description = "Osmanya" },
+ ["phagspa"] = { first = 0x0A840, last = 0x0A87F, otf="phag", description = "Phags-pa" },
+ ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" },
+ ["phoenician"] = { first = 0x10900, last = 0x1091F, otf="phnx", description = "Phoenician" },
+ ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" },
+ ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" },
+ ["playingcards"] = { first = 0x1F0A0, last = 0x1F0FF, description = "Playing Cards" },
+ ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" },
+ ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" },
+ ["ruminumeralsymbols"] = { first = 0x10E60, last = 0x10E7F, description = "Rumi Numeral Symbols" },
+ ["runic"] = { first = 0x016A0, last = 0x016FF, otf="runr", description = "Runic" },
+ ["samaritan"] = { first = 0x00800, last = 0x0083F, description = "Samaritan" },
+ ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" },
+ ["sharada"] = { first = 0x11180, last = 0x111DF, description = "Sharada" },
+ ["shavian"] = { first = 0x10450, last = 0x1047F, otf="shaw", description = "Shavian" },
+ ["sinhala"] = { first = 0x00D80, last = 0x00DFF, otf="sinh", description = "Sinhala" },
+ ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" },
+ ["sorasompeng"] = { first = 0x110D0, last = 0x110FF, description = "Sora Sompeng" },
+ ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" },
+ ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" },
+ ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" },
+ ["sundanesesupplement"] = { first = 0x01CC0, last = 0x01CCF, description = "Sundanese Supplement" },
+ ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" },
+ ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, description = "Supplemental Arrows-A" },
+ ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, description = "Supplemental Arrows-B" },
+ ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, description = "Supplemental Mathematical Operators" },
+ ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" },
+ ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" },
+ ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF, description = "Supplementary Private Use Area-B" },
+ ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, otf="sylo", description = "Syloti Nagri" },
+ ["syriac"] = { first = 0x00700, last = 0x0074F, otf="syrc", description = "Syriac" },
+ ["tagalog"] = { first = 0x01700, last = 0x0171F, otf="tglg", description = "Tagalog" },
+ ["tagbanwa"] = { first = 0x01760, last = 0x0177F, otf="tagb", description = "Tagbanwa" },
+ ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" },
+ ["taile"] = { first = 0x01950, last = 0x0197F, otf="tale", description = "Tai Le" },
+ ["taitham"] = { first = 0x01A20, last = 0x01AAF, description = "Tai Tham" },
+ ["taiviet"] = { first = 0x0AA80, last = 0x0AADF, description = "Tai Viet" },
+ ["taixuanjingsymbols"] = { first = 0x1D300, last = 0x1D35F, description = "Tai Xuan Jing Symbols" },
+ ["takri"] = { first = 0x11680, last = 0x116CF, description = "Takri" },
+ ["tamil"] = { first = 0x00B80, last = 0x00BFF, otf="taml", description = "Tamil" },
+ ["telugu"] = { first = 0x00C00, last = 0x00C7F, otf="telu", description = "Telugu" },
+ ["thaana"] = { first = 0x00780, last = 0x007BF, otf="thaa", description = "Thaana" },
+ ["thai"] = { first = 0x00E00, last = 0x00E7F, otf="thai", description = "Thai" },
+ ["tibetan"] = { first = 0x00F00, last = 0x00FFF, otf="tibt", description = "Tibetan" },
+ ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, otf="tfng", description = "Tifinagh" },
+ ["transportandmapsymbols"] = { first = 0x1F680, last = 0x1F6FF, description = "Transport And Map Symbols" },
+ ["ugaritic"] = { first = 0x10380, last = 0x1039F, otf="ugar", description = "Ugaritic" },
+ ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, otf="cans", description = "Unified Canadian Aboriginal Syllabics" },
+ ["unifiedcanadianaboriginalsyllabicsextended"] = { first = 0x018B0, last = 0x018FF, description = "Unified Canadian Aboriginal Syllabics Extended" },
+ ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" },
+ ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" },
+ ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" },
+ ["vedicextensions"] = { first = 0x01CD0, last = 0x01CFF, description = "Vedic Extensions" },
+ ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" },
+ ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, otf="yi", description = "Yijing Hexagram Symbols" },
+ ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, otf="yi", description = "Yi Radicals" },
+ ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, otf="yi", description = "Yi Syllables" },
}
characters.blocks = blocks
-setmetatableindex(blocks, function(t,k)
+function characters.blockrange(name)
+ local b = blocks[name]
+ if b then
+ return b.first, b.last
+ else
+ return 0, 0
+ end
+end
+
+setmetatableindex(blocks, function(t,k) -- we could use an intermediate table if called often
return k and rawget(t,lower(gsub(k,"[^a-zA-Z]","")))
end)
+local otfscripts = utilities.storage.allocate()
+characters.otfscripts = otfscripts
+
+setmetatableindex(otfscripts,function(t,unicode)
+ for k, v in next, blocks do
+ local first, last = v.first, v.last
+ if unicode >= first and unicode <= last then
+ local script = v.otf or "dflt"
+ for u=first,last do
+ t[u] = script
+ end
+ return script
+ end
+ end
+ -- pretty slow when we're here
+ t[unicode] = "dflt"
+ return "dflt"
+end)
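-- A minimal usage sketch (assuming char-ini.lua and its data table are loaded); 0x0B95 is
-- taken to fall in the "tamil" block listed above (first = 0x00B80, last = 0x00BFF, otf = "taml"):

local tamilfirst, tamillast = characters.blockrange("Tamil") -- 0x0B80, 0x0BFF; lookup lowercases and strips non-letters
local tamilscript = characters.otfscripts[0x0B95] -- "taml"; the whole 0x0B80..0x0BFF range is cached on first access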
+
function characters.getrange(name) -- used in font fallback definitions (name or range)
local range = blocks[name]
if range then
@@ -395,6 +459,8 @@ local is_mark = allocate ( tohash {
"mn", "ms",
} )
+-- to be redone: store checked characters
+
characters.is_character = is_character
characters.is_letter = is_letter
characters.is_command = is_command
@@ -404,7 +470,7 @@ characters.is_mark = is_mark
local mt = { -- yes or no ?
__index = function(t,k)
if type(k) == "number" then
- local c = characters.data[k].category
+ local c = data[k].category
return c and rawget(t,c)
else
-- avoid auto conversion in data.characters lookups
@@ -455,213 +521,47 @@ table we derive a few more.</p>
if not characters.fallbacks then
- -- we could the definition by using a metatable
+ characters.fallbacks = { } -- not that many
- characters.fallbacks = { }
- characters.directions = { }
+ local fallbacks = characters.fallbacks
- local fallbacks = characters.fallbacks
- local directions = characters.directions
-
- for k,v in next, data do
- local specials = v.specials
- if specials and specials[1] == "compat" and specials[2] == 0x0020 and specials[3] then
+ for k, d in next, data do
+ local specials = d.specials
+ if specials and specials[1] == "compat" and specials[2] == 0x0020 then
local s = specials[3]
- fallbacks[k] = s
- fallbacks[s] = k
- end
- directions[k] = v.direction
- end
-
-end
-
-storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such
-storage.register("characters/directions", characters.directions, "characters.directions")
-
---[[ldx--
-<p>The <type>context</type> namespace is used to store methods and data
-which is rather specific to <l n='context'/>.</p>
---ldx]]--
-
---[[ldx--
-<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
-use the table. After all, we have this information available anyway.</p>
---ldx]]--
-
-function characters.makeactive(n,name) -- let ?
- contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
- -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
-end
-
-function tex.uprint(c,n)
- if n then
- -- contextsprint(c,charfromnumber(n))
- contextsprint(c,utfchar(n))
- else
- -- contextsprint(charfromnumber(c))
- contextsprint(utfchar(c))
- end
-end
-
-local temphack = tohash {
- 0x00A0,
- 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D,
- 0x202F,
- 0x205F,
- -- 0xFEFF,
-}
-
-function characters.define(tobelettered, tobeactivated) -- catcodetables
-
- if trace_defining then
- report_defining("defining active character commands")
- end
-
- local activated, a = { }, 0
-
- for u, chr in next, data do -- these will be commands
- local fallback = chr.fallback
- if fallback then
- contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}")
- a = a + 1
- activated[a] = u
- else
- local contextname = chr.contextname
- if contextname then
- local category = chr.category
- if is_character[category] then
- if chr.unicodeslot < 128 then
- if is_letter[category] then
- contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
- else
- contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s
- end
- else
- contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
- end
- elseif is_command[category] then
-if not temphack[u] then
- contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
- a = a + 1
- activated[a] = u
-end
- end
+ if s then
+ fallbacks[k] = s
+ fallbacks[s] = k
end
end
end
- if tobelettered then -- shared
- local saved = tex.catcodetable
- for i=1,#tobelettered do
- tex.catcodetable = tobelettered[i]
- if trace_defining then
- report_defining("defining letters (global, shared)")
- end
- for u, chr in next, data do
- if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then
- texsetcatcode(u,11)
- end
- local range = chr.range
- if range then
- for i=1,range.first,range.last do
- texsetcatcode(i,11)
- end
- end
- end
- texsetcatcode(0x200C,11) -- non-joiner
- texsetcatcode(0x200D,11) -- joiner
- end
- tex.catcodetable = saved
- end
-
- local nofactivated = #tobeactivated
- if tobeactivated and nofactivated > 0 then
- for i=1,nofactivated do
- local u = activated[i]
- if u then
- report_defining("character 0x%05X is active in sets %s (%s)",u,concat(tobeactivated,","),data[u].description)
- end
- end
- local saved = tex.catcodetable
- for i=1,#tobeactivated do
- local vector = tobeactivated[i]
- if trace_defining then
- report_defining("defining %s active characters in vector %s",nofactivated,vector)
- end
- tex.catcodetable = vector
- for i=1,nofactivated do
- local u = activated[i]
- if u then
- texsetcatcode(u,13)
- end
- end
- end
- tex.catcodetable = saved
- end
-
end
---[[ldx--
-<p>Setting the lccodes is also done in a loop over the data table.</p>
---ldx]]--
-
---~ function tex.setsfcode (index,sf) ... end
---~ function tex.setlccode (index,lc,[uc]) ... end -- optional third value, safes call
---~ function tex.setuccode (index,uc,[lc]) ... end
---~ function tex.setcatcode(index,cc) ... end
-
--- we need a function ...
+if storage then
+ storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such
+end
---~ tex.lccode
---~ tex.uccode
---~ tex.sfcode
---~ tex.catcode
+characters.directions = { }
-function characters.setcodes()
- if trace_defining then
- report_defining("defining lc and uc codes")
- end
- for code, chr in next, data do
- local cc = chr.category
- if is_letter[cc] then
- local range = chr.range
- if range then
- for i=range.first,range.last do
- texsetcatcode(i,11) -- letter
- texsetlccode(i,i,i) -- self self
- end
- else
- local lc, uc = chr.lccode, chr.uccode
- if not lc then chr.lccode, lc = code, code end
- if not uc then chr.uccode, uc = code, code end
- texsetcatcode(code,11) -- letter
- if type(lc) == "table" then
- lc = code
- end
- if type(uc) == "table" then
- uc = code
- end
- texsetlccode(code,lc,uc)
- if cc == "lu" then
- texsetsfcode(code,999)
- end
- end
- elseif is_mark[cc] then
- texsetlccode(code,code,code) -- for hyphenation
+setmetatableindex(characters.directions,function(t,k)
+ local d = data[k]
+ if d then
+ local v = d.direction
+ if v then
+ t[k] = v
+ return v
end
end
-end
+ t[k] = false -- maybe 'l'
+ return false
+end)
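-- A small sketch of the lazy table above, assuming the data table stores lowercase Unicode
-- bidi classes (so U+0041 "A" yields "l"); slots without a known direction are cached as false:

local dir = characters.directions[0x0041] -- "l" for a strong left-to-right letter
local unknown = characters.directions[0x10FFFF] -- false, and the miss is remembered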
--[[ldx--
<p>Next comes a whole series of helper methods. These are (will be) part
of the official <l n='api'/>.</p>
--ldx]]--
---[[ldx--
-<p>A couple of convenience methods. Beware, these are slower than directly
-accessing the data table.</p>
---ldx]]--
-
-- we could make them virtual: characters.contextnames[n]
function characters.contextname(n) return data[n].contextname or "" end
@@ -680,35 +580,17 @@ function characters.category(n,verbose)
end
end
--- xml support (moved)
-
-function characters.remapentity(chr,slot)
- contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
-end
-
-characters.activeoffset = 0x10000 -- there will be remapped in that byte range
-
--- xml.entities = xml.entities or { }
+-- -- some day we will make a table .. not that many calls to utfchar
--
--- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml
+-- local utfchar = utf.char
+-- local utfbyte = utf.byte
+-- local utfbytes = { }
+-- local utfchars = { }
--
--- function characters.setmkiventities()
--- local entities = xml.entities
--- entities.lt = "<"
--- entities.amp = "&"
--- entities.gt = ">"
--- end
---
--- function characters.setmkiientities()
--- local entities = xml.entities
--- entities.lt = utfchar(characters.activeoffset + utfbyte("<"))
--- entities.amp = utfchar(characters.activeoffset + utfbyte("&"))
--- entities.gt = utfchar(characters.activeoffset + utfbyte(">"))
--- end
-
--- some day we will make a table
+-- table.setmetatableindex(utfbytes,function(t,k) local v = utfchar(k) t[k] = v return v end)
+-- table.setmetatableindex(utfchars,function(t,k) local v = utfbyte(k) t[k] = v return v end)
-local function utfstring(s)
+local function toutfstring(s)
if type(s) == "table" then
return utfchar(unpack(s)) -- concat { utfchar( unpack(s) ) }
else
@@ -716,7 +598,7 @@ local function utfstring(s)
end
end
-utf.string = utf.string or utfstring
+utf.tostring = toutfstring
local categories = allocate() characters.categories = categories -- lazy table
@@ -737,10 +619,10 @@ local ucchars = allocate() characters.ucchars = ucchars -- lazy table
local shchars = allocate() characters.shchars = shchars -- lazy table
local fschars = allocate() characters.fschars = fschars -- lazy table
-setmetatableindex(lcchars, function(t,u) if u then local c = data[u] c = c and c.lccode c = c and utfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
-setmetatableindex(ucchars, function(t,u) if u then local c = data[u] c = c and c.uccode c = c and utfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
-setmetatableindex(shchars, function(t,u) if u then local c = data[u] c = c and c.shcode c = c and utfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
-setmetatableindex(fschars, function(t,u) if u then local c = data[u] c = c and c.fscode c = c and utfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
+setmetatableindex(lcchars, function(t,u) if u then local c = data[u] c = c and c.lccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
+setmetatableindex(ucchars, function(t,u) if u then local c = data[u] c = c and c.uccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
+setmetatableindex(shchars, function(t,u) if u then local c = data[u] c = c and c.shcode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
+setmetatableindex(fschars, function(t,u) if u then local c = data[u] c = c and c.fscode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end)
local decomposed = allocate() characters.decomposed = decomposed -- lazy table
local specials = allocate() characters.specials = specials -- lazy table
@@ -815,37 +697,52 @@ function characters.unicodechar(asked)
if n then
return n
elseif type(asked) == "string" then
- asked = gsub(asked," ","")
- return descriptions[asked]
+ return descriptions[asked] or descriptions[gsub(asked," ","")]
end
end
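-- A hedged example of the lookup above: the descriptions table may be keyed with or without
-- spaces, so both spellings are tried and either form of the name should resolve:

local slot = characters.unicodechar("LATIN SMALL LETTER A") -- 0x61, provided that description is present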
-function characters.lower(str)
- local new, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- new[n] = lcchars[u]
- end
- return concat(new)
-end
+-- function characters.lower(str)
+-- local new, n = { }, 0
+-- for u in utfvalues(str) do
+-- n = n + 1
+-- new[n] = lcchars[u]
+-- end
+-- return concat(new)
+-- end
+--
+-- function characters.upper(str)
+-- local new, n = { }, 0
+-- for u in utfvalues(str) do
+-- n = n + 1
+-- new[n] = ucchars[u]
+-- end
+-- return concat(new)
+-- end
+--
+-- function characters.shaped(str)
+-- local new, n = { }, 0
+-- for u in utfvalues(str) do
+-- n = n + 1
+-- new[n] = shchars[u]
+-- end
+-- return concat(new)
+-- end
-function characters.upper(str)
- local new, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- new[n] = ucchars[u]
- end
- return concat(new)
-end
+----- tolower = Cs((utf8byte/lcchars)^0)
+----- toupper = Cs((utf8byte/ucchars)^0)
+----- toshape = Cs((utf8byte/shchars)^0)
-function characters.shaped(str)
- local new, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- new[n] = shchars[u]
- end
- return concat(new)
-end
+local tolower = Cs((utf8char/lcchars)^0)
+local toupper = Cs((utf8char/ucchars)^0)
+local toshape = Cs((utf8char/shchars)^0)
+
+patterns.tolower = tolower
+patterns.toupper = toupper
+patterns.toshape = toshape
+
+function characters.lower (str) return lpegmatch(tolower,str) end
+function characters.upper (str) return lpegmatch(toupper,str) end
+function characters.shaped(str) return lpegmatch(toshape,str) end
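-- A standalone sketch of the Cs((utf8char/ucchars)^0) idiom used above: every matched character
-- is pushed through a table lookup and substituted back into the string. The toy map stands in
-- for the lazy ucchars table (which falls back to the character itself); P(1) is the ascii-only
-- simplification of utf8char.

local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

local map = setmetatable(
    { a = "A", b = "B", c = "C" },
    { __index = function(t,k) return k end } -- unknown characters map to themselves
)

local smalltoupper = Cs((P(1)/map)^0)

print(lpeg.match(smalltoupper,"abc-xyz")) -- ABC-xyz
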
function characters.lettered(str,spacing)
local new, n = { }, 0
@@ -875,6 +772,7 @@ function characters.lettered(str,spacing)
end
return concat(new)
end
+
--[[ldx--
<p>Requesting lower and uppercase codes:</p>
--ldx]]--
@@ -882,17 +780,12 @@ end
function characters.uccode(n) return uccodes[n] end -- obsolete
function characters.lccode(n) return lccodes[n] end -- obsolete
-function characters.flush(n,direct)
+function characters.safechar(n)
local c = data[n]
if c and c.contextname then
- c = "\\" .. c.contextname
- else
- c = utfchar(n)
- end
- if direct then
- return c
+ return "\\" .. c.contextname
else
- contextsprint(c)
+ return utfchar(n)
end
end
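-- A small example, assuming the data table assigns some characters a contextname (for
-- instance "eacute" for U+00E9); characters without one come back as the literal utf character:

local safe = characters.safechar(0x00E9) -- "\eacute" if that contextname is defined, otherwise the character itself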
@@ -996,13 +889,13 @@ if not characters.superscripts then
if #specials == 2 then
superscripts[k] = specials[2]
else
- report_defining("ignoring superscript %s %s: %s",ustring(k),utfchar(k),v.description)
+ report_defining("ignoring %s %a, char %c, description %a","superscript",ustring(k),k,v.description)
end
elseif what == "sub" then
if #specials == 2 then
subscripts[k] = specials[2]
else
- report_defining("ignoring subscript %s %s: %s",ustring(k),utfchar(k),v.description)
+ report_defining("ignoring %s %a, char %c, description %a","subscript",ustring(k),k,v.description)
end
end
end
@@ -1011,11 +904,255 @@ if not characters.superscripts then
-- print(table.serialize(superscripts, "superscripts", { hexify = true }))
-- print(table.serialize(subscripts, "subscripts", { hexify = true }))
- storage.register("characters/superscripts", superscripts, "characters.superscripts")
- storage.register("characters/subscripts", subscripts, "characters.subscripts")
+ if storage then
+ storage.register("characters/superscripts", superscripts, "characters.superscripts")
+ storage.register("characters/subscripts", subscripts, "characters.subscripts")
+ end
+
+end
+
+-- for the moment only a few
+
+local tracedchars = utilities.strings.tracers
+
+tracedchars[0x00] = "[signal]"
+tracedchars[0x20] = "[space]"
+
+-- the following code will move to char-tex.lua
+
+-- tex
+
+if not tex or not context or not commands then return characters end
+
+local tex = tex
+local texsetlccode = tex.setlccode
+local texsetuccode = tex.setuccode
+local texsetsfcode = tex.setsfcode
+local texsetcatcode = tex.setcatcode
+
+local contextsprint = context.sprint
+local ctxcatcodes = catcodes.numbers.ctxcatcodes
+
+--[[ldx--
+<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
+use the table. After all, we have this information available anyway.</p>
+--ldx]]--
+
+function commands.makeactive(n,name) --
+ contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
+ -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
+end
+
+function commands.utfchar(c,n)
+ if n then
+ -- contextsprint(c,charfromnumber(n))
+ contextsprint(c,utfchar(n))
+ else
+ -- contextsprint(charfromnumber(c))
+ contextsprint(utfchar(c))
+ end
+end
+function commands.safechar(n)
+ local c = data[n]
+ if c and c.contextname then
+ contextsprint("\\" .. c.contextname) -- context[c.contextname]()
+ else
+ contextsprint(utfchar(n))
+ end
end
--- interface
+tex.uprint = commands.utfchar
+
+local forbidden = tohash { -- at least now
+ 0x00A0,
+ 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D,
+ 0x202F,
+ 0x205F,
+ -- 0xFEFF,
+}
+
+function characters.define(tobelettered, tobeactivated) -- catcodetables
+
+ if trace_defining then
+ report_defining("defining active character commands")
+ end
+
+ local activated, a = { }, 0
+
+ for u, chr in next, data do -- these will be commands
+ local fallback = chr.fallback
+ if fallback then
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}")
+ a = a + 1
+ activated[a] = u
+ else
+ local contextname = chr.contextname
+ if contextname then
+ local category = chr.category
+ if is_character[category] then
+ if chr.unicodeslot < 128 then
+ if is_letter[category] then
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
+ else
+ contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s
+ end
+ else
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
+ end
+ elseif is_command[category] and not forbidden[u] then
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
+ a = a + 1
+ activated[a] = u
+ end
+ end
+ end
+ end
+
+ if tobelettered then -- shared
+ local saved = tex.catcodetable
+ for i=1,#tobelettered do
+ tex.catcodetable = tobelettered[i]
+ if trace_defining then
+ report_defining("defining letters (global, shared)")
+ end
+ for u, chr in next, data do
+ if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then
+ texsetcatcode(u,11)
+ end
+ local range = chr.range
+ if range then
+ for i=range.first,range.last do
+ texsetcatcode(i,11)
+ end
+ end
+ end
+ texsetcatcode(0x200C,11) -- non-joiner
+ texsetcatcode(0x200D,11) -- joiner
+ end
+ tex.catcodetable = saved
+ end
+
+ local nofactivated = #tobeactivated
+ if tobeactivated and nofactivated > 0 then
+ for i=1,nofactivated do
+ local u = activated[i]
+ if u then
+ report_defining("character %U is active in set %a, containing %a",u,data[u].description,tobeactivated)
+ end
+ end
+ local saved = tex.catcodetable
+ for i=1,#tobeactivated do
+ local vector = tobeactivated[i]
+ if trace_defining then
+ report_defining("defining %a active characters in vector %a",nofactivated,vector)
+ end
+ tex.catcodetable = vector
+ for i=1,nofactivated do
+ local u = activated[i]
+ if u then
+ texsetcatcode(u,13)
+ end
+ end
+ end
+ tex.catcodetable = saved
+ end
+
+end
+
+--[[ldx--
+<p>Setting the lccodes is also done in a loop over the data table.</p>
+--ldx]]--
+
+local sfstate = "unset" -- unset, traditional, normal
+
+function characters.setcodes()
+ if trace_defining then
+ report_defining("defining lc and uc codes")
+ end
+ local traditional = sfstate == "traditional" or sfstate == "unset"
+ for code, chr in next, data do
+ local cc = chr.category
+ if is_letter[cc] then
+ local range = chr.range
+ if range then
+ for i=range.first,range.last do
+ texsetcatcode(i,11) -- letter
+ texsetlccode(i,i,i) -- self self
+ end
+ else
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then
+ chr.lccode, lc = code, code
+ elseif type(lc) == "table" then
+ lc = code
+ end
+ if not uc then
+ chr.uccode, uc = code, code
+ elseif type(uc) == "table" then
+ uc = code
+ end
+ texsetcatcode(code,11) -- letter
+ texsetlccode(code,lc,uc)
+ if traditional and cc == "lu" then
+ texsetsfcode(code,999)
+ end
+ end
+ elseif is_mark[cc] then
+ texsetlccode(code,code,code) -- for hyphenation
+ end
+ end
+ if traditional then
+ sfstate = "traditional"
+ end
+end
+
+-- If this is not document-wide and is used a lot, then we need a
+-- cleverer approach (trivial to do, but not for now).
+
+local function setuppersfcodes(v,n)
+ if sfstate ~= "unset" then
+ report_defining("setting uppercase sf codes to %a",n)
+ for code, chr in next, data do
+ if chr.category == "lu" then
+ texsetsfcode(code,n)
+ end
+ end
+ end
+ sfstate = v
+end
+
+directives.register("characters.spaceafteruppercase",function(v)
+ if v == "traditional" then
+ setuppersfcodes(v,999)
+ elseif v == "normal" then
+ setuppersfcodes(v,1000)
+ end
+end)
+
+-- xml
+
+characters.activeoffset = 0x10000 -- entities will be remapped into that range
+
+function commands.remapentity(chr,slot)
+ contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
+end
+
+-- xml.entities = xml.entities or { }
+--
+-- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml
+--
+-- function characters.setmkiventities()
+-- local entities = xml.entities
+-- entities.lt = "<"
+-- entities.amp = "&"
+-- entities.gt = ">"
+-- end
+--
+-- function characters.setmkiientities()
+-- local entities = xml.entities
+-- entities.lt = utfchar(characters.activeoffset + utfbyte("<"))
+-- entities.amp = utfchar(characters.activeoffset + utfbyte("&"))
+-- entities.gt = utfchar(characters.activeoffset + utfbyte(">"))
+-- end
-commands.utfchar = tex.uprint
diff --git a/Master/texmf-dist/tex/context/base/char-ini.mkiv b/Master/texmf-dist/tex/context/base/char-ini.mkiv
index d1d7574c954..0e2b773f09f 100644
--- a/Master/texmf-dist/tex/context/base/char-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/char-ini.mkiv
@@ -39,8 +39,12 @@
% {\relax\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
%
% impossible in math mode so there always fallback (till we have gyre):
+%
+% use \normalUchar when possible .. the next one is nice for documents and it also accepts
+% 0x prefixed numbers
+
+\def\utfchar#1{\ctxcommand{utfchar(\number#1)}}
-\def\utfchar #1{\ctxcommand{utfchar(\number#1)}}
\def\checkedchar {\relax\ifmmode\expandafter\checkedmathchar\else\expandafter\checkedtextchar\fi} % #1#2
\def\checkedmathchar#1#2{#2}
\def\checkedtextchar #1{\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
diff --git a/Master/texmf-dist/tex/context/base/char-map.lua b/Master/texmf-dist/tex/context/base/char-map.lua
index 376ebf343b9..749da5289bd 100644
--- a/Master/texmf-dist/tex/context/base/char-map.lua
+++ b/Master/texmf-dist/tex/context/base/char-map.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['char-map'] = {
comment = "companion to char-ini.mkiv",
author = "Hans Hagen & Arthur Reutenauer",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
-- not yet used
diff --git a/Master/texmf-dist/tex/context/base/char-tex.lua b/Master/texmf-dist/tex/context/base/char-tex.lua
index e6d6c41e0fa..91aa387b959 100644
--- a/Master/texmf-dist/tex/context/base/char-tex.lua
+++ b/Master/texmf-dist/tex/context/base/char-tex.lua
@@ -189,14 +189,13 @@ local convert_accents_strip = Cs((no_l * accents * no_r + accents + P(1))^0)
local convert_commands_strip = Cs((no_l * commands * no_r + commands + P(1))^0)
function characters.tex.toutf(str,strip)
- if find(str,"\\") then -- we can start at the found position
- if strip then
- return lpegmatch(convert_accents_strip,lpegmatch(convert_commands_strip,str))
- else
- return lpegmatch(convert_accents, lpegmatch(convert_commands, str))
- end
+ if not find(str,"\\") then -- we can start at the found position
+ return str
+ elseif strip then
+ return lpegmatch(convert_accents_strip,lpegmatch(convert_commands_strip,str))
+ else
+ return lpegmatch(convert_accents, lpegmatch(convert_commands, str))
end
- return str
end
--~ print(characters.tex.toutf([[\"{e}]]),true)
diff --git a/Master/texmf-dist/tex/context/base/char-utf.lua b/Master/texmf-dist/tex/context/base/char-utf.lua
index a411c2d82a6..d0e40e664a6 100644
--- a/Master/texmf-dist/tex/context/base/char-utf.lua
+++ b/Master/texmf-dist/tex/context/base/char-utf.lua
@@ -19,10 +19,10 @@ in special kinds of output (for instance <l n='pdf'/>).</p>
over a string.</p>
--ldx]]--
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find
-local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
+local utfchar, utfbyte, utfcharacters, utfvalues = utf.char, utf.byte, utf.characters, utf.values
local allocate = utilities.storage.allocate
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local charfromnumber = characters.fromnumber
@@ -76,6 +76,7 @@ local decomposed = allocate {
["ﬖ"] = "վն",
["ﬗ"] = "մխ",
}
+
characters.decomposed = decomposed
local function initialize() -- maybe only 'mn'
@@ -193,6 +194,20 @@ local private = {
utffilters.private = private
+local tohigh = lpeg.replacer(low) -- frozen, only for basic tex
+local tolow = lpeg.replacer(high) -- frozen, only for basic tex
+
+lpegpatterns.utftohigh = tohigh
+lpegpatterns.utftolow = tolow
+
+function utffilters.harden(str)
+ return lpegmatch(tohigh,str)
+end
+
+function utffilters.soften(str)
+ return lpegmatch(tolow,str)
+end
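-- As used above, lpeg.replacer (a ConTeXt helper from l-lpeg.lua) turns a mapping table into a
-- substitution pattern; harden/soften feed it the low/high private-area tables. A toy mapping
-- shows the mechanism:

local toentity = lpeg.replacer { ["&"] = "&amp;", ["<"] = "&lt;" }
print(lpegmatch(toentity,"a < b & c")) -- a &lt; b &amp; c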
+
local function set(ch)
local cb
if type(ch) == "number" then
@@ -201,17 +216,24 @@ local function set(ch)
cb = utfbyte(ch)
end
if cb < 256 then
+ escapes[ch] = "\\" .. ch
low[ch] = utfchar(0x0F0000 + cb)
+ if ch == "%" then
+ ch = "%%" -- nasty, but we need this as in replacements (also in lpeg) % is interpreted
+ end
high[utfchar(0x0F0000 + cb)] = ch
- escapes[ch] = "\\" .. ch
end
end
private.set = set
-function private.escape (str) return gsub(str,"(.)", escapes) end
-function private.replace(str) return utfgsub(str,"(.)", low ) end
-function private.revert (str) return utfgsub(str,"(.)", high ) end
+-- function private.escape (str) return gsub(str,"(.)", escapes) end
+-- function private.replace(str) return utfgsub(str,"(.)", low ) end
+-- function private.revert (str) return utfgsub(str,"(.)", high ) end
+
+private.escape = utf.remapper(escapes)
+private.replace = utf.remapper(low)
+private.revert = utf.remapper(high)
for ch in gmatch(special,".") do set(ch) end
@@ -236,111 +258,119 @@ not collecting tokens is not only faster but also saves garbage collecting.
-- I might use the combined loop at some point for the filter
-- some day.
---~ function utffilters.collapse(str) -- not really tested (we could preallocate a table)
---~ if str and str ~= "" then
---~ local nstr = #str
---~ if nstr > 1 then
---~ if initialize then -- saves a call
---~ initialize()
---~ end
---~ local tokens, t, first, done, n = { }, 0, false, false, 0
---~ for second in utfcharacters(str) do
---~ local dec = decomposed[second]
---~ if dec then
---~ if not done then
---~ if n > 0 then
---~ for s in utfcharacters(str) do
---~ if n == 1 then
---~ break
---~ else
---~ t = t + 1
---~ tokens[t] = s
---~ n = n - 1
---~ end
---~ end
---~ end
---~ done = true
---~ elseif first then
---~ t = t + 1
---~ tokens[t] = first
---~ end
---~ t = t + 1
---~ tokens[t] = dec
---~ first = false
---~ elseif done then
---~ local crs = high[second]
---~ if crs then
---~ if first then
---~ t = t + 1
---~ tokens[t] = first
---~ end
---~ first = crs
---~ else
---~ local cgf = graphemes[first]
---~ if cgf and cgf[second] then
---~ first = cgf[second]
---~ elseif first then
---~ t = t + 1
---~ tokens[t] = first
---~ first = second
---~ else
---~ first = second
---~ end
---~ end
---~ else
---~ local crs = high[second]
---~ if crs then
---~ for s in utfcharacters(str) do
---~ if n == 1 then
---~ break
---~ else
---~ t = t + 1
---~ tokens[t] = s
---~ n = n - 1
---~ end
---~ end
---~ if first then
---~ t = t + 1
---~ tokens[t] = first
---~ end
---~ first = crs
---~ done = true
---~ else
---~ local cgf = graphemes[first]
---~ if cgf and cgf[second] then
---~ for s in utfcharacters(str) do
---~ if n == 1 then
---~ break
---~ else
---~ t = t + 1
---~ tokens[t] = s
---~ n = n - 1
---~ end
---~ end
---~ first = cgf[second]
---~ done = true
---~ else
---~ first = second
---~ n = n + 1
---~ end
---~ end
---~ end
---~ end
---~ if done then
---~ if first then
---~ t = t + 1
---~ tokens[t] = first
---~ end
---~ return concat(tokens) -- seldom called
---~ end
---~ elseif nstr > 0 then
---~ return high[str] or str
---~ end
---~ end
---~ return str
---~ end
-
-function utffilters.collapse(str) -- not really tested (we could preallocate a table)
+-- function utffilters.collapse(str) -- not really tested (we could preallocate a table)
+-- if str and str ~= "" then
+-- local nstr = #str
+-- if nstr > 1 then
+-- if initialize then -- saves a call
+-- initialize()
+-- end
+-- local tokens, t, first, done, n = { }, 0, false, false, 0
+-- for second in utfcharacters(str) do
+-- local dec = decomposed[second]
+-- if dec then
+-- if not done then
+-- if n > 0 then
+-- for s in utfcharacters(str) do
+-- if n == 1 then
+-- break
+-- else
+-- t = t + 1
+-- tokens[t] = s
+-- n = n - 1
+-- end
+-- end
+-- end
+-- done = true
+-- elseif first then
+-- t = t + 1
+-- tokens[t] = first
+-- end
+-- t = t + 1
+-- tokens[t] = dec
+-- first = false
+-- elseif done then
+-- local crs = high[second]
+-- if crs then
+-- if first then
+-- t = t + 1
+-- tokens[t] = first
+-- end
+-- first = crs
+-- else
+-- local cgf = graphemes[first]
+-- if cgf and cgf[second] then
+-- first = cgf[second]
+-- elseif first then
+-- t = t + 1
+-- tokens[t] = first
+-- first = second
+-- else
+-- first = second
+-- end
+-- end
+-- else
+-- local crs = high[second]
+-- if crs then
+-- for s in utfcharacters(str) do
+-- if n == 1 then
+-- break
+-- else
+-- t = t + 1
+-- tokens[t] = s
+-- n = n - 1
+-- end
+-- end
+-- if first then
+-- t = t + 1
+-- tokens[t] = first
+-- end
+-- first = crs
+-- done = true
+-- else
+-- local cgf = graphemes[first]
+-- if cgf and cgf[second] then
+-- for s in utfcharacters(str) do
+-- if n == 1 then
+-- break
+-- else
+-- t = t + 1
+-- tokens[t] = s
+-- n = n - 1
+-- end
+-- end
+-- first = cgf[second]
+-- done = true
+-- else
+-- first = second
+-- n = n + 1
+-- end
+-- end
+-- end
+-- end
+-- if done then
+-- if first then
+-- t = t + 1
+-- tokens[t] = first
+-- end
+-- return concat(tokens) -- seldom called
+-- end
+-- elseif nstr > 0 then
+-- return high[str] or str
+-- end
+-- end
+-- return str
+-- end
+
+local skippable = table.tohash { "mkiv", "mkvi" }
+local filesuffix = file.suffix
+
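-- In a nutshell, using the same helpers: mkiv/mkvi sources are passed through collapse() below
-- untouched, everything else gets the full treatment ("somefile.tex" is just an arbitrary name):

local skipmkiv = skippable[filesuffix("char-ini.mkiv")] -- true: collapse() returns its input unchanged
local skiptex = skippable[filesuffix("somefile.tex")] -- nil: the collapser does its work
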
+-- we could reuse tokens but it's seldom populated anyway
+
+function utffilters.collapse(str,filename) -- not really tested (we could preallocate a table)
+ if skippable[filesuffix(filename)] then
+ return str
+ end
if str and str ~= "" then
local nstr = #str
if nstr > 1 then
@@ -481,6 +511,14 @@ if sequencers then
sequencers.enableaction(textfileactions,"characters.filters.utf.decompose")
end
+ directives.register("filters.utf.collapse", function(v)
+ sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.collapse")
+ end)
+
+ directives.register("filters.utf.decompose", function(v)
+ sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.decompose")
+ end)
+
end
--[[ldx--
diff --git a/Master/texmf-dist/tex/context/base/chem-ini.lua b/Master/texmf-dist/tex/context/base/chem-ini.lua
index 4d47982a220..10db1a1e44e 100644
--- a/Master/texmf-dist/tex/context/base/chem-ini.lua
+++ b/Master/texmf-dist/tex/context/base/chem-ini.lua
@@ -7,73 +7,37 @@ if not modules then modules = { } end modules ['chem-ini'] = {
}
local format = string.format
-local lpegmatch = lpeg.match
-
-local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
local trace_molecules = false trackers.register("chemistry.molecules", function(v) trace_molecules = v end)
local report_chemistry = logs.reporter("chemistry")
-local context = context
+local context = context
+local cpatterns = patterns.context
-chemicals = chemicals or { }
-local chemicals = chemicals
+chemistry = chemistry or { }
+local chemistry = chemistry
--[[
-<p>The next code is an adaptation of code from Wolfgang Schuster
-as posted on the mailing list. This version supports nested
-braces and unbraced integers as scripts. We could consider
-spaces as terminals for them but first let collect a bunch
-of input then.</p>
+<p>The next code started out as adaptation of code from Wolfgang Schuster as
+posted on the mailing list. The current version supports nested braces and
+unbraced integers as scripts.</p>
]]--
--- some lpeg, maybe i'll make an syst-lpg module
-
-local lowercase = R("az")
-local uppercase = R("AZ")
-local backslash = P("\\")
-local csname = backslash * P(1) * (1-backslash)^0
-local plus = P("+") / "\\textplus "
-local minus = P("-") / "\\textminus "
-local digit = R("09")
-local sign = plus + minus
-local cardinal = digit^1
-local integer = sign^0 * cardinal
-
-local leftbrace = P("{")
-local rightbrace = P("}")
-local nobrace = 1 - (leftbrace + rightbrace)
-local nested = P { leftbrace * (csname + sign + nobrace + V(1))^0 * rightbrace }
-local any = P(1)
-
-local subscript = P("_")
-local superscript = P("^")
-local somescript = subscript + superscript
-
-local content = Cs(csname + nested + sign + any)
-
--- could be made more efficient
-
-local lowhigh = Cc("\\lohi{%s}{%s}") * subscript * content * superscript * content / format
-local highlow = Cc("\\hilo{%s}{%s}") * superscript * content * subscript * content / format
-local low = Cc("\\low{%s}") * subscript * content / format
-local high = Cc("\\high{%s}") * superscript * content / format
-local justtext = (1 - somescript)^1
-local parser = Cs((csname + lowhigh + highlow + low + high + sign + any)^0)
-
-chemicals.moleculeparser = parser -- can be used to avoid functioncall
+local moleculeparser = cpatterns.scripted
+chemistry.moleculeparser = moleculeparser
-function chemicals.molecule(str)
- return lpegmatch(parser,str)
+function chemistry.molecule(str)
+ return lpegmatch(moleculeparser,str)
end
function commands.molecule(str)
if trace_molecules then
- local rep = lpegmatch(parser,str)
- report_chemistry("molecule %s => %s",str,rep)
+ local rep = lpegmatch(moleculeparser,str)
+ report_chemistry("molecule %a becomes %a",str,rep)
context(rep)
else
- context(lpegmatch(parser,str))
+ context(lpegmatch(moleculeparser,str))
end
end
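-- A rough illustration, judging from the \low / \high / \lohi commands emitted by the
-- implementation this replaces (the exact output of cpatterns.scripted is defined elsewhere):

local water = chemistry.molecule("H_2O") -- expected to be something like "H\low{2}O"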
diff --git a/Master/texmf-dist/tex/context/base/chem-str.lua b/Master/texmf-dist/tex/context/base/chem-str.lua
index fb325ccea9f..311b70b2d1b 100644
--- a/Master/texmf-dist/tex/context/base/chem-str.lua
+++ b/Master/texmf-dist/tex/context/base/chem-str.lua
@@ -1,168 +1,263 @@
if not modules then modules = { } end modules ['chem-str'] = {
version = 1.001,
comment = "companion to chem-str.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ author = "Hans Hagen and Alan Braslau",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- This module in incomplete and experimental.
-
--- We can push snippets into an mp instance.
-
-local trace_structure = false trackers.register("chemistry.structure", function(v) trace_structure = v end)
-local trace_textstack = false trackers.register("chemistry.textstack", function(v) trace_textstack = v end)
+-- The original \PPCHTEX\ code was written in pure \TEX\, although later we made
+-- the move from \PICTEX\ to \METAPOST\. The current implementation is a mix between
+-- \TEX\, \LUA\ and \METAPOST. Although the first objective is to get a compatible
+-- but better implementation, later versions might provide more.
+--
+-- Well, the later version has arrived as Alan took it upon him to make the code
+-- deviate even further from the original implementation. The original (early \MKII)
+-- variant operated within the boundaries of \PICTEX\ and as such supported \METAPOST\ as
+-- alternative output. As a consequence it still used a stepwise graphic construction
+-- approach. As we used \TEX\ for parsing, the syntax was more rigid than it is now.
+-- This new variant uses a more mathematical and \METAPOST-like approach. In the process
+-- more rendering variants have been added and alignment has been automated. As a result
+-- the current user interface is slightly different from the old one but hopefully users
+-- will like the added value.
+
+-- directive_strictorder: one might set this to off when associated texts are disordered too
+
+local trace_structure = false trackers .register("chemistry.structure", function(v) trace_structure = v end)
+local trace_metapost = false trackers .register("chemistry.metapost", function(v) trace_metapost = v end)
+local trace_boundingbox = false trackers .register("chemistry.boundingbox", function(v) trace_boundingbox = v end)
+local trace_textstack = false trackers .register("chemistry.textstack", function(v) trace_textstack = v end)
+local directive_strictorder = true directives.register("chemistry.strictorder", function(v) directive_strictorder = v end)
+local directive_strictindex = false directives.register("chemistry.strictindex", function(v) directive_strictindex = v end)
local report_chemistry = logs.reporter("chemistry")
local format, gmatch, match, lower, gsub = string.format, string.gmatch, string.match, string.lower, string.gsub
-local concat, insert, remove = table.concat, table.insert, table.remove
-local processor_tostring = typesetters.processors.tostring
-local lpegmatch = lpeg.match
+local concat, insert, remove, unique, sorted = table.concat, table.insert, table.remove, table.unique, table.sorted
+local processor_tostring = typesetters and typesetters.processors.tostring
local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_array_with_repeat = utilities.parsers.settings_to_array_with_repeat
+local formatters = string.formatters
-local P, R, S, C, Cs, Ct, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc
+local lpegmatch = lpeg.match
+local P, R, S, C, Cs, Ct, Cc, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cmt
-local variables = interfaces.variables
-local context = context
+local variables = interfaces and interfaces.variables
+local context = context
+local formatters = string.formatters
-chemicals = chemicals or { }
-local chemicals = chemicals
+local v_default = variables.default
+local v_small = variables.small
+local v_medium = variables.medium
+local v_big = variables.big
+local v_normal = variables.normal
+local v_fit = variables.fit
+local v_on = variables.on
+local v_none = variables.none
-chemicals.instance = "metafun" -- "ppchtex"
-chemicals.format = "metafun"
-chemicals.structures = 0
+local mpnamedcolor = attributes.colors.mpnamedcolor
+local topoints = number.topoints
+local todimen = string.todimen
-local remapper = {
- ["+"] = "p",
- ["-"] = "m",
-}
+chemistry = chemistry or { }
+local chemistry = chemistry
+
+chemistry.instance = "chemistry"
+chemistry.format = "metafun"
+chemistry.structures = 0
local common_keys = {
- b = "line", eb = "line", db = "line", er = "line", dr = "line", br = "line",
- sb = "line", msb = "line", psb = "line",
- r = "line", pr = "line", mr = "line",
- au = "line", ad = "line",
- rb = "line", mrb = "line", prb = "line",
- rd = "line", mrd = "line", prd = "line",
- sr = "line", msr = "line", psr = "line",
- c = "line", cc = "line", cd = "line", ccd = "line",
- rn = "number", rtn = "number", rbn = "number",
- s = "line", ss = "line", pss = "line", mss = "line",
- mid = "fixed", mids = "fixed", midz = "text",
- z = "text", rz = "text", mrz = "text", prz = "text", crz = "text",
- rt = "text", rtt = "text", rbt = "text", zt = "text", zn = "number",
- mov = "transform", rot = "transform", adj = "transform", dir = "transform", sub = "transform",
+ b = "line",
+ r = "line",
+ sb = "line",
+ sr = "line",
+ rd = "line",
+ rh = "line",
+ rb = "line",
+ rbd = "line",
+ cc = "line",
+ ccd = "line",
+ line = "line",
+ dash = "line",
+ arrow = "line",
+ c = "fixed",
+ cd = "fixed",
+ z = "text",
+ zt = "text",
+ zlt = "text",
+ zrt = "text",
+ rz = "text",
+ rt = "text",
+ lrt = "text",
+ rrt = "text",
+ label = "text",
+ zln = "number",
+ zrn = "number",
+ rn = "number",
+ lrn = "number",
+ rrn = "number",
+ zn = "number",
+ number = "number",
+ mov = "transform",
+ mark = "transform",
+ move = "transform",
+ diff = "transform",
+ off = "transform",
+ adj = "transform",
+ sub = "transform",
}
local front_keys = {
- b = "line", bb= "line",
- sb = "line", msb = "line", psb = "line",
- r = "line", pr = "line", mr = "line",
- z = "text", mrz = "text", prz = "text",
+ bb = "line",
+ eb = "line",
+ rr = "line",
+ lr = "line",
+ lsr = "line",
+ rsr = "line",
+ lrd = "line",
+ rrd = "line",
+ lrh = "line",
+ rrh = "line",
+ lrbd = "line",
+ rrbd = "line",
+ lrb = "line",
+ rrb = "line",
+ lrz = "text",
+ rrz = "text",
+ lsub = "transform",
+ rsub = "transform",
}
local one_keys = {
- sb = "line", db = "line", tb = "line",
- ep = "line", es = "line", ed = "line", et = "line",
- sd = "line", ldd = "line", rdd = "line",
- hb = "line", bb = "line", oe = "line", bd = "line", bw = "line",
- z = "text", cz = "text", zt = "text", zn = "number",
- zbt = "text", zbn = "number", ztt = "text", ztn = "number",
- mov = "transform", sub = "transform", dir = "transform", off = "transform",
+ db = "line",
+ tb = "line",
+ bb = "line",
+ dr = "line",
+ hb = "line",
+ bd = "line",
+ bw = "line",
+ oe = "line",
+ sd = "line",
+ rdb = "line",
+ ldb = "line",
+ ldd = "line",
+ rdd = "line",
+ ep = "line",
+ es = "line",
+ ed = "line",
+ et = "line",
+ cz = "text",
+ rot = "transform",
+ dir = "transform",
+ rm = "transform",
+ mir = "transform",
}
-local front_align = {
- mrz = { { "b","b","b","b","b","b" } },
- prz = { { "t","t","t","t","t","t" } },
+local ring_keys = {
+ db = "line",
+ br = "line",
+ lr = "line",
+ rr = "line",
+ lsr = "line",
+ rsr = "line",
+ lrd = "line",
+ rrd = "line",
+ lrb = "line",
+ rrb = "line",
+ lrh = "line",
+ rrh = "line",
+ lrbd = "line",
+ rrbd = "line",
+ dr = "line",
+ eb = "line",
+ er = "line",
+ ed = "line",
+ au = "line",
+ ad = "line",
+ s = "line",
+ ss = "line",
+ mid = "line",
+ mids = "line",
+ midz = "text",
+ lrz = "text",
+ rrz = "text",
+ crz = "text",
+ rot = "transform",
+ mir = "transform",
+ adj = "transform",
+ lsub = "transform",
+ rsub = "transform",
+ rm = "transform",
}
+-- table.setmetatableindex(front_keys,common_keys)
+-- table.setmetatableindex(one_keys,common_keys)
+-- table.setmetatableindex(ring_keys,common_keys)
+
+-- or (faster but not needed here):
+
+front_keys = table.merged(front_keys,common_keys)
+one_keys = table.merged(one_keys,common_keys)
+ring_keys = table.merged(ring_keys,common_keys)
+
local syntax = {
- one = {
- n = 1, max = 8, keys = one_keys,
- align = {
- z = { { "r", "r_b", "b", "l_b", "l", "l_t", "t", "r_t" } },
---~ z = { { "r", "r", "b", "l", "l", "l", "t", "r" } },
- }
- },
- three = {
- n = 3, max = 3, keys = common_keys,
- align = {
- mrz = { { "r","b","l" }, { "b","l","t" }, { "l","t","r" }, { "t","r","b" } },
- rz = { { "r","l_b","l_t" }, { "b","l_t","r_t" }, { "l","r_t","r_b" }, { "t","r_b","l_b" } },
- prz = { { "r","l","t" }, { "b","t","r" }, { "l","r","b" }, { "t","b","l" } },
- }
- },
- four = {
- n = 4, max = 4, keys = common_keys,
- align = {
- mrz = { { "t","r","b","l" }, { "r","b","l","t" }, { "b","l","t","r" }, { "l","t","r","b" } },
- rz = { { "r_t","r_b","l_b","l_t" }, { "r_b","l_b","l_t","r_t" }, { "l_b","l_t","r_t","r_b" }, { "l_t","r_t","r_b","l_b" } },
- prz = { { "r","b","l","t" }, { "b","l","t","r" }, { "l","t","r","b" }, { "t","r","b","l" } },
- }
- },
- five = {
- n = 5, max = 5, keys = common_keys,
- align = {
- mrz = { { "t","r","b","b","l" }, { "r","b","l","l","t" }, { "b","l","t","r","r" }, { "l","t","r","r","b" } },
- rz = { { "r","r","b","l","t" }, { "b","b","l","t","r" }, { "l","l","t","r","b" }, { "t","t","r","b","l" } },
- prz = { { "r","b","l","t","t" }, { "b","l","t","r","r" }, { "l","t","r","b","b" }, { "t","r","b","l","l" } },
- }
- },
- six = {
- n = 6, max = 6, keys = common_keys,
- align = {
- mrz = { { "t","t","r","b","b","l" }, { "r","b","b","l","t","t" }, { "b","b","l","t","t","r" }, { "l","t","t","r","b","b" } },
- rz = { { "r","r","b","l","l","t" }, { "b","b","l","t","t","r" }, { "l","l","t","r","r","b" }, { "t","t","r","b","b","l" } },
- prz = { { "r","b","l","l","t","r" }, { "b","l","t","t","r","b" }, { "l","t","r","r","b","l" }, { "t","r","b","b","l","t" } },
- }
- },
- eight = {
- n = 8, max = 8, keys = common_keys,
- align = { -- todo
- mrz = { { "t","r","r","b","b","l","l","t" }, { "r","b","b","l","l","t","t","r" }, { "b","l","l","t","t","r","r","b" }, { "l","t","t","r","r","b","b","l" } },
- rz = { { "r","r","b","b","l","l","t","t" }, { "b","b","l","l","t","t","r","r" }, { "l","l","t","t","r","r","b","b" }, { "t","t","r","r","b","b","l","l" } },
- prz = { { "r","b","b","l","l","t","t","r" }, { "b","l","l","t","t","r","r","b" }, { "l","t","t","r","r","b","b","l" }, { "t","r","r","b","b","l","l","t" } },
- }
- },
- five_front = {
- n = -5, max = 5, keys = front_keys, align = front_align,
- },
- six_front = {
- n = -6, max = 6, keys = front_keys, align = front_align,
- },
- pb = { direct = 'chem_pb ;' },
- pe = { direct = 'chem_pe ;' },
- save = { direct = 'chem_save ;' },
- restore = { direct = 'chem_restore ;' },
- space = { direct = 'chem_symbol("\\chemicalsymbol[space]") ;' },
- plus = { direct = 'chem_symbol("\\chemicalsymbol[plus]") ;' },
- minus = { direct = 'chem_symbol("\\chemicalsymbol[minus]") ;' },
- gives = { direct = 'chem_symbol("\\chemicalsymbol[gives]{%s}{%s}") ;', arguments = 2 },
- equilibrium = { direct = 'chem_symbol("\\chemicalsymbol[equilibrium]{%s}{%s}") ;', arguments = 2 },
- mesomeric = { direct = 'chem_symbol("\\chemicalsymbol[mesomeric]{%s}{%s}") ;', arguments = 2 },
- opencomplex = { direct = 'chem_symbol("\\chemicalsymbol[opencomplex]") ;' },
- closecomplex = { direct = 'chem_symbol("\\chemicalsymbol[closecomplex]") ;' },
+ carbon = { max = 4, keys = one_keys, },
+ alkyl = { max = 4, keys = one_keys, },
+ newmanstagger = { max = 6, keys = one_keys, },
+ newmaneclipsed = { max = 6, keys = one_keys, },
+ one = { max = 8, keys = one_keys, },
+ three = { max = 3, keys = ring_keys, },
+ four = { max = 4, keys = ring_keys, },
+ five = { max = 5, keys = ring_keys, },
+ six = { max = 6, keys = ring_keys, },
+ seven = { max = 7, keys = ring_keys, },
+ eight = { max = 8, keys = ring_keys, },
+ nine = { max = 9, keys = ring_keys, },
+ fivefront = { max = 5, keys = front_keys, },
+ sixfront = { max = 6, keys = front_keys, },
+ chair = { max = 6, keys = front_keys, },
+ boat = { max = 6, keys = front_keys, },
+ pb = { direct = 'chem_pb;' },
+ pe = { direct = 'chem_pe;' },
+ save = { direct = 'chem_save;' },
+ restore = { direct = 'chem_restore;' },
+ chem = { direct = formatters['chem_symbol("\\chemicaltext{%s}");'], arguments = 1 },
+ space = { direct = 'chem_symbol("\\chemicalsymbol[space]");' },
+ plus = { direct = 'chem_symbol("\\chemicalsymbol[plus]");' },
+ minus = { direct = 'chem_symbol("\\chemicalsymbol[minus]");' },
+ gives = { direct = formatters['chem_symbol("\\chemicalsymbol[gives]{%s}{%s}");'], arguments = 2 },
+ equilibrium = { direct = formatters['chem_symbol("\\chemicalsymbol[equilibrium]{%s}{%s}");'], arguments = 2 },
+ mesomeric = { direct = formatters['chem_symbol("\\chemicalsymbol[mesomeric]{%s}{%s}");'], arguments = 2 },
+ opencomplex = { direct = 'chem_symbol("\\chemicalsymbol[opencomplex]");' },
+ closecomplex = { direct = 'chem_symbol("\\chemicalsymbol[closecomplex]");' },
+ reset = { direct = 'chem_reset;' },
+ mp = { direct = formatters['%s'], arguments = 1 }, -- backdoor MP code - dangerous!
}
-local definitions = { }
+chemistry.definitions = chemistry.definitions or { }
+local definitions = chemistry.definitions
+
+storage.register("chemistry/definitions",definitions,"chemistry.definitions")
-function chemicals.undefine(name)
+function chemistry.undefine(name)
definitions[lower(name)] = nil
end
-function chemicals.define(name,spec,text)
+function chemistry.define(name,spec,text)
name = lower(name)
local dn = definitions[name]
- if not dn then dn = { } definitions[name] = dn end
+ if not dn then
+ dn = { }
+ definitions[name] = dn
+ end
dn[#dn+1] = {
- spec = settings_to_array(lower(spec)),
- text = settings_to_array(text),
+ spec = settings_to_array_with_repeat(spec,true),
+ text = settings_to_array_with_repeat(text,true),
}
end
-local metacode, variant, keys, bonds, max, txt, textsize, rot, pstack
-local molecule = chemicals.molecule -- or use lpegmatch(chemicals.moleculeparser,...)
+local metacode, variant, keys, max, txt, pstack, sstack, align
+local molecule = chemistry.molecule -- or use lpegmatch(chemistry.moleculeparser,...)
local function fetch(txt)
local st = stack[txt]
@@ -174,305 +269,511 @@ local function fetch(txt)
end
if t then
if trace_textstack then
- report_chemistry("fetching from stack %s slot %s: %s",txt,st.n,t)
+ report_chemistry("fetching from stack %a, slot %a, data %a",txt,st.n,t)
end
st.n = st.n + 1
end
return txt, t
end
-local digit = R("09")/tonumber
+local remapper = {
+ ["+"] = "p",
+ ["-"] = "m",
+}
+
+local dchrs = R("09")
+local sign = S("+-")
+local digit = dchrs / tonumber
+local amount = (sign^-1 * (dchrs^0 * P('.'))^-1 * dchrs^1) / tonumber
+local single = digit
+local range = digit * P("..") * digit
+local set = Ct(digit^2)
local colon = P(":")
local equal = P("=")
local other = 1 - digit - colon - equal
-local remapped = S("+-") / remapper
-local operation = Cs((remapped^0 * other)^1)
-local amount = digit
-local single = digit
+local remapped = sign / remapper
+local operation = Cs(other^1)
local special = (colon * C(other^1)) + Cc("")
-local range = digit * P("..") * digit
-local set = Ct(digit^2)
local text = (equal * C(P(1)^0)) + Cc(false)
local pattern =
- (amount + Cc(1)) *
- operation *
- special * (
+ (amount + Cc(1))
+ * (remapped + Cc(""))
+ * Cs(operation/lower)
+ * Cs(special/lower) * (
range * Cc(false) * text +
Cc(false) * Cc(false) * set * text +
single * Cc(false) * Cc(false) * text +
Cc(false) * Cc(false) * Cc(false) * text
)
---~ local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357")
-
---~ print(lpegmatch(pattern,"RZ=x")) 1 RZ false false false x
---~ print(lpegmatch(pattern,"RZ1=x")) 1 RZ 1 false false x
---~ print(lpegmatch(pattern,"RZ1..3=x")) 1 RZ 1 3 false x
---~ print(lpegmatch(pattern,"RZ13=x")) 1 RZ false false table x
-
-local function process(spec,text,n,rulethickness,rulecolor,offset)
- insert(stack,{ spec=spec, text=text, n=n })
+-- local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357")
+
+-- print(lpegmatch(pattern,"RZ=x")) -- 1 RZ false false false x
+-- print(lpegmatch(pattern,"RZ1=x")) -- 1 RZ 1 false false x
+-- print(lpegmatch(pattern,"RZ1..3=x")) -- 1 RZ 1 3 false x
+-- print(lpegmatch(pattern,"RZ13=x")) -- 1 RZ false false table x
+
+local f_initialize = 'if unknown context_chem : input mp-chem.mpiv ; fi ;'
+local f_start_structure = formatters['chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);']
+local f_set_trace_bounds = formatters['chem_trace_boundingbox := %l ;']
+local f_stop_structure = 'chem_stop_structure;'
+local f_start_component = 'chem_start_component;'
+local f_stop_component = 'chem_stop_component;'
+local f_line = formatters['chem_%s%s(%s,%s,%s,%s,%s);']
+local f_set = formatters['chem_set(%s);']
+local f_number = formatters['chem_%s%s(%s,%s,"\\chemicaltext{%s}");']
+local f_text = f_number
+local f_empty_normal = formatters['chem_%s(%s,%s,"");']
+local f_empty_center = formatters['chem_c%s(%s,%s,"");']
+local f_transform = formatters['chem_%s(%s,%s,%s);']
+
+local prepareMPvariable = commands and commands.prepareMPvariable
+
+local function process(level,spec,text,n,rulethickness,rulecolor,offset,default_variant)
+ insert(stack,{ spec = spec, text = text, n = n })
local txt = #stack
local m = #metacode
+ local saved_rulethickness = rulethickness
+ local saved_rulecolor = rulecolor
+ local saved_align = align
+ local current_variant = default_variant or "six"
for i=1,#spec do
- local s = spec[i]
- local d = definitions[s]
+ local step = spec[i]
+ local s = lower(step)
+ local n = current_variant .. ":" .. s
+ local d = definitions[n]
+ if not d then
+ n = s
+ d = definitions[n]
+ end
if d then
+ if trace_structure then
+ report_chemistry("level %a, step %a, definition %a, snippets %a",level,step,n,#d)
+ end
for i=1,#d do
local di = d[i]
- process(di.spec,di.text,1,rulethickness,rulecolor)
+ current_variant = process(level+1,di.spec,di.text,1,rulethickness,rulecolor,offset,current_variant) -- offset?
end
else
- local rep, operation, special, index, upto, set, text = lpegmatch(pattern,s)
- if operation == "pb" then
+ local factor, osign, operation, special, index, upto, set, text = lpegmatch(pattern,step)
+ if trace_structure then
+ local set = set and concat(set," ") or "-"
+ report_chemistry("level %a, step %a, factor %a, osign %a, operation %a, special %a, index %a, upto %a, set %a, text %a",
+ level,step,factor,osign,operation,special,index,upto,set,text)
+ end
+ if operation == "rulecolor" then
+ local t = text
+ if not t then
+ txt, t = fetch(txt)
+ end
+ if t == v_default or t == v_normal or t == "" then
+ rulecolor = saved_rulecolor
+ elseif t then
+ rulecolor = mpnamedcolor(t)
+ end
+ elseif operation == "rulethickness" then
+ local t = text
+ if not t then
+ txt, t = fetch(txt)
+ end
+ if t == v_default or t == v_normal or t == v_medium or t == "" then
+ rulethickness = saved_rulethickness
+ elseif t == v_small then
+ rulethickness = topoints(1/1.2 * todimen(saved_rulethickness))
+ elseif t == v_big then
+ rulethickness = topoints(1.2 * todimen(saved_rulethickness))
+ elseif t then
+ -- rulethickness = topoints(todimen(t)) -- mp can't handle sp
+ rulethickness = topoints(tonumber(t) * todimen(saved_rulethickness))
+ end
+ elseif operation == "symalign" then
+ local t = text
+ if not t then
+ txt, t = fetch(txt)
+ end
+ if t == v_default or t == v_normal then
+ align = saved_align
+ elseif t and t ~= "" then
+ align = "." .. t
+ end
+ elseif operation == "pb" then
insert(pstack,variant)
m = m + 1 ; metacode[m] = syntax.pb.direct
if keys[special] == "text" and index then
if keys["c"..special] == "text" then -- can be option: auto ...
- m = m + 1 ; metacode[m] = format('chem_c%s(%s,%s,"");',special,bonds,index)
+ m = m + 1 ; metacode[m] = f_empty_center(special,variant,index)
else
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"");',special,bonds,index)
+ m = m + 1 ; metacode[m] = f_empty_normal(special,variant,index)
end
end
+ elseif operation == "pe" then
+ variant = remove(pstack)
+ local ss = syntax[variant]
+ keys, max = ss.keys, ss.max
+ m = m + 1 ; metacode[m] = syntax.pe.direct
+ m = m + 1 ; metacode[m] = f_set(variant)
+ current_variant = variant
elseif operation == "save" then
- insert(pstack,variant)
+ insert(sstack,variant)
m = m + 1 ; metacode[m] = syntax.save.direct
- elseif operation == "pe" or operation == "restore" then
- variant = remove(pstack)
+ elseif operation == "restore" then
+ variant = remove(sstack)
local ss = syntax[variant]
- local prev = bonds or 6
- keys, bonds, max, rot = ss.keys, ss.n, ss.max, 1
- m = m + 1 ; metacode[m] = syntax[operation].direct
- m = m + 1 ; metacode[m] = format("chem_set(%s,%s) ;",prev,bonds)
- elseif operation == "front" then
- if syntax[variant .. "_front"] then
- variant = variant .. "_front"
- local ss = syntax[variant]
- local prev = bonds or 6
- keys, bonds, max, rot = ss.keys, ss.n, ss.max, 1
- m = m + 1 ; metacode[m] = format("chem_set(%s,%s) ;",prev,bonds)
- end
+ keys, max = ss.keys, ss.max
+ m = m + 1 ; metacode[m] = syntax.restore.direct
+ m = m + 1 ; metacode[m] = f_set(variant)
+ current_variant = variant
elseif operation then
local ss = syntax[operation]
+ local what = keys[operation]
+ local ns = 0
+ if set then
+ local sv = syntax[current_variant]
+ local ms = sv and sv.max
+ set = unique(set)
+ ns = #set
+ if directive_strictorder then
+ if what == "line" then
+ set = sorted(set)
+ end
+ if directive_strictindex and ms then
+ for i=ns,1,-1 do
+ local si = set[i]
+ if si > ms then
+ report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si)
+ set[i] = nil
+ ns = ns - 1
+ else
+ break
+ end
+ end
+ end
+ else
+ if directive_strictindex and ms then
+ local t, nt = { }, 0
+ for i=1,ns do
+ local si = set[i]
+ if si > ms then
+ report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si)
+ set[i] = nil
+ else
+ nt = nt + 1
+ t[nt] = si
+ end
+ end
+ ns = nt
+ set = t
+ end
+ end
+ end
if ss then
local ds = ss.direct
if ds then
local sa = ss.arguments
if sa == 1 then
local one ; txt, one = fetch(txt)
- m = m + 1 ; metacode[m] = format(ds,one or "")
- elseif sa ==2 then
+ m = m + 1 ; metacode[m] = ds(one or "")
+ elseif sa == 2 then
local one ; txt, one = fetch(txt)
local two ; txt, two = fetch(txt)
- m = m + 1 ; metacode[m] = format(ds,one or "",two or "")
+ m = m + 1 ; metacode[m] = ds(one or "",two or "")
else
m = m + 1 ; metacode[m] = ds
end
elseif ss.keys then
- local prev = bonds or 6
- variant, keys, bonds, max, rot = s, ss.keys, ss.n, ss.max, 1
- m = m + 1 ; metacode[m] = format("chem_set(%s,%s) ;",prev,bonds)
+ variant, keys, max = s, ss.keys, ss.max
+ m = m + 1 ; metacode[m] = f_set(variant)
+ current_variant = variant
end
- else
- local what = keys[operation]
- if what == "line" then
- if set then
- for i=1,#set do
- local si = set[i]
- m = m + 1 ; metacode[m] = format("chem_%s(%s,%s,%s,%s,%s);",operation,bonds,si,si,rulethickness,rulecolor)
+ elseif what == "line" then
+ local s = osign
+ if s ~= "" then
+ s = "." .. s
+ end
+ if set then
+ -- condense consecutive numbers in a set to a range
+ local sf, st = set[1]
+ for i=1,ns do
+ if i > 1 and set[i] ~= set[i-1]+1 then
+ m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor)
+ sf = set[i]
end
- elseif upto then
- m = m + 1 ; metacode[m] = format("chem_%s(%s,%s,%s,%s,%s);",operation,bonds,index,upto,rulethickness,rulecolor)
- elseif index then
- m = m + 1 ; metacode[m] = format("chem_%s(%s,%s,%s,%s,%s);",operation,bonds,index,index,rulethickness,rulecolor)
- else
- m = m + 1 ; metacode[m] = format("chem_%s(%s,%s,%s,%s,%s);",operation,bonds,1,max,rulethickness,rulecolor)
+ st = set[i]
end
- elseif what == "number" then
- if set then
- for i=1,#set do
- local si = set[i]
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"\\chemicaltext{%s}");',operation,bonds,si,si)
- end
- elseif upto then
- for i=index,upto do
- local si = set[i]
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"\\chemicaltext{%s}");',operation,bonds,si,si)
- end
- elseif index then
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"\\chemicaltext{%s}");',operation,bonds,index,index)
- else
- for i=1,max do
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"\\chemicaltext{%s}");',operation,bonds,i,i)
- end
+ m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor)
+ elseif upto then
+ m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,upto,rulethickness,rulecolor)
+ elseif index then
+ m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,index,rulethickness,rulecolor)
+ else
+ m = m + 1 ; metacode[m] = f_line(operation,s,variant,1,max,rulethickness,rulecolor)
+ end
+ elseif what == "number" then
+ if set then
+ for i=1,ns do
+ local si = set[i]
+ m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si)
end
- elseif what == "text" then
- local align = syntax[variant].align
- align = align and align[operation]
- align = align and align[rot]
- if set then
- for i=1,#set do
- local si = set[i]
- local t = text
- if not t then txt, t = fetch(txt) end
- if t then
- local a = align and align[si]
- if a then a = "." .. a else a = "" end
- t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\chemicaltext{%s}");',operation,a,bonds,si,t)
- end
- end
- elseif upto then
- for i=index,upto do
- local t = text
- if not t then txt, t = fetch(txt) end
- if t then
- local s = align and align[i]
- if s then s = "." .. s else s = "" end
- t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\chemicaltext{%s}");',operation,s,bonds,i,t)
- end
- end
- elseif index == 0 then
+ elseif upto then
+ for i=index,upto do
+ local si = set[i]
+ m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si)
+ end
+ elseif index then
+ m = m + 1 ; metacode[m] = f_number(operation,align,variant,index,index)
+ else
+ for i=1,max do
+ m = m + 1 ; metacode[m] = f_number(operation,align,variant,i,i)
+ end
+ end
+ elseif what == "text" then
+ if set then
+ for i=1,ns do
+ local si = set[i]
local t = text
if not t then txt, t = fetch(txt) end
if t then
t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s_zero("\\chemicaltext{%s}");',operation,t)
+ m = m + 1 ; metacode[m] = f_text(operation,align,variant,si,t)
end
- elseif index then
+ end
+ elseif upto then
+ for i=index,upto do
local t = text
if not t then txt, t = fetch(txt) end
if t then
- local s = align and align[index]
- if s then s = "." .. s else s = "" end
t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\chemicaltext{%s}");',operation,s,bonds,index,t)
- end
- else
- for i=1,max do
- local t = text
- if not t then txt, t = fetch(txt) end
- if t then
- local s = align and align[i]
- if s then s = "." .. s else s = "" end
- t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\chemicaltext{%s}");',operation,s,bonds,i,t)
- end
+ m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t)
end
end
- elseif what == "transform" then
- if index then
- for r=1,rep do
- m = m + 1 ; metacode[m] = format('chem_%s(%s,%s);',operation,bonds,index)
- end
- if operation == "rot" then
- rot = index
+ elseif index == 0 then
+ local t = text
+ if not t then txt, t = fetch(txt) end
+ if t then
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t)
+ end
+ elseif index then
+ local t = text
+ if not t then txt, t = fetch(txt) end
+ if t then
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t)
+ end
+ else
+ for i=1,max do
+ local t = text
+ if not t then txt, t = fetch(txt) end
+ if t then
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t)
end
end
- elseif what == "fixed" then
- m = m + 1 ; metacode[m] = format("chem_%s(%s,%s,%s);",operation,bonds,rulethickness,rulecolor)
end
+ elseif what == "transform" then
+ if osign == "m" then
+ factor = -factor
+ end
+ if set then
+ for i=1,ns do
+ local si = set[i]
+ m = m + 1 ; metacode[m] = f_transform(operation,variant,si,factor)
+ end
+ elseif upto then
+ for i=index,upto do
+ m = m + 1 ; metacode[m] = f_transform(operation,variant,i,factor)
+ end
+ else
+ m = m + 1 ; metacode[m] = f_transform(operation,variant,index or 1,factor)
+ end
+ elseif what == "fixed" then
+ m = m + 1 ; metacode[m] = f_transform(operation,variant,rulethickness,rulecolor)
+ elseif trace_structure then
+ report_chemistry("level %a, ignoring undefined operation %s",level,operation)
end
end
end
end
remove(stack)
+ return current_variant
end
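
The "line" branch above condenses consecutive indices in a set into ranges before emitting the chem_ calls, so a run of adjacent bonds becomes a single drawing call instead of one call per index. A minimal standalone sketch of that idea (the helper name and the returned table shape are illustrative, not part of chem-str.lua):

    -- Turn a sorted list of indices into (from,to) runs of consecutive values,
    -- mirroring the "condense consecutive numbers in a set to a range" loop above.
    local function condense(set)
        local runs = { }
        local from, to = set[1], set[1]
        for i = 2, #set do
            if set[i] ~= set[i-1] + 1 then
                runs[#runs+1] = { from, to }   -- close the current run
                from = set[i]
            end
            to = set[i]
        end
        if from then
            runs[#runs+1] = { from, to }       -- close the last run
        end
        return runs
    end

    -- condense { 1, 2, 3, 5, 7, 8 }  -->  { {1,3}, {5,5}, {7,8} }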
-- the size related values are somewhat special but we want to be
-- compatible
--
--- maybe we should default to fit
---
-- rulethickness in points
-function chemicals.start(settings)
- chemicals.structures = chemicals.structures + 1
- local textsize, rulethickness, rulecolor = settings.size, settings.rulethickness, settings.rulecolor
- local width, height, scale, offset = settings.width or 0, settings.height or 0, settings.scale or "medium", settings.offset or 0
- local l, r, t, b = settings.left or 0, settings.right or 0, settings.top or 0, settings.bottom or 0
- --
- metacode = { "if unknown context_chem : input mp-chem.mpiv ; fi ;" } -- no format anyway
- --
- if scale == variables.small then
- scale = 500
- elseif scale == variables.medium or scale == 0 then
- scale = 625
- elseif scale == variables.big then
- scale = 750
+local function checked(d,factor,unit,scale)
+ if d == v_none then
+ return 0
+ end
+ local n = tonumber(d)
+ if not n then
+ -- assume dimen
+ elseif n >= 10 or n <= -10 then
+ return factor * unit * n / 1000
else
- scale = tonumber(scale)
- if not scale or scale == 0 then
- scale = 750
- elseif scale < 10 then
- scale = 10
- end
+ return factor * unit * n
+ end
+ local n = todimen(d)
+ if n then
+ return scale * n
+ else
+ return v_fit
end
- if width == variables.fit then
- width = true
+end
+
+local function calculated(height,bottom,top,factor,unit,scale)
+ if height == v_none then
+ -- this always wins
+ height = "0pt"
+ bottom = "0pt"
+ top = "0pt"
+ elseif height == v_fit then
+ height = "true"
+ bottom = bottom == v_fit and "true" or topoints(checked(bottom,factor,unit,scale))
+ top = top == v_fit and "true" or topoints(checked(top, factor,unit,scale))
else
- width = tonumber(width) or 0
- if l == 0 then
- if r == 0 then
- l = (width == 0 and 2000) or width/2
- r = l
- elseif width ~= 0 then
- l = width - r
+ height = checked(height,factor,unit,scale)
+ if bottom == v_fit then
+ if top == v_fit then
+ bottom = height / 2
+ top = bottom
+ else
+ top = checked(top,factor,unit,scale)
+ bottom = height - top
end
- elseif r == 0 and width ~= 0 then
- r = width - l
+ elseif top == v_fit then
+ bottom = checked(bottom,factor,unit,scale)
+ top = height - bottom
+ else
+ bottom = checked(bottom,factor,unit,scale)
+ top = checked(top, factor,unit,scale)
+ local ratio = height / (bottom+top)
+ bottom = bottom * ratio
+ top = top * ratio
end
- width = false
+ top = topoints(top)
+ bottom = topoints(bottom)
+ height = topoints(height)
end
- if height == variables.fit then
- height = true
+ return height, bottom, top
+end
+
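
The new checked() and calculated() helpers above resolve the width/height/left/right/top/bottom settings: plain numbers are multiples of factor*unit (values with an absolute value of 10 or more are additionally divided by 1000), dimension strings go through todimen and are multiplied by scale, and anything else falls back to v_fit. When a total height plus explicit top and bottom are given, calculated() rescales the two parts so they add up to that total; a numbers-only sketch of that last step (hypothetical helper, no dimension handling):

    -- Numbers-only sketch of the branch in calculated() where height, bottom
    -- and top are all explicit: bottom and top keep their ratio but are forced
    -- to sum to the requested height.
    local function split(height, bottom, top)
        local ratio = height / (bottom + top)
        return bottom * ratio, top * ratio
    end

    print(split(10, 2, 3))  -- prints 4 and 6 (as floats in newer Lua versions)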
+function chemistry.start(settings)
+ chemistry.structures = chemistry.structures + 1
+ local unit = settings.unit or 655360
+ local factor = settings.factor or 3
+ local rulethickness = settings.rulethickness or 65536
+ local rulecolor = settings.rulecolor or ""
+ local axiscolor = settings.framecolor or ""
+ local width = settings.width or v_fit
+ local height = settings.height or v_fit
+ local scale = settings.scale or "normal"
+ local rotation = settings.rotation or 0
+ local offset = settings.offset or 0
+ local left = settings.left or v_fit
+ local right = settings.right or v_fit
+ local top = settings.top or v_fit
+ local bottom = settings.bottom or v_fit
+ --
+ metacode = { }
+ --
+ align = settings.symalign or "auto"
+ if trace_structure then
+ report_chemistry("unit %p, factor %s, symalign %s",unit,factor,align)
+ end
+ if align ~= "" then
+ align = "." .. align
+ end
+ if trace_structure then
+ report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","asked",scale,rotation,width,height,left,right,top,bottom)
+ end
+ if scale == v_small then
+ scale = 1/1.2
+ elseif scale == v_normal or scale == v_medium or scale == 0 then
+ scale = 1
+ elseif scale == v_big then
+ scale = 1.2
else
- height = tonumber(height) or 0
- if t == 0 then
- if b == 0 then
- t = (height == 0 and 2000) or height/2
- b = t
- elseif height ~= 0 then
- t = height - b
- end
- elseif b == 0 and height ~= 0 then
- b = height - t
+ scale = tonumber(scale)
+ if not scale or scale == 0 then
+ scale = 1
+ elseif scale >= 10 then
+ scale = scale / 1000
+ elseif scale < .01 then
+ scale = .01
end
- height = false
end
- scale = 0.75 * scale/625
--
- metacode[#metacode+1] = format("chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ;",
- chemicals.structures,
- l/25, r/25, t/25, b/25, scale,
- tostring(settings.axis == variables.on), tostring(width), tostring(height), tostring(offset)
+ unit = scale * unit
+ --
+ width, left, right = calculated(width, left, right,factor,unit,scale)
+ height, bottom, top = calculated(height,bottom,top, factor,unit,scale)
+ --
+ rotation = tonumber(rotation) or 0
+ --
+ if trace_structure then
+ report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","used",scale,rotation,width,height,left,right,top,bottom)
+ end
+ metacode[#metacode+1] = f_start_structure(
+ chemistry.structures,
+ left, right, top, bottom,
+ rotation, topoints(unit), factor, topoints(offset),
+ tostring(settings.axis == v_on), topoints(rulethickness), tostring(axiscolor)
)
+ metacode[#metacode+1] = f_set_trace_bounds(trace_boundingbox) ;
--
- variant, keys, bonds, stack, rot, pstack = "six", { }, 6, { }, 1, { }
+ variant, keys, stack, pstack, sstack = "one", { }, { }, { }, { }
end
-function chemicals.stop()
- metacode[#metacode+1] = "chem_stop_structure ;"
- --
+function chemistry.stop()
+ metacode[#metacode+1] = f_stop_structure
local mpcode = concat(metacode,"\n")
- if trace_structure then
+ if trace_metapost then
report_chemistry("metapost code:\n%s", mpcode)
end
- metapost.graphic(chemicals.instance,chemicals.format,mpcode)
+ if metapost.instance(chemistry.instance) then
+ f_initialize = nil
+ end
+ metapost.graphic {
+ instance = chemistry.instance,
+ format = chemistry.format,
+ data = mpcode,
+ definitions = f_initialize,
+ }
+ t_initialize = ""
metacode = nil
end
-function chemicals.component(spec,text,settings)
+function chemistry.component(spec,text,settings)
rulethickness, rulecolor, offset = settings.rulethickness, settings.rulecolor
- local spec = settings_to_array(lower(spec))
- local text = settings_to_array(text)
- metacode[#metacode+1] = "chem_start_component ;"
- process(spec,text,1,rulethickness,rulecolor)
- metacode[#metacode+1] = "chem_stop_component ;"
+ local spec = settings_to_array_with_repeat(spec,true) -- no lower?
+ local text = settings_to_array_with_repeat(text,true)
+-- inspect(spec)
+ metacode[#metacode+1] = f_start_component
+ process(1,spec,text,1,rulethickness,rulecolor) -- offset?
+ metacode[#metacode+1] = f_stop_component
end
+statistics.register("chemical formulas", function()
+ if chemistry.structures > 0 then
+ return format("%s chemical structure formulas",chemistry.structures) -- no timing needed, part of metapost
+ end
+end)
+
+-- interfaces
+
+commands.undefinechemical = chemistry.undefine
+commands.definechemical = chemistry.define
+commands.startchemical = chemistry.start
+commands.stopchemical = chemistry.stop
+commands.chemicalcomponent = chemistry.component
+
+-- todo: top / bottom
+-- maybe add "=" for double and "≡" for triple?
+
local inline = {
["single"] = "\\chemicalsinglebond", ["-"] = "\\chemicalsinglebond",
["double"] = "\\chemicaldoublebond", ["--"] = "\\chemicaldoublebond",
@@ -480,28 +781,20 @@ local inline = {
["gives"] = "\\chemicalgives", ["->"] = "\\chemicalgives",
["equilibrium"] = "\\chemicalequilibrium", ["<->"] = "\\chemicalequilibrium",
["mesomeric"] = "\\chemicalmesomeric", ["<>"] = "\\chemicalmesomeric",
- ["plus"] = "\\chemicalsplus", ["+"] = "\\chemicalsplus",
- ["minus"] = "\\chemicalsminus",
- ["space"] = "\\chemicalsspace",
+ ["plus"] = "\\chemicalplus", ["+"] = "\\chemicalplus",
+ ["minus"] = "\\chemicalminus",
+ ["space"] = "\\chemicalspace",
}
--- todo: top / bottom
-
-function chemicals.inline(spec)
- local spec = settings_to_array(spec)
+function commands.inlinechemical(spec)
+ local spec = settings_to_array_with_repeat(spec,true)
for i=1,#spec do
local s = spec[i]
local inl = inline[lower(s)]
if inl then
- context(inl)
+ context(inl) -- could be a fast context.sprint
else
context.chemicalinline(molecule(s))
end
end
end
-
-statistics.register("chemical formulas", function()
- if chemicals.structures > 0 then
- return format("%s chemical structure formulas",chemicals.structures) -- no timing needed, part of metapost
- end
-end)
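
The chem_* MetaPost snippets in this file are now produced by prebuilt formatter functions (f_line, f_set, f_number and friends defined near the top) rather than by ad-hoc string.format calls. These come from string.formatters in ConTeXt's Lua utilities, which turns a template into a reusable function and understands extra directives (the %a, %p and %l used above) that plain string.format lacks. A small usage sketch; it assumes a ConTeXt/LuaTeX session where those utilities are loaded:

    -- Only meaningful inside a ConTeXt Lua run: string.formatters is provided
    -- by its utility libraries, not by stock Lua.
    local formatters = string.formatters

    local f_set = formatters["chem_set(%s);"]   -- built once ...
    print(f_set("six"))                         -- ... reused many times: chem_set(six);
    print(f_set("five"))                        -- chem_set(five);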
diff --git a/Master/texmf-dist/tex/context/base/chem-str.mkiv b/Master/texmf-dist/tex/context/base/chem-str.mkiv
index 1d60a293efc..c28ea21d92a 100644
--- a/Master/texmf-dist/tex/context/base/chem-str.mkiv
+++ b/Master/texmf-dist/tex/context/base/chem-str.mkiv
@@ -2,7 +2,7 @@
%D [ file=chem-ini,
%D version=2009.05.13,
%D subtitle=Chemistry,
-%D author=Hans Hagen,
+%D author=Hans Hagen \& Alan Braslau,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
@@ -10,8 +10,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module in incomplete and experimental. Eventually this code
-%D will replace \PPCHTEX.
+%D The original \PPCHTEX\ code was written in pure \TEX\, although later we made
+%D the move from \PICTEX\ to \METAPOST\. The current implementation is a mix between
+%D \TEX\, \LUA\ and \METAPOST. Although the first objective is to get a compatible
+%D but better implementation, later versions might provide more,
\writestatus{loading}{ConTeXt Chemistry Macros / Structure}
@@ -26,26 +28,24 @@
% Here we use chemicalformula instead, so no longer a mix:
%
% \startchemicalformula
-% \chemical{H_2}{top}{bottom}
+% \chemical{2H_2}{top}{bottom}
% \chemical{PLUS}{top}{bottom}
-% \chemical{O}{top}{bottom}
+% \chemical{O_2}{top}{bottom}
% \chemical{GIVES}{top}{bottom}
-% \chemical{H_2O}{top}{bottom}
+% \chemical{2H_2O}{top}{bottom}
% \stopchemicalformula
%
% \startchemicalformula
-% \chemical{H_2}
+% \chemical{2H_2}
% \chemical{PLUS}
-% \chemical{O}
+% \chemical{O_2}
% \chemical{GIVES}
-% \chemical{H_2O}
+% \chemical{2H_2O}
% \stopchemicalformula
%
% The inline variant has only one argument:
%
-% \chemical{H_2,PLUS,O,GIVES,H_2O}
-
-% todo: seven | eight | frontsix | fontfive | carbon | newmans | chair
+% \chemical{2H_2,PLUS,O_2,GIVES,2H_2O}
\unprotect
@@ -58,11 +58,28 @@
\let\setupchemicals\setupchemical
-\unexpanded\def\setupchemicalframed
- {\dosingleempty\dosetupchemicalframed}
+%D We use a dedicated framed macro instead of inheriting one. This is both
+%D a historical and practical reason (like shared keys with different meaning
+%D that could clash, e.g.\ align).
+
+\defineMPinstance
+ [chemistry]
+ [\s!format=metafun,
+ %\s!extensions=\v!yes, % Should we add extensions and initializations?
+ %\s!initializations=\v!yes, % Would this give EmWidth, etc.?
+ ]
+
+\startMPdefinitions{chemistry}
+ input mp-chem.mpiv ;
+\stopMPdefinitions
+
+\defineframed
+ [\??chemicalframed]
+ [\c!align=\v!normal,
+ \c!strut=\v!no]
-\def\dosetupchemicalframed
- {\getparameters[\??chemicalframed]}
+\unexpanded\def\setupchemicalframed
+ {\setupframed[\??chemicalframed]}
\unexpanded\def\definechemical % is global (so we don't use the commandhandler)
{\dosingleargument\chem_define}
@@ -71,7 +88,7 @@
{\startnointerference
\edef\currentdefinedchemical{#1}%
\let\chemical\chem_chemical_nested
- \ctxlua{chemicals.undefine("#1")}%
+ \ctxcommand{undefinechemical("#1")}%
#2% flush
\stopnointerference}
@@ -79,7 +96,7 @@
{\dodoubleempty\chem_chemical_nested_indeed}
\def\chem_chemical_nested_indeed[#1][#2]%
- {\ctxlua{chemicals.define("\currentdefinedchemical",\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es)}}
+ {\ctxcommand{definechemical("\currentdefinedchemical",\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es)}}
% chemical symbols
@@ -90,20 +107,15 @@
{\setvalue{\??chemicalsymbol#1}{#2}}
\unexpanded\def\chemicalsymbol[#1]%
- {\csname\??chemicalsymbol\ifcsname\??chemicalsymbol#1\endcsname\s!unknown\else#1\fi\endcsname}
+ {\csname\??chemicalsymbol\ifcsname\??chemicalsymbol#1\endcsname#1\else\s!unknown\fi\endcsname}
-\definechemicalsymbol[\s!unknown][] % empty
+\definechemicalsymbol[\s!unknown][] % \char"FFFD empty
% size (small medium big)
\edef\chemicaltoplocation{t}
\edef\chemicalbotlocation{b}
-% \unexpanded\def\chemicaltext#1% in ppchtex we had a more clever alignment
-% {\usechemicalstyleandcolor\c!style\c!color
-% \strut
-% #1} % maybe also \setstrut
-
\unexpanded\def\chemicaltext#1%
{\mathematics
{\usechemicalstyleandcolor\c!style\c!color
@@ -132,6 +144,7 @@
\def\chem_start[#1][#2]%
{\ifmmode\vcenter\else\vbox\fi
\bgroup
+ \synchronizestrut{\chemicalparameter\c!strut}%
\dontcomplain
\settrue\indisplaychemical
\forgetall
@@ -147,22 +160,28 @@
\fi\fi
\the\everystructurechemical
\setbox\b_chem_result\hbox\bgroup
- \ctxlua{chemicals.start {
- width = "\chemicalparameter\c!width",
- height = "\chemicalparameter\c!height",
- left = \chemicalparameter\c!left,
- right = \chemicalparameter\c!right,
- top = \chemicalparameter\c!top,
- bottom = \chemicalparameter\c!bottom,
- scale = "\chemicalparameter\c!scale",
- axis = "\chemicalparameter\c!axis",
- offset = "\the\dimexpr.25em\relax",
+ \ctxcommand{startchemical {
+ width = "\chemicalparameter\c!width",
+ height = "\chemicalparameter\c!height",
+ left = "\chemicalparameter\c!left",
+ right = "\chemicalparameter\c!right",
+ top = "\chemicalparameter\c!top",
+ bottom = "\chemicalparameter\c!bottom",
+ scale = "\chemicalparameter\c!scale",
+ rotation = "\chemicalparameter\c!rotation",
+ symalign = "\chemicalparameter\c!symalign",
+ axis = "\chemicalparameter\c!axis",
+ framecolor = "\MPcolor{\chemicalparameter\c!framecolor}",
+ rulethickness = \number\dimexpr\chemicalparameter\c!rulethickness\relax,
+ offset = \number\dimexpr\chemicalparameter\c!offset\relax,
+ unit = \number\dimexpr\chemicalparameter\c!unit\relax,
+ factor = \number\chemicalparameter\c!factor,
} }%
\startnointerference}
\unexpanded\def\stopchemical
{\stopnointerference
- \ctxlua{chemicals.stop()}%
+ \ctxcommand{stopchemical()}%
\egroup
\d_chem_width \wd\b_chem_result
\d_chem_height\ht\b_chem_result
@@ -171,15 +190,18 @@
\doifelsenothing{\chemicalparameter\c!frame}\chem_framed_nop\chem_framed_yes
\egroup}
-\def\chem_framed_yes
- {\localframed%
+\unexpanded\def\chem_framed_yes
+ {\localframedwithsettings
[\??chemicalframed]%
- [\c!frame=\chemicalparameter\c!frame,\c!align=\v!normal,\c!strut=\v!no]{\vbox{\box\b_chem_result\vss}}} % remove depth
+ [\c!frame=\chemicalparameter\c!frame,
+ \c!rulethickness=\chemicalparameter\c!rulethickness,
+ \c!framecolor=\chemicalparameter\c!framecolor]%
+ {\vbox{\box\b_chem_result\vss}}} % remove depth
-\def\chem_framed_nop
- {\localframed%
+\unexpanded\def\chem_framed_nop
+ {\directlocalframed
[\??chemicalframed]%
- [\c!align=\v!normal,\c!strut=\v!no]{\vbox{\box\b_chem_result\vss}}} % remove depth
+ {\vbox{\box\b_chem_result\vss}}} % remove depth
\let\startstructurechemical\startchemical
\let\stopstructurechemical \stopchemical
@@ -200,14 +222,14 @@
\def\strc_chem_indeed_three[#1][#2][#3]%
{\writestatus\m!chemicals{hyperlinked chemicals not yet supported}% todo reference, for the moment ignored
- \ctxlua{chemicals.component(\!!bs#2\!!es, \!!bs\detokenize{#3}\!!es, { % maybe also pass first two args this way
+ \ctxcommand{chemicalcomponent(\!!bs#2\!!es, \!!bs\detokenize{#3}\!!es, { % maybe also pass first two args this way
rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
} ) }%
\ignorespaces}
\def\strc_chem_indeed_two[#1][#2]%
- {\ctxlua{chemicals.component(\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es, { % maybe also pass first two args this way
+ {\ctxcommand{chemicalcomponent(\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es, { % maybe also pass first two args this way
rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
} ) }%
@@ -242,7 +264,7 @@
\global\setfalse\c_chem_some_text
\to \everystructurechemical
-\def\doaddchemicaltexts
+\def\chem_add_texts
{\setbox2\hbox to \d_chem_width{\strut\hss\hbox{\strut\m_chem_mid_text}\hss}%
\setbox4\hbox to \d_chem_width{\strut\hss\hbox{\strut\m_chem_top_text}\hss}%
\setbox6\hbox to \d_chem_width{\strut\hss\hbox{\strut\m_chem_bot_text}\hss}%
@@ -259,7 +281,7 @@
\appendtoks
\ifconditional\c_chem_some_text
- \doaddchemicaltexts
+ \chem_add_texts
\d_chem_width \wd\b_chem_result
\d_chem_height\ht\b_chem_result
\d_chem_depth \dp\b_chem_result
@@ -289,15 +311,15 @@
\def\chem_arrow_construct#1#2#3%
{\enspace
\mathematics{#1%
- {\strut\hbox \!!spread 2\emwidth{\hss\ctxlua{chemicals.inline(\!!bs#2\!!es)}\hss}}% {\strut\hbox \!!spread 2em{\hss#2\hss}}%
- {\strut\hbox \!!spread 2\emwidth{\hss\ctxlua{chemicals.inline(\!!bs#3\!!es)}\hss}}}% {\strut\hbox \!!spread 2em{\hss#3\hss}}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#3\!!es)}\hss}}% {\strut\hbox \s!spread 2em{\hss#3\hss}}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#2\!!es)}\hss}}}% {\strut\hbox \s!spread 2em{\hss#2\hss}}%
\enspace}
% special macros (probably needs some more work)
\def\chem_top_construct#1#2#3#4%
{\begingroup
- \setbox0\hbox{\tx\setstrut\strut#3}%
+ \setbox0\hbox{\setstrut\strut#3}%
\setbox2\hbox{\setstrut\strut\molecule{#4}}%
\setbox0\hbox{\raise\dimexpr\dp0+\ht2\relax\hbox to \wd2{#1\box0#2}}%
% no: \smashbox0
@@ -306,7 +328,7 @@
\def\chem_bottom_construct#1#2#3#4%
{\begingroup
- \setbox0\hbox{\tx\setstrut\strut#3}%
+ \setbox0\hbox{\setstrut\strut#3}%
\setbox2\hbox{\setstrut\strut#4}%
\setbox0\hbox{\lower\dimexpr\dp2+\ht0\relax\hbox to \wd2{#1\box0#2}}%
% no: \smashbox0
@@ -315,12 +337,12 @@
\unexpanded\def\chemicalleft#1#2%
{\begingroup
- \hbox{\llap{\tx\setstrut\strut#1}\setstrut\strut#2}%
+ \hbox{\llap{\setstrut\strut#1}\setstrut\strut#2}%
\endgroup}%
\unexpanded\def\chemicalright#1#2%
{\begingroup
- \hbox{\setstrut\strut#2\rlap{\tx\setstrut\strut#1}}%
+ \hbox{\setstrut\strut#2\rlap{\setstrut\strut#1}}%
\endgroup}%
\unexpanded\def\chemicaltop {\chem_top_construct \hss \hss }
@@ -343,13 +365,28 @@
% \let\chemicalsmashedleft \chemicalleftcentered
% \let\chemicalsmashedright \chemicalrightcentered
-\unexpanded\def\chemicalalignedtext#1#2#3%
+\unexpanded\def\chemicalalignedtext
+ {\ifmmode
+ \expandafter\chem_aligned_text_math
+ \else
+ \expandafter\chem_aligned_text_text
+ \fi}
+
+\def\chem_aligned_text_text#1#2#3%
{\dontleavehmode
\begingroup
\usechemicalstyleandcolor\c!style\c!color
\hbox to \fontcharwd\font`C{\setstrut\strut#1\molecule{#3}#2}%
\endgroup}
+\def\chem_aligned_text_math#1#2#3%
+ {\dontleavehmode
+ \begingroup
+ \scratchcounter\normalmathstyle
+ \usechemicalstyleandcolor\c!style\c!color
+ \hbox to \fontcharwd\font`C{\setstrut\strut#1\mathematics{\tf\triggermathstyle\scratchcounter\molecule{#3}}#2}%
+ \endgroup}
+
\unexpanded\def\chemicalcentered {\chemicalalignedtext\hss \hss }
\unexpanded\def\chemicalleftcentered {\chemicalalignedtext\relax\hss }
\unexpanded\def\chemicalrightcentered{\chemicalalignedtext\hss \relax}
@@ -373,7 +410,15 @@
\unexpanded\def\chemicaloxidationsix {\chemicaloxidation\relax6}
\unexpanded\def\chemicaloxidationseven{\chemicaloxidation\relax7}
+\unexpanded\def\chemicalbar
+ {\hbox \s!spread .5\emwidth \bgroup
+ \hss
+ \vrule \s!height .9\strutht \s!depth .65\strutdp \s!width .1\exheight
+ \hss
+ \egroup}
+
\appendtoks
+ \let|\chemicalbar % \SR{N|NH}
\let \+\chemicaloxidationplus
\let \-\chemicaloxidationminus
\let \[\chemicalforeveropen
@@ -405,32 +450,53 @@
\let\SR\chemicalsmashedright
\to \everychemical
+% Should these also be defined in lower case, so as to be case independent?
+
\appendtoks
\the\everychemical
\to \everystructurechemical
% inline
+% \unexpanded\def\chemical
+% {\ifinformula
+% \expandafter\displaychemical
+% \else
+% \expandafter\inlinechemical
+% \fi}
+
\unexpanded\def\chemical
{\ifinformula
- \expandafter\displaychemical
+ \expandafter\indisplaychemical
\else
\expandafter\inlinechemical
\fi}
+\unexpanded\def\indisplaychemical
+ {\mathstylecommand\displaychemical\inlinechemical\inlinechemical}
+
+\unexpanded\def\inlinechemical#1%
+ {\dontleavehmode
+ \begingroup
+ \scratchcounter\normalmathstyle
+ \usechemicalstyleandcolor\c!style\c!color
+ \hbox{\mathematics{\tf\triggermathstyle\scratchcounter\ctxcommand{inlinechemical(\!!bs#1\!!es)}}}%
+ \endgroup}
+
\unexpanded\def\displaychemical
{\dotriplegroupempty\chem_display}
-\def\chem_display#1#2#3% todo:
- {\the\everychemical \everychemical\emptytoks
+\def\chem_display#1#2#3%
+ {\the\everychemical
+ \everychemical\emptytoks
\quad
\vcenter\bgroup
\usechemicalstyleandcolor\c!style\c!color
\ifthirdargument
\ifsecondargument
- \halign{&\hss##\hss\cr#2\cr\molecule{#1}\cr#3\cr}%
+ \halign{\aligntab\hss\alignmark\alignmark\hss\cr#2\cr\molecule{#1}\cr#3\cr}%
\else
- \halign{&\hss##\hss\cr\molecule{#1}\cr#2\cr}%
+ \halign{\aligntab\hss\alignmark\alignmark\hss \cr\molecule{#1}\cr#2\cr}%
\fi
\else
\hbox{\molecule{#1}}%
@@ -440,30 +506,30 @@
\unexpanded\def\inlinechemical#1%
{\dontleavehmode
- \hbox{\usechemicalstyleandcolor\c!style\c!color\ctxlua{chemicals.inline(\!!bs#1\!!es)}}}
+ \hbox{\usechemicalstyleandcolor\c!style\c!color\ctxcommand{inlinechemical(\!!bs#1\!!es)}}}
\unexpanded\def\chemicalbondrule
- {\hbox{\vrule\!!height.75ex\!!depth-\dimexpr.75ex-\linewidth\relax\!!width1em\relax}}
+ {\hbox{\vrule\s!height.75\exheight\s!depth-\dimexpr.75\exheight-\linewidth\relax\s!width\emwidth\relax}}
\definechemicalsymbol[i:space] [\enspace\quad\enspace]
\definechemicalsymbol[i:plus] [\enspace\mathematics{+}\enspace]
\definechemicalsymbol[i:minus] [\enspace\mathematics{-}\enspace]
\definechemicalsymbol[i:gives] [\enspace\mathematics{\xrightarrow{}{}}\enspace]
-\definechemicalsymbol[i:equilibrium] [\enspace\mathematics{\xrightpverleftarrow{}{}}\enspace]
+\definechemicalsymbol[i:equilibrium] [\enspace\mathematics{\xrightoverleftarrow{}{}}\enspace]
\definechemicalsymbol[i:mesomeric] [\enspace\mathematics{\xleftrightarrow{}{}}\enspace]
\definechemicalsymbol[i:single] [\chemicalbondrule]
-\definechemicalsymbol[i:tripple] [\hbox{\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
-\definechemicalsymbol[i:double] [\hbox{\chemicalbondrule\hskip-1em\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
+\definechemicalsymbol[i:double] [\hbox{\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
+\definechemicalsymbol[i:triple] [\hbox{\chemicalbondrule\hskip-1em\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
\unexpanded\def\chemicalsinglebond {\chemicalsymbol[i:single]}
-\unexpanded\def\chemicaldoublebond {\chemicalsymbol[i:tripple]}
-\unexpanded\def\chemicaltriplebond {\chemicalsymbol[i:double]}
+\unexpanded\def\chemicaldoublebond {\chemicalsymbol[i:double]}
+\unexpanded\def\chemicaltriplebond {\chemicalsymbol[i:triple]}
\unexpanded\def\chemicalgives {\chemicalsymbol[i:gives]}
\unexpanded\def\chemicalmesomeric {\chemicalsymbol[i:mesomeric]}
\unexpanded\def\chemicalequilibrium{\chemicalsymbol[i:equilibrium]}
-\unexpanded\def\chemicalsplus {\chemicalsymbol[i:plus]}
-\unexpanded\def\chemicalsminus {\chemicalsymbol[i:minus]}
-\unexpanded\def\chemicalsspace {\chemicalsymbol[i:space]}
+\unexpanded\def\chemicalplus {\chemicalsymbol[i:plus]}
+\unexpanded\def\chemicalminus {\chemicalsymbol[i:minus]}
+\unexpanded\def\chemicalspace {\chemicalsymbol[i:space]}
\unexpanded\def\chemicalinline #1{#1}
% display
@@ -491,80 +557,162 @@
\setfalse\c_chem_has_bot}
\unexpanded\def\stopchemicalformula
- {\tabskip1em\relax
+ {\tabskip\emwidth\relax
\nointerlineskip
\ifconditional\c_chem_has_top
\ifconditional\c_chem_has_bot
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
\else
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr}%
\fi
\else
\ifconditional\c_chem_has_bot
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
\else
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_mid\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_mid\cr}%
\fi
\fi
\egroup}
+% for the moment we have a special set
+
+\definechemicalsymbol[d:space] [\enspace\quad\enspace]
+\definechemicalsymbol[d:plus] [\enspace+\enspace]
+\definechemicalsymbol[d:minus] [\enspace-\enspace]
+\definechemicalsymbol[d:gives] [\rightarrowfill] % \chem_arrow_construct\xrightarrow
+\definechemicalsymbol[d:equilibrium] [\rightoverleftarrowfill] % \chem_arrow_construct\xrightoverleftarrow
+\definechemicalsymbol[d:mesomeric] [\leftarrowfill] % \chem_arrow_construct\xleftrightarrow
+\definechemicalsymbol[d:opencomplex] [\mathematics{\Bigg[}] % not yet ok
+\definechemicalsymbol[d:closecomplex][\mathematics{\Bigg]}] % not yet ok
+
+\definechemicalsymbol[d:SPACE] [{\chemicalsymbol[d:space]}]
+\definechemicalsymbol[d:PLUS] [{\chemicalsymbol[d:plus]}]
+\definechemicalsymbol[d:MINUS] [{\chemicalsymbol[d:minus]}]
+\definechemicalsymbol[d:GIVES] [{\chemicalsymbol[d:gives]}]
+\definechemicalsymbol[d:EQUILIBRIUM] [{\chemicalsymbol[d:equilibrium]}]
+\definechemicalsymbol[d:MESOMERIC] [{\chemicalsymbol[d:mesomeric]}]
+\definechemicalsymbol[d:OPENCOMPLEX] [{\chemicalsymbol[d:opencomplex]}]
+\definechemicalsymbol[d:CLOSECOMPLEX][{\chemicalsymbol[d:closecomplex]}]
+
\unexpanded\def\formulachemical
{\relax\dotriplegroupempty\chem_formula}
\def\chem_formula#1#2#3% we could do hboxes and measure
- {\ifcsname\??chemicalsymbol\detokenize{#1}\endcsname
- \t_chem_mid\expandafter{\the\t_chem_mid\chem_formula_mid{#1}{#2}{#3}}%
+ {\ifthirdargument
+ \doifelsenothing{#2}\chem_formula_top_nop{\chem_formula_top_yes{#2}}%
+ \doifelsenothing{#3}\chem_formula_bot_nop{\chem_formula_bot_yes{#3}}%
+ \else\ifsecondargument
+ \chem_formula_top_nop
+ \doifelsenothing{#2}\chem_formula_bot_nop{\chem_formula_bot_yes{#2}}%
\else
- \ifthirdargument
- \doifelsenothing{#2}\chem_formula_top_nop{\chem_formula_top_yes{#2}}%
- \doifelsenothing{#3}\chem_formula_bot_nop{\chem_formula_bot_yes{#3}}%
- \else\ifsecondargument
- \chem_formula_top_nop
- \doifelsenothing{#2}\chem_formula_bot_nop{\chem_formula_bot_yes{#2}}%
- \else
- \chem_formula_top_nop
- \chem_formula_bot_nop
- \fi\fi
- \t_chem_mid\expandafter{\the\t_chem_mid\molecule{#1}&}%
+ \chem_formula_top_nop
+ \chem_formula_bot_nop
+ \fi\fi
+ \ifcsname\??chemicalsymbol d:\detokenize{#1}\endcsname
+ \t_chem_mid\expandafter{\the\t_chem_mid\chemicalsymbol[d:#1]\aligntab}%
+ \else
+ \t_chem_mid\expandafter{\the\t_chem_mid\molecule{#1}\aligntab}%
\fi}
\def\chem_formula_mid#1%
{\csname\??chemicalsymbol\detokenize{#1}\endcsname}
-\def\chem_formula_top_nop {\t_chem_top\expandafter{\the\t_chem_top&}}
-\def\chem_formula_bot_nop {\t_chem_bot\expandafter{\the\t_chem_bot&}}
-\def\chem_formula_top_yes#1{\t_chem_top\expandafter{\the\t_chem_top\chem_formula_top_indeed{#1}&}\settrue\c_chem_has_top}
-\def\chem_formula_bot_yes#1{\t_chem_bot\expandafter{\the\t_chem_bot\chem_formula_bot_indeed{#1}&}\settrue\c_chem_has_bot}
+\def\chem_formula_top_nop {\t_chem_top\expandafter{\the\t_chem_top\aligntab}}
+\def\chem_formula_bot_nop {\t_chem_bot\expandafter{\the\t_chem_bot\aligntab}}
+\def\chem_formula_top_yes#1{\t_chem_top\expandafter{\the\t_chem_top\chem_formula_top_indeed{#1}\aligntab}\settrue\c_chem_has_top}
+\def\chem_formula_bot_yes#1{\t_chem_bot\expandafter{\the\t_chem_bot\chem_formula_bot_indeed{#1}\aligntab}\settrue\c_chem_has_bot}
\def\chem_formula_top_indeed#1{\strut#1}
\def\chem_formula_bot_indeed#1{\strut#1}
+% Experimental: defaults might change.
+
+\definefloat
+ [\v!chemical]
+ [\v!chemicals]
+
+\setuplabeltext
+ [\v!chemical=]
+
+\setupfloat
+ [\v!chemical]
+ [\c!location=\v!here,
+ \c!inner=\hsize.8\textwidth\dontleavehmode, % brr
+ \c!align={\v!flushleft,\v!lohi}]
+
+\setupcaption
+ [\v!chemical]
+ [\c!location=\v!right,
+ \c!distance=\zeropoint,
+ \c!width=.2\textwidth,
+ \c!align=\v!flushright]
+
+% Can be used as for displayed math: \startplaceformula... to display a chemical formula
+% or a chemical structure:
+%
+% \startplacechemical
+% \startchemicalformula
+% \chemical{2H_2}
+% \chemical{PLUS}
+% \chemical{O_2}
+% \chemical{GIVES}
+% \chemical{2H_2O}
+% \stopchemicalformula
+% \stopplacechemical
+
% gone: state option resolution offset (now frame offset) alternative
\setupchemicalframed
[\c!align=\v!normal,
\c!strut=\v!no,
\c!offset=\v!overlay,
- \c!frame=off]
+ \c!frame=\v!off]
+
+\definecolor % private color
+ [chemicalframecolor]
+ [r=.75,g=.85,b=.95]
\setupchemical
[\c!frame=,
- \c!width=0,
- \c!height=0,
- \c!left=0,
- \c!right=0,
- \c!top=0,
- \c!bottom=0,
+ \c!width=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!height=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!left=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!right=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!top=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!bottom=\v!fit, % or unitless number, multiplies scale*EmWidth
\c!bodyfont=,
- \c!scale=\v!medium,
+ \c!scale=\v!normal, % small, normal or medium, big, or unitless number (multiplies EmWidth)
\c!size=\v!medium,
- \c!textsize=\v!big,
+ \c!textsize=\v!big, % how is textsize used??
\c!axis=\v!off,
\c!style=\rm,
- \c!location=,
+ \c!rotation=0, % unitless number (interpreted as degrees)
+ \c!symalign=\v!auto,
+ \c!location=, % not yet used (was interaction related in mkii)
+ \c!offset=.25em,
+ \c!unit=\emwidth,
+ \c!factor=3,
\c!color=,
- \c!rulethickness=\linewidth,
- \c!rulecolor=,
- \c!factor=1]
+ \c!strut=\v!yes,
+ \c!framecolor=chemicalframecolor,
+ \c!rulethickness=0.6pt, %1.5\linewidth,
+ \c!rulecolor=]
+
+%D Compatibility:
+
+\definechemical[+R] {\chemical[RR]}
+\definechemical[-R] {\chemical[LR]}
+
+\definechemical[CARBON:CB] {\chemical[NEWMANSTAGGER,C,SB]}
+\definechemical[NEWMANSTAGGER:CB] {\chemical[NEWMANSTAGGER,C,SB]}
+\definechemical[NEWMANECLIPSED:CB]{\chemical[NEWMANECLIPSED,C,SB]}
+\definechemical[CARBON:CB1] {\chemical[CARBON,C,SB,Z234,1.5MOV1,MIR0,C,SB,Z234]}
+
+\definechemical[NEWMAN] {\chemical[]}
+\definechemical[STAGGER] {\chemical[NEWMANSTAGGER]}
+\definechemical[ECLIPSE] {\chemical[NEWMANECLIPSED]}
+\definechemical[ECLIPSED] {\chemical[NEWMANECLIPSED]}
+\definechemical[SIX:FRONT] {\chemical[SIXFRONT]}
+\definechemical[FIVE:FRONT] {\chemical[FIVEFRONT]}
\protect \endinput
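
On the Lua side (chemistry.start in chem-str.lua above), rulethickness, offset and unit arrive as plain integers because \startchemical passes them through \number\dimexpr ... \relax, i.e. as scaled points. Since 65536 sp equal 1 pt, the Lua defaults are easy to read back; a tiny check:

    -- Scaled points to points: 65536 sp = 1 pt.
    local sp = 65536
    print(655360 / sp)   -- 10  (chemistry.start's default unit, i.e. 10pt)
    print(65536  / sp)   -- 1   (its default rulethickness, i.e. 1pt)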
diff --git a/Master/texmf-dist/tex/context/base/cldf-bas.lua b/Master/texmf-dist/tex/context/base/cldf-bas.lua
index 30a9265bce4..298b374f045 100644
--- a/Master/texmf-dist/tex/context/base/cldf-bas.lua
+++ b/Master/texmf-dist/tex/context/base/cldf-bas.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['cldf-ini'] = {
+if not modules then modules = { } end modules ['cldf-bas'] = {
version = 1.001,
comment = "companion to cldf-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -22,6 +22,8 @@ if not modules then modules = { } end modules ['cldf-ini'] = {
-- flush(ctxcatcodes,"}")
-- end
+-- maybe use context.generics
+
local type = type
local format = string.format
local utfchar = utf.char
@@ -32,13 +34,16 @@ local generics = context.generics
local variables = interfaces.variables
local new_rule = nodes.pool.rule
+local texcount = tex.count
function context.char(k) -- used as escape too, so don't change to utf
if type(k) == "table" then
- -- for i=1,#k do
- -- context(format([[\char%s\relax]],k[i]))
- -- end
- context([[\char%s\relax]],concat(k,[[\relax\char]]))
+ local n = #k
+ if n == 1 then
+ context([[\char%s\relax]],k[1])
+ elseif n > 0 then
+ context([[\char%s\relax]],concat(k,[[\relax\char]]))
+ end
elseif k then
context([[\char%s\relax]],k)
end
@@ -66,6 +71,10 @@ function context.egroup()
context("}")
end
+function context.space()
+ context(" ")
+end
+
function context.hrule(w,h,d,dir)
if type(w) == "table" then
context(new_rule(w.width,w.height,w.depth,w.dir))
@@ -98,22 +107,22 @@ context.vrule = context.hrule
-- not yet used ... but will get variant at the tex end as well
-function context.sethboxregister (n) context("\\setbox %s\\hbox",n) end
-function context.setvboxregister (n) context("\\setbox %s\\vbox",n) end
+function context.sethboxregister(n) context([[\setbox %s\hbox]],n) end
+function context.setvboxregister(n) context([[\setbox %s\vbox]],n) end
function context.starthboxregister(n)
if type(n) == "number" then
- context("\\setbox%s\\hbox\\bgroup",n)
+ context([[\setbox%s\hbox{]],n)
else
- context("\\setbox\\%s\\hbox\\bgroup",n)
+ context([[\setbox\%s\hbox{]],n)
end
end
function context.startvboxregister(n)
if type(n) == "number" then
- context("\\setbox%s\\vbox\\bgroup",n)
+ context([[\setbox%s\vbox{]],n)
else
- context("\\setbox\\%s\\vbox\\bgroup",n)
+ context([[\setbox\%s\vbox{]],n)
end
end
@@ -122,19 +131,36 @@ context.stopvboxregister = context.egroup
function context.flushboxregister(n)
if type(n) == "number" then
- context("\\box%s ",n)
+ context([[\box%s ]],n)
else
- context("\\box\\%s",n)
+ context([[\box\%s]],n)
end
end
function context.beginvbox()
- context("\\vbox\\bgroup") -- we can do \bvbox ... \evbox (less tokens)
+ context([[\vbox{]]) -- we can do \bvbox ... \evbox (less tokens)
end
function context.beginhbox()
- context("\\hbox\\bgroup") -- todo: use fast one
+ context([[\hbox{]]) -- todo: use fast one
end
context.endvbox = context.egroup
context.endhbox = context.egroup
+
+local function allocate(name,what,cmd)
+ local a = format("c_syst_last_allocated_%s",what)
+ local n = texcount[a] + 1
+ if n <= texcount.c_syst_max_allocated_register then
+ texcount[a] = n
+ end
+ context("\\global\\expandafter\\%sdef\\csname %s\\endcsname %s\\relax",cmd or what,name,n)
+ return n
+end
+
+function context.newdimen (name) return allocate(name,"dimen") end
+function context.newskip (name) return allocate(name,"skip") end
+function context.newcount (name) return allocate(name,"count") end
+function context.newmuskip(name) return allocate(name,"muskip") end
+function context.newtoks (name) return allocate(name,"toks") end
+function context.newbox (name) return allocate(name,"box","mathchar") end
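
The reworked context.char above no longer flushes anything for an empty table and skips the concat for the single-entry case; for longer tables it still builds one \char ... \relax chain. What that chain looks like for a two-entry table (purely illustrative; outside of TeX it is just a string):

    -- Shows the string the table branch of context.char builds; the real code
    -- hands it to context() instead of print().
    local k = { 72, 105 }
    print([[\char]] .. table.concat(k, [[\relax\char]]) .. [[\relax]])
    -- \char72\relax\char105\relax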
diff --git a/Master/texmf-dist/tex/context/base/cldf-bas.mkiv b/Master/texmf-dist/tex/context/base/cldf-bas.mkiv
index f2bd05177a1..f8b5b5d6a3c 100644
--- a/Master/texmf-dist/tex/context/base/cldf-bas.mkiv
+++ b/Master/texmf-dist/tex/context/base/cldf-bas.mkiv
@@ -14,5 +14,6 @@
\writestatus{loading}{ConTeXt Lua Documents / Basics}
\registerctxluafile{cldf-bas}{1.001}
+\registerctxluafile{cldf-prs}{1.001}
\endinput
diff --git a/Master/texmf-dist/tex/context/base/cldf-com.lua b/Master/texmf-dist/tex/context/base/cldf-com.lua
index bacbbeafd6a..fa0dbed3e45 100644
--- a/Master/texmf-dist/tex/context/base/cldf-com.lua
+++ b/Master/texmf-dist/tex/context/base/cldf-com.lua
@@ -11,15 +11,14 @@ local context = context
local generics = context.generics -- needs documentation
local variables = interfaces.variables
-generics.starttabulate = "start" .. variables.tabulate -- todo: e!start
-generics.stoptabulate = "stop" .. variables.tabulate -- todo: e!stop
+generics.starttabulate = "starttabulate" -- "start" .. variables.tabulate -- todo: e!start
+generics.stoptabulate = "stoptabulate" -- "stop" .. variables.tabulate -- todo: e!stop
local NC, NR = context.NC, context.NR
local function tabulaterow(how,...)
- local t = { ... }
- for i=1,#t do
- local ti = tostring(t[i])
+ for i=1,select("#",...) do
+ local ti = tostring(select(i,...))
NC()
if how then
context[how](ti)
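
The tabulaterow change above walks the varargs with select() instead of first collecting them into a table, which avoids an intermediate table and keeps the loop honest when an argument is nil (a nil makes the length of {...} unreliable). A standalone sketch of the pattern:

    -- select("#", ...) counts every argument, embedded nils included.
    local function row(...)
        local cells = { }
        for i = 1, select("#", ...) do
            cells[#cells+1] = tostring((select(i, ...)))  -- parentheses keep only value i
        end
        return table.concat(cells, " | ")
    end

    print(row("a", nil, 42))  -- a | nil | 42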
diff --git a/Master/texmf-dist/tex/context/base/cldf-ini.lua b/Master/texmf-dist/tex/context/base/cldf-ini.lua
index ed86c29234f..4a7d9f025e4 100644
--- a/Master/texmf-dist/tex/context/base/cldf-ini.lua
+++ b/Master/texmf-dist/tex/context/base/cldf-ini.lua
@@ -20,15 +20,19 @@ if not modules then modules = { } end modules ['cldf-ini'] = {
--
-- tex.print == line with endlinechar appended
+-- todo: context("%bold{total: }%s",total)
+-- todo: context.documentvariable("title")
+
local tex = tex
context = context or { }
local context = context
-local format, find, gmatch, gsub = string.format, string.find, string.gmatch, string.gsub
-local next, type, tostring, tonumber, setmetatable = next, type, tostring, tonumber, setmetatable
+local format, gsub, validstring = string.format, string.gsub, string.valid
+local next, type, tostring, tonumber, setmetatable, unpack, select = next, type, tostring, tonumber, setmetatable, unpack, select
local insert, remove, concat = table.insert, table.remove, table.concat
-local lpegmatch, lpegC, lpegS, lpegP, lpegCc = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc
+local lpegmatch, lpegC, lpegS, lpegP, lpegCc, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc, lpeg.patterns
+local formatters = string.formatters -- using formatteds is slower in this case
local texsprint = tex.sprint
local textprint = tex.tprint
@@ -40,12 +44,14 @@ local isnode = node.is_node -- after 0.65 just node.type
local writenode = node.write
local copynodelist = node.copy_list
-local ctxcatcodes = tex.ctxcatcodes
-local prtcatcodes = tex.prtcatcodes
-local texcatcodes = tex.texcatcodes
-local txtcatcodes = tex.txtcatcodes
-local vrbcatcodes = tex.vrbcatcodes
-local xmlcatcodes = tex.xmlcatcodes
+local catcodenumbers = catcodes.numbers
+
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local prtcatcodes = catcodenumbers.prtcatcodes
+local texcatcodes = catcodenumbers.texcatcodes
+local txtcatcodes = catcodenumbers.txtcatcodes
+local vrbcatcodes = catcodenumbers.vrbcatcodes
+local xmlcatcodes = catcodenumbers.xmlcatcodes
local flush = texsprint
local flushdirect = texprint
@@ -76,7 +82,7 @@ end
local function _flush_f_(n)
local sn = _stack_f_[n]
if not sn then
- report_cld("data with id %s cannot be found on stack",n)
+ report_cld("data with id %a cannot be found on stack",n)
else
local tn = type(sn)
if tn == "function" then
@@ -100,7 +106,7 @@ end
local function _flush_n_(n)
local sn = _stack_n_[n]
if not sn then
- report_cld("data with id %s cannot be found on stack",n)
+ report_cld("data with id %a cannot be found on stack",n)
elseif texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private!
writenode(sn)
_stack_n_[n] = nil
@@ -154,14 +160,8 @@ context.popcatcodes = popcatcodes
-- -- --
---~ local capture = (
---~ space^0 * newline^2 * lpeg.Cc("") / texprint +
---~ space^0 * newline * space^0 * lpeg.Cc(" ") / texsprint +
---~ content / texsprint
---~ )^0
-
-local newline = lpeg.patterns.newline
-local space = lpeg.patterns.spacer
+local newline = patterns.newline
+local space = patterns.spacer
local spacing = newline * space^0
local content = lpegC((1-spacing)^1) -- texsprint
local emptyline = space^0 * newline^2 -- texprint("")
@@ -344,16 +344,18 @@ end
local methodhandler = resolvers.methodhandler
-function context.viafile(data)
+function context.viafile(data,tag)
if data and data ~= "" then
- local filename = resolvers.savers.byscheme("virtual","viafile",data)
+ local filename = resolvers.savers.byscheme("virtual",validstring(tag,"viafile"),data)
-- context.startregime { "utf" }
context.input(filename)
-- context.stopregime()
end
end
--- -- --
+-- -- -- "{" .. ti .. "}" is somewhat slower in a cld-mkiv run than "{",ti,"}"
+
+local containseol = patterns.containseol
local function writer(parent,command,first,...) -- already optimized before call
local t = { first, ... }
@@ -366,7 +368,7 @@ local function writer(parent,command,first,...) -- already optimized before call
if typ == "string" or typ == "number" then
flush(currentcatcodes,ti)
else -- node.write
- report_context("error: invalid use of direct in '%s', only strings and numbers can be flushed directly, not '%s'",command,typ)
+ report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ)
end
direct = false
elseif ti == nil then
@@ -375,7 +377,7 @@ local function writer(parent,command,first,...) -- already optimized before call
flush(currentcatcodes,"{}")
elseif typ == "string" then
-- is processelines seen ?
- if processlines and find(ti,"[\n\r]") then -- we can check for ti == "\n"
+ if processlines and lpegmatch(containseol,ti) then
flush(currentcatcodes,"{")
local flushlines = parent.__flushlines or flushlines
flushlines(ti)
@@ -410,7 +412,11 @@ local function writer(parent,command,first,...) -- already optimized before call
done = true
end
end
- flush(currentcatcodes,"]")
+ if done then
+ flush(currentcatcodes,"]")
+ else
+ flush(currentcatcodes,"[]")
+ end
elseif tn == 1 then -- some 20% faster than the next loop
local tj = ti[1]
if type(tj) == "function" then
@@ -440,7 +446,7 @@ local function writer(parent,command,first,...) -- already optimized before call
elseif isnode(ti) then -- slow
flush(currentcatcodes,"{\\cldn{",_store_n_(ti),"}}")
else
- report_context("error: '%s' gets a weird argument '%s'",command,tostring(ti))
+ report_context("error: %a gets a weird argument %a",command,ti)
end
end
end
@@ -448,16 +454,20 @@ end
local generics = { } context.generics = generics
local function indexer(parent,k)
- local c = "\\" .. tostring(generics[k] or k)
- local f = function(first,...)
- if first == nil then
- flush(currentcatcodes,c)
- else
- return writer(parent,c,first,...)
+ if type(k) == "string" then
+ local c = "\\" .. tostring(generics[k] or k)
+ local f = function(first,...)
+ if first == nil then
+ flush(currentcatcodes,c)
+ else
+ return writer(parent,c,first,...)
+ end
end
+ parent[k] = f
+ return f
+ else
+ return context -- catch
end
- parent[k] = f
- return f
end
-- Potential optimization: after the first call we know if there will be an
@@ -518,8 +528,8 @@ local function caller(parent,f,a,...)
local typ = type(f)
if typ == "string" then
if a then
- flush(contentcatcodes,format(f,a,...)) -- was currentcatcodes
- elseif processlines and find(f,"[\n\r]") then
+ flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes
+ elseif processlines and lpegmatch(containseol,f) then
local flushlines = parent.__flushlines or flushlines
flushlines(f)
else
@@ -538,10 +548,9 @@ local function caller(parent,f,a,...)
if f then
if a ~= nil then
local flushlines = parent.__flushlines or flushlines
- flushlines(f)
- -- ignore ... maybe some day
+ flushlines(a)
else
- flushdirect(currentcatcodes,"\r")
+ flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise
end
else
if a ~= nil then
@@ -557,7 +566,7 @@ local function caller(parent,f,a,...)
-- writenode(f)
flush(currentcatcodes,"\\cldn{",_store_n_(f),"}")
else
- report_context("error: 'context' gets a weird argument '%s'",tostring(f))
+ report_context("error: %a gets a weird argument %a","context",f)
end
end
end
@@ -592,13 +601,13 @@ end
function context.fprint(catcodes,fmt,first,...)
if type(catcodes) == "number" then
if first then
- flush(catcodes,format(fmt,first,...))
+ flush(catcodes,formatters[fmt](first,...))
else
flush(catcodes,fmt)
end
else
if fmt then
- flush(format(catodes,fmt,first,...))
+ flush(formatters[catcodes](fmt,first,...))
else
flush(catcodes)
end
@@ -607,7 +616,7 @@ end
function tex.fprint(fmt,first,...) -- goodie
if first then
- flush(currentcatcodes,format(fmt,first,...))
+ flush(currentcatcodes,formatters[fmt](first,...))
else
flush(currentcatcodes,fmt)
end
@@ -625,6 +634,11 @@ local currenttrace = nil
local nofwriters = 0
local nofflushes = 0
+local visualizer = lpeg.replacer {
+ { "\n","<<newline>>" },
+ { "\r","<<par>>" },
+}
+
statistics.register("traced context", function()
if nofwriters > 0 or nofflushes > 0 then
return format("writers: %s, flushes: %s, maxstack: %s",nofwriters,nofflushes,_n_f_)
@@ -638,7 +652,7 @@ local tracedwriter = function(parent,...) -- also catcodes ?
local t, n = { "w : - : " }, 1
local traced = function(normal,catcodes,...) -- todo: check for catcodes
local s = concat({...})
- s = gsub(s,"\r","<<newline>>") -- unlikely
+ s = lpegmatch(visualizer,s)
n = n + 1
t[n] = s
normal(catcodes,...)
@@ -660,17 +674,17 @@ local traced = function(normal,one,two,...)
normal(one,two,...)
local catcodes = type(one) == "number" and one
local arguments = catcodes and { two, ... } or { one, two, ... }
- local collapsed, c = { format("f : %s : ", catcodes or '-') }, 1
+ local collapsed, c = { formatters["f : %s : "](catcodes or '-') }, 1
for i=1,#arguments do
local argument = arguments[i]
local argtype = type(argument)
c = c + 1
if argtype == "string" then
- collapsed[c] = gsub(argument,"\r","<<newline>>")
+ collapsed[c] = lpegmatch(visualizer,argument)
elseif argtype == "number" then
collapsed[c] = argument
else
- collapsed[c] = format("<<%s>>",tostring(argument))
+ collapsed[c] = formatters["<<%S>>"](argument)
end
end
currenttrace(concat(collapsed))
@@ -679,11 +693,11 @@ local traced = function(normal,one,two,...)
normal(one)
local argtype = type(one)
if argtype == "string" then
- currenttrace(format("f : - : %s",gsub(one,"\r","<<newline>>")))
+ currenttrace(formatters["f : - : %s"](lpegmatch(visualizer,one)))
elseif argtype == "number" then
- currenttrace(format("f : - : %s",one))
+ currenttrace(formatters["f : - : %s"](one))
else
- currenttrace(format("f : - : <<%s>>",tostring(one)))
+ currenttrace(formatters["f : - : <<%S>>"](one))
end
end
end
@@ -769,19 +783,19 @@ function context.runfile(filename)
local ok = dofile(foundname)
if type(ok) == "function" then
if trace_cld then
- report_context("begin of file '%s' (function call)",foundname)
+ report_context("begin of file %a (function call)",foundname)
end
ok()
if trace_cld then
- report_context("end of file '%s' (function call)",foundname)
+ report_context("end of file %a (function call)",foundname)
end
elseif ok then
- report_context("file '%s' is processed and returns true",foundname)
+ report_context("file %a is processed and returns true",foundname)
else
- report_context("file '%s' is processed and returns nothing",foundname)
+ report_context("file %a is processed and returns nothing",foundname)
end
else
- report_context("unknown file '%s'",filename)
+ report_context("unknown file %a",filename)
end
end
@@ -815,6 +829,44 @@ local function caller(parent,...) -- todo: nodes
end
end
+-- local function indexer(parent,k)
+-- local f = function(a,...)
+-- if not a then
+-- return function()
+-- return context[k]()
+-- end
+-- elseif select("#",...) == 0 then
+-- return function()
+-- return context[k](a)
+-- end
+-- elseif a then
+-- local t = { ... }
+-- return function()
+-- return context[k](a,unpack(t))
+-- end
+-- end
+-- end
+-- parent[k] = f
+-- return f
+-- end
+--
+-- local function caller(parent,a,...) -- todo: nodes
+-- if not a then
+-- return function()
+-- return context()
+-- end
+-- elseif select("#",...) == 0 then
+-- return function()
+-- return context(a)
+-- end
+-- elseif a then
+-- local t = { ... }
+-- return function()
+-- return context(a,unpack(t))
+-- end
+-- end
+-- end
+
setmetatable(delayed, { __index = indexer, __call = caller } )
-- context.nested (todo: lines)
@@ -874,6 +926,60 @@ end
setmetatable(verbatim, { __index = indexer, __call = caller } )
+-- formatted
+
+local formatted = { } context.formatted = formatted
+
+-- local function indexer(parent,k)
+-- local command = context[k]
+-- local f = function(fmt,...)
+-- command(formatters[fmt](...))
+-- end
+-- parent[k] = f
+-- return f
+-- end
+
+local function indexer(parent,k)
+ if type(k) == "string" then
+ local c = "\\" .. tostring(generics[k] or k)
+ local f = function(first,second,...)
+ if first == nil then
+ flush(currentcatcodes,c)
+ elseif second then
+ return writer(parent,c,formatters[first](second,...))
+ else
+ return writer(parent,c,first)
+ end
+ end
+ parent[k] = f
+ return f
+ else
+ return context -- catch
+ end
+end
+
+-- local function caller(parent,...)
+-- context.fprint(...)
+-- end
+
+local function caller(parent,catcodes,fmt,first,...)
+ if type(catcodes) == "number" then
+ if first then
+ flush(catcodes,formatters[fmt](first,...))
+ else
+ flush(catcodes,fmt)
+ end
+ else
+ if fmt then
+ flush(formatters[catcodes](fmt,first,...))
+ else
+ flush(catcodes)
+ end
+ end
+end
+
+setmetatable(formatted, { __index = indexer, __call = caller } )
+
-- metafun (this will move to another file)
local metafun = { } context.metafun = metafun
@@ -887,7 +993,7 @@ local function caller(parent,f,a,...)
local typ = type(f)
if typ == "string" then
if a then
- flush(currentcatcodes,mpdrawing,"{",format(f,a,...),"}")
+ flush(currentcatcodes,mpdrawing,"{",formatters[f](a,...),"}")
else
flush(currentcatcodes,mpdrawing,"{",f,"}")
end
@@ -905,10 +1011,10 @@ local function caller(parent,f,a,...)
if f then
flush(currentcatcodes,mpdrawing,"{^^M}")
else
- report_context("warning: 'metafun' gets argument 'false' which is currently unsupported")
+ report_context("warning: %a gets argument 'false' which is currently unsupported","metafun")
end
else
- report_context("error: 'metafun' gets a weird argument '%s'",tostring(f))
+ report_context("error: %a gets a weird argument %a","metafun",tostring(f))
end
end
end
@@ -925,7 +1031,7 @@ function metafun.stop()
end
function metafun.color(name)
- return format([[\MPcolor{%s}]],name)
+ return formatters[ [[\MPcolor{%s}]] ](name)
end
-- metafun.delayed
@@ -955,18 +1061,6 @@ setmetatable(delayed, { __index = indexer, __call = caller } )
-- helpers:
--- we could have faster calls here
-
-function context.concat(t,separator)
- local done = false
- for i=1,#t do
- local ti = t[i]
- if ti ~= "" then
- if done then
- context(separator)
- end
- context(ti)
- done = true
- end
- end
+function context.concat(...)
+ context(concat(...))
end
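-- A hedged usage sketch for the context.formatted table added above in this
-- file: the command key ("bold" here) is only an example, and the exact
-- expansion depends on the writer, but the intent appears to be that the first
-- string argument acts as a format template when more arguments follow.
--
-- context.formatted.bold("%s of %s","first","second") -- roughly \bold{first of second}
-- context.formatted.bold("no extra arguments")        -- passed through unformatted
-- context.formatted("result: %s and %s",1,2)          -- __call variant: flushes "result: 1 and 2"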
diff --git a/Master/texmf-dist/tex/context/base/cldf-ini.mkiv b/Master/texmf-dist/tex/context/base/cldf-ini.mkiv
index 4ad31d81ebe..77948e0583f 100644
--- a/Master/texmf-dist/tex/context/base/cldf-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/cldf-ini.mkiv
@@ -31,12 +31,14 @@
%D
%D Anyway \unknown\ the following are {\em not} user commands:
-\def\cldf#1{\directlua\zerocount{_cldf_(#1)}} % global (functions)
-\def\cldn#1{\directlua\zerocount{_cldn_(#1)}} % global (nodes)
+% \zerocount removed as it's the default
-\normalprotected\def\cldprocessfile#1{\directlua\zerocount{context.runfile("#1")}}
- \def\cldcontext #1{\directlua\zerocount{context(#1)}}
- \def\cldcommand #1{\directlua\zerocount{context.#1}}
- % \def\cldverbatim #1{\directlua\zerocount{context.verbatim.#1}} % maybe make verbatim global
+\def\cldf#1{\directlua{_cldf_(#1)}} % global (functions)
+\def\cldn#1{\directlua{_cldn_(#1)}} % global (nodes)
+
+\normalprotected\def\cldprocessfile#1{\directlua{context.runfile("#1")}}
+ \def\cldcontext #1{\directlua{context(#1)}}
+ \def\cldcommand #1{\directlua{context.#1}}
+ % \def\cldverbatim #1{\directlua{context.verbatim.#1}} % maybe make verbatim global
\endinput
diff --git a/Master/texmf-dist/tex/context/base/cldf-int.lua b/Master/texmf-dist/tex/context/base/cldf-int.lua
index 55db9fa0b9d..6cbfd666ffb 100644
--- a/Master/texmf-dist/tex/context/base/cldf-int.lua
+++ b/Master/texmf-dist/tex/context/base/cldf-int.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['mult-clm'] = {
+if not modules then modules = { } end modules ['cldf-int'] = {
version = 1.001,
comment = "companion to mult-clm.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -14,9 +14,12 @@ if not modules then modules = { } end modules ['mult-clm'] = {
local format, insert, remove, concat = string.format, table.insert, table.remove, table.concat
local unpack = unpack or table.unpack
-local contextsprint = context.sprint
-local ctxcatcodes = tex.ctxcatcodes
-local vrbcatcodes = tex.vrbcatcodes
+local catcodenumbers = catcodes.numbers
+
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local vrbcatcodes = catcodenumbers.vrbcatcodes
+
+local contextsprint = context.sprint
local trace_define = false trackers.register("context.define", function(v) trace_define = v end)
diff --git a/Master/texmf-dist/tex/context/base/cldf-prs.lua b/Master/texmf-dist/tex/context/base/cldf-prs.lua
new file mode 100644
index 00000000000..9fbdba0c88a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/cldf-prs.lua
@@ -0,0 +1,54 @@
+if not modules then modules = { } end modules ['cldf-bas'] = {
+ version = 1.001,
+ comment = "companion to cldf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs
+local format = string.format
+
+local cpatterns = patterns.context or { }
+patterns.context = cpatterns
+
+local backslash = P("\\")
+local csname = backslash * P(1) * (1-backslash)^0
+local sign = P("+") / "\\textplus "
+ + P("-") / "\\textminus "
+local leftbrace = P("{")
+local rightbrace = P("}")
+local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
+local subscript = P("_")
+local superscript = P("^")
+local utf8char = patterns.utf8char
+local cardinal = patterns.cardinal
+
+-- local scripts = P { "start",
+-- start = V("csname") + V("lowfirst") + V("highfirst"),
+-- csname = csname,
+-- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)),
+-- lowfirst = subscript * ( Cc("\\lohi{%s}{%s}") * V("content") * superscript + Cc("\\low{%s}" ) ) * V("content") / format,
+-- highfirst = superscript * ( Cc("\\hilo{%s}{%s}") * V("content") * subscript + Cc("\\high{%s}") ) * V("content") / format,
+-- }
+
+local scripts = P { "start",
+ start = V("csname") + V("lowfirst") + V("highfirst"),
+ csname = csname,
+ content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)),
+ lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"),
+ highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"),
+ }
+
+local scripted = Cs((csname + scripts + utf8char)^0)
+
+cpatterns.scripts = scripts
+cpatterns.csname = csname
+cpatterns.scripted = scripted
+cpatterns.nested = nested
+
+-- inspect(scripted)
+-- print(lpegmatch(scripted,"10^-3_x"))
+-- print(lpegmatch(scripted,"10^-a"))
+
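-- A hedged sketch of what the scripted pattern above is intended to produce
-- (exact strings depend on the sign replacements, which carry a trailing
-- space): plain ^ and _ scripts get rewritten as \high, \low, \hilo and \lohi
-- calls, roughly like this:
--
-- lpegmatch(scripted,"10^-3")   -- 10\high{\textminus 3}
-- lpegmatch(scripted,"10^-3_x") -- 10\hilo{\textminus 3}{x}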
diff --git a/Master/texmf-dist/tex/context/base/cldf-ver.lua b/Master/texmf-dist/tex/context/base/cldf-ver.lua
index 237078157bb..b48fd253ac5 100644
--- a/Master/texmf-dist/tex/context/base/cldf-ver.lua
+++ b/Master/texmf-dist/tex/context/base/cldf-ver.lua
@@ -6,6 +6,10 @@ if not modules then modules = { } end modules ['cldf-ver'] = {
license = "see context related readme files"
}
+-- We have a better verbatim mechanism, context.verbatim, so that needs to be
+-- looked into. We can also store directly in buffers, although this variant
+-- works better when mixed with other code (synchronization issue).
+
local concat, tohandle = table.concat, table.tohandle
local find, splitlines = string.find, string.splitlines
local tostring, type = tostring, type
@@ -41,7 +45,7 @@ table .tocontext = t_tocontext
string .tocontext = s_tocontext
boolean.tocontext = b_tocontext
-function tocontext(first,...)
+function context.tocontext(first,...)
local t = type(first)
if t == "string" then
s_tocontext(first,...)
diff --git a/Master/texmf-dist/tex/context/base/colo-ext.mkiv b/Master/texmf-dist/tex/context/base/colo-ext.mkiv
index af6c3830ecc..8878da48522 100644
--- a/Master/texmf-dist/tex/context/base/colo-ext.mkiv
+++ b/Master/texmf-dist/tex/context/base/colo-ext.mkiv
@@ -29,18 +29,10 @@
%D
%D will negate the colors in box zero.
-% \unexpanded\def\negatecolorbox#1%
-% {\setbox#1\hbox
-% {\startnegative % might change
-% \startcolor[\s!white]\vrule\!!height\ht#1\!!depth\dp#1\!!width\wd#1\stopcolor
-% \hskip-\wd#1%
-% \box#1%
-% \stopnegative}}
-
\unexpanded\def\negatecolorbox#1%
{\setbox#1\hbox
{\startnegative % might change
-% \startcolor[\s!white]\vrule\!!height\ht#1\!!depth\dp#1\!!width\wd#1\stopcolor
+ % \startcolor[\s!white]\vrule\s!height\ht#1\s!depth\dp#1\s!width\wd#1\stopcolor
\blackrule[\c!color=\s!white,\c!height=\ht#1,\c!depth=\dp#1,\c!width=\wd#1]%
\hskip-\wd#1%
\box#1%
diff --git a/Master/texmf-dist/tex/context/base/colo-icc.lua b/Master/texmf-dist/tex/context/base/colo-icc.lua
index 904d42143d4..f7ed561c1bf 100644
--- a/Master/texmf-dist/tex/context/base/colo-icc.lua
+++ b/Master/texmf-dist/tex/context/base/colo-icc.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['colo-ini'] = {
+if not modules then modules = { } end modules ['colo-icc'] = {
version = 1.000,
comment = "companion to colo-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['colo-ini'] = {
local char, byte, gsub, match, format, strip = string.char, string.byte, string.gsub, string.match, string.format, string.strip
local readstring, readnumber = io.readstring, io.readnumber
+local formatters = string.formatters
local colors = attributes and attributes.colors or { } -- when used in mtxrun
@@ -27,11 +28,13 @@ function colors.iccprofile(filename,verbose)
end
end
if fullname == "" then
- return nil, false, format("profile '%s' cannot be found",filename)
+ report_colors("profile %a cannot be found",filename)
+ return nil, false
end
local f = io.open(fullname,"rb")
if not f then
- return nil, false, format("profile '%s'cannot be loaded",fullname)
+ report_colors("profile %a cannot be loaded",fullname)
+ return nil, false
end
local header = {
size = readnumber(f,4),
@@ -99,7 +102,7 @@ function colors.iccprofile(filename,verbose)
}
else
if verbose then
- report_colors("ignoring tag '%s' or type '%s' in profile '%s'",tag,variant,fullname)
+ report_colors("ignoring tag %a or type %a in profile %a",tag,variant,fullname)
end
tags[tag] = nil
end
@@ -112,9 +115,6 @@ function colors.iccprofile(filename,verbose)
header = header,
tags = tags,
}
- return profile, true, format("profile '%s' loaded",fullname)
+ report_colors("profile %a loaded",fullname)
+ return profile, true
end
-
---~ local profile, error, message = colors.iccprofile("ussheetfedcoated.icc")
---~ print(error,message)
---~ table.print(profile)
diff --git a/Master/texmf-dist/tex/context/base/colo-ini.lua b/Master/texmf-dist/tex/context/base/colo-ini.lua
index 747e2116f08..14873393216 100644
--- a/Master/texmf-dist/tex/context/base/colo-ini.lua
+++ b/Master/texmf-dist/tex/context/base/colo-ini.lua
@@ -11,12 +11,14 @@ local concat, insert, remove = table.concat, table.insert, table.remove
local format, gmatch, gsub, lower, match, find = string.format, string.gmatch, string.gsub, string.lower, string.match, string.find
local P, R, C, Cc = lpeg.P, lpeg.R, lpeg.C, lpeg.Cc
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local formatters = string.formatters
local trace_define = false trackers.register("colors.define",function(v) trace_define = v end)
local report_colors = logs.reporter("colors","defining")
-local attributes, context, commands = attributes, context, commands
+local attributes, backends, storage = attributes, backends, storage
+local context, commands = context, commands
local settings_to_hash_strict = utilities.parsers.settings_to_hash_strict
@@ -37,6 +39,8 @@ local attributes_list = attributes.list
local colorvalues = colors.values
local transparencyvalues = transparencies.values
+local texattribute = tex.attribute
+
colors.sets = colors.sets or { } -- sets are mostly used for
local colorsets = colors.sets -- showing lists of defined
local colorset = { } -- colors
@@ -67,12 +71,12 @@ local function definecolor(name, ca, global)
if ca and ca > 0 then
if global then
if trace_define then
- report_colors("define global color '%s' with attribute: %s",name,ca)
+ report_colors("define global color %a with attribute %a",name,ca)
end
context.colordefagc(name,ca)
else
if trace_define then
- report_colors("define local color '%s' with attribute: %s",name,ca)
+ report_colors("define local color %a with attribute %a",name,ca)
end
context.colordefalc(name,ca)
end
@@ -90,12 +94,12 @@ local function inheritcolor(name, ca, global)
if ca and ca ~= "" then
if global then
if trace_define then
- report_colors("inherit global color '%s' with attribute: %s",name,ca)
+ report_colors("inherit global color %a with attribute %a",name,ca)
end
context.colordeffgc(name,ca) -- some day we will set the macro directly
else
if trace_define then
- report_colors("inherit local color '%s' with attribute: %s",name,ca)
+ report_colors("inherit local color %a with attribute %a",name,ca)
end
context.colordefflc(name,ca)
end
@@ -113,12 +117,12 @@ local function definetransparent(name, ta, global)
if ta and ta > 0 then
if global then
if trace_define then
- report_colors("define global transparency '%s' with attribute: %s",name,ta)
+ report_colors("define global transparency %a with attribute %a",name,ta)
end
context.colordefagt(name,ta)
else
if trace_define then
- report_colors("define local transparency '%s' with attribute: %s",name,ta)
+ report_colors("define local transparency %a with attribute %a",name,ta)
end
context.colordefalt(name,ta)
end
@@ -135,12 +139,12 @@ local function inherittransparent(name, ta, global)
if ta and ta ~= "" then
if global then
if trace_define then
- report_colors("inherit global transparency '%s' with attribute: %s",name,ta)
+ report_colors("inherit global transparency %a with attribute %a",name,ta)
end
context.colordeffgt(name,ta)
else
if trace_define then
- report_colors("inherit local transparency '%s' with attribute: %s",name,ta)
+ report_colors("inherit local transparency %a with attribute %a",name,ta)
end
context.colordefflt(name,ta)
end
@@ -177,8 +181,7 @@ local gray_okay, rgb_okay, cmyk_okay, spot_okay, multichannel_okay, forced = tru
function colors.forcesupport(gray,rgb,cmyk,spot,multichannel) -- pdfx driven
gray_okay, rgb_okay, cmyk_okay, spot_okay, multichannel_okay, forced = gray, rgb, cmyk, spot, multichannel, true
- report_colors("supported models: gray=%s, rgb=%s, cmyk=%s, spot=%s", -- multichannel=%s
- tostring(gray), tostring(rgb), tostring(cmyk), tostring(spot)) -- tostring(multichannel)
+ report_colors("supported models: gray %a, rgb %a, cmyk %a, spot %a",gray,rgb,cmyk,spot) -- multichannel=%l multichannel
end
local function forcedmodel(model) -- delayed till the backend but mp directly
@@ -287,7 +290,8 @@ local right = P(")")
local comma = P(",")
local mixnumber = lpegpatterns.number / tonumber
local mixname = C(P(1-left-right-comma)^1)
-local mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^-1 * right * P(-1)
+----- mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^-1 * right * P(-1)
+local mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^0 * right * P(-1) -- one is also ok
local exclamation = P("!")
local pgfnumber = lpegpatterns.digit^0 / function(s) return tonumber(s)/100 end
@@ -422,14 +426,11 @@ function colors.definemultitonecolor(name,multispec,colorspec,selfspec)
max = max + 1
dd[max] = k
pp[max] = v
- nn[max] = format("%s_%1.3g",k,tonumber(v) or 0) -- 0 can't happen
+ nn[max] = formatters["%s_%1.3g"](k,tonumber(v) or 0) -- 0 can't happen
end
if max > 0 then
nn = concat(nn,'_')
local parent = gsub(lower(nn),"[^%d%a%.]+","_")
---~ if max == 2 and (not colorspec or colorspec == "") then
---~ colors.defineduocolor(parent,pp[1],l_color[dd[1]],pp[2],l_color[dd[2]],true,true)
---~ elseif (not colorspec or colorspec == "") then
if not colorspec or colorspec == "" then
local cc = { } for i=1,max do cc[i] = l_color[dd[i]] end
colors.definemixcolor(parent,pp,cc,global,freeze) -- can become local
@@ -441,11 +442,8 @@ function colors.definemultitonecolor(name,multispec,colorspec,selfspec)
end
local cp = attributes_list[a_color][parent]
dd, pp = concat(dd,','), concat(pp,',')
---~ print(name,multispec,colorspec,selfspec)
---~ print(parent,max,cp)
if cp then
do_registerspotcolor(parent, name, cp, "", max, dd, pp)
---~ do_registermultitonecolor(parent, name, cp, "", max, dd, pp) -- done in previous ... check it
definecolor(name, register_color(name, 'spot', parent, max, dd, pp), true)
local t = settings_to_hash_strict(selfspec)
if t and t.a and t.t then
@@ -472,47 +470,46 @@ local function mpcolor(model,ca,ta,default)
model = forcedmodel(model)
if tv then
if model == 2 then
- return format("transparent(%s,%s,(%s,%s,%s))",tv[1],tv[2],cv[3],cv[4],cv[5])
+ return formatters["transparent(%s,%s,(%s,%s,%s))"](tv[1],tv[2],cv[3],cv[4],cv[5])
elseif model == 3 then
- return format("transparent(%s,%s,(%s,%s,%s))",tv[1],tv[2],cv[3],cv[4],cv[5])
+ return formatters["transparent(%s,%s,(%s,%s,%s))"](tv[1],tv[2],cv[3],cv[4],cv[5])
elseif model == 4 then
- return format("transparent(%s,%s,cmyk(%s,%s,%s,%s))",tv[1],tv[2],cv[6],cv[7],cv[8],cv[9])
+ return formatters["transparent(%s,%s,cmyk(%s,%s,%s,%s))"](tv[1],tv[2],cv[6],cv[7],cv[8],cv[9])
elseif model == 5 then
- return format('transparent(%s,%s,multitonecolor("%s",%s,"%s","%s"))',tv[1],tv[2],cv[10],cv[11],cv[12],cv[13])
- else
- return format("transparent(%s,%s,(%s,%s,%s))",tv[1],tv[2],cv[3],cv[4],cv[5])
--- this will become (see ** in meta-ini.mkiv)
---
--- return format("transparent(%s,%s,(%s))",tv[1],tv[2],cv[2])
+ return formatters['transparent(%s,%s,multitonecolor("%s",%s,"%s","%s"))'](tv[1],tv[2],cv[10],cv[11],cv[12],cv[13])
+ else -- see ** in meta-ini.mkiv: return formatters["transparent(%s,%s,(%s))"](tv[1],tv[2],cv[2])
+ return formatters["transparent(%s,%s,(%s,%s,%s))"](tv[1],tv[2],cv[3],cv[4],cv[5])
end
else
if model == 2 then
- return format("(%s,%s,%s)",cv[3],cv[4],cv[5])
+ return formatters["(%s,%s,%s)"](cv[3],cv[4],cv[5])
elseif model == 3 then
- return format("(%s,%s,%s)",cv[3],cv[4],cv[5])
+ return formatters["(%s,%s,%s)"](cv[3],cv[4],cv[5])
elseif model == 4 then
- return format("cmyk(%s,%s,%s,%s)",cv[6],cv[7],cv[8],cv[9])
+ return formatters["cmyk(%s,%s,%s,%s)"](cv[6],cv[7],cv[8],cv[9])
elseif model == 5 then
- return format('multitonecolor("%s",%s,"%s","%s")',cv[10],cv[11],cv[12],cv[13])
- else
- return format("(%s,%s,%s)",cv[3],cv[4],cv[5])
--- this will become (see ** in meta-ini.mkiv)
---
--- return format("%s",(cv[2]))
+ return formatters['multitonecolor("%s",%s,"%s","%s")'](cv[10],cv[11],cv[12],cv[13])
+ else -- see ** in meta-ini.mkiv: return formatters["%s"]((cv[2]))
+ return formatters["(%s,%s,%s)"](cv[3],cv[4],cv[5])
end
end
else
default = default or 0 -- rgb !
- return format("(%s,%s,%s)",default,default,default)
+ return formatters["(%s,%s,%s)"](default,default,default)
end
end
+local function mpnamedcolor(name)
+ return mpcolor(texattribute[a_colorspace],l_color[name] or l_color.black)
+end
+
local function mpoptions(model,ca,ta,default) -- will move to mlib-col
- return format("withcolor %s",mpcolor(model,ca,ta,default))
+ return formatters["withcolor %s"](mpcolor(model,ca,ta,default))
end
-colors.mpcolor = mpcolor
-colors.mpoptions = mpoptions
+colors.mpcolor = mpcolor
+colors.mpnamedcolor = mpnamedcolor
+colors.mpoptions = mpoptions
function colors.formatcolor(ca,separator)
local cv = colorvalues[ca]
@@ -666,83 +663,51 @@ function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,sp
end
end
---~ local function f(one,two,i,fraction_one,fraction_two)
---~ local otf = fraction_one * one[i] + fraction_two * two[i]
---~ if otf > 1 then
---~ otf = 1
---~ end
---~ return otf
---~ end
-
---~ function colors.defineduocolor(name,fraction_one,c_one,fraction_two,c_two,global,freeze)
---~ local one, two = colorvalues[c_one], colorvalues[c_two]
---~ if one and two then
---~ fraction_one = tonumber(fraction_one) or 1
---~ fraction_two = tonumber(fraction_two) or 1
---~ local csone, cstwo = one[1], two[1]
---~ local ca
---~ if csone == 2 then
---~ ca = register_color(name,'gray',f(one,two,2,fraction_one,fraction_two))
---~ elseif csone == 3 then
---~ ca = register_color(name,'rgb', f(one,two,3,fraction_one,fraction_two),
---~ f(one,two,4,fraction_one,fraction_two),
---~ f(one,two,5,fraction_one,fraction_two))
---~ elseif csone == 4 then
---~ ca = register_color(name,'cmyk',f(one,two,6,fraction_one,fraction_two),
---~ f(one,two,7,fraction_one,fraction_two),
---~ f(one,two,8,fraction_one,fraction_two),
---~ f(one,two,9,fraction_one,fraction_two))
---~ else
---~ ca = register_color(name,'gray',f(one,two,2,fraction_one,fraction_two))
---~ end
---~ definecolor(name,ca,global,freeze)
---~ end
---~ end
-
- local function f(i,colors,fraction)
- local otf = 0
- for c=1,#colors do
- otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
- end
- if otf > 1 then
- otf = 1
- end
- return otf
+local function f(i,colors,fraction)
+ local otf = 0
+ for c=1,#colors do
+ otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
+ end
+ if otf > 1 then
+ otf = 1
end
+ return otf
+end
- function colors.definemixcolor(name,fractions,cs,global,freeze)
- local values = { }
- for i=1,#cs do -- do fraction in here
- local v = colorvalues[cs[i]]
- if not v then
- return
- end
- values[i] = v
- end
- local csone = values[1][1]
- local ca
- if csone == 2 then
- ca = register_color(name,'gray',f(2,values,fractions))
- elseif csone == 3 then
- ca = register_color(name,'rgb', f(3,values,fractions),
- f(4,values,fractions),
- f(5,values,fractions))
- elseif csone == 4 then
- ca = register_color(name,'cmyk',f(6,values,fractions),
- f(7,values,fractions),
- f(8,values,fractions),
- f(9,values,fractions))
- else
- ca = register_color(name,'gray',f(2,values,fractions))
+function colors.definemixcolor(name,fractions,cs,global,freeze)
+ local values = { }
+ for i=1,#cs do -- do fraction in here
+ local v = colorvalues[cs[i]]
+ if not v then
+ return
end
- definecolor(name,ca,global,freeze)
+ values[i] = v
end
+ local csone = values[1][1]
+ local ca
+ if csone == 2 then
+ ca = register_color(name,'gray',f(2,values,fractions))
+ elseif csone == 3 then
+ ca = register_color(name,'rgb', f(3,values,fractions),
+ f(4,values,fractions),
+ f(5,values,fractions))
+ elseif csone == 4 then
+ ca = register_color(name,'cmyk',f(6,values,fractions),
+ f(7,values,fractions),
+ f(8,values,fractions),
+ f(9,values,fractions))
+ else
+ ca = register_color(name,'gray',f(2,values,fractions))
+ end
+ definecolor(name,ca,global,freeze)
+end
-- for the moment downward compatible
local patterns = { "colo-imp-%s.mkiv", "colo-imp-%s.tex", "colo-%s.mkiv", "colo-%s.tex" }
local function action(name,foundname)
+ -- could be one command
context.startreadingfile()
context.startcolorset { name }
context.input(foundname)
@@ -753,7 +718,7 @@ end
local function failure(name)
-- context.showmessage("colors",5,name)
- report_colors("unknown: library '%s'",name)
+ report_colors("unknown library %a",name)
end
function colors.usecolors(name)
@@ -877,3 +842,22 @@ end
-- context.popcatcodes()
-- end
+-- handy
+
+local models = storage.allocate { "all", "gray", "rgb", "cmyk", "spot" }
+
+colors.models = models -- check for usage elsewhere
+
+function colors.spec(name)
+ local l = attributes_list[a_color]
+ local t = colorvalues[l[name]] or colorvalues[l.black]
+ return {
+ model = models[t[1]] or models[1],
+ s = t[2],
+ r = t[3], g = t[4], b = t[5],
+ c = t[6], m = t[7], y = t[8], k = t[9],
+ }
+end
+
+-- inspect(attributes.colors.spec("red"))
+-- inspect(attributes.colors.spec("red socks"))
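-- A small worked example for the definemixcolor helper f above, assuming the
-- usual value layout where indices 3..5 hold the rgb components: mixing red
-- (1,0,0) and white (1,1,1) with fractions 0.5 and 0.5 gives
--
--   r = 0.5*1 + 0.5*1 = 1.0
--   g = 0.5*0 + 0.5*1 = 0.5
--   b = 0.5*0 + 0.5*1 = 0.5
--
-- and each component is clipped at 1, so fractions that add up to more than
-- one cannot overflow a channel.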
diff --git a/Master/texmf-dist/tex/context/base/colo-ini.mkiv b/Master/texmf-dist/tex/context/base/colo-ini.mkiv
index 5721bb513bb..7052bf6c9c9 100644
--- a/Master/texmf-dist/tex/context/base/colo-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/colo-ini.mkiv
@@ -242,8 +242,8 @@
\setfalse\c_colo_convert_gray
\getvalue{\??colorconversions\directcolorsparameter\c!conversion}% could be a nice \ifcsname
% too often:
- \ifconditional\c_colo_rgb_supported \colo_helpers_show_message\m!colors9\v!rgb \fi
- \ifconditional\c_colo_cmyk_supported\colo_helpers_show_message\m!colors9\v!cmyk\fi
+ \ifconditional\c_colo_rgb_supported \colo_helpers_show_message\m!colors{10}\v!rgb \fi
+ \ifconditional\c_colo_cmyk_supported\colo_helpers_show_message\m!colors{10}\v!cmyk\fi
\colo_helpers_set_current_model
\ifproductionrun
\edef\p_pagecolormodel{\directcolorsparameter\c!pagecolormodel}%
@@ -357,16 +357,26 @@
\normalexpanded{\colo_palets_define[#1][\csname\??colorpaletspecification#2\endcsname]}%
\fi}}
-\def\colo_palets_define_one#1#2% get rid of { } in #2
- {\colo_palets_define_two{#1}[#2]}%
+% \def\colo_palets_define_one#1#2% get rid of { } in #2
+% {\colo_palets_define_two{#1}[#2]}%
+
+\def\colo_palets_define_one#1#2% get rid of { }
+ {\doifassignmentelse{#2} % catch empty entries
+ {\colo_palets_define_two{#1}[#2]}
+ {\colo_palets_define_three{#1}{#2}}}
\def\colo_palets_define_two#1[#2=#3]%
{\edef\m_colo_palets_tmp{\ifx\m_colo_palets_tmp\empty\else\m_colo_palets_tmp,\fi#2}%
\colo_palets_define_set{#1}{#2}{#3}}%
+\def\colo_palets_define_three#1#2%
+ {\ifcsname\??colorpaletspecification#2\endcsname
+ \processcommacommand[\csname\??colorpaletspecification#2\endcsname]{\colo_palets_define_one{#1}}%
+ \fi}
+
\let\paletsize\!!zerocount
-\def\getpaletsize[#1]%
+\unexpanded\def\getpaletsize[#1]% only works for valid k=v definitions
{\getcommacommandsize[\csname\??colorpaletspecification#1\endcsname]%
\edef\paletsize{\number\commalistsize}}
@@ -577,6 +587,34 @@
\letvalue{\??colorsetter-}\empty % used?
\letvalue{\??transparencysetter-}\empty % used?
+% new: expandable (see tbl)
+
+\def\colo_helpers_fast_activate
+ {\ifx\currentcolorprefix\empty
+ \expandafter\colo_helpers_fast_activate_nop
+ \else
+ \expandafter\colo_helpers_fast_activate_yes
+ \fi}
+
+\def\colo_helpers_fast_activate_yes#1%
+ {\ifcsname\??colorsetter\currentcolorprefix#1\endcsname
+ \csname\??colorsetter\currentcolorprefix#1\endcsname
+ \csname\??transparencysetter\currentcolorprefix#1\endcsname
+ \else\ifcsname\??colorsetter#1\endcsname
+ \csname\??colorsetter#1\endcsname
+ \csname\??transparencysetter#1\endcsname
+ \fi\fi}
+
+\def\colo_helpers_fast_activate_nop#1%
+ {\ifcsname\??colorsetter#1\endcsname
+ \csname\??colorsetter#1\endcsname
+ \csname\??transparencysetter#1\endcsname
+ \fi}
+
+\let\dofastcoloractivation\colo_helpers_fast_activate
+
+% so far
+
\def\colo_helpers_activate % two-step is not that much faster but less tracing
{\ifx\currentcolorprefix\empty
\expandafter\colo_helpers_activate_nop
@@ -688,6 +726,25 @@
% test {\mycolord OEPS} test
% test {\mycolorx OEPS} test
% \stoptext
+%
+% Beware: if we say:
+%
+% \definecolor[one][two]
+%
+% only color one is actually defined and two is not seen at the
+% \LUA\ end. This means that this doesn't work:
+%
+% \definecolor[ColorA][red]
+% \definecolor[ColorB][.5(ColorA)]
+% \definecolor[ColorC][.5(ColorB,white)]
+%
+% But this does work:
+%
+% \definecolor[ColorA][1.0(red)]
+% \definecolor[ColorB][0.5(ColorA)]
+% \definecolor[ColorC][0.5(ColorB,white)]
+%
+% because the fractional definition results in a new definition.
\unexpanded\def\defineintermediatecolor
{\dotripleempty\colo_basics_define_intermediate}
@@ -699,7 +756,7 @@
{\ctxcommand{defineintermediatecolor("#1","#2",
\thecolorattribute{#3},\thecolorattribute{#4},
\thetransparencyattribute{#3},\thetransparencyattribute{#4},
- "#5",false,\iffreezecolors true\else false\fi)}% not global
+ "#5",false,\v_colo_freeze_state)}% not global
\unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
%D Here is a more efficient helper for pgf:
@@ -802,9 +859,19 @@
\def\defaulttextcolor {black}
\def\s!themaintextcolor{themaintextcolor}
+\unexpanded\def\inheritmaintextcolor
+ {\ifx\maintextcolor\empty\else\colo_helpers_activate\maintextcolor\fi}
+
+\unexpanded\def\onlyinheritmaintextcolor
+ {\ifx\maintextcolor\empty
+ \deactivatecolor
+ \else
+ \colo_helpers_activate\maintextcolor
+ \fi}
+
\appendtoks
\deactivatecolor % public?
- \ifx\maintextcolor\empty\else\colo_helpers_activate\maintextcolor\fi
+ \inheritmaintextcolor
\to \everybeforeoutput
\def\colo_helpers_switch_to_maintextcolor#1%
@@ -961,7 +1028,7 @@
% ignores in attribute handler
%
-% \def\forcecolorhack{\vrule\!!width\zeropoint\!!height\zeropoint\!!depth\zeropoint}
+% \def\forcecolorhack{\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\zeropoint}
% \normal added else fails in metafun manual (leaders do a hard scan)
diff --git a/Master/texmf-dist/tex/context/base/colo-run.lua b/Master/texmf-dist/tex/context/base/colo-run.lua
index 4f1916d5ada..27f7c6b12ea 100644
--- a/Master/texmf-dist/tex/context/base/colo-run.lua
+++ b/Master/texmf-dist/tex/context/base/colo-run.lua
@@ -6,8 +6,10 @@ if not modules then modules = { } end modules ['colo-run'] = {
license = "see context related readme files"
}
--- For historic reasons the core has a couple of tracing
--- features. Nowadays these would end up in modules.
+-- For historic reasons the core has a couple of tracing features. Nowadays
+-- these would end up in modules.
+
+local colors, commands, context, utilities = colors, commands, context, utilities
local colors= attributes.colors
diff --git a/Master/texmf-dist/tex/context/base/colo-run.mkiv b/Master/texmf-dist/tex/context/base/colo-run.mkiv
index c330accf3dd..5084fdd351e 100644
--- a/Master/texmf-dist/tex/context/base/colo-run.mkiv
+++ b/Master/texmf-dist/tex/context/base/colo-run.mkiv
@@ -42,9 +42,9 @@
%D Palets
\unexpanded\gdef\showpalet
- {\dodoubleargument\doshowpalet}
+ {\dodoubleargument\colo_show_palet}
-\gdef\doshowpalet[#1][#2]%
+\gdef\colo_show_palet[#1][#2]%
{\ifcsname\??colorpalet#1\endcsname
\doifinsetelse\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal [#1][#2]%
\fi}
@@ -59,8 +59,8 @@
\tabskip\zeropoint
\def\colo_palets_show_palet##1%
{\doifinsetelse\v!number{#2}{##1\hskip.5em}{}&
- \color[##1]{\vrule\!!width3em\!!height\strutht\!!depth\strutdp}%
- \graycolor[##1]{\vrule\!!width3em\!!height\strutht\!!depth\strutdp}&
+ \color[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}%
+ \graycolor[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}&
\doifinset\v!value{#2}{\hskip.5em\colorvalue{##1}}\crcr}
\halign
{\hss##&\hss##\hss&##\cr
@@ -92,11 +92,11 @@
\colo_palets_process[#1]\colo_palets_show_palet}\cr
\doifinset\v!name{#2}{#1\hskip.5em}%
\def\colo_palets_show_palet##1%
- {&\strut\color[##1]{\vrule\!!width\!!widtha\!!height\strutht\!!depth\zeropoint}}%
+ {&\strut\color[##1]{\vrule\s!width\!!widtha\s!height\strutht\s!depth\zeropoint}}%
\colo_palets_process[#1]\colo_palets_show_palet\crcr
\noalign{\vskip-\strutdepth}%
\def\colo_palets_show_palet##1%
- {&\graycolor[##1]{\vrule\!!width\!!widtha\!!height\zeropoint\!!depth\strutdp}}%
+ {&\graycolor[##1]{\vrule\s!width\!!widtha\s!height\zeropoint\s!depth\strutdp}}%
\colo_palets_process[#1]\colo_palets_show_palet\crcr
\doifinset\v!value{#2}
{\def\colo_palets_show_palet##1%
@@ -137,13 +137,13 @@
\def\colo_palets_compare##1%
{\hbox
{\setbox0\hbox
- {#1[##1]{\vrule\!!width\hsize\!!height3ex}}%
+ {#1[##1]{\vrule\s!width\hsize\s!height3ex}}%
\wd0\zeropoint
\box0
\hbox to \hsize
{\def\colo_palets_compare####1%
{\hbox to \!!widtha
- {\hss#1[####1]{\vrule\!!width.5\!!widtha\!!height2.25ex\!!depth-.75ex}\hss}}%
+ {\hss#1[####1]{\vrule\s!width.5\!!widtha\s!height2.25ex\s!depth-.75ex}\hss}}%
\processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
\endgraf}
\processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
@@ -171,8 +171,8 @@
{\halign
{\hss####\hss\cr
\doifinset\v!number{#2}{\strut##1}\cr
- \color[#1:##1]{\vrule\!!width4em\!!height\strutht\!!depth\zeropoint}\cr
- \graycolor[#1:##1]{\vrule\!!width4em\!!height\zeropoint\!!depth\strutdp}\cr
+ \color[#1:##1]{\vrule\s!width4em\s!height\strutht\s!depth\zeropoint}\cr
+ \graycolor[#1:##1]{\vrule\s!width4em\s!height\zeropoint\s!depth\strutdp}\cr
\doifinset\v!value{#2}{\colorvalue{#1:##1}\strut}\crcr}}}}%
\hbox
{\doifinset\v!name{#2}
@@ -195,8 +195,8 @@
\def\colo_groups_show_group##1%
{\doifcolor{#1:##1}
{\doifinset\v!number{#2}{##1\hskip.5em}&
- \color[#1:##1]{\vrule\!!width2.5em\!!height\strutht\!!depth\strutdp}%
- \graycolor[#1:##1]{\vrule\!!width2.5em\!!height\strutht\!!depth\strutdp}&
+ \color[#1:##1]{\vrule\s!width2.5em\s!height\strutht\s!depth\strutdp}%
+ \graycolor[#1:##1]{\vrule\s!width2.5em\s!height\strutht\s!depth\strutdp}&
\doifinset\v!value{#2}{\hskip.5em\colorvalue{#1:##1}}\crcr}}%
\halign
{\hss##&\hss##\hss&##\hss\cr
@@ -228,11 +228,11 @@
\def\colo_groups_compare_step#1#2#3%
{\hbox to \hsize
{\setbox0\hbox
- {#1[#2:#3]{\vrule\!!width\hsize\!!height3ex}}%
+ {#1[#2:#3]{\vrule\s!width\hsize\s!height3ex}}%
\wd0\zeropoint
\box0
\hbox to \hsize
- {\hss\dorecurse\!!counta{#1[#2:\recurselevel]{\vrule\!!width.5\!!widtha\!!height2.25ex\!!depth-.75ex}\hss}}}
+ {\hss\dorecurse\!!counta{#1[#2:\recurselevel]{\vrule\s!width.5\!!widtha\s!height2.25ex\s!depth-.75ex}\hss}}}
\endgraf}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/cont-log.mkiv b/Master/texmf-dist/tex/context/base/cont-log.mkiv
index 67647920dbc..5d41331434c 100644
--- a/Master/texmf-dist/tex/context/base/cont-log.mkiv
+++ b/Master/texmf-dist/tex/context/base/cont-log.mkiv
@@ -118,7 +118,7 @@
\logofont}
\def\syst_logos_meta_hyphen % there is no hyphenchar in this font
- {\discretionary{\vrule\!!height.33em\!!depth-.27em\!!width.33em}{}{}}
+ {\discretionary{\vrule\s!height.33em\s!depth-.27em\s!width.33em}{}{}}
\unexpanded\def\MetaFont
{\dontleavehmode
@@ -270,4 +270,12 @@
\unexpanded\def\MPII{MpII}
\unexpanded\def\MPIV{MpIV}
+\appendtoks
+ \def\ConTeXt {ConTeXt}%
+ \def\MetaPost{MetaPost}%
+ \def\MetaFont{MetaFont}%
+ \def\MetaFun {MetaFun}%
+ \def\TeX {TeX}%
+\to \everysimplifycommands
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/cont-new.mkii b/Master/texmf-dist/tex/context/base/cont-new.mkii
index b4958762f1a..b2df7931aea 100644
--- a/Master/texmf-dist/tex/context/base/cont-new.mkii
+++ b/Master/texmf-dist/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.05.30 11:26}
+\newcontextversion{2013.03.29 01:31}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/Master/texmf-dist/tex/context/base/cont-new.mkiv b/Master/texmf-dist/tex/context/base/cont-new.mkiv
index 5a28f8e29ff..530058fc92f 100644
--- a/Master/texmf-dist/tex/context/base/cont-new.mkiv
+++ b/Master/texmf-dist/tex/context/base/cont-new.mkiv
@@ -11,515 +11,72 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.05.30 11:26}
+\newcontextversion{2013.04.07 14:05}
-%D This file is loaded at runtime, thereby providing an
-%D excellent place for hacks, patches, extensions and new
-%D features.
+%D This file is loaded at runtime, thereby providing an excellent place for
+%D hacks, patches, extensions and new features.
\unprotect
\writestatus\m!system{beware: some patches loaded from cont-new.mkiv}
-\def\dividedsize#1#2#3% size gap n
- {\dimexpr
- \ifnum\dimexpr#1\relax>\plusone
- (\dimexpr#1\relax-\numexpr#3-1\relax\dimexpr#2\relax)/#3\else#1%
- \fi
- \relax}
-
-\def\singlewidened #1{\hbox spread 1em{\hss#1\hss}}
-\def\complexwidened[#1]#2{\hbox spread #1{\hss#2\hss}}
+% \attribute152\zerocount : marks ... lots of sweeps so best early in list
-\definecomplexorsimple\widened
-
-\let\active\activecatcode % for a while (tikz)
-
-% todo
-%
-% \def\definelocation{\dodoubleargument\dodefinelocation}
-% \def\dodefinelocation[#1][#2]{\setvalue{loc:#1}{#2}}
-%
-% \definelocation[lt] [\v!left\v!top]
-% \definelocation[tl] [\v!left\v!top]
-% \definelocation[\v!top\v!left][\v!left\v!top]
-%
-% \def\getlocation#1{\executeifdefined{loc:#1}{#1}}
-
-% \let\cs\getvalue % no, we want \cs to be czech
-
-% experimental so this may change
-
-\def\startdescriptions
- {\dosingleempty\dostartdescriptions}
-
-\def\dostartdescriptions[#1]%
- {\begingroup
- \def\item{\getvalue{#1}}%
- \let\dostoppairdescription \donothing
- \let\@@description \dostartpairdescription
- \let\@@startsomedescription\dostartsomedescription}
+%D Maybe:
-\def\stopdescriptions
- {\dostoppairdescription
- \endgroup}
+\unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox}
+\unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop}
-\def\dostartpairdescription[#1][#2]%
- {\dostoppairdescription
- \def\dostoppairdescription{\@@stopdescription{#1}}%
- \bgroup
- \def\currentdescription{#1}%
- \doifelse{\descriptionparameter{\s!do\c!state}}\v!start
- {\@@makedescription{#1}[#2]{}}
- {\@@makedescription{#1}[#2]}}
+%D Maybe:
-\def\dostartsomedescription% #1[#2]#3%
- {\bgroup
- \@@makedescription} % {#1}[#2]{#3}}
-
-% \starttext
-%
-% \definedescription[test]
-%
-% \startdescriptions
-% \test{Foo} Bar bar bar
-% \test{Foo} Bar bar bar
-% \test{Foo} Bar bar bar
-% \stopdescriptions
-%
-% \startdescriptions[test]
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \stopdescriptions
+% \startluacode
+% function context.loadfile(filename)
+% context(string.strip(io.loaddata(resolvers.findfile(filename))))
+% end
+% \stopluacode
%
-% \startdescriptions
-% \starttest{Foo} Bar bar bar \stoptest
-% \starttest{Foo} Bar bar bar \stoptest
-% \starttest{Foo} Bar bar bar \stoptest
-% \stopdescriptions
-%
-% \startdescriptions[test]
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \stopdescriptions
-%
-% \stoptext
-
-% this will be activated when
-%
-% \newinsert\thispageinsert % <- installinsertion
-%
-% \def\flushatthispage
-% {\bgroup
-% \dowithnextbox{\insert\thispageinsert{\box\nextbox}\egroup}%
-% \hbox}
-%
-% \appendtoks
-% \ifvoid\thispageinsert\else\hbox{\smashedbox\thispageinsert}\fi
-% \to \everyshipout
-
-% \definemarkedpage[nobackgrounds]
-% \markpage[nobackgrounds]
-% \doifmarkedpageelse{nobackgrounds}
-
-% Just a simple and fast hanger, for usage in macros.
-
-\def\setuphanging
- {\dodoubleempty\getparameters[\??ha]}
-
-\setuphanging
- [\c!distance=.5em]
-
-\def\starthanging
- {\noindent\bgroup
- \dowithnextbox
- {\setbox\nextbox\hbox{\flushnextbox\hskip\@@hadistance}%
- \hangindent\nextboxwd
- \hangafter\plusone
- \flushnextbox\ignorespaces}
- \hbox}
-
-\def\stophanging
- {\endgraf
- \egroup}
+% \edef\tufte{\cldcommand{loadfile("tufte.tex")}}
-% experimental
-
-\def\stophangaround
- {\endgraf
- \egroup}
-
-\def\starthangaround
- {\noindent\bgroup
- \dowithnextbox
- {\ifdim\nextboxht>\strutht\setbox\nextbox\tbox{\flushnextbox}\fi
- \setbox\nextbox\hbox{\flushnextbox\hskip\@@hadistance}%
- \getboxheight\scratchdimen\of\box\nextbox
- \getnoflines\scratchdimen
- \nextboxht\strutht
- \nextboxdp\strutdp
- \hangindent\nextboxwd
- \hangafter-\noflines
- \llap{\flushnextbox}\ignorespaces}
- \hbox}
-
-\def\modevalue#1#2#3%
- {\@EA\ifx\csname\@mode@\systemmodeprefix#1\endcsname\endcsname\enabledmode#2\else#2\fi}
-
-\def\systemmodevalue#1%
- {\modevalue{\systemmodeprefix#1}}
-
-% new, still to be improved
-%
-% \dorecurse{10}
-% {\input thuan
-% \placefigure{}{\framed[height=1.5cm]{test}}
-% \placefloatplaceholder}
-
-\def\placefloatplaceholder
- {\ifroomforfloat \else
- \scratchdimen\pagegoal
- \advance\scratchdimen-\pagetotal
- \advance\scratchdimen-3\lineheight
- \ifdim\scratchdimen>\zeropoint
- \startlinecorrection[blank]
- \mhbox{\inframed{\labeltexts{placeholder}{\lastcaptiontag}}}%
- \stoplinecorrection
- \else
- \allowbreak
- \fi
- \fi}
-
-\setuplabeltext
- [placeholder={, moved}]
-
-% move to support module, and then use context(...)
-
-\startluacode
- function commands.percentageof(str,dim)
- local n = str:match("^(.*)%%$")
- context.sprint(tex.ctxcatcodes,(n and (tonumber(n)/100)*dim .. "sp") or str)
- end
-\stopluacode
-
-\gdef\setpercentdimen#1#2%
- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
-
-% \scratchdimen=100pt \setpercentdimen\scratchdimen{10\letterpercent} \the\scratchdimen
-% \scratchdimen=100pt \setpercentdimen\scratchdimen{5pt} \the\scratchdimen
-
-\bgroup \permitcircumflexescape
-
-\obeylines % don't remove %'s !
-
-\gdef\collapsedspace#1%
- {\ifx#1^^M%
- \expandafter\collapsedspace
- \else
- \space
- \expandafter#1%
- \fi}
-
-\gdef\collapsespaces
- {\prependtoksonce\relax\to\everyeof%
- \ignorelines%
- \ignoretabs%
- \let\obeyedspace\collapsedspace%
- \obeyspaces}
-
-\egroup
-
-\def\inlinedbox
- {\bgroup
- \dowithnextbox
- {\scratchdimen\nextboxht
- \advance\scratchdimen\nextboxdp
- \advance\scratchdimen-\lineheight
- \divide\scratchdimen\plustwo
- \advance\scratchdimen\strutdepth
- \setbox\nextbox\hbox{\lower\scratchdimen\flushnextbox}%
- \nextboxht\strutht
- \nextboxdp\strutdp
- \flushnextbox
- \egroup}%
- \hbox}
-
-\def\dimenratio#1#2% etex only
- {\withoutpt\the\dimexpr2\dimexpr(#1)/\dimexpr(#2)/32768\relax\relax}
-
-\def\doxprecurse#1#2%
- {\ifnum#1=\zerocount % no \ifcase
- \expandafter\gobblethreearguments
- \else
- #2\expandafter\expandafter\expandafter\doxprecurse\expandafter
- \fi\expandafter{\the\numexpr#1-1\relax}{#2}}
-
-\def\buttonframed{\dodoubleempty\localframed[\??bt]} % goodie
-
-\unexpanded\def\asciistr#1{\dontleavehmode{\defconvertedargument\ascii{#1}\verbatimfont\ascii}}
-
-\def\shapefill{\vskip\zeropoint\!!plus\lineheight\!!minus\lineheight\relax}
-
-% \ruledhbox
-% {\startignorespaces
-% \def\oeps{a}
-% \startignorespaces
-% \def\oeps{a}
-% \stopignorespaces
-% \def\oeps{a}
-% \stopignorespaces
-% \oeps}
+%D Needs some work:
-\newsignal\boissignal
-\newcount \boislevel
+\unexpanded\def\startgridcorrection
+ {\dosingleempty\spac_grid_correction_start}
-\long\def\startignorespaces
- {\advance\boislevel\plusone
- \ifcase\boislevel\or \ifhmode
- \hskip\boissignal
- \fi \fi
- \ignorespaces}
-
-\long\def\stopignorespaces
- {\ifcase\boislevel\or \ifhmode
- \doloop
- {\ifdim\lastskip=\zeropoint
- \exitloop
- \else\ifdim\lastskip=\boissignal
- \unskip
- \exitloop
- \else
- \unskip
- \fi\fi}%
- \fi \fi
- \advance\boislevel\minusone}
-
-\def\minimalhbox#1#%
- {\dowithnextbox
- {\bgroup
- \setbox\scratchbox\hbox#1{\hss}%
- \ifdim\nextboxwd<\wd\scratchbox\nextboxwd\wd\scratchbox\fi
- \flushnextbox
- \egroup}
- \hbox}
-
-\def\gobbleuntilempty#1\empty{}
-
-\def\dodimchoice#1#2#3%
- {\ifdim#1#2%
- #3\@EA\gobbleuntilempty
- \else
- \@EA\dodimchoice
- \fi{#1}}
-
-\def\donumchoice#1#2#3%
- {\ifnum#1#2%
- #3\@EA\gobbleuntilempty
- \else
- \@EA\dodimchoice
- \fi{#1}}
-
-\def\dimchoice#1#2{\dodimchoice{#1}#2{=#1}{#1}\empty}
-\def\numchoice#1#2{\donumchoice{#1}#2{=#1}{#1}\empty}
-
-% \the\dimexpr(\dimchoice {7pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-% \the\dimexpr(\dimchoice{11pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-% \the\dimexpr(\dimchoice{14pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-
-\def\tabulaterule % to be redone, not correct
- {\dotabulaterule
- {\hrule\!!height.5\scratchdimen\!!depth.5\scratchdimen\relax
- \doifvalue{\??tt\currenttabulate\c!distance}\v!grid
- {\kern-\scratchdimen}}} % experimental tm-prikkels
-% so far
-
-% between alignment lines certain rules apply, and even a
-% simple test can mess up a table, which is why we have a
-% special test facility
-%
-% \ruledvbox
-% {\starttabulate[|l|p|]
-% \NC 1test \NC test \NC \NR
-% \tableifelse{\doifelse{a}{a}}{\NC Xtest \NC test \NC \NR}{}%
-% \stoptabulate}
-
-\long\def\tableifelse#1%
- {\tablenoalign
- {#1%
- {\aftergroup \firstoftwoarguments}%
- {\aftergroup\secondoftwoarguments}}}
-
-\long \def\tableiftextelse#1{\tableifelse{\doiftextelse{#1}}}
-
-\def\tightvbox{\dowithnextbox{\nextboxdp\zeropoint\flushnextbox}\vbox}
-\def\tightvtop{\dowithnextbox{\nextboxht\zeropoint\flushnextbox}\vtop}
-
-% what is this stupid macro meant for:
-
-\def\hyphenationpoint
- {\hskip\zeropoint}
-
-\def\hyphenated#1%
- {\bgroup
- \!!counta\zerocount
- \def\hyphenated##1{\advance\!!counta\plusone}%
- \handletokens#1\with\hyphenated
- \!!countb\plusone
- \def\hyphenated##1%
- {##1%
- \advance\!!countb\plusone\relax
- \ifnum\!!countb>2 \ifnum\!!countb<\!!counta
- \hyphenationpoint
- \fi\fi}%
- \handletokens#1\with\hyphenated
- \egroup}
-
-\def\obeysupersubletters
- {\let\super\normalsuper
- \let\suber\normalsuber
- \let\normalsuper\letterhat
- \let\normalsuber\letterunderscore
- \enablesupersub}
-
-\def\obeysupersubmath
- {\let\normalsuper\letterhat
- \let\normalsuber\letterunderscore
- \enablesupersub}
-
-\def\startgridcorrection
- {\dosingleempty\dostartgridcorrection}
-
-\def\dostartgridcorrection[#1]%
+\def\spac_grid_correction_start[#1]%
{\ifgridsnapping
\snaptogrid[#1]\vbox\bgroup
\else
\startbaselinecorrection
\fi}
-\def\stopgridcorrection
+\unexpanded\def\stopgridcorrection
{\ifgridsnapping
\egroup
\else
\stopbaselinecorrection
\fi}
-
-\def\checkgridsnapping
+
+\unexpanded\def\checkgridsnapping
{\lineskip\ifgridsnapping\zeropoint\else\normallineskip\fi}
-
-\def\startplaatsen
- {\dosingleempty\dostartplaatsen}
-\def\dostartplaatsen[#1]% tzt n*links etc
- {\endgraf
- \noindent\bgroup
- \setlocalhsize
- \hbox to \localhsize\bgroup
- \doifnot{#1}\v!left\hss
- \def\stopplaatsen
- {\unskip\unskip\unskip
- \doifnot{#1}\v!right\hss
- \egroup
- \egroup
- \endgraf}%
- \gobblespacetokens}
+%D Probably obsolete:
-% \startplaatsen[links] bla \stopplaatsen
-
-\def\startcolumnmakeup % don't change
+\unexpanded\def\startcolumnmakeup % don't change
{\bgroup
- \getrawnoflines\textheight % teksthoogte kan topskip hebben, dus raw
- \scratchdimen\noflines\lineheight
- \advance\scratchdimen-\lineheight
- \advance\scratchdimen\topskip
- \setbox\scratchbox
- \ifcase\showgridstate\vbox\else\ruledvbox\fi to \scratchdimen\bgroup
- \forgetall} % ! don't change
+ \getrawnoflines\textheight % raw as we can have topskip
+ \setbox\scratchbox\vbox to \dimexpr\noflines\lineheight-\lineheight+\topskip\relax
+ \bgroup
+ \forgetall}
-\def\stopcolumnmakeup
+\unexpanded\def\stopcolumnmakeup
{\egroup
\dp\scratchbox\zeropoint
\wd\scratchbox\textwidth
\box\scratchbox
\egroup
\page_otr_command_synchronize_hsize}
-
-\long\def\startexternalfigure
- {\dotripleempty\dostartexternalfigure}
-
-\long\def\dostartexternalfigure[#1][#2][#3]#4\stopexternalfigure
- {\gdef\figuredescription{#4}%
- \externalfigure[#1][#2][#3]%
- \globallet\figuredescription\empty}
-
-\let\figuredescription\empty
-
-% incomplete, will be a special case of float placement
-
-\def\startfixed{\dosingleempty\dostartfixed}
-
-\def\dostartfixed[#1]%
- {\expanded{\dowithnextbox{\noexpand\dodofixed{\ifhmode0\else1\fi}{#1}}}%
- \vbox\bgroup
- \setlocalhsize}
-
-\def\stopfixed
- {\egroup}
-
-\def\dodofixed#1#2%
- {\ifcase#1\relax
- \processaction
- [#2]
- [ \v!high=>\bbox {\flushnextbox},
- \v!low=>\tbox {\flushnextbox},
- \v!middle=>\vcenter{\flushnextbox},
- \v!lohi=>\vcenter{\flushnextbox},
- \s!unknown=>\tbox {\flushnextbox},
- \s!default=>\tbox {\flushnextbox}]%
- \else
- \startbaselinecorrection
- \noindent\flushnextbox
- \stopbaselinecorrection
- \fi}
-
-% \startitemize
-%
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-%
-% \page
-%
-% \item \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \page
-%
-% \item test \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \page
-%
-% \item test \par \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \stopitemize
-
-\def\obeyfollowingtoken{{}} % end \cs scanning
-
-% potential new defaults:
-%
-% \setbreakpoints[compound]
-% till we fixed all styles:
+%D Till we have fixed all styles:
\let\\=\crlf
diff --git a/Master/texmf-dist/tex/context/base/cont-nop.mkiv b/Master/texmf-dist/tex/context/base/cont-nop.mkiv
new file mode 100644
index 00000000000..c8188503e85
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/cont-nop.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=cont-nop,
+%D version=2012.06.01,
+%D title=\CONTEXT\ Miscellaneous Macros,
+%D subtitle=Startup Dummy,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\writestatus\m!system{loading dummy replacement for jobname}
+
+\protect
+
+\finishjob
+
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/cont-yes.mkiv b/Master/texmf-dist/tex/context/base/cont-yes.mkiv
new file mode 100644
index 00000000000..5eae3eecb77
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/cont-yes.mkiv
@@ -0,0 +1,91 @@
+%D \module
+%D [ file=cont-yes,
+%D version=2012.06.01,
+%D title=\CONTEXT\ Miscellaneous Macros,
+%D subtitle=Startup Stub,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% At some point I will reconsider the \starttext .. \stoptext
+% wrapping as we can assume proper styling. It's a left-over from
+% mkii that we need to get rid of.
+
+\startluacode
+
+ -- When a style is loaded there is a good chance that we never enter
+ -- this code.
+
+ local report = logs.reporter("system")
+
+ environment.initializefilenames() -- todo: check if we really need to pre-prep the filename
+
+ local arguments = environment.arguments
+ local suffix = environment.suffix
+ local filename = environment.filename
+
+ if suffix == "xml" or arguments.forcexml then
+
+ -- Maybe we should move the preamble parsing here as it
+ -- can be part of (any) loaded (sub) file. The \starttext
+ -- wrapping might go away.
+
+ report("processing as xml: %s",filename)
+
+ context.starttext()
+ context.xmlprocess("main",filename,"")
+ context.stoptext()
+
+ elseif suffix == "cld" or arguments.forcecld then
+
+ report("processing as cld: %s",filename)
+
+ context.runfile(filename)
+
+ elseif suffix == "lua" or arguments.forcelua then
+
+ -- The wrapping might go away. Why is it there in the
+ -- first place?
+
+ report("processing as lua: %s",filename)
+
+ context.starttext()
+ context.ctxlua(string.format('dofile("%s")',filename))
+ context.stoptext()
+
+ elseif suffix == "mp" or arguments.forcemp then
+
+ report("processing as metapost: %s",filename)
+
+ context.starttext()
+ context.processMPfigurefile(filename)
+ context.stoptext()
+
+ -- elseif suffix == "prep" then
+ --
+ -- -- Why do we wrap here? Because it can be xml. Let's get rid
+ -- -- of prepping in general.
+ --
+ -- context.starttext()
+ -- context.input(filename)
+ -- context.stoptext()
+
+ else
+
+ -- \writestatus{system}{processing as tex}
+ -- We have a regular tex file so no \starttext yet as we can
+ -- load fonts.
+
+ context.input(filename)
+
+ end
+
+ context.finishjob()
+
+\stopluacode
+
+\endinput
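-- A purely illustrative companion to the suffix dispatch above: a file with
-- the cld suffix is handed to context.runfile, so a minimal hello.cld
-- (hypothetical name and content) could look like this:
--
-- context.starttext()
-- context.chapter("Hello")
-- context("Some text generated from Lua.")
-- context.stoptext()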
diff --git a/Master/texmf-dist/tex/context/base/context-base.lmx b/Master/texmf-dist/tex/context/base/context-base.lmx
index 09817463b94..482d43e8866 100644
--- a/Master/texmf-dist/tex/context/base/context-base.lmx
+++ b/Master/texmf-dist/tex/context/base/context-base.lmx
@@ -2,6 +2,8 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!-- compare with lmx framework variant -->
+
<!--
filename : context-base.xml
comment : companion to mtx-server-ctx-startup.tex
@@ -12,72 +14,75 @@
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
- <title><?lua pv('title') ?></title>
+ <title><?lua inject(variables.title) ?></title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
- <?lua if (v('refreshtime') and (tonumber(v('refreshtime')) or 0) > 0) and v('refreshurl') then ?>
- <meta http-equiv='refresh' content="<?lua pv('refreshtime') ?>; <?lua pv('refreshurl') ?>"/>
+ <?lua if variables.refreshtime and (tonumber(variables.refreshtime) or 0) > 0 and variables.refreshurl then ?>
+ <meta http-equiv='refresh' content="<?lua inject(variables.refreshtime) ?>; <?lua inject(variables.refreshurl) ?>"/>
<?lua end ?>
<style type="text/css">
- <?lmx-include context.css ?>
+ <?lmx-include context.css strip ?>
+ </style>
+ <style type="text/css">
+ <?lua inject(variables.cssdata) ?>
</style>
- <?lua if v('styles') then for k, v in ipairs(v('styles')) do ?>
- <link rel="StyleSheet" href="<?lua p(v) ?>" type="text/css" />
+ <?lua if variables.styles then for k, v in ipairs(variables.styles) do ?>
+ <link rel="StyleSheet" href="<?lua inject(v) ?>" type="text/css" />
<?lua end end ?>
</head>
- <?lua if (v('refreshtime') and (tonumber(v('refreshtime')) or 0) > 0) and v('refreshurl') then ?>
+ <?lua if variables.refreshtime and (tonumber(variables.refreshtime) or 0) > 0 and variables.refreshurl then ?>
<!--[if IE]>
<script type="text/javascript">
- setTimeout("document.location='<?lua pv('refreshurl') ?>'", 1000*<?lua pv('refreshtime') ?>);
+ setTimeout("document.location='<?lua inject(variables.refreshurl) ?>'", 1000*<?lua inject(variables.refreshtime) ?>);
</script>
<![endif]-->
<?lua end ?>
- <?lua if v('action') then ?>
- <form action="<?lua pv(action) ?>" enctype="multi-part/form-data" method="post">
+ <?lua if variables.action then ?>
+ <form action="<?lua inject(variables.action) ?>" enctype="multi-part/form-data" method="post">
<?lua end ?>
<body>
- <div id="top">
+ <div id="top"><?lua if variables.title then ?>
<div id="top-one">
- <div id="top-two">
- <?lua pv('title') ?>
- </div>
+ <div id="top-two"><?lua
+ inject(variables.title)
+ ?> </div>
</div>
- </div>
- <div id="bottom">
+ <?lua end ?></div>
+ <div id="bottom"><?lua if variables.bottomtext then ?>
<div id="bottom-one">
- <div id="bottom-two">
- <?lua pv('bottomtext') ?>
- </div>
+ <div id="bottom-two"><?lua
+ inject(variables.bottomtext)
+ ?></div>
</div>
- </div>
- <div id="left">
+ <?lua end ?></div>
+ <div id="left"><?lua if variables.lefttext then ?>
<div id="left-one">
- <div id="left-two">
- <?lua pv('lefttext') ?>
- </div>
+ <div id="left-two"><?lua
+ inject(variables.lefttext)
+ ?></div>
</div>
- </div>
- <div id="right">
+ <?lua end ?></div>
+ <div id="right"><?lua if variables.righttext then ?>
<div id="right-safari">
<div id="right-one">
<div id="right-two">
<div id="right-three">
<div id="right-four">
- <div id="right-five">
- <?lua pv('righttext') ?>
- </div>
+ <div id="right-five"><?lua
+ inject(variables.righttext)
+ ?></div>
</div>
</div>
</div>
</div>
</div>
- </div>
+ <?lua end ?></div>
<div id="main">
<div id='main-settings'>
- <?lua pv('maintext') ?>
+ <?lua inject(variables.maintext) ?>
</div>
</div>
</body>
- <?lua if v('action') then ?>
+ <?lua if variables.action then ?>
</form>
<?lua end ?>
</html>
diff --git a/Master/texmf-dist/tex/context/base/context-help.lmx b/Master/texmf-dist/tex/context/base/context-help.lmx
index 5401fb65db0..14049391506 100644
--- a/Master/texmf-dist/tex/context/base/context-help.lmx
+++ b/Master/texmf-dist/tex/context/base/context-help.lmx
@@ -19,10 +19,12 @@
</script>
<title><?lua pv('title') ?></title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <!-- <base target="context-help" /> -->
+ <style type="text/css">
+ <?lmx-include context.css ?>
+ </style>
<style type="text/css">
<!--
-<?lmx-include context.css ?>
-
#main-left {
position: absolute;
left: 0% ;
diff --git a/Master/texmf-dist/tex/context/base/context-version.pdf b/Master/texmf-dist/tex/context/base/context-version.pdf
index 7b8733c8827..27388e1a084 100644
--- a/Master/texmf-dist/tex/context/base/context-version.pdf
+++ b/Master/texmf-dist/tex/context/base/context-version.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/context-version.png b/Master/texmf-dist/tex/context/base/context-version.png
index bb280817c4f..a8a38184bdc 100644
--- a/Master/texmf-dist/tex/context/base/context-version.png
+++ b/Master/texmf-dist/tex/context/base/context-version.png
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/context.mkii b/Master/texmf-dist/tex/context/base/context.mkii
index 8cd02fd9e94..1309ce1cbc4 100644
--- a/Master/texmf-dist/tex/context/base/context.mkii
+++ b/Master/texmf-dist/tex/context/base/context.mkii
@@ -1,4 +1,4 @@
-%D \module
+ %D \module
%D [ file=context,
%D version=2008.28.10, % 1995.10.10,
%D title=\CONTEXT,
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.05.30 11:26}
+\edef\contextversion{2013.03.29 01:31}
%D For those who want to use this:
diff --git a/Master/texmf-dist/tex/context/base/context.mkiv b/Master/texmf-dist/tex/context/base/context.mkiv
index 5044edae138..224944aec24 100644
--- a/Master/texmf-dist/tex/context/base/context.mkiv
+++ b/Master/texmf-dist/tex/context/base/context.mkiv
@@ -19,11 +19,14 @@
%D 2004.8.30 the low level interface is english. Watch out and adapt
%D your styles an modules.
+% \everypar{\writestatus{!!!!}{some spurious input in line \the\inputlineno}\wait}
+
%D The order of loading will change when all modules have been cleaned
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.05.30 11:26}
+\edef\contextversion{2013.04.07 14:05}
+\edef\contextkind {beta}
%D For those who want to use this:
@@ -62,10 +65,13 @@
\expandafter\end
\fi
-%D There is only this way to pass the version info
-%D to \LUA\ (currently).
+%D There is only this way to pass the version info to \LUA\ (currently). Hm, we could
+%D now put it into the environment.
\newtoks\contextversiontoks \contextversiontoks\expandafter{\contextversion}
+\newtoks\contextkindtoks \contextkindtoks \expandafter{\contextkind}
+
+% \normaleverypar{\wait} % uncomment for test of funny injections
%D Now the more fundamental code gets defined.
@@ -85,8 +91,8 @@
\loadmarkfile{cldf-ini}
-% From here on we have \unexpanded being \normalprotected, as we
-% already had \unexpanded long before etex came around.
+% From here on we have \unexpanded being \normalprotected, as we already had
+% \unexpanded long before etex came around.
\loadmarkfile{syst-aux}
\loadmarkfile{syst-lua}
@@ -107,9 +113,10 @@
\loadmarkfile{mult-ini}
\loadmarkfile{mult-sys}
+\loadmarkfile{mult-aux}
\loadmarkfile{mult-def}
\loadmarkfile{mult-chk}
-\loadmarkfile{mult-aux}
+%loadmarkfile{mult-aux} % moved up
\loadmkvifile{mult-dim}
\loadmarkfile{cldf-int} % interface
@@ -120,7 +127,7 @@
\loadmarkfile{attr-ini}
-\loadmarkfile{core-var}
+\loadmarkfile{core-ini}
\loadmarkfile{core-env}
\loadmarkfile{layo-ini}
@@ -131,7 +138,7 @@
\loadmarkfile{node-fin}
\loadmarkfile{node-mig}
-\loadmarkfile{node-par}
+\loadmarkfile{typo-bld} % par builders
%loadmarkfile{node-pag}
\loadmarkfile{back-ini}
@@ -140,26 +147,26 @@
\loadmarkfile{attr-lay}
\loadmarkfile{attr-neg}
\loadmarkfile{attr-eff}
+\loadmarkfile{attr-mkr}
\loadmarkfile{trac-tex}
-\loadmarkfile{trac-deb}
+\loadmarkfile{trac-deb} % will move up
+\loadmarkfile{trac-ctx} % maybe move up
%loadmarkfile{blob-ini} % not to be used, we only use a helper
\loadmarkfile{supp-box}
-\loadmarkfile{supp-vis}
-\loadmarkfile{supp-fun} % close to obsolete
+%loadmarkfile{supp-vis} % replaced by trac-vis
+%loadmarkfile{supp-fun} % mostly replaced
\loadmarkfile{supp-ran}
\loadmarkfile{supp-mat}
-\loadmarkfile{supp-ali}
-\loadmarkfile{supp-num} % replaced
+\loadmarkfile{spac-cha}
+%loadmarkfile{supp-num} % obsolete
\loadmarkfile{typo-ini}
-\loadmarkfile{page-ins}
-
\loadmkvifile{file-syn}
\loadmkvifile{file-mod}
@@ -186,7 +193,7 @@
\loadmarkfile{node-bck} % overloads anch-pgr (experimental and undocumented)
-\loadmarkfile{trac-vis}
+\loadmarkfile{pack-cut} % leftovers from trac-vis
\loadmarkfile{lang-mis}
\loadmarkfile{lang-url}
@@ -195,7 +202,7 @@
\loadmkvifile{file-job} % why so late?
-\loadmarkfile{symb-ini}
+\loadmarkfile{symb-ini} % brrr depends on fonts
\loadmarkfile{sort-ini}
@@ -228,6 +235,7 @@
\loadmarkfile{spac-ali}
\loadmarkfile{spac-hor}
+\loadmarkfile{spac-flr}
\loadmarkfile{spac-ver}
\loadmarkfile{spac-lin}
\loadmarkfile{spac-pag}
@@ -258,6 +266,7 @@
\loadmarkfile{page-var}
\loadmkvifile{page-otr}
\loadmarkfile{page-ini}
+\loadmarkfile{page-ins}
\loadmarkfile{page-fac}
\loadmarkfile{page-brk}
\loadmarkfile{page-col}
@@ -274,9 +283,10 @@
\loadmkvifile{strc-flt}
-\loadmarkfile{page-mis}
+\loadmarkfile{page-pst}
\loadmkvifile{page-mbk}
-\loadmarkfile{page-mul}
+\loadmarkfile{page-mul} % partly overloaded
+\loadmarkfile{page-mix} % new
\loadmarkfile{page-set}
\loadmarkfile{pack-lyr}
\loadmarkfile{pack-pos}
@@ -286,10 +296,11 @@
\loadmarkfile{page-par}
\loadmarkfile{typo-pag}
\loadmarkfile{typo-mar}
+\loadmarkfile{typo-itm}
\loadmarkfile{buff-ini}
\loadmarkfile{buff-ver}
-\loadmarkfile{buff-par}
+\loadmkvifile{buff-par}
\loadmarkfile{buff-imp-tex} % optional as also runtime if not loaded
\loadmarkfile{buff-imp-mp} % optional as also runtime if not loaded
@@ -302,14 +313,16 @@
\loadmarkfile{strc-blk}
\loadmarkfile{page-imp}
-\loadmarkfile{page-sel} % optional
-\loadmarkfile{page-com} % optional
+\loadmkvifile{page-sel} % optional
+\loadmkvifile{page-inj} % optional
\loadmkvifile{scrn-pag}
\loadmkvifile{scrn-wid}
\loadmkvifile{scrn-but}
\loadmkvifile{scrn-bar}
+\loadmarkfile{page-com} % optional (after scrn-pag)
+
\loadmarkfile{strc-bkm} % bookmarks
\loadmarkfile{tabl-com}
@@ -323,6 +336,7 @@
\loadmarkfile{tabl-ltb}
\loadmarkfile{tabl-tsp}
\loadmkvifile{tabl-xtb}
+\loadmarkfile{tabl-mis}
\loadmarkfile{java-ini}
@@ -331,7 +345,7 @@
\loadmarkfile{char-enc} % will move up
-\loadmkvifile{font-lib}
+\loadmkvifile{font-lib} % way too late
\loadmkvifile{font-fil}
\loadmkvifile{font-var}
\loadmkvifile{font-fea}
@@ -340,14 +354,17 @@
\loadmkvifile{font-sym}
\loadmkvifile{font-sty}
\loadmkvifile{font-set}
-
\loadmkvifile{font-emp}
\loadmarkfile{font-pre}
\loadmarkfile{font-unk}
\loadmarkfile{font-tra}
+\loadmarkfile{font-chk}
\loadmarkfile{font-uni}
\loadmkvifile{font-col}
-\loadmarkfile{font-gds}
+\loadmkvifile{font-gds}
+\loadmkvifile{font-aux}
+
+\loadmarkfile{typo-lan}
\loadmarkfile{lxml-css}
@@ -355,6 +372,9 @@
\loadmarkfile{blob-ini} % not to be used, we only use a helper
+\loadmarkfile{trac-vis}
+\loadmarkfile{trac-jus}
+
\loadmarkfile{typo-cln}
\loadmarkfile{typo-spa}
\loadmarkfile{typo-krn}
@@ -380,6 +400,7 @@
\loadmarkfile{meta-tex}
\loadmarkfile{meta-fun}
\loadmarkfile{meta-pag}
+\loadmarkfile{meta-grd}
\loadmarkfile{page-mrk} % depends on mp
@@ -399,11 +420,14 @@
\loadmarkfile{math-for}
\loadmarkfile{math-def}
\loadmarkfile{math-ali}
-\loadmarkfile{math-arr}
+%loadmarkfile{math-arr}
+\loadmkvifile{math-stc}
\loadmarkfile{math-frc}
+\loadmarkfile{math-mis}
\loadmarkfile{math-scr}
\loadmarkfile{math-int}
\loadmarkfile{math-del}
+\loadmarkfile{math-fen}
\loadmarkfile{math-inl}
\loadmarkfile{math-dis}
%loadmarkfile{math-lan}
@@ -417,14 +441,12 @@
\loadmarkfile{typo-scr}
-\loadmarkfile{core-fnt}
\loadmarkfile{node-rul}
-\loadmarkfile{node-spl}
+\loadmkvifile{font-sol} % font solutions
\loadmkvifile{strc-not}
\loadmkvifile{strc-lnt}
-\loadmarkfile{core-mis}
\loadmarkfile{pack-com}
\loadmarkfile{typo-del}
@@ -454,9 +476,8 @@
\loadmarkfile{cldf-ver} % verbatim, this can come late
\loadmarkfile{cldf-com} % commands, this can come late
-\loadmarkfile{core-ctx}
+\loadmarkfile{core-ctx} % this order might change but we need to check dependencies / move to another namespace

-\loadmarkfile{core-ini}
\loadmarkfile{core-def}
%usemodule[x][res-04] % xml resource libraries
@@ -465,7 +486,7 @@
% now we hook in backend code (needs checking)
-\loadmarkfile{back-pdf} % actually, this one should load the next three
+\loadmarkfile{back-pdf} % actually, this one should load the next three using document.arguments.backend
\loadmarkfile{mlib-pdf}
\loadmarkfile{mlib-pps}
\loadmarkfile{meta-pdf}
@@ -484,9 +505,7 @@
\to \everyjob
\appendtoks
- \ctxlua{statistics.savefmtstatus("\jobname","\contextversion","context.mkiv")}% can become automatic
+ \ctxlua{statistics.savefmtstatus("\jobname","\contextversion","context.mkiv","\contextkind")}% can become automatic
\to \everydump
-% \tracingall
-
\errorstopmode \dump \endinput
diff --git a/Master/texmf-dist/tex/context/base/context.rme b/Master/texmf-dist/tex/context/base/context.rme
index 1ce6b25c0da..ff9ffb8ac23 100644
--- a/Master/texmf-dist/tex/context/base/context.rme
+++ b/Master/texmf-dist/tex/context/base/context.rme
@@ -14,7 +14,10 @@ real problem as the engines don't change much either.
Early 2011 the code base between MkII and MkIV got split completely
and there is no shared code any longer, apart from some styles and
-modules.
+modules. From the perspective of ConTeXt we now consider XeTeX to be
+obsolete, although we will keep supporting it in MkII. As pdftex is
+still used in older workflows, we will support it as long as it's
+around.
The main files context.mkii and context.mkiv are normally not used
directly but instead we use the interface specific formats:
@@ -76,4 +79,4 @@ Don't hesitate to ask questions. ConTeXt can do a lot, but the manuals
always lag behind and can be incomplete.
-------------------------
-Hans Hagen, pragma@wxs.nl
+Hans Hagen, pragma @ wxs . nl
diff --git a/Master/texmf-dist/tex/context/base/context.todo b/Master/texmf-dist/tex/context/base/context.todo
deleted file mode 100644
index 27d914a7b6e..00000000000
--- a/Master/texmf-dist/tex/context/base/context.todo
+++ /dev/null
@@ -1,6 +0,0 @@
-% marginrules
-
-% consistently use label/name/tag
-% consistently use type/kind
-
-% option vs options
diff --git a/Master/texmf-dist/tex/context/base/core-con.lua b/Master/texmf-dist/tex/context/base/core-con.lua
index e4b516e3b43..cb284b9a781 100644
--- a/Master/texmf-dist/tex/context/base/core-con.lua
+++ b/Master/texmf-dist/tex/context/base/core-con.lua
@@ -14,38 +14,37 @@ slower but look nicer this way.</p>
<p>Some code may move to a module in the language namespace.</p>
--ldx]]--
-local utf = unicode.utf8
+local command, context = commands, context
local floor, date, time, concat = math.floor, os.date, os.time, table.concat
-local lower, format, rep, match = string.lower, string.format, string.rep, string.match
+local lower, rep, match = string.lower, string.rep, string.match
local utfchar, utfbyte = utf.char, utf.byte
local tonumber, tostring = tonumber, tostring
-local settings_to_array = utilities.parsers.settings_to_array
-local allocate = utilities.storage.allocate
+local context = context
-local context = context
+local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
+local formatters = string.formatters
+local variables = interfaces.variables
-local variables = interfaces.variables
+converters = converters or { }
+local converters = converters
-converters = converters or { }
-local converters = converters
+languages = languages or { }
+local languages = languages
-languages = languages or { }
-local languages = languages
-
-local function number(n)
- return tonumber(n)
-end
-
-converters.number = number
+converters.number = tonumber
+converters.numbers = tonumber
function commands.number(n) context(n) end
+commands.numbers = commands.number
+
-- to be reconsidered ... languages namespace here, might become local plus a register command
local counters = allocate {
- ['**'] = {
+ ['default'] = { -- no metatable as we do a test on keys
0x0061, 0x0062, 0x0063, 0x0064, 0x0065,
0x0066, 0x0067, 0x0068, 0x0069, 0x006A,
0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
@@ -135,7 +134,7 @@ counters['kr'] = counters['korean']
counters['kr-p'] = counters['korean-parent']
counters['kr-c'] = counters['korean-circle']
-local fallback = utf.byte('0')
+local fallback = utfbyte('0')
local function chr(n,m)
return (n > 0 and n < 27 and utfchar(n+m)) or ""
@@ -165,7 +164,7 @@ local function maxchrs(n,m,cmd,t)
maxchrs(floor((n-1)/m),m,cmd)
n = (n-1)%m + 1
end
- t[#t+1] = format("%s{%s}",cmd,n)
+ t[#t+1] = formatters["%s{%s}"](cmd,n)
if n <= m then
return concat(t)
end
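
In this hunk formatters["..."](...) replaces string.format. A hedged sketch of the underlying idea (ConTeXt's real string.formatters is more elaborate and adds extra directives; only the caching pattern is shown, in plain Lua):

-- Looking up a format string returns a reusable formatting function, cached in
-- the table, so hot loops do not re-handle the format string on every call.
local formatters = setmetatable({ }, {
    __index = function(t, fmt)
        local f = function(...) return string.format(fmt, ...) end
        rawset(t, fmt, f)               -- cache the compiled formatter
        return f
    end,
})

local f = formatters["%s{%s}"]
print(f("\\chr", 12))   --> \chr{12}
print(f("\\chr", 13))   --> \chr{13}
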
@@ -178,51 +177,44 @@ converters.maxchrs = maxchrs
local lowercharacter = characters.lcchars
local uppercharacter = characters.ucchars
-local function do_alphabetic(n,mapping,mapper,t)
+local function do_alphabetic(n,mapping,mapper,t) -- todo: make zero based variant (initial n + 1)
if not t then
t = { }
end
- local chr = mapping[n] or fallback
- if mapper then
- chr = mapper[chr]
- end
local max = #mapping
if n > max then
do_alphabetic(floor((n-1)/max),mapping,mapper,t)
n = (n-1) % max + 1
end
- t[#t+1] = chr
+ local chr = mapping[n] or fallback
+ t[#t+1] = mapper and mapper[chr] or chr
if n <= max then
return concat(t)
end
end
-local function alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters['**'],lowercharacter)
+function converters.alphabetic(n,code)
+ return do_alphabetic(n,counters[code] or counters.default,lowercharacter)
end
-local function Alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters['**'],uppercharacter)
+function converters.Alphabetic(n,code)
+ return do_alphabetic(n,counters[code] or counters.default,uppercharacter)
end
-local function character (n) return chr (n,96) end
-local function Character (n) return chr (n,64) end
-local function characters(n) return chrs(n,96) end
-local function Characters(n) return chrs(n,64) end
-
-converters.alphabetic = alphabetic
-converters.Alphabetic = Alphabetic
-converters.character = character
-converters.Character = Character
-converters.characters = characters
-converters.Characters = Characters
-
-function commands.alphabetic(n,c) context(alphabetic(n,c)) end
-function commands.Alphabetic(n,c) context(Alphabetic(n,c)) end
-function commands.character (n) context(character (n)) end
-function commands.Character (n) context(Character (n)) end
-function commands.characters(n) context(characters(n)) end
-function commands.Characters(n) context(Characters(n)) end
+local lower_offset = 96
+local upper_offset = 64
+
+function converters.character (n) return chr (n,lower_offset) end
+function converters.Character (n) return chr (n,upper_offset) end
+function converters.characters(n) return chrs(n,lower_offset) end
+function converters.Characters(n) return chrs(n,upper_offset) end
+
+function commands.alphabetic(n,c) context(do_alphabetic(n,counters[c],lowercharacter)) end
+function commands.Alphabetic(n,c) context(do_alphabetic(n,counters[c],uppercharacter)) end
+function commands.character (n) context(chr (n,lower_offset)) end
+function commands.Character (n) context(chr (n,upper_offset)) end
+function commands.characters(n) context(chrs(n,lower_offset)) end
+function commands.Characters(n) context(chrs(n,upper_offset)) end
local days = {
[false] = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 },
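
For reference, a self-contained sketch of what do_alphabetic above computes, using a plain a..z mapping instead of the counter tables and leaving out the mapper/fallback handling:

-- A number is rendered in a 'base #letters' system, most significant digit
-- first, by recursing on the quotient before appending the current "digit".
local floor, concat = math.floor, table.concat

local function alphabetic(n, letters, t)
    t = t or { }
    local max = #letters
    if n > max then
        alphabetic(floor((n-1)/max), letters, t)
        n = (n-1) % max + 1
    end
    t[#t+1] = letters[n]
    return concat(t)
end

local az = { }
for i=1,26 do az[i] = string.char(96+i) end   -- "a" .. "z"

print(alphabetic( 1, az))   --> a
print(alphabetic(26, az))   --> z
print(alphabetic(27, az))   --> aa
print(alphabetic(28, az))   --> ab
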
@@ -245,52 +237,35 @@ local function nofdays(year,month)
return days[isleapyear(year)][month]
end
-local function year () return date("%Y") end
-local function month () return date("%m") end
-local function hour () return date("%H") end
-local function minute() return date("%M") end
-local function second() return date("%S") end
-
local function textime()
return tonumber(date("%H")) * 60 + tonumber(date("%M"))
end
+function converters.year () return date("%Y") end
+function converters.month () return date("%m") end
+function converters.hour () return date("%H") end
+function converters.minute() return date("%M") end
+function converters.second() return date("%S") end
+
converters.weekday = weekday
converters.isleapyear = isleapyear
converters.leapyear = leapyear
converters.nofdays = nofdays
-converters.year = year
-converters.month = month
-converters.hour = hour
-converters.minute = minute
-converters.second = second
converters.textime = textime
-function commands.weekday(day,month,year)
- context(weekday(day,month,year))
-end
-
-function commands.isleapyear(year)
- context(isleapyear(year))
-end
-
-function commands.leapyear(year)
- context(leapyear(year))
-end
-
-function commands.nofdays(year,month)
- context(nofdays(year,month))
-end
+function commands.weekday (day,month,year) context(weekday (day,month,year)) end
+function commands.leapyear(year) context(leapyear(year)) end -- rather useless
+function commands.nofdays (year,month) context(nofdays (year,month)) end
-function commands.year () context(year ()) end
-function commands.month () context(month ()) end
-function commands.hour () context(hour ()) end
-function commands.minute () context(minute ()) end
-function commands.second () context(second ()) end
+function commands.year () context(date("%Y")) end
+function commands.month () context(date("%m")) end
+function commands.hour () context(date("%H")) end
+function commands.minute () context(date("%M")) end
+function commands.second () context(date("%S")) end
function commands.textime() context(textime()) end
function commands.doifleapyearelse(year)
- commands.testcase(leapyear(year))
+ commands.doifelse(isleapyear(year))
end
local roman = {
@@ -307,13 +282,9 @@ local function toroman(n)
end
end
-local Romannumerals = toroman
-
-local function romannumerals(n) return lower(toroman(n)) end
-
converters.toroman = toroman
converters.Romannumerals = toroman
-converters.romannumerals = romannumerals
+converters.romannumerals = function(n) return lower(toroman(n)) end
function commands.romannumerals(n) context(lower(toroman(n))) end
function commands.Romannumerals(n) context( toroman(n)) end
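
The romannumerals/Romannumerals wrappers above only lowercase or pass through the result of toroman, whose body is not shown in this hunk. A generic sketch of such a converter (an assumption about the algorithm, not necessarily how converters.toroman is written):

-- Greedy decimal to Roman conversion, shown only to illustrate what the
-- wrappers deliver.
local romans = {
    { 1000, "M" }, { 900, "CM" }, { 500, "D" }, { 400, "CD" },
    {  100, "C" }, {  90, "XC" }, {  50, "L" }, {  40, "XL" },
    {   10, "X" }, {   9, "IX" }, {   5, "V" }, {   4, "IV" },
    {    1, "I" },
}

local function toroman(n)
    local t = { }
    for i=1,#romans do
        local value, letters = romans[i][1], romans[i][2]
        while n >= value do          -- take the largest symbol that still fits
            t[#t+1] = letters
            n = n - value
        end
    end
    return table.concat(t)
end

print(toroman(2013))           --> MMXIII
print(toroman(2013):lower())   --> mmxiii  (what romannumerals delivers)
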
@@ -366,11 +337,8 @@ end
converters.toabjad = toabjad
-local function abjadnumerals (n) return toabjad(n,false) end
-local function abjadnodotnumerals(n) return toabjad(n,true ) end
-
-converters.abjadnumerals = abjadnumerals
-converters.abjadnodotnumerals = abjadnodotnumerals
+function converters.abjadnumerals (n) return toabjad(n,false) end
+function converters.abjadnodotnumerals(n) return toabjad(n,true ) end
function commands.abjadnumerals (n) context(toabjad(n,false)) end
function commands.abjadnodotnumerals(n) context(toabjad(n,true )) end
@@ -515,23 +483,18 @@ end
converters.tochinese = tochinese
-local function chinesenumerals (n) return tochinese(n,"normal") end
-local function chinesecapnumerals(n) return tochinese(n,"cap" ) end
-local function chineseallnumerals(n) return tochinese(n,"all" ) end
-
-converters.chinesenumerals = chinesenumerals
-converters.chinesecapnumerals = chinesecapnumerals
-converters.chineseallnumerals = chineseallnumerals
+function converters.chinesenumerals (n) return tochinese(n,"normal") end
+function converters.chinesecapnumerals(n) return tochinese(n,"cap" ) end
+function converters.chineseallnumerals(n) return tochinese(n,"all" ) end
function commands.chinesenumerals (n) context(tochinese(n,"normal")) end
function commands.chinesecapnumerals(n) context(tochinese(n,"cap" )) end
function commands.chineseallnumerals(n) context(tochinese(n,"all" )) end
converters.sequences = converters.sequences or { }
+local sequences = converters.sequences
-storage.register("converters/sequences", converters.sequences, "converters.sequences")
-
-local sequences = converters.sequences
+storage.register("converters/sequences", sequences, "converters.sequences")
function converters.define(name,set)
sequences[name] = settings_to_array(set)
@@ -669,44 +632,42 @@ end
-- print(gregorian_to_jalali(2009,02,24))
-- print(jalali_to_gregorian(1387,12,06))
--- more efficient but needs testing
---
--- local escapes = utffilters.private.escapes
---
+-- -- more efficient but needs testing
+
+-- local escapes = characters.filters.utf.private.escapes
+
-- local function do_alphabetic(n,mapping,chr)
-- local max = #mapping
-- if n > max then
--- do_alphabetic(floor((n-1)/max),max,chr)
+-- do_alphabetic(floor((n-1)/max),mapping,chr)
-- n = (n-1)%max+1
-- end
-- n = chr(n,mapping)
-- context(escapes[n] or utfchar(n))
-- end
---
--- local lccodes, uccodes = characters.lccode, characters.uccode
---
+
+-- local lccodes, uccodes, safechar = characters.lccode, characters.uccode, commands.safechar
+
-- local function do_alphabetic(n,mapping,chr)
-- local max = #mapping
-- if n > max then
-- do_alphabetic(floor((n-1)/max),mapping,chr)
-- n = (n-1)%max+1
-- end
--- characters.flush(chr(n,mapping))
+-- safechar(chr(n,mapping))
-- end
---
+
-- local function lowercased(n,mapping) return characters.lccode(mapping[n] or fallback) end
-- local function uppercased(n,mapping) return characters.uccode(mapping[n] or fallback) end
---
+
-- function converters.alphabetic(n,code)
--- do_alphabetic(n,counters[code] or counters['**'],lowercased) -- lccode catches wrong tables
+-- do_alphabetic(n,counters[code] or counters.default,lowercased) -- lccode catches wrong tables
-- end
---
+
-- function converters.Alphabetic(n,code)
--- do_alphabetic(n,counters[code] or counters['**'],uppercased)
+-- do_alphabetic(n,counters[code] or counters.default,uppercased)
-- end
--- --
-
local ordinals = {
english = function(n)
local two = n % 100
@@ -753,6 +714,255 @@ function commands.ordinal(n,language)
end
end
+-- verbose numbers
+
+local data = allocate()
+local verbose = { data = data }
+converters.verbose = verbose
+
+-- verbose english
+
+local words = {
+ [0] = "zero",
+ [1] = "one",
+ [2] = "two",
+ [3] = "three",
+ [4] = "four",
+ [5] = "five",
+ [6] = "six",
+ [7] = "seven",
+ [8] = "eight",
+ [9] = "nine",
+ [10] = "ten",
+ [11] = "eleven",
+ [12] = "twelve",
+ [13] = "thirteen",
+ [14] = "fourteen",
+ [15] = "fifteen",
+ [16] = "sixteen",
+ [17] = "seventeen",
+ [18] = "eighteen",
+ [19] = "nineteen",
+ [20] = "twenty",
+ [30] = "thirty",
+ [40] = "forty",
+ [50] = "fifty",
+ [60] = "sixty",
+ [70] = "seventy",
+ [80] = "eighty",
+ [90] = "ninety",
+ [100] = "hundred",
+ [1000] = "thousand",
+ [1000^2] = "million",
+ [1000^3] = "billion",
+ [1000^4] = "trillion",
+}
+
+local function translate(n)
+ local w = words[n]
+ if w then
+ return w
+ end
+ local t = { }
+ local function compose_one(n)
+ local w = words[n]
+ if w then
+ t[#t+1] = w
+ return
+ end
+ local a, b = floor(n/100), n % 100
+ if a == 10 then
+ t[#t+1] = words[1]
+ t[#t+1] = words[1000]
+ elseif a > 0 then
+ t[#t+1] = words[a]
+ t[#t+1] = words[100]
+ -- don't say 'nine hundred zero'
+ if b == 0 then
+ return
+ end
+ end
+ if words[b] then
+ t[#t+1] = words[b]
+ else
+ a, b = floor(b/10), n % 10
+ t[#t+1] = words[a*10]
+ t[#t+1] = words[b]
+ end
+ end
+ local function compose_two(n,m)
+ if n > (m-1) then
+ local a, b = floor(n/m), n % m
+ if a > 0 then
+ compose_one(a)
+ end
+ t[#t+1] = words[m]
+ n = b
+ end
+ return n
+ end
+ n = compose_two(n,1000^4)
+ n = compose_two(n,1000^3)
+ n = compose_two(n,1000^2)
+ n = compose_two(n,1000^1)
+ if n > 0 then
+ compose_one(n)
+ end
+ return #t > 0 and concat(t," ") or tostring(n)
+end
+
+data.english = {
+ words = words,
+ translate = translate,
+}
+
+data.en = data.english
+
+-- print(translate(11111111))
+-- print(translate(2221101))
+-- print(translate(1111))
+-- print(translate(1218))
+-- print(translate(1234))
+-- print(translate(12345))
+-- print(translate(12345678900000))
+
+-- verbose spanish (unchecked)
+
+local words = {
+ [1] = "uno",
+ [2] = "dos",
+ [3] = "tres",
+ [4] = "cuatro",
+ [5] = "cinco",
+ [6] = "seis",
+ [7] = "siete",
+ [8] = "ocho",
+ [9] = "nueve",
+ [10] = "diez",
+ [11] = "once",
+ [12] = "doce",
+ [13] = "trece",
+ [14] = "catorce",
+ [15] = "quince",
+ [16] = "dieciséis",
+ [17] = "diecisiete",
+ [18] = "dieciocho",
+ [19] = "diecinueve",
+ [20] = "veinte",
+ [21] = "veintiuno",
+ [22] = "veintidós",
+ [23] = "veintitrés",
+ [24] = "veinticuatro",
+ [25] = "veinticinco",
+ [26] = "veintiséis",
+ [27] = "veintisiete",
+ [28] = "veintiocho",
+ [29] = "veintinueve",
+ [30] = "treinta",
+ [40] = "cuarenta",
+ [50] = "cincuenta",
+ [60] = "sesenta",
+ [70] = "setenta",
+ [80] = "ochenta",
+ [90] = "noventa",
+ [100] = "ciento",
+ [200] = "doscientos",
+ [300] = "trescientos",
+ [400] = "cuatrocientos",
+ [500] = "quinientos",
+ [600] = "seiscientos",
+ [700] = "setecientos",
+ [800] = "ochocientos",
+ [900] = "novecientos",
+ [1000] = "mil",
+ [1000^2] = "millón",
+    [1000^3] = "mil millones",
+ [1000^4] = "billón",
+}
+
+local function translate(n)
+ local w = words[n]
+ if w then
+ return w
+ end
+ local t = { }
+ local function compose_one(n)
+ local w = words[n]
+ if w then
+ t[#t+1] = w
+ return
+ end
+ -- a, b = hundreds, remainder
+ local a, b = floor(n/100), n % 100
+ -- one thousand
+ if a == 10 then
+ t[#t+1] = words[1]
+ t[#t+1] = words[1000]
+ -- x hundred (n.b. this will not give thirteen hundred because
+ -- compose_one(n) is only called after
+        -- `n = compose_two(n, 1000^1)`.
+ elseif a > 0 then
+ t[#t+1] = words[a*100]
+ end
+ -- the remainder
+ if words[b] then
+ t[#t+1] = words[b]
+ else
+ -- a, b = tens, remainder
+ a, b = floor(b/10), n % 10
+ t[#t+1] = words[a*10]
+ t[#t+1] = "y"
+ t[#t+1] = words[b]
+ end
+ end
+    -- compose_two handles x billion, ... x thousand. When less than 1000 is
+ -- left, compose_one takes over.
+ local function compose_two(n,m)
+ if n > (m-1) then
+ local a, b = floor(n/m), n % m
+ if a > 0 then
+ compose_one(a)
+ end
+ t[#t+1] = words[m]
+ n = b
+ end
+ return n
+ end
+ n = compose_two(n,1000^4)
+ n = compose_two(n,1000^3)
+ n = compose_two(n,1000^2)
+ n = compose_two(n,1000^1)
+ if n > 0 then
+ compose_one(n)
+ end
+ return #t > 0 and concat(t," ") or tostring(n)
+end
+
+data.spanish = {
+ words = words,
+ translate = translate,
+}
+
+data.es = data.spanish
+
+-- print(translate(31))
+-- print(translate(101))
+-- print(translate(199))
+
+-- verbose handler:
+
+function converters.verbose.translate(n,language)
+ local t = language and data[language]
+ return t and t.translate(n) or n
+end
+
+function commands.verbose(n,language)
+ local t = language and data[language]
+ context(t and t.translate(n) or n)
+end
+
-- --
local v_day = variables.day
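
The verbose handler added above dispatches on a per-language data table and falls back to the bare number when no converter is registered. A compact standalone illustration of that dispatch (the toy translate below is not the real English converter):

local data = { }

data.en = {
    translate = function(n)
        return n == 1 and "one" or n == 2 and "two" or ("#" .. n)
    end,
}

local function verbose(n, language)
    local t = language and data[language]
    return t and t.translate(n) or n
end

print(verbose(2, "en"))   --> two
print(verbose(2, "xx"))   --> 2    (no converter registered: the number itself)
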
@@ -775,18 +985,18 @@ local days = { -- not variables.sunday
}
local months = { -- not variables.januari
- "january",
- "february",
- "march",
- "april",
- "may",
- "june",
- "july",
- "august",
- "september",
- "october",
- "november",
- "december",
+ "january",
+ "february",
+ "march",
+ "april",
+ "may",
+ "june",
+ "july",
+ "august",
+ "september",
+ "october",
+ "november",
+ "december",
}
function commands.dayname(n)
@@ -813,11 +1023,12 @@ end
function commands.currentdate(str,currentlanguage) -- second argument false : no label
local list = utilities.parsers.settings_to_array(str)
+ local splitlabel = languages.labels.split or string.itself -- we need to get the loading order right
local year, month, day = tex.year, tex.month, tex.day
local auto = true
for i=1,#list do
local entry = list[i]
- local tag, plus = languages.labels.split(entry)
+ local tag, plus = splitlabel(entry)
local ordinal, mnemonic, whatordinal = false, false, nil
if not tag then
tag = entry
@@ -883,7 +1094,7 @@ function commands.currentdate(str,currentlanguage) -- second argument false : no
if currentlanguage == false then
-- ignore
else
- context("%s",converters.ordinal(whatordinal,currentlanguage))
+ context(converters.ordinal(whatordinal,currentlanguage)) -- no "%s" needed
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/core-con.mkiv b/Master/texmf-dist/tex/context/base/core-con.mkiv
index 241d4316595..7febdcf39f6 100644
--- a/Master/texmf-dist/tex/context/base/core-con.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-con.mkiv
@@ -20,35 +20,27 @@
\ifdefined\currentlanguage \else \let\currentlanguage\empty \fi
\ifdefined\labeltext \else \let\labeltext \firstofoneargument \fi
-%D This module deals with all kind of conversions from numbers
-%D and dates. I considered splitting this module in a support
-%D one and a core one, but to keep things simple as well as
-%D preserve the overview, I decided against splitting.
+%D This module deals with all kinds of conversions from numbers and dates. I
+%D considered splitting this module into a support one and a core one, but to keep
+%D things simple as well as preserve the overview, I decided against splitting.
\let\spr\firstofoneargument % separator
\let\stp\firstofoneargument % stopper
-% cleaner, some day:
-%
-% \def\isolateseparators % etex only, even works with list separator overloading
-% {\unexpanded\def\spr##1{{##1}}%
-% \unexpanded\def\stp##1{{##1}}}
-
-% needed for arab :
+% needed for arab:
\unexpanded\def\isolateseparators % even works with list separator overloading
- {\def\spr##1{{##1}}%
- \def\stp##1{{##1}}}
+ {\def\spr##1{{##1}}% % we can probably best mess around with zwj
+ \def\stp##1{{##1}}} % and friends
%D \macros
%D {numbers}
%D
-%D First we deal with the dummy conversion of numbers using the
-%D \TEX\ primitive \type{\number}. The uppercase alternative is
-%D only there for compatibility with the other conversion
-%D macros. We could do without \type{#1} but this way we get
-%D rid of unwanted braces. For the savety we also define a
-%D non||sence uppercase alternative.
+%D First we deal with the dummy conversion of numbers using the \TEX\ primitive
+%D \type{\number}. The uppercase alternative is only there for compatibility with
+%D the other conversion macros. We could do without \type{#1} but this way we get
+%D rid of unwanted braces. For safety we also define a non||sense uppercase
+%D alternative.
%D
%D \showsetup{numbers}
%D
@@ -57,8 +49,8 @@
%D \def\Numbers#1{\number#1}
%D \stoptyping
%D
-%D Due to read ahead, as in \type{[\pagenumber\space]} the space will
-%D disappear, unless we use:
+%D Due to read ahead, as in \type{[\pagenumber\space]} the space will disappear,
+%D unless we use:
\def\numbers#1{\purenumber{#1}}
\def\Numbers#1{\purenumber{#1}}
@@ -66,10 +58,9 @@
%D \macros
%D {romannumerals,Romannumerals}
%D
-%D \TEX\ the program uses a rather tricky conversion from
-%D numbers to their roman counterparts. This conversion could
-%D of course be programmed in \TEX\ itself, but I guess Knuth
-%D found the programming trick worth presenting.
+%D \TEX\ the program uses a rather tricky conversion from numbers to their roman
+%D counterparts. This conversion could of course be programmed in \TEX\ itself, but
+%D I guess Knuth found the programming trick worth presenting.
%D
%D \showsetup{romannumerals}
%D \showsetup{Romannumerals}
@@ -113,9 +104,9 @@
%D \macros
%D {character,Character}
%D
-%D Converting a number into a character can of course only
-%D be done with numbers less or equal to~26. At the cost of
-%D much more macros a faster conversion is possible, using:
+%D Converting a number into a character can of course only be done with numbers
+%D less than or equal to~26. At the cost of many more macros a faster conversion is
+%D possible, using:
%D
%D \starttyping
%D \setvalue{char1}{a} \def\character#1{\getvalue{char#1}}
@@ -134,9 +125,8 @@
%D \macros
%D {characters,Characters}
%D
-%D Converting large numbers is supported by the next two
-%D macros. This time we just count on: $\cdots$~x, y, z, aa,
-%D ab, ac~$\cdots$.
+%D Converting large numbers is supported by the next two macros. This time we
+%D just count on: $\cdots$~x, y, z, aa, ab, ac~$\cdots$.
%D
%D \showsetup{characters}
%D \showsetup{Characters}
@@ -163,20 +153,25 @@
\endgroup}
\unexpanded\def\oldstyleromannumerals#1% will become obsolete
- {\begingroup
- \leftrulefalse
- \rightrulefalse
+ {\dontleavehmode
+ \hbox\bgroup
\ss\txx
- \boxrulewidth.15ex
- \ruledhbox spread .15em{\hss\uppercased{\romannumerals{#1}}\hss}%
- \endgroup}
+ \setbox\scratchbox\hbox \s!spread .15\emwidth{\hss\uppercased{\romannumerals{#1}}\hss}%
+ \scratchwidth \wd\scratchbox
+ \scratchheight\ht\scratchbox
+ \scratchdimen .1\exheight
+ \vrule\s!width\scratchwidth\s!height\dimexpr\scratchheight+\scratchdimen\relax\s!depth-\dimexpr\scratchheight-+\scratchdimen\relax
+ \hskip-\scratchwidth
+ \vrule\s!width\scratchwidth\s!height\scratchdimen\s!depth\scratchdimen
+ \hskip-\scratchwidth
+ \box\scratchbox
+ \egroup}
%D \macros
%D {protectconversion}
%D
-%D The previous two commands are not robust enough to be
-%D passed to \type{\write} en \type{\message}. That's why we
-%D introduce:
+%D The previous two commands are not robust enough to be passed to \type
+%D {\write} and \type{\message}. That's why we introduce:
\unexpanded\def\protectconversion
{\let\doconvertcharacters\firstofoneargument}
@@ -184,10 +179,9 @@
%D \macros
%D {normaltime,normalyear,normalmonth,normalday}
%D
-%D The last part of this module is dedicated to converting
-%D dates. Because we want to use as meaningful commands as
-%D possible, and because \TEX\ already uses up some of those,
-%D we save the original meanings.
+%D The last part of this module is dedicated to converting dates. Because we
+%D want to use as meaningful commands as possible, and because \TEX\ already
+%D uses up some of those, we save the original meanings.
\savenormalmeaning\time
\savenormalmeaning\year
@@ -197,10 +191,9 @@
%D \macros
%D {month,MONTH}
%D
-%D Converting the month number into a month name is done
-%D using a case statement, abstact values and the label
-%D mechanism. This way users can easily redefine a label from
-%D for instance german into austrian.
+%D Converting the month number into a month name is done using a case statement,
+%D abstract values and the label mechanism. This way users can easily redefine a
+%D label, for instance from german into austrian.
%D
%D \starttyping
%D \setuplabeltext [de] [january=J\"anner]
@@ -208,7 +201,7 @@
%D
%D Anyhow, the conversion looks like:
-\def\monthlong #1{\ctxcommand{month(#1)}}
+\def\monthlong #1{\ctxcommand{monthname(#1)}}
\def\monthshort#1{\ctxcommand{monthmnem(#1)}}
\let\convertmonth\monthlong % for old times sake
@@ -224,11 +217,9 @@
\def\MONTHLONG #1{\WORD{\monthlong {#1}}}
\def\MONTHSHORT#1{\WORD{\monthshort{#1}}}
-%D We never explicitly needed this, but Tobias Burnus pointed
-%D out that it would be handy to convert to the day of the
-%D week. In doing so, we have to calculate the total number of
-%D days, taking leapyears into account. For those who are
-%D curious:
+%D We never explicitly needed this, but Tobias Burnus pointed out that it would be
+%D handy to convert to the day of the week. In doing so, we have to calculate the
+%D total number of days, taking leapyears into account. For those who are curious:
%D
%D \startitemize[packed]
%D \item years that can be divided by 4 are leapyears
@@ -239,8 +230,7 @@
%D \macros
%D {weekday,WEEKDAY}
%D
-%D The first one is sort of redundant. It takes the day
-%D number argument.
+%D The first one is sort of redundant. It takes the day number argument.
%D
%D \showsetup{weekday}
%D \showsetup{WEEKDAY}
@@ -277,15 +267,15 @@
%D \getbuffer
%D \stoplines
%D
-%D The macro \type {\getdayoftheweek} can be used to calculate
-%D the number \type {\normalweekday}.
+%D The macro \type {\getdayoftheweek} can be used to calculate the number
+%D \type {\normalweekday}.
%D \macros
%D {doifleapyearelse,
%D getdayspermonth}
%D
-%D Sometimes we need to know if we're dealing with a
-%D leapyear, so here is a testmacro:
+%D Sometimes we need to know if we're dealing with a leapyear, so here is a
+%D testmacro:
%D
%D \starttyping
%D \doifleapyearelse{year}{yes}{no}
@@ -297,8 +287,7 @@
%D \getdayspermonth{year}{month}
%D \stoptyping
%D
-%D The number of days is available in the macro \type
-%D {\numberofdays}.
+%D The number of days is available in the macro \type {\numberofdays}.
\def\doifleapyearelse#1%
{\ctxcommand{doifleapyearelse(\number#1)}}
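
\doifleapyearelse delegates to commands.doifleapyearelse on the Lua side (which, per the core-con.lua hunk earlier, now calls isleapyear directly). For reference, a standalone sketch of the usual Gregorian rule itemized earlier in this file; the actual isleapyear is defined elsewhere in core-con.lua and is only assumed here:

local function isleapyear(year)
    -- divisible by 4, except centuries, unless divisible by 400
    return (year % 400 == 0) or (year % 100 ~= 0 and year % 4 == 0)
end

print(isleapyear(2000), isleapyear(1900), isleapyear(2012), isleapyear(2013))
--> true    false   true    false
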
@@ -320,13 +309,11 @@
%D \macros
%D {currentdate, rawdate, date}
%D
-%D We use these conversion macros in the date formatting
-%D macro:
+%D We use these conversion macros in the date formatting macro:
%D
%D \showsetup{currentdate}
%D
-%D This macro takes care of proper spacing and delivers for
-%D instance:
+%D This macro takes care of proper spacing and delivers for instance:
%D
%D \startbuffer
%D \currentdate[weekday,day,month,year] % still dutch example
@@ -351,8 +338,8 @@
%D
%D or typeset: \getbuffer.
%D
-%D When no argument is passed, the current date is given as
-%D specified per language (using \type{\installlanguage}).
+%D When no argument is passed, the current date is given as specified per
+%D language (using \type{\installlanguage}).
%D
%D \showsetup{currentdate}
%D
@@ -367,8 +354,8 @@
%D
%D \typebuffer
%D
-%D The date is specified by one character keys. When no date
-%D is given, we get the current date.
+%D The date is specified by one-character keys. When no date is given, we
+%D get the current date.
%D
%D \startlines
%D \getbuffer
@@ -430,10 +417,13 @@
\def\syst_converters_date[#1][#2]%
{\begingroup
\iffirstargument
- \getparameters[\??da][d=\normalday,m=\normalmonth,y=\normalyear,#1]%
- \normalday \@@dad\relax
- \normalmonth\@@dam\relax
- \normalyear \@@day\relax
+ \letdummyparameter\c!d\normalday
+ \letdummyparameter\c!m\normalmonth
+ \letdummyparameter\c!y\normalyear
+ \getdummyparameters[#1]%
+ \normalday \directdummyparameter\c!d\relax
+ \normalmonth\directdummyparameter\c!m\relax
+ \normalyear \directdummyparameter\c!y\relax
\fi
\syst_converters_current_date[#2]%
\endgroup}
@@ -444,9 +434,8 @@
%D \macros
%D {currenttime}
%D
-%D The currenttime is actually the jobtime. You can specify
-%D a pattern similar to the previous date macro using the
-%D keys \type {h}, \type {m} and a separator.
+%D The currenttime is actually the jobtime. You can specify a pattern similar
+%D to the previous date macro using the keys \type {h}, \type {m} and a separator.
\unexpanded\def\calculatecurrenttime
{\edef\currenthour {\ctxcommand{hour ()}}%
@@ -462,7 +451,6 @@
\unexpanded\def\currenttime
{\doifnextoptionalelse\syst_converters_current_time_yes\syst_converters_current_time_nop}
-
\unexpanded\def\syst_converters_current_time_yes[#1]%
{\calculatecurrenttime
\processallactionsinset[#1][h=>\currenthour,m=>\currentminute,\s!unknown=>\commalistelement]}
@@ -471,16 +459,15 @@
{\normalexpanded{\syst_converters_current_time_yes[\currenttimespecification]}}
-%D Because we're dealing with dates, we also introduce a few
-%D day loops:
+%D Because we're dealing with dates, we also introduce a few day loops:
%D
%D \starttyping
%D \processmonth{year}{month}{command}
%D \processyear{year}{command}{before}{after}
%D \stoptyping
%D
-%D The counters \type {\normalyear}, \type {\normalmonth} and
-%D \type{\normalday} can be used for for date manipulations.
+%D The counters \type {\normalyear}, \type {\normalmonth} and \type {\normalday}
+%D can be used for date manipulations.
\unexpanded\def\processmonth#1#2#3% year month command
{\begingroup
@@ -505,15 +492,13 @@
%D \macros
%D {defineconversion, convertnumber}
%D
-%D Conversion involves the macros that we implemented earlier
-%D in this module.
+%D Conversion involves the macros that we implemented earlier in this module.
%D
%D \showsetup{defineconversion}
%D \showsetup{convertnumber}
%D
-%D We can feed this command with conversion macros as well as
-%D a set of conversion symbols. Both need a bit different
-%D treatment.
+%D We can feed this command with conversion macros as well as a set of conversion
+%D symbols. Both need slightly different treatment.
%D
%D \starttyping
%D \defineconversion [roman] [\romannumerals]
@@ -549,9 +534,9 @@
\def\syst_converters_checked_conversion#1#2%
{\ctxcommand{checkedconversion("#1",#2)}}
-%D If a conversion is just a font switch then we need to make sure
-%D that the number is indeed end up as number in the input, so we
-%D need to handle the second argument.
+%D If a conversion is just a font switch then we need to make sure that the
+%D number indeed ends up as a number in the input, so we need to handle the
+%D second argument.
\def\convertnumber#1#2% expandable
{\csname\??conversionarguments
@@ -600,19 +585,20 @@
%D \macros
%D {ordinaldaynumber, highordinalstr, ordinalstr}
%D
-%D Efficient general ordinal number converters are sometimes
-%D difficult to implement. Fortunately dates never exceed the
-%D number~31.
+%D Efficient general ordinal number converters are sometimes difficult to
+%D implement. Fortunately dates never exceed the number~31.
\def\highordinalstr #1{\high{\notsmallcapped{#1}}}
\def\ordinalstr #1{\notsmallcapped{#1}}
\def\ordinaldaynumber #1{\ctxcommand{ordinal(#1,"\currentlanguage")}}
-%D As longs as symbols are linked to levels or numbers, we can
-%D also use the conversion mechanism, but in for instance the
-%D itemization macros, we prefer symbols because they can more
-%D easier be (partially) redefined. Symbols are implemented
-%D in another module.
+\def\verbosenumber #1{\ctxcommand{verbose(#1,"\currentlanguage")}}
+\def\VerboseNumber #1{\Words{\ctxcommand{verbose(#1,"\currentlanguage")}}}
+
+%D As long as symbols are linked to levels or numbers, we can also use the
+%D conversion mechanism, but in, for instance, the itemization macros, we prefer
+%D symbols because they can more easily be (partially) redefined. Symbols are
+%D implemented in another module.
\def\smallcappedromannumerals#1{\smallcapped{\romannumerals{#1}}}
\def\smallcappedcharacters #1{\smallcapped{\characters {#1}}}
@@ -641,6 +627,12 @@
\defineconversion [\v!Numbers] [\Numbers]
\defineconversion [\v!mediaeval] [\mediaeval]
+\defineconversion [\v!word] [\verbosenumber]
+\defineconversion [\v!words] [\verbosenumber]
+
+\defineconversion [\v!Word] [\VerboseNumber]
+\defineconversion [\v!Words] [\VerboseNumber]
+
\defineconversion [n] [\numbers]
\defineconversion [N] [\Numbers]
\defineconversion [m] [\mediaeval]
diff --git a/Master/texmf-dist/tex/context/base/core-ctx.ctx b/Master/texmf-dist/tex/context/base/core-ctx.ctx
new file mode 100644
index 00000000000..5126ad2d2bb
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/core-ctx.ctx
@@ -0,0 +1,23 @@
+<?xml version='1.0' standalone='yes'?>
+
+<ctx:job>
+ <ctx:message>demo file</ctx:message>
+ <ctx:preprocess suffix='prep'>
+ <ctx:processors>
+ <ctx:processor name='step-1' suffix='one' >dummy-prep-command-1 <ctx:value name='old'/> <ctx:value name='new'/></ctx:processor>
+ <ctx:processor name='step-2' suffix='prep'>dummy-prep-command-2 <ctx:value name='old'/> <ctx:value name='new'/></ctx:processor>
+ </ctx:processors>
+ <ctx:files>
+ <ctx:file processor='step-1' >one*.xml</ctx:file>
+ <ctx:file processor='step-2' >two*.xml</ctx:file>
+ <ctx:file processor='step-1,step-2'>all*.xml</ctx:file>
+ </ctx:files>
+ </ctx:preprocess>
+ <ctx:process>
+ <ctx:resources>
+ <ctx:environment>step-1-step-2.tex</ctx:environment>
+ </ctx:resources>
+ </ctx:process>
+ <ctx:postprocess>
+ </ctx:postprocess>
+</ctx:job>
diff --git a/Master/texmf-dist/tex/context/base/core-ctx.lua b/Master/texmf-dist/tex/context/base/core-ctx.lua
index e6fb7bb5fd8..33662731e23 100644
--- a/Master/texmf-dist/tex/context/base/core-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/core-ctx.lua
@@ -6,71 +6,300 @@ if not modules then modules = { } end modules ['core-ctx'] = {
license = "see context related readme files"
}
+--[[
+Job control files aka ctx files are rather old and date from the mkii times.
+They were handled in texexec and mtx-context and deals with modes, modules,
+environments and preprocessing in projects where one such file drives the
+processing of lots of files without the need to provide command line
+arguments.
+
+In mkiv this concept was of course supported as well. The first implementation
+of mtx-context took much of the approach of texexec, but by now we have gotten
+rid of the option file (for passing modes, modules and environments), the stubs
+(for directly processing cld and xml) as well as the preprocessing component
+of the ctx files. Special helper features, like typesetting listings, were
+already moved to the extras (a direct side effect of the ability to pass along
+command line arguments.) All this made mtx-context simpler than its ancestor
+texexec.
+
+Because some of the modes might affect the mtx-context end, the ctx file is
+still loaded there but only for getting the modes. The file is loaded again
+during the run but as loading and basic processing takes less than a
+millisecond it's not that much of a burden.
+--]]
+
+-- the ctxrunner table might either become private or move to the job namespace
+-- which also affects the loading order
+
local trace_prepfiles = false trackers.register("system.prepfiles", function(v) trace_prepfiles = v end)
+local gsub, find, match, validstring = string.gsub, string.find, string.match, string.valid
+local concat = table.concat
+local xmltext = xml.text
+
local report_prepfiles = logs.reporter("system","prepfiles")
commands = commands or { }
local commands = commands
-local list, suffix, islocal, found = { }, "prep", false, false
-
-function commands.loadctxpreplist()
- local ctlname = file.replacesuffix(tex.jobname,"ctl")
- if lfs.isfile(ctlname) then
- local x = xml.load(ctlname)
- if x then
- islocal = xml.found(x,"ctx:preplist[@local=='yes']")
---~ if trace_prepfiles then
- if islocal then
- report_prepfiles("loading ctx log file (local)") -- todo: m!system
- else
- report_prepfiles("loading ctx log file (specified)") -- todo: m!system
- end
---~ end
- for e in xml.collected(x,"ctx:prepfile") do
- local name = xml.text(e)
- if islocal then
- name = file.basename(name)
- end
- local done = e.at['done'] or 'no'
- if trace_prepfiles then
- report_prepfiles("registering %s -> %s",done)
- end
- found = true
- list[name] = done -- 'yes' or 'no'
+ctxrunner = ctxrunner or { }
+
+ctxrunner.prepfiles = utilities.storage.allocate()
+
+local function dontpreparefile(t,k)
+ return k -- we only store when we have a prepper
+end
+
+table.setmetatableindex(ctxrunner.prepfiles,dontpreparefile)
+
+local function filtered(str,method) -- in resolvers?
+ str = tostring(str)
+ if method == 'name' then str = file.nameonly(str)
+ elseif method == 'path' then str = file.dirname(str)
+ elseif method == 'suffix' then str = file.suffix(str)
+ elseif method == 'nosuffix' then str = file.removesuffix(str)
+ elseif method == 'nopath' then str = file.basename(str)
+ elseif method == 'base' then str = file.basename(str)
+-- elseif method == 'full' then
+-- elseif method == 'complete' then
+-- elseif method == 'expand' then -- str = file.expandpath(str)
+ end
+ return (gsub(str,"\\","/"))
+end
+
+-- local function substitute(e,str)
+-- local attributes = e.at
+-- if str and attributes then
+-- if attributes['method'] then
+-- str = filtered(str,attributes['method'])
+-- end
+-- if str == "" and attributes['default'] then
+-- str = attributes['default']
+-- end
+-- end
+-- return str
+-- end
+
+local function substitute(str)
+ return str
+end
+
+local function justtext(str)
+ str = xml.unescaped(tostring(str))
+ str = xml.cleansed(str)
+ str = gsub(str,"\\+",'/')
+ str = gsub(str,"%s+",' ')
+ return str
+end
+
+function ctxrunner.load(ctxname)
+
+ local xmldata = xml.load(ctxname)
+
+ local jobname = tex.jobname -- todo
+
+ local variables = { job = jobname }
+ local commands = { }
+ local flags = { }
+ local paths = { } -- todo
+ local treatments = { }
+ local suffix = "prep"
+
+ xml.include(xmldata,'ctx:include','name', {'.', file.dirname(ctxname), "..", "../.." })
+
+ for e in xml.collected(xmldata,"/ctx:job/ctx:flags/ctx:flag") do
+ local flag = xmltext(e)
+ local key, value = match(flag,"^(.-)=(.+)$")
+ if key and value then
+ environment.setargument(key,value)
+ else
+ environment.setargument(flag,true)
+ end
+ end
+
+ -- add to document.options.ctxfile[...]
+
+ local ctxfile = document.options.ctxfile
+
+ local modes = ctxfile.modes
+ local modules = ctxfile.modules
+ local environments = ctxfile.environments
+
+ for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:mode") do
+ modes[#modes+1] = xmltext(e)
+ end
+
+ for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:module") do
+ modules[#modules+1] = xmltext(e)
+ end
+
+ for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:environment") do
+ environments[#environments+1] = xmltext(e)
+ end
+
+ for e in xml.collected(xmldata,"ctx:message") do
+ report_prepfiles("ctx comment: %s", xmltext(e))
+ end
+
+ for r, d, k in xml.elements(xmldata,"ctx:value[@name='job']") do
+ d[k] = variables['job'] or ""
+ end
+
+ for e in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do
+ local name = e.at and e.at['name'] or "unknown"
+ local suffix = e.at and e.at['suffix'] or "prep"
+ for r, d, k in xml.elements(command,"ctx:old") do
+ d[k] = "%old%"
+ end
+ for r, d, k in xml.elements(e,"ctx:new") do
+ d[k] = "%new%"
+ end
+ for r, d, k in xml.elements(e,"ctx:value") do
+ local tag = d[k].at['name']
+ if tag then
+ d[k] = "%" .. tag .. "%"
end
end
+ local runner = xml.textonly(e)
+ if runner and runner ~= "" then
+ commands[name] = {
+ suffix = suffix,
+ runner = runner,
+ }
+ end
end
-end
--- -- --
+ local suffix = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/attribute('suffix')") or suffix
+ local runlocal = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/ctx:processors/attribute('local')")
+
+ runlocal = toboolean(runlocal)
-local function found(name) -- used in resolve
- local prepname = name .. "." .. suffix
- if list[name] and lfs.isfile(prepname) then
- if trace_prepfiles then
- report_prepfiles("preprocessing: using %s",prepname)
+ -- todo: only collect, then plug into file handler
+
+ local inputfile = validstring(environment.arguments.input) or jobname
+
+ variables.old = inputfile
+
+ for files in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:files") do
+ for pattern in xml.collected(files,"ctx:file") do
+ local preprocessor = pattern.at['processor'] or ""
+ for r, d, k in xml.elements(pattern,"/ctx:old") do
+ d[k] = jobname
+ end
+ for r, d, k in xml.elements(pattern,"/ctx:value[@name='old'") do
+ d[k] = jobname
+ end
+ pattern =justtext(xml.tostring(pattern))
+ if preprocessor and preprocessor ~= "" and pattern and pattern ~= "" then
+ local noftreatments = #treatments + 1
+ local findpattern = string.topattern(pattern)
+ local preprocessors = utilities.parsers.settings_to_array(preprocessor)
+ treatments[noftreatments] = {
+ pattern = findpattern,
+ preprocessors = preprocessors,
+ }
+ report_prepfiles("step %s, pattern %a, preprocessor: %a",noftreatments,findpattern,preprocessors)
+ end
end
- return prepname
end
- return false
-end
-local function resolve(name) -- used a few times later on
- local filename = file.collapsepath(name)
- local prepname = islocal and found(file.basename(name))
- if prepname then
- return prepname
+ local function needstreatment(oldfile)
+ for i=1,#treatments do
+ local treatment = treatments[i]
+ local pattern = treatment.pattern
+ if find(oldfile,pattern) then
+ return treatment
+ end
+ end
end
- prepname = found(filename)
- if prepname then
- return prepname
+
+ local preparefile = #treatments > 0 and function(prepfiles,filename)
+
+ local treatment = needstreatment(filename)
+ local oldfile = filename
+ local newfile = false
+ if treatment then
+ local preprocessors = treatment.preprocessors
+ local runners = { }
+ for i=1,#preprocessors do
+ local preprocessor = preprocessors[i]
+ local command = commands[preprocessor]
+ if command then
+ local runner = command.runner
+ local suffix = command.suffix
+ local result = filename .. "." .. suffix
+ if runlocal then
+ result = file.basename(result)
+ end
+ variables.old = oldfile
+ variables.new = result
+ runner = utilities.templates.replace(runner,variables)
+ if runner and runner ~= "" then
+ runners[#runners+1] = runner
+ oldfile = result
+ if runlocal then
+ oldfile = file.basename(oldfile)
+ end
+ newfile = oldfile
+ end
+ end
+ end
+ if not newfile then
+ newfile = filename
+ elseif file.needsupdating(filename,newfile) then
+ for i=1,#runners do
+ report_prepfiles("step %i: %s",i,runners[i])
+ end
+ --
+ for i=1,#runners do
+ local command = runners[i]
+ report_prepfiles("command: %s",command)
+ local result = os.spawn(command) or 0
+ -- if result > 0 then
+ -- report_prepfiles("error, return code: %s",result)
+ -- end
+ end
+ if lfs.isfile(newfile) then
+ file.syncmtimes(filename,newfile)
+ report_prepfiles("%a is converted to %a",filename,newfile)
+ else
+ report_prepfiles("%a is not converted to %a",filename,newfile)
+ newfile = filename
+ end
+ elseif lfs.isfile(newfile) then
+ report_prepfiles("%a is already converted to %a",filename,newfile)
+ end
+ else
+ newfile = filename
+ end
+ prepfiles[filename] = newfile
+ -- in case we ask twice (with the prepped name) ... todo: avoid this mess
+ prepfiles[newfile] = newfile
+ return newfile
end
- return false
+
+ table.setmetatableindex(ctxrunner.prepfiles,preparefile or dontpreparefile)
+
+    -- we need to deal with the input filename as it has already been resolved
+
end
---~ support.doiffileexistelse(name)
+-- print("\n")
+-- document = {
+-- options = {
+-- ctxfile = {
+-- modes = { },
+-- modules = { },
+-- environments = { },
+-- }
+-- }
+-- }
+-- environment.arguments.input = "test.tex"
+-- ctxrunner.load("x-ldx.ctx")
+
+local function resolve(name) -- used a few times later on
+ return ctxrunner.prepfiles[file.collapsepath(name)] or false
+end
local processfile = commands.processfile
local doifinputfileelse = commands.doifinputfileelse
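
ctxrunner.prepfiles above is filled lazily: table.setmetatableindex installs either dontpreparefile or the preparefile closure as the index handler, so the decision whether a file needs preprocessing is made, and cached, on first lookup. A minimal self-contained sketch of that pattern (table.setmetatableindex is a ConTeXt helper, so plain setmetatable is used here, and the "files ending in .raw need prepping" rule is invented for the example):

local prepfiles = setmetatable({ }, {
    __index = function(t, filename)
        -- compute the prepped name for a missing key
        local prepared = filename:find("%.raw$") and (filename .. ".prep") or filename
        rawset(t, filename, prepared)     -- cache for subsequent lookups
        return prepared
    end,
})

print(prepfiles["chapter.tex"])       --> chapter.tex    (no treatment needed)
print(prepfiles["data.raw"])          --> data.raw.prep  (mapped to the prepped name)
print(rawget(prepfiles, "data.raw"))  --> data.raw.prep  (second lookup hits the cache)
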
@@ -94,3 +323,24 @@ end
function commands.preparedfile(name)
return resolve(name) or name
end
+
+function commands.getctxfile()
+ local ctxfile = document.arguments.ctx or ""
+ if ctxfile ~= "" then
+ ctxrunner.load(ctxfile) -- do we need to locate it?
+ end
+end
+
+function ctxrunner.resolve(name) -- used a few times later on
+ return ctxrunner.prepfiles[file.collapsepath(name)] or name
+end
+
+-- ctxrunner.load("t:/sources/core-ctx.ctx")
+
+-- context(ctxrunner.prepfiles["one-a.xml"]) context.par()
+-- context(ctxrunner.prepfiles["one-b.xml"]) context.par()
+-- context(ctxrunner.prepfiles["two-c.xml"]) context.par()
+-- context(ctxrunner.prepfiles["two-d.xml"]) context.par()
+-- context(ctxrunner.prepfiles["all-x.xml"]) context.par()
+
+-- inspect(ctxrunner.prepfiles)
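
The ctx:file patterns collected by ctxrunner.load (one*.xml, two*.xml, all*.xml in the demo core-ctx.ctx) are turned into Lua patterns with string.topattern before needstreatment matches them. A rough sketch of what such a glob-to-pattern helper boils down to for these simple cases (an approximation; ConTeXt's string.topattern has more options):

local function topattern(glob)
    local p = glob:gsub("([%.%-%+%(%)%[%]%%%$%^])", "%%%1")   -- escape magic characters
    p = p:gsub("%*", ".*")                                     -- '*' matches any run
    return "^" .. p .. "$"
end

print(topattern("one*.xml"))                              --> ^one.*%.xml$
print(("one-a.xml"):find(topattern("one*.xml")) ~= nil)   --> true
print(("two-c.xml"):find(topattern("one*.xml")) ~= nil)   --> false
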
diff --git a/Master/texmf-dist/tex/context/base/core-ctx.mkiv b/Master/texmf-dist/tex/context/base/core-ctx.mkiv
index e178ee21bea..59115621aa7 100644
--- a/Master/texmf-dist/tex/context/base/core-ctx.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-ctx.mkiv
@@ -13,18 +13,15 @@
\writestatus{loading}{ConTeXt Core Macros / Job Control}
-\unprotect
-
-\setnewconstant\preprocessmethod\plustwo % always check in mkiv
-
\registerctxluafile{core-ctx}{1.000}
-\def\loadctxpreplist
- {\ctxcommand{loadctxpreplist()}%
- \glet\loadctxpreplist\relax}
+\unprotect
-\appendtoks
- \loadctxpreplist
-\to \everystarttext % maybe too late but don't change it now
+\unexpanded\def\job_options_get_commandline {\ctxcommand{getcommandline()}}
+\unexpanded\def\job_options_get_ctxfile {\ctxcommand{getctxfile()}}
+\unexpanded\def\job_options_set_modes {\ctxcommand{setdocumentmodes()}}
+\unexpanded\def\job_options_set_modules {\ctxcommand{setdocumentmodules()}}
+\unexpanded\def\job_options_set_environments{\ctxcommand{setdocumentenvironments()}}
+\unexpanded\def\job_options_set_filenames {\ctxcommand{setdocumentfilenames()}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/core-dat.lua b/Master/texmf-dist/tex/context/base/core-dat.lua
index 071a3fe0b47..826d3a675ee 100644
--- a/Master/texmf-dist/tex/context/base/core-dat.lua
+++ b/Master/texmf-dist/tex/context/base/core-dat.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['core-dat'] = {
replaces the twopass data mechanism.</p>
--ldx]]--
-local tonumber = tonumber
+local tonumber, tostring, type = tonumber, tostring, type
local context, commands = context, commands
@@ -23,9 +23,8 @@ local report_pagestate = logs.reporter("pagestate")
local allocate = utilities.storage.allocate
local settings_to_hash = utilities.parsers.settings_to_hash
-local format = string.format
local texcount = tex.count
-
+local formatters = string.formatters
local v_yes = interfaces.variables.yes
local new_latelua = nodes.pool.latelua
@@ -69,7 +68,12 @@ local function setdata(settings)
local tag = settings.tag
local data = settings.data
local list = tobesaved[name]
- data = settings_to_hash(data) or { }
+ if settings.convert and type(data) == "string" then
+ data = settings_to_hash(data)
+ end
+ if type(data) ~= "table" then
+ data = { data = settings.data }
+ end
if not tag then
tag = #list + 1
else
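
The reworked setdata above first converts string data into a hash (via utilities.parsers.settings_to_hash) and then wraps anything that still is not a table, so later code can index the result safely. A self-contained sketch of that normalisation, with a toy key=value parser standing in for the real settings parser:

local function settings_to_hash(str)              -- assumption: simplified parser
    local t = { }
    for key, value in str:gmatch("([^=,%s]+)%s*=%s*([^,]+)") do
        t[key] = value
    end
    return t
end

local function normalize(data, convert)
    if convert and type(data) == "string" then
        data = settings_to_hash(data)
    end
    if type(data) ~= "table" then
        data = { data = data }                    -- wrap scalars
    end
    return data
end

print(normalize("author=Hagen,year=2013", true).author)   --> Hagen
print(normalize(123).data)                                 --> 123
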
@@ -84,10 +88,10 @@ local function setdata(settings)
data.order = index
data.realpage = texcount.realpageno
if trace_datasets then
- report_dataset("delayed: name %s, tag %s, index %s",name,tag,index)
+ report_dataset("action %a, name %a, tag %a, index %a","assign delayed",name,tag,index)
end
elseif trace_datasets then
- report_dataset("immediate: name %s, tag %s",name,tag)
+ report_dataset("action %a, name %a, tag %a","assign immediate",name,tag)
end
return name, tag, data
end
@@ -103,47 +107,66 @@ function datasets.extend(name,tag)
t.realpage = realpage
t.order = order
if trace_datasets then
- report_dataset("flushed: name %s, tag %s, page %s, index %s, order",name,tag,t.index or 0,order,realpage)
+ report_dataset("action %a, name %a, tag %a, page %a, index %a","flush by order",name,tag,t.index or 0,order,realpage)
end
end
function datasets.getdata(name,tag,key,default)
local t = collected[name]
- if t then
+ if t == nil then
+ if trace_datasets then
+ report_dataset("error: unknown dataset, name %a",name)
+ end
+ elseif type(t) ~= "table" then
+ return t
+ else
t = t[tag] or t[tonumber(tag)]
- if t then
- if key then
- return t[key] or default
- else
- return t
+ if not t then
+ if trace_datasets then
+ report_dataset("error: unknown dataset, name %a, tag %a",name,tag)
end
- elseif trace_datasets then
- report_dataset("unknown: name %s, tag %s",name,tag)
+ elseif key then
+ return t[key] or default
+ else
+ return t
end
- elseif trace_datasets then
- report_dataset("unknown: name %s",name)
end
return default
end
function commands.setdataset(settings)
- local name, tag, data = setdata(settings)
+ settings.convert = true
+ local name, tag = setdata(settings)
if settings.delay ~= v_yes then
--
elseif type(tag) == "number" then
- context(new_latelua(format("job.datasets.extend(%q,%i)",name,tag)))
+ context(new_latelua(formatters["job.datasets.extend(%q,%i)"](name,tag)))
else
- context(new_latelua(format("job.datasets.extend(%q,%q)",name,tag)))
+ context(new_latelua(formatters["job.datasets.extend(%q,%q)"](name,tag)))
end
end
function commands.datasetvariable(name,tag,key)
local t = collected[name]
- t = t and (t[tag] or t[tonumber(tag)])
- if t then
- local s = t[key]
- if s then
- context(s)
+ if t == nil then
+ if trace_datasets then
+ report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name) -- no tag
+ end
+ elseif type(t) ~= "table" then
+ context(tostring(t))
+ else
+ t = t and (t[tag] or t[tonumber(tag)])
+ if not t then
+ if trace_datasets then
+ report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag)
+ end
+ elseif type(t) == "table" then
+ local s = t[key]
+ if type(s) ~= "table" then
+ context(tostring(s))
+ elseif trace_datasets then
+ report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag)
+ end
end
end
end
@@ -188,7 +211,7 @@ local function setstate(settings)
local data = realpage
list[tag] = data
if trace_pagestates then
- report_pagestate("setting: name %s, tag %s, preset %s",name,tag,realpage)
+ report_pagestate("action %a, name %a, tag %a, preset %a","set",name,tag,realpage)
end
return name, tag, data
end
@@ -198,7 +221,7 @@ pagestates.setstate = setstate
function pagestates.extend(name,tag)
local realpage = texcount.realpageno
if trace_pagestates then
- report_pagestate("synchronizing: name %s, tag %s, preset %s",name,tag,realpage)
+ report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage)
end
tobesaved[name][tag] = realpage
end
@@ -210,10 +233,10 @@ function pagestates.realpage(name,tag,default)
if t then
return tonumber(t or default)
elseif trace_pagestates then
- report_pagestate("unknown: name %s, tag %s",name,tag)
+ report_pagestate("error: unknown dataset, name %a, tag %a",name,tag)
end
elseif trace_pagestates then
- report_pagestate("unknown: name %s",name)
+ report_pagestate("error: unknown dataset, name %a, tag %a",name) -- nil
end
return default
end
@@ -221,9 +244,9 @@ end
function commands.setpagestate(settings)
local name, tag, data = setstate(settings)
if type(tag) == "number" then
- context(new_latelua(format("job.pagestates.extend(%q,%i)",name,tag)))
+ context(new_latelua(formatters["job.pagestates.extend(%q,%i)"](name,tag)))
else
- context(new_latelua(format("job.pagestates.extend(%q,%q)",name,tag)))
+ context(new_latelua(formatters["job.pagestates.extend(%q,%q)"](name,tag)))
end
end
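
The reworked setdata above now takes either a ready-made table or, when settings.convert is set, a comma-separated settings string. A minimal sketch of that normalization step, assuming only utilities.parsers.settings_to_hash as used in the hunk (the helper name is invented):

-- sketch, not part of the patch: the data normalization performed in setdata above
local settings_to_hash = utilities.parsers.settings_to_hash

local function normalize_dataset_data(settings) -- invented helper name
    local data = settings.data
    if settings.convert and type(data) == "string" then
        data = settings_to_hash(data)   -- e.g. "location=top,n=2" -> { location = "top", n = "2" }
    end
    if type(data) ~= "table" then
        data = { data = settings.data } -- wrap scalars so consumers always see a table
    end
    return data
end
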
diff --git a/Master/texmf-dist/tex/context/base/core-def.mkiv b/Master/texmf-dist/tex/context/base/core-def.mkiv
index 4f856f99622..cf1049e58f8 100644
--- a/Master/texmf-dist/tex/context/base/core-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-def.mkiv
@@ -24,44 +24,112 @@
\setupinteraction[\c!symbolset=navigation 1]
+% what a mess
+
+\everypar \emptytoks
+\neverypar \emptytoks
+
+\appendtoks
+ % \flushnotes
+ \page_otr_command_synchronize_side_floats % an otr specific command
+ \checkindentation
+ \showparagraphnumber
+ \restoreinterlinepenalty
+ % \flushmargincontents
+ % \flushcommentanchors
+ \flushnotes
+ \synchronizenotes
+ \OTRSETshowstatus
+ \registerparoptions
+ % \flushsyncpositions
+ \flushpostponednodedata
+ \typo_delimited_repeat
+ \insertparagraphintro
+\to \everypar
+
+\appendtoks
+ \flushnotes
+\to \everydisplay
+
+\appendtoks
+ \adjustsidefloatdisplaylines
+\to \everydisplay
+
+% \appendtoks
+% \flushsyncpositions
+% \to \everyheadstart
+
+% \appendtoks
+% \flushsyncresets
+% \to \everyendoftextbody
+
+\appendtoks
+ \ignorespaces
+\to \everybeginofpar
+
+\appendtoks
+ \removeunwantedspaces
+ % \strut % option ?
+ % \flushsyncresets
+ % \setlastlinewidth % gone, will be done in lua
+ \endgraf
+\to \everyendofpar
+
% initialization order:
\ifdefined\font_preloads_at_every_job \else \let\font_preloads_at_every_job \relax \fi
\ifdefined\font_preloads_at_start_text \else \let\font_preloads_at_start_text\relax \fi
\ifdefined\font_preloads_at_stop_text \else \let\font_preloads_at_stop_text \relax \fi
-\appendtoks \font_preloads_at_start_text \to \everystarttext
-\appendtoks \font_preloads_at_stop_text \to \everystoptext
+\appendtoks
+ \font_preloads_at_start_text
+\to \everystarttext
-%prependtoks \preloadtypescript \to \everyjob
-\appendtoks \showcontextbanner \to \everyjob
-\appendtoks \initializenewlinechar \to \everyjob
-\appendtoks \calculatecurrenttime \to \everyjob
-\appendtoks \loadsystemfiles \to \everyjob
-\appendtoks \loadoptionfile \to \everyjob % can load files !
-\appendtoks \font_preloads_at_every_job \to \everyjob
-\appendtoks \settopskip \to \everyjob
-\appendtoks \initializemainlanguage \to \everyjob
-%appendtoks \MPLIBregister \to \everyjob
-\appendtoks \xmlinitialize \to \everyjob
-\appendtoks \setfalse\c_page_backgrounds_new \to \everyjob
-\appendtoks \setfalse\c_page_backgrounds_some \to \everyjob
-\appendtoks \initializepagecounters \to \everyjob
-\appendtoks \directsetup{*runtime:options} \to \everyjob % we could erase them afterwards % order can change
-\appendtoks \directsetup{*runtime:modules} \to \everyjob % we could erase them afterwards % order can change
+\appendtoks
+ \font_preloads_at_stop_text
+\to \everystoptext
-%appendtoks \page[\v!last] \page \to \everybye % moved to core-job, we need to do this cleaner
-\appendtoks \ifarrangingpages\poparrangedpages\fi \to \everybye
-%appendtoks \registerfileinfo[end]\jobfilename \to \everybye
+\appendtoks
+ \showcontextbanner
+ \initializenewlinechar
+ \calculatecurrenttime
+ \loadsystemfiles
+ % \loadoptionfile % obsolete, but nice to keep as reference of when/how
+ \job_options_get_commandline % expands some commands
+ \job_options_get_ctxfile % might expand some commands
+ \job_options_set_filenames
+ \font_preloads_at_every_job
+ \settopskip % brrr
+ \initializemainlanguage
+ \initializexmlprocessing % is this still needed?
+ \initializepagebackgrounds
+ \initializepagecounters
+ % \directsetup{*runtime:options}% % obsolete as the option file is replaced by a more direct mechanism
+ % \directsetup{*runtime:modules}% % obsolete as the option file is replaced by a more direct mechanism
+ \job_options_set_modes
+ \job_options_set_modules
+ \job_options_set_environments
+\to \everyjob
-%appendtoks \MPLIBallocate{1000} \to \everydump
+\appendtoks
+ \ifarrangingpages\poparrangedpages\fi
+\to \everybye
-\prependtoks \resetallattributes \to \everybeforeoutput
+\prependtoks
+ \resetallattributes
+\to \everybeforeoutput
-\appendtoks \the\everybackendshipout \to \everyshipout
-\prependtoks \the\everylastbackendshipout \to \everylastshipout
+\appendtoks
+ \the\everybackendshipout
+\to \everyshipout
-\prependtoks \lefttoright \to \everybeforeoutput
+\prependtoks
+ \the\everylastbackendshipout
+\to \everylastshipout
+
+\prependtoks
+ \lefttoright
+\to \everybeforeoutput
% temporary here:
@@ -88,4 +156,6 @@
% \resetcharacterspacing
% \to \everyhyphenatedurl
+% \setbreakpoints[compound]
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/core-env.lua b/Master/texmf-dist/tex/context/base/core-env.lua
index c31df929e15..025192d4bec 100644
--- a/Master/texmf-dist/tex/context/base/core-env.lua
+++ b/Master/texmf-dist/tex/context/base/core-env.lua
@@ -16,6 +16,7 @@ local P, C, S, Cc, lpegmatch, patterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc, lpeg.m
local csname_id = token.csname_id
local create = token.create
local texcount = tex.count
+local texsetcount = tex.setcount
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -37,7 +38,7 @@ setmetatableindex(tex.modes, function(t,k)
if m then
return m()
else
- local n = "mode" .. k
+ local n = "mode>" .. k
if csname_id(n) == undefined then
return false
else
@@ -52,7 +53,7 @@ setmetatableindex(tex.systemmodes, function(t,k)
if m then
return m()
else
- local n = "mode*" .. k
+ local n = "mode>*" .. k
if csname_id(n) == undefined then
return false
else
@@ -77,13 +78,23 @@ end)
-- todo : global
-function tex.settrue(name)
- texcount[name] = 0
-end
-
-function tex.setfalse(name)
- texcount[name] = 1
-end
+-- not possible as we let at the tex end to zerocount and plusone
+--
+-- function tex.settrue(name,glob)
+-- if glob then
+-- texsetcount("global",name,0)
+-- else
+-- texcount[name] = 0
+-- end
+-- end
+--
+-- function tex.setfalse(name,glob)
+-- if glob then
+-- texsetcount("global",name,1)
+-- else
+-- texcount[name] = 1
+-- end
+-- end
---- arg = P("{") * C(patterns.nested) * P("}") + Cc("")
@@ -122,12 +133,15 @@ local types = {
function tex.isdimen(name)
return lookuptoken(name)[1] == dimencode
end
+
function tex.iscount(name)
return lookuptoken(name)[1] == countcode
end
+
function tex.istoken(name)
return lookuptoken(name)[1] == tokencode
end
+
function tex.isskip(name)
return lookuptoken(name)[1] == skipcode
end
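
The metatable changes above only move the lookup to the mode> namespace; querying a mode from Lua stays the same for callers. A small sketch, with an invented mode name:

-- sketch, not part of the patch: reading a mode through the tex.modes metatable above
local report = logs.reporter("demo")

local enabled = tex.modes.draftnotes -- registered function, else the count \mode>draftnotes, else false
report("draftnotes mode: %s",tostring(enabled))
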
diff --git a/Master/texmf-dist/tex/context/base/core-env.mkiv b/Master/texmf-dist/tex/context/base/core-env.mkiv
index 1819d084d0a..0332631dc2f 100644
--- a/Master/texmf-dist/tex/context/base/core-env.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-env.mkiv
@@ -54,42 +54,44 @@
% fast internal ones
+\def\??mode{mode>} % special namespace, also used at lua end
+
\def\syst_modes_new#1%
- {\expandafter\newcount\csname\s!mode#1\endcsname}
+ {\expandafter\newcount\csname\??mode#1\endcsname}
\unexpanded\def\newmode#1%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi}
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi}
\unexpanded\def\setmode#1%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \csname\s!mode#1\endcsname\enabledmode}
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \csname\??mode#1\endcsname\enabledmode}
\unexpanded\def\resetmode#1%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \csname\s!mode#1\endcsname\disabledmode}
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \csname\??mode#1\endcsname\disabledmode}
\unexpanded\def\newsystemmode#1%
- {\ifcsname\s!mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi}
+ {\ifcsname\??mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi}
\unexpanded\def\setsystemmode#1%
- {\ifcsname\s!mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
- \csname\s!mode\systemmodeprefix#1\endcsname\enabledmode}
+ {\ifcsname\??mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
+ \csname\??mode\systemmodeprefix#1\endcsname\enabledmode}
\unexpanded\def\resetsystemmode#1%
- {\ifcsname\s!mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
- \csname\s!mode\systemmodeprefix#1\endcsname\disabledmode}
+ {\ifcsname\??mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
+ \csname\??mode\systemmodeprefix#1\endcsname\disabledmode}
% \def\dosetsystemmode#1%
-% {\csname\s!mode\systemmodeprefix#1\endcsname\enabledmode}
+% {\csname\??mode\systemmodeprefix#1\endcsname\enabledmode}
%
% \def\doresetsystemmode#1%
-% {\csname\s!mode\systemmodeprefix#1\endcsname\disabledmode}
+% {\csname\??mode\systemmodeprefix#1\endcsname\disabledmode}
% demo: trialtypesetting is a systemmode as well as an if
\newsystemmode{trialtypesetting} % the name of \@@trialtypesetting might change (also at the lua end)
-\expandafter\let\expandafter\@@trialtypesetting\csname\s!mode\systemmodeprefix trialtypesetting\endcsname % private !
+\expandafter\let\expandafter\@@trialtypesetting\csname\??mode\systemmodeprefix trialtypesetting\endcsname % private !
\appendtoks
\@@trialtypesetting\enabledmode
@@ -101,36 +103,42 @@
% user ones
+\let\syst_mode_prefix\relax
+
\unexpanded\def\preventmode{\unprotect\syst_modes_prevent}
\unexpanded\def\enablemode {\unprotect\syst_modes_enable }
\unexpanded\def\disablemode{\unprotect\syst_modes_disable}
+\unexpanded\def\globalpreventmode{\let\syst_mode_prefix\global\unprotect\syst_modes_prevent}
+\unexpanded\def\globalenablemode {\let\syst_mode_prefix\global\unprotect\syst_modes_enable }
+\unexpanded\def\globaldisablemode{\let\syst_mode_prefix\global\unprotect\syst_modes_disable}
+
\let\definemode\disablemode % nicer
-\def\syst_modes_prevent[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_prevent_indeed}
-\def\syst_modes_enable [#1]{\protect\rawprocesscommacommand[#1]\syst_modes_enable_indeed }
-\def\syst_modes_disable[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_disable_indeed}
+\def\syst_modes_prevent[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_prevent_indeed\let\syst_mode_prefix\relax}
+\def\syst_modes_enable [#1]{\protect\rawprocesscommacommand[#1]\syst_modes_enable_indeed \let\syst_mode_prefix\relax}
+\def\syst_modes_disable[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_disable_indeed\let\syst_mode_prefix\relax}
\def\syst_modes_prevent_indeed#1%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \csname\s!mode#1\endcsname\preventedmode}
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \syst_mode_prefix\csname\??mode#1\endcsname\preventedmode}
\def\syst_modes_enable_indeed#1% we can speed it up by moving the new outside
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \ifnum\csname\s!mode#1\endcsname=\preventedmode \else
- \csname\s!mode#1\endcsname\enabledmode
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \ifnum\csname\??mode#1\endcsname=\preventedmode \else
+ \syst_mode_prefix\csname\??mode#1\endcsname\enabledmode
\fi}
\def\syst_modes_disable_indeed#1%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \ifnum\csname\s!mode#1\endcsname=\preventedmode \else
- \csname\s!mode#1\endcsname\disabledmode
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \ifnum\csname\??mode#1\endcsname=\preventedmode \else
+ \syst_mode_prefix\csname\??mode#1\endcsname\disabledmode
\fi}
% handy for mp
\def\booleanmodevalue#1%
- {\ifcsname\s!mode#1\endcsname\ifcase\csname\s!mode#1\endcsname\s!false\else\s!true\fi\else\s!false\fi}
+ {\ifcsname\??mode#1\endcsname\ifcase\csname\??mode#1\endcsname\s!false\else\s!true\fi\else\s!false\fi}
% check macros
@@ -141,8 +149,8 @@
\newconditional\c_checked_mode
\def\syst_modes_check_indeed#1%
- {\ifcsname\s!mode#1\endcsname
- \ifcase\csname\s!mode#1\endcsname\else
+ {\ifcsname\??mode#1\endcsname
+ \ifcase\csname\??mode#1\endcsname\else
\let\syst_modes_check_step\gobbleoneargument
\fi
\fi}
@@ -157,8 +165,8 @@
\fi}
\def\syst_modes_check_all_indeed#1%
- {\ifcsname\s!mode#1\endcsname
- \ifcase\csname\s!mode#1\endcsname
+ {\ifcsname\??mode#1\endcsname
+ \ifcase\csname\??mode#1\endcsname
\let\syst_modes_check_all_step\gobbleoneargument
\fi
\else
@@ -198,25 +206,25 @@
%D Pushing/popping:
\unexpanded\def\pushmode[#1]%
- {\ifcsname\s!mode#1\endcsname\else\syst_modes_new{#1}\fi
- \expandafter\edef\csname\??modestack#1\endcsname{\number\csname\s!mode#1\endcsname}%
+ {\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi
+ \expandafter\edef\csname\??modestack#1\endcsname{\number\csname\??mode#1\endcsname}%
\expandafter\pushmacro\csname\??modestack#1\endcsname}
\unexpanded\def\popmode[#1]%
{\ifcsname\??modestack#1\endcsname
\expandafter\popmacro\csname\??modestack#1\endcsname
- \csname\s!mode#1\endcsname\csname\??modestack#1\endcsname\relax
+ \csname\??mode#1\endcsname\csname\??modestack#1\endcsname\relax
\fi}
\def\pushsystemmode#1%
- {\ifcsname\s!mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
- \expandafter\edef\csname\??modestack\systemmodeprefix#1\endcsname{\number\csname\s!mode\systemmodeprefix#1\endcsname}%
+ {\ifcsname\??mode\systemmodeprefix#1\endcsname\else\syst_modes_new{\systemmodeprefix#1}\fi
+ \expandafter\edef\csname\??modestack\systemmodeprefix#1\endcsname{\number\csname\??mode\systemmodeprefix#1\endcsname}%
\expandafter\pushmacro\csname\??modestack\systemmodeprefix#1\endcsname}
\def\popsystemmode#1%
{\ifcsname\??modestack\systemmodeprefix#1\endcsname
\expandafter\popmacro\csname\??modestack\systemmodeprefix#1\endcsname
- \csname\s!mode\systemmodeprefix#1\endcsname\csname\??modestack\systemmodeprefix#1\endcsname\relax
+ \csname\??mode\systemmodeprefix#1\endcsname\csname\??modestack\systemmodeprefix#1\endcsname\relax
\fi}
%D Here is a relatively new variant of mode checking:
@@ -344,22 +352,31 @@
% the next one is meant for \c!setups situations, hence the check for
% a shortcut
-\def\doprocesslocalsetups#1%
- {\edef\tobeprocessedsetups{#1}%
- \ifx\tobeprocessedsetups\empty\else
- \dodoprocesslocalsetups
+\let\m_syst_setups_asked\empty
+
+\def\doprocesslocalsetups#1% sort of public, fast local variant
+ {\edef\m_syst_setups_asked{#1}%
+ \ifx\m_syst_setups_asked\empty\else
+ \expandafter\syst_setups_process_local
+ \fi}
+
+\unexpanded\def\usesetupsparameter#1%
+ {\edef\m_syst_setups_asked{#1\c!setups}%
+ \ifx\m_syst_setups_asked\empty\else
+ \expandafter\syst_setups_process_local
\fi}
-% \def\dodoprocesslocalsetups
-% {\expandafter\processcommalist\expandafter[\tobeprocessedsetups]\syst_setups}
+% \def\syst_setups_process_local
+% {\expandafter\processcommalist\expandafter[\m_syst_setups_asked]\syst_setups}
% setups=S1
% setups=lua(S2)
% setups=S3
% setups={S1,lua(S2),xml(test{123}),S3}
-\def\dodoprocesslocalsetups
- {\ctxcommand{autosetups("\tobeprocessedsetups")}}
+\def\syst_setups_process_local
+ {\ctxcommand{autosetups("\m_syst_setups_asked")}%
+ \relax} % let's prevent lookahead
\def\autosetups#1%
{\ctxcommand{autosetups("#1")}}
@@ -625,4 +642,51 @@
\def\env {\getvariable \s!environment}
\def\envvar {\getvariabledefault\s!environment}
+%D \macros
+%D {defineselector,setupselector,select,directselect}
+%D
+%D \starttyping
+%D \defineselector[caption][max=2,n=2]
+%D
+%D \start
+%D \setupselector[caption][n=1]
+%D \placelist[figure][criterium=all]
+%D \stop
+%D
+%D \starttext
+%D \placefigure
+%D {\select{caption}{zapf}{\input zapf \relax}}
+%D {}
+%D \stoptext
+%D \stoptyping
+
+\installcorenamespace{selector}
+
+\unexpanded\def\defineselector{\dodoubleargument\syst_selectors_define}
+\unexpanded\def\setupselector {\dodoubleargument\syst_selectors_setup}
+
+% \def\syst_selectors_define[#1][#2]{\getparameters[\??selector#1][\c!max=\plusone,\c!n=\plusone,#2]}
+% \def\syst_selectors_setup [#1][#2]{\getparameters[\??selector#1][#2]}
+
+\def\syst_selectors_define[#1][{\getparameters[\??selector#1][\c!max=\plustwo,\c!n=\plusone,}
+\def\syst_selectors_setup [#1][{\getparameters[\??selector#1][}
+
+\unexpanded\def\select % unexpandable
+ {\directselect}
+
+% \def\directselect#1% expandable
+% {\filterfromnext % maybe add an \expanded
+% {\csname\??selector\ifcsname\??selector#1\c!max\endcsname#1\fi\c!max\endcsname}%
+% {\csname\??selector\ifcsname\??selector#1\c!n \endcsname#1\fi\c!n \endcsname}}
+%
+% this is more efficient when the arguments are used a few times (or passed along):
+
+\def\directselect#1% expandable
+ {\expandafter\filterfromnext
+ \csname\??selector\ifcsname\??selector#1\c!max\endcsname#1\fi\c!max\expandafter\endcsname
+ \csname\??selector\ifcsname\??selector#1\c!n \endcsname#1\fi\c!n \endcsname}
+
+\letvalue{\??selector\c!max}\plusone
+\letvalue{\??selector\c!n }\plusone
+
\protect \endinput
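
The core-env.mkiv hunks above move modes to the mode> namespace and add global variants of the enable/disable/prevent commands; a minimal usage sketch (mode name and messages invented, commands taken from the hunks):

% sketch, not part of the patch
\definemode[draftnotes]                % same as \disablemode, just reads better
\begingroup
  \globalenablemode[draftnotes]        % the \global prefix makes it survive the group
\endgroup
\doifmodeelse {draftnotes}
  {\writestatus{demo}{draftnotes enabled}}
  {\writestatus{demo}{draftnotes disabled}}
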
diff --git a/Master/texmf-dist/tex/context/base/core-fnt.mkiv b/Master/texmf-dist/tex/context/base/core-fnt.mkiv
deleted file mode 100644
index 980f1a85cd8..00000000000
--- a/Master/texmf-dist/tex/context/base/core-fnt.mkiv
+++ /dev/null
@@ -1,158 +0,0 @@
-%D \module
-%D [ file=core-fnt,
-%D version=1995.10.10,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Fonts,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Core Macros / Fonts}
-
-\unprotect
-
-%D \macros
-%D {compound}
-%D
-%D We will overload the already active \type {|} so we have
-%D to save its meaning in order to be able to use this handy
-%D macro.
-%D
-%D \starttyping
-%D so test\compound{}test can be used instead of test||test
-%D \stoptyping
-
-\bgroup \catcode\barasciicode\activecatcode \gdef\compound#1{|#1|} \egroup
-
-%D Here we hook some code into the clean up mechanism needed
-%D for verbatim data.
-
-\appendtoks
- \disablecompoundcharacters
- \disablediscretionaries
-\to \everycleanupfeatures
-
-%D The following code will me mkiv'd when needed. It's rather easy to
-%D extend the kerner with glue.
-
-%D \macros
-%D {stretched}
-%D
-%D Stretching characters in a word is a sort of typographical
-%D murder. Nevertheless we support this manipulation for use in
-%D for instance titles.
-%D
-%D \starttyping
-%D \hbox to 5cm{\stretched{murder}}
-%D \stoptyping
-%D
-%D \typebuffer
-%D
-%D or
-%D
-%D \startexample
-%D \getbuffer
-%D \stopexample
-%D
-%D \showsetup{stretched}
-
-\unexpanded\def\stretched#1%
- {\ifvmode\hbox to \hsize\else\ifinner\else\hbox\fi\fi
- \bgroup\processtokens\relax\hss\relax{\hss\hss}{#1}\egroup}
-
-%D \startbuffer
-%D \stretched{Unknown Box}
-%D \hbox to .5\hsize{\stretched{A Horizontal Box}}
-%D \vbox to 2cm{\stretched{A Vertical Box}}
-%D \hbox to 3cm{\stretched{sp{\'e}c{\`\i}{\"a}l}}
-%D \stopbuffer
-%D
-%D \getbuffer
-%D
-%D The first line of this macros takes care of boxing. Normally
-%D one will use an \type{\hbox} specification. The last line
-%D shows how special characters should be passed.
-%D
-%D \typebuffer
-
-%D \macros
-%D {stretchednormalcase, stretcheduppercase, stretchedlowercase}
-%D
-%D A convenient alternative is:
-%D
-%D \starttyping
-%D \stretcheduppercase{Is this what you like?}
-%D \stoptyping
-%D
-%D \typebuffer
-%D
-%D this one uses fixed skips and kerns.
-%D
-%D \startexample
-%D \getbuffer
-%D \stopexample
-%D
-%D The default skip can be set with:
-
-%D Given the following settings, the space is 1em by default:
-
-%D OBSOLETE:
-
-\def\stretchedspacefactor{4}
-\def\stretchedspaceamount{.25em}
-\def\stretchedbreaktokens{.@/}
-
-\unexpanded\def\stretchednormalcase
- {\stretchedsomecase\firstofoneargument}
-
-\unexpanded\def\stretcheduppercase
- {\stretchedsomecase{\the\everyuppercase\uppercase}}
-
-\unexpanded\def\stretchedlowercase
- {\stretchedsomecase{\the\everylowercase\lowercase}}
-
-\def\stretchedsomecase#1#2%
- {\bgroup
- #1{\def\textstring{#2}}%
- \ifdim\stretchedspaceamount=\zeropoint
- \textstring
- \else
- \def\textkern##1%
- {% beware: ##1 may not be \box\somebox -)
- \determinemidwordbreak{##1}{\stretchedbreaktokens}%
- \kern\stretchedspaceamount##1\domidwordbreak}%
- \def\textskip
- {\scratchdimen\stretchedspaceamount
- \hskip\stretchedspacefactor\scratchdimen}%
- \@EA\processtokens\@EA\relax\@EA\textkern\@EA\relax\@EA\textskip\@EA{\textstring}%
- \fi
- \egroup}
-
-%D An auxiliary macro, see for usage \type {\stretcheduppercase}.
-
-\let\domidwordbreak\relax
-
-\def\setmidwordbreaktoken#1%
- {\sfcode`#1=5000\relax}
-
-\def\determinemidwordbreak#1#2%
- {\edef\midwordbreaktokens{#2}%
- \ifx\midwordbreaktokens\empty
- \global\let\domidwordbreak\relax
- \else
- \setbox\scratchbox\hbox
- {\expandafter\handletokens\midwordbreaktokens\with\setmidwordbreaktoken
- a\space \!!dimena\lastskip
- #1\space\!!dimenb\lastskip \relax % needed
- \ifdim\!!dimena=\!!dimenb
- \globallet\domidwordbreak\relax
- \else
- \globallet\domidwordbreak\allowbreak
- \fi}%
- \fi}
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/core-ini.mkiv b/Master/texmf-dist/tex/context/base/core-ini.mkiv
index de37bf08f42..1682bed1b23 100644
--- a/Master/texmf-dist/tex/context/base/core-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-ini.mkiv
@@ -13,67 +13,199 @@
\writestatus{loading}{ConTeXt Core Macros / Additional Initialization}
-%D We will move more code to here, so that we become less dependent of the
-%D orde in which modules are loaded.
-
\unprotect
-\everypar \emptytoks
-\neverypar \emptytoks
+%D We introduce a couple of variables that are used all over \CONTEXT. Alternatively
+%D we could define them in each module but as they are part of the bigger picture we
+%D prefer to do it here. Ideally we should have a proper dependency tree but it might
+%D be that we want to make versions with a smaller footprint, in which case one would
+%D still need to define the token list registers (unless we could do that at runtime).
-\appendtoks
- %flushnotes
- \page_otr_command_synchronize_side_floats % an otr specific command
- \checkindentation
- \showparagraphnumber
- \restoreinterlinepenalty
- %flushmargincontents
- %flushcommentanchors
- \flushnotes
- \synchronizenotes
- \OTRSETshowstatus
- \flushpostponedbookmark
- \registerparoptions
- %\flushsyncpositions
- \flushpostponednodedata
- \typo_delimited_repeat
- \insertparagraphintro
-\to \everypar
+%D \macros
+%D {every...}
+%D
+%D A few every's.
-\appendtoks
- \flushpostponedbookmark
-\to \neverypar
+%D Output routine:
-\appendtoks
- \flushpostponedbookmark
-\to \everylistentry
+\newtoks \everybeforeoutput
+\newtoks \everyafteroutput
-\appendtoks
- \flushnotes
-\to \everydisplay
+%D Shipout:
+
+\newtoks \everyshipout
+\newtoks \everybeforeshipout
+\newtoks \everyaftershipout
+\newtoks \everyfirstshipout
+\newtoks \everylastshipout
+
+%D End of run:
+
+\newtoks \everybye
+\newtoks \everygoodbye
+\newtoks \everynotabene
+
+%D Document:
+
+\newtoks \everysetupdocument
+\newtoks \everyendoftextbody
+
+\newtoks \everystarttext
+\newtoks \everystoptext
+
+%D Purity:
+
+\newtoks \everyforgetall
+\newtoks \everycleanupfeatures
+\newtoks \everysimplifycommands
+
+\let\simplifiedcommands\everysimplifycommands % backward compatible, will stay as it's used in styles
+
+\unexpanded\def\forgetall {\the\everyforgetall}
+\unexpanded\def\cleanupfeatures {\the\everycleanupfeatures}
+\unexpanded\def\simplifycommands{\the\everysimplifycommands}
\appendtoks
- \adjustsidefloatdisplaylines
-\to \everydisplay
+ \everypar\emptytoks % pretty important
+\to \everyforgetall
+
+%D Page building:
+
+\newtoks \everybeforepagebody
+\newtoks \everyafterpagebody
+
+\let\everypagebody\everybeforepagebody % backward compatible, will become obsolete
+
+%D Floats:
+
+\newtoks \everyinsidefloat
+
+%D Sectioning:
+
+%newtoks \everyheadstart
+
+%D Par building (experimental, used in xml <p> .. </p>)
+
+\newtoks \everybeginofpar
+\newtoks \everyendofpar
+%newtoks \everyparflush
+
+\unexpanded\def\bpar{\the\everybeginofpar\ignorespaces} % may interfere with \everypar
+\unexpanded\def\epar{\ifhmode\removeunwantedspaces\the\everyendofpar\fi} % test prevents problems with \bpar\epar
+
+%D Lists:
+
+\newtoks \everylistentry
+\newtoks \everysavesortkeys
+
+%D Marks:
-% \appendtoks
-% \flushsyncpositions
-% \to \everyheadstart
+%newtoks \everymarking
-% \appendtoks
-% \flushsyncresets
-% \to \everyendoftextbody
+%D Fonts:
+
+\newtoks \everyfont
+\newtoks \everyglobalbodyfont
+\newtoks \everydefinedfont
+
+\newevery \everybodyfont \EveryBodyFont
+\newevery \everyfontswitch \EveryFontSwitch
+
+\newtoks \everysetupbodyfont
+\newtoks \everyswitchtobodyfont
+
+%D Math:
+
+\newtoks \everybeforedisplayformula
+\newtoks \everymathematics
+
+\prependtoks \the\everymathematics \to \everymath
+\prependtoks \the\everymathematics \to \everydisplay
+
+%D Tables:
+
+%newtoks \everytable % we need to distinguish kinds
+
+%D State mess:
+
+\newtoks \everypushsomestate
+\newtoks \everypopsomestate
+
+\unexpanded\def\pushsomestates{\the\everypushsomestate}
+\unexpanded\def\popsomestates {\the\everypopsomestate }
+
+%D More generic (used to be pushcolor etc)
+
+\newtoks\everystarttextproperties
+\newtoks\everystoptextproperties
+
+\unexpanded\def\starttextproperties{\the\everystarttextproperties}
+\unexpanded\def\stoptextproperties {\the\everystoptextproperties}
+
+%D \macros
+%D {trialtypesetting}
+%D
+%D We disable trial typesetting in the output routine,
+%D just to be sure.
+
+\prependtoks
+ \resettrialtypesetting
+\to \everybeforepagebody
+
+%D \macros
+%D {ifinpagebody,ifinsidecolumns,ifdoublesided,ifsinglesided}
+%D
+%D These will become system modes and conditionals
+
+\newif \ifinpagebody
+\newif \ifinsidecolumns
+\newif \ifdoublesided \doublesidedfalse
+\newif \ifsinglesided \singlesidedtrue
+\newif \ifinsidefloat
+\newif \ifdoingblocks
+\newif \ifgridsnapping
+
+%D \macros
+%D {ifproductionrun}
+%D
+%D This boolean can be used to bypass certain initializations.
+
+\newif\ifproductionrun
\appendtoks
- \ignorespaces
-\to \everybeginofpar
+ \productionruntrue
+\to \everydump
+
+%D \macros
+%D {everyboxedcontent, ifboxedcontent,
+%D startboxedcontent, stopboxedcontent}
+%D
+%D This one is relatively new and will be used as a more robust test for inner
+%D situations.
+
+\newif \ifboxedcontent
+\newtoks\everyboxedcontent
\appendtoks
- \removeunwantedspaces
- %strut % option ?
- % \flushsyncresets
- %setlastlinewidth % gone, will be done in lua
- \endgraf
-\to \everyendofpar
+ \boxedcontenttrue
+\to \everyboxedcontent
+
+\unexpanded\def\startboxedcontent{\bgroup\the\everyboxedcontent}
+
+\let\stopboxedcontent\egroup
+
+%D We store some original meanings, maybe in \type {math-ini}.
+
+\let\normalat \at
+\let\normalin \in
+\let\normalfrom \from
+%let\normalover \over
+\let\normalabout\about
+
+%D This will be implemented way later:
+
+\let\setlayoutcomponentattribute \gobbleoneargument
+\let\resetlayoutcomponentattribute\relax
+\let\layoutcomponentboxattribute \empty
\protect \endinput
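
Among the token lists collected in core-ini.mkiv above are the experimental \everybeginofpar and \everyendofpar hooks behind \bpar and \epar; a minimal sketch of the xml paragraph mapping they are meant for (setup name invented, \startxmlsetups and \xmlflush are the usual MkIV xml interface):

% sketch, not part of the patch
\startxmlsetups xml:demo:p
  \bpar
    \xmlflush{#1}
  \epar
\stopxmlsetups
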
diff --git a/Master/texmf-dist/tex/context/base/core-mis.mkiv b/Master/texmf-dist/tex/context/base/core-mis.mkiv
deleted file mode 100644
index 5c364f3c462..00000000000
--- a/Master/texmf-dist/tex/context/base/core-mis.mkiv
+++ /dev/null
@@ -1,738 +0,0 @@
-%D \module
-%D [ file=core-mis,
-%D version=1998.01.29,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Miscelaneous,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Core Macros / Misc Commands}
-
-% needs to be redone
-
-\unprotect
-
-%D Sometimes (for instance in bookmarks) we need to simplify macro
-%D behaviour, so here is the hook.
-
-\ifx\simplifiedcommands\undefined \newtoks\simplifiedcommands \fi
-
-\def\simplifycommands{\the\simplifiedcommands}
-
-%D A possibly growing list:
-
-%appendtoks \def\executesynonym#1#2#3#4{#3}\to\simplifiedcommands
-%appendtoks \def\executesort#1#2#3{#3}\to\simplifiedcommands
-
-\appendtoks \def\ { }\to\simplifiedcommands
-\appendtoks \def\type#1{\letterbackslash\checkedstrippedcsname#1}\to\simplifiedcommands
-\appendtoks \def\tex#1{\letterbackslash#1}\to\simplifiedcommands
-\appendtoks \def\TeX{TeX}\to\simplifiedcommands
-\appendtoks \def\ConTeXt{ConTeXt}\to\simplifiedcommands
-\appendtoks \def\MetaPost{MetaPost}\to\simplifiedcommands
-\appendtoks \def\MetaFont{MetaFont}\to\simplifiedcommands
-\appendtoks \def\MetaFun{MetaFun}\to\simplifiedcommands
-%appendtoks \def||{-}\to\simplifiedcommands
-\appendtoks \def|#1|{\ifx#1\empty\empty-\else#1\fi}\to\simplifiedcommands
-
-\appendtoks\let\buildtextaccent\secondoftwoarguments\to\simplifiedcommands
-
-% THIS WAS MAIN-002.TEX
-
-\def\horitems#1#2% #1=breedte #2=commandos
- {\scratchdimen#1%
- \divide\scratchdimen \nofitems
- \!!counta\zerocount
- \def\docommand##1%
- {\advance\!!counta \plusone
- \processaction
- [\@@isalign]
- [ \v!left=>\hbox to \scratchdimen{\strut##1\hss},
- \v!right=>\hbox to \scratchdimen{\hss\strut##1},
- \v!middle=>\hbox to \scratchdimen{\hss\strut##1\hss},
- \v!margin=>\ifnum\!!counta=\plusone\hss\else\hfill\fi
- \strut##1%
- \ifnum\!!counta=\nofitems\hss\else\hfill\fi,
- \s!default=>\hbox to \scratchdimen{\hss\strut##1\hss}, % midden
- \s!unknown=>\hbox to \scratchdimen{\strut##1\hss}]}% % links
- \hbox to #1{\hss#2\hss}}
-
-\def\veritems#1#2% #1=breedte #2=commandos
- {\scratchdimen#1%
- \def\docommand##1%
- {\ifdim\scratchdimen<\zeropoint % the - was a signal
- \hbox to -\scratchdimen{\hss\strut##1}%
- \else\ifdim\scratchdimen>\zeropoint
- \hbox to \scratchdimen{\strut##1\hss}%
- \else
- \hbox{\strut##1}%
- \fi\fi}%
- \vbox{#2}}
-
-\def\dosetupitems[#1]%
- {\getparameters[\??is][#1]%
- \doif\@@iswidth\v!unknown
- {\def\@@iswidth{\hsize}}%
- \doifconversiondefinedelse\@@issymbol
- {\def\doitembullet##1{\convertnumber{\@@issymbol}{##1}}}
- {\doifsymboldefinedelse\@@issymbol
- {\def\doitembullet##1{\symbol[\@@issymbol]}}{}}}
-
-\def\makeitemsandbullets#1%
- {\doifelse\@@isn\v!unknown
- {\getcommalistsize[#1]%
- \edef\nofitems{\commalistsize}}
- {\edef\nofitems{\@@isn}}%
- \setbox0\hbox
- {\doitems \@@iswidth
- {\processcommalist[#1]\docommand}}%
- \setbox2\hbox
- {\doitems \@@isbulletbreedte
- {\dorecurse\nofitems
- {\docommand{\strut\doitembullet\recurselevel}}}}}
-
-\def\dostartitems#1#2#3%
- {\let\doitems#2%
- \def\@@isbulletbreedte{#3}%
- \makeitemsandbullets{#1}%
- \@@isbefore}
-
-\def\dostopitems
- {\@@isafter
- \egroup}
-
-\setvalue{doitems\v!top}#1%
- {\dostartitems{#1}\horitems\@@iswidth
- \noindent\vbox
- {\forgetall
- \doifsomething\@@issymbol
- {\doifnot\@@issymbol\v!none
- {\box2
- \@@isinbetween
- \nointerlineskip}}%
- \box0}%
- \dostopitems}
-
-\setvalue{doitems\v!bottom}#1%
- {\dostartitems{#1}\horitems\@@iswidth
- \noindent\vbox
- {\forgetall
- \box0
- \doifsomething\@@issymbol
- {\@@isinbetween
- \nointerlineskip
- \box2}}%
- \dostopitems}
-
-\setvalue{doitems\v!inmargin}#1%
- {\dostartitems{#1}\veritems{-1.5em}% - is a signal
- \noindent\hbox{\llap{\box2\hskip\leftmargindistance}\box0}%
- \dostopitems}
-
-\setvalue{doitems\v!left}#1%
- {\advance\hsize -1.5em%
- \dostartitems{#1}\veritems{1.5em}%
- \noindent\hbox{\box2\box0}%
- \dostopitems}
-
-\setvalue{doitems\v!right}#1%
- {\dostartitems{#1}\veritems{0em}%
- \noindent\hbox{\box0\hskip-\wd2\box2}%
- \dostopitems}
-
-\unexpanded\def\setupitems
- {\dosingleargument\dosetupitems}
-
-\def\complexitems[#1]%
- {\bgroup
- \setupitems[#1]%
- \parindent\zeropoint
- \setlocalhsize
- \hsize\localhsize
- \dontcomplain
- \executeifdefined{doitems\@@islocation}{\getvalue{doitems\v!left}}}
-
-\definecomplexorsimpleempty\items
-
-\setupitems
- [\c!location=\v!left,
- \c!symbol=5,
- \c!width=\hsize,
- \c!align=\v!middle,
- \c!n=\v!unknown,
- \c!before=\blank,
- \c!inbetween={\blank[\v!medium]},
- \c!after=\blank]
-
-% there is quite some historic balast in this mechanism, the next variant
-% is a first cleanup
-
-\let\currentparagraph\empty
-
-\newcount\alcounter \newcount\alnsize \newdimen\alhsize
-
-\def\paragraphparameter#1% \checkedparameter\??al\currentparagraph#1
- {\executeifdefined{\??al\currentparagraph#1}{\executeifdefined{\??al#1}\empty}}
-
-\def\paragraphcellmeter#1#2% \checkedparameter\??al\currentparagraph#1
- {\executeifdefined{\??al\currentparagraph\number#1#2}{\paragraphparameter{#2}}}
-
-\def\dodefineparagraphs[#1][#2]%
- {\edef\currentparagraph{#1}%
- \setvalue{\s!do\s!next\currentparagraph}%
- {\def\\{\getvalue\currentparagraph}}%
- \setvalue\currentparagraph
- {\getvalue{\s!do\s!next#1}%
- \dostartparagraphs{#1}}%
- \setvalue{\e!next\currentparagraph}%
- {\getvalue{#1}}%
- \setvalue{\e!start\currentparagraph}%
- {\bgroup
- \edef\currentparagraph{#1}%
- \letvalueempty{\s!do\s!next\currentparagraph}%
- \setvalue{\e!stop\currentparagraph}{\getvalue\currentparagraph\egroup}%
- \getvalue\currentparagraph}%
- \getparameters[\??al\currentparagraph]%
- [%\c!n=3,
- %\c!before=\blank,
- %\c!after=\blank,
- %\c!distance=1em,
- %\c!height=\v!fit,
- %\c!rule=\v!off,
- %\c!command=,
- %\c!align=,
- %\c!tolerance=\v!tolerant,
- %\c!rulethickness=\linewidth,
- %\c!rulecolor=,
- %\c!style=,
- %\c!color=,
- %\c!top=,
- %\c!top=\vss,
- %\c!bottom=\vfill,
- #2]%
- \setvalue{\e!setup#1\e!endsetup}%
- {\setupparagraphs[#1]}%
- \dorecurse
- {\paragraphparameter\c!n}
- {\setupparagraphs
- [\currentparagraph]
- [\recurselevel]
- [\c!width=,
- %\c!bottom=\paragraphparameter\c!bottom,
- %\c!top=\paragraphparameter\c!top,
- %\c!height=\paragraphparameter\c!height,
- %\c!rule=\paragraphparameter\c!rule,
- %\c!rulethickness=\paragraphparameter\c!rulethickness,
- %\c!rulecolor=\paragraphparameter\c!rulecolor,
- %\c!align=\paragraphparameter\c!align,
- %\c!tolerance=\paragraphparameter\c!tolerance, % obsolete
- %\c!distance=\paragraphparameter\c!distance,
- \c!style=\paragraphparameter\c!style,
- \c!color=\paragraphparameter\c!color]}%
- \setupparagraphs[\currentparagraph][1][\c!distance=\zeropoint]}
-
-\unexpanded\def\defineparagraphs
- {\dodoubleargument\dodefineparagraphs}
-
-\def\dosetupparagraphs[#1][#2][#3]%
- {\edef\currentparagraph{#1}%
- \ifsecondargument
- \doifelse{#2}\v!each
- {\dorecurse
- {\paragraphparameter\c!n}
- {\getparameters[\??al\currentparagraph\recurselevel][#3]}}
- {\doifelsenothing{#3}
- {\getparameters[\??al\currentparagraph][#2]}
- {\def\docommand##1{\getparameters[\??al\currentparagraph##1][#3]}%
- \processcommalist[#2]\docommand}}%
- \else
- \getparameters[\??al][#1]%
- \fi}
-
-\unexpanded\def\setupparagraphs
- {\dotripleempty\dosetupparagraphs}
-
-\setupparagraphs
- [\c!n=3,
- \c!before=\blank,
- \c!after=\blank,
- \c!distance=1em,
- \c!height=\v!fit,
- \c!rule=\v!off,
- \c!command=,
- \c!align=,
- \c!tolerance=\v!tolerant, % obsolete
- \c!rulethickness=\linewidth,
- \c!rulecolor=,
- \c!style=,
- \c!color=,
- \c!top=,
- \c!top=\vss,
- \c!bottom=\vfill]
-
-\def\doparagraphrule
- {\doifelse{\paragraphcellmeter\alcounter\c!rule}\v!on
- {\linewidth\paragraphcellmeter\alcounter\c!rulethickness
- \scratchdimen\dimexpr(\paragraphcellmeter\alcounter\c!distance-\linewidth)/2\relax
- \hskip\scratchdimen
- \color[\paragraphcellmeter\alcounter\c!rulecolor]{\vrule\!!width\linewidth}%
- \hskip\scratchdimen}
- {\hskip\paragraphcellmeter\alcounter\c!distance}}
-
-\def\dostartparagraph
- {\doifelsenothing{\paragraphcellmeter\alcounter\c!width}
- {\!!widtha\alhsize
- \divide\!!widtha \alnsize}
- {\!!widtha\paragraphcellmeter\alcounter\c!width}%
- \begingroup
- \dousestylehashparameter{\??al\currentparagraph\number\alcounter}\c!style
- \dousecolorhashparameter{\??al\currentparagraph\number\alcounter}\c!color
- \doifelse{\paragraphcellmeter\alcounter\c!height}\v!fit
- {\setbox\scratchbox\vtop}
- {\setbox\scratchbox\vtop to \paragraphcellmeter\alcounter\c!height}%
- \bgroup
- \blank[\v!disable]%
- \forgetall
- \paragraphcellmeter\alcounter\c!top
- \paragraphparameter\c!inner
- \hsize\!!widtha % setting \wd afterwards removed
- \paragraphcellmeter\alcounter\c!inner % twice
- \expanded{\setupalign [\paragraphcellmeter\alcounter\c!align ]}% {normal,verytolerant,stretch}
- \expanded{\setuptolerance[\paragraphcellmeter\alcounter\c!tolerance]}% obsolete
- \ignorespaces
- \endgraf
- \ignorespaces
- %
- % Nadeel van de onderstaande constructie is dat \everypar
- % binnen een groep kan staan en zo steeds \begstruts
- % worden geplaatst. Mooi is anders dus moet het anders!
- %
- % Hier is \Everypar niet nodig.
- %
- \everypar{\begstrut\everypar\emptytoks}%
- %
- \nospace % remove + ignore
- \paragraphcellmeter\alcounter\c!command}
-
-\def\dostopparagraph
- {\ifvmode
- \removelastskip
- \else
- \unskip\endstrut\endgraf
- \fi
- \paragraphcellmeter\alcounter\c!bottom
- \egroup
- \ifdim\wd\scratchbox=\zeropoint % no data
- \wd\scratchbox\!!widtha
- \fi
- \box\scratchbox
- \endgroup
- \ifnum\alcounter<\paragraphparameter\c!n\relax
- \@EA\doparagraphcell
- \else
- \@EA\dostopparagraphs
- \fi}
-
-\def\doparagraphcell
- {\global\advance\alcounter \plusone
- \doifelsenothing{\paragraphcellmeter\alcounter\c!distance}
- {\ifnum\alcounter=\plusone\else
- \hskip\paragraphparameter\c!distance
- \fi}
- {\ifnum\alcounter=\plusone
- \hskip\paragraphcellmeter\alcounter\c!distance
- \else
- \doparagraphrule
- \fi}%
- \letvalue\currentparagraph\dostopparagraph
- \dostartparagraph}
-
-\def\dostartparagraphs#1%
- {\bgroup
- \edef\currentparagraph{#1}%
- \global\alcounter\zerocount
- \parindent\zeropoint
- \setlocalhsize
- \alhsize\localhsize
- \alnsize\paragraphparameter\c!n\relax
- \dorecurse \alnsize
- {\doifelsenothing{\paragraphcellmeter\recurselevel\c!distance}
- {\ifnum\recurselevel=\plusone\else
- \global\advance\alhsize -\paragraphparameter\c!distance
- \fi}
- {\global\advance\alhsize -\paragraphcellmeter\recurselevel\c!distance}%
- \doifsomething{\paragraphcellmeter\recurselevel\c!width}
- {\global\advance\alnsize \minusone
- \global\advance\alhsize -\paragraphcellmeter\recurselevel\c!width}}%
- %whitespace % gaat fout bij \framed
- \paragraphparameter\c!before
- \leavevmode % gaat wel goed bij \framed, brrr
- \setbox\scratchbox\vbox\bgroup\hbox\bgroup\doparagraphcell}
-
-\def\dostopparagraphs
- {\egroup
- \egroup
- \iftrue
- \hbox{\raise\strutheight\box\scratchbox}% new
- \else
- \box\scratchbox % old
- \fi
- \par
- \paragraphparameter\c!after
- \egroup}
-
-% Is this used at all?
-
-\def\dosetuptab[#1]%
- {\getparameters[\??ta]
- [\c!headstyle=\v!normal,
- \c!headcolor=,
- \c!style=\v!normal,
- \c!color=,
- \c!width=\v!broad,
- \c!sample={\hskip4em},
- \c!before=,
- \c!after=,
- #1]%
- \definedescription
- [tab]
- [\c!headstyle=\@@taheadstyle,
- \c!headcolor=\@@tacolor,
- \c!sample=\@@tasample,
- \c!width=\@@tawidth,
- \c!before=\@@tabefore,
- \c!after=\@@taafter]}
-
-\unexpanded\def\setuptab
- {\dosingleargument\dosetuptab}
-
-\setuptab
- [\c!location=\v!left]
-
-% seldom used, move from kernel to run time module
-
-\ifx\tfx\undefined \let\tfx\relax \fi
-
-\def\basegrid
- {\dosingleempty\dobasegrid}
-
-\def\dobasegrid[#1]%
- {\begingroup
- \getparameters[\??rt]
- [\c!x=0,\c!y=0,
- \c!nx=10,\c!ny=10,
- \c!dx=.5,\c!dy=.5,
- \c!xstep=0,\c!ystep=0,
- \c!unit=\s!cm,
- \c!scale=1,
- \c!factor=1,
- \c!offset=\v!yes,
- \c!location=\v!left,
- #1]%
- \startpositioning
- \dimen0=\@@rtdx\@@rtunit\relax
- \dimen0=\@@rtscale\dimen0\relax
- \dimen0=\@@rtfactor\dimen0\relax
- \multiply\dimen0 \@@rtnx\relax
- \dimen2=\@@rtdy\@@rtunit\relax
- \dimen2=\@@rtscale\dimen2\relax
- \dimen2=\@@rtfactor\dimen2\relax
- \multiply\dimen2 \@@rtny\relax
- \def\horline
- {\vbox
- {\hrule
- \!!width \dimen0
- \!!height \linewidth
- \!!depth \!!zeropoint}}%
- \def\verline%
- {\vrule
- \!!width \linewidth
- \!!height \dimen2
- \!!depth \!!zeropoint}%
- \doglobal\newcounter\@@gridc
- \doglobal\newcounter\@@gridd
- \doglobal\newcounter\@@gride
- \def\setlegend##1##2##3%
- {\gdef\@@gridc{0}%
- \dimen0=2em\relax
- \dimen2=##2\@@rtunit\relax
- \dimen2=\@@rtscale\dimen2\relax
- \dimen2=\@@rtfactor\dimen2\relax
- \divide\dimen0 \dimen2\relax
- \xdef\@@gride{\number\dimen0}%
- \ifnum\@@gride>50
- \gdef\@@gride{100}%
- \else\ifnum\@@gride>10
- \gdef\@@gride{50}%
- \else\ifnum\@@gride>5
- \gdef\@@gride{10}%
- \else\ifnum\@@gride>1
- \gdef\@@gride{5}%
- \else
- \gdef\@@gride{1}%
- \fi\fi\fi\fi
- \gdef\@@gridd{0}%
- \def\legend
- {\ifnum\@@gridd=\zerocount
- \vbox
- {\increment(\@@gridc,##1)%
- \hbox to 2em{\hss\@@gridc\hss}}%
- \global\let\@@gridd=\@@gride
- \fi
- \doglobal\decrement\@@gridd
- \doglobal\increment(\@@gridc,##1)}}%
- \def\draw##1##2##3##4##5##6##7##8##9%
- {\setuppositioning
- [\c!state=##8,
- \c!xstep=\v!absolute,
- \c!ystep=\v!absolute,
- \c!unit=\@@rtunit,
- \c!scale=\@@rtscale,
- \c!factor=\@@rtfactor,
- \c!offset=\@@rtoffset,
- \c!xoffset=##6,
- \c!yoffset=##7]%
- \doifelse{##9}\v!middle
- {\scratchdimen##3pt\scratchdimen.5\scratchdimen
- \edef\@@psxx{\withoutpt\the\scratchdimen}%
- \scratchdimen##4pt\scratchdimen.5\scratchdimen
- \edef\@@psyy{\withoutpt\the\scratchdimen}%
- \scratchcounter##2\advance\scratchcounter -1
- \edef\@@pszz{\the\scratchcounter}}
- {\edef\@@psxx{0}\edef\@@psyy{0}\edef\@@pszz{##2}}%
- \position(\@@psxx,\@@psyy){##1}%
- \setuppositioning
- [\c!state=##8,
- \c!xstep=\v!relative,
- \c!ystep=\v!relative,
- \c!scale=\@@rtscale,
- \c!factor=\@@rtfactor,
- \c!offset=\@@rtoffset,
- \c!unit=\@@rtunit]%
- \dorecurse\@@pszz{\position(##3,##4){##5}}}%
- \draw
- \verline\@@rtnx\@@rtdx0\verline\!!zeropoint\!!zeropoint\v!start\empty
- \draw
- \horline\@@rtny0\@@rtdy\horline\!!zeropoint\!!zeropoint\v!start\empty
- \tfx
- \doifnot\@@rtxstep{0}
- {\setlegend\@@rtxstep\@@rtdx\@@rtx
- \draw\legend\@@rtnx\@@rtdx0\legend{-1em}{-1.5em}\v!overlay\@@rtlocation}%
- \doifnot\@@rtystep{0}
- {\setlegend\@@rtystep\@@rtdy\@@rty
- \draw\legend\@@rtny0\@@rtdy\legend{-2em}{-.75ex}\v!overlay\@@rtlocation}%
- \stoppositioning
- \endgroup}
-
-\let\grid\basegrid
-
-\definetabulate
- [\v!legend]
- [|emj1|i1|mR|]
-
-\setuptabulate
- [\v!legend]
- [\c!unit=.75em,\c!inner=\setquicktabulate\leg,EQ={=}]
-
-\definetabulate
- [\v!legend][\v!two]
- [|emj1|emk1|i1|mR|]
-
-\definetabulate
- [\v!fact]
- [|R|ecmj1|i1mR|]
-
-\setuptabulate
- [\v!fact]
- [\c!unit=.75em,\c!inner=\setquicktabulate\fact,EQ={=}]
-
-\unexpanded\def\xbox
- {\bgroup\aftergroup\egroup\hbox\bgroup\tx\let\next=}
-
-\unexpanded\def\xxbox
- {\bgroup\aftergroup\egroup\hbox\bgroup\txx\let\next=}
-
-%D This one is for Daniel Pittman, who wanted tight fractions. We show
-%D three versions. First the simple one using \type {\low} and \type {high}:
-%D
-%D \startbuffer
-%D \def\vfrac#1#2%
-%D {\hbox{\high{\tx#1\kern-.25em}/\low{\kern-.25em\tx#2}}}
-%D
-%D test \vfrac{1}{2} test \vfrac{123}{456} test
-%D \stopbuffer
-%D
-%D \typebuffer {\showmakeup\getbuffer}
-%D
-%D A better way to handle the kerning is the following, here
-%D we kind of assume that tye slash is symmetrical and has
-%D nearly zero width.
-%D
-%D \startbuffer
-%D \def\vfract#1#2%
-%D {\hbox{\high{\tx#1}\hbox to \zeropoint{\hss/\hss}\low{\tx#2}}}
-%D \stopbuffer
-%D
-%D \typebuffer {\showmakeup\getbuffer}
-%D
-%D The third and best alternative is the following:
-%D
-%D {\showmakeup\getbuffer}\crlf\getbuffer
-%D
-%D This time we measure the height of the \type {/} and
-%D shift over the maximum height and depths of this
-%D character and the fractional digits (we use 57 as
-%D sample). Here we combine all methods in one macros.
-
-\setnewconstant\vulgarfractionmethod\plusthree
-
-\definehspace[vulgarfraction][.25em] % [.15em]
-\definesymbol[vulgarfraction][/] % [\raise.2ex\hbox{/}]
-
-\unexpanded\def\vulgarfraction#1#2%
- {\dontleavehmode
- \hbox
- {\def\vulgarfraction{vulgarfraction}%
- \ifcase\vulgarfractionmethod
- #1\symbol[\vulgarfraction]#2%
- \or
- \high{\tx#1\kern-\hspaceamount\empty\vulgarfraction}%
- \symbol[\vulgarfraction]%
- \low {\kern-\hspaceamount\empty\vulgarfraction\tx#2}%
- \or
- \high{\tx#1}%
- \hbox to \zeropoint{\hss\symbol[\vulgarfraction]\hss}%
- \low{\tx#2}%
- \or
- \setbox0\hbox{\symbol[\vulgarfraction]}%
- \setbox2\hbox{\txx57}%
- \raise\ht0\hbox{\lower\ht2\hbox{\txx#1}}%
- \hbox to \zeropoint{\hss\symbol[\vulgarfraction]\hss}%
- \lower\dp0\hbox{\raise\dp2\hbox{\txx#2}}%
- \fi}}
-
-\ifdefined\vfrac \else \let\vfrac\vulgarfraction \fi
-
-%D \starttabulate
-%D \HL
-%D \NC \bf method \NC \bf visualization \NC\NR
-%D \HL
-%D \NC 0 \NC \vulgarfractionmethod0 \vulgarfraction{1}{2} \NC\NR
-%D \NC 1 \NC \vulgarfractionmethod1 \vulgarfraction{1}{2} \NC\NR
-%D \NC 2 \NC \vulgarfractionmethod2 \vulgarfraction{1}{2} \NC\NR
-%D \NC 3 \NC \vulgarfractionmethod3 \vulgarfraction{1}{2} \NC\NR
-%D \HL
-%D \stoptabulate
-
-%D Under construction:
-%D
-%D \starttyping
-%D \commalistsentence[aap,noot,mies]
-%D \commalistsentence[aap,noot]
-%D \commalistsentence[aap]
-%D \commalistsentence[a,b,c]
-%D \commalistsentence[a,b,c][{ \& },{ and }]
-%D \commalistsentence[a,b,c][+,-]
-%D \stoptyping
-
-% obsolete .. use lua instead
-
-\let\handlecommalistsentence\firstofoneargument
-
-\def\commalistsentenceone{and-1}
-\def\commalistsentencetwo{and-2}
-
-\def\commalistsentence
- {\dodoubleempty\docommalistsentence}
-
-\def\docommalistsentence[#1][#2]%
- {\bgroup
- \getfromcommalist[#2][1]%
- \ifx\commalistelement\empty
- \def\@@commalistsentenceone{\labeltext\commalistsentenceone}%
- \else
- \let\@@commalistsentenceone\commalistelement
- \fi
- \getfromcommalist[#2][2]%
- \ifx\commalistelement\empty
- \def\@@commalistsentencetwo{\labeltext\commalistsentencetwo}%
- \else
- \let\@@commalistsentencetwo\commalistelement
- \fi
- \getcommalistsize[#1]%
- \ifcase\commalistsize\relax
- \def\serializedcommalist{#1}%
- \else
- \let\serializedcommalist\empty
- \scratchcounter\zerocount
- \def\docommand##1%
- {\advance\scratchcounter \plusone
- \ifnum\scratchcounter=\plusone
- \scratchtoks{\handlecommalistsentence{##1}}%
- \else
- \ifnum\scratchcounter=\commalistsize
- \appendtoks\@@commalistsentencetwo\handlecommalistsentence{##1}\to\scratchtoks
- \else
- \appendtoks\@@commalistsentenceone\handlecommalistsentence{##1}\to\scratchtoks
- \fi
- \fi}%
- \processcommacommand[#1]\docommand
- \edef\serializedcommalist{\the\scratchtoks}%
- \fi
- \serializedcommalist
- \egroup}
-
-\def\commacommandsentence[#1]{\@EA\commalistsentence\@EA[#1]}
-
-\setuplabeltext [\s!nl] [and-1={,} , and-2= en ] % 1, 2 en 3
-\setuplabeltext [\s!en] [and-1={,} , and-2={,} ] % 1, 2, 3
-\setuplabeltext [\s!de] [and-1={,} , and-2= und ] % 1, 2 und 3
-\setuplabeltext [\s!hr] [and-1={,} , and-2= i ] % 1, 2 i 3
-
-%D \macros
-%D {somekindoftab}
-%D
-%D This macro can be used to create tabs:
-%D
-%D \starttyping
-%D \setupheadertexts[{\somekindoftab[alternative=horizontal]{\framed{\realfolio}}}]
-%D \setuptexttexts [{\somekindoftab[alternative=vertical] {\framed{\realfolio}}}]
-%D
-%D \starttext
-%D \showframe \dorecurse{10}{test\page}
-%D \stoptext
-%D \stoptyping
-
-\def\somekindoftab
- {\dosingleempty\dosomekindoftab}
-
-\def\dosomekindoftab[#1]%
- {\bgroup
- \getparameters[xx]
- [\c!alternative=\v!vertical,
- \c!width=\textwidth,\c!height=\textheight,
- \c!n=\lastpage,\c!m=\realpageno,
- #1]%
- \doifelse\xxalternative\v!vertical
- {\dodosomekindoftab\vbox\vskip\xxheight}
- {\dodosomekindoftab\hbox\hskip\xxwidth }}
-
-\def\dodosomekindoftab#1#2#3#4%
- {#1 to #3 \bgroup
- \forgetall
- \ifnum\xxm>\plusone
- #2\zeropoint \!!plus \the\numexpr\xxm -1\relax fill\relax
- \fi
- #4%
- \ifnum\xxm<\xxn\relax
- #2\zeropoint \!!plus \the\numexpr\xxn-\xxm\relax fill\relax
- \fi
- \egroup
- \egroup}
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/core-sys.lua b/Master/texmf-dist/tex/context/base/core-sys.lua
index 0be9fd588c7..1741bbe0a7a 100644
--- a/Master/texmf-dist/tex/context/base/core-sys.lua
+++ b/Master/texmf-dist/tex/context/base/core-sys.lua
@@ -6,21 +6,93 @@ if not modules then modules = { } end modules ['core-sys'] = {
license = "see context related readme files"
}
-local lower, format = string.lower, string.format
-local extname, basename, removesuffix = file.extname, file.basename, file.removesuffix
+local lower, format, gsub = string.lower, string.format, string.gsub
+local suffixonly, basename, removesuffix = file.suffix, file.basename, file.removesuffix
local environment = environment
-function commands.updatefilenames(inputfilename,outputfilename)
- environment.inputfilename = inputfilename or ""
- environment.outputfilename = outputfilename or ""
- environment.jobfilename = inputfilename or tex.jobname or ""
- environment.jobfilesuffix = lower(extname(environment.jobfilename))
+local report_files = logs.reporter("system","files")
+
+-- function commands.updatefilenames(jobname,fulljobname,inputfilename,outputfilename)
+-- --
+-- environment.jobname = jobname
+-- --
+-- local jobfilename = gsub(fulljobname or jobname or inputfilename or tex.jobname or "","%./","")
+-- --
+-- environment.jobfilename = jobfilename
+-- environment.jobfilesuffix = lower(suffixonly(environment.jobfilename))
+-- --
+-- local inputfilename = gsub(inputfilename or "","%./","")
+-- environment.inputfilename = inputfilename
+-- environment.inputfilebarename = removesuffix(basename(inputfilename))
+-- --
+-- local inputfilerealsuffix = suffixonly(inputfilename)
+-- environment.inputfilerealsuffix = inputfilerealsuffix
+-- --
+-- local inputfilesuffix = inputfilerealsuffix == "" and "tex" or lower(inputfilerealsuffix)
+-- environment.inputfilesuffix = inputfilesuffix
+-- --
+-- local outputfilename = outputfilename or environment.inputfilebarename or ""
+-- environment.outputfilename = outputfilename
+-- --
+-- local runpath = resolvers.cleanpath(lfs.currentdir())
+-- environment.runpath = runpath
+-- --
+-- statistics.register("running on path", function()
+-- return environment.runpath
+-- end)
+-- --
+-- statistics.register("job file properties", function()
+-- return format("jobname %a, input %a, suffix %a",jobfilename,inputfilename,inputfilesuffix)
+-- end)
+-- --
+-- end
+
+function environment.initializefilenames() -- commands.updatefilenames(jobname,fulljobname,input,result)
+
+ local arguments = environment.arguments
+
+ local jobname = arguments.jobname or tex.jobname
+ local fulljobname = arguments.fulljobname or jobname
+ local inputfilename = arguments.input or fulljobname
+ local outputfilename = arguments.result or removesuffix(jobname)
+
+ local inputfilename = suffixonly(inputfilename) == "tex" and removesuffix(inputfilename) or inputfilename or ""
+
+ local filename = fulljobname
+ local suffix = suffixonly(filename)
+
+ local filename = ctxrunner.resolve(filename) -- in case we're prepped
+
+ local jobfilename = jobname or inputfilename or tex.jobname or ""
+ local inputfilename = inputfilename or ""
+
+ jobfilename = gsub(jobfilename, "^./","")
+ inputfilename = gsub(inputfilename,"^./","")
+
+ environment.jobfilename = jobfilename
+ environment.jobfilesuffix = lower(suffixonly(jobfilename))
+
+ environment.inputfilename = inputfilename
environment.inputfilebarename = removesuffix(basename(inputfilename))
- environment.inputfilesuffix = lower(extname(inputfilename))
+ environment.inputfilesuffix = lower(suffixonly(inputfilename))
+
+ environment.outputfilename = outputfilename or environment.inputfilebarename or ""
+
+ environment.filename = filename
+ environment.suffix = suffix
+
+ report_files("jobname %a, input %a, result %a",jobfilename,inputfilename,outputfilename)
+
+ function environment.initializefilenames() end
end
statistics.register("result saved in file", function()
-- suffix will be fetched from backend
- return format( "%s.%s", environment.outputfilename, (tex.pdfoutput>0 and "pdf") or "dvi")
+ local outputfilename = environment.outputfilename or environment.jobname or tex.jobname or "<unset>"
+ if tex.pdfoutput > 0 then
+ return format("%s.%s, compresslevel %s, objectcompresslevel %s",outputfilename,"pdf",tex.pdfcompresslevel, tex.pdfobjcompresslevel)
+ else
+ return format("%s.%s",outputfilename,"dvi") -- hard to imagine
+ end
end)
diff --git a/Master/texmf-dist/tex/context/base/core-sys.mkiv b/Master/texmf-dist/tex/context/base/core-sys.mkiv
index 66f2a43803f..b65934432de 100644
--- a/Master/texmf-dist/tex/context/base/core-sys.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-sys.mkiv
@@ -36,21 +36,31 @@
%D line ending. I hate this mess.
\edef\operatingsystem {\cldcontext{os.platform}}
-\def \jobfilename {\cldcontext{environment.jobfilename or ""}}
-\def \jobfilesuffix {\cldcontext{environment.jobfilesuffix or ""}}
-\def \inputfilebarename{\cldcontext{environment.inputfilebarename or ""}}
-\def \inputfilesuffix {\cldcontext{environment.inputfilesuffix or ""}}
-\def \inputfilename {\cldcontext{environment.inputfilename or ""}}
-\def \outputfilename {\cldcontext{environment.outputfilename or ""}}
+
+%D The jobname is what gets loaded by the cont-yes stub file. This name
+%D also determines the name of the tuc and related files.
+
+\def \jobfilename {\cldcontext{environment.jobfilename or ""}}
+\def \jobfilesuffix {\cldcontext{environment.jobfilesuffix or ""}}
+
+%D However, that one can itself load another file.
+
+\def \inputfilebarename {\cldcontext{environment.inputfilebarename or ""}}
+\def \inputfilerealsuffix{\cldcontext{environment.inputfilerealsuffix or ""}}
+\def \inputfilesuffix {\cldcontext{environment.inputfilesuffix or ""}}
+\def \inputfilename {\cldcontext{environment.inputfilename or ""}}
+
+%D The output name is only used for some checking.
+
+\def \outputfilename {\cldcontext{environment.outputfilename or ""}}
\installcorenamespace{system}
\installdirectcommandhandler \??system {system}
\appendtoks
- \edef\outputresolution{\directsystemparameter\c!resolution}%
- \edef\outputfilename {\directsystemparameter\c!file }%
- \edef\inputfilename {\directsystemparameter\c!inputfile }%
+% \edef\outputfilename {\directsystemparameter\c!file }%
+% \edef\inputfilename {\directsystemparameter\c!inputfile }%
\to \everysetupsystem
\appendtoks
@@ -75,9 +85,9 @@
\setsystemmode{suffix-\m_system_job_suffix}%
\to \everysetupsystem
-\appendtoks
- \ctxcommand{updatefilenames("\inputfilename","\outputfilename")}%
-\to \everysetupsystem
+% \appendtoks
+% \ctxcommand{updatefilenames("\jobname","\inputfilename","\outputfilename")}%
+% \to \everysetupsystem
% Some mechanisms (see x-res-01) use either \jobfilename or
% \jobfilename.somesuffix, in which case we need to use the
@@ -104,6 +114,10 @@
%D \NC \type{\operatingsystem} \NC \operatingsystem \NC \NR
%D \stoptabulate
+\appendtoks
+ \edef\outputresolution{\directsystemparameter\c!resolution}%
+\to \everysetupsystem
+
%D The system modes set by the setup command can be used in
%D situations like:
%D
@@ -138,9 +152,9 @@
[\c!directory=,
\c!n=0, % 0:unknown 1: one run 2: first 3: successive 4: final run
\c!resolution=600,% in dpi, no unit in mkiv
- %c!random=, % obsolete here
- \c!file=\jobname,
- \c!inputfile=\outputfilename,
+ % \c!random=, % obsolete here
+ % \c!file=\jobname,
+ % \c!inputfile=\outputfilename,
\c!type=unix, % windows is normally less sensitive to handle
\c!bodyfont=\normalizedlocalbodyfontsize] % of iets anders
@@ -150,14 +164,34 @@
%D But anyway, in \MKIV\ we avoid most of the complications anyway as we
%D deal with much at the \LUA\ end.
-\def\complexstart[#1]{\bgroup\getvalue{\e!start#1}}
-\def\complexstop [#1]{\getvalue{\e!stop #1}\egroup}
-
-\let\simplestart\bgroup
-\let\simplestop \egroup
-
-\definecomplexorsimple\start
-\definecomplexorsimple\stop
+\unexpanded\def\start
+ {\dosingleempty\syst_start}
+
+\def\syst_start
+ {\bgroup
+ \iffirstargument
+ \expandafter\syst_start_yes
+ \else
+ \expandafter\syst_start_nop
+ \fi}
+
+\def\syst_start_yes[#1]%
+ {\edef\m_syst_start_stop{#1}%
+ \ifx\m_syst_start_stop\empty
+ \let\syst_stop_indeed\donothing
+ \else\ifcsname\e!start\m_syst_start_stop\endcsname
+ \expandafter\let\expandafter\syst_stop_indeed\csname\e!stop\m_syst_start_stop\endcsname
+ \csname\e!start\m_syst_start_stop\expandafter\expandafter\expandafter\endcsname
+ \else
+ \let\syst_stop_indeed\donothing
+ \fi\fi}
+
+\def\syst_start_nop[#1]%
+ {\let\syst_stop_indeed\donothing}
+
+\unexpanded\def\stop
+ {\syst_stop_indeed
+ \egroup}
% \c!before \c!after \c!inbetween \c!commands \c!style \c!color
@@ -194,6 +228,12 @@
\dostoptagged
\startstopparameter\c!inbetween}}
+% \unexpanded\def\ignorestartstop[#1]%
+% {\unexpanded\expandafter\def\csname\e!start#1\expandafter\endcsname\expandafter
+% {\expandafter\gobbleuntil\csname\e!stop#1\endcsname}}
+%
+% \ignorestartstop[bagger]
+
\installcorenamespace{highlight}
\installcommandhandler \??highlight {highlight} \??highlight % we could do with less
diff --git a/Master/texmf-dist/tex/context/base/core-two.lua b/Master/texmf-dist/tex/context/base/core-two.lua
index f94b102bced..d6e006e04dd 100644
--- a/Master/texmf-dist/tex/context/base/core-two.lua
+++ b/Master/texmf-dist/tex/context/base/core-two.lua
@@ -153,5 +153,5 @@ commands.savetwopassdata = jobpasses.save
commands.savetaggedtwopassdata = jobpasses.savetagged
function commands.doifelseintwopassdata(id,str)
- commands.testcase(inlist(id,str))
+ commands.doifelse(inlist(id,str))
end
diff --git a/Master/texmf-dist/tex/context/base/core-uti.lua b/Master/texmf-dist/tex/context/base/core-uti.lua
index 1657a75e26d..96ccdca4809 100644
--- a/Master/texmf-dist/tex/context/base/core-uti.lua
+++ b/Master/texmf-dist/tex/context/base/core-uti.lua
@@ -20,21 +20,23 @@ saves much runtime but at the cost of more memory usage.</p>
local format, match = string.format, string.match
local next, type, tostring = next, type, tostring
local concat = table.concat
+local texcount = tex.count
-local definetable = utilities.tables.definetable
-local accesstable = utilities.tables.accesstable
-local migratetable = utilities.tables.migratetable
-local serialize = table.serialize
-local packers = utilities.packers
-local allocate = utilities.storage.allocate
-local mark = utilities.storage.mark
+local definetable = utilities.tables.definetable
+local accesstable = utilities.tables.accesstable
+local migratetable = utilities.tables.migratetable
+local serialize = table.serialize
+local packers = utilities.packers
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
-local report_jobcontrol = logs.reporter("jobcontrol")
+local report_passes = logs.reporter("job","passes")
-job = job or { }
-local job = job
+job = job or { }
+local job = job
-job.version = 1.19
+job.version = 1.22 -- make sure we don't have old lua 5.1 hash leftovers
+job.packversion = 1.02 -- make sure we don't have old lua 5.1 hash leftovers
-- some day we will implement loading of other jobs and then we need
-- job.jobs
@@ -47,11 +49,11 @@ directly access the variable using a <l n='lua'/> call.</p>
local savelist, comment = { }, { }
-function job.comment(str)
- comment[#comment+1] = str
+function job.comment(key,value)
+ comment[key] = value
end
-job.comment(format("version: %1.2f",job.version))
+job.comment("version",job.version)
local enabled = true
@@ -131,9 +133,10 @@ local packlist = {
-- "references", -- we need to rename of them as only one packs (not structures.lists.references)
}
-local jobpacker = packers.new(packlist,1.01)
+local jobpacker = packers.new(packlist,job.packversion) -- bump the number when the hash changes
job.pack = true
+-- job.pack = false
directives.register("job.pack",function(v) pack = v end)
@@ -143,12 +146,8 @@ function job.save(filename) -- we could return a table but it can get pretty lar
statistics.starttiming(_save_)
local f = io.open(filename,'w')
if f then
- for c=1,#comment do
- f:write("-- ",comment[c],"\n")
- end
- f:write("\n")
- f:write("local utilitydata = { }\n")
- f:write("\n")
+ f:write("local utilitydata = { }\n\n")
+ f:write(serialize(comment,"utilitydata.comment",true,true),"\n\n")
for l=1,#savelist do
local list = savelist[l]
local target = format("utilitydata.%s",list[1])
@@ -160,30 +159,35 @@ function job.save(filename) -- we could return a table but it can get pretty lar
if job.pack then
packers.pack(data,jobpacker,true)
end
- f:write(definetable(target),"\n")
- f:write(serialize(data,target,true,true),"\n")
+ local definer, name = definetable(target,true,true) -- no first and no last
+ f:write(definer,"\n\n",serialize(data,name,true,true),"\n\n")
end
if job.pack then
packers.strip(jobpacker)
- f:write(serialize(jobpacker,"utilitydata.job.packed",true,true),"\n")
+ f:write(serialize(jobpacker,"utilitydata.job.packed",true,true),"\n\n")
end
- f:write("\n")
- f:write("return utilitydata\n")
- f:write("\n")
+ f:write("return utilitydata")
f:close()
end
statistics.stoptiming(_save_)
end
local function load(filename)
- local data = io.loaddata(filename)
- if data and data ~= "" then
- local version = tonumber(match(data,"^-- version: ([%d%.]+)"))
- if version ~= job.version then
- report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", job.version)
+ if lfs.isfile(filename) then
+ local okay, data = pcall(dofile,filename)
+ if okay and type(data) == "table" then
+ local jobversion = job.version
+ local datacomment = data.comment
+ local dataversion = datacomment and datacomment.version or "?"
+ if dataversion ~= jobversion then
+ report_passes("version mismatch: %s <> %s",dataversion,jobversion)
+ else
+ return data
+ end
else
- local data = loadstring(data)
- return data and data()
+ os.remove(filename) -- probably a bad file
+ report_passes("removing stale job data file %a, restart job",filename)
+ os.exit(true) -- trigger second run
end
end
end
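To make the reworked save/load pairing easier to follow: job.save now writes an executable Lua file that returns a table, and load() simply dofile()s it and checks the version stored in the comment subtable (instead of grepping a header line). A rough sketch of the resulting utility file, with made-up values:

    local utilitydata = { }

    utilitydata.comment = {
     ["file"]    = "myfile",
     ["format"]  = "cont-en",
     ["stamp"]   = "2013.03.29 12:00",
     ["version"] = 1.22,
    }

    -- ... one definer plus serialized table per savelist entry, and
    -- utilitydata.job.packed when packing is enabled ...

    return utilitydata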
@@ -198,10 +202,14 @@ function job.load(filename)
local target = list[1]
local initializer = list[3]
local result = accesstable(target,utilitydata)
- packers.unpack(result,jobpacker,true)
- migratetable(target,mark(result))
- if type(initializer) == "function" then
- initializer(result)
+ local done = packers.unpack(result,jobpacker,true)
+ if done then
+ migratetable(target,mark(result))
+ if type(initializer) == "function" then
+ initializer(result)
+ end
+ else
+ report_passes("pack version mismatch")
end
end
end
@@ -219,8 +227,10 @@ function job.loadother(filename)
local list = savelist[l]
local target = list[1]
local result = accesstable(target,utilitydata)
- packers.unpack(result,jobpacker,true)
- migratetable(target,result,unpacked)
+ local done = packers.unpack(result,jobpacker,true)
+ if done then
+ migratetable(target,result,unpacked)
+ end
end
unpacked.job.packed = nil -- nicer in inspecting
return unpacked
@@ -252,7 +262,7 @@ end)
statistics.register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- local pages = tex.count['realpageno'] - 1
+ local pages = texcount['realpageno'] - 1
if pages > 1 then
return format("direct: %s, indirect: %s, total: %s (%i per page)", total-indirect, indirect, total, total/pages)
else
@@ -268,8 +278,11 @@ end)
function statistics.formatruntime(runtime)
if not environment.initex then -- else error when testing as not counters yet
- local shipped = tex.count['nofshipouts']
- local pages = tex.count['realpageno'] - 1
+ local shipped = texcount['nofshipouts']
+ local pages = texcount['realpageno']
+ if pages > shipped then
+ pages = shipped
+ end
if shipped > 0 or pages > 0 then
local persecond = shipped / runtime
if pages == 0 then pages = shipped end
diff --git a/Master/texmf-dist/tex/context/base/core-uti.mkiv b/Master/texmf-dist/tex/context/base/core-uti.mkiv
index 9a783f78a35..da4a98fb713 100644
--- a/Master/texmf-dist/tex/context/base/core-uti.mkiv
+++ b/Master/texmf-dist/tex/context/base/core-uti.mkiv
@@ -22,10 +22,10 @@
\appendtoks
\ctxlua {
- job.comment("file: \jobname")
- job.comment("format: \contextformat")
- job.comment("stamp: \contextversion")
- job.comment("escape: \!!bs\space...\space\!!es")
+ job.comment("file","\jobname")
+ job.comment("format","\contextformat")
+ job.comment("stamp","\contextversion")
+ job.comment("escape","\!!bs\space...\space\!!es")
}%
\to \everystarttext
diff --git a/Master/texmf-dist/tex/context/base/core-var.mkiv b/Master/texmf-dist/tex/context/base/core-var.mkiv
deleted file mode 100644
index 26559f2467f..00000000000
--- a/Master/texmf-dist/tex/context/base/core-var.mkiv
+++ /dev/null
@@ -1,239 +0,0 @@
-%D \module
-%D [ file=core-var,
-%D version=1998.02.21,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Variables,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Core Macros / Variables}
-
-\unprotect
-
-%D Much of this will move to *-ini files.
-
-%D We introduce a couple of variables that are used all over
-%D \CONTEXT. Alternatively we could define them in each module
-%D but as they are part of the bigger picture we prefer to do
-%D it here.
-
-%D \macros
-%D {every...}
-%D
-%D A few every's. Some are only used in \MKII\ or \MKIV.
-
-%D Output routine:
-
-\newtoks \everybeforeoutput
-\newtoks \everyafteroutput
-
-%D Shipout:
-
-\newtoks \everyshipout
-\newtoks \everybeforeshipout
-\newtoks \everyaftershipout
-\newtoks \everyfirstshipout
-\newtoks \everylastshipout
-
-%D End of run:
-
-\newtoks \everybye
-\newtoks \everygoodbye
-\newtoks \everynotabene
-
-%D Document
-
-\newtoks \everysetupdocument
-\newtoks \everyendoftextbody
-
-\newtoks \everystarttext
-\newtoks \everystoptext
-
-%D Purity:
-
-\newtoks \everyforgetall
-\newtoks \everycleanupfeatures
-
-\def\cleanupfeatures{\the\everycleanupfeatures}
-\def\forgetall {\the\everyforgetall}
-
-%D Page building:
-
-\newtoks \everybeforepagebody
-\newtoks \everyafterpagebody
-
-\let \everypagebody \everybeforepagebody % backward compatible, will become obsolete
-
-%D Floats:
-
-\newtoks \everyinsidefloat
-
-%D Sectioning:
-
-\newtoks \everyheadstart
-
-%D Par building (experimental, used in xml <p> .. </p>)
-
-\newtoks \everybeginofpar
-\newtoks \everyendofpar
-%newtoks \everyparflush
-
-\unexpanded\def\bpar{\dostarttagged\t!paragraph\empty\the\everybeginofpar\ignorespaces} % may interfere with \everypar
-\unexpanded\def\epar{\ifhmode\removeunwantedspaces\the\everyendofpar\fi\dostoptagged } % test prevents problems with \bpar\epar
-
-%D Lists:
-
-\newtoks \everylistentry
-\newtoks \everysavesortkeys
-
-%D Marks:
-
-\newtoks \everymarking
-
-%D Fonts:
-
-\newtoks \everyfont
-\newtoks \everyglobalbodyfont
-\newtoks \everydefinedfont
-
-\newevery \everybodyfont \EveryBodyFont
-\newevery \everyfontswitch \EveryFontSwitch
-
-\newtoks \everysetupbodyfont
-\newtoks \everyswitchtobodyfont
-
-%D Math:
-
-\newtoks \everybeforedisplayformula
-\newtoks \everymathematics
-
-\prependtoks \the\everymathematics \to \everymath
-\prependtoks \the\everymathematics \to \everydisplay
-
-%D Tables
-
-\newtoks \everytable
-
-%D State mess:
-
-\newtoks \everypushsomestate
-\newtoks \everypopsomestate
-
-\def\pushsomestates{\the\everypushsomestate}
-\def\popsomestates {\the\everypopsomestate }
-
-%D More generic (used to be pushcolor etc)
-
-\newtoks\everystarttextproperties
-\newtoks\everystoptextproperties
-
-\unexpanded\def\starttextproperties{\the\everystarttextproperties}
-\unexpanded\def\stoptextproperties {\the\everystoptextproperties}
-
-%D \macros
-%D {trialtypesetting}
-%D
-%D We disable trial typesetting in the output routine,
-%D just to be sure.
-
-\prependtoks \resettrialtypesetting \to \everybeforepagebody
-
-%D \macros
-%D {ifinpagebody,ifinsidecolumns,ifdoublesided,ifsinglesided}
-
-\newif \ifinpagebody
-\newif \ifinsidecolumns
-\newif \ifdoublesided \doublesidedfalse
-\newif \ifsinglesided \singlesidedtrue
-\newif \ifinsidefloat
-\newif \ifdoingblocks
-\newif \ifgridsnapping
-
-%D \macros
-%D {ifprocessingXML}
-%D
-%D We need this one even if no \XML\ is supported.
-
-% \newif\ifprocessingXML % old way
-
-%D \macros
-%D {ifproductionrun}
-%D
-%D This boolean can be used to bypass certain
-%D initializations.
-
-\newif\ifproductionrun \appendtoks \productionruntrue \to \everydump
-
-%D \macros
-%D {everyboxedcontent, ifboxedcontent,
-%D startboxedcontent, stopboxedcontent}
-%D
-%D This one is relatively new and will be used as a more
-%D robust test for inner situations.
-
-\newif \ifboxedcontent
-\newtoks\everyboxedcontent
-
-\appendtoks \boxedcontenttrue \to \everyboxedcontent
-
-\unexpanded\def\startboxedcontent{\bgroup\the\everyboxedcontent}
-\let\stopboxedcontent \egroup
-
-%D \macros
-%D {fastmode,silentmode}
-%D
-%D These commands are obsolete.
-
-\let\fastmode \relax
-\let\silentmode\relax
-
-%D \macros
-%D {defineselector,setupselector}
-%D
-%D \starttyping
-%D \defineselector[caption][max=2,n=2]
-%D
-%D \start
-%D \setupselector[caption][n=1]
-%D \placelist[figure][criterium=all]
-%D \stop
-%D
-%D \starttext
-%D \placefigure
-%D {\select{caption}{zapf}{\input zapf \relax}}
-%D {}
-%D \stoptext
-%D \stoptyping
-
-\unexpanded\def\defineselector{\dodoubleargument\dodefineselector}
-\unexpanded\def\setupselector {\dodoubleargument\dosetupselector}
-
-\def\dodefineselector[#1][#2]{\getparameters[\??sx#1][\c!max=2,\c!n=1,#2]}
-\def\dosetupselector [#1][#2]{\getparameters[\??sx#1][#2]}
-
-\unexpanded\def\select#1%
- {\filterfromnext
- {\executeifdefined{\??sx#1\c!max}1}
- {\executeifdefined{\??sx#1\c!n }1}}
-
-%D We store some original meanings, maybe in \type
-%D {math-ini}.
-
-\let\normalat \at
-\let\normalin \in
-\let\normalfrom \from
-%let\normalover \over
-\let\normalabout\about
-
-%D Add-ons:
-
-\let\setlayoutcomponentattribute \gobbleoneargument
-\let\resetlayoutcomponentattribute\relax
-\let\layoutcomponentboxattribute \empty
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/data-aux.lua b/Master/texmf-dist/tex/context/base/data-aux.lua
index 058033afe09..b969e607055 100644
--- a/Master/texmf-dist/tex/context/base/data-aux.lua
+++ b/Master/texmf-dist/tex/context/base/data-aux.lua
@@ -20,7 +20,7 @@ function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.cleanpath(oldname)
if trace_locating then
- report_scripts("to be replaced old script %s", oldscript)
+ report_scripts("to be replaced old script %a", oldscript)
end
local newscripts = resolvers.findfiles(newname) or { }
if #newscripts == 0 then
@@ -31,7 +31,7 @@ function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per
for i=1,#newscripts do
local newscript = resolvers.cleanpath(newscripts[i])
if trace_locating then
- report_scripts("checking new script %s", newscript)
+ report_scripts("checking new script %a", newscript)
end
if oldscript == newscript then
if trace_locating then
@@ -39,7 +39,7 @@ function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- report_scripts("new script should come from %s",scriptpath)
+ report_scripts("new script should come from %a",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
diff --git a/Master/texmf-dist/tex/context/base/data-con.lua b/Master/texmf-dist/tex/context/base/data-con.lua
index ed4f2dec093..354c6807d9f 100644
--- a/Master/texmf-dist/tex/context/base/data-con.lua
+++ b/Master/texmf-dist/tex/context/base/data-con.lua
@@ -31,12 +31,6 @@ containers.usecache = true
local report_containers = logs.reporter("resolvers","containers")
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
local allocated = { }
local mt = {
@@ -99,13 +93,17 @@ function containers.read(container,name)
if not stored and container.enabled and caches and containers.usecache then
stored = caches.loaddata(container.readables,name)
if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
else
stored = nil
end
storage[name] = stored
elseif stored then
- report(container,"reusing",name)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
end
return stored
end
@@ -117,10 +115,14 @@ function containers.write(container, name, data)
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
caches.savedata(container.writable, name, data)
- report(container,"saved",name)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
data.unique, data.shared = unique, shared
end
- report(container,"stored",name)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
container.storage[name] = data
end
return data
diff --git a/Master/texmf-dist/tex/context/base/data-env.lua b/Master/texmf-dist/tex/context/base/data-env.lua
index 2d9787b91bb..2ee25120e9c 100644
--- a/Master/texmf-dist/tex/context/base/data-env.lua
+++ b/Master/texmf-dist/tex/context/base/data-env.lua
@@ -12,7 +12,7 @@ local resolvers = resolvers
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local formats = allocate()
local suffixes = allocate()
@@ -24,6 +24,8 @@ resolvers.suffixes = suffixes
resolvers.dangerous = dangerous
resolvers.suffixmap = suffixmap
+local luasuffixes = utilities.lua.suffixes
+
local relations = allocate { -- todo: handlers also here
core = {
ofm = { -- will become obsolete
@@ -109,7 +111,7 @@ local relations = allocate { -- todo: handlers also here
lua = {
names = { "lua" },
variable = 'LUAINPUTS',
- suffixes = { 'lua', 'luc', 'tma', 'tmc' },
+ suffixes = { luasuffixes.lua, luasuffixes.luc, luasuffixes.tma, luasuffixes.tmc },
},
lib = {
names = { "lib" },
@@ -268,7 +270,7 @@ function resolvers.formatofvariable(str)
end
function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+ return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
end
function resolvers.variableofformat(str)
@@ -280,7 +282,7 @@ function resolvers.variableofformatorsuffix(str)
if v then
return v
end
- v = suffixmap[fileextname(str)]
+ v = suffixmap[suffixonly(str)]
if v then
return formats[v]
end
diff --git a/Master/texmf-dist/tex/context/base/data-exp.lua b/Master/texmf-dist/tex/context/base/data-exp.lua
index 66bbb56cba5..90659e57e7f 100644
--- a/Master/texmf-dist/tex/context/base/data-exp.lua
+++ b/Master/texmf-dist/tex/context/base/data-exp.lua
@@ -85,7 +85,7 @@ local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
if trace_expansions then
- report_expansions("expanding variable '%s'",str)
+ report_expansions("expanding variable %a",str)
end
local t, ok, done = newlist or { }, false, false
local n = #t
@@ -114,12 +114,14 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
if trace_expansions then
@@ -133,7 +135,7 @@ end
-- We could make the previous one public.
local function validate(s)
- s = collapsepath(s) -- already keeps the //
+ s = collapsepath(s) -- already keeps the trailing / and //
return s ~= "" and not find(s,"^!*unset/*$") and s
end
@@ -244,7 +246,7 @@ local function splitconfigurationpath(str) -- beware, this can be either a path
end
end
if trace_expansions then
- report_expansions("splitting path specification '%s'",str)
+ report_expansions("splitting path specification %a",str)
for k=1,noffound do
report_expansions("% 4i: %s",k,found[k])
end
@@ -362,13 +364,13 @@ function resolvers.scanfiles(path,branch,usecache)
local files = fullcache[realpath]
if files then
if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
end
return files
end
end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ report_expansions("scanning path %a, branch %a",path,branch or path)
end
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
files.__path__ = path -- can be selfautoparent:texmf-whatever
@@ -434,13 +436,13 @@ function resolvers.simplescanfiles(path,branch,usecache)
end
if files then
if trace_locating then
- report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ report_expansions("using caches scan of path %a, branch %a",path,branch or path)
end
return files
end
end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ report_expansions("scanning path %a, branch %a",path,branch or path)
end
local files = simplescan({ },realpath .. '/',"")
if trace_locating then
diff --git a/Master/texmf-dist/tex/context/base/data-fil.lua b/Master/texmf-dist/tex/context/base/data-fil.lua
index 6eb29ac3228..09129e03ce8 100644
--- a/Master/texmf-dist/tex/context/base/data-fil.lua
+++ b/Master/texmf-dist/tex/context/base/data-fil.lua
@@ -22,11 +22,11 @@ function locators.file(specification)
local realname = resolvers.resolve(name) -- no shortcut
if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_files("file locator '%s' found as '%s'",name,realname)
+ report_files("file locator %a found as %a",name,realname)
end
resolvers.appendhash('file',name,true) -- cache
elseif trace_locating then
- report_files("file locator '%s' not found",name)
+ report_files("file locator %a not found",name)
end
end
@@ -50,12 +50,12 @@ function finders.file(specification,filetype)
local foundname = resolvers.findfile(filename,filetype)
if foundname and foundname ~= "" then
if trace_locating then
- report_files("file finder: '%s' found",filename)
+ report_files("file finder: %a found",filename)
end
return foundname
else
if trace_locating then
- report_files("file finder: %s' not found",filename)
+ report_files("file finder: %a not found",filename)
end
return finders.notfound()
end
@@ -76,13 +76,13 @@ function openers.file(specification,filetype)
local f = io.open(filename,"r")
if f then
if trace_locating then
- report_files("file opener, '%s' opened",filename)
+ report_files("file opener: %a opened",filename)
end
return openers.helpers.textopener("file",filename,f)
end
end
if trace_locating then
- report_files("file opener, '%s' not found",filename)
+ report_files("file opener: %a not found",filename)
end
return openers.notfound()
end
@@ -94,9 +94,9 @@ function loaders.file(specification,filetype)
if f then
logs.show_load(filename)
if trace_locating then
- report_files("file loader, '%s' loaded",filename)
+ report_files("file loader: %a loaded",filename)
end
- local s = f:read("*a")
+ local s = f:read("*a") -- io.readall(f) is faster but we never have large files here
if checkgarbage then
checkgarbage(#s)
end
@@ -107,7 +107,7 @@ function loaders.file(specification,filetype)
end
end
if trace_locating then
- report_files("file loader, '%s' not found",filename)
+ report_files("file loader: %a not found",filename)
end
return loaders.notfound()
end
diff --git a/Master/texmf-dist/tex/context/base/data-ini.lua b/Master/texmf-dist/tex/context/base/data-ini.lua
index 2c263aada22..201c6a2d766 100644
--- a/Master/texmf-dist/tex/context/base/data-ini.lua
+++ b/Master/texmf-dist/tex/context/base/data-ini.lua
@@ -7,10 +7,9 @@ if not modules then modules = { } end modules ['data-ini'] = {
}
local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
-local concat = table.concat
local next, type = next, type
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -33,7 +32,7 @@ local resolvers = resolvers
texconfig.kpse_init = false
texconfig.shell_escape = 't'
-if kpse and kpse.default_texmfcnf then
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
local default_texmfcnf = kpse.default_texmfcnf()
-- looks more like context:
default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
@@ -98,6 +97,10 @@ do
local args = environment.originalarguments or arg -- this needs a cleanup
+ if not environment.ownmain then
+ environment.ownmain = status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+
local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
local ownpath = environment.ownpath or os.selfdir
@@ -132,13 +135,13 @@ do
if lfs.chdir(p) then
local pp = lfs.currentdir()
if trace_locating and p ~= pp then
- report_initialization("following symlink '%s' to '%s'",p,pp)
+ report_initialization("following symlink %a to %a",p,pp)
end
ownpath = pp
lfs.chdir(olddir)
else
if trace_locating then
- report_initialization("unable to check path '%s'",p)
+ report_initialization("unable to check path %a",p)
end
ownpath = p
end
@@ -149,9 +152,9 @@ do
end
if not ownpath or ownpath == "" then
ownpath = "."
- report_initialization("forcing fallback ownpath .")
+ report_initialization("forcing fallback to ownpath %a",ownpath)
elseif trace_locating then
- report_initialization("using ownpath '%s'",ownpath)
+ report_initialization("using ownpath %a",ownpath)
end
end
@@ -214,23 +217,12 @@ end
environment.texroot = file.collapsepath(texroot)
--- Tracing. Todo ...
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
+if profiler then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
end
-resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-
--- todo:
-
--- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
--- profiler.start("luatex-profile.log")
--- end
-
-- a forward definition
if not resolvers.resolve then
diff --git a/Master/texmf-dist/tex/context/base/data-lst.lua b/Master/texmf-dist/tex/context/base/data-lst.lua
index c86e58fd82a..8996fa251ed 100644
--- a/Master/texmf-dist/tex/context/base/data-lst.lua
+++ b/Master/texmf-dist/tex/context/base/data-lst.lua
@@ -57,20 +57,21 @@ function resolvers.listers.variables(pattern)
instance.expansions = fastcopy(exp)
end
-function resolvers.listers.configurations(report)
+local report_resolved = logs.reporter("system","resolved")
+
+function resolvers.listers.configurations()
local configurations = resolvers.instance.specification
- local report = report or texio.write_nl
for i=1,#configurations do
- report(format("file : %s",resolvers.resolve(configurations[i])))
+ report_resolved("file : %s",resolvers.resolve(configurations[i]))
end
- report("")
+ report_resolved("")
local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
for i=1,#list do
local li = resolvers.resolve(list[i])
if lfs.isdir(li) then
- report(format("path - %s",li))
+ report_resolved("path - %s",li)
else
- report(format("path + %s",li))
+ report_resolved("path + %s",li)
end
end
end
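For reference, a hedged sketch of what the rewritten lister now emits through its own reporter instead of raw texio.write_nl calls (paths are made up; the reporter prefix depends on the logging setup):

    resolvers.listers.configurations()
    -- file : /usr/local/texlive/texmf-dist/web2c/texmfcnf.lua
    -- path - /home/user/texmf                  (entry that is an existing directory)
    -- path + /usr/local/texlive/texmf-project  (entry that is not)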
diff --git a/Master/texmf-dist/tex/context/base/data-lua.lua b/Master/texmf-dist/tex/context/base/data-lua.lua
index 906a611ee16..7e3d92585ab 100644
--- a/Master/texmf-dist/tex/context/base/data-lua.lua
+++ b/Master/texmf-dist/tex/context/base/data-lua.lua
@@ -6,167 +6,154 @@ if not modules then modules = { } end modules ['data-lua'] = {
license = "see context related readme files"
}
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
+-- This is now a plug in into l-lua (as we also use the extra paths elsewhere).
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local resolvers, package = resolvers, package
-local report_libraries = logs.reporter("resolvers","libraries")
+local gsub = string.gsub
+local concat = table.concat
+local addsuffix = file.addsuffix
-local gsub, insert = string.gsub, table.insert
-local unpack = unpack or table.unpack
+local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
-local resolvers, package = resolvers, package
+local libsuffixes = { 'tex', 'lua' }
+local clibsuffixes = { 'lib' }
+local libformats = { 'TEXINPUTS', 'LUAINPUTS' }
+local clibformats = { 'CLUAINPUTS' }
+local helpers = package.helpers
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
+trackers.register("resolvers.libraries", function(v) helpers.trace = v end)
+trackers.register("resolvers.locating", function(v) helpers.trace = v end)
-local _path_, libpaths, _cpath_, clibpaths
+helpers.report = logs.reporter("resolvers","libraries")
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
- end
- return libpaths
-end
+local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0)
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
- end
- return clibpaths
+local function cleanpath(path) -- hm, don't we have a helper for this?
+ return resolvers.resolve(lpegmatch(pattern,path))
end
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
- end
- return path
-end
+helpers.cleanpath = cleanpath
-local p_libpaths, a_libpaths = { }, { }
+local loadedaslib = helpers.loadedaslib
+local loadedbylua = helpers.loadedbylua
+local loadedbypath = helpers.loadedbypath
+local notloaded = helpers.notloaded
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
-end
+local getlibpaths = package.libpaths
+local getclibpaths = package.clibpaths
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
+function helpers.libpaths(libhash)
+ local libpaths = { }
+ for i=1,#libformats do
+ local paths = resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1] = path
+ libhash[path] = true
+ end
+ end
+ end
+ return libpaths
end
--- beware, we need to return a loadfile result !
-
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
+function helpers.clibpaths(clibhash)
+ local clibpaths = { }
+ for i=1,#clibformats do
+ local paths = resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1] = path
+ clibhash[path] = true
end
- return loadfile(resolved)
end
end
+ return clibpaths
end
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ local trace = helpers.trace
+ local report = helpers.report
+ if trace then
+ report("locating %a as %a using formats %a",rawname,name,suffixes)
end
- for i=1,#libformats do
- local format = libformats[i]
+ for i=1,#suffixes do -- so we use findfile and not a lookup loop
+ local format = suffixes[i]
local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
+ if trace then
+ report("checking %a using format %a",name,format)
end
if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
+ if trace then
+ report("lib %a located on %a",name,resolved)
+ end
+ if islib then
+ return true, loadedaslib(resolved,rawname)
+ else
+ return true, loadfile(resolved)
end
- return loadfile(resolved)
end
end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
+end
+
+helpers.loadedbyformat = loadedbyformat
+
+-- alternatively we could set the package.searchers
+
+local pattern = Cs((((1-S("\\/"))^0 * (S("\\/")^1/"/"))^0 * (P(".")^1/"/"+P(1))^1) * -1)
+
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+
+helpers.lualibfile = lualibfile
+
+-- print(lualibfile("bar"))
+-- print(lualibfile("foo.bar"))
+-- print(lualibfile("crap/foo...bar"))
+-- print(lualibfile("crap//foo.bar"))
+-- print(lualibfile("crap/../foo.bar"))
+-- print(lualibfile("crap/.././foo.bar"))
+
+-- alternatively we could split in path and base and temporary set the libpath to path
+
+function helpers.loaded(name)
+ local thename = lualibfile(name)
+ local luaname = addsuffix(thename,"lua")
+ local libname = addsuffix(thename,os.libsuffix)
+ local libpaths = getlibpaths()
+ local clibpaths = getclibpaths()
+ local done, result = loadedbyformat(luaname,name,libsuffixes,false)
+ if done then
+ return result
end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
- end
- end
+ local done, result = loadedbyformat(libname,name,clibsuffixes,true)
+ if done then
+ return result
end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
- end
- return package.loadlib(resolved,name)
- end
+ local done, result = loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
+ local done, result = loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
- end
- return loadfile(resolved)
+ local done, result = loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
+ local done, result = loadedbylua(name)
+ if done then
+ return result
end
--- return "unable to locate " .. name
+ return notloaded(name)
end
-resolvers.loadlualib = require
-
--- -- -- --
+-- package.searchers[3] = nil -- get rid of the built in one (done in l-lua)
-package.obsolete = package.obsolete or { }
+-- package.extraclibpath(environment.ownpath)
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
-
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib = require
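Since the new loader is spread over several helpers, a compressed, purely descriptive summary of the lookup order that helpers.loaded implements, assuming the l-lua helpers behave as their names suggest:

    -- require("foo.bar") is first normalized by lualibfile to "foo/bar",
    -- then the following are tried in order:
    --   1. foo/bar.lua            via findfile over the 'tex'/'lua' formats (loadedbyformat, loadfile)
    --   2. foo/bar.<os.libsuffix> via findfile over the 'lib' format        (loadedbyformat, loadedaslib)
    --   3. foo/bar.lua            along package.libpaths() and clibpaths()  (loadedbypath)
    --   4. foo/bar.<os.libsuffix> along package.clibpaths()                 (loadedbypath, as lib)
    --   5. the regular lua loader                                           (loadedbylua)
    -- and finally notloaded("foo.bar") when everything fails.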
diff --git a/Master/texmf-dist/tex/context/base/data-met.lua b/Master/texmf-dist/tex/context/base/data-met.lua
index 5437d9a294f..28c73e46037 100644
--- a/Master/texmf-dist/tex/context/base/data-met.lua
+++ b/Master/texmf-dist/tex/context/base/data-met.lua
@@ -59,41 +59,41 @@ local function methodhandler(what,first,...) -- filename can be nil or false
local resolver = namespace and namespace[scheme]
if resolver then
if trace_methods then
- report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
end
return resolver(specification,...)
else
resolver = namespace.default or namespace.file
if resolver then
if trace_methods then
- report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
end
return resolver(specification,...)
elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, no handler",what,how)
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
end
end
elseif how == "tag" then
local resolver = namespace and namespace[first]
if resolver then
if trace_methods then
- report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
end
return resolver(...)
else
resolver = namespace.default or namespace.file
if resolver then
if trace_methods then
- report_methods("resolver: method=%s, how=%s, default",what,how)
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
end
return resolver(...)
elseif trace_methods then
- report_methods("resolver: method=%s, how=%s, unknown",what,how)
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
end
end
end
else
- report_methods("resolver: method=%s, unknown",what)
+ report_methods("resolving, invalid method %a")
end
end
diff --git a/Master/texmf-dist/tex/context/base/data-pre.lua b/Master/texmf-dist/tex/context/base/data-pre.lua
index 5b25c5f874d..e48a5aa8cee 100644
--- a/Master/texmf-dist/tex/context/base/data-pre.lua
+++ b/Master/texmf-dist/tex/context/base/data-pre.lua
@@ -19,10 +19,11 @@ local resolvers = resolvers
local prefixes = utilities.storage.allocate()
resolvers.prefixes = prefixes
-local gsub = string.gsub
local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local P, S, R, C, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.match
+local joinpath, basename, dirname = file.join, file.basename, file.dirname
+local getmetatable, rawset, type = getmetatable, rawset, type
-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
@@ -64,28 +65,47 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
+ return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
+ return cleanpath(dirname((fullname ~= "" and fullname) or str))
end
prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
end
prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
end
prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
end
prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+
+local function toppath()
+ local inputstack = resolvers.inputstack -- dependency, actually the code should move but it's
+ if not inputstack then -- more convenient to keep it here
+ return "."
+ end
+ local pathname = dirname(inputstack[#inputstack] or "")
+ if pathname == "" then
+ return "."
+ else
+ return pathname
+ end
+end
+
+resolvers.toppath = toppath
+
+prefixes.toppath = function(str)
+ return cleanpath(joinpath(toppath(),str))
end
prefixes.env = prefixes.environment
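A short illustrative note on the new toppath prefix (file names are made up): it resolves against the directory of the file currently on the input stack rather than against the working directory, so

    -- while chapters/one.tex is being read:
    --   resolvers.resolve("toppath:images/logo.pdf")
    --   -> "chapters/images/logo.pdf" (after cleanpath)
    -- with an empty input stack it falls back to ".".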
@@ -121,6 +141,30 @@ function resolvers.resetresolve(str)
resolved, abstract = { }, { }
end
+-- todo: use an lpeg (see data-lua for !! / stripper)
+
+-- local function resolve(str) -- use schemes, this one is then for the commandline only
+-- if type(str) == "table" then
+-- local t = { }
+-- for i=1,#str do
+-- t[i] = resolve(str[i])
+-- end
+-- return t
+-- else
+-- local res = resolved[str]
+-- if not res then
+-- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
+-- resolved[str] = res
+-- abstract[res] = str
+-- end
+-- return res
+-- end
+-- end
+
+-- home:xx;selfautoparent:xx;
+
+local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0)
+
local function resolve(str) -- use schemes, this one is then for the commandline only
if type(str) == "table" then
local t = { }
@@ -131,7 +175,7 @@ local function resolve(str) -- use schemes, this one is then for the commandline
else
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
+ res = lpegmatch(pattern,str)
resolved[str] = res
abstract[res] = str
end
@@ -146,7 +190,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -158,11 +202,17 @@ end
if os.type == "unix" then
+ -- We need to distinguish between a prefix and something else: so we
+ -- have a special repath variant for linux. Also, when a new prefix is
+ -- defined, we need to remake the matcher.
+
local pattern
local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
local colon = P(":")
- local p
for k, v in table.sortedpairs(prefixes) do
if p then
p = P(k) + p
@@ -171,9 +221,6 @@ if os.type == "unix" then
end
end
pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
end
makepattern()
diff --git a/Master/texmf-dist/tex/context/base/data-res.lua b/Master/texmf-dist/tex/context/base/data-res.lua
index 7206c0d7824..f7e238a11a3 100644
--- a/Master/texmf-dist/tex/context/base/data-res.lua
+++ b/Master/texmf-dist/tex/context/base/data-res.lua
@@ -12,9 +12,12 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
+-- I will reimplement this module ... way too fuzzy now and we can work
+-- with some sensible constraints as it is only used for context.
+
-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
+local gsub, find, lower, upper, match, gmatch = string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type, rawget = next, type, rawget
local os = os
@@ -22,14 +25,17 @@ local os = os
local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local formatters = string.formatters
local filedirname = file.dirname
local filebasename = file.basename
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local filejoin = file.join
local collapsepath = file.collapsepath
local joinpath = file.joinpath
local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
local setmetatableindex = table.setmetatableindex
+local luasuffixes = utilities.lua.suffixes
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -52,7 +58,7 @@ resolvers.cacheversion = '1.0.1'
resolvers.configbanner = ''
resolvers.homedir = environment.homedir
resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfname = "texmfcnf.lua"
resolvers.luacnfstate = "unknown"
-- The web2c tex binaries as well as kpse have built in paths for the configuration
@@ -284,11 +290,12 @@ end
local slash = P("/")
-local pathexpressionpattern = Cs (
+local pathexpressionpattern = Cs ( -- create lpeg instead (2013/2014)
Cc("^") * (
Cc("%") * S(".-")
+ slash^2 * P(-1) / "/.*"
- + slash^2 / "/.-/"
+-- + slash^2 / "/.-/"
+ + slash^2 / "/[^/]*/*"
+ (1-slash) * P(-1) * Cc("/")
+ P(1)
)^1 * Cc("$") -- yes or no $
@@ -314,15 +321,11 @@ local function reportcriticalvariables(cnfspec)
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
- report_resolving("variable '%s' set to '%s'",k,v)
+ report_resolving("variable %a set to %a",k,v)
end
report_resolving()
if cnfspec then
- if type(cnfspec) == "table" then
- report_resolving("using configuration specification '%s'",concat(cnfspec,","))
- else
- report_resolving("using configuration specification '%s'",cnfspec)
- end
+ report_resolving("using configuration specification %a",type(cnfspec) == "table" and concat(cnfspec,",") or cnfspec)
end
report_resolving()
end
@@ -332,7 +335,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
+ local cnfspec = getenv("TEXMFCNF")
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -348,10 +351,10 @@ local function identify_configuration_files()
if lfs.isfile(realname) then
specification[#specification+1] = filename
if trace_locating then
- report_resolving("found configuration file '%s'",realname)
+ report_resolving("found configuration file %a",realname)
end
elseif trace_locating then
- report_resolving("unknown configuration file '%s'",realname)
+ report_resolving("unknown configuration file %a",realname)
end
end
if trace_locating then
@@ -383,7 +386,7 @@ local function load_configuration_files()
if blob then
local parentdata = blob()
if parentdata then
- report_resolving("loading configuration file '%s'",filename)
+ report_resolving("loading configuration file %a",filename)
data = table.merged(parentdata,data)
end
end
@@ -391,7 +394,7 @@ local function load_configuration_files()
data = data and data.content
if data then
if trace_locating then
- report_resolving("loading configuration file '%s'",filename)
+ report_resolving("loading configuration file %a",filename)
report_resolving()
end
local variables = data.variables or { }
@@ -402,7 +405,7 @@ local function load_configuration_files()
initializesetter(filename,k,v)
elseif variables[k] == nil then
if trace_locating and not warning then
- report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
k,resolvers.resolve(filename))
warning = true
end
@@ -420,7 +423,7 @@ local function load_configuration_files()
-- we push the value into the main environment (osenv) so
-- that it takes precedence over the default one and therefore
-- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ resolvers.setenv("TEXMFCNF",cnfspec) -- resolves prefixes
-- we now identify and load the specified configuration files
instance.specification = { }
identify_configuration_files()
@@ -434,13 +437,13 @@ local function load_configuration_files()
else
if trace_locating then
- report_resolving("skipping configuration file '%s' (no content)",filename)
+ report_resolving("skipping configuration file %a (no content)",filename)
end
setups[pathname] = { }
instance.loaderror = true
end
elseif trace_locating then
- report_resolving("skipping configuration file '%s' (no valid format)",filename)
+ report_resolving("skipping configuration file %a (no valid format)",filename)
end
instance.order[#instance.order+1] = instance.setups[pathname]
if instance.loaderror then
@@ -468,10 +471,11 @@ end
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ local texmfpaths = resolvers.expandedpathlist("TEXMF")
if #texmfpaths > 0 then
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
+ path = gsub(path,"/+$","") -- in case $HOME expands to something with a trailing /
local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
if stripped ~= "" then
local runtime = stripped == path
@@ -484,9 +488,9 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
else
- report_resolving("locating list of '%s' (cached)",path)
+ report_resolving("locating list of %a (cached)",path)
end
end
methodhandler('locators',stripped)
@@ -519,11 +523,11 @@ local function save_file_databases() -- will become cachers
local content = instance.files[cachename]
caches.collapsecontent(content)
if trace_locating then
- report_resolving("saving tree '%s'",cachename)
+ report_resolving("saving tree %a",cachename)
end
caches.savecontent(cachename,"files",content)
elseif trace_locating then
- report_resolving("not saving runtime tree '%s'",cachename)
+ report_resolving("not saving runtime tree %a",cachename)
end
end
end
@@ -533,30 +537,30 @@ function resolvers.renew(hashname)
local expanded = resolvers.expansion(hashname) or ""
if expanded ~= "" then
if trace_locating then
- report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ report_resolving("identifying tree %a from %a",expanded,hashname)
end
hashname = expanded
else
if trace_locating then
- report_resolving("identifying tree '%s'",hashname)
+ report_resolving("identifying tree %a",hashname)
end
end
local realpath = resolvers.resolve(hashname)
if lfs.isdir(realpath) then
if trace_locating then
- report_resolving("using path '%s'",realpath)
+ report_resolving("using path %a",realpath)
end
methodhandler('generators',hashname)
-- could be shared
local content = instance.files[hashname]
caches.collapsecontent(content)
if trace_locating then
- report_resolving("saving tree '%s'",hashname)
+ report_resolving("saving tree %a",hashname)
end
caches.savecontent(hashname,"files",content)
-- till here
else
- report_resolving("invalid path '%s'",realpath)
+ report_resolving("invalid path %a",realpath)
end
end
end
@@ -581,7 +585,7 @@ function resolvers.appendhash(type,name,cache)
-- safeguard ... tricky as it's actually a bug when seen twice
if not instance.hashed[name] then
if trace_locating then
- report_resolving("hash '%s' appended",name)
+ report_resolving("hash %a appended",name)
end
insert(instance.hashes, { type = type, name = name, cache = cache } )
instance.hashed[name] = cache
@@ -592,7 +596,7 @@ function resolvers.prependhash(type,name,cache)
-- safeguard ... tricky as it's actually a bug when seen twice
if not instance.hashed[name] then
if trace_locating then
- report_resolving("hash '%s' prepended",name)
+ report_resolving("hash %a prepended",name)
end
insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
instance.hashed[name] = cache
@@ -600,9 +604,9 @@ function resolvers.prependhash(type,name,cache)
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv("TEXMF")) -- okay?
insert(t,1,specification)
- local newspec = concat(t,";")
+ local newspec = concat(t,",") -- not ;
if instance.environment["TEXMF"] then
instance.environment["TEXMF"] = newspec
elseif instance.variables["TEXMF"] then
@@ -677,14 +681,19 @@ function resolvers.resetextrapath()
end
function resolvers.registerextrapath(paths,subpaths)
+ paths = settings_to_array(paths)
+ subpaths = settings_to_array(subpaths)
local ep = instance.extra_paths or { }
local oldn = #ep
local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ local nofpaths = #paths
+ local nofsubpaths = #subpaths
+ if nofpaths > 0 then
+ if nofsubpaths > 0 then
+ for i=1,nofpaths do
+ local p = paths[i]
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = p .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -694,7 +703,8 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
else
- for p in gmatch(paths,"[^,]+") do
+ for i=1,nofpaths do
+ local p = paths[i]
if not done[p] then
newn = newn + 1
ep[newn] = resolvers.cleanpath(p)
@@ -702,10 +712,10 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
end
- elseif subpaths and subpaths ~= "" then
+ elseif nofsubpaths > 0 then
for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = ep[i] .. "/" .. s
if not done[ps] then
newn = newn + 1
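As an aside, a minimal standalone sketch of what the rewritten loops above compute, assuming settings_to_array splits a comma separated string into a list; the path values are invented for illustration and this is not the real resolver code:

-- every path is combined with every subpath (a cross product), mirroring the
-- nested loops in registerextrapath
local function combine(paths,subpaths)
    local result = { }
    for i=1,#paths do
        for j=1,#subpaths do
            result[#result+1] = paths[i] .. "/" .. subpaths[j]
        end
    end
    return result
end

-- combine({ "extras", "project" }, { "tex", "fonts" })
-- --> { "extras/tex", "extras/fonts", "project/tex", "project/fonts" }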
@@ -783,18 +793,21 @@ function resolvers.expandedpathlist(str)
return { }
elseif instance.savelists then
str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
+ local lists = instance.lists
+ local lst = lists[str]
+ if not lst then
+ local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst = expandedpathfromlist(l)
+ lists[str] = lst
+ end
+ return lst
else
local lst = resolvers.splitpath(resolvers.expansion(str))
return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expandedpathlistfromvariable(str) -- brrr
+function resolvers.expandedpathlistfromvariable(str) -- brrr / could also have cleaner ^!! /$ //
str = lpegmatch(dollarstripper,str)
local tmp = resolvers.variableofformatorsuffix(str)
return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
@@ -828,9 +841,9 @@ local function isreadable(name)
local readable = lfs.isfile(name) -- not file.is_readable(name) as it can be a dir
if trace_detail then
if readable then
- report_resolving("file '%s' is readable",name)
+ report_resolving("file %a is readable",name)
else
- report_resolving("file '%s' is not readable", name)
+ report_resolving("file %a is not readable", name)
end
end
return readable
@@ -844,14 +857,14 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- report_resolving("checking name '%s'",fname)
+ report_resolving("checking name %a",fname)
end
local bname = filebasename(fname)
local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
-dname = gsub(dname,"*","%.*")
+ dname = gsub(dname,"%*",".*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -861,7 +874,7 @@ dname = gsub(dname,"*","%.*")
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- report_resolving("deep checking '%s' (%s)",blobpath,bname)
+ report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
end
local blobfile = files[bname]
if not blobfile then
@@ -881,7 +894,7 @@ dname = gsub(dname,"*","%.*")
local search = filejoin(blobroot,blobfile,bname)
local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
end
noffiles = noffiles + 1
filelist[noffiles] = { variant, search, result }
@@ -895,7 +908,7 @@ dname = gsub(dname,"*","%.*")
local search = filejoin(blobroot,vv,bname)
local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
- report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
end
noffiles = noffiles + 1
filelist[noffiles] = { variant, search, result }
@@ -904,7 +917,7 @@ dname = gsub(dname,"*","%.*")
end
end
elseif trace_locating then
- report_resolving("no match in '%s' (%s)",blobpath,bname)
+ report_resolving("no match in %a (%s)",blobpath,bname)
end
end
end
@@ -951,7 +964,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local collect_instance_files
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ local filetype, wantedfiles, ext = '', { }, suffixonly(filename)
-- too tricky as filename can be bla.1.2.3:
--
-- if not suffixmap[ext] then
@@ -966,13 +979,13 @@ local function find_analyze(filename,askedformat,allresults)
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.formatofsuffix(forcedname)
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("forcing filetype %a",filetype)
end
end
else
filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ report_resolving("using suffix based filetype %a",filetype)
end
end
else
@@ -986,7 +999,7 @@ local function find_analyze(filename,askedformat,allresults)
end
filetype = askedformat
if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
+ report_resolving("using given filetype %a",filetype)
end
end
return filetype, wantedfiles
@@ -995,7 +1008,7 @@ end
local function find_direct(filename,allresults)
if not dangerous[askedformat] and isreadable(filename) then
if trace_detail then
- report_resolving("file '%s' found directly",filename)
+ report_resolving("file %a found directly",filename)
end
return "direct", { filename }
end
@@ -1004,7 +1017,7 @@ end
local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
- report_resolving("checking wildcard '%s'", filename)
+ report_resolving("checking wildcard %a", filename)
end
local method, result = resolvers.findwildcardfiles(filename)
if result then
@@ -1013,23 +1026,23 @@ local function find_wildcard(filename,allresults)
end
end
-local function find_qualified(filename,allresults) -- this one will be split too
+local function find_qualified(filename,allresults,askedformat,alsostripped) -- this one will be split too
if not file.is_qualified_path(filename) then
return
end
if trace_locating then
- report_resolving("checking qualified name '%s'", filename)
+ report_resolving("checking qualified name %a", filename)
end
if isreadable(filename) then
if trace_detail then
- report_resolving("qualified file '%s' found", filename)
+ report_resolving("qualified file %a found", filename)
end
return "qualified", { filename }
end
if trace_detail then
- report_resolving("locating qualified file '%s'", filename)
+ report_resolving("locating qualified file %a", filename)
end
- local forcedname, suffix = "", fileextname(filename)
+ local forcedname, suffix = "", suffixonly(filename)
if suffix == "" then -- why
local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
if format_suffixes then
@@ -1038,14 +1051,14 @@ local function find_qualified(filename,allresults) -- this one will be split too
forcedname = filename .. "." .. s
if isreadable(forcedname) then
if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
+ report_resolving("no suffix, forcing format filetype %a", s)
end
return "qualified", { forcedname }
end
end
end
end
- if suffix and suffix ~= "" then
+ if alsostripped and suffix and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = filebasename(filename)
@@ -1060,6 +1073,8 @@ local function find_qualified(filename,allresults) -- this one will be split too
askedformat = "othertextfiles" -- kind of everything, maybe all
end
--
+ -- is this really what we want? basename if we have an explicit path?
+ --
if basename ~= filename then
local resolved = collect_instance_files(basename,askedformat,allresults)
if #resolved == 0 then
@@ -1102,7 +1117,7 @@ end
local function check_subpath(fname)
if isreadable(fname) then
if trace_detail then
- report_resolving("found '%s' by deep scanning",fname)
+ report_resolving("found %a by deep scanning",fname)
end
return fname
end
@@ -1122,9 +1137,13 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
if trace_detail then
- report_resolving("checking filename '%s'",filename)
+ report_resolving("checking filename %a",filename)
end
+ local resolve = resolvers.resolve
local result = { }
+ -- pathlist : resolved
+ -- dirlist : unresolved or resolved
+ -- filelist : unresolved
for k=1,#pathlist do
local path = pathlist[k]
local pathname = lpegmatch(inhibitstripper,path)
@@ -1138,28 +1157,29 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
-- compare list entries with permitted pattern -- /xx /xx//
local expression = makepathexpression(pathname)
if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern %a for path %a",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
local f = fl[2]
local d = dirlist[k]
- if find(d,expression) then
+ -- resolve is new:
+ if find(d,expression) or find(resolve(d),expression) then
-- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ result[#result+1] = resolve(fl[3]) -- no shortcut
done = true
if allresults then
if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
end
else
if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
end
end
end
@@ -1168,7 +1188,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
else
method = "filesystem" -- bonus, even when !! is specified
pathname = gsub(pathname,"/+$","")
- pathname = resolvers.resolve(pathname)
+ pathname = resolve(pathname)
local scheme = url.hasscheme(pathname)
if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
@@ -1244,7 +1264,7 @@ end
local function find_onpath(filename,filetype,wantedfiles,allresults)
if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
end
local result = { }
for k=1,#wantedfiles do
@@ -1282,7 +1302,7 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
local results = {
{ find_direct (filename,true) },
{ find_wildcard (filename,true) },
- { find_qualified(filename,true) },
+ { find_qualified(filename,true,askedformat) }, -- we can add ,true if we want to find dups
{ find_intree (filename,filetype,wantedfiles,true) },
{ find_onpath (filename,filetype,wantedfiles,true) },
{ find_otherwise(filename,filetype,wantedfiles,true) },
@@ -1297,7 +1317,7 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
result[#result+1] = c
done[c] = true
end
- status[#status+1] = format("%-10s: %s",method,c)
+ status[#status+1] = formatters["%-10s: %s"](method,c)
end
end
end
@@ -1308,11 +1328,11 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
else
local method, result, stamp, filetype, wantedfiles
if instance.remember then
- stamp = format("%s--%s", filename, askedformat)
+ stamp = formatters["%s--%s"](filename,askedformat)
result = stamp and instance.found[stamp]
if result then
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("remembered file %a",filename)
end
return result
end
@@ -1321,7 +1341,7 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
if not result then
method, result = find_wildcard(filename)
if not result then
- method, result = find_qualified(filename)
+ method, result = find_qualified(filename,false,askedformat)
if not result then
filetype, wantedfiles = find_analyze(filename,askedformat)
method, result = find_intree(filename,filetype,wantedfiles)
@@ -1343,7 +1363,7 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
end
if stamp then
if trace_locating then
- report_resolving("remembering file '%s'",filename)
+ report_resolving("remembering file %a",filename)
end
instance.found[stamp] = result
end
@@ -1399,7 +1419,9 @@ local function findgivenfiles(filename,allresults)
if found ~= "" then
noffound = noffound + 1
result[noffound] = resolvers.resolve(found)
- if not allresults then break end
+ if not allresults then
+ break
+ end
end
else
for kk=1,#blist do
@@ -1596,15 +1618,19 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
- local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
- local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
+ local engine = environment.ownmain or "luatex"
+ local barename = file.removesuffix(name)
+ local fullname = file.addsuffix(barename,"fmt")
+ local fmtname = caches.getfirstreadablefile(fullname,"formats",engine) or ""
if fmtname == "" then
- fmtname = resolvers.findfile(barename..".fmt")
+ fmtname = resolvers.findfile(fullname)
fmtname = resolvers.cleanpath(fmtname)
end
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
+ local luaname = file.addsuffix(barename,luasuffixes.lua)
+ local lucname = file.addsuffix(barename,luasuffixes.luc)
+ local luiname = file.addsuffix(barename,luasuffixes.lui)
if lfs.isfile(luiname) then
return barename, luiname
elseif lfs.isfile(lucname) then
@@ -1640,13 +1666,11 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move,
local files = instance.files[blobpath]
local total, checked, done = 0, 0, 0
if files then
- for k,v in next, files do
+ for k, v in table.sortedhash(files) do -- was "next, files"; beware: this is not the resolve order
total = total + 1
if find(k,"^remap:") then
- k = files[k]
- v = k -- files[k] -- chained
- end
- if find(k,pattern) then
+ -- forget about these
+ elseif find(k,pattern) then
if type(v) == "string" then
checked = checked + 1
if handle(blobtype,blobpath,v,k) then
diff --git a/Master/texmf-dist/tex/context/base/data-sch.lua b/Master/texmf-dist/tex/context/base/data-sch.lua
index 253adb9f6d7..41b941c5a13 100644
--- a/Master/texmf-dist/tex/context/base/data-sch.lua
+++ b/Master/texmf-dist/tex/context/base/data-sch.lua
@@ -6,26 +6,26 @@ if not modules then modules = { } end modules ['data-sch'] = {
license = "see context related readme files"
}
-local http = require("socket.http")
-local ltn12 = require("ltn12")
+local load = load
local gsub, concat, format = string.gsub, table.concat, string.format
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
-
+local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
local report_schemes = logs.reporter("resolvers","schemes")
-local resolvers = resolvers
+local http = require("socket.http")
+local ltn12 = require("ltn12")
-resolvers.schemes = resolvers.schemes or { }
-local schemes = resolvers.schemes
-schemes.threshold = 24 * 60 * 60
+local resolvers = resolvers
+local schemes = resolvers.schemes or { }
+resolvers.schemes = schemes
-directives.register("schemes.threshold", function(v) schemes.threshold = tonumber(v) or schemes.threshold end)
+local cleaners = { }
+schemes.cleaners = cleaners
-local cleaners = { }
+local threshold = 24 * 60 * 60
-schemes.cleaners = cleaners
+directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end)
function cleaners.none(specification)
return specification.original
@@ -46,14 +46,14 @@ directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or
function resolvers.schemes.cleanname(specification)
local hash = cleaner(specification)
if trace_schemes then
- report_schemes("hashing %s to %s",specification.original,hash)
+ report_schemes("hashing %a to %a",specification.original,hash)
end
return hash
end
local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
-local function runcurl(name,cachename) -- will use sockets instead or the curl library
+local function runcurl(name,cachename) -- we use sockets or the curl library instead when possible
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
os.spawn(command)
end
@@ -65,19 +65,18 @@ local function fetch(specification)
local cachename = caches.setfirstwritablefile(cleanname,"schemes")
if not cached[original] then
statistics.starttiming(schemes)
- if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) >
- (thresholds[protocol] or schemes.threshold)) then
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then
cached[original] = cachename
local handler = handlers[scheme]
if handler then
if trace_schemes then
- report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme)
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
end
logs.flush()
handler(specification,cachename)
else
if trace_schemes then
- report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme)
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
end
logs.flush()
runcurl(original,cachename)
@@ -86,19 +85,19 @@ local function fetch(specification)
if io.exists(cachename) then
cached[original] = cachename
if trace_schemes then
- report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename)
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
end
else
cached[original] = ""
if trace_schemes then
- report_schemes("using missing '%s', protocol '%s'",original,scheme)
+ report_schemes("using missing %a, protocol %a",original,scheme)
end
end
loaded[scheme] = loaded[scheme] + 1
statistics.stoptiming(schemes)
else
if trace_schemes then
- report_schemes("reusing '%s', protocol '%s'",original,scheme)
+ report_schemes("reusing %a, protocol %a",original,scheme)
end
reused[scheme] = reused[scheme] + 1
end
@@ -112,14 +111,14 @@ end
local opener = openers.file
local loader = loaders.file
-local function install(scheme,handler,threshold)
+local function install(scheme,handler,newthreshold)
handlers [scheme] = handler
loaded [scheme] = 0
reused [scheme] = 0
finders [scheme] = finder
openers [scheme] = opener
loaders [scheme] = loader
- thresholds[scheme] = threshold or schemes.threshold
+ thresholds[scheme] = newthreshold or threshold
end
schemes.install = install
@@ -160,11 +159,42 @@ statistics.register("scheme handling time", function()
end
local n = nl + nr
if n > 0 then
- l = (nl > 0 and concat(l)) or "none"
- r = (nr > 0 and concat(r)) or "none"
+ l = nl > 0 and concat(l) or "none"
+ r = nr > 0 and concat(r) or "none"
return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
- statistics.elapsedtime(schemes), n, schemes.threshold, l, r)
+ statistics.elapsedtime(schemes), n, threshold, l, r)
else
return nil
end
end)
+
+-- We provide a few more helpers:
+
+----- http = require("socket.http")
+local httprequest = http.request
+local toquery = url.toquery
+
+-- local function httprequest(url)
+-- return os.resultof(format("curl --silent %q", url))
+-- end
+
+local function fetchstring(url,data)
+ local q = data and toquery(data)
+ if q then
+ url = url .. "?" .. q
+ end
+ local reply = httprequest(url)
+ return reply -- just one argument
+end
+
+schemes.fetchstring = fetchstring
+
+function schemes.fetchtable(url,data)
+ local reply = fetchstring(url,data)
+ if reply then
+ local s = load("return " .. reply)
+ if s then
+ return s()
+ end
+ end
+end
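A hedged usage sketch for the two new helpers; the url and the query fields below are invented for illustration, and fetchtable only makes sense when the server replies with a Lua table literal, as the load("return " .. reply) call above implies:

-- hypothetical url and query data, not part of this commit
local reply = resolvers.schemes.fetchstring("http://example.com/ping.lua", { user = "test" })

-- when the reply is something like '{ status = "ok", version = 1 }' we get a table back
local data = resolvers.schemes.fetchtable("http://example.com/ping.lua")
if data then
    print(data.status)
end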
diff --git a/Master/texmf-dist/tex/context/base/data-tex.lua b/Master/texmf-dist/tex/context/base/data-tex.lua
index 6cb361699fa..f5c986d7708 100644
--- a/Master/texmf-dist/tex/context/base/data-tex.lua
+++ b/Master/texmf-dist/tex/context/base/data-tex.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['data-tex'] = {
}
local char = string.char
+local insert, remove = table.insert, table.remove
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -17,7 +18,7 @@ local resolvers = resolvers
local sequencers = utilities.sequencers
local methodhandler = resolvers.methodhandler
local splitlines = string.splitlines
-local utffiletype = unicode.filetype
+local utffiletype = utf.filetype
-- local fileprocessor = nil
-- local lineprocessor = nil
@@ -51,6 +52,10 @@ appendgroup(textlineactions,"after" ) -- user
local ctrl_d = char( 4) -- unix
local ctrl_z = char(26) -- windows
+resolvers.inputstack = resolvers.inputstack or { }
+
+local inputstack = resolvers.inputstack
+
function helpers.textopener(tag,filename,filehandle,coding)
local lines
local t_filehandle = type(filehandle)
@@ -61,22 +66,23 @@ function helpers.textopener(tag,filename,filehandle,coding)
elseif t_filehandle == "table" then
lines = filehandle
else
- lines = filehandle:read("*a")
+ lines = filehandle:read("*a") -- io.readall(filehandle) ... but files are never that large anyway
+ -- lines = io.readall(filehandle)
filehandle:close()
end
if type(lines) == "string" then
local coding = coding or utffiletype(lines) -- so we can signal no regime
if trace_locating then
- report_tex("%s opener, '%s' opened using method '%s'",tag,filename,coding)
+ report_tex("%a opener: %a opened using method %a",tag,filename,coding)
end
if coding == "utf-16-be" then
- lines = unicode.utf16_to_utf8_be(lines)
+ lines = utf.utf16_to_utf8_be(lines)
elseif coding == "utf-16-le" then
- lines = unicode.utf16_to_utf8_le(lines)
+ lines = utf.utf16_to_utf8_le(lines)
elseif coding == "utf-32-be" then
- lines = unicode.utf32_to_utf8_be(lines)
+ lines = utf.utf32_to_utf8_be(lines)
elseif coding == "utf-32-le" then
- lines = unicode.utf32_to_utf8_le(lines)
+ lines = utf.utf32_to_utf8_le(lines)
else -- utf8 or unknown (could be a mkvi file)
local runner = textfileactions.runner
if runner then
@@ -85,22 +91,24 @@ function helpers.textopener(tag,filename,filehandle,coding)
lines = splitlines(lines)
end
elseif trace_locating then
- report_tex("%s opener, '%s' opened",tag,filename)
+ report_tex("%a opener: %a opened",tag,filename)
end
local noflines = #lines
if lines[noflines] == "" then -- maybe some special check is needed
lines[noflines] = nil
end
logs.show_open(filename)
+ insert(inputstack,filename)
return {
filename = filename,
noflines = noflines,
currentline = 0,
close = function()
if trace_locating then
- report_tex("%s closer, '%s' closed",tag,filename)
+ report_tex("%a closer: %a closed",tag,filename)
end
logs.show_close(filename)
+ remove(inputstack)
t = nil
end,
reader = function(self)
@@ -159,7 +167,7 @@ local function installhandler(namespace,what,where,func)
if where == "before" or where == "after" then
sequencers.appendaction(namespace,where,func)
else
- report_tex("installing input %s handlers in %s is not possible",what,tostring(where))
+ report_tex("installing input %a handlers in %a is not possible",what,tostring(where))
end
end
@@ -168,8 +176,8 @@ function resolvers.installinputfilehandler(...) installhandler(helpers.textfilea
-- local basename = file.basename
-- resolvers.installinputlinehandler(function(str,filename,linenumber,noflines)
--- report_tex("[lc] file: %s, line: %s of %s, length: %s",basename(filename),linenumber,noflines,#str)
+-- report_tex("[lc] file %a, line %a of %a, length %a",basename(filename),linenumber,noflines,#str)
-- end)
-- resolvers.installinputfilehandler(function(str,filename)
--- report_tex("[fc] file: %s, length: %s",basename(filename),#str)
+-- report_tex("[fc] file %a, length %a",basename(filename),#str)
-- end)
diff --git a/Master/texmf-dist/tex/context/base/data-tmf.lua b/Master/texmf-dist/tex/context/base/data-tmf.lua
index 75dcdd956f5..c522251932e 100644
--- a/Master/texmf-dist/tex/context/base/data-tmf.lua
+++ b/Master/texmf-dist/tex/context/base/data-tmf.lua
@@ -30,11 +30,11 @@ function resolvers.load_tree(tree,resolve)
local newpath = file.join(newtree,"bin")
if not lfs.isdir(newtree) then
- report_tds("no '%s' under tree %s",texos,tree)
+ report_tds("no %a under tree %a",texos,tree)
os.exit()
end
if not lfs.isdir(newpath) then
- report_tds("no '%s/bin' under tree %s",texos,tree)
+ report_tds("no '%s/bin' under tree %a",texos,tree)
os.exit()
end
@@ -65,9 +65,9 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
- report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to PATH",newpath)
- report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
report_tds()
end
end
diff --git a/Master/texmf-dist/tex/context/base/data-tmp.lua b/Master/texmf-dist/tex/context/base/data-tmp.lua
index 6e235dd3466..525698d1e86 100644
--- a/Master/texmf-dist/tex/context/base/data-tmp.lua
+++ b/Master/texmf-dist/tex/context/base/data-tmp.lua
@@ -24,7 +24,9 @@ luatools with a recache feature.</p>
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
local serialize, serializetofile = table.serialize, table.tofile
-local mkdirs, isdir = dir.mkdirs, lfs.isdir
+local mkdirs, isdir, isfile = dir.mkdirs, lfs.isdir, lfs.isfile
+local addsuffix, is_writable, is_readable = file.addsuffix, file.is_writable, file.is_readable
+local formatters = string.formatters
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
@@ -37,7 +39,7 @@ local resolvers = resolvers
-- intermezzo
local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end)
-local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end)
+local directive_strip = false directives.register("system.compile.strip", function(v) directive_strip = v end)
local compile = utilities.lua.compile
@@ -49,8 +51,10 @@ end
-- end of intermezzo
-caches = caches or { }
-local caches = caches
+caches = caches or { }
+local caches = caches
+
+local luasuffixes = utilities.lua.suffixes
caches.base = caches.base or "luatex-cache"
caches.more = caches.more or "context"
@@ -78,19 +82,19 @@ local function identify()
cachepath = file.collapsepath(cachepath)
local valid = isdir(cachepath)
if valid then
- if file.is_readable(cachepath) then
+ if is_readable(cachepath) then
readables[#readables+1] = cachepath
- if not writable and file.is_writable(cachepath) then
+ if not writable and is_writable(cachepath) then
writable = cachepath
end
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
+ if is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
- if isdir(cachepath) and file.is_writable(cachepath) then
- report_caches("created: %s",cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
writable = cachepath
readables[#readables+1] = cachepath
end
@@ -111,8 +115,8 @@ local function identify()
cachepath = resolvers.resolve(cachepath)
cachepath = resolvers.cleanpath(cachepath)
local valid = isdir(cachepath)
- if valid and file.is_readable(cachepath) then
- if not writable and file.is_writable(cachepath) then
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
readables[#readables+1] = cachepath
writable = cachepath
break
@@ -149,9 +153,9 @@ local function identify()
-- end
if trace_cache then
for i=1,#readables do
- report_caches("using readable path '%s' (order %s)",readables[i],i)
+ report_caches("using readable path %a (order %s)",readables[i],i)
end
- report_caches("using writable path '%s'",writable)
+ report_caches("using writable path %a",writable)
end
identify = function()
return writable, readables
@@ -166,10 +170,10 @@ function caches.usedpaths()
for i=1,#readables do
local readable = readables[i]
if usedreadables[i] or readable == writable then
- result[#result+1] = format("readable: '%s' (order %s)",readable,i)
+ result[#result+1] = formatters["readable: %a (order %s)"](readable,i)
end
end
- result[#result+1] = format("writable: '%s'",writable)
+ result[#result+1] = formatters["writable: %a"](writable)
return result
else
return writable
@@ -181,12 +185,11 @@ function caches.configfiles()
end
function caches.hashed(tree)
- tree = gsub(tree,"\\$","/")
- tree = gsub(tree,"/+$","")
+ tree = gsub(tree,"[\\/]+$","")
tree = lower(tree)
local hash = md5.hex(tree)
if trace_cache or trace_locating then
- report_caches("hashing tree %s, hash %s",tree,hash)
+ report_caches("hashing tree %a, hash %a",tree,hash)
end
return hash
end
@@ -202,7 +205,7 @@ end
local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+local function getreadablepaths(...)
local tags = { ... }
local hash = concat(tags,"/")
local done = r_cache[hash]
@@ -245,7 +248,7 @@ function caches.getfirstreadablefile(filename,...)
for i=1,#rd do
local path = rd[i]
local fullname = file.join(path,filename)
- if file.is_readable(fullname) then
+ if is_readable(fullname) then
usedreadables[i] = true
return fullname, path
end
@@ -266,7 +269,7 @@ function caches.define(category,subcategory) -- for old times sake
end
function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
+ return format("%s/%s.%s",path,name,luasuffixes.tma), format("%s/%s.%s",path,name,luasuffixes.tmc)
end
function caches.loaddata(readables,name)
@@ -276,7 +279,20 @@ function caches.loaddata(readables,name)
for i=1,#readables do
local path = readables[i]
local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
+ local loader = false
+ if isfile(tmcname) then
+ loader = loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ -- in case we have a different engine
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader = loadfile(tmcname)
+ end
+ if not loader then
+ loader = loadfile(tmaname)
+ end
+ end
if loader then
loader = loader()
collectgarbage("step")
@@ -288,11 +304,15 @@ end
function caches.is_writable(filepath,filename)
local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.is_writable(tmaname)
+ return is_writable(tmaname)
end
local saveoptions = { compact = true }
+-- at some point we will only use the internal bytecode compiler and
+-- then we can flag success in the tma so that it can trigger a compile
+-- if the other engine
+
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
local reduce, simplify = true, true
@@ -318,9 +338,9 @@ end
function caches.loadcontent(cachename,dataname)
local name = caches.hashed(cachename)
- local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
local filename = file.join(path,name)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ local blob = loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
if blob then
local data = blob()
if data and data.content then
@@ -328,20 +348,20 @@ function caches.loadcontent(cachename,dataname)
if data.version == resolvers.cacheversion then
content_state[#content_state+1] = data.uuid
if trace_locating then
- report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
end
return data.content
else
- report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
end
else
- report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
end
elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
end
elseif trace_locating then
- report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename)
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
end
end
@@ -355,11 +375,12 @@ end
function caches.savecontent(cachename,dataname,content)
local name = caches.hashed(cachename)
- local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
local filename = file.join(path,name) -- is full
- local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ local luaname = addsuffix(filename,luasuffixes.lua)
+ local lucname = addsuffix(filename,luasuffixes.luc)
if trace_locating then
- report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ report_resolvers("preparing %a for %a",dataname,cachename)
end
local data = {
type = dataname,
@@ -373,20 +394,20 @@ function caches.savecontent(cachename,dataname,content)
local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
- report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
end
if utilities.lua.compile(luaname,lucname) then
if trace_locating then
- report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ report_resolvers("%a compiled to %a",dataname,lucname)
end
return true
else
if trace_locating then
- report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
end
os.remove(lucname)
end
elseif trace_locating then
- report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
end
end
diff --git a/Master/texmf-dist/tex/context/base/data-tre.lua b/Master/texmf-dist/tex/context/base/data-tre.lua
index 1b916bf26ae..0a8b00d9bfb 100644
--- a/Master/texmf-dist/tex/context/base/data-tre.lua
+++ b/Master/texmf-dist/tex/context/base/data-tre.lua
@@ -51,18 +51,18 @@ function resolvers.locators.tree(specification)
local realname = resolvers.resolve(name) -- no shortcut
if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",realname)
+ report_trees("locator %a found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
- report_trees("locator '%s' not found",name)
+ report_trees("locator %a not found",name)
end
end
function resolvers.hashers.tree(specification)
local name = specification.filename
if trace_locating then
- report_trees("analysing '%s'",name)
+ report_trees("analysing %a",name)
end
resolvers.methodhandler("hashers",name)
diff --git a/Master/texmf-dist/tex/context/base/data-use.lua b/Master/texmf-dist/tex/context/base/data-use.lua
index d2a9b757191..9c15263bb11 100644
--- a/Master/texmf-dist/tex/context/base/data-use.lua
+++ b/Master/texmf-dist/tex/context/base/data-use.lua
@@ -35,7 +35,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- report_mounts("mounting %s",line)
+ report_mounts("mounting %a",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -59,7 +59,7 @@ statistics.register("used cache path", function() return caches.usedpaths() end
function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
local enginebanner = status.list().banner
if formatbanner and enginebanner and sourcefile then
- local luvname = file.replacesuffix(texname,"luv")
+ local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
local luvdata = {
enginebanner = enginebanner,
formatbanner = formatbanner,
@@ -70,10 +70,14 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname ==
end
end
+-- todo: check this at startup and return (say) 999 as a signal that the run
+-- was aborted due to a wrong format, in which case mtx-context can trigger
+-- a remake
+
function statistics.checkfmtstatus(texname)
local enginebanner = status.list().banner
if enginebanner and texname then
- local luvname = file.replacesuffix(texname,"luv")
+ local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
if lfs.isfile(luvname) then
local luv = dofile(luvname)
if luv and luv.sourcefile then
diff --git a/Master/texmf-dist/tex/context/base/data-vir.lua b/Master/texmf-dist/tex/context/base/data-vir.lua
index 89359c19a08..e5bf35fa77b 100644
--- a/Master/texmf-dist/tex/context/base/data-vir.lua
+++ b/Master/texmf-dist/tex/context/base/data-vir.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-vir'] = {
license = "see context related readme files"
}
-local format = string.format
+local format, validstrings = string.format, string.valid
local trace_virtual = false
local report_virtual = logs.reporter("resolvers","virtual")
@@ -18,14 +18,16 @@ local resolvers = resolvers
local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-local data, n, template = { }, 0, "virtual://%s.%s" -- hm, number can be query
+local data = { }
+local n = 0 -- hm, number can be query
+local template = "virtual://%s.%s"
function savers.virtual(specification,content)
n = n + 1 -- one number for all namespaces
local path = specification.path
local filename = format(template,path ~= "" and path or "virtualfile",n)
if trace_virtual then
- report_virtual("saver: file '%s' saved",filename)
+ report_virtual("saver: file %a saved",filename)
end
data[filename] = content
return filename
@@ -36,12 +38,12 @@ function finders.virtual(specification)
local d = data[original]
if d then
if trace_virtual then
- report_virtual("finder: file '%s' found",original)
+ report_virtual("finder: file %a found",original)
end
return original
else
if trace_virtual then
- report_virtual("finder: unknown file '%s'",original)
+ report_virtual("finder: unknown file %a",original)
end
return finders.notfound()
end
@@ -52,14 +54,14 @@ function openers.virtual(specification)
local d = data[original]
if d then
if trace_virtual then
- report_virtual("opener, file '%s' opened",original)
+ report_virtual("opener: file %a opened",original)
end
- data[original] = nil
+ data[original] = nil -- when we comment this we can have error messages
-- With utf-8 we signal that no regime is to be applied!
return openers.helpers.textopener("virtual",original,d,"utf-8")
else
if trace_virtual then
- report_virtual("opener, file '%s' not found",original)
+ report_virtual("opener: file %a not found",original)
end
return openers.notfound()
end
@@ -70,13 +72,13 @@ function loaders.virtual(specification)
local d = data[original]
if d then
if trace_virtual then
- report_virtual("loader, file '%s' loaded",original)
+ report_virtual("loader: file %a loaded",original)
end
data[original] = nil
return true, d, #d
end
if trace_virtual then
- report_virtual("loader, file '%s' not loaded",original)
+ report_virtual("loader: file %a not loaded",original)
end
return loaders.notfound()
end
diff --git a/Master/texmf-dist/tex/context/base/data-zip.lua b/Master/texmf-dist/tex/context/base/data-zip.lua
index 676a2df967e..5db69670c6d 100644
--- a/Master/texmf-dist/tex/context/base/data-zip.lua
+++ b/Master/texmf-dist/tex/context/base/data-zip.lua
@@ -14,11 +14,17 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_zip = logs.reporter("resolvers","zip")
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
+--[[ldx--
+<p>We use a url syntax for accessing the zip file itself and file in it:</p>
+
+<typing>
+zip:///oeps.zip?name=bla/bla.tex
+zip:///oeps.zip?tree=tex/texmf-local
+zip:///texmf.zip?tree=/tex/texmf
+zip:///texmf.zip?tree=/tex/texmf-local
+zip:///texmf-mine.zip?tree=/tex/texmf-projects
+</typing>
+--ldx]]--
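As a hedged illustration of the url syntax documented above (the archive and tree names are placeholders taken from the examples):

-- resolvers.usezipfile("zip:///texmf-mine.zip?tree=/tex/texmf-projects")
--
-- registering a ?tree= url prepends a zip hash, so files below that tree take
-- part in normal resolving, while a ?name= url addresses one file in the
-- archive directly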
local resolvers = resolvers
@@ -77,9 +83,9 @@ function resolvers.locators.zip(specification)
local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
if trace_locating then
if zipfile then
- report_zip("locator, archive '%s' found",archive)
+ report_zip("locator: archive %a found",archive)
else
- report_zip("locator, archive '%s' not found",archive)
+ report_zip("locator: archive %a not found",archive)
end
end
end
@@ -87,7 +93,7 @@ end
function resolvers.hashers.zip(specification)
local archive = specification.filename
if trace_locating then
- report_zip("loading file '%s'",archive)
+ report_zip("loading file %a",archive)
end
resolvers.usezipfile(specification.original)
end
@@ -110,25 +116,25 @@ function resolvers.finders.zip(specification)
local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_zip("finder, archive '%s' found",archive)
+ report_zip("finder: archive %a found",archive)
end
local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- report_zip("finder, file '%s' found",queryname)
+ report_zip("finder: file %a found",queryname)
end
return specification.original
elseif trace_locating then
- report_zip("finder, file '%s' not found",queryname)
+ report_zip("finder: file %a not found",queryname)
end
elseif trace_locating then
- report_zip("finder, unknown archive '%s'",archive)
+ report_zip("finder: unknown archive %a",archive)
end
end
end
if trace_locating then
- report_zip("finder, '%s' not found",original)
+ report_zip("finder: %a not found",original)
end
return resolvers.finders.notfound()
end
@@ -143,24 +149,24 @@ function resolvers.openers.zip(specification)
local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_zip("opener, archive '%s' opened",archive)
+ report_zip("opener; archive %a opened",archive)
end
local dfile = zfile:open(queryname)
if dfile then
if trace_locating then
- report_zip("opener, file '%s' found",queryname)
+ report_zip("opener: file %a found",queryname)
end
return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- report_zip("opener, file '%s' not found",queryname)
+ report_zip("opener: file %a not found",queryname)
end
elseif trace_locating then
- report_zip("opener, unknown archive '%s'",archive)
+ report_zip("opener: unknown archive %a",archive)
end
end
end
if trace_locating then
- report_zip("opener, '%s' not found",original)
+ report_zip("opener: %a not found",original)
end
return resolvers.openers.notfound()
end
@@ -175,27 +181,27 @@ function resolvers.loaders.zip(specification)
local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_zip("loader, archive '%s' opened",archive)
+ report_zip("loader: archive %a opened",archive)
end
local dfile = zfile:open(queryname)
if dfile then
logs.show_load(original)
if trace_locating then
- report_zip("loader, file '%s' loaded",original)
+ report_zip("loader; file %a loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- report_zip("loader, file '%s' not found",queryname)
+ report_zip("loader: file %a not found",queryname)
end
elseif trace_locating then
- report_zip("loader, unknown archive '%s'",archive)
+ report_zip("loader; unknown archive %a",archive)
end
end
end
if trace_locating then
- report_zip("loader, '%s' not found",original)
+ report_zip("loader: %a not found",original)
end
return resolvers.openers.notfound()
end
@@ -212,7 +218,7 @@ function resolvers.usezipfile(archive)
local instance = resolvers.instance
local tree = url.query(specification.query).tree or ""
if trace_locating then
- report_zip("registering, registering archive '%s'",archive)
+ report_zip("registering: archive %a",archive)
end
statistics.starttiming(instance)
resolvers.prependhash('zip',archive)
@@ -221,10 +227,10 @@ function resolvers.usezipfile(archive)
instance.files[archive] = resolvers.registerzipfile(z,tree)
statistics.stoptiming(instance)
elseif trace_locating then
- report_zip("registering, unknown archive '%s'",archive)
+ report_zip("registering: unknown archive %a",archive)
end
elseif trace_locating then
- report_zip("registering, '%s' not found",archive)
+ report_zip("registering: archive %a not found",archive)
end
end
@@ -236,7 +242,7 @@ function resolvers.registerzipfile(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- report_zip("registering, using filter '%s'",filter)
+ report_zip("registering: using filter %a",filter)
end
local register, n = resolvers.registerfile, 0
for i in z:files() do
@@ -253,6 +259,6 @@ function resolvers.registerzipfile(z,tree)
n = n + 1
end
end
- report_zip("registering, %s files registered",n)
+ report_zip("registering: %s files registered",n)
return files
end
diff --git a/Master/texmf-dist/tex/context/base/enco-ini.mkiv b/Master/texmf-dist/tex/context/base/enco-ini.mkiv
index a942d511096..77fcbe4839b 100644
--- a/Master/texmf-dist/tex/context/base/enco-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/enco-ini.mkiv
@@ -43,22 +43,32 @@
%D \macros
%D {defineaccent, definecharacter, definecommand}
-\unexpanded\def\dodefineaccentcommand#1%
- {\setevalue{\string#1}{\noexpand\dohandleaccent{\string#1}}}
-
-\unexpanded\def\dodefineaccent#1#2#3% no spaces, used low level
- {\setvalue{\??ac\string#1\string#2\empty}{#3}}
+\installcorenamespace{accents}
\unexpanded\def\defineaccent#1 #2 #3 %
- {\dodefineaccentcommand{#1}%
- \dodefineaccent{#1}{#2}{#3}}
-
-\def\dohandleaccent#1#2%
- {\ifcsname\??ac\string#1#2\empty\endcsname
- \csname\??ac\string#1#2\empty\endcsname
- \else\ifcsname\??ac\string#1\string#2\empty\endcsname
- \csname\??ac\string#1\string#2\empty\endcsname
- \fi\fi}
+ {\enco_define_accent_command{#1}%
+ \enco_define_accent{#1}{#2}{#3}}
+
+\unexpanded\def\enco_define_accent_command#1%
+ {\setevalue{\string#1}{\noexpand\enco_handle_accent{\string#1}}}
+
+\unexpanded\def\enco_define_accent#1#2#3% no spaces, used low level
+ {\setvalue{\??accents\string#1\string#2\empty}{#3}}
+
+\def\enco_handle_accent#1#2% expandable because we want them in the tuc file
+ {\csname\??accents
+ \ifcsname\??accents\string#1#2\empty\endcsname
+ \string#1#2\empty
+ \else\ifcsname\??accents\string#1\string#2\empty\endcsname
+ \string#1\string#2\empty
+ \else
+ \empty
+ \fi\fi
+ \endcsname}
+
+\let\dohandleaccent \enco_handle_accent % maybe useful
+\let\dodefineaccent \enco_define_accent % used at the lua end
+\let\dodefineaccentcommand\enco_define_accent_command % used at the lua end
\unexpanded\def\definecharacter#1 #2 %
{\doifnumberelse{\string#2}
@@ -77,15 +87,15 @@
%D Accent handling (try to avoid this):
-\newbox\accenttestbox % no longer global so we could use \scratchbox
+\newbox\b_enco_accent
\def\buildmathaccent#1%
{\mathaccent#1 }
\unexpanded\def\buildtextaccent#1#2% we could do all at the lua end
{\begingroup % but that's no fun (yet)
- \setbox\accenttestbox\hbox{#1}%
- \scratchcounter\cldcontext{nodes.firstcharinbox(\number\accenttestbox)}\relax
+ \setbox\b_enco_accent\hbox{#1}%
+ \scratchcounter\cldcontext{nodes.firstcharinbox(\number\b_enco_accent)}\relax
\ifcase\scratchcounter\else\accent\scratchcounter\fi
\relax#2%
\endgroup}
@@ -105,7 +115,7 @@
\hidewidth
\hskip#2\wd0
\hskip-#3\slantperpoint % in plain 1ex * dimenless value
- \vbox to .2ex{\box0\vss}\hidewidth
+ \vbox to .2\exheight{\box0\vss}\hidewidth
\crcr}}}
\unexpanded\def\buildtextmacron {\bottomaccent{.25ex}{0}{15}{\textmacron}}
@@ -128,7 +138,8 @@
\egroup
\egroup}
-\def\buildtextgrave{\topaccent{0pt}{0}{15}{\textgrave}} % e.g.
+\unexpanded\def\buildtextgrave
+ {\topaccent{0pt}{0}{15}{\textgrave}} % e.g.
\unexpanded\def\definemathaccent#1 #2%
{\setvalue{#1}{\mathaccent#2 }}
@@ -145,8 +156,8 @@
\definemathaccent hat \mathhat
\definemathaccent vec \mathvec
\definemathaccent dot \mathdot
-\definemathaccent widetilde \mathwidetilde
-\definemathaccent widehat \mathwidehat
+% \definemathaccent widetilde \mathwidetilde
+% \definemathaccent widehat \mathwidehat
% from enco-com:
@@ -224,7 +235,7 @@
% left-overs (some day in private unicode space, so that we can roundtrip)
-\unexpanded\def\textblacksquare {\dontleavehmode\hbox{\vrule\!!width.3\s!em\!!height.4\s!em\!!depth-.1\s!em}}
+\unexpanded\def\textblacksquare {\dontleavehmode\hbox{\vrule\s!width.3\s!em\s!height.4\s!em\s!depth-.1\s!em}}
\unexpanded\def\schwa {\hbox{\rotate[\c!rotation=180,\c!location=\v!high]{\hbox{e}}}}
\unexpanded\def\schwagrave {\buildtextgrave\schwa}
@@ -244,28 +255,28 @@
\let\textvisiblespace\normalcontrolspace
\unexpanded\def\fastcontrolspace % no glyph resolving after first (use grouped)
- {\dofastcontrolspace}
+ {\enco_fast_control_space}
-\def\dofastcontrolspace
+\def\enco_fast_control_space
{\iffontchar\font\textcontrolspace
- \nofastfallbackcontrolspace
+ \enco_fast_control_space_nop
\else
- \dofastfallbackcontrolspace
+ \enco_fast_control_space_yes
\fi
- \dofastcontrolspace}
+ \enco_fast_control_space}
-\newbox\controlspacebox
+\newbox\b_enco_control_space
-\def\nofastfallbackcontrolspace
- {\let\dofastcontrolspace\textcontrolspace}
+\def\enco_fast_control_space_nop
+ {\let\enco_fast_control_space\textcontrolspace}
-\def\dofastfallbackcontrolspace
- {\setbox\controlspacebox\hbox{\space}%
- \setbox\controlspacebox\hbox to \wd\controlspacebox{\hss\fallbackcontrolspace\hss}%
- \let\dofastcontrolspace\flushcontrolspacebox}
+\def\enco_fast_control_space_yes
+ {\setbox\b_enco_control_space\hbox{\space}%
+ \setbox\b_enco_control_space\hbox to \wd\b_enco_control_space{\hss\fallbackcontrolspace\hss}%
+ \let\enco_fast_control_space\flushcontrolspacebox}
\def\flushcontrolspacebox
- {\copy\controlspacebox}
+ {\copy\b_enco_control_space}
% a few defaults (\<whatever>{}), we really need the verbose \empty as it will be
% stringified .. anyhow, we define this at the lua end now but keep it here as a
@@ -289,39 +300,39 @@
% from enco-mis:
-\def\fakepercent
- {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25em/\kern-.2em\normalsubscript{\scriptscriptstyle0}}}
+\unexpanded\def\fakepercent
+ {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle0}}}
-\def\fakeperthousand
- {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25em/\kern-.2em\normalsubscript{\scriptscriptstyle00}}}
+\unexpanded\def\fakeperthousand
+ {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle00}}}
-\def\fakepermine
+\unexpanded\def\fakepermine
{\dontleavehmode
\bgroup
\setbox\scratchbox\hbox
{\mathematics{+}}%
\hbox to \wd\scratchbox
{\hss
- \mathematics{\normalsuperscript{\scriptscriptstyle-}\kern-.4em/\kern-.3em\normalsubscript{\scriptscriptstyle-}}%
+ \mathematics{\normalsuperscript{\scriptscriptstyle-}\kern-.4\emwidth/\kern-.3\emwidth\normalsubscript{\scriptscriptstyle-}}%
\hss}%
\egroup}
%D A smaller and bolder variant, more like the math and monospaced ones.
-\def\fakeunderscore
+\unexpanded\def\fakeunderscore
{\relax\ifmmode
- \vrule\!!depth .12\fontexheight\mathstylefont\normalmathstyle\!!width \fontinterwordspace\mathstylefont\normalmathstyle\!!height\zeropoint\relax
+ \vrule\s!depth .12\fontexheight\mathstylefont\normalmathstyle\s!width \fontinterwordspace\mathstylefont\normalmathstyle\s!height\zeropoint\relax
\else
- \dontleavehmode\hbox{\vrule\!!depth .12\fontexheight\font\!!width \fontinterwordspace\font\!!height\zeropoint}%
+ \dontleavehmode\hbox{\vrule\s!depth .12\fontexheight\font\s!width \fontinterwordspace\font\s!height\zeropoint}%
\fi}
-\def\fakeunderscores{\let\_\fakeunderscore}
-\def\textunderscores{\let\_\textunderscore}
+\unexpanded\def\fakeunderscores{\let\_\fakeunderscore}
+\unexpanded\def\textunderscores{\let\_\textunderscore}
\textunderscores
-\ifx\mathunderscore\undefined \let\mathunderscore\fakeunderscore \fi
-\ifx\textunderscore\undefined \let\textunderscore\fakeunderscore \fi
+\ifdefined\mathunderscore \else \let\mathunderscore\fakeunderscore \fi
+\ifdefined\textunderscore \else \let\textunderscore\fakeunderscore \fi
\unexpanded\def\normalunderscore{\ifmmode\mathunderscore\else\textunderscore\fi}
@@ -349,7 +360,7 @@
\def\periodsdefault{3} % was 5, but now it's like \unknown
\unexpanded\def\periods
- {\dosingleempty\doperiods}
+ {\dosingleempty\enco_periods}
% \def\doperiods[#1]% todo: also n=,width= or maybe just #1,#2
% {\dontleavehmode
@@ -361,7 +372,7 @@
%
% better for export:
-\unexpanded\def\doperiods[#1]% todo: also n=,width= or maybe just #1,#2
+\unexpanded\def\enco_periods[#1]% todo: also n=,width= or maybe just #1,#2
{\dontleavehmode
\hbox\bgroup
\setbox\scratchbox\hbox to \periodswidth{\hss.\hss}%
@@ -377,4 +388,8 @@
%
% Hello\fourdots\ World\fourdots \par Hello\fourdots\ World.
+\appendtoks
+ \let\buildtextaccent\secondoftwoarguments
+\to \everysimplifycommands
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/file-ini.lua b/Master/texmf-dist/tex/context/base/file-ini.lua
index 4f8b5b6e345..1872ed3d393 100644
--- a/Master/texmf-dist/tex/context/base/file-ini.lua
+++ b/Master/texmf-dist/tex/context/base/file-ini.lua
@@ -7,9 +7,8 @@ if not modules then modules = { } end modules ['file-ini'] = {
}
--[[ldx--
-<p>It's more convenient to manipulate filenames (paths) in
-<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
-at the <l n='tex'/> side.</p>
+<p>It's more convenient to manipulate filenames (paths) in <l n='lua'/> than in
+<l n='tex'/>. These methods have counterparts at the <l n='tex'/> end.</p>
--ldx]]--
resolvers.jobs = resolvers.jobs or { }
diff --git a/Master/texmf-dist/tex/context/base/file-ini.mkvi b/Master/texmf-dist/tex/context/base/file-ini.mkvi
index b0f6244aa9a..989241dd1a7 100644
--- a/Master/texmf-dist/tex/context/base/file-ini.mkvi
+++ b/Master/texmf-dist/tex/context/base/file-ini.mkvi
@@ -73,15 +73,17 @@
%D \popendofline
%D \stoptyping
+\installsystemnamespace{eolstack}
+
\newcount\c_system_files_eol_level
\unexpanded\def\pushendofline
{\advance\c_system_files_eol_level\plusone
- \expandafter\chardef\csname\??fi:eol:\number\c_system_files_eol_level\endcsname\catcode\endoflineasciicode
+ \expandafter\chardef\csname\??eolstack\number\c_system_files_eol_level\endcsname\catcode\endoflineasciicode
\catcode\endoflineasciicode\commentcatcode}
\unexpanded\def\popendofline
- {\catcode\endoflineasciicode\csname\??fi:eol:\number\c_system_files_eol_level\endcsname
+ {\catcode\endoflineasciicode\csname\??eolstack\number\c_system_files_eol_level\endcsname
\advance\c_system_files_eol_level\minusone}
\unexpanded\def\restoreendofline
@@ -101,11 +103,11 @@
\the\everystartreadingfile
\pushcatcodetable % saveguard
\setcatcodetable\ctxcatcodes
- \ctxlua{regimes.push()}}% temporarily this way
+ \ctxcommand{pushregime()}}% temporarily this way
\unexpanded\def\stopreadingfile
{\popcatcodetable % saveguard
- \ctxlua{regimes.pop()}% temporarily this way
+ \ctxcommand{popregime()}% temporarily this way
\the\everystopreadingfile
\global\advance\readingfilelevel\minusone}
@@ -203,11 +205,13 @@
%D This command obeys the standard method for locating files. We could
%D move this function to the \LUA\ end.
+\installsystemnamespace {fileonce}
+
\unexpanded\def\doonlyonce#whatever%
- {\ifcsname\??fi:#whatever\endcsname
+ {\ifcsname\??fileonce#whatever\endcsname
\expandafter\gobbleoneargument
\else
- \letgvalue{\??fi:#whatever}\relax
+ \letgvalue{\??fileonce#whatever}\relax
\expandafter\firstofoneargument
\fi}
@@ -215,11 +219,11 @@
{\doonlyonce{#name}{\doiffileelse{#name}{\inputgivenfile{#name}}\donothing}}
\unexpanded\def\doendinputonce#name%
- {\ifcsname\??fi:#name\endcsname
+ {\ifcsname\??fileonce#name\endcsname
\expandafter\endinput
\fi}
\unexpanded\def\forgetdoingonce#whatever%
- {\global\letbeundefined{\??fi:#whatever}}
+ {\global\letbeundefined{\??fileonce#whatever}}
\protect \endinput
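The \doonlyonce / \doendinputonce / \forgetdoingonce trio above keys one control sequence per file name in the new ??fileonce namespace, so a file is acted upon only the first time it is asked for. A minimal standalone Lua sketch of the same once-only idea (the names loaded, doonlyonce and forgetdoingonce are illustrative, not ConTeXt API):

    -- once-only guard: the first call for a given name runs the action,
    -- later calls are skipped until the guard is forgotten again
    local loaded = { }

    local function doonlyonce(name, action)
        if not loaded[name] then
            loaded[name] = true
            action(name)
        end
    end

    local function forgetdoingonce(name)
        loaded[name] = nil
    end

    doonlyonce("setup.tex", print) -- runs
    doonlyonce("setup.tex", print) -- skipped
    forgetdoingonce("setup.tex")
    doonlyonce("setup.tex", print) -- runs again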
diff --git a/Master/texmf-dist/tex/context/base/file-job.lua b/Master/texmf-dist/tex/context/base/file-job.lua
index 992e4b7ec70..ea4623d7724 100644
--- a/Master/texmf-dist/tex/context/base/file-job.lua
+++ b/Master/texmf-dist/tex/context/base/file-job.lua
@@ -9,8 +9,11 @@ if not modules then modules = { } end modules ['file-job'] = {
-- in retrospect it's not that bad to deal with the nesting
-- and push/popping at the tex end
-local format, gsub, match = string.format, string.gsub, string.match
+local gsub, match, find = string.gsub, string.match, string.find
local insert, remove, concat = table.insert, table.remove, table.concat
+local validstring = string.valid
+local sortedhash = table.sortedhash
+local formatters = string.formatters
local commands, resolvers, context = commands, resolvers, context
@@ -18,25 +21,40 @@ local trace_jobfiles = false trackers.register("system.jobfiles", function(v)
local report_jobfiles = logs.reporter("system","jobfiles")
-local texsetcount = tex.setcount
-local elements = interfaces.elements
-local variables = interfaces.variables
-local logsnewline = logs.newline
-local logspushtarget = logs.pushtarget
-local logspoptarget = logs.poptarget
-
-local v_outer = variables.outer
-local v_text = variables.text
-local v_project = variables.project
-local v_environment = variables.environment
-local v_product = variables.product
-local v_component = variables.component
-local c_prefix = variables.prefix
+local texsetcount = tex.setcount
+local elements = interfaces.elements
+local constants = interfaces.constants
+local variables = interfaces.variables
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
+
+local nameonly = file.nameonly
+local suffixonly = file.suffix
+local basename = file.basename
+local addsuffix = file.addsuffix
+local removesuffix = file.removesuffix
+local dirname = file.dirname
+local joinpath = file.join
+local is_qualified_path = file.is_qualified_path
+
+local cleanpath = resolvers.cleanpath
+local inputstack = resolvers.inputstack
+
+local v_outer = variables.outer
+local v_text = variables.text
+local v_project = variables.project
+local v_environment = variables.environment
+local v_product = variables.product
+local v_component = variables.component
+local c_prefix = variables.prefix
-- main code .. there is some overlap .. here we have loc://
local function findctxfile(name) -- loc ? any ?
- if file.is_qualified_path(name) then -- maybe when no suffix do some test for tex
+ if is_qualified_path(name) then -- maybe when no suffix do some test for tex
return name
elseif not url.hasscheme(name) then
return resolvers.finders.byscheme("loc",name) or ""
@@ -59,7 +77,7 @@ function commands.doifinputfileelse(name)
end
function commands.locatefilepath(name)
- context(file.dirname(findctxfile(name)))
+ context(dirname(findctxfile(name)))
end
function commands.usepath(paths)
@@ -74,39 +92,24 @@ function commands.allinputpaths()
context(concat(resolvers.instance.extra_paths or { },","))
end
+function commands.setdocumentfilenames()
+ environment.initializefilenames()
+end
+
function commands.usezipfile(name,tree)
if tree and tree ~= "" then
- resolvers.usezipfile(format("zip:///%s?tree=%s",name,tree))
+ resolvers.usezipfile(formatters["zip:///%s?tree=%s"](name,tree))
else
- resolvers.usezipfile(format("zip:///%s",name))
+ resolvers.usezipfile(formatters["zip:///%s"](name))
end
end
-local report_system = logs.reporter("system","options")
-local report_options = logs.reporter("used options")
-
-function commands.copyfiletolog(name)
- local f = io.open(name)
- if f then
- logspushtarget("logfile")
- logsnewline()
- report_system("start used options")
- logsnewline()
- for line in f:lines() do
- report_options(line)
- end
- logsnewline()
- report_system("stop used options")
- logsnewline()
- logspoptarget()
- f:close()
- end
-end
+local report_system = logs.reporter("system")
-- moved from tex to lua:
local texpatterns = { "%s.mkvi", "%s.mkiv", "%s.tex" }
-local luapatterns = { "%s.luc", "%s.lua" }
+local luapatterns = { "%s" .. utilities.lua.suffixes.luc, "%s.lua" }
local cldpatterns = { "%s.cld" }
local xmlpatterns = { "%s.xml" }
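Each file type above gets a list of name patterns that are formatted with the asked-for name until the resolver finds a candidate; the luc pattern is now derived from utilities.lua.suffixes so byte-compiled files keep working if that suffix changes. A reduced sketch of the resolution loop, with a fake findfile standing in for the real resolver:

    local texpatterns = { "%s.mkvi", "%s.mkiv", "%s.tex" }

    local available = { ["s-abr-01.tex"] = true } -- pretend only this file exists
    local function findfile(name)
        return available[name] and name or ""
    end

    local function resolve(name, patterns)
        for i = 1, #patterns do
            local candidate = string.format(patterns[i], name)
            if findfile(candidate) ~= "" then
                return candidate
            end
        end
        return ""
    end

    print(resolve("s-abr-01", texpatterns)) --> s-abr-01.tex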
@@ -141,7 +144,7 @@ end
local function startprocessing(name,notext)
if not notext then
- -- report_system("begin file %s at line %s",name,status.linenumber or 0)
+ -- report_system("begin file %a at line %a",name,status.linenumber or 0)
context.dostarttextfile(name)
end
end
@@ -149,14 +152,14 @@ end
local function stopprocessing(notext)
if not notext then
context.dostoptextfile()
- -- report_system("end file %s at line %s",name,status.linenumber or 0)
+ -- report_system("end file %a at line %a",name,status.linenumber or 0)
end
end
--
local action = function(name,foundname) input(foundname) end
-local failure = function(name,foundname) end
+local failure = function(name,foundname) report_jobfiles("unknown %s file %a","tex",name) end
local function usetexfile(name,onlyonce,notext)
startprocessing(name,notext)
@@ -171,7 +174,7 @@ local function usetexfile(name,onlyonce,notext)
end
local action = function(name,foundname) dofile(foundname) end
-local failure = function(name,foundname) end
+local failure = function(name,foundname) report_jobfiles("unknown %s file %a","lua",name) end
local function useluafile(name,onlyonce,notext)
uselibrary {
@@ -184,7 +187,7 @@ local function useluafile(name,onlyonce,notext)
end
local action = function(name,foundname) dofile(foundname) end
-local failure = function(name,foundname) end
+local failure = function(name,foundname) report_jobfiles("unknown %s file %a","cld",name) end
local function usecldfile(name,onlyonce,notext)
startprocessing(name,notext)
@@ -199,7 +202,7 @@ local function usecldfile(name,onlyonce,notext)
end
local action = function(name,foundname) context.xmlprocess(foundname,"main","") end
-local failure = function(name,foundname) end
+local failure = function(name,foundname) report_jobfiles("unknown %s file %a","xml",name) end
local function usexmlfile(name,onlyonce,notext)
startprocessing(name,notext)
@@ -232,7 +235,7 @@ local suffixes = {
local function useanyfile(name,onlyonce)
local s = suffixes[file.suffix(name)]
if s then
- s(file.removesuffix(name),onlyonce)
+ s(removesuffix(name),onlyonce)
else
usetexfile(name,onlyonce) -- e.g. ctx file
--~ resolvers.readfilename(name)
@@ -244,14 +247,12 @@ commands.useanyfile = useanyfile
function resolvers.jobs.usefile(name,onlyonce,notext)
local s = suffixes[file.suffix(name)]
if s then
- s(file.removesuffix(name),onlyonce,notext)
+ s(removesuffix(name),onlyonce,notext)
end
end
-- document structure
-local report_system = logs.reporter("system")
-
local textlevel = 0 -- inaccessible for user, we need to define counter textlevel at the tex end
local function dummyfunction() end
@@ -361,8 +362,8 @@ local stacks = {
--
-local report_system = logs.reporter("system","structure")
-local report_structure = logs.reporter("used structure")
+local report_structures = logs.reporter("system","structure")
+local report_structure = logs.reporter("used structure")
local function pushtree(what,name)
local t = { }
@@ -387,20 +388,18 @@ local function log_tree(top,depth)
end
end
-local function logtree()
+luatex.registerstopactions(function()
logspushtarget("logfile")
logsnewline()
- report_system("start used structure")
+ report_structures("start used structure")
logsnewline()
root.name = environment.jobname
log_tree(root,"")
logsnewline()
- report_system("stop used structure")
+ report_structures("stop used structure")
logsnewline()
logspoptarget()
-end
-
-luatex.registerstopactions(logtree)
+end)
job.structure = job.structure or { }
job.structure.collected = job.structure.collected or { }
@@ -429,48 +428,52 @@ job.register('job.structure.collected',root,initialize)
-- component: small unit, either or not components itself
-- product : combination of components
+local context_processfilemany = context.processfilemany
+local context_processfileonce = context.processfileonce
+local context_processfilenone = context.processfilenone
+
local processors = utilities.storage.allocate {
-- [v_outer] = {
- -- [v_text] = { "many", context.processfilemany },
- -- [v_project] = { "once", context.processfileonce },
- -- [v_environment] = { "once", context.processfileonce },
- -- [v_product] = { "many", context.processfileonce },
- -- [v_component] = { "many", context.processfilemany },
+ -- [v_text] = { "many", context_processfilemany },
+ -- [v_project] = { "once", context_processfileonce },
+ -- [v_environment] = { "once", context_processfileonce },
+ -- [v_product] = { "once", context_processfileonce },
+ -- [v_component] = { "many", context_processfilemany },
-- },
[v_text] = {
- [v_text] = { "many", context.processfilemany },
- [v_project] = { "none", context.processfileonce }, -- none
- [v_environment] = { "once", context.processfileonce }, -- once
- [v_product] = { "none", context.processfileonce }, -- none
- [v_component] = { "many", context.processfilemany }, -- many
+ [v_text] = { "many", context_processfilemany },
+ [v_project] = { "once", context_processfileonce }, -- dubious
+ [v_environment] = { "once", context_processfileonce },
+ [v_product] = { "many", context_processfilemany }, -- dubious
+ [v_component] = { "many", context_processfilemany },
},
[v_project] = {
- [v_text] = { "many", context.processfilemany },
- [v_project] = { "none", context.processfilenone }, -- none
- [v_environment] = { "once", context.processfileonce }, -- once
- [v_product] = { "once", context.processfilenone }, -- once
- [v_component] = { "none", context.processfilenone }, -- many *
+ [v_text] = { "many", context_processfilemany },
+ [v_project] = { "none", context_processfilenone },
+ [v_environment] = { "once", context_processfileonce },
+ [v_product] = { "none", context_processfilenone },
+ [v_component] = { "none", context_processfilenone },
},
[v_environment] = {
- [v_text] = { "many", context.processfilemany },
- [v_project] = { "none", context.processfilenone }, -- none
- [v_environment] = { "once", context.processfileonce }, -- once
- [v_product] = { "none", context.processfilenone }, -- none
- [v_component] = { "none", context.processfilenone }, -- none
+ [v_text] = { "many", context_processfilemany },
+ [v_project] = { "none", context_processfilenone },
+ [v_environment] = { "once", context_processfileonce },
+ [v_product] = { "none", context_processfilenone },
+ [v_component] = { "none", context_processfilenone },
},
[v_product] = {
- [v_text] = { "many", context.processfilemany },
- [v_project] = { "once", context.processfileonce }, -- once
- [v_environment] = { "once", context.processfileonce }, -- once
- [v_product] = { "none", context.processfilemany }, -- none
- [v_component] = { "many", context.processfilemany }, -- many
+ [v_text] = { "many", context_processfilemany },
+ [v_project] = { "once", context_processfileonce },
+ [v_environment] = { "once", context_processfileonce },
+ [v_product] = { "many", context_processfilemany },
+ [v_component] = { "many", context_processfilemany },
},
[v_component] = {
- [v_text] = { "many", context.processfilemany },
- [v_project] = { "once", context.processfileonce }, -- once
- [v_environment] = { "once", context.processfileonce }, -- once
- [v_product] = { "none", context.processfilenone }, -- none
- [v_component] = { "many", context.processfilemany }, -- many
+ [v_text] = { "many", context_processfilemany },
+ [v_project] = { "once", context_processfileonce },
+ [v_environment] = { "once", context_processfileonce },
+ [v_product] = { "none", context_processfilenone },
+ [v_component] = { "many", context_processfilemany },
}
}
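The processors table maps the current structure level to a processing method ("many", "once" or "none") plus a handler for each kind of nested element; process() then looks up processors[currenttype][what]. A reduced sketch of that dispatch, with made-up handlers instead of the real context.processfile* callbacks:

    local function many(name) print("processed every time:", name) end
    local function once(name) print("processed once:", name) end
    local function none(name) end -- ignored at this level

    local processors = {
        product = {
            component   = { "many", many },
            environment = { "once", once },
        },
        component = {
            product     = { "none", none },
            environment = { "once", once },
        },
    }

    local done = { }

    local function process(currenttype, what, name)
        local entry = processors[currenttype] and processors[currenttype][what]
        if not entry then return end
        local method, handler = entry[1], entry[2]
        if method == "none" then
            return
        elseif method == "once" and done[name] then
            return
        end
        done[name] = true
        handler(name)
    end

    process("product", "component",   "chapter-one") -- many: processed
    process("product", "environment", "env-layout")  -- once: processed
    process("product", "environment", "env-layout")  -- once: skipped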
@@ -531,6 +534,9 @@ local tolerant = false -- too messy, mkii user with the wrong sructure should ad
local function process(what,name)
local depth = #typestack
local process
+ --
+ name = resolvers.resolve(name)
+ --
-- if not tolerant then
-- okay, would be best but not compatible with mkii
process = processors[currenttype][what]
@@ -553,11 +559,11 @@ local function process(what,name)
local method = process[1]
if method == "none" then
if trace_jobfiles then
- report_jobfiles("%s : %s : ignoring %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"ignoring",what,name,currenttype,topofstack(currenttype))
end
elseif method == "once" and done[name] then
if trace_jobfiles then
- report_jobfiles("%s : %s : skipping %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"skipping",what,name,currenttype,topofstack(currenttype))
end
else
-- keep in mind that we also handle "once" at the file level
@@ -566,7 +572,7 @@ local function process(what,name)
local before = start[what]
local after = stop [what]
if trace_jobfiles then
- report_jobfiles("%s : %s : processing %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"processing",what,name,currenttype,topofstack(currenttype))
end
if before then
before()
@@ -578,7 +584,7 @@ local function process(what,name)
end
else
if trace_jobfiles then
- report_jobfiles("%s : ? : ignoring %s '%s' in %s '%s'",depth,what,name,currenttype,topofstack(currenttype))
+ report_jobfiles("%s : %s : %s %s %a in %s %a",depth,"none","ignoring",what,name,currenttype,topofstack(currenttype))
end
end
end
@@ -610,7 +616,7 @@ local function gotonextlevel(what,name) -- todo: something with suffix name
insert(typestack,currenttype)
insert(pathstack,currentpath)
currenttype = what
- currentpath = file.dirname(name)
+ currentpath = dirname(name)
pushtree(what,name)
if start[what] then
start[what]()
@@ -629,10 +635,17 @@ local function gotopreviouslevel(what)
context.signalendofinput(what)
end
-function commands.startproject (name) gotonextlevel(v_project, name) end
-function commands.startproduct (name) gotonextlevel(v_product, name) end
-function commands.startcomponent (name) gotonextlevel(v_component, name) end
-function commands.startenvironment(name) gotonextlevel(v_environment,name) end
+local function autoname(name)
+ if name == "*" then
+ name = nameonly(inputstack[#inputstack] or name)
+ end
+ return name
+end
+
+function commands.startproject (name) gotonextlevel(v_project, autoname(name)) end
+function commands.startproduct (name) gotonextlevel(v_product, autoname(name)) end
+function commands.startcomponent (name) gotonextlevel(v_component, autoname(name)) end
+function commands.startenvironment(name) gotonextlevel(v_environment,autoname(name)) end
function commands.stopproject () gotopreviouslevel(v_project ) end
function commands.stopproduct () gotopreviouslevel(v_product ) end
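autoname lets \startproject, \startproduct, \startcomponent and \startenvironment accept * as name: the * is replaced by the name of the file currently being read, taken from the top of the input stack. A standalone sketch of that fallback (inputstack and nameonly are simplified stand-ins for resolvers.inputstack and file.nameonly):

    local function nameonly(path)
        return (path:gsub("^.*/", ""):gsub("%.%w+$", ""))
    end

    local inputstack = { "doc/project.tex", "doc/chapters/intro.mkvi" }

    local function autoname(name)
        if name == "*" then
            -- use the innermost file on the input stack instead of a literal *
            name = nameonly(inputstack[#inputstack] or name)
        end
        return name
    end

    print(autoname("myproduct")) --> myproduct
    print(autoname("*"))         --> intro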
@@ -664,7 +677,7 @@ local function convertexamodes(str)
local data = xml.text(e)
local mode = match(label,"^mode:(.+)$")
if mode then
- context.enablemode { format("%s:%s",mode,data) }
+ context.enablemode { formatters["%s:%s"](mode,data) }
end
context.setvariable("exa:variables",label,(gsub(data,"([{}])","\\%1")))
end
@@ -673,13 +686,311 @@ end
function commands.loadexamodes(filename)
if not filename or filename == "" then
- filename = file.removesuffix(tex.jobname)
+ filename = removesuffix(tex.jobname)
end
- filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or ""
+ filename = resolvers.findfile(addsuffix(filename,'ctm')) or ""
if filename ~= "" then
- report_examodes("loading %s",filename) -- todo: message system
+ report_examodes("loading %a",filename) -- todo: message system
convertexamodes(io.loaddata(filename))
else
- report_examodes("no mode file %s",filename) -- todo: message system
+ report_examodes("no mode file %a",filename) -- todo: message system
+ end
+end
+
+-- changed in mtx-context
+-- code moved from luat-ini
+
+-- todo: locals when mtx-context is changed
+
+document = document or {
+ arguments = allocate(),
+ files = allocate(),
+ variables = allocate(), -- for templates
+ options = {
+ commandline = {
+ environments = allocate(),
+ modules = allocate(),
+ modes = allocate(),
+ },
+ ctxfile = {
+ environments = allocate(),
+ modules = allocate(),
+ modes = allocate(),
+ },
+ },
+}
+
+function document.setargument(key,value)
+ document.arguments[key] = value
+end
+
+function document.setdefaultargument(key,default)
+ local v = document.arguments[key]
+ if v == nil or v == "" then
+ document.arguments[key] = default
+ end
+end
+
+function document.setfilename(i,name)
+ if name then
+ document.files[tonumber(i)] = name
+ else
+ document.files[#document.files+1] = tostring(i)
+ end
+end
+
+function document.getargument(key,default) -- commands
+ local v = document.arguments[key]
+ if type(v) == "boolean" then
+ v = (v and "yes") or "no"
+ document.arguments[key] = v
+ end
+ context(v or default or "")
+end
+
+function document.getfilename(i) -- commands
+ context(document.files[tonumber(i)] or "")
+end
+
+function commands.getcommandline() -- has to happen at the tex end in order to expand
+
+ -- the document[arguments|files] tables are copies
+
+ local arguments = document.arguments
+ local files = document.files
+ local options = document.options
+
+ for k, v in next, environment.arguments do
+ k = gsub(k,"^c:","") -- already done, but better be safe than sorry
+ if arguments[k] == nil then
+ arguments[k] = v
+ end
+ end
+
+ -- in the new mtx-context approach we always pass a stub file so we need
+ -- to trick the files table which actually only has one entry in a tex job
+
+ if arguments.timing then
+ context.usemodule("timing")
+ end
+
+ if arguments.batchmode then
+ context.batchmode(false)
+ end
+
+ if arguments.nonstopmode then
+ context.nonstopmode(false)
+ end
+
+ if arguments.nostatistics then
+ directives.enable("system.nostatistics")
+ end
+
+ if arguments.paranoid then
+ context.setvalue("maxreadlevel",1)
+ end
+
+ if validstring(arguments.path) then
+ context.usepath { arguments.path }
+ end
+
+ local inputfile = validstring(arguments.input)
+
+ if inputfile and dirname(inputfile) == "." and lfs.isfile(inputfile) then
+ -- nicer in checks
+ inputfile = basename(inputfile)
+ end
+
+ local kindofrun = arguments.kindofrun
+ local currentrun = arguments.currentrun
+ local maxnofruns = arguments.maxnofruns
+
+ context.setupsystem {
+ [constants.directory] = validstring(arguments.setuppath),
+ [constants.inputfile] = inputfile,
+ [constants.file] = validstring(arguments.result),
+ [constants.random] = validstring(arguments.randomseed),
+ -- old:
+ [constants.n] = validstring(kindofrun),
+ [constants.m] = validstring(currentrun),
+ }
+
+ environment.kindofrun = tonumber(kindofrun) or 0
+ environment.maxnofruns = tonumber(maxnofruns) or 0
+ environment.currentrun = tonumber(currentrun) or 0
+
+ if validstring(arguments.arguments) then
+ context.setupenv { arguments.arguments }
+ end
+
+ if arguments.once then
+ directives.enable("system.runonce")
+ end
+
+ if arguments.noarrange then
+ context.setuparranging { variables.disable }
+ end
+
+ --
+
+ local commandline = options.commandline
+
+ commandline.environments = table.append(commandline.environments,settings_to_array(validstring(arguments.environment)))
+ commandline.modules = table.append(commandline.modules, settings_to_array(validstring(arguments.usemodule)))
+ commandline.modes = table.append(commandline.modes, settings_to_array(validstring(arguments.mode)))
+
+ --
+
+ if #files == 0 then
+ local list = settings_to_array(validstring(arguments.files))
+ if list and #list > 0 then
+ files = list
+ end
+ end
+
+ if #files == 0 then
+ files = { validstring(arguments.input) }
+ end
+
+ --
+
+ document.arguments = arguments
+ document.files = files
+
+end
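commands.getcommandline merges whatever mtx-context passed on the command line into document.arguments without overwriting values that were set earlier, and falls back to the --files list or the --input file when no files were registered. A small sketch of just the merging step (the c: prefix handling mirrors the gsub above; the sample keys are illustrative):

    local arguments = { mode = "screen" }  -- set earlier, must win
    local cliarguments = {                 -- what the runner passed
        ["c:mode"]   = "paper",
        ["c:result"] = "final",
        once         = true,
    }

    for k, v in pairs(cliarguments) do
        k = k:gsub("^c:", "")       -- strip the c: namespace, as in the real code
        if arguments[k] == nil then -- earlier settings win over the commandline
            arguments[k] = v
        end
    end

    for k, v in pairs(arguments) do
        print(k, v) -- mode screen / result final / once true
    end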
+
+-- commandline wins over ctxfile
+
+local function apply(list,action)
+ if list then
+ for i=1,#list do
+ action { list[i] }
+ end
+ end
+end
+
+function commands.setdocumentmodes() -- was setup: *runtime:modes
+ apply(document.options.ctxfile .modes,context.enablemode)
+ apply(document.options.commandline.modes,context.enablemode)
+end
+
+function commands.setdocumentmodules() -- was setup: *runtime:modules
+ apply(document.options.ctxfile .modules,context.usemodule)
+ apply(document.options.commandline.modules,context.usemodule)
+end
+
+function commands.setdocumentenvironments() -- was setup: *runtime:environments
+ apply(document.options.ctxfile .environments,context.environment)
+ apply(document.options.commandline.environments,context.environment)
+end
+
+local report_files = logs.reporter("system","files")
+local report_options = logs.reporter("system","options")
+local report_file = logs.reporter("used file")
+local report_option = logs.reporter("used option")
+
+luatex.registerstopactions(function()
+ local foundintrees = resolvers.instance.foundintrees
+ if #foundintrees > 0 then
+ logspushtarget("logfile")
+ logsnewline()
+ report_files("start used files")
+ logsnewline()
+ for i=1,#foundintrees do
+ report_file("%4i: % T",i,foundintrees[i])
+ end
+ logsnewline()
+ report_files("stop used files")
+ logsnewline()
+ logspoptarget()
+ end
+end)
+
+luatex.registerstopactions(function()
+ local files = document.files -- or environment.files
+ local arguments = document.arguments -- or environment.arguments
+ --
+ logspushtarget("logfile")
+ logsnewline()
+ report_options("start commandline options")
+ logsnewline()
+ for argument, value in sortedhash(arguments) do
+ report_option("%s=%A",argument,value)
+ end
+ logsnewline()
+ report_options("stop commandline options")
+ logsnewline()
+ report_options("start commandline files")
+ logsnewline()
+ for i=1,#files do
+ report_file("% 4i: %s",i,files[i])
+ end
+ logsnewline()
+ report_options("stop commandline files")
+ logsnewline()
+ logspoptarget()
+end)
+
+if environment.initex then
+
+ local report_storage = logs.reporter("system","storage")
+ local report_table = logs.reporter("stored table")
+ local report_module = logs.reporter("stored module")
+ local report_attribute = logs.reporter("stored attribute")
+ local report_catcodetable = logs.reporter("stored catcodetable")
+ local report_corenamespace = logs.reporter("stored corenamespace")
+
+ luatex.registerstopactions(function()
+ logspushtarget("logfile")
+ logsnewline()
+ report_storage("start stored tables")
+ logsnewline()
+ for k,v in sortedhash(storage.data) do
+ report_table("%03i %s",k,v[1])
+ end
+ logsnewline()
+ report_storage("stop stored tables")
+ logsnewline()
+ report_storage("start stored modules")
+ logsnewline()
+ for k,v in sortedhash(lua.bytedata) do
+ report_module("%03i %s %s",k,v[2],v[1])
+ end
+ logsnewline()
+ report_storage("stop stored modules")
+ logsnewline()
+ report_storage("start stored attributes")
+ logsnewline()
+ for k,v in sortedhash(attributes.names) do
+ report_attribute("%03i %s",k,v)
+ end
+ logsnewline()
+ report_storage("stop stored attributes")
+ logsnewline()
+ report_storage("start stored catcodetables")
+ logsnewline()
+ for k,v in sortedhash(catcodes.names) do
+ report_catcodetable("%03i % t",k,v)
+ end
+ logsnewline()
+ report_storage("stop stored catcodetables")
+ logsnewline()
+ report_storage("start stored corenamespaces")
+ for k,v in sortedhash(interfaces.corenamespaces) do
+ report_corenamespace("%03i %s",k,v)
+ end
+ logsnewline()
+ report_storage("stop stored corenamespaces")
+ logsnewline()
+ logspoptarget()
+ end)
+
+end
+
+function commands.doifelsecontinuewithfile(inpname)
+ local continue = addsuffix(inpname,"tex") == addsuffix(environment.inputfilename,"tex")
+ if continue then
+ report_system("continuing input file %a",inpname)
end
+ commands.doifelse(continue)
end
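commands.doifelsecontinuewithfile signals true only when the name passed from \continueifinputfile matches the file the job was actually started with, so test or demo code appended to a module runs only when the module itself is the input file. A standalone sketch of the comparison (addsuffix here is a simplified stand-in for file.addsuffix):

    local function addsuffix(name, suffix)
        if name:find("%.%w+$") then
            return name
        end
        return name .. "." .. suffix
    end

    local inputfilename = "m-fancybreak.mkvi" -- the file the run started with

    local function continuewithfile(inpname)
        return addsuffix(inpname, "tex") == addsuffix(inputfilename, "tex")
    end

    print(continuewithfile("m-fancybreak.mkvi")) --> true  (appended demo runs)
    print(continuewithfile("somedocument.tex"))  --> false (module was merely loaded)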
diff --git a/Master/texmf-dist/tex/context/base/file-job.mkvi b/Master/texmf-dist/tex/context/base/file-job.mkvi
index 112400cbd7f..3434e06f00e 100644
--- a/Master/texmf-dist/tex/context/base/file-job.mkvi
+++ b/Master/texmf-dist/tex/context/base/file-job.mkvi
@@ -75,15 +75,18 @@
\def\syst_files_load#name% only mkiv files
{\readsysfile{#name.\mksuffix}{\showmessage\m!system2{#name.\mksuffix}}\donothing}
-\unexpanded\def\loadoptionfile
- {\readjobfile{\jobname.\f!optionextension}
- {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
- \ctxcommand{copyfiletolog("\jobname.\f!optionextension")}}%
- {\writestatus\m!system{no \jobname.\f!optionextension}}}
+% obsolete, but we keep it as a reference of what happened
+%
+% \unexpanded\def\loadoptionfile
+% {\readjobfile{\jobname.\f!optionextension}
+% {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
+% \ctxcommand{copyfiletolog("\jobname.\f!optionextension")}}%
+% {\writestatus\m!system{no \jobname.\f!optionextension}}}
% document structure
-\ifdefined\textlevel\else \newcount\textlevel \fi % might go away
+\ifdefined\textlevel \else \newcount\textlevel \fi % might go away
+\ifdefined\strc_pagenumbers_flush_final_page \else \let\strc_pagenumbers_flush_final_page\relax \fi % ugly
\unexpanded\def\dostarttext
{\glet\dostarttext\relax
@@ -92,8 +95,8 @@
\unexpanded\def\dostoptext
{\glet\dostoptext\relax
- \flushfinallayoutpage % optional
- \page % anyway
+ \strc_pagenumbers_flush_final_page
+ \page % hm, bonus
\the\everystoptext
\global\everystoptext\emptytoks
\the\everybye
@@ -110,6 +113,8 @@
\unexpanded\def\autostarttext{\ctxcommand{autostarttext()}}
\unexpanded\def\autostoptext {\ctxcommand{autostoptext()}}
+\unexpanded\def\finishjob{\stoptext} % nicer in a luatex commandline call
+
\newtoks\everystartnotext
\newtoks\everystopnotext
@@ -190,8 +195,7 @@
%D Handy for modules that have a test/demo appended.
-\def\continueifinputfile#name{\doifnot\inputfilename{#name}\endinput}
-%def\processifinputfile #name{\doif \inputfilename{#name}}
+\unexpanded\def\continueifinputfile#name{\ctxcommand{doifelsecontinuewithfile("#name")}\relax\endinput} % we cannot do \endinput via lua
% \startproject test
% 1: \startmode[*project] project \stopmode \endgraf
@@ -290,6 +294,8 @@
{\setvariables[\s!document][#settings]%
\the\everysetupdocument\relax}
+% metadata:author metadata:title metadata:subject
+
\setvariables
[document]
[\c!before=\directsetup{\s!document:start},
diff --git a/Master/texmf-dist/tex/context/base/file-lib.lua b/Master/texmf-dist/tex/context/base/file-lib.lua
index 92dacbf5cc6..3311321c521 100644
--- a/Master/texmf-dist/tex/context/base/file-lib.lua
+++ b/Master/texmf-dist/tex/context/base/file-lib.lua
@@ -18,11 +18,11 @@ local loaded = { }
local defaultpatterns = { "%s" }
local function defaultaction(name,foundname)
- report_files("asked name: '%s', found name: '%s'",name,foundname)
+ report_files("asked name %a, found name %a",name,foundname)
end
local function defaultfailure(name)
- report_files("asked name: '%s', not found",name)
+ report_files("asked name %a, not found",name)
end
function commands.uselibrary(specification) -- todo; reporter
@@ -33,6 +33,7 @@ function commands.uselibrary(specification) -- todo; reporter
local failure = specification.failure or defaultfailure
local onlyonce = specification.onlyonce
local files = utilities.parsers.settings_to_array(name)
+ local truename = environment.truefilename
local done = false
for i=1,#files do
local filename = files[i]
@@ -42,9 +43,9 @@ function commands.uselibrary(specification) -- todo; reporter
end
for i=1,#patterns do
local somename = format(patterns[i],filename)
-if environment.truefilename then
- somename = environment.truefilename(somename)
-end
+ if truename then
+ somename = truename(somename)
+ end
local foundname = resolvers.getreadfilename("any",".",somename) or ""
if foundname ~= "" then
action(name,foundname)
diff --git a/Master/texmf-dist/tex/context/base/file-mod.lua b/Master/texmf-dist/tex/context/base/file-mod.lua
index e6592f0f062..3659d308962 100644
--- a/Master/texmf-dist/tex/context/base/file-mod.lua
+++ b/Master/texmf-dist/tex/context/base/file-mod.lua
@@ -20,14 +20,15 @@ at the <l n='tex'/> side.</p>
local format, concat, tonumber = string.format, table.concat, tonumber
-local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
+local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
local report_modules = logs.reporter("resolvers","modules")
-commands = commands or { }
-local commands = commands
+commands = commands or { }
+local commands = commands
-local findbyscheme = resolvers.finders.byscheme -- use different one
+local findbyscheme = resolvers.finders.byscheme -- use different one
+local iterator = utilities.parsers.iterator
-- modules can have a specific suffix or can specify one
@@ -42,19 +43,19 @@ local function usemodule(name,hasscheme)
-- so we only add one if missing
local fullname = file.addsuffix(name,"tex")
if trace_modules then
- report_modules("checking url: '%s'",fullname)
+ report_modules("checking url %a",fullname)
end
foundname = resolvers.findtexfile(fullname) or ""
- elseif file.extname(name) ~= "" then
+ elseif file.suffix(name) ~= "" then
if trace_modules then
- report_modules("checking file: '%s'",name)
+ report_modules("checking file %a",name)
end
foundname = findbyscheme("any",name) or ""
else
for i=1,#suffixes do
local fullname = file.addsuffix(name,suffixes[i])
if trace_modules then
- report_modules("checking file: '%s'",fullname)
+ report_modules("checking file %a",fullname)
end
foundname = findbyscheme("any",fullname) or ""
if foundname ~= "" then
@@ -64,7 +65,7 @@ local function usemodule(name,hasscheme)
end
if foundname ~= "" then
if trace_modules then
- report_modules("loading: '%s'",foundname)
+ report_modules("loading file %a",foundname)
end
context.startreadingfile()
resolvers.jobs.usefile(foundname,true) -- once, notext
@@ -87,7 +88,7 @@ function commands.usemodules(prefix,askedname,truename)
status = status + 1
else
if trace_modules then
- report_modules("locating: prefix: '%s', askedname: '%s', truename: '%s'",prefix or "", askedname or "", truename or "")
+ report_modules("locating, prefix %a, askedname %a, truename %a",prefix,askedname,truename)
end
local hasscheme = url.hasscheme(truename)
if hasscheme then
@@ -122,11 +123,11 @@ function commands.usemodules(prefix,askedname,truename)
end
end
if status == 0 then
- report_modules("not found: '%s'",askedname)
+ report_modules("%a is not found",askedname)
elseif status == 1 then
- report_modules("loaded: '%s'",trace_modules and truename or askedname)
+ report_modules("%a is loaded",trace_modules and truename or askedname)
else
- report_modules("already loaded: '%s'",trace_modules and truename or askedname)
+ report_modules("%a is already loaded",trace_modules and truename or askedname)
end
modstatus[hashname] = status
end
@@ -144,9 +145,13 @@ statistics.register("loaded tex modules", function()
t[nt] = k
end
end
- local ts = (nt>0 and format(" (%s)",concat(t," "))) or ""
- local fs = (nf>0 and format(" (%s)",concat(f," "))) or ""
- return format("%s requested, %s found%s, %s missing%s",nt+nf,nt,ts,nf,fs)
+ if nf == 0 then
+ return format("%s requested, all found (%s)",nt,concat(t," "))
+ elseif nt == 0 then
+ return format("%s requested, all missing (%s)",nf,concat(f," "))
+ else
+ return format("%s requested, %s found (%s), %s missing (%s)",nt+nf,nt,concat(t," "),nf,concat(f," "))
+ end
else
return nil
end
@@ -166,5 +171,11 @@ function commands.doifolderversionelse(one,two) -- one >= two
two = lpeg.match(splitter,two)
one = (one[1] or 0) * 10000 + (one[2] or 0) * 100 + (one[3] or 0)
two = (two[1] or 0) * 10000 + (two[2] or 0) * 100 + (two[3] or 0)
- commands.testcase(one>=two)
+ commands.doifelse(one>=two)
+end
+
+function commands.useluamodule(list)
+ for filename in iterator(list) do
+ environment.loadluafile(filename)
+ end
end
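commands.doifolderversionelse reduces a dotted version like 2012.10.04 to a single number (major*10000 + minor*100 + patch) before comparing, so one >= two can be tested numerically. A self-contained sketch of the same reduction, using a string match instead of the lpeg splitter:

    local function versionnumber(str)
        local major, minor, patch = str:match("^(%d*)%.?(%d*)%.?(%d*)")
        return (tonumber(major) or 0) * 10000
             + (tonumber(minor) or 0) * 100
             + (tonumber(patch) or 0)
    end

    local function isnewerorequal(one, two) -- one >= two, as in the real check
        return versionnumber(one) >= versionnumber(two)
    end

    print(isnewerorequal("2013.04.07", "2012.10.04")) --> true
    print(isnewerorequal("2012.05",    "2012.10.04")) --> false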
diff --git a/Master/texmf-dist/tex/context/base/file-mod.mkvi b/Master/texmf-dist/tex/context/base/file-mod.mkvi
index 33d1004029c..92b8a895940 100644
--- a/Master/texmf-dist/tex/context/base/file-mod.mkvi
+++ b/Master/texmf-dist/tex/context/base/file-mod.mkvi
@@ -31,10 +31,11 @@
%D their dedicated manuals. We use \type {\next} so that we
%D can \type {\end} in modules.
-\def\usemodules
+\unexpanded\def\usemodules
{\dotripleempty\strc_modules_use}
-\let\usemodule\usemodules
+\let\usemodule \usemodules
+\let\usetexmodule\usemodules
% \def\strc_modules_use[#category][#name][#parameters]%
% {\pushmacro\currentmodule
@@ -247,4 +248,8 @@
% \setupexternalresources
% [url=]
+% new:
+
+\unexpanded\def\useluamodule[#1]{\ctxcommand{useluamodule("#1")}}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/file-res.lua b/Master/texmf-dist/tex/context/base/file-res.lua
index 4687e140e4e..8e65ba4c7d1 100644
--- a/Master/texmf-dist/tex/context/base/file-res.lua
+++ b/Master/texmf-dist/tex/context/base/file-res.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['supp-fil'] = {
+if not modules then modules = { } end modules ['file-res'] = {
version = 1.001,
comment = "companion to supp-fil.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['supp-fil'] = {
local format = string.format
local isfile = lfs.isfile
+local is_qualified_path = file.is_qualified_path
+local hasscheme = url.hasscheme
local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
local report_files = logs.reporter("files","readfile")
@@ -35,7 +37,7 @@ local function readfilename(specification,backtrack,treetoo)
names[i] = name .. "." .. defaultsuffixes[i]
end
if trace_files then
- report_files("locating: %s, using default suffixes: %s",name,table.concat(defaultsuffixes," "))
+ report_files("locating: %s, using default suffixes: %a",name,defaultsuffixes)
end
end
for i=1,#names do
@@ -76,9 +78,9 @@ local function readfilename(specification,backtrack,treetoo)
local fname = paths[i] .. "/" .. names[i]
if isfile(fname) then
if trace_files then
- report_files("found on extra path: %s",name)
+ report_files("found on extra path: %s",fname)
end
- fnd = name
+ fnd = fname
break
end
end
@@ -127,7 +129,7 @@ openers.any = openers.file loaders.any = loaders.file
function getreadfilename(scheme,path,name) -- better do a split and then pass table
local fullname
- if url.hasscheme(name) then
+ if hasscheme(name) or is_qualified_path(name) then
fullname = name
else
fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
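getreadfilename now leaves scheme-qualified names (tree://...) and qualified paths alone and only wraps bare names in a scheme:///path/name url. A sketch of that decision; hasscheme and is_qualified_path are simplified stand-ins for url.hasscheme and file.is_qualified_path:

    local format = string.format

    local function hasscheme(name)
        return name:find("^%a%w+://") ~= nil
    end

    local function is_qualified_path(name)
        return name:find("^/") ~= nil or name:find("^%a:[/\\]") ~= nil
    end

    local function getreadfilename(scheme, path, name)
        if hasscheme(name) or is_qualified_path(name) then
            return name -- already fully specified, pass it through
        elseif path == "" then
            return format("%s:///%s", scheme, name)
        else
            return format("%s:///%s/%s", scheme, path, name)
        end
    end

    print(getreadfilename("any", ".", "setup.tex"))        --> any:///./setup.tex
    print(getreadfilename("any", "", "/tmp/setup.tex"))    --> /tmp/setup.tex
    print(getreadfilename("any", "", "tree:///setup.tex")) --> tree:///setup.tex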
diff --git a/Master/texmf-dist/tex/context/base/file-syn.lua b/Master/texmf-dist/tex/context/base/file-syn.lua
index 1be9fb4b12f..8d913bb3769 100644
--- a/Master/texmf-dist/tex/context/base/file-syn.lua
+++ b/Master/texmf-dist/tex/context/base/file-syn.lua
@@ -11,6 +11,9 @@ local report_files = logs.reporter("files")
environment.filesynonyms = environment.filesynonyms or { }
local filesynonyms = environment.filesynonyms
+local settings_to_array = utilities.parsers.settings_to_array
+local findfile = resolvers.findfile
+
storage.register("environment/filesynonyms", filesynonyms, "environment.filesynonyms")
local function truefilename(name)
@@ -37,9 +40,9 @@ function commands.definefilesynonym(name,realname)
end
function commands.definefilefallback(name,alternatives)
- local names = utilities.parser.settings_to_array(alternatives)
+ local names = settings_to_array(alternatives)
for i=1,#names do
- local realname = resolvers.findfile(names[i])
+ local realname = findfile(names[i])
if realname ~= "" then
filesynonyms[name] = realname
break
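commands.definefilefallback walks the comma-separated alternatives and registers the first one the resolver can actually locate as the synonym for the requested name. A reduced sketch with a fake findfile in place of resolvers.findfile:

    local filesynonyms = { }

    local available = { ["myformat-2013.bst"] = true } -- pretend only this exists
    local function findfile(name)
        return available[name] and name or ""
    end

    local function definefilefallback(name, alternatives)
        for alternative in alternatives:gmatch("[^,%s]+") do
            local realname = findfile(alternative)
            if realname ~= "" then
                filesynonyms[name] = realname
                break -- first hit wins
            end
        end
    end

    definefilefallback("myformat", "myformat-2014.bst,myformat-2013.bst")
    print(filesynonyms.myformat) --> myformat-2013.bst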
diff --git a/Master/texmf-dist/tex/context/base/font-afk.lua b/Master/texmf-dist/tex/context/base/font-afk.lua
new file mode 100644
index 00000000000..8b65b063184
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-afk.lua
@@ -0,0 +1,200 @@
+if not modules then modules = { } end modules ['font-afk'] = {
+ version = 1.001,
+ comment = "companion to font-afm.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+--[[ldx--
+<p>For ligatures, only characters with a code smaller than 128 make sense,
+anything larger is encoding dependent. An interesting complication is that a
+character can be in an encoding twice but is hashed once.</p>
+--ldx]]--
+
+local allocate = utilities.storage.allocate
+
+fonts.handlers.afm.helpdata = {
+ ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts
+ ['f'] = { -- don't have that many ligatures anyway
+ { 'f', 'ff' },
+ { 'i', 'fi' },
+ { 'l', 'fl' },
+ },
+ ['ff'] = {
+ { 'i', 'ffi' }
+ },
+ ['fi'] = {
+ { 'i', 'fii' }
+ },
+ ['fl'] = {
+ { 'i', 'fli' }
+ },
+ ['s'] = {
+ { 't', 'st' }
+ },
+ ['i'] = {
+ { 'j', 'ij' }
+ },
+ },
+ texligatures = allocate {
+ -- ['space'] = {
+ -- { 'L', 'Lslash' },
+ -- { 'l', 'lslash' }
+ -- },
+ -- ['question'] = {
+ -- { 'quoteleft', 'questiondown' }
+ -- },
+ -- ['exclam'] = {
+ -- { 'quoteleft', 'exclamdown' }
+ -- },
+ ['quoteleft'] = {
+ { 'quoteleft', 'quotedblleft' }
+ },
+ ['quoteright'] = {
+ { 'quoteright', 'quotedblright' }
+ },
+ ['hyphen'] = {
+ { 'hyphen', 'endash' }
+ },
+ ['endash'] = {
+ { 'hyphen', 'emdash' }
+ }
+ },
+ leftkerned = allocate {
+ AEligature = "A", aeligature = "a",
+ OEligature = "O", oeligature = "o",
+ IJligature = "I", ijligature = "i",
+ AE = "A", ae = "a",
+ OE = "O", oe = "o",
+ IJ = "I", ij = "i",
+ Ssharp = "S", ssharp = "s",
+ },
+ rightkerned = allocate {
+ AEligature = "E", aeligature = "e",
+ OEligature = "E", oeligature = "e",
+ IJligature = "J", ijligature = "j",
+ AE = "E", ae = "e",
+ OE = "E", oe = "e",
+ IJ = "J", ij = "j",
+ Ssharp = "S", ssharp = "s",
+ },
+ bothkerned = allocate {
+ Acircumflex = "A", acircumflex = "a",
+ Ccircumflex = "C", ccircumflex = "c",
+ Ecircumflex = "E", ecircumflex = "e",
+ Gcircumflex = "G", gcircumflex = "g",
+ Hcircumflex = "H", hcircumflex = "h",
+ Icircumflex = "I", icircumflex = "i",
+ Jcircumflex = "J", jcircumflex = "j",
+ Ocircumflex = "O", ocircumflex = "o",
+ Scircumflex = "S", scircumflex = "s",
+ Ucircumflex = "U", ucircumflex = "u",
+ Wcircumflex = "W", wcircumflex = "w",
+ Ycircumflex = "Y", ycircumflex = "y",
+
+ Agrave = "A", agrave = "a",
+ Egrave = "E", egrave = "e",
+ Igrave = "I", igrave = "i",
+ Ograve = "O", ograve = "o",
+ Ugrave = "U", ugrave = "u",
+ Ygrave = "Y", ygrave = "y",
+
+ Atilde = "A", atilde = "a",
+ Itilde = "I", itilde = "i",
+ Otilde = "O", otilde = "o",
+ Utilde = "U", utilde = "u",
+ Ntilde = "N", ntilde = "n",
+
+ Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a",
+ Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e",
+ Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i",
+ Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o",
+ Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u",
+ Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y",
+
+ Aacute = "A", aacute = "a",
+ Cacute = "C", cacute = "c",
+ Eacute = "E", eacute = "e",
+ Iacute = "I", iacute = "i",
+ Lacute = "L", lacute = "l",
+ Nacute = "N", nacute = "n",
+ Oacute = "O", oacute = "o",
+ Racute = "R", racute = "r",
+ Sacute = "S", sacute = "s",
+ Uacute = "U", uacute = "u",
+ Yacute = "Y", yacute = "y",
+ Zacute = "Z", zacute = "z",
+
+ Dstroke = "D", dstroke = "d",
+ Hstroke = "H", hstroke = "h",
+ Tstroke = "T", tstroke = "t",
+
+ Cdotaccent = "C", cdotaccent = "c",
+ Edotaccent = "E", edotaccent = "e",
+ Gdotaccent = "G", gdotaccent = "g",
+ Idotaccent = "I", idotaccent = "i",
+ Zdotaccent = "Z", zdotaccent = "z",
+
+ Amacron = "A", amacron = "a",
+ Emacron = "E", emacron = "e",
+ Imacron = "I", imacron = "i",
+ Omacron = "O", omacron = "o",
+ Umacron = "U", umacron = "u",
+
+ Ccedilla = "C", ccedilla = "c",
+ Kcedilla = "K", kcedilla = "k",
+ Lcedilla = "L", lcedilla = "l",
+ Ncedilla = "N", ncedilla = "n",
+ Rcedilla = "R", rcedilla = "r",
+ Scedilla = "S", scedilla = "s",
+ Tcedilla = "T", tcedilla = "t",
+
+ Ohungarumlaut = "O", ohungarumlaut = "o",
+ Uhungarumlaut = "U", uhungarumlaut = "u",
+
+ Aogonek = "A", aogonek = "a",
+ Eogonek = "E", eogonek = "e",
+ Iogonek = "I", iogonek = "i",
+ Uogonek = "U", uogonek = "u",
+
+ Aring = "A", aring = "a",
+ Uring = "U", uring = "u",
+
+ Abreve = "A", abreve = "a",
+ Ebreve = "E", ebreve = "e",
+ Gbreve = "G", gbreve = "g",
+ Ibreve = "I", ibreve = "i",
+ Obreve = "O", obreve = "o",
+ Ubreve = "U", ubreve = "u",
+
+ Ccaron = "C", ccaron = "c",
+ Dcaron = "D", dcaron = "d",
+ Ecaron = "E", ecaron = "e",
+ Lcaron = "L", lcaron = "l",
+ Ncaron = "N", ncaron = "n",
+ Rcaron = "R", rcaron = "r",
+ Scaron = "S", scaron = "s",
+ Tcaron = "T", tcaron = "t",
+ Zcaron = "Z", zcaron = "z",
+
+ dotlessI = "I", dotlessi = "i",
+ dotlessJ = "J", dotlessj = "j",
+
+ AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae",
+ OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe",
+ IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij",
+
+ Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l",
+ Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o",
+
+ Ssharp = "SS", ssharp = "ss",
+
+ Aumlaut = "A", aumlaut = "a",
+ Eumlaut = "E", eumlaut = "e",
+ Iumlaut = "I", iumlaut = "i",
+ Oumlaut = "O", oumlaut = "o",
+ Uumlaut = "U", uumlaut = "u",
+ }
+}
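The helpdata tables above are consumed by font-afm.lua, which turns the ligature pairs into per-glyph lookups in the font's descriptions. A compact sketch of how such a table translates into a next-glyph lookup (glyph names are used directly here instead of the real unicode-indexed descriptions):

    local ligatures = {
        f  = { { "f", "ff" }, { "i", "fi" }, { "l", "fl" } },
        ff = { { "i", "ffi" } },
    }

    -- build a simple next-glyph -> ligature lookup per starting glyph
    local lookup = { }
    for ligname, ligdata in pairs(ligatures) do
        local t = { }
        for i = 1, #ligdata do
            t[ligdata[i][1]] = ligdata[i][2]
        end
        lookup[ligname] = t
    end

    print(lookup.f.i)  --> fi
    print(lookup.ff.i) --> ffi
    print(lookup.f.z)  --> nil (no such ligature)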
diff --git a/Master/texmf-dist/tex/context/base/font-afm.lua b/Master/texmf-dist/tex/context/base/font-afm.lua
index 9d3d8ddd759..cb0c2438fe9 100644
--- a/Master/texmf-dist/tex/context/base/font-afm.lua
+++ b/Master/texmf-dist/tex/context/base/font-afm.lua
@@ -17,12 +17,7 @@ where we handles font encodings. Eventually font encoding goes
away.</p>
--ldx]]--
-local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
-local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
-local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-
-local report_afm = logs.reporter("fonts","afm loading")
+local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
local next, type, tonumber = next, type, tonumber
local format, match, gmatch, lower, gsub, strip = string.format, string.match, string.gmatch, string.lower, string.gsub, string.strip
@@ -30,30 +25,35 @@ local abs = math.abs
local P, S, C, R, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.match, lpeg.patterns
local derivetable = table.derive
-local fonts = fonts
-local afm = { }
-local pfb = { }
-fonts.handlers.afm = afm
-fonts.handlers.pfb = pfb
+local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
+local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
+local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-afm.version = 1.410 -- incrementing this number one up will force a re-cache
-afm.cache = containers.define("fonts", "afm", afm.version, true)
-afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
+local report_afm = logs.reporter("fonts","afm loading")
-afm.syncspace = true -- when true, nicer stretch values
-afm.addligatures = true -- best leave this set to true
-afm.addtexligatures = true -- best leave this set to true
-afm.addkerns = true -- best leave this set to true
+local findbinfile = resolvers.findbinfile
local definers = fonts.definers
local readers = fonts.readers
local constructors = fonts.constructors
-local findbinfile = resolvers.findbinfile
+local afm = constructors.newhandler("afm")
+local pfb = constructors.newhandler("pfb")
local afmfeatures = constructors.newfeatures("afm")
local registerafmfeature = afmfeatures.register
+afm.version = 1.410 -- incrementing this number one up will force a re-cache
+afm.cache = containers.define("fonts", "afm", afm.version, true)
+afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
+
+afm.helpdata = { } -- set later on so no local for this
+afm.syncspace = true -- when true, nicer stretch values
+afm.addligatures = true -- best leave this set to true
+afm.addtexligatures = true -- best leave this set to true
+afm.addkerns = true -- best leave this set to true
+
local function setmode(tfmdata,value)
if value then
tfmdata.properties.mode = lower(value)
@@ -216,7 +216,7 @@ local function get_indexes(data,pfbname)
local glyphs = pfbdata.glyphs
if glyphs then
if trace_loading then
- report_afm("getting index data from %s",pfbname)
+ report_afm("getting index data from %a",pfbname)
end
for index, glyph in next, glyphs do
local name = glyph.name
@@ -224,21 +224,21 @@ local function get_indexes(data,pfbname)
local char = characters[name]
if char then
if trace_indexing then
- report_afm("glyph %s has index %s",name,index)
+ report_afm("glyph %a has index %a",name,index)
end
char.index = index
end
end
end
elseif trace_loading then
- report_afm("no glyph data in pfb file %s",pfbname)
+ report_afm("no glyph data in pfb file %a",pfbname)
end
elseif trace_loading then
- report_afm("no data in pfb file %s",pfbname)
+ report_afm("no data in pfb file %a",pfbname)
end
fontloader.close(pfbblob)
elseif trace_loading then
- report_afm("invalid pfb file %s",pfbname)
+ report_afm("invalid pfb file %a",pfbname)
end
end
@@ -292,7 +292,7 @@ local function readafm(filename)
return data
else
if trace_loading then
- report_afm("no valid afm file %s",filename)
+ report_afm("no valid afm file %a",filename)
end
return nil
end
@@ -327,36 +327,36 @@ function afm.load(filename)
pfbtime = attr.modification or 0
end
if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then
- report_afm( "reading %s",filename)
+ report_afm("reading %a",filename)
data = readafm(filename)
if data then
if pfbname ~= "" then
get_indexes(data,pfbname)
elseif trace_loading then
- report_afm("no pfb file for %s",filename)
+ report_afm("no pfb file for %a",filename)
end
- report_afm( "unifying %s",filename)
+ report_afm("unifying %a",filename)
unify(data,filename)
if afm.addligatures then
- report_afm( "add ligatures")
+ report_afm("add ligatures")
addligatures(data)
end
if afm.addtexligatures then
- report_afm( "add tex ligatures")
+ report_afm("add tex ligatures")
addtexligatures(data)
end
if afm.addkerns then
- report_afm( "add extra kerns")
+ report_afm("add extra kerns")
addkerns(data)
end
normalize(data)
- report_afm( "add tounicode data")
+ report_afm("add tounicode data")
fonts.mappings.addtounicode(data,filename)
data.size = size
data.time = time
data.pfbsize = pfbsize
data.pfbtime = pfbtime
- report_afm("saving: %s in cache",name)
+ report_afm("saving %a in cache",name)
data = containers.write(afm.cache, name, data)
data = containers.read(afm.cache,name)
end
@@ -381,7 +381,7 @@ unify = function(data, filename)
if not code then
code = private
private = private + 1
- report_afm("assigning private slot U+%05X for unknown glyph name %s", code, name)
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
end
end
local index = blob.index
@@ -429,79 +429,26 @@ end
and extra kerns. This saves quite some lookups later.</p>
--ldx]]--
---[[ldx--
-<p>Only characters with a code smaller than 128 make sense,
-anything larger is encoding dependent. An interesting complication
-is that a character can be in an encoding twice but is hashed
-once.</p>
---ldx]]--
-
-local ligatures = { -- okay, nowadays we could parse the name but type 1 fonts
- ['f'] = { -- don't have that many ligatures anyway
- { 'f', 'ff' },
- { 'i', 'fi' },
- { 'l', 'fl' },
- },
- ['ff'] = {
- { 'i', 'ffi' }
- },
- ['fi'] = {
- { 'i', 'fii' }
- },
- ['fl'] = {
- { 'i', 'fli' }
- },
- ['s'] = {
- { 't', 'st' }
- },
- ['i'] = {
- { 'j', 'ij' }
- },
-}
-
-local texligatures = {
- -- ['space'] = {
- -- { 'L', 'Lslash' },
- -- { 'l', 'lslash' }
- -- },
- -- ['question'] = {
- -- { 'quoteleft', 'questiondown' }
- -- },
- -- ['exclam'] = {
- -- { 'quoteleft', 'exclamdown' }
- -- },
- ['quoteleft'] = {
- { 'quoteleft', 'quotedblleft' }
- },
- ['quoteright'] = {
- { 'quoteright', 'quotedblright' }
- },
- ['hyphen'] = {
- { 'hyphen', 'endash' }
- },
- ['endash'] = {
- { 'hyphen', 'emdash' }
- }
-}
-
local addthem = function(rawdata,ligatures)
- local descriptions = rawdata.descriptions
- local resources = rawdata.resources
- local unicodes = resources.unicodes
- local names = resources.names
- for ligname, ligdata in next, ligatures do
- local one = descriptions[unicodes[ligname]]
- if one then
- for _, pair in next, ligdata do
- local two, three = unicodes[pair[1]], unicodes[pair[2]]
- if two and three then
- local ol = one.ligatures
- if ol then
- if not ol[two] then
- ol[two] = three
+ if ligatures then
+ local descriptions = rawdata.descriptions
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ local names = resources.names
+ for ligname, ligdata in next, ligatures do
+ local one = descriptions[unicodes[ligname]]
+ if one then
+ for _, pair in next, ligdata do
+ local two, three = unicodes[pair[1]], unicodes[pair[2]]
+ if two and three then
+ local ol = one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two] = three
+ end
+ else
+ one.ligatures = { [two] = three }
end
- else
- one.ligatures = { [two] = three }
end
end
end
@@ -509,8 +456,8 @@ local addthem = function(rawdata,ligatures)
end
end
-addligatures = function(rawdata) addthem(rawdata,ligatures ) end
-addtexligatures = function(rawdata) addthem(rawdata,texligatures) end
+addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
--[[ldx--
<p>We keep the extra kerns in separate kerning tables so that we can use
@@ -524,208 +471,71 @@ them selectively.</p>
-- we don't use the character database. (Ok, we can have a context specific
-- variant).
--- we can make them numbers
-
-local left = {
- AEligature = "A", aeligature = "a",
- OEligature = "O", oeligature = "o",
- IJligature = "I", ijligature = "i",
- AE = "A", ae = "a",
- OE = "O", oe = "o",
- IJ = "I", ij = "i",
- Ssharp = "S", ssharp = "s",
-}
-
-local right = {
- AEligature = "E", aeligature = "e",
- OEligature = "E", oeligature = "e",
- IJligature = "J", ijligature = "j",
- AE = "E", ae = "e",
- OE = "E", oe = "e",
- IJ = "J", ij = "j",
- Ssharp = "S", ssharp = "s",
-}
-
-local both = {
- Acircumflex = "A", acircumflex = "a",
- Ccircumflex = "C", ccircumflex = "c",
- Ecircumflex = "E", ecircumflex = "e",
- Gcircumflex = "G", gcircumflex = "g",
- Hcircumflex = "H", hcircumflex = "h",
- Icircumflex = "I", icircumflex = "i",
- Jcircumflex = "J", jcircumflex = "j",
- Ocircumflex = "O", ocircumflex = "o",
- Scircumflex = "S", scircumflex = "s",
- Ucircumflex = "U", ucircumflex = "u",
- Wcircumflex = "W", wcircumflex = "w",
- Ycircumflex = "Y", ycircumflex = "y",
-
- Agrave = "A", agrave = "a",
- Egrave = "E", egrave = "e",
- Igrave = "I", igrave = "i",
- Ograve = "O", ograve = "o",
- Ugrave = "U", ugrave = "u",
- Ygrave = "Y", ygrave = "y",
-
- Atilde = "A", atilde = "a",
- Itilde = "I", itilde = "i",
- Otilde = "O", otilde = "o",
- Utilde = "U", utilde = "u",
- Ntilde = "N", ntilde = "n",
-
- Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a",
- Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e",
- Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i",
- Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o",
- Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u",
- Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y",
-
- Aacute = "A", aacute = "a",
- Cacute = "C", cacute = "c",
- Eacute = "E", eacute = "e",
- Iacute = "I", iacute = "i",
- Lacute = "L", lacute = "l",
- Nacute = "N", nacute = "n",
- Oacute = "O", oacute = "o",
- Racute = "R", racute = "r",
- Sacute = "S", sacute = "s",
- Uacute = "U", uacute = "u",
- Yacute = "Y", yacute = "y",
- Zacute = "Z", zacute = "z",
-
- Dstroke = "D", dstroke = "d",
- Hstroke = "H", hstroke = "h",
- Tstroke = "T", tstroke = "t",
-
- Cdotaccent = "C", cdotaccent = "c",
- Edotaccent = "E", edotaccent = "e",
- Gdotaccent = "G", gdotaccent = "g",
- Idotaccent = "I", idotaccent = "i",
- Zdotaccent = "Z", zdotaccent = "z",
-
- Amacron = "A", amacron = "a",
- Emacron = "E", emacron = "e",
- Imacron = "I", imacron = "i",
- Omacron = "O", omacron = "o",
- Umacron = "U", umacron = "u",
-
- Ccedilla = "C", ccedilla = "c",
- Kcedilla = "K", kcedilla = "k",
- Lcedilla = "L", lcedilla = "l",
- Ncedilla = "N", ncedilla = "n",
- Rcedilla = "R", rcedilla = "r",
- Scedilla = "S", scedilla = "s",
- Tcedilla = "T", tcedilla = "t",
-
- Ohungarumlaut = "O", ohungarumlaut = "o",
- Uhungarumlaut = "U", uhungarumlaut = "u",
-
- Aogonek = "A", aogonek = "a",
- Eogonek = "E", eogonek = "e",
- Iogonek = "I", iogonek = "i",
- Uogonek = "U", uogonek = "u",
-
- Aring = "A", aring = "a",
- Uring = "U", uring = "u",
-
- Abreve = "A", abreve = "a",
- Ebreve = "E", ebreve = "e",
- Gbreve = "G", gbreve = "g",
- Ibreve = "I", ibreve = "i",
- Obreve = "O", obreve = "o",
- Ubreve = "U", ubreve = "u",
-
- Ccaron = "C", ccaron = "c",
- Dcaron = "D", dcaron = "d",
- Ecaron = "E", ecaron = "e",
- Lcaron = "L", lcaron = "l",
- Ncaron = "N", ncaron = "n",
- Rcaron = "R", rcaron = "r",
- Scaron = "S", scaron = "s",
- Tcaron = "T", tcaron = "t",
- Zcaron = "Z", zcaron = "z",
-
- dotlessI = "I", dotlessi = "i",
- dotlessJ = "J", dotlessj = "j",
-
- AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae",
- OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe",
- IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij",
-
- Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l",
- Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o",
-
- Ssharp = "SS", ssharp = "ss",
-
- Aumlaut = "A", aumlaut = "a",
- Eumlaut = "E", eumlaut = "e",
- Iumlaut = "I", iumlaut = "i",
- Oumlaut = "O", oumlaut = "o",
- Uumlaut = "U", uumlaut = "u",
-
-}
-
addkerns = function(rawdata) -- using shcodes is not robust here
local descriptions = rawdata.descriptions
local resources = rawdata.resources
local unicodes = resources.unicodes
local function do_it_left(what)
- for unicode, description in next, descriptions do
- local kerns = description.kerns
- if kerns then
- local extrakerns
- for complex, simple in next, what do
- complex = unicodes[complex]
- simple = unicodes[simple]
- if complex and simple then
- local ks = kerns[simple]
- if ks and not kerns[complex] then
- if extrakerns then
- extrakerns[complex] = ks
- else
- extrakerns = { [complex] = ks }
+ if what then
+ for unicode, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ local extrakerns
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local ks = kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex] = ks
+ else
+ extrakerns = { [complex] = ks }
+ end
end
end
end
- end
- if extrakerns then
- description.extrakerns = extrakerns
+ if extrakerns then
+ description.extrakerns = extrakerns
+ end
end
end
end
end
local function do_it_copy(what)
- for complex, simple in next, what do
- complex = unicodes[complex]
- simple = unicodes[simple]
- if complex and simple then
- local complexdescription = descriptions[complex]
- if complexdescription then -- optional
- local simpledescription = descriptions[complex]
- if simpledescription then
- local extrakerns
- local kerns = simpledescription.kerns
- if kerns then
- for unicode, kern in next, kerns do
- if extrakerns then
- extrakerns[unicode] = kern
- else
- extrakerns = { [unicode] = kern }
+ if what then
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local complexdescription = descriptions[complex]
+ if complexdescription then -- optional
+ local simpledescription = descriptions[complex]
+ if simpledescription then
+ local extrakerns
+ local kerns = simpledescription.kerns
+ if kerns then
+ for unicode, kern in next, kerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
end
end
- end
- local extrakerns = simpledescription.extrakerns
- if extrakerns then
- for unicode, kern in next, extrakerns do
- if extrakerns then
- extrakerns[unicode] = kern
- else
- extrakerns = { [unicode] = kern }
+ local extrakerns = simpledescription.extrakerns
+ if extrakerns then
+ for unicode, kern in next, extrakerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
end
end
- end
- if extrakerns then
- complexdescription.extrakerns = extrakerns
+ if extrakerns then
+ complexdescription.extrakerns = extrakerns
+ end
end
end
end
@@ -733,11 +543,11 @@ addkerns = function(rawdata) -- using shcodes is not robust here
end
end
-- add complex with values of simplified when present
- do_it_left(left)
- do_it_left(both)
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
-- copy kerns from simple char to complex char unless set
- do_it_copy(both)
- do_it_copy(right)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
end
--[[ldx--
@@ -911,13 +721,13 @@ local function afmtotfm(specification)
local afmname = specification.filename or specification.name
if specification.forced == "afm" or specification.format == "afm" then -- move this one up
if trace_loading then
- report_afm("forcing afm format for %s",afmname)
+ report_afm("forcing afm format for %a",afmname)
end
else
local tfmname = findbinfile(afmname,"ofm") or ""
if tfmname ~= "" then
if trace_loading then
- report_afm("fallback from afm to tfm for %s",afmname)
+ report_afm("fallback from afm to tfm for %a",afmname)
end
return -- just that
end
@@ -947,7 +757,7 @@ local function afmtotfm(specification)
shared.processes = afm.setfeatures(tfmdata,features)
end
elseif trace_loading then
- report_afm("no (valid) afm file found with name %s",afmname)
+ report_afm("no (valid) afm file found with name %a",afmname)
end
tfmdata = containers.write(constructors.cache,cache_id,tfmdata)
end
@@ -1113,7 +923,7 @@ local function check_afm(specification,fullname)
if shortname ~= "" then
foundname = shortname
if trace_defining then
- report_afm("stripping encoding prefix from filename %s",afmname)
+ report_afm("stripping encoding prefix from filename %a",afmname)
end
end
end
@@ -1153,7 +963,7 @@ end
function readers.pfb(specification,method) -- only called when forced
local original = specification.specification
if trace_defining then
- report_afm("using afm reader for '%s'",original)
+ report_afm("using afm reader for %a",original)
end
specification.specification = gsub(original,"%.pfb",".afm")
specification.forced = "afm"
diff --git a/Master/texmf-dist/tex/context/base/font-age.lua b/Master/texmf-dist/tex/context/base/font-age.lua
index 741bb475ab2..ba8f0bb0b84 100644
--- a/Master/texmf-dist/tex/context/base/font-age.lua
+++ b/Master/texmf-dist/tex/context/base/font-age.lua
@@ -4,10 +4,11 @@ if not modules then modules = { } end modules ['font-age'] = {
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt",
original = "Adobe Glyph List, version 2.0, September 20, 2002",
+ dataonly = true,
}
if context then
- texio.write_nl("fatal error: this module is not for context")
+ logs.report("fatal error","this module is not for context")
os.exit()
end
diff --git a/Master/texmf-dist/tex/context/base/font-agl.lua b/Master/texmf-dist/tex/context/base/font-agl.lua
index 6c1f387ba33..ca7fb61cbef 100644
--- a/Master/texmf-dist/tex/context/base/font-agl.lua
+++ b/Master/texmf-dist/tex/context/base/font-agl.lua
@@ -271,7 +271,7 @@ for a, u in next, extras do
end
end
--- We load this table only when needed. We coul duse a loading mechanism
+-- We load this table only when needed. We could use a loading mechanism
-- return the table but there are no more vectors like this so why bother.
fonts.encodings = fonts.encodings or { }
diff --git a/Master/texmf-dist/tex/context/base/font-aux.lua b/Master/texmf-dist/tex/context/base/font-aux.lua
index 0396554adec..2a605d224c9 100644
--- a/Master/texmf-dist/tex/context/base/font-aux.lua
+++ b/Master/texmf-dist/tex/context/base/font-aux.lua
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['font-aux'] = {
license = "see context related readme files"
}
-local number = tonumber
-local wrap, yield = coroutine.wrap, coroutine.yield
+local tonumber, type = tonumber, type
+----- wrap, yield = coroutine.wrap, coroutine.yield
local fonts, font = fonts, font
@@ -18,9 +18,11 @@ local currentfont = font.current
local identifiers = fonts.hashes.identifiers
local sortedkeys = table.sortedkeys
--- for unicode, character in fonts.iterators.characters () do print(k,v) end
--- for unicode, description in fonts.iterators.descriptions() do print(k,v) end
--- for index, glyph in fonts.iterators.glyphs () do print(k,v) end
+-- for unicode, character in fonts.iterators.characters () do print(unicode) end
+-- for unicode, description in fonts.iterators.descriptions() do print(unicode) end
+-- for index, glyph in fonts.iterators.glyphs () do print(index ) end
+
+local function dummy() end
local function checkeddata(data) -- beware, nullfont is the fallback in identifiers
local t = type(data)
@@ -32,22 +34,93 @@ local function checkeddata(data) -- beware, nullfont is the fallback in identifi
return identifiers[data] -- has nullfont as fallback
end
+local function getindices(data)
+ data = checkeddata(data)
+ local indices = { }
+ local characters = data.characters
+ if characters then
+ for unicode, character in next, characters do
+ indices[character.index or unicode] = unicode
+ end
+ end
+ return indices
+end
+
+-- function iterators.characters(data)
+-- data = checkeddata(data)
+-- local characters = data.characters
+-- if characters then
+-- local collected = sortedkeys(characters)
+-- return wrap(function()
+-- for c=1,#collected do
+-- local cc = collected[c]
+-- local dc = characters[cc]
+-- if dc then
+-- yield(cc,dc)
+-- end
+-- end
+-- end)
+-- else
+-- return wrap(function() end)
+-- end
+-- end
+
+-- function iterators.descriptions(data)
+-- data = checkeddata(data)
+-- local characters = data.characters
+-- local descriptions = data.descriptions
+-- if characters and descriptions then
+-- local collected = sortedkeys(characters)
+-- return wrap(function()
+-- for c=1,#collected do
+-- local cc = collected[c]
+-- local dc = descriptions[cc]
+-- if dc then
+-- yield(cc,dc)
+-- end
+-- end
+-- end)
+-- else
+-- return wrap(function() end)
+-- end
+-- end
+
+-- function iterators.glyphs(data)
+-- data = checkeddata(data)
+-- local descriptions = data.descriptions
+-- if descriptions then
+-- local indices = getindices(data)
+-- local collected = sortedkeys(indices)
+-- return wrap(function()
+-- for c=1,#collected do
+-- local cc = collected[c]
+-- local dc = descriptions[indices[cc]]
+-- if dc then
+-- yield(cc,dc)
+-- end
+-- end
+-- end)
+-- else
+-- return wrap(function() end)
+-- end
+-- end
+
function iterators.characters(data)
data = checkeddata(data)
local characters = data.characters
if characters then
- local collected = sortedkeys(characters)
- return wrap(function()
- for c=1,#collected do
- local cc = collected[c]
+ local collected = sortedkeys(characters)
+ local n, i = #collected, 0
+ return function()
+ i = i + 1
+ if i <= n then
+ local cc = collected[i]
local dc = characters[cc]
- if dc then
- yield(cc,dc)
- end
+ return cc, dc or { }
end
- end)
+ end
else
- return wrap(function() end)
+ return dummy
end
end
@@ -57,30 +130,18 @@ function iterators.descriptions(data)
local descriptions = data.descriptions
if characters and descriptions then
local collected = sortedkeys(characters)
- return wrap(function()
- for c=1,#collected do
- local cc = collected[c]
+ local n, i = #collected, 0
+ return function()
+ i = i + 1
+ if i <= n then
+ local cc = collected[i]
local dc = descriptions[cc]
- if dc then
- yield(cc,dc)
- end
+ return cc, dc or { }
end
- end)
- else
- return wrap(function() end)
- end
-end
-
-local function getindices(data)
- data = checkeddata(data)
- local indices = { }
- local characters = data.characters
- if characters then
- for unicode, character in next, characters do
- indices[character.index or unicode] = unicode
end
+ else
+ return dummy
end
- return indices
end
function iterators.glyphs(data)
@@ -89,16 +150,16 @@ function iterators.glyphs(data)
if descriptions then
local indices = getindices(data)
local collected = sortedkeys(indices)
- return wrap(function()
- for c=1,#collected do
- local cc = collected[c]
+ local n, i = #collected, 0
+ return function()
+ i = i + 1
+ if i <= n then
+ local cc = collected[i]
local dc = descriptions[indices[cc]]
- if dc then
- yield(cc,dc)
- end
+ return cc, dc or { }
end
- end)
+ end
else
- return wrap(function() end)
+ return dummy
end
end
diff --git a/Master/texmf-dist/tex/context/base/font-aux.mkvi b/Master/texmf-dist/tex/context/base/font-aux.mkvi
new file mode 100644
index 00000000000..bd655e41c60
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-aux.mkvi
@@ -0,0 +1,26 @@
+%D \module
+%D [ file=font-gds,
+%D version=2012.06.26,
+%D title=\CONTEXT\ Font Support,
+%D subtitle=Helpers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Font Support / Helpers}
+
+%D Here we collect code moved from other files, like some of the
+%D support modules.
+
+\unprotect
+
+%D Leftovers:
+
+\unexpanded\def\xbox {\bgroup\aftergroup\egroup\hbox\bgroup\tx \let\next=}
+\unexpanded\def\xxbox{\bgroup\aftergroup\egroup\hbox\bgroup\txx\let\next=}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/font-chk.lua b/Master/texmf-dist/tex/context/base/font-chk.lua
index 04b49b02c5a..9e7e31affaf 100644
--- a/Master/texmf-dist/tex/context/base/font-chk.lua
+++ b/Master/texmf-dist/tex/context/base/font-chk.lua
@@ -63,7 +63,7 @@ local function onetimemessage(font,char,message)
messages[message] = category
end
if not category[char] then
- report_fonts("char U+%05X in font '%s' with id %s: %s",char,tfmdata.properties.fullname,font,message)
+ report_fonts("char %U in font %a with id %a: %s",char,tfmdata.properties.fullname,font,message)
category[char] = true
end
end
@@ -149,7 +149,7 @@ local package = "q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0
local cache = { } -- saves some tables but not that impressive
-local function addmissingsymbols(tfmdata)
+local function addmissingsymbols(tfmdata) -- we can have an alternative with rules
local characters = tfmdata.characters
local size = tfmdata.parameters.size
local privates = tfmdata.properties.privates
@@ -169,6 +169,7 @@ local function addmissingsymbols(tfmdata)
width = size*fake.width,
height = size*fake.height,
depth = size*fake.depth,
+ -- bah .. low level pdf ... should be a rule or plugged in
commands = { { "special", "pdf: " .. format(package,scale,scale,r,g,b,r,g,b,fake.code) } }
}
cache[hash] = char
diff --git a/Master/texmf-dist/tex/context/base/font-chk.mkiv b/Master/texmf-dist/tex/context/base/font-chk.mkiv
new file mode 100644
index 00000000000..d436388de22
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-chk.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=font-chk,
+%D version=2009.01.02, % moved from font-tra
+%D title=\CONTEXT\ Font Macros,
+%D subtitle=Checking,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Font Macros / Checking}
+
+\registerctxluafile{font-chk}{1.001}
+
+\unexpanded\def\checkcharactersinfont {\ctxcommand{checkcharactersinfont()}}
+\unexpanded\def\removemissingcharacters {\ctxcommand{removemissingcharacters()}}
+\unexpanded\def\replacemissingcharacters{\ctxcommand{replacemissingcharacters()}}
+
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/font-cid.lua b/Master/texmf-dist/tex/context/base/font-cid.lua
index 4a4c4d20916..e4b565313b4 100644
--- a/Master/texmf-dist/tex/context/base/font-cid.lua
+++ b/Master/texmf-dist/tex/context/base/font-cid.lua
@@ -10,17 +10,17 @@ local format, match, lower = string.format, string.match, string.lower
local tonumber = tonumber
local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local fonts, logs, trackers = fonts, logs, trackers
-local report_otf = logs.reporter("fonts","otf loading")
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local fonts = fonts
+local report_otf = logs.reporter("fonts","otf loading")
-local cid = { }
-fonts.cid = cid
+local cid = { }
+fonts.cid = cid
-local cidmap = { }
-local cidmax = 10
+local cidmap = { }
+local cidmax = 10
-- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
--
@@ -79,8 +79,7 @@ local function loadcidfile(filename)
end
end
-cid.loadfile = loadcidfile -- we use the frozen variant
-
+cid.loadfile = loadcidfile -- we use the frozen variant
local template = "%s-%s-%s.cidmap"
local function locate(registry,ordering,supplement)
@@ -89,14 +88,14 @@ local function locate(registry,ordering,supplement)
local found = cidmap[hashname]
if not found then
if trace_loading then
- report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
end
local fullname = resolvers.findfile(filename,'cid') or ""
if fullname ~= "" then
found = loadcidfile(fullname)
if found then
if trace_loading then
- report_otf("using cidmap file %s",filename)
+ report_otf("using cidmap file %a",filename)
end
cidmap[hashname] = found
found.usedname = file.basename(filename)
@@ -110,7 +109,7 @@ end
function cid.getmap(specification)
if not specification then
- report_otf("invalid cidinfo specification (table expected)")
+ report_otf("invalid cidinfo specification, table expected")
return
end
local registry = specification.registry
@@ -123,7 +122,7 @@ function cid.getmap(specification)
return found
end
if trace_loading then
- report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
end
found = locate(registry,ordering,supplement)
if not found then
diff --git a/Master/texmf-dist/tex/context/base/font-col.lua b/Master/texmf-dist/tex/context/base/font-col.lua
index f8add1e4e1d..20c99c9b4a3 100644
--- a/Master/texmf-dist/tex/context/base/font-col.lua
+++ b/Master/texmf-dist/tex/context/base/font-col.lua
@@ -8,32 +8,40 @@ if not modules then modules = { } end modules ['font-col'] = {
-- possible optimization: delayed initialization of vectors
-local gmatch, type = string.gmatch, type
-local traverse_id = node.traverse_id
-local lpegmatch = lpeg.match
+local context, commands, trackers, logs = context, commands, trackers, logs
+local node, nodes, fonts, characters = node, nodes, fonts, characters
+local file, lpeg, table, string = file, lpeg, table, string
+
+local type, next, toboolean = type, next, toboolean
+local gmatch = string.gmatch
local fastcopy = table.fastcopy
-local settings_to_hash = utilities.parsers.settings_to_hash
+----- P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match
-local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end)
+local traverse_id = node.traverse_id
+local settings_to_hash = utilities.parsers.settings_to_hash
-local report_fonts = logs.reporter("fonts","collections")
+local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end)
-local fonts, context = fonts, context
+local report_fonts = logs.reporter("fonts","collections")
-fonts.collections = fonts.collections or { }
-local collections = fonts.collections
+local collections = fonts.collections or { }
+fonts.collections = collections
-collections.definitions = collections.definitions or { }
-local definitions = collections.definitions
+local definitions = collections.definitions or { }
+collections.definitions = definitions
-collections.vectors = collections.vectors or { }
-local vectors = collections.vectors
+local vectors = collections.vectors or { }
+collections.vectors = vectors
-local fontdata = fonts.hashes.identifiers
+local fontdata = fonts.hashes.identifiers
+local glyph_code = nodes.nodecodes.glyph
+local currentfont = font.current
-local glyph = node.id('glyph')
+local fontpatternhassize = fonts.helpers.fontpatternhassize
-local list, current, active = { }, 0, false
+local list = { }
+local current = 0
+local enabled = false
-- maybe also a copy
@@ -55,17 +63,13 @@ function collections.define(name,font,ranges,details)
-- todo: details -> method=force|conditional rscale=
-- todo: remap=name
local d = definitions[name]
- if d then
- if name and trace_collecting then
- report_fonts("def: extending set %s using %s",name, font)
- end
- else
- if name and trace_collecting then
- report_fonts("def: defining set %s using %s",name, font)
- end
+ if not d then
d = { }
definitions[name] = d
end
+ if name and trace_collecting then
+ report_fonts("extending collection %a using %a",name,font)
+ end
details = settings_to_hash(details)
-- todo, combine per font start/stop as arrays
for s in gmatch(ranges,"[^, ]+") do
@@ -73,12 +77,12 @@ function collections.define(name,font,ranges,details)
if start and stop then
if trace_collecting then
if description then
- report_fonts("def: using range %s (U+%05x-U+%05X, %s)",s,start,stop,description)
+ report_fonts("using range %a, slots %U - %U, description %a)",s,start,stop,description)
end
for i=1,#d do
local di = d[i]
if (start >= di.start and start <= di.stop) or (stop >= di.start and stop <= di.stop) then
- report_fonts("def: overlapping ranges U+%05x-U+%05X and U+%05x-U+%05X",start,stop,di.start,di.stop)
+ report_fonts("overlapping ranges %U - %U and %U - %U",start,stop,di.start,di.stop)
end
end
end
@@ -91,9 +95,9 @@ end
-- todo: provide a lua variant (like with definefont)
function collections.registermain(name)
- local last = font.current()
+ local last = currentfont()
if trace_collecting then
- report_fonts("def: registering font %s with name %s",last,name)
+ report_fonts("registering font %a with name %a",last,name)
end
list[#list+1] = last
end
@@ -103,14 +107,14 @@ function collections.clonevector(name)
local d = definitions[name]
local t = { }
if trace_collecting then
- report_fonts("def: process collection %s",name)
+ report_fonts("processing collection %a",name)
end
for i=1,#d do
local f = d[i]
local id = list[i]
local start, stop = f.start, f.stop
if trace_collecting then
- report_fonts("def: remapping font %s to %s for range U+%05X - U+%05X",current,id,start,stop)
+ report_fonts("remapping font %a to %a for range %U - %U",current,id,start,stop)
end
local check = toboolean(f.check or "false",true)
local force = toboolean(f.force or "true",true)
@@ -143,22 +147,24 @@ function collections.clonevector(name)
end
vectors[current] = t
if trace_collecting then
- report_fonts("def: activating collection %s for font %s",name,current)
+ report_fonts("activating collection %a for font %a",name,current)
+ end
+ if not enabled then
+ nodes.tasks.enableaction("processors","fonts.collections.process")
+ enabled = true
end
- active = true
statistics.stoptiming(fonts)
end
-- we already have this parser
-
-local P, Cc = lpeg.P, lpeg.Cc
-local spec = (P("sa") + P("at") + P("scaled") + P("at") + P("mo")) * P(" ")^1 * (1-P(" "))^1 * P(" ")^0 * -1
-local okay = ((1-spec)^1 * spec * Cc(true)) + Cc(false)
-
--- todo: check for already done
+--
+-- local spec = (P("sa") + P("at") + P("scaled") + P("at") + P("mo")) * P(" ")^1 * (1-P(" "))^1 * P(" ")^0 * -1
+-- local okay = ((1-spec)^1 * spec * Cc(true)) + Cc(false)
+--
+-- if lpegmatch(okay,name) then
function collections.prepare(name)
- current = font.current()
+ current = currentfont()
if vectors[current] then
return
end
@@ -166,7 +172,7 @@ function collections.prepare(name)
if d then
if trace_collecting then
local filename = file.basename(fontdata[current].properties.filename or "?")
- report_fonts("def: applying collection %s to %s (file: %s)",name,current,filename)
+ report_fonts("applying collection %a to %a, file %a",name,current,filename)
end
list = { }
context.pushcatcodes("prt") -- context.unprotect()
@@ -175,7 +181,7 @@ function collections.prepare(name)
local f = d[i]
local name = f.font
local scale = f.rscale or 1
- if lpegmatch(okay,name) then
+ if fontpatternhassize(name) then
context.font_fallbacks_clone_unique(name,scale)
else
context.font_fallbacks_clone_inherited(name,scale)
@@ -187,7 +193,7 @@ function collections.prepare(name)
context.popcatcodes() -- context.protect()
elseif trace_collecting then
local filename = file.basename(fontdata[current].properties.filename or "?")
- report_fonts("def: error in applying collection %s to %s (file: %s)",name,current,filename)
+ report_fonts("error while applying collection %a to %a, file %a",name,current,filename)
end
end
@@ -198,32 +204,28 @@ function collections.report(message)
end
function collections.process(head) -- this way we keep feature processing
- if active then
- local done = false
- for n in traverse_id(glyph,head) do
- local v = vectors[n.font]
- if v then
- local id = v[n.char]
- if id then
- if type(id) == "table" then
- local newid, newchar = id[1], id[2]
- if trace_collecting then
- report_fonts("lst: remapping character %s in font %s to character %s in font %s",n.char,n.font,newchar,newid)
- end
- n.font, n.char = newid, newchar
- else
- if trace_collecting then
- report_fonts("lst: remapping font %s to %s for character %s",n.font,id,n.char)
- end
- n.font = id
+ local done = false
+ for n in traverse_id(glyph_code,head) do
+ local v = vectors[n.font]
+ if v then
+ local id = v[n.char]
+ if id then
+ if type(id) == "table" then
+ local newid, newchar = id[1], id[2]
+ if trace_collecting then
+ report_fonts("remapping character %a in font %a to character %a in font %a",n.char,n.font,newchar,newid)
+ end
+ n.font, n.char = newid, newchar
+ else
+ if trace_collecting then
+ report_fonts("remapping font %a to %a for character %a",n.font,id,n.char)
end
+ n.font = id
end
end
end
- return head, done
- else
- return head, false
end
+ return head, done
end
-- interface
diff --git a/Master/texmf-dist/tex/context/base/font-con.lua b/Master/texmf-dist/tex/context/base/font-con.lua
index 92809968857..618bc8f2856 100644
--- a/Master/texmf-dist/tex/context/base/font-con.lua
+++ b/Master/texmf-dist/tex/context/base/font-con.lua
@@ -6,11 +6,8 @@ if not modules then modules = { } end modules ['font-con'] = {
license = "see context related readme files"
}
-
-- some names of table entries will be changed (no _)
-local utf = unicode.utf8
-
local next, tostring, rawget = next, tostring, rawget
local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub
local utfbyte = utf.byte
@@ -29,15 +26,11 @@ local report_defining = logs.reporter("fonts","defining")
--ldx]]--
local fonts = fonts
-local constructors = { }
+local constructors = fonts.constructors or { }
fonts.constructors = constructors
-local handlers = { }
+local handlers = fonts.handlers or { } -- can have preloaded tables
fonts.handlers = handlers
-local specifiers = fonts.specifiers
-local contextsetups = specifiers.contextsetups
-local contextnumbers = specifiers.contextnumbers
-
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -358,6 +351,10 @@ function constructors.scale(tfmdata,specification)
elseif forcedsize > 1000 then -- safeguard
scaledpoints = forcedsize
end
+ targetparameters.mathsize = mathsize -- context specific
+ targetparameters.textsize = textsize -- context specific
+ targetparameters.forcedsize = forcedsize -- context specific
+ targetparameters.extrafactor = extrafactor -- context specific
--
local tounicode = resources.tounicode
local defaultwidth = resources.defaultwidth or 0
@@ -418,6 +415,8 @@ function constructors.scale(tfmdata,specification)
target.psname = psname
target.name = name
--
+ -- inspect(properties)
+ --
properties.fontname = fontname
properties.fullname = fullname
properties.filename = filename
@@ -507,59 +506,51 @@ function constructors.scale(tfmdata,specification)
local scaledheight = defaultheight * vdelta
local scaleddepth = defaultdepth * vdelta
--
- if trace_defining then
- report_defining("scaling by (%s,%s): name '%s', fullname: '%s', filename: '%s'",
- hdelta,vdelta,name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- --
local hasmath = (properties.hasmath or next(mathparameters)) and true
+ --
if hasmath then
- if trace_defining then
- report_defining("math enabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed
properties.hasmath = true
target.nomath = false
target.MathConstants = target.mathparameters
else
- if trace_defining then
- report_defining("math disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
properties.hasmath = false
target.nomath = true
target.mathparameters = nil -- nop
end
--
- local italickey = "italic"
+ local italickey = "italic"
+ local useitalics = true -- something context
--
-- some context specific trickery (this will move to a plugin)
--
if hasmath then
- if properties.mathitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("math italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- end
+ -- the latest luatex can deal with it itself so we now disable this
+ -- mechanism here
+ --
+ -- if properties.mathitalics then
+ -- italickey = "italic_correction"
+ -- if trace_defining then
+ -- report_defining("math italics disabled for font %a, fullname %a, filename %a",name,fullname,filename)
+ -- end
+ -- end
autoitalicamount = false -- new
- else
- if properties.textitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("text italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- if properties.delaytextitalics then
- autoitalicamount = false
- end
+ elseif properties.textitalics then
+ italickey = "italic_correction"
+ useitalics = false
+ if properties.delaytextitalics then
+ autoitalicamount = false
end
end
--
-- end of context specific trickery
--
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ --
constructors.beforecopyingcharacters(target,tfmdata)
--
local sharedkerns = { }
@@ -641,9 +632,6 @@ function constructors.scale(tfmdata,specification)
if touni then
chr.tounicode = touni
end
- -- if trace_scaling then
- -- report_defining("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or "",index or 0,description.name or '-',description.class or '-')
- -- end
if hasquality then
-- we could move these calculations elsewhere (saves calculations)
local ve = character.expansion_factor
@@ -682,9 +670,6 @@ function constructors.scale(tfmdata,specification)
local vn = character.next
if vn then
chr.next = vn
- -- if character.vert_variants or character.horiz_variants then
- -- report_defining("glyph U+%05X has combination of next, vert_variants and horiz_variants",index)
- -- end
else
local vv = character.vert_variants
if vv then
@@ -977,7 +962,11 @@ hashmethods.normal = function(list)
local s = { }
local n = 0
for k, v in next, list do
- if k ~= "number" and k ~= "features" then -- I need to figure this out, features
+ if not k then
+ -- no need to add to hash
+ elseif k == "number" or k == "features" then
+ -- no need to add to hash (maybe we need a skip list)
+ else
n = n + 1
s[n] = k
end
@@ -1034,7 +1023,7 @@ function constructors.setname(tfmdata,specification) -- todo: get specification
if specname then
tfmdata.properties.name = specname
if trace_defining then
- report_otf("overloaded fontname: '%s'",specname)
+ report_otf("overloaded fontname %a",specname)
end
end
end
@@ -1048,10 +1037,10 @@ function constructors.checkedfilename(data)
askedfilename = resolvers.resolve(askedfilename) -- no shortcut
foundfilename = resolvers.findbinfile(askedfilename,"") or ""
if foundfilename == "" then
- report_defining("source file '%s' is not found",askedfilename)
+ report_defining("source file %a is not found",askedfilename)
foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
if foundfilename ~= "" then
- report_defining("using source file '%s' (cache mismatch)",foundfilename)
+ report_defining("using source file %a due to cache mismatch",foundfilename)
end
end
end
@@ -1069,7 +1058,7 @@ setmetatableindex(formats, function(t,k)
t[k] = l
return l
end
- return rawget(t,file.extname(l))
+ return rawget(t,file.suffix(l))
end)
local locations = { }
@@ -1077,7 +1066,7 @@ local locations = { }
local function setindeed(mode,target,group,name,action,position)
local t = target[mode]
if not t then
- report_defining("fatal error in setting feature '%s', group '%s', mode '%s'",name or "?",group or "?",mode)
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
os.exit()
elseif position then
-- todo: remove existing
@@ -1097,12 +1086,12 @@ end
local function set(group,name,target,source)
target = target[group]
if not target then
- report_defining("fatal target error in setting feature '%s', group '%s'",name or "?",group or "?")
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
os.exit()
end
local source = source[group]
if not source then
- report_defining("fatal source error in setting feature '%s', group '%s'",name or "?",group or "?")
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
os.exit()
end
local node = source.node
@@ -1166,19 +1155,31 @@ function constructors.getfeatureaction(what,where,mode,name)
end
end
-function constructors.newfeatures(what)
- local features = handlers[what].features
+function constructors.newhandler(what) -- could be a metatable newindex
+ local handler = handlers[what]
+ if not handler then
+ handler = { }
+ handlers[what] = handler
+ end
+ return handler
+end
+
+function constructors.newfeatures(what) -- could be a metatable newindex
+ local handler = handlers[what]
+ local features = handler.features
if not features then
- local tables = handlers[what].tables -- can be preloaded
+ local tables = handler.tables -- can be preloaded
+ local statistics = handler.statistics -- can be preloaded
features = allocate {
defaults = { },
descriptions = tables and tables.features or { },
+ used = statistics and statistics.usedfeatures or { },
initializers = { base = { }, node = { } },
processors = { base = { }, node = { } },
manipulators = { base = { }, node = { } },
}
features.register = function(specification) return register(features,specification) end
- handlers[what].features = features -- will also become hidden
+ handler.features = features -- will also become hidden
end
return features
end
@@ -1234,8 +1235,8 @@ function constructors.initializefeatures(what,tfmdata,features,trace,report)
else
local action = step.action
if trace then
- report("initializing feature %s to %s for mode %s for font %s",feature,
- tostring(value),mode or 'unknown', tfmdata.properties.fullname or 'unknown')
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
end
action(tfmdata,value,features) -- can set mode (e.g. goodies) so it can trigger a restart
if mode ~= properties.mode or mode ~= features.mode then
@@ -1285,8 +1286,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
if features[feature] then
local action = step.action
if trace then
- report("installing feature processor %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
end
if action then
nofprocesses = nofprocesses + 1
@@ -1295,8 +1295,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
end
end
elseif trace then
- report("no feature processors for mode %s for font %s",
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
+ report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname)
end
end
return processes
@@ -1319,8 +1318,7 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
if value then
local action = step.action
if trace then
- report("applying feature manipulator %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
end
if action then
action(tfmdata,feature,value)
diff --git a/Master/texmf-dist/tex/context/base/font-ctx.lua b/Master/texmf-dist/tex/context/base/font-ctx.lua
index 4b324a6db79..cb640b892e2 100644
--- a/Master/texmf-dist/tex/context/base/font-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/font-ctx.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['font-ctx'] = {
-- At some point I will clean up the code here so that at the tex end
-- the table interface is used.
+--
+-- Todo: make a proper 'next id' mechanism (register etc) or wait till 'true'
+-- in virtual fonts indices is implemented.
+
+local context, commands = context, commands
local texcount, texsetcount = tex.count, tex.setcount
local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte
@@ -15,8 +20,9 @@ local concat, serialize, sort, fastcopy, mergedtable = table.concat, table.seria
local sortedhash, sortedkeys, sequenced = table.sortedhash, table.sortedkeys, table.sequenced
local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, utilities.parsers.hash_to_string
local formatcolumns = utilities.formatters.formatcolumns
+local mergehashes = utilities.parsers.mergehashes
-local tostring, next, type = tostring, next, type
+local tostring, next, type, rawget = tostring, next, type, rawget
local utfchar, utfbyte = utf.char, utf.byte
local round = math.round
@@ -30,6 +36,7 @@ local trace_mapfiles = false trackers.register("fonts.mapfiles", functio
local trace_automode = false trackers.register("fonts.automode", function(v) trace_automode = v end)
local report_features = logs.reporter("fonts","features")
+local report_cummulative = logs.reporter("fonts","cummulative")
local report_defining = logs.reporter("fonts","defining")
local report_status = logs.reporter("fonts","status")
local report_mapfiles = logs.reporter("fonts","mapfiles")
@@ -47,13 +54,21 @@ local loggers = fonts.loggers
local fontgoodies = fonts.goodies
local helpers = fonts.helpers
local hashes = fonts.hashes
-local fontdata = hashes.identifiers
local currentfont = font.current
local texattribute = tex.attribute
+local fontdata = hashes.identifiers
+local characters = hashes.chardata
+local descriptions = hashes.descriptions
+local properties = hashes.properties
+local resources = hashes.resources
+local csnames = hashes.csnames
+local marks = hashes.markdata
+local lastmathids = hashes.lastmathids
+
local designsizefilename = fontgoodies.designsizes.filename
-local otffeatures = fonts.constructors.newfeatures("otf")
+local otffeatures = handlers.otf.features
local registerotffeature = otffeatures.register
local baseprocessors = otffeatures.processors.base
local baseinitializers = otffeatures.initializers.base
@@ -77,27 +92,30 @@ storage.register("fonts/numbers", numbers, "fonts.specifiers.contextnumbers")
storage.register("fonts/merged", merged, "fonts.specifiers.contextmerged")
storage.register("fonts/synonyms", synonyms, "fonts.specifiers.synonyms")
-constructors.resolvevirtualtoo = true -- context specific (due to resolver)
+-- inspect(setups)
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local nulldata = {
- name = "nullfont",
- characters = { },
- descriptions = { },
- properties = { },
- parameters = { -- lmromanregular @ 12pt
- slant = 0, -- 1
- space = 256377, -- 2
- space_stretch = 128188, -- 3
- space_shrink = 85459, -- 4
- x_height = 338952, -- 5
- quad = 786432, -- 6
- extra_space = 85459, -- 7
- },
-}
+if environment.initex then
+ setmetatableindex(setups,function(t,k)
+ return type(k) == "number" and rawget(t,numbers[k]) or nil
+ end)
+else
+ setmetatableindex(setups,function(t,k)
+ local v = type(k) == "number" and rawget(t,numbers[k])
+ if v then
+ t[k] = v
+ return v
+ end
+ end)
+end
-constructors.enhanceparameters(nulldata.parameters) -- official copies for us
+-- this will move elsewhere ...
+
+utilities.strings.formatters.add(string.formatters,"font:name", [["'"..file.basename(%s.properties.name).."'"]])
+utilities.strings.formatters.add(string.formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]])
+
+-- ... like font-sfm or so
+
+constructors.resolvevirtualtoo = true -- context specific (due to resolver)
local limited = false
@@ -114,7 +132,7 @@ end)
function definers.resetnullfont()
-- resetting is needed because tikz misuses nullfont
- local parameters = nulldata.parameters
+ local parameters = fonts.nulldata.parameters
--
parameters.slant = 0 -- 1
parameters.space = 0 -- 2
@@ -131,97 +149,6 @@ end
commands.resetnullfont = definers.resetnullfont
-setmetatableindex(fontdata, function(t,k) return nulldata end)
-
--- we might make an font-hsh.lua
-
-local chardata = allocate() -- chardata
-local descriptions = allocate()
-local parameters = allocate()
-local properties = allocate()
-local resources = allocate()
-local quaddata = allocate() -- maybe also spacedata
-local markdata = allocate()
-local xheightdata = allocate()
-local csnames = allocate() -- namedata
-local italicsdata = allocate()
-
-hashes.characters = chardata
-hashes.descriptions = descriptions
-hashes.parameters = parameters
-hashes.properties = properties
-hashes.resources = resources
-hashes.quads = quaddata
-hashes.marks = markdata
-hashes.xheights = xheightdata
-hashes.csnames = csnames
-hashes.italics = italicsdata
-
-setmetatableindex(chardata, function(t,k)
- local characters = fontdata[k].characters
- t[k] = characters
- return characters
-end)
-
-setmetatableindex(descriptions, function(t,k)
- local descriptions = fontdata[k].descriptions
- t[k] = descriptions
- return descriptions
-end)
-
-setmetatableindex(parameters, function(t,k)
- local parameters = fontdata[k].parameters
- t[k] = parameters
- return parameters
-end)
-
-setmetatableindex(properties, function(t,k)
- local properties = fontdata[k].properties
- t[k] = properties
- return properties
-end)
-
-setmetatableindex(resources, function(t,k)
- local shared = fontdata[k].shared
- local rawdata = shared and shared.rawdata
- local resources = rawdata and rawdata.resources
- t[k] = resources or false -- better than resolving each time
- return resources
-end)
-
-setmetatableindex(quaddata, function(t,k)
- local parameters = parameters[k]
- local quad = parameters and parameters.quad or 0
- t[k] = quad
- return quad
-end)
-
-setmetatableindex(markdata, function(t,k)
- local resources = fontdata[k].resources or { }
- local marks = resources.marks or { }
- t[k] = marks
- return marks
-end)
-
-setmetatableindex(xheightdata, function(t,k)
- local parameters = parameters[k]
- local xheight = parameters and parameters.xheight or 0
- t[k] = xheight
- return quad
-end)
-
-setmetatableindex(italicsdata, function(t,k) -- is test !
- local properties = fontdata[k].properties
- local hasitalics = properties and properties.hasitalics
- if hasitalics then
- hasitalics = chardata[k] -- convenient return
- else
- hasitalics = false
- end
- t[k] = hasitalics
- return hasitalics
-end)
-
-- this cannot be a feature initializer as there is no auto namespace
-- so we never enter the loop then; we can store the defaults in the tma
-- file (features.gpos.mkmk = 1 etc)
@@ -259,7 +186,7 @@ local function checkedscript(tfmdata,resources,features)
script = latn and "latn" or "dflt"
end
if trace_automode then
- report_defining("auto script mode: using script '%s' in font '%s'",script,file.basename(tfmdata.properties.name))
+ report_defining("auto script mode, using script %a in font %!font:name!",script,tfmdata)
end
features.script = script
return script
@@ -284,13 +211,15 @@ local function checkedmode(tfmdata,resources,features)
if found then
-- more than one lookup
if trace_automode then
- report_defining("forcing node mode in font %s for feature %s, script %s, language %s (multiple lookups)",file.basename(tfmdata.properties.name),feature,script,language)
+ report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s",
+ "node",tfmdata,feature,script,language,"multiple lookups")
end
features.mode = "node"
return "node"
elseif needsnodemode[sequence.type] then
if trace_automode then
- report_defining("forcing node mode in font %s for feature %s, script %s, language %s (no base support)",file.basename(tfmdata.properties.name),feature,script,language)
+ report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s",
+ "node",tfmdata,feature,script,language,"no base support")
end
features.mode = "node"
return "node"
@@ -314,7 +243,7 @@ definers.checkedmode = checkedmode
local function modechecker(tfmdata,features,mode) -- we cannot adapt features as they are shared!
if trace_features then
- report_features(serialize(features,"used"))
+ report_features("fontname %!font:name!, features %!font:features!",tfmdata,features)
end
local rawdata = tfmdata.shared.rawdata
local resources = rawdata and rawdata.resources
@@ -327,7 +256,7 @@ local function modechecker(tfmdata,features,mode) -- we cannot adapt features as
mode = checkedmode(tfmdata,resources,features)
end
else
- report_features("missing resources for font''%s'",file.basename(tfmdata.properties.name))
+ report_features("missing resources for font %!font:name!",tfmdata)
end
return mode
end
@@ -440,12 +369,23 @@ definers.registersplit("@", predefined,"virtual")
local normalize_features = otffeatures.normalize -- should be general
+local function definecontext(name,t) -- can be shared
+ local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
+ if number == 0 then
+ number = #numbers + 1
+ numbers[number] = name
+ end
+ t.number = number
+ setups[name] = t
+ return number, t
+end
+
local function presetcontext(name,parent,features) -- will go to con and shared
if features == "" and find(parent,"=") then
features = parent
parent = ""
end
- if features == "" then
+ if not features or features == "" then
features = { }
elseif type(features) == "string" then
features = normalize_features(settings_to_hash(features))
@@ -462,18 +402,22 @@ local function presetcontext(name,parent,features) -- will go to con and shared
features[k] = v
end
end
+ else
+ -- just ignore an undefined one .. i.e. we can refer to not yet defined
end
end
end
-- these are auto set so in order to prevent redundant definitions
-- we need to preset them (we hash the features and adding a default
-- setting during initialization may result in a different hash)
---~ for k,v in next, triggers do
---~ if features[v] == nil then -- not false !
---~ local vv = default_features[v]
---~ if vv then features[v] = vv end
---~ end
---~ end
+ --
+ -- for k,v in next, triggers do
+ -- if features[v] == nil then -- not false !
+ -- local vv = default_features[v]
+ -- if vv then features[v] = vv end
+ -- end
+ -- end
+ --
for feature,value in next, features do
if value == nil then -- not false !
local default = default_features[feature]
@@ -486,11 +430,12 @@ local function presetcontext(name,parent,features) -- will go to con and shared
-- optimization)
local t = { } -- can we avoid t ?
for k,v in next, features do
- if v then t[k] = v end
+-- if v then t[k] = v end
+ t[k] = v
end
-- needed for dynamic features
-- maybe number should always be renewed as we can redefine features
- local number = (setups[name] and setups[name].number) or 0 -- hm, numbers[name]
+ local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
if number == 0 then
number = #numbers + 1
numbers[number] = name
@@ -531,10 +476,10 @@ local function contextnumber(name) -- will be replaced
end
end
-local function mergecontext(currentnumber,extraname,option)
- local current = setups[numbers[currentnumber]]
+local function mergecontext(currentnumber,extraname,option) -- number string number (used in scrp-ini
local extra = setups[extraname]
if extra then
+ local current = setups[numbers[currentnumber]]
local mergedfeatures, mergedname = { }, nil
if option < 0 then
if current then
@@ -567,6 +512,56 @@ local function mergecontext(currentnumber,extraname,option)
end
end
+local extrasets = { }
+
+setmetatableindex(extrasets,function(t,k)
+ local v = mergehashes(setups,k)
+ t[k] = v
+ return v
+end)
+
+local function mergecontextfeatures(currentname,extraname,how,mergedname) -- string string
+ local extra = setups[extraname] or extrasets[extraname]
+ if extra then
+ local current = setups[currentname]
+ local mergedfeatures = { }
+ if how == "+" then
+ if current then
+ for k, v in next, current do
+ mergedfeatures[k] = v
+ end
+ end
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ elseif how == "-" then
+ if current then
+ for k, v in next, current do
+ mergedfeatures[k] = v
+ end
+ end
+ for k, v in next, extra do
+ -- only boolean features
+ if v == true then
+ mergedfeatures[k] = false
+ end
+ end
+ else -- =
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ merged[number] = option
+ setups[mergedname] = mergedfeatures
+ return number
+ else
+ return numbers[currentname] or 0
+ end
+end
+
local function registercontext(fontnumber,extraname,option)
local extra = setups[extraname]
if extra then
@@ -590,10 +585,29 @@ local function registercontext(fontnumber,extraname,option)
end
end
+local function registercontextfeature(mergedname,extraname,how)
+ local extra = setups[extraname]
+ if extra then
+ local mergedfeatures = { }
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine
+ setups[mergedname] = mergedfeatures
+ return number -- contextnumber(mergedname)
+ else
+ return 0
+ end
+end
+
specifiers.presetcontext = presetcontext
specifiers.contextnumber = contextnumber
specifiers.mergecontext = mergecontext
specifiers.registercontext = registercontext
+specifiers.definecontext = definecontext
-- we extend the hasher:
@@ -602,7 +616,7 @@ constructors.hashmethods.virtual = function(list)
local n = 0
for k, v in next, list do
n = n + 1
- s[n] = k
+ s[n] = k -- no checking on k
end
if n > 0 then
sort(s)
@@ -616,83 +630,96 @@ end
-- end of redefine
-local cache = { } -- concat might be less efficient than nested tables
-
-local function withset(name,what)
- local zero = texattribute[0]
- local hash = zero .. "+" .. name .. "*" .. what
- local done = cache[hash]
- if not done then
- done = mergecontext(zero,name,what)
- cache[hash] = done
- end
- texattribute[0] = done
-end
-
-local function withfnt(name,what)
- local font = currentfont()
- local hash = font .. "*" .. name .. "*" .. what
- local done = cache[hash]
- if not done then
- done = registercontext(font,name,what)
- cache[hash] = done
- end
- texattribute[0] = done
-end
+-- local withcache = { } -- concat might be less efficient than nested tables
+--
+-- local function withset(name,what)
+-- local zero = texattribute[0]
+-- local hash = zero .. "+" .. name .. "*" .. what
+-- local done = withcache[hash]
+-- if not done then
+-- done = mergecontext(zero,name,what)
+-- withcache[hash] = done
+-- end
+-- texattribute[0] = done
+-- end
+--
+-- local function withfnt(name,what,font)
+-- local font = font or currentfont()
+-- local hash = font .. "*" .. name .. "*" .. what
+-- local done = withcache[hash]
+-- if not done then
+-- done = registercontext(font,name,what)
+-- withcache[hash] = done
+-- end
+-- texattribute[0] = done
+-- end
function specifiers.showcontext(name)
return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { }
end
--- todo: support a,b,c
-
-- we need a copy as we will add (fontclass) goodies to the features and
-- that is bad for a shared table
+-- local function splitcontext(features) -- presetcontext creates dummy here
+-- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features]))
+-- end
+
local function splitcontext(features) -- presetcontext creates dummy here
- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features]))
+ local sf = setups[features]
+ if not sf then
+ local n -- number
+ if find(features,",") then
+ -- let's assume a combination which is not yet defined but just specified (as in math)
+ n, sf = presetcontext(features,features,"")
+ else
+ -- we've run into an unknown feature and or a direct spec so we create a dummy
+ n, sf = presetcontext(features,"","")
+ end
+ end
+ return fastcopy(sf)
end
---~ local splitter = lpeg.splitat("=")
-
---~ local function splitcontext(features)
---~ local setup = setups[features]
---~ if setup then
---~ return setup
---~ elseif find(features,",") then
---~ -- This is not that efficient but handy anyway for quick and dirty tests
---~ -- beware, due to the way of caching setups you can get the wrong results
---~ -- when components change. A safeguard is to nil the cache.
---~ local merge = nil
---~ for feature in gmatch(features,"[^, ]+") do
---~ if find(feature,"=") then
---~ local k, v = lpegmatch(splitter,feature)
---~ if k and v then
---~ if not merge then
---~ merge = { k = v }
---~ else
---~ merge[k] = v
---~ end
---~ end
---~ else
---~ local s = setups[feature]
---~ if not s then
---~ -- skip
---~ elseif not merge then
---~ merge = s
---~ else
---~ for k, v in next, s do
---~ merge[k] = v
---~ end
---~ end
---~ end
---~ end
---~ setup = merge and presetcontext(features,"",merge) and setups[features]
---~ -- actually we have to nil setups[features] in order to permit redefinitions
---~ setups[features] = nil
---~ end
---~ return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy
---~ end
+-- local splitter = lpeg.splitat("=")
+--
+-- local function splitcontext(features)
+-- local setup = setups[features]
+-- if setup then
+-- return setup
+-- elseif find(features,",") then
+-- -- This is not that efficient but handy anyway for quick and dirty tests
+-- -- beware, due to the way of caching setups you can get the wrong results
+-- -- when components change. A safeguard is to nil the cache.
+-- local merge = nil
+-- for feature in gmatch(features,"[^, ]+") do
+-- if find(feature,"=") then
+-- local k, v = lpegmatch(splitter,feature)
+-- if k and v then
+-- if not merge then
+-- merge = { k = v }
+-- else
+-- merge[k] = v
+-- end
+-- end
+-- else
+-- local s = setups[feature]
+-- if not s then
+-- -- skip
+-- elseif not merge then
+-- merge = s
+-- else
+-- for k, v in next, s do
+-- merge[k] = v
+-- end
+-- end
+-- end
+-- end
+-- setup = merge and presetcontext(features,"",merge) and setups[features]
+-- -- actually we have to nil setups[features] in order to permit redefinitions
+-- setups[features] = nil
+-- end
+-- return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy
+-- end
specifiers.splitcontext = splitcontext
@@ -750,6 +777,18 @@ local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value
local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none)
local splitpattern = spaces * value * spaces * rest
+function helpers.splitfontpattern(str)
+ local name, size = lpegmatch(splitpattern,str)
+ local kind, size = lpegmatch(sizepattern,size)
+ return name, kind, size
+end
+
+function helpers.fontpatternhassize(str)
+ local name, size = lpegmatch(splitpattern,str)
+ local kind, size = lpegmatch(sizepattern,size)
+ return size or false
+end
+
local specification -- still needed as local ?
local getspecification = definers.getspecification
@@ -761,6 +800,7 @@ local setdefaultfontname = context.fntsetdefname
local setsomefontname = context.fntsetsomename
local setemptyfontsize = context.fntsetnopsize
local setsomefontsize = context.fntsetsomesize
+local letvaluerelax = context.letvaluerelax
function commands.definefont_one(str)
statistics.starttiming(fonts)
@@ -771,7 +811,7 @@ function commands.definefont_one(str)
local fullname, size = lpegmatch(splitpattern,str)
local lookup, name, sub, method, detail = getspecification(fullname)
if not name then
- report_defining("strange definition '%s'",str)
+ report_defining("strange definition %a",str)
setdefaultfontname()
elseif name == "unknown" then
setdefaultfontname()
@@ -814,7 +854,7 @@ end
function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize)
if trace_defining then
- report_defining("start stage two: %s (%s)",str,size)
+ report_defining("start stage two: %s (size %s)",str,size)
end
-- name is now resolved and size is scaled cf sa/mo
local lookup, name, sub, method, detail = getspecification(str or "")
@@ -823,7 +863,7 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature
local designname = designsizefilename(name,designsize,size)
if designname and designname ~= "" then
if trace_defining or trace_designsize then
- report_defining("remapping name: %s, specification: %s, size: %s => designsize: %s",name,designsize,size,designname)
+ report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname)
end
-- we don't catch detail here
local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname)
@@ -905,27 +945,28 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature
end
end
local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?)
+ --
+ local lastfontid = 0
if not tfmdata then
- report_defining("unable to define %s as [%s]",name,nice_cs(cs))
- texsetcount("global","lastfontid",-1)
- context.letvaluerelax(cs) -- otherwise the current definition takes the previous one
+ report_defining("unable to define %a as %a",name,nice_cs(cs))
+ lastfontid = -1
+ letvaluerelax(cs) -- otherwise the current definition takes the previous one
elseif type(tfmdata) == "number" then
if trace_defining then
- report_defining("reusing %s with id %s as [%s] (features: %s/%s, fallbacks: %s/%s, goodies: %s/%s, designsize: %s/%s)",
+ report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a",
name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize)
end
csnames[tfmdata] = specification.cs
tex.definefont(global,cs,tfmdata)
-- resolved (when designsize is used):
setsomefontsize(fontdata[tfmdata].parameters.size .. "sp")
- texsetcount("global","lastfontid",tfmdata)
+ lastfontid = tfmdata
else
-- setting the extra characters will move elsewhere
local characters = tfmdata.characters
local parameters = tfmdata.parameters
- -- we use char0 as signal
+ -- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
characters[0] = nil
- -- cf the spec pdf can handle this (no char in slot)
-- characters[0x00A0] = { width = parameters.space }
-- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure
-- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period
@@ -938,20 +979,27 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature
constructors.cleanuptable(tfmdata)
constructors.finalize(tfmdata)
if trace_defining then
- report_defining("defining %s with id %s as [%s] (features: %s/%s, fallbacks: %s/%s)",
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
end
-- resolved (when designsize is used):
setsomefontsize((tfmdata.parameters.size or 655360) .. "sp")
- --~ if specification.fallbacks then
- --~ fonts.collections.prepare(specification.fallbacks)
- --~ end
- texsetcount("global","lastfontid",id)
+ lastfontid = id
end
if trace_defining then
report_defining("memory usage after: %s",statistics.memused())
report_defining("stop stage two")
end
+ --
+ texsetcount("global","lastfontid",lastfontid)
+ if not mathsize then
+ -- forget about it
+ elseif mathsize == 0 then
+ lastmathids[1] = lastfontid
+ else
+ lastmathids[mathsize] = lastfontid
+ end
+ --
statistics.stoptiming(fonts)
end
@@ -1030,17 +1078,17 @@ local calculatescale = constructors.calculatescale
function constructors.calculatescale(tfmdata,scaledpoints,relativeid)
local scaledpoints, delta = calculatescale(tfmdata,scaledpoints)
---~ if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific
---~ local relativedata = fontdata[relativeid]
---~ local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled
---~ local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height
---~ local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height
---~ if id_x_height and tf_x_height then
---~ local rscale = id_x_height/tf_x_height
---~ delta = rscale * delta
---~ scaledpoints = rscale * scaledpoints
---~ end
---~ end
+ -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific
+ -- local relativedata = fontdata[relativeid]
+ -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled
+ -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height
+ -- local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height
+ -- if id_x_height and tf_x_height then
+ -- local rscale = id_x_height/tf_x_height
+ -- delta = rscale * delta
+ -- scaledpoints = rscale * scaledpoints
+ -- end
+ -- end
return scaledpoints, delta
end
@@ -1103,7 +1151,7 @@ function mappings.loadfile(name)
name = file.addsuffix(name,"map")
if not loaded[name] then
if trace_mapfiles then
- report_mapfiles("loading map file '%s'",name)
+ report_mapfiles("loading map file %a",name)
end
pdf.mapfile(name)
loaded[name] = true
@@ -1121,7 +1169,7 @@ function mappings.loadline(how,line)
end
if not loaded[how] then
if trace_mapfiles then
- report_mapfiles("processing map line '%s'",line)
+ report_mapfiles("processing map line %a",line)
end
pdf.mapline(how)
loaded[how] = true
@@ -1141,10 +1189,7 @@ mappings.reset() -- resets the default file
local function nametoslot(name)
local t = type(name)
if t == "string" then
- local tfmdata = fonts.hashes.identifiers[currentfont()]
- local shared = tfmdata and tfmdata.shared
- local fntdata = shared and shared.rawdata
- return fntdata and fntdata.resources.unicodes[name]
+ return resources[true].unicodes[name]
elseif t == "number" then
return n
end
@@ -1162,7 +1207,7 @@ function loggers.reportdefinedfonts()
local parameters = data.parameters or { }
tn = tn + 1
t[tn] = {
- format("%03i",id or 0),
+ format("%03i",id or 0),
format("%09i",parameters.size or 0),
properties.type or "real",
properties.format or "unknown",
@@ -1170,7 +1215,7 @@ function loggers.reportdefinedfonts()
properties.psname or "",
properties.fullname or "",
}
-report_status("%s: %s",properties.name,concat(sortedkeys(data)," "))
+ report_status("%s: % t",properties.name,sortedkeys(data))
end
formatcolumns(t," ")
report_status()
@@ -1320,15 +1365,10 @@ function commands.resetfontfeature()
texattribute[0] = 0
end
-function commands.addfs(tag) withset(tag, 1) end
-function commands.subfs(tag) withset(tag,-1) end
-function commands.addff(tag) withfnt(tag, 2) end
-function commands.subff(tag) withfnt(tag,-2) end
-
--- function commands.addfontfeaturetoset (tag) withset(tag, 1) end
--- function commands.subtractfontfeaturefromset (tag) withset(tag,-1) end
--- function commands.addfontfeaturetofont (tag) withfnt(tag, 2) end
--- function commands.subtractfontfeaturefromfont(tag) withfnt(tag,-2) end
+-- function commands.addfs(tag) withset(tag, 1) end
+-- function commands.subfs(tag) withset(tag,-1) end
+-- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features
+-- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features
function commands.cleanfontname (name) context(names.cleanname(name)) end
@@ -1347,7 +1387,7 @@ function commands.showchardata(n)
end
local chr = tfmdata.characters[n]
if chr then
- report_status("%s @ %s => U%05X => %s => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,utfchar(n),serialize(chr,false))
+ report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false))
end
end
end
@@ -1384,7 +1424,8 @@ function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instan
elseif unit == "em" then
return (tfmdata and tfmdata.parameters.em_width) or 655360
else
- return dimenfactors[unit] or unit
+ local du = dimenfactors[unit]
+ return du and 1/du or tonumber(unit) or 1
end
end
@@ -1536,8 +1577,184 @@ end
-- more interfacing:
-commands.definefontfeature = fonts.specifiers.presetcontext
+commands.definefontfeature = presetcontext
+
+local cache = { }
+
+local hows = {
+ ["+"] = "add",
+ ["-"] = "subtract",
+ ["="] = "replace",
+}
+
+function commands.feature(how,parent,name,font)
+ if not how then
+ if trace_features and texattribute[0] ~= 0 then
+ report_cummulative("font %!font:name!, reset",fontdata[font or true])
+ end
+ texattribute[0] = 0
+ elseif how == true then
+ local hash = "feature > " .. parent
+ local done = cache[hash]
+ if trace_features and done then
+ report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]])
+ end
+ texattribute[0] = done or 0
+ else
+ local full = parent .. how .. name
+ local hash = "feature > " .. full
+ local done = cache[hash]
+ if not done then
+ local n = setups[full]
+ if n then
+ -- already defined
+ else
+ n = mergecontextfeatures(parent,name,how,full)
+ end
+ done = registercontextfeature(hash,full,how)
+ cache[hash] = done
+ if trace_features then
+ report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]])
+ end
+ end
+ texattribute[0] = done
+ end
+end
function commands.featurelist(...)
context(fonts.specifiers.contexttostring(...))
end
+
+function commands.registerlanguagefeatures()
+ local specifications = languages.data.specifications
+ for i=1,#specifications do
+ local specification = specifications[i]
+ local language = specification.opentype
+ if language then
+ local script = specification.opentypescript or specification.script
+ if script then
+ local context = specification.context
+ if type(context) == "table" then
+ for i=1,#context do
+ definecontext(context[i], { language = language, script = script})
+ end
+ elseif type(context) == "string" then
+ definecontext(context, { language = language, script = script})
+ end
+ end
+ end
+ end
+end
+
+-- a fontkern plug:
+
+local copy_node = node.copy
+local kern = nodes.pool.register(nodes.pool.kern())
+
+node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
+
+nodes.injections.installnewkern(function(k)
+ local c = copy_node(kern)
+ c.kern = k
+ return c
+end)
+
+directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end)
+
+-- here
+
+local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local analyzers = fonts.analyzers
+local methods = analyzers.methods
+
+local unsetvalue = attributes.unsetvalue
+
+local traverse_by_id = node.traverse_id
+
+local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
+local a_state = attributes.private('state')
+local m_color = attributes.list[a_color] or { }
+
+local glyph_code = nodes.nodecodes.glyph
+
+local states = analyzers.states
+
+local names = {
+ [states.init] = "font:1",
+ [states.medi] = "font:2",
+ [states.fina] = "font:3",
+ [states.isol] = "font:4",
+ [states.mark] = "font:5",
+ [states.rest] = "font:6",
+ [states.rphf] = "font:1",
+ [states.half] = "font:2",
+ [states.pref] = "font:3",
+ [states.blwf] = "font:4",
+ [states.pstf] = "font:5",
+}
+
+local function markstates(head)
+ if head then
+ local model = head[a_colormodel] or 1
+ for glyph in traverse_by_id(glyph_code,head) do
+ local a = glyph[a_state]
+ if a then
+ local name = names[a]
+ if name then
+ local color = m_color[name]
+ if color then
+ glyph[a_colormodel] = model
+ glyph[a_color] = color
+ end
+ end
+ end
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ return head, false
+ end
+ if type(action) == "function" then
+ local head, done = action(head,font,attr)
+ if done and trace_analyzing then
+ markstates(head)
+ end
+ return head, done
+ end
+ action = action[language]
+ if action then
+ local head, done = action(head,font,attr)
+ if done and trace_analyzing then
+ markstates(head)
+ end
+ return head, done
+ else
+ return head, false
+ end
+end
+
+registerotffeature { -- adapts
+ name = "analyze",
+ processors = {
+ node = analyzeprocessor,
+ }
+}
+
+function methods.nocolor(head,font,attr)
+ for n in traverse_by_id(glyph_code,head) do
+ if not font or n.font == font then
+ n[a_color] = unsetvalue
+ end
+ end
+ return head, true
+end
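
The commands.feature handler above keeps the attribute number for each requested combination in a cache keyed by the composed string "feature > parent+name" (or with - / =), so a repeated add/subtract/replace request reuses the dynamic that was registered the first time. Below is a minimal, self-contained Lua sketch of that caching pattern, not the ConTeXt API itself; register is a stand-in for registercontextfeature (which is not part of this hunk) and simply hands out increasing numbers.

    -- illustrative only: the caching pattern behind commands.feature
    local cache   = { }
    local numbers = 0

    local function register(hash) -- stand-in for registercontextfeature
        numbers = numbers + 1
        return numbers
    end

    local function feature(how,parent,name) -- how is "+", "-" or "="
        local full = parent .. how .. name
        local hash = "feature > " .. full
        local done = cache[hash]
        if not done then
            done = register(hash) -- first request: register and remember the number
            cache[hash] = done
        end
        return done -- in the real code this ends up in texattribute[0]
    end

    print(feature("+","current","smallcaps")) -- 1
    print(feature("+","current","smallcaps")) -- 1, served from the cache
    print(feature("-","current","oldstyle"))  -- 2
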
diff --git a/Master/texmf-dist/tex/context/base/font-def.lua b/Master/texmf-dist/tex/context/base/font-def.lua
index 96de480ac86..5074e49ed75 100644
--- a/Master/texmf-dist/tex/context/base/font-def.lua
+++ b/Master/texmf-dist/tex/context/base/font-def.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['font-def'] = {
license = "see context related readme files"
}
-local concat = table.concat
+-- We can overload some of the definers.functions so we don't local them.
+
local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
local tostring, next = tostring, next
local lpegmatch = lpeg.match
@@ -32,6 +33,7 @@ local readers = fonts.readers
local definers = fonts.definers
local specifiers = fonts.specifiers
local constructors = fonts.constructors
+local fontgoodies = fonts.goodies
readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
@@ -42,10 +44,13 @@ definers.methods = definers.methods or { }
local internalized = allocate() -- internal tex numbers (private)
-
local loadedfonts = constructors.loadedfonts
local designsizes = constructors.designsizes
+-- not in generic (some day I'll make two defs, one for context, one for generic)
+
+local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+
--[[ldx--
<p>We hardly gain anything when we cache the final (pre scaled)
<l n='tfm'/> table. But it can be handy for debugging, so we no
@@ -72,7 +77,7 @@ and prepares a table that will move along as we proceed.</p>
-- name name(sub) name(sub)*spec name*spec
-- name@spec*oeps
-local splitter, splitspecifiers = nil, ""
+local splitter, splitspecifiers = nil, "" -- not so nice
local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
@@ -83,7 +88,7 @@ local space = P(" ")
definers.defaultlookup = "file"
-local prefixpattern = P(false)
+local prefixpattern = P(false)
local function addspecifier(symbol)
splitspecifiers = splitspecifiers .. symbol
@@ -119,16 +124,15 @@ function definers.registersplit(symbol,action,verbosename)
end
end
-function definers.makespecification(specification,lookup,name,sub,method,detail,size)
+local function makespecification(specification,lookup,name,sub,method,detail,size)
size = size or 655360
- if trace_defining then
- report_defining("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
- specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
- (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
- end
if not lookup or lookup == "" then
lookup = definers.defaultlookup
end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification, lookup, name, sub, method, detail)
+ end
local t = {
lookup = lookup, -- forced type
specification = specification, -- full specification
@@ -144,10 +148,13 @@ function definers.makespecification(specification,lookup,name,sub,method,detail,
return t
end
+
+definers.makespecification = makespecification
+
function definers.analyze(specification, size)
-- can be optimized with locals
local lookup, name, sub, method, detail = getspecification(specification or "")
- return definers.makespecification(specification, lookup, name, sub, method, detail, size)
+ return makespecification(specification, lookup, name, sub, method, detail, size)
end
--[[ldx--
@@ -160,10 +167,13 @@ local resolvers = definers.resolvers
-- todo: reporter
function resolvers.file(specification)
- local suffix = file.suffix(specification.name)
+ local name = resolvefile(specification.name) -- catch for renames
+ local suffix = file.suffix(name)
if fonts.formats[suffix] then
specification.forced = suffix
- specification.name = file.removesuffix(specification.name)
+ specification.name = file.removesuffix(name)
+ else
+        specification.name = name -- can be resolved
end
end
@@ -194,7 +204,7 @@ function resolvers.spec(specification)
if resolved then
specification.resolved = resolved
specification.sub = sub
- specification.forced = file.extname(resolved)
+ specification.forced = file.suffix(resolved)
specification.name = file.removesuffix(resolved)
end
else
@@ -242,12 +252,13 @@ specification yet.</p>
function definers.applypostprocessors(tfmdata)
local postprocessors = tfmdata.postprocessors
if postprocessors then
+ local properties = tfmdata.properties
for i=1,#postprocessors do
local extrahash = postprocessors[i](tfmdata) -- after scaling etc
if type(extrahash) == "string" and extrahash ~= "" then
-- e.g. a reencoding needs this
extrahash = gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
+ properties.fullname = format("%s-%s",properties.fullname,extrahash)
end
end
end
@@ -285,7 +296,7 @@ function definers.loadfont(specification)
local reader = readers[lower(forced)]
tfmdata = reader and reader(specification)
if not tfmdata then
- report_defining("forced type %s of %s not found",forced,specification.name)
+ report_defining("forced type %a of %a not found",forced,specification.name)
end
else
local sequence = readers.sequence -- can be overloaded so only a shortcut here
@@ -293,7 +304,7 @@ function definers.loadfont(specification)
local reader = sequence[s]
if readers[reader] then -- we skip not loaded readers
if trace_defining then
- report_defining("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
end
tfmdata = readers[reader](specification)
if tfmdata then
@@ -312,14 +323,26 @@ function definers.loadfont(specification)
end
end
if not tfmdata then
- report_defining("font with asked name '%s' is not found using lookup '%s'",specification.name,specification.lookup)
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
end
return tfmdata
end
---[[ldx--
-<p>For virtual fonts we need a slightly different approach:</p>
---ldx]]--
+local function checkvirtual(tfmdata)
+ -- begin of experiment: we can use { "slot", 0, number } in virtual fonts
+ local fonts = tfmdata.fonts
+ local selfid = font.nextid()
+ if fonts and #fonts > 0 then
+ for i=1,#fonts do
+ if fonts[i][2] == 0 then
+ fonts[i][2] = selfid
+ end
+ end
+ else
+ tfmdata.fonts = { "id", selfid }
+ end
+ -- end of experiment
+end
function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
local specification = definers.analyze(name,size)
@@ -333,7 +356,7 @@ function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
if not id then
local tfmdata = definers.loadfont(specification)
if tfmdata then
- tfmdata.properties.hash = hash
+ checkvirtual(tfmdata) -- experiment, will become obsolete when slots can selfreference
id = font.define(tfmdata)
definers.register(tfmdata,id)
else
@@ -373,7 +396,7 @@ function definers.register(tfmdata,id)
if not internalized[hash] then
internalized[hash] = id
if trace_defining then
- report_defining("registering font, id: %s, hash: %s",id or "?",hash or "?")
+ report_defining("registering font, id %s, hash %a",id,hash)
end
fontdata[id] = tfmdata
end
@@ -402,7 +425,6 @@ function definers.read(specification,size,id) -- id can be optional, name can al
if trace_defining then
report_defining("loaded and hashed: %s",hash)
end
- --~ constructors.checkvirtualid(tfmdata) -- interferes
tfmdata.properties.hash = hash
if id then
definers.register(tfmdata,id)
@@ -415,24 +437,22 @@ function definers.read(specification,size,id) -- id can be optional, name can al
end
lastdefined = tfmdata or id -- todo ! ! ! ! !
if not tfmdata then -- or id?
- report_defining( "unknown font %s, loading aborted",specification.name)
+ report_defining( "unknown font %a, loading aborted",specification.name)
elseif trace_defining and type(tfmdata) == "table" then
local properties = tfmdata.properties or { }
local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
- properties.format or "unknown",
- id or "?",
- properties.name or "?",
- parameters.size or "default",
- properties.encodingbytes or "?",
- properties.encodingname or "unicode",
- properties.fullname or "?",
- file.basename(properties.filename or "?"))
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format, id, properties.name, parameters.size, properties.encodingbytes,
+ properties.encodingname, properties.fullname, file.basename(properties.filename))
end
statistics.stoptiming(fonts)
return tfmdata
end
+function font.getfont(id)
+ return fontdata[id] -- otherwise issues
+end
+
--[[ldx--
<p>We overload the <l n='tfm'/> reader.</p>
--ldx]]--
diff --git a/Master/texmf-dist/tex/context/base/font-enh.lua b/Master/texmf-dist/tex/context/base/font-enh.lua
index ca9893e3d84..9338fc20b15 100644
--- a/Master/texmf-dist/tex/context/base/font-enh.lua
+++ b/Master/texmf-dist/tex/context/base/font-enh.lua
@@ -8,8 +8,12 @@ if not modules then modules = { } end modules ['font-enh'] = {
local next = next
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local report_defining = logs.reporter("fonts","defining")
+local trace_unicoding = false
+
+trackers.register("fonts.defining", function(v) trace_unicoding = v end)
+trackers.register("fonts.unicoding", function(v) trace_unicoding = v end)
+
+local report_unicoding = logs.reporter("fonts","unicoding")
local fonts = fonts
local constructors = fonts.constructors
@@ -43,8 +47,8 @@ local registerotffeature = otffeatures.register
-- end
-- for newcode, oldcode in next, data.unicodes do
-- if newcode ~= oldcode then
--- if trace_defining then
--- report_defining("reencoding U+%05X to U+%05X",oldcode,newcode)
+-- if trace_unicoding then
+-- report_unicoding("reencoding %U to %U",oldcode,newcode)
-- end
-- characters[newcode] = original[oldcode]
-- end
@@ -71,8 +75,8 @@ local registerotffeature = otffeatures.register
-- end
-- for k,v in next, vector do
-- if k ~= v then
--- if trace_defining then
--- report_defining("remapping U+%05X to U+%05X",k,v)
+-- if trace_unicoding then
+-- report_unicoding("remapping %U to %U",k,v)
-- end
-- local c = original[k]
-- characters[v] = c
@@ -137,13 +141,17 @@ local function initializeunicoding(tfmdata)
description = descriptions[newcode],
}
end
- local original = originals[oldcode]
- if original then
- characters [newcode] = original.character
- descriptions[newcode] = original.description
+ if oldcode then
+ local original = originals[oldcode]
+ if original then
+ characters [newcode] = original.character
+ descriptions[newcode] = original.description
+ else
+ characters [newcode] = characters [oldcode]
+ descriptions[newcode] = descriptions[oldcode]
+ end
else
- characters [newcode] = characters [oldcode]
- descriptions[newcode] = descriptions[oldcode]
+ oldcoding[name] = newcode
end
if tounicode then
local index = descriptions[newcode].index
@@ -151,8 +159,12 @@ local function initializeunicoding(tfmdata)
tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable)
end
end
- if trace_defining then
- report_defining("aliasing glyph '%s' from U+%05X to U+%05X",name,oldcode,newcode)
+ if trace_unicoding then
+ if oldcode then
+ report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode)
+ else
+ report_unicoding("aliasing glyph %a to %U",name,newcode)
+ end
end
end
end
@@ -164,7 +176,11 @@ registerafmfeature {
initializers = {
base = initializeunicoding,
node = initializeunicoding,
- }
+ },
+ -- manipulators = {
+ -- base = finalizeunicoding,
+ -- node = finalizeunicoding,
+ -- }
}
registerotffeature {
@@ -173,5 +189,9 @@ registerotffeature {
initializers = {
base = initializeunicoding,
node = initializeunicoding,
- }
+ },
+ -- manipulators = {
+ -- base = finalizeunicoding,
+ -- node = finalizeunicoding,
+ -- }
}
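
The reworked initializeunicoding above now tolerates entries without a known old slot: when oldcode is present the character and description are copied over, otherwise only the name-to-newcode mapping is recorded. A stand-alone sketch of that branching with plain tables (the slots and names below are made up; the real code works on tfmdata.characters and tfmdata.descriptions):

    local characters   = { [0x0066] = { width = 400 } }
    local descriptions = { [0x0066] = { name  = "f" } }
    local oldcoding    = { }

    local function alias(name,oldcode,newcode)
        if oldcode then
            characters  [newcode] = characters  [oldcode]
            descriptions[newcode] = descriptions[oldcode]
        else
            oldcoding[name] = newcode -- no source slot known, just remember the target
        end
    end

    alias("f.alt",0x0066,0xF0066) -- copies slot 0x0066 into a (made-up) private slot
    alias("nameonly",nil,0xF0067) -- no old slot, only the mapping is recorded
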
diff --git a/Master/texmf-dist/tex/context/base/font-ext.lua b/Master/texmf-dist/tex/context/base/font-ext.lua
index 331ffbdfa91..89d5927d40b 100644
--- a/Master/texmf-dist/tex/context/base/font-ext.lua
+++ b/Master/texmf-dist/tex/context/base/font-ext.lua
@@ -6,11 +6,12 @@ if not modules then modules = { } end modules ['font-ext'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
local next, type, byte = next, type, string.byte
local gmatch, concat, format = string.gmatch, table.concat, string.format
local utfchar = utf.char
-local getparameters = utilities.parsers.getparameters
+
+local commands, context = commands, context
+local fonts, utilities = fonts, utilities
local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
@@ -26,17 +27,19 @@ will depend of the font format. Here we define the few that are kind
of neutral.</p>
--ldx]]--
-local fonts = fonts
-local fontdata = fonts.hashes.identifiers
+local handlers = fonts.handlers
+local hashes = fonts.hashes
+local otf = handlers.otf
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
+local registerotffeature = handlers.otf.features.register
+local registerafmfeature = handlers.afm.features.register
-local afmfeatures = fonts.constructors.newfeatures("afm")
-local registerafmfeature = afmfeatures.register
+local fontdata = hashes.identifiers
local allocate = utilities.storage.allocate
local settings_to_array = utilities.parsers.settings_to_array
+local getparameters = utilities.parsers.getparameters
+
local setmetatableindex = table.setmetatableindex
-- -- -- -- -- --
@@ -62,14 +65,15 @@ end
-- expansion (hz)
-- -- -- -- -- --
-fonts.expansions = allocate()
-local expansions = fonts.expansions
+local expansions = fonts.expansions or allocate()
-expansions.classes = allocate()
-local classes = expansions.classes
+fonts.expansions = expansions
-expansions.vectors = allocate()
-local vectors = expansions.vectors
+local classes = expansions.classes or allocate()
+local vectors = expansions.vectors or allocate()
+
+expansions.classes = classes
+expansions.vectors = vectors
-- beware, pdftex itself uses percentages * 10
@@ -107,7 +111,7 @@ local function initializeexpansion(tfmdata,value)
local step = class.step or 0
local factor = class.factor or 1
if trace_expansion then
- report_expansions("setting class %s, vector: %s, factor: %s, stretch: %s, shrink: %s, step: %s",
+ report_expansions("setting class %a, vector %a, factor %a, stretch %a, shrink %a, step %a",
value,class.vector,factor,stretch,shrink,step)
end
tfmdata.parameters.expansion = {
@@ -140,10 +144,10 @@ local function initializeexpansion(tfmdata,value)
end
end
elseif trace_expansion then
- report_expansions("unknown vector '%s' in class '%s",class.vector,value)
+ report_expansions("unknown vector %a in class %a",class.vector,value)
end
elseif trace_expansion then
- report_expansions("unknown class '%s'",value)
+ report_expansions("unknown class %a",value)
end
end
end
@@ -345,14 +349,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = lookuphash[lookup]
if data then
if trace_protrusion then
- report_protrusions("setting left using lfbd lookup '%s'",lookup)
+ report_protrusions("setting left using lfbd lookup %a",lookup)
end
for k, v in next, data do
-- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
local p = - (v[1] / 1000) * factor * left
characters[k].left_protruding = p
if trace_protrusion then
- report_protrusions("lfbd -> %s -> U+%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
+ report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
end
end
done = true
@@ -368,14 +372,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = lookuphash[lookup]
if data then
if trace_protrusion then
- report_protrusions("setting right using rtbd lookup '%s'",lookup)
+ report_protrusions("setting right using rtbd lookup %a",lookup)
end
for k, v in next, data do
-- local p = v[3] / descriptions[k].width -- or 3
local p = (v[1] / 1000) * factor * right
characters[k].right_protruding = p
if trace_protrusion then
- report_protrusions("rtbd -> %s -> U+%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
+ report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
end
end
end
@@ -383,9 +387,15 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
end
end
end
- tfmdata.parameters.protrusion {
- auto = true
- }
+ local parameters = tfmdata.parameters
+ local protrusion = tfmdata.protrusion
+ if not protrusion then
+ parameters.protrusion = {
+ auto = true
+ }
+ else
+ protrusion.auto = true
+ end
end
-- The opbd test is just there because it was discussed on the
@@ -407,7 +417,7 @@ local function initializeprotrusion(tfmdata,value)
local left = class.left or 1
local right = class.right or 1
if trace_protrusion then
- report_protrusions("setting class %s, vector: %s, factor: %s, left: %s, right: %s",
+ report_protrusions("setting class %a, vector %a, factor %a, left %a, right %a",
value,class.vector,factor,left,right)
end
local data = characters.data
@@ -448,10 +458,10 @@ local function initializeprotrusion(tfmdata,value)
end
end
elseif trace_protrusion then
- report_protrusions("unknown vector '%s' in class '%s",class.vector,value)
+ report_protrusions("unknown vector %a in class %a",class.vector,value)
end
elseif trace_protrusion then
- report_protrusions("unknown class '%s'",value)
+ report_protrusions("unknown class %a",value)
end
end
end
@@ -880,8 +890,8 @@ local new_glyph = nodes.pool.glyph
local hpack_node = node.hpack
function fonts.helpers.addprivate(tfmdata,name,characterdata)
- local properties = tfmdata.properties
- local privates = properties.privates
+ local properties = tfmdata.properties
+ local privates = properties.privates
local lastprivate = properties.lastprivate
if lastprivate then
lastprivate = lastprivate + 1
@@ -892,7 +902,9 @@ function fonts.helpers.addprivate(tfmdata,name,characterdata)
privates = { }
properties.privates = privates
end
- privates[name] = lastprivate
+ if name then
+ privates[name] = lastprivate
+ end
properties.lastprivate = lastprivate
tfmdata.characters[lastprivate] = characterdata
if properties.finalized then
@@ -903,7 +915,7 @@ end
function fonts.helpers.getprivatenode(tfmdata,name)
local properties = tfmdata.properties
- local privates = properties and properties.privates
+ local privates = properties and properties.privates
if privates then
local p = privates[name]
if p then
@@ -925,7 +937,7 @@ end
function fonts.helpers.hasprivate(tfmdata,name)
local properties = tfmdata.properties
- local privates = properties and properties.privates
+ local privates = properties and properties.privates
return privates and privates[name] or false
end
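
The map_opbd_onto_protrusion changes keep the same arithmetic for turning lfbd/rtbd lookup values into protrusion amounts: -(v[1]/1000)*factor*left on the left side and (v[1]/1000)*factor*right on the right side. A stand-alone sketch with invented lookup data:

    -- illustrative only: the opbd to protrusion mapping, lookup values are invented
    local factor, left, right = 1, 1, 1
    local characters = { [0x2C] = { }, [0x2E] = { } } -- comma and period
    local lfbd = { [0x2C] = { -150 } } -- left bound lookup data
    local rtbd = { [0x2E] = {  200 } } -- right bound lookup data

    for k, v in next, lfbd do
        characters[k].left_protruding  = - (v[1] / 1000) * factor * left
    end
    for k, v in next, rtbd do
        characters[k].right_protruding =   (v[1] / 1000) * factor * right
    end

    print(characters[0x2C].left_protruding)  -- 0.15
    print(characters[0x2E].right_protruding) -- 0.2
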
diff --git a/Master/texmf-dist/tex/context/base/font-fbk.lua b/Master/texmf-dist/tex/context/base/font-fbk.lua
index 89831e7c2a0..48e2167e656 100644
--- a/Master/texmf-dist/tex/context/base/font-fbk.lua
+++ b/Master/texmf-dist/tex/context/base/font-fbk.lua
@@ -9,40 +9,37 @@ if not modules then modules = { } end modules ['font-fbk'] = {
local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
local utfbyte, utfchar = utf.byte, utf.char
-local trace_combining = false trackers.register("fonts.combining", function(v) trace_combining = v end)
-local trace_combining_all = false trackers.register("fonts.combining.all", function(v) trace_combining = v
- trace_combining_all = v end)
+--[[ldx--
+<p>This is very experimental code!</p>
+--ldx]]--
-local force_combining = false -- just for demo purposes (see mk)
+local trace_combining_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_combining_visualize = v end)
+local trace_combining_define = false trackers.register("fonts.composing.define", function(v) trace_combining_define = v end)
-trackers.register("fonts.composing", "fonts.combining")
-trackers.register("fonts.composing.all", "fonts.combining.all")
+trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals)
+trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals)
local report_combining = logs.reporter("fonts","combining")
-local allocate = utilities.storage.allocate
+local force_combining = false -- just for demo purposes (see mk)
---[[ldx--
-<p>This is very experimental code!</p>
---ldx]]--
+local allocate = utilities.storage.allocate
local fonts = fonts
local handlers = fonts.handlers
local constructors = fonts.constructors
-local vf = handlers.vf
-local commands = vf.combiner.commands
-
-local otffeatures = constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-local afmfeatures = constructors.newfeatures("afm")
-local registerafmfeature = afmfeatures.register
+local registerotffeature = handlers.otf.features.register
+local registerafmfeature = handlers.afm.features.register
local unicodecharacters = characters.data
local unicodefallbacks = characters.fallbacks
+local vf = handlers.vf
+local commands = vf.combiner.commands
local push = vf.predefined.push
local pop = vf.predefined.pop
+
local force_composed = false
local cache = { } -- we could make these weak
local fraction = 0.15 -- 30 units for lucida
@@ -62,14 +59,14 @@ local function composecharacters(tfmdata)
local italicfactor = parameters.italicfactor or 0
local vfspecials = backends.tables.vfspecials --brr
local red, green, blue, black
- if trace_combining then
+ if trace_combining_visualize then
red = vfspecials.red
green = vfspecials.green
blue = vfspecials.blue
black = vfspecials.black
end
local compose = fonts.goodies.getcompositions(tfmdata)
- if compose and trace_combining then
+ if compose and trace_combining_visualize then
report_combining("using compose information from goodies file")
end
local done = false
@@ -108,8 +105,8 @@ local function composecharacters(tfmdata)
cache[chr] = chr_t
end
if charsacc then
- if trace_combining_all then
- report_combining("%s (U+%05X) = %s (U+%05X) + %s (U+%05X)",utfchar(i),i,utfchar(chr),chr,utfchar(acc),acc)
+ if trace_combining_define then
+ report_combining("composed %C, base %C, accent %C",i,chr,acc)
end
local acc_t = cache[acc]
if not acc_t then
@@ -141,8 +138,8 @@ local function composecharacters(tfmdata)
local ay = a_anchor.y or 0
local dx = cx - ax
local dy = cy - ay
- if trace_combining_all then
- report_combining("building U+%05X (%s) from U+%05X (%s) and U+%05X (%s)",i,utfchar(i),chr,utfchar(chr),acc,utfchar(acc))
+ if trace_combining_define then
+ report_combining("building %C from %C and %C",i,chr,acc)
report_combining(" boundingbox:")
report_combining(" chr: %3i %3i %3i %3i",unpack(cb))
report_combining(" acc: %3i %3i %3i %3i",unpack(ab))
@@ -152,7 +149,7 @@ local function composecharacters(tfmdata)
report_combining(" delta:")
report_combining(" %s: %3i %3i",i_anchored,dx,dy)
end
- if trace_combining then
+ if trace_combining_visualize then
t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, green, acc_t, black, pop, chr_t }
-- t.commands = {
-- push, {"right", scale*cx}, {"down", -scale*cy}, red, {"rule",10000,10000,10000}, pop,
@@ -172,7 +169,7 @@ local function composecharacters(tfmdata)
local dx = (c_urx - a_urx - a_llx + c_llx)/2
local dd = (c_urx - c_llx)*italicfactor
if a_ury < 0 then
- if trace_combining then
+ if trace_combining_visualize then
t.commands = { push, {"right", dx-dd}, red, acc_t, black, pop, chr_t }
else
t.commands = { push, {"right", dx-dd}, acc_t, pop, chr_t }
@@ -184,16 +181,16 @@ local function composecharacters(tfmdata)
-- takes time and code
dy = compose[i]
if dy then
- dy = dy.DY
+ dy = dy.dy
end
if not dy then
dy = compose[acc]
if dy then
- dy = dy and dy.DY
+ dy = dy and dy.dy
end
end
if not dy then
- dy = compose.DY
+ dy = compose.dy
end
if not dy then
dy = - deltaxheight + extraxheight
@@ -207,13 +204,13 @@ local function composecharacters(tfmdata)
else
dy = - deltaxheight + extraxheight
end
- if trace_combining then
+ if trace_combining_visualize then
t.commands = { push, {"right", dx+dd}, {"down", dy}, green, acc_t, black, pop, chr_t }
else
t.commands = { push, {"right", dx+dd}, {"down", dy}, acc_t, pop, chr_t }
end
else
- if trace_combining then
+ if trace_combining_visualize then
t.commands = { push, {"right", dx+dd}, blue, acc_t, black, pop, chr_t }
else
t.commands = { push, {"right", dx+dd}, acc_t, pop, chr_t }
@@ -224,8 +221,8 @@ local function composecharacters(tfmdata)
t.commands = { chr_t } -- else index mess
end
else
- if trace_combining_all then
- report_combining("%s (U+%05X) = %s (U+%05X) (simplified)",utfchar(i),i,utfchar(chr),chr)
+ if trace_combining_define then
+          report_combining("%C becomes simplified %C",i,chr)
end
t.commands = { chr_t } -- else index mess
end
@@ -271,11 +268,11 @@ vf.helpers.composecharacters = composecharacters
-- which only makes sense as demo.
commands["compose.trace.enable"] = function()
- trace_combining = true
+ trace_combining_visualize = true
end
commands["compose.trace.disable"] = function()
- trace_combining = false
+ trace_combining_visualize = false
end
commands["compose.force.enable"] = function()
@@ -288,9 +285,9 @@ end
commands["compose.trace.set"] = function(g,v)
if v[2] == nil then
- trace_combining = true
+ trace_combining_visualize = true
else
- trace_combining = v[2]
+ trace_combining_visualize = v[2]
end
end
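
The compose data coming from a goodies file is now expected to carry a lowercase dy key (formerly DY), looked up per composed character first, then per accent, then as a global default. A stand-alone sketch of that lookup order; the table and its values are made up for illustration:

    local compose = {
        dy = -10000,                -- global fallback
        [0x0300] = { dy = -20000 }, -- combining grave accent
        [0x00E8] = { dy = -30000 }, -- e with grave
    }

    local function composedy(i,acc,deltaxheight,extraxheight)
        local dy = compose[i]   and compose[i].dy
                or compose[acc] and compose[acc].dy
                or compose.dy
        return dy or (- deltaxheight + extraxheight)
    end

    print(composedy(0x00E8,0x0300,0,0)) -- -30000, the per-character entry wins
    print(composedy(0x0041,0x0300,0,0)) -- -20000, the accent entry is the next try
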
diff --git a/Master/texmf-dist/tex/context/base/font-fea.mkvi b/Master/texmf-dist/tex/context/base/font-fea.mkvi
index 969e955759e..c1e051621b4 100644
--- a/Master/texmf-dist/tex/context/base/font-fea.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-fea.mkvi
@@ -133,32 +133,172 @@
\attribute\zerocount\zerocount % first in list, so fast match
-\let\currentfeature\empty
+% beware: these are global features on top of existing font features
-% ! ! ! very experimental, some test code for idris advanced features ! ! !
-%
-% \startbuffer
-% \definefontfeature[smallcaps][smallcaps][script=latn]
-% \definefontfeature[oldstyle] [oldstyle] [script=latn]
-%
-% \definedfont[name:cambria at 15pt]
-%
-% Hello there {\setff{smallcaps}capped 123 \setff{oldstyle}123!} \blank
-% Hello there {\addff{smallcaps}capped 123 \addff{oldstyle}123!} \blank
-% Hello there {\addff{smallcaps}capped \subff{smallcaps}normal} \blank
-% \stopbuffer
-%
-% \typebuffer \getbuffer
+\let\currentfeature \s!current
+\let\m_font_feature_list \s!current
+\let\m_font_feature_asked\empty
+
+\newconstant\c_font_feature_state
+
+% hashing at this end is slower
+
+\unexpanded\def\addfeature {\doifnextoptionalelse\font_feature_add_yes \font_feature_add_nop }
+\unexpanded\def\subtractfeature {\doifnextoptionalelse\font_feature_subtract_yes \font_feature_subtract_nop }
+\unexpanded\def\replacefeature {\doifnextoptionalelse\font_feature_replace_yes \font_feature_replace_nop }
+\unexpanded\def\resetandaddfeature{\doifnextoptionalelse\font_feature_reset_add_yes\font_feature_reset_add_nop}
+\unexpanded\def\feature {\doifnextoptionalelse\font_feature_yes \font_feature_nop }
+
+\unexpanded\def\font_feature_add_yes [#feature]{\edef\m_font_feature_asked{#feature}\font_feature_add}
+\unexpanded\def\font_feature_add_nop #feature{\edef\m_font_feature_asked{#feature}\font_feature_add}
+
+\unexpanded\def\font_feature_subtract_yes [#feature]{\edef\m_font_feature_asked{#feature}\font_feature_subtract}
+\unexpanded\def\font_feature_subtract_nop #feature{\edef\m_font_feature_asked{#feature}\font_feature_subtract}
+
+\unexpanded\def\font_feature_replace_yes [#feature]{\edef\m_font_feature_asked{#feature}\font_feature_replace}
+\unexpanded\def\font_feature_replace_nop #feature{\edef\m_font_feature_asked{#feature}\font_feature_replace}
+
+\unexpanded\def\font_feature_reset_add_yes[#feature]{\edef\m_font_feature_asked{#feature}\font_feature_reset_add}
+\unexpanded\def\font_feature_reset_add_nop #feature{\edef\m_font_feature_asked{#feature}\font_feature_reset_add}
+
+\let\doaddfeature \font_feature_add_nop % low level faster ones
+\let\dosubtractfeature \font_feature_subtract_nop
+\let\doreplacefeature \font_feature_replace_nop
+\let\doresetandaddfeature\font_feature_reset_add_nop
+
+\unexpanded\def\font_feature_add
+ {\ifnum\c_font_feature_state=\plusone
+ \ifx\m_font_feature_asked\currentfeature\else
+ \font_feature_add_indeed
+ \fi
+ \else
+ \font_feature_add_indeed
+ \fi}
+
+\unexpanded\def\font_feature_add_indeed
+ {\ctxcommand{feature("+","\m_font_feature_list","\m_font_feature_asked")}%
+ \edef\m_font_feature_list{\m_font_feature_list+\m_font_feature_asked}% also + at the lua end
+ \c_font_feature_state\plusone
+ \let\currentfeature\m_font_feature_asked}
+
+\unexpanded\def\font_feature_subtract
+ {\ifnum\c_font_feature_state=\minusone
+ \ifx\m_font_feature_asked\currentfeature\else
+ \font_feature_subtract_indeed
+ \fi
+ \else
+ \font_feature_subtract_indeed
+ \fi}
+
+\unexpanded\def\font_feature_subtract_indeed
+ {\ctxcommand{feature("-","\m_font_feature_list","\m_font_feature_asked")}%
+ \edef\m_font_feature_list{\m_font_feature_list-\m_font_feature_asked}% also - at the lua end
+ \c_font_feature_state\minusone
+ \let\currentfeature\m_font_feature_asked}
+
+\unexpanded\def\font_feature_replace
+ {\ifnum\c_font_feature_state=\zerocount
+ \ifx\m_font_feature_asked\currentfeature\else
+ \font_feature_replace_indeed
+ \fi
+ \else
+ \font_feature_replace_indeed
+ \fi}
-\unexpanded\def\featureattribute#feature{\ctxcommand{featureattribute("#feature")}}
-\unexpanded\def\setfontfeature #feature{\ctxcommand{setfontfeature("#feature")}\edef\currentfeature{#feature}}
-%unexpanded\def\resetfontfeature {\ctxcommand{resetfontfeature()}\let\currentfeature\empty} % initial value
-\unexpanded\def\resetfontfeature {\attribute\zerocount\zerocount \let\currentfeature\empty} % initial value
+\unexpanded\def\font_feature_replace_indeed
+ {\ctxcommand{feature("=","\m_font_feature_list","\m_font_feature_asked")}%
+ \edef\m_font_feature_list{\m_font_feature_list=\m_font_feature_asked}% also = at the lua end
+ \c_font_feature_state\zerocount
+ \let\currentfeature\m_font_feature_asked}
-\unexpanded\def\addfontfeaturetoset #feature{\ctxcommand{addfs("#feature")}} % merge
-\unexpanded\def\subtractfontfeaturefromset #feature{\ctxcommand{subfs("#feature")}} % merge
-\unexpanded\def\addfontfeaturetofont #feature{\ctxcommand{addff("#feature")}} % overload
-\unexpanded\def\subtractfontfeaturefromfont#feature{\ctxcommand{subff("#feature")}} % overload
+\unexpanded\def\resetfeature
+ {\ifx\currentfeature\s!current \else
+ \font_feature_reset_indeed
+ \fi}
+
+\unexpanded\def\font_feature_reset_indeed
+ {\let\m_font_feature_asked\empty
+ \let\currentfeature \s!current
+ \let\m_font_feature_list \s!current
+ \ctxcommand{feature(false)}}
+
+\unexpanded\def\revivefeature
+ {\ifx\currentfeature\s!current \else
+ \font_feature_revive_indeed
+ \fi}
+
+\unexpanded\def\font_feature_revive_indeed
+ {\ctxcommand{feature(true,"\m_font_feature_list")}}
+
+\unexpanded\def\font_feature_reset_add
+ {\ifnum\c_font_feature_state=\plusone
+ \ifx\m_font_feature_asked\currentfeature\else
+ \font_feature_reset_add_indeed
+ \fi
+ \else
+ \font_feature_reset_add_indeed
+ \fi}
+
+\unexpanded\def\font_feature_reset_add_indeed
+ {\ctxcommand{feature("+","\s!current","\m_font_feature_asked")}%
+ \edef\m_font_feature_list{\s!current+\m_font_feature_asked}% also + at the lua end
+ \c_font_feature_state\plusone
+ \let\currentfeature\m_font_feature_asked}
+
+\installcorenamespace{featureyes}
+\installcorenamespace{featurenop}
+
+\unexpanded\def\font_feature_yes[#method]{\csname\??featureyes\ifcsname\??featureyes#method\endcsname#method\else\s!unknown\fi\endcsname}
+\unexpanded\def\font_feature_nop #method{\csname\??featurenop\ifcsname\??featurenop#method\endcsname#method\else\s!unknown\fi\endcsname}
+
+\letvalue{\??featureyes +}\addfeature
+\letvalue{\??featurenop +}\addfeature
+\letvalue{\??featureyes -}\subtractfeature
+\letvalue{\??featurenop -}\subtractfeature
+\letvalue{\??featureyes =}\replacefeature
+\letvalue{\??featurenop =}\replacefeature
+\letvalue{\??featureyes !}\resetandaddfeature
+\letvalue{\??featurenop !}\resetandaddfeature
+\letvalue{\??featureyes >}\revivefeature
+\letvalue{\??featurenop >}\revivefeature
+\letvalue{\??featureyes <}\resetfeature
+\letvalue{\??featurenop <}\resetfeature
+
+\letvalue{\??featureyes\v!more }\addfeature % add set to previous set and combine with font set
+\letvalue{\??featurenop\v!more }\addfeature
+\letvalue{\??featureyes\v!less }\subtractfeature % subtract set from previous set and combine with font set
+\letvalue{\??featurenop\v!less }\subtractfeature
+\letvalue{\??featureyes\v!new }\replacefeature % replace font set
+\letvalue{\??featurenop\v!new }\replacefeature
+\letvalue{\??featureyes\v!reset }\resetfeature % forget sets and revert to font set
+\letvalue{\??featurenop\v!reset }\resetfeature
+\letvalue{\??featureyes\v!default}\revivefeature % make sure the current set is used on top of the font set
+\letvalue{\??featurenop\v!default}\revivefeature
+\letvalue{\??featureyes\v!old }\revivefeature
+\letvalue{\??featurenop\v!old }\revivefeature
+\letvalue{\??featureyes\v!local }\resetandaddfeature
+\letvalue{\??featurenop\v!local }\resetandaddfeature
+\letvalue{\??featureyes\s!unknown}\empty
+\letvalue{\??featurenop\s!unknown}\empty
+
+% just for old times sake:
+
+\unexpanded\def\featureattribute#feature%
+ {\ctxcommand{featureattribute("#feature")}}
+
+\unexpanded\def\setfontfeature #feature%
+ {\edef\currentfeature{#feature}%
+ \let\m_font_feature_list\currentfeature
+ \ctxcommand{setfontfeature("\currentfeature")}}
+
+\let\resetfontfeature\resetfeature
+
+% these are obsolete (don't use them any longer)
+
+\let\addfontfeaturetoset \font_feature_add_nop
+\let\subtractfontfeaturefromset \font_feature_subtract_nop
+\let\addfontfeaturetofont \font_feature_add_nop
+\let\subtractfontfeaturefromfont\font_feature_subtract_nop
\let\setff\setfontfeature
\let\addfs\addfontfeaturetoset
@@ -186,4 +326,8 @@
\def\doifelsecurrentfonthasfeature#feature%
{\ctxcommand{doifelsecurrentfonthasfeature("#feature")}}
+% new:
+
+\ctxcommand{registerlanguagefeatures()}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/font-gds.lua b/Master/texmf-dist/tex/context/base/font-gds.lua
index 3b87e4c772b..91a5ea66c30 100644
--- a/Master/texmf-dist/tex/context/base/font-gds.lua
+++ b/Master/texmf-dist/tex/context/base/font-gds.lua
@@ -8,49 +8,46 @@ if not modules then modules = { } end modules ['font-gds'] = {
-- depends on ctx
-local type, next = type, next
+local type, next, tonumber = type, next, tonumber
local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup
local texsp = tex.sp
local fonts, nodes, attributes, node = fonts, nodes, attributes, node
-local trace_goodies = false
-
-trackers.register("fonts.goodies", function(v) trace_goodies = v end)
-
+local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
local report_goodies = logs.reporter("fonts","goodies")
local allocate = utilities.storage.allocate
local otf = fonts.handlers.otf
-local addotffeature = otf.enhancers.addfeature
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
+local afm = fonts.handlers.afm
+local tfm = fonts.handlers.tfm
-local afmfeatures = fonts.constructors.newfeatures("afm")
-local registerafmfeature = afmfeatures.register
+local registerotffeature = otf.features.register
+local registerafmfeature = afm.features.register
+local registertfmfeature = tfm.features.register
-local tfmfeatures = fonts.constructors.newfeatures("tfm")
-local registertfmfeature = tfmfeatures.register
-
-local fontgoodies = { }
+local fontgoodies = fonts.goodies or { }
fonts.goodies = fontgoodies
-local typefaces = allocate()
+local typefaces = fonts.typefaces or allocate()
fonts.typefaces = typefaces
-local data = allocate()
-fontgoodies.data = fontgoodies.data
+local data = fontgoodies.data or allocate()
+fontgoodies.data = data
-local list = { }
+local list = fontgoodies.list or { }
fontgoodies.list = list -- no allocate as we want to see what is there
+local addotffeature = otf.enhancers.addfeature
+
+local findfile = resolvers.findfile
+
function fontgoodies.report(what,trace,goodies)
if trace_goodies or trace then
local whatever = goodies[what]
if whatever then
- report_goodies("goodie '%s' found in '%s'",what,goodies.name)
+ report_goodies("goodie %a found in %a",what,goodies.name)
end
end
end
@@ -60,9 +57,9 @@ local function loadgoodies(filename) -- maybe a merge is better
if goodies ~= nil then
-- found or tagged unfound
elseif type(filename) == "string" then
- local fullname = resolvers.findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix
+            local fullname = findfile(file.addsuffix(filename,"lfg")) or "" -- preferred suffix
if fullname == "" then
- fullname = resolvers.findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix
+ fullname = findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix
end
if fullname == "" then
report_goodies("goodie file '%s.lfg' is not found",filename)
@@ -70,10 +67,10 @@ local function loadgoodies(filename) -- maybe a merge is better
else
goodies = dofile(fullname) or false
if not goodies then
- report_goodies("goodie file '%s' is invalid",fullname)
+ report_goodies("goodie file %a is invalid",fullname)
return nil
elseif trace_goodies then
- report_goodies("goodie file '%s' is loaded",fullname)
+ report_goodies("goodie file %a is loaded",fullname)
end
goodies.name = goodies.name or "no name"
for name, fnc in next, list do
@@ -105,7 +102,7 @@ local function setgoodies(tfmdata,value)
local ok = loadgoodies(filename)
if ok then
if trace_goodies then
- report_goodies("assigning goodie '%s'",filename)
+ report_goodies("assigning goodie %a",filename)
end
goodies[#goodies+1] = ok
end
@@ -141,28 +138,30 @@ end
-- fonts.features.flattened = flattenedfeatures
-function fontgoodies.prepare_features(goodies,name,set)
+local function prepare_features(goodies,name,set)
if set then
local ff = flattenedfeatures(set)
local fullname = goodies.name .. "::" .. name
local n, s = fonts.specifiers.presetcontext(fullname,"",ff)
goodies.featuresets[name] = s -- set
if trace_goodies then
- report_goodies("feature set '%s' gets number %s and name '%s'",name,n,fullname)
+ report_goodies("feature set %a gets number %a and name %a",name,n,fullname)
end
return n
end
end
+fontgoodies.prepare_features = prepare_features
+
local function initialize(goodies,tfmdata)
local featuresets = goodies.featuresets
local goodiesname = goodies.name
if featuresets then
if trace_goodies then
- report_goodies("checking featuresets in '%s'",goodies.name)
+ report_goodies("checking featuresets in %a",goodies.name)
end
for name, set in next, featuresets do
- fontgoodies.prepare_features(goodies,name,set)
+ prepare_features(goodies,name,set)
end
end
end
@@ -177,7 +176,7 @@ local function setfeatureset(tfmdata,set,features)
for i=1,#goodies do
-- last one wins
local g = goodies[i]
- what = (g.featuresets and g.featuresets[set]) or what
+ what = g.featuresets and g.featuresets[set] or what
end
if what then
for feature, value in next, what do
@@ -193,12 +192,13 @@ end
-- postprocessors (we could hash processor and share code)
function fontgoodies.registerpostprocessor(tfmdata,f,prepend)
- if not tfmdata.postprocessors then
+ local postprocessors = tfmdata.postprocessors
+ if not postprocessors then
tfmdata.postprocessors = { f }
elseif prepend then
- table.insert(tfmdata.postprocessors,f,1)
+ table.insert(postprocessors,f,1)
else
- table.insert(tfmdata.postprocessors,f)
+ table.insert(postprocessors,f)
end
end
@@ -236,9 +236,9 @@ end
-- colorschemes
-local colorschemes = { }
+local colorschemes = fontgoodies.colorschemes or allocate { }
fontgoodies.colorschemes = colorschemes
-colorschemes.data = { }
+colorschemes.data = colorschemes.data or { }
local function setcolorscheme(tfmdata,scheme)
if type(scheme) == "string" then
@@ -249,7 +249,7 @@ local function setcolorscheme(tfmdata,scheme)
for i=1,#goodies do
-- last one counts
local g = goodies[i]
- what = (g.colorschemes and g.colorschemes[scheme]) or what
+ what = g.colorschemes and g.colorschemes[scheme] or what
end
if type(what) == "table" then
-- this is font bound but we can share them if needed
@@ -300,7 +300,6 @@ end
local fontdata = fonts.hashes.identifiers
local setnodecolor = nodes.tracers.colors.set
-local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
local a_colorscheme = attributes.private('colorscheme')
local glyph = node.id("glyph")
@@ -309,7 +308,7 @@ function colorschemes.coloring(head)
local lastfont, lastscheme
local done = false
for n in traverse_id(glyph,head) do
- local a = has_attribute(n,a_colorscheme)
+ local a = n[a_colorscheme]
if a then
local f = n.font
if f ~= lastfont then
@@ -486,7 +485,7 @@ local function initialize(tfmdata)
mathitalics = mathitalics[file.nameonly(properties.name)] or mathitalics
if mathitalics then
if trace_goodies then
- report_goodies("loading mathitalics for font '%s'",properties.name)
+ report_goodies("loading mathitalics for font %a",properties.name)
end
local corrections = mathitalics.corrections
local defaultfactor = mathitalics.defaultfactor
@@ -511,7 +510,7 @@ local function initialize(tfmdata)
properties.mathitalics = disableengine
end
if trace_goodies then
- report_goodies("assigning mathitalics for font '%s'",properties.name)
+ report_goodies("assigning mathitalics for font %a",properties.name)
end
local mathitalics = properties.mathitalics
local quad = parameters.quad
@@ -605,9 +604,42 @@ end
fontgoodies.register("compositions", initialize)
-local designsizes = { }
+local filenames = fontgoodies.filenames or allocate()
+fontgoodies.filenames = filenames
+
+local filedata = filenames.data or allocate()
+filenames.data = filedata
+
+local function initialize(goodies) -- filenames are registered global
+ local fn = goodies.filenames
+ if fn then
+ for usedname, alternativenames in next, fn do
+ filedata[usedname] = alternativenames
+ end
+ end
+end
+
+fontgoodies.register("filenames", initialize)
+
+function fontgoodies.filenames.resolve(name)
+ local fd = filedata[name]
+ if fd and findfile(name) == "" then
+ for i=1,#fd do
+ local fn = fd[i]
+ if findfile(fn) ~= "" then
+ return fn
+ end
+ end
+ else
+ -- no lookup, just use the regular mechanism
+ end
+ return name
+end
+
+local designsizes = fontgoodies.designsizes or allocate()
fontgoodies.designsizes = designsizes
-local designdata = allocate()
+
+local designdata = designsizes.data or allocate()
designsizes.data = designdata
local function initialize(goodies) -- design sizes are registered global
@@ -687,3 +719,19 @@ end
commands.loadfontgoodies = fontgoodies.load
commands.enablefontcolorschemes = colorschemes.enable
+
+-- weird place ... depends on math
+
+local function finalize(tfmdata,feature,value)
+ mathematics.overloaddimensions(tfmdata,tfmdata,value)
+end
+
+registerotffeature {
+ name = "mathdimensions",
+ description = "manipulate math dimensions",
+ -- default = true,
+ manipulators = {
+ base = finalize,
+ node = finalize,
+ }
+}
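
The new filenames goodie maps a requested file name to a list of alternatives that are only tried when the requested name itself cannot be found. The sketch below restates that fallback with a stubbed findfile (in the real code this is resolvers.findfile) and invented file names:

    -- stand-in for resolvers.findfile: pretend only one file exists
    local available = { ["texgyrepagella-regular.otf"] = true }
    local function findfile(name) return available[name] and name or "" end

    -- what a goodies file could register: used name -> alternative names (invented)
    local filedata = {
        ["pagella-regular.otf"] = { "texgyrepagella-regular.otf" },
    }

    local function resolve(name)
        local fd = filedata[name]
        if fd and findfile(name) == "" then
            for i=1,#fd do
                local fn = fd[i]
                if findfile(fn) ~= "" then
                    return fn -- first alternative that can actually be found
                end
            end
        end
        return name -- fall back to the regular lookup mechanism
    end

    print(resolve("pagella-regular.otf")) -- texgyrepagella-regular.otf
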
diff --git a/Master/texmf-dist/tex/context/base/font-gds.mkiv b/Master/texmf-dist/tex/context/base/font-gds.mkvi
index 2305a0edd3b..d4760a470f9 100644
--- a/Master/texmf-dist/tex/context/base/font-gds.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-gds.mkvi
@@ -37,8 +37,8 @@
\definesystemattribute[colorscheme][public]
-\unexpanded\def\loadfontgoodies[#1]%
- {\ctxcommand{loadfontgoodies("#1")}}
+\unexpanded\def\loadfontgoodies[#filename]%
+ {\ctxcommand{loadfontgoodies("#filename")}}
\unexpanded\def\setfontcolorscheme % will move to the lua end
{\ctxcommand{enablefontcolorschemes()}%
diff --git a/Master/texmf-dist/tex/context/base/font-hsh.lua b/Master/texmf-dist/tex/context/base/font-hsh.lua
new file mode 100644
index 00000000000..d6c226b7c13
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-hsh.lua
@@ -0,0 +1,182 @@
+if not modules then modules = { } end modules ['font-hsh'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local setmetatableindex = table.setmetatableindex
+local currentfont = font.current
+local allocate = utilities.storage.allocate
+
+local fonts = fonts
+local hashes = fonts.hashes or allocate()
+fonts.hashes = hashes
+
+local identifiers = hashes.identifiers or allocate()
+local characters = hashes.characters or allocate() -- chardata
+local descriptions = hashes.descriptions or allocate()
+local parameters = hashes.parameters or allocate()
+local properties = hashes.properties or allocate()
+local resources = hashes.resources or allocate()
+local quads = hashes.quads or allocate() -- maybe also spacedata
+local xheights = hashes.xheights or allocate()
+local csnames = hashes.csnames or allocate() -- namedata
+local marks = hashes.marks or allocate()
+local italics = hashes.italics or allocate()
+local lastmathids = hashes.lastmathids or allocate()
+local dynamics = hashes.dynamics or allocate()
+
+hashes.characters = characters
+hashes.descriptions = descriptions
+hashes.parameters = parameters
+hashes.properties = properties
+hashes.resources = resources
+hashes.quads = quads hashes.emwidths = quads
+hashes.xheights = xheights hashes.exheights = xheights
+hashes.csnames = csnames
+hashes.marks = marks
+hashes.italics = italics
+hashes.lastmathids = lastmathids
+hashes.dynamics = dynamics
+
+local nulldata = allocate {
+ name = "nullfont",
+ characters = { },
+ descriptions = { },
+ properties = { },
+ parameters = { -- lmromanregular @ 12pt
+ slant = 0, -- 1
+ space = 256377, -- 2
+ space_stretch = 128188, -- 3
+ space_shrink = 85459, -- 4
+ x_height = 338952, -- 5
+ quad = 786432, -- 6
+ extra_space = 85459, -- 7
+ },
+}
+
+fonts.nulldata = nulldata
+
+fonts.constructors.enhanceparameters(nulldata.parameters) -- official copies for us
+
+setmetatableindex(identifiers, function(t,k)
+ return k == true and identifiers[currentfont()] or nulldata
+end)
+
+setmetatableindex(characters, function(t,k)
+ if k == true then
+ return characters[currentfont()]
+ else
+ local characters = identifiers[k].characters
+ t[k] = characters
+ return characters
+ end
+end)
+
+setmetatableindex(descriptions, function(t,k)
+ if k == true then
+ return descriptions[currentfont()]
+ else
+ local descriptions = identifiers[k].descriptions
+ t[k] = descriptions
+ return descriptions
+ end
+end)
+
+setmetatableindex(parameters, function(t,k)
+ if k == true then
+ return parameters[currentfont()]
+ else
+ local parameters = identifiers[k].parameters
+ t[k] = parameters
+ return parameters
+ end
+end)
+
+setmetatableindex(properties, function(t,k)
+ if k == true then
+ return properties[currentfont()]
+ else
+ local properties = identifiers[k].properties
+ t[k] = properties
+ return properties
+ end
+end)
+
+setmetatableindex(resources, function(t,k)
+ if k == true then
+ return resources[currentfont()]
+ else
+ local shared = identifiers[k].shared
+ local rawdata = shared and shared.rawdata
+ local resources = rawdata and rawdata.resources
+ t[k] = resources or false -- better than resolving each time
+ return resources
+ end
+end)
+
+setmetatableindex(quads, function(t,k)
+ if k == true then
+ return quads[currentfont()]
+ else
+ local parameters = parameters[k]
+ local quad = parameters and parameters.quad or 0
+ t[k] = quad
+ return quad
+ end
+end)
+
+setmetatableindex(marks, function(t,k)
+ if k == true then
+ return marks[currentfont()]
+ else
+ local resources = identifiers[k].resources or { }
+ local marks = resources.marks or { }
+ t[k] = marks
+ return marks
+ end
+end)
+
+setmetatableindex(xheights, function(t,k)
+ if k == true then
+ return xheights[currentfont()]
+ else
+ local parameters = parameters[k]
+ local xheight = parameters and parameters.xheight or 0
+ t[k] = xheight
+ return xheight
+ end
+end)
+
+setmetatableindex(italics, function(t,k) -- is test !
+ if k == true then
+ return italics[currentfont()]
+ else
+ local properties = identifiers[k].properties
+ local hasitalics = properties and properties.hasitalics
+ if hasitalics then
+ hasitalics = characters[k] -- convenient return
+ else
+ hasitalics = false
+ end
+ t[k] = hasitalics
+ return hasitalics
+ end
+end)
+
+setmetatableindex(dynamics, function(t,k)
+ if k == true then
+ return dynamics[currentfont()]
+ else
+ local shared = identifiers[k].shared
+ local dynamics = shared and shared.dynamics or false
+ t[k] = dynamics
+ return dynamics
+ end
+end)
+
+function font.getfont(id)
+ return identifiers[id]
+end
diff --git a/Master/texmf-dist/tex/context/base/font-ini.lua b/Master/texmf-dist/tex/context/base/font-ini.lua
index 8eeba0ce7f6..884b22474c2 100644
--- a/Master/texmf-dist/tex/context/base/font-ini.lua
+++ b/Master/texmf-dist/tex/context/base/font-ini.lua
@@ -6,33 +6,27 @@ if not modules then modules = { } end modules ['font-ini'] = {
license = "see context related readme files"
}
--- basemethods -> can also be in list
--- presetcontext -> defaults
--- hashfeatures -> ctx version
-
--[[ldx--
<p>Not much is happening here.</p>
--ldx]]--
-local lower = string.lower
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+local allocate = utilities.storage.allocate
local report_defining = logs.reporter("fonts","defining")
-fontloader.totable = fontloader.to_table
-
-fonts = fonts or { } -- already defined in context
+fonts = fonts or { }
local fonts = fonts
--- some of these might move to where they are used first:
-
fonts.hashes = { identifiers = allocate() }
+
+fonts.tables = fonts.tables or { }
+fonts.helpers = fonts.helpers or { }
+fonts.tracers = fonts.tracers or { } -- for the moment till we have moved to moduledata
+fonts.specifiers = fonts.specifiers or { } -- in format !
+
fonts.analyzers = { } -- not needed here
fonts.readers = { }
-fonts.tables = { }
fonts.definers = { methods = { } }
-fonts.specifiers = fonts.specifiers or { } -- in format !
fonts.loggers = { register = function() end }
-fonts.helpers = { }
-fonts.tracers = { } -- for the moment till we have move to moduledata
+fontloader.totable = fontloader.to_table
diff --git a/Master/texmf-dist/tex/context/base/font-ini.mkvi b/Master/texmf-dist/tex/context/base/font-ini.mkvi
index 61fa7afd118..63b7863c8cc 100644
--- a/Master/texmf-dist/tex/context/base/font-ini.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-ini.mkvi
@@ -1746,7 +1746,7 @@
\unexpanded\def\font_helpers_set_current_font_size#size%
{\edef\fontsize{#size}%
- \font_helpers_check_big_math_synchronization
+ \font_helpers_check_big_math_synchronization % double? better in everymath?
\font_helpers_synchronize_font}
\unexpanded\def\font_helpers_set_current_font_style_alternative#style#alternative% \rmsl
@@ -1757,20 +1757,20 @@
\unexpanded\def\font_helpers_set_current_font_style_size#style#size% \rma
{\edef\fontstyle{#style}%
\edef\fontsize {#size}%
- \font_helpers_check_big_math_synchronization
+ \font_helpers_check_big_math_synchronization % double? better in everymath?
\font_helpers_synchronize_font}
\unexpanded\def\font_helpers_set_current_font_alternative_size#alternative#size% \sla
{\edef\fontalternative{#alternative}%
\edef\fontsize {#size}%
- \font_helpers_check_big_math_synchronization
+ \font_helpers_check_big_math_synchronization % double? better in everymath?
\font_helpers_synchronize_font}
\unexpanded\def\font_helpers_set_current_font_style_alternative_size#style#alternative#size% \rmsla
{\edef\fontstyle {#style}%
\edef\fontalternative{#alternative}%
\edef\fontsize {#size}%
- \font_helpers_check_big_math_synchronization
+ \font_helpers_check_big_math_synchronization % double? better in everymath?
\font_helpers_synchronize_font}
\unexpanded\def\font_helpers_synchronize_font % we can have dups i.e. no need to let fontstrategy
@@ -2095,16 +2095,18 @@
\fi
\the\everysetupbodyfont}}
-\unexpanded\def\switchtobodyfont[#specification]%
- {\doifsomething{#specification}
- {\edef\m_font_step{\bodyfontvariable{#specification}}%
- \ifx\m_font_step\empty
- \font_helpers_set_font\zerocount{#specification}%
- \else
- \font_helpers_switch_bodyfont_step % so we have a fast [small] switch
- \fi
- \the\everybodyfont
- \the\everyswitchtobodyfont}}
+\unexpanded\def\font_basics_switchtobodyfont#specification%
+ {\edef\m_font_step{\bodyfontvariable{#specification}}%
+ \ifx\m_font_step\empty
+ \font_helpers_set_font\zerocount{#specification}%
+ \else
+ \font_helpers_switch_bodyfont_step % so we have a fast [small] switch
+ \fi
+ \the\everybodyfont
+ \the\everyswitchtobodyfont}
+
+\unexpanded\def\switchtobodyfont[#specification]% could become an ifx
+ {\doifsomething{#specification}{\font_basics_switchtobodyfont{#specification}}}
\def\font_helpers_switch_bodyfont_step
{\font_basics_switch_points\m_font_step
@@ -2263,4 +2265,36 @@
% in case of troubles: \let\restorebodyfont\fullrestoreglobalbodyfont
+%D Here are some fast variants that can be used in cases where no font
+%D system is needed and where fonts are frozen:
+%D
+%D \starttyping
+%D \definefont [TestA][Serif at 10pt]
+%D \predefinefont[TestB][Serif at 20pt]
+%D
+%D \testfeatureonce{1000}{{\TestA}} % .312
+%D \testfeatureonce{1000}{{\TestB}} % < .016
+%D \testfeatureonce{1000}{{\definedfont[Serif at 30pt]}} % .312
+%D \testfeatureonce{1000}{{\predefinedfont[Serif at 40pt]}} % < .016
+%D \stoptyping
+
+\installcorenamespace{predefinedfont}
+
+\unexpanded\def\predefinefont[#1]#2[#3]% global !
+ {\setugvalue{#1}{\font_basics_predefine{#1}{#3}}}
+
+\unexpanded\def\predefinedfont[#1]% global !
+ {\ifcsname\??predefinedfont#1\endcsname
+ \csname\??predefinedfont#1\endcsname
+ \else
+ \font_basics_predefined{#1}%
+ \fi}
+
+\unexpanded\def\font_basics_predefine#1#2%
+ {\font_basics_defined_font_yes[#2]%
+ \global\expandafter\let\csname#1\expandafter\endcsname\csname\v_font_identifier_basic\endcsname}
+
+\unexpanded\def\font_basics_predefined#1%
+ {\font_basics_predefine{\??predefinedfont#1}{#1}}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/font-ldr.lua b/Master/texmf-dist/tex/context/base/font-ldr.lua
index ca4e3f98a3d..175b4d0cc50 100644
--- a/Master/texmf-dist/tex/context/base/font-ldr.lua
+++ b/Master/texmf-dist/tex/context/base/font-ldr.lua
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['font-ldr'] = {
license = "see context related readme files"
}
--- This module provides a replacement for fontloader.to_table
--- and will be loaded in due time.
+-- This module provides an experimental replacement for fontloader.to_table
+-- but is not used that much.
local fields = fontloader.fields
diff --git a/Master/texmf-dist/tex/context/base/font-lib.mkvi b/Master/texmf-dist/tex/context/base/font-lib.mkvi
index d9c04f0a23c..da930da7846 100644
--- a/Master/texmf-dist/tex/context/base/font-lib.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-lib.mkvi
@@ -27,19 +27,26 @@
\registerctxluafile{font-tfm}{1.001}
\registerctxluafile{font-afm}{1.001}
+\registerctxluafile{font-afk}{1.001}
+
+\registerctxluafile{font-hsh}{1.001} % hashes used by context
+\registerctxluafile{font-nod}{1.001}
\registerctxluafile{font-oti}{1.001} % otf initialization
\registerctxluafile{font-ott}{1.001} % otf tables (first)
\registerctxluafile{font-otf}{1.001} % otf main
\registerctxluafile{font-otb}{1.001} % otf main base
\registerctxluafile{node-inj}{1.001} % we might split it off
+%registerctxluafile{font-ota}{1.001} % otf analyzers
+\registerctxluafile{font-otx}{1.001} % otf analyzers
\registerctxluafile{font-otn}{1.001} % otf main node
\registerctxluafile{font-otd}{1.001} % otf dynamics (does an overload)
-\registerctxluafile{font-ota}{1.001} % otf analyzers (needs dynamics)
\registerctxluafile{font-otp}{1.001} % otf pack
\registerctxluafile{font-otc}{1.001} % otf context
\registerctxluafile{font-oth}{1.001} % otf helpers
+\registerctxluafile{font-odv}{1.001} % otf devanagari (experimental)
+
\registerctxluafile{font-pat}{1.001} % patchers
\registerctxluafile{node-fnt}{1.001} % here
@@ -56,7 +63,6 @@
\registerctxluafile{font-ext}{1.001}
\registerctxluafile{font-fbk}{1.001}
-\registerctxluafile{font-chk}{1.001}
\registerctxluafile{font-aux}{1.001}
diff --git a/Master/texmf-dist/tex/context/base/font-log.lua b/Master/texmf-dist/tex/context/base/font-log.lua
index 8bc12a2159a..773108e443e 100644
--- a/Master/texmf-dist/tex/context/base/font-log.lua
+++ b/Master/texmf-dist/tex/context/base/font-log.lua
@@ -43,7 +43,7 @@ function loggers.onetimemessage(font,char,message,reporter)
if not reporter then
reporter = report_defining
end
- reporter("char U+%05X in font '%s' with id %s: %s",char,tfmdata.properties.fullname,font,message)
+ reporter("char %U in font %a with id %s: %s",char,tfmdata.properties.fullname,font,message)
category[char] = true
end
end
@@ -52,7 +52,7 @@ function loggers.register(tfmdata,source,specification) -- save file name in spe
if tfmdata and specification and specification.specification then
local name = lower(specification.name)
if trace_defining and not usedfonts[name] then
- report_defining("registering %s as %s (used: %s)",file.basename(specification.name),source,file.basename(specification.filename))
+ report_defining("registering %a as %a, used %a",file.basename(specification.name),source,file.basename(specification.filename))
end
specification.source = source
-- loadedfonts[lower(specification.specification)] = specification
@@ -75,6 +75,6 @@ statistics.register("loaded fonts", function()
t[n] = basename(name)
end
end
- return (n > 0 and format("%s files: %s",n,concat(t," "))) or "none"
+ return n > 0 and format("%s files: %s",n,concat(t," ")) or "none"
end
end)
diff --git a/Master/texmf-dist/tex/context/base/font-lua.lua b/Master/texmf-dist/tex/context/base/font-lua.lua
index 48ce3c2f53c..6fbbcf17e4e 100644
--- a/Master/texmf-dist/tex/context/base/font-lua.lua
+++ b/Master/texmf-dist/tex/context/base/font-lua.lua
@@ -31,7 +31,7 @@ readers.check_lua = check_lua
function readers.lua(specification)
local original = specification.specification
if trace_defining then
- report_lua("using lua reader for '%s'",original)
+ report_lua("using lua reader for %a",original)
end
local fullname = specification.filename or ""
if fullname == "" then
diff --git a/Master/texmf-dist/tex/context/base/font-map.lua b/Master/texmf-dist/tex/context/base/font-map.lua
index 7f5305f9a44..29b9ecd35c3 100644
--- a/Master/texmf-dist/tex/context/base/font-map.lua
+++ b/Master/texmf-dist/tex/context/base/font-map.lua
@@ -6,18 +6,21 @@ if not modules then modules = { } end modules ['font-map'] = {
license = "see context related readme files"
}
+local tonumber = tonumber
+
local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
local utfbyte = utf.byte
+local floor = math.floor
-local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
+local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end)
local report_fonts = logs.reporter("fonts","loading") -- not otf only
-local fonts = fonts
-local mappings = { }
-fonts.mappings = mappings
+local fonts = fonts
+local mappings = fonts.mappings or { }
+fonts.mappings = mappings
--[[ldx--
<p>Eventually this code will disappear because map files are kind
@@ -30,7 +33,7 @@ local function loadlumtable(filename) -- will move to font goodies
local lumfile = resolvers.findfile(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_mapping then
- report_fonts("enhance: loading %s ",lumfile)
+ report_fonts("loading map table %a",lumfile)
end
lumunic = dofile(lumfile)
return lumunic, lumfile
@@ -40,7 +43,7 @@ end
local hex = R("AF","09")
local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (R("09")^1) / tonumber
+local dec = (R("09")^1) / tonumber
local period = P(".")
local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
@@ -63,25 +66,27 @@ local function makenameparser(str)
end
end
---~ local parser = mappings.makenameparser("Japan1")
---~ local parser = mappings.makenameparser()
---~ local function test(str)
---~ local b, a = lpegmatch(parser,str)
---~ print((a and table.serialize(b)) or b)
---~ end
---~ test("a.sc")
---~ test("a")
---~ test("uni1234")
---~ test("uni1234.xx")
---~ test("uni12349876")
---~ test("index1234")
---~ test("Japan1.123")
+-- local parser = mappings.makenameparser("Japan1")
+-- local parser = mappings.makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("index1234")
+-- test("Japan1.123")
local function tounicode16(unicode)
if unicode < 0x10000 then
return format("%04X",unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
else
- return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
+ report_fonts("can't convert %a into tounicode",unicode)
end
end
@@ -91,8 +96,10 @@ local function tounicode16sequence(unicodes)
local unicode = unicodes[l]
if unicode < 0x10000 then
t[l] = format("%04X",unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
else
- t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
+ report_fonts ("can't convert %a into tounicode",unicode)
end
end
return concat(t)
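
The two hunks above extend tounicode16 and tounicode16sequence beyond the Basic Multilingual Plane by emitting UTF-16 surrogate pairs (two 4-hexdigit units) and by reporting values that cannot be converted. For reference, the standard surrogate computation subtracts 0x10000 and then offsets the high half by 0xD800 and the low half by 0xDC00; a standalone sketch (editorial only, not taken from the patch) is:

-- Editorial sketch only: the standard UTF-16 surrogate-pair computation for
-- code points above 0xFFFF (valid Unicode scalar values go up to 0x10FFFF).

local floor, format = math.floor, string.format

local function tosurrogates(unicode)
    if unicode < 0x10000 then
        return format("%04X",unicode)
    else
        local d    = unicode - 0x10000
        local high = 0xD800 + floor(d/0x400)  -- leading (high) surrogate
        local low  = 0xDC00 + d % 0x400       -- trailing (low) surrogate
        return format("%04X%04X",high,low)
    end
end

print(tosurrogates(0x1F600)) -- D83DDE00
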
@@ -295,13 +302,13 @@ function mappings.addtounicode(data,filename)
local index = glyph.index
local toun = tounicode[index]
if toun then
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X, tounicode: %s",index,name,unic,toun)
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
else
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X",index,name,unic)
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
end
end
end
if trace_loading and (ns > 0 or nl > 0) then
- report_fonts("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
end
end
diff --git a/Master/texmf-dist/tex/context/base/font-mat.mkvi b/Master/texmf-dist/tex/context/base/font-mat.mkvi
index c8ab45b7582..165b644d2a6 100644
--- a/Master/texmf-dist/tex/context/base/font-mat.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-mat.mkvi
@@ -100,12 +100,13 @@
\csname\??fontinstanceready \fontbody-\s!mm-\fontfamily-\fontsize\endcsname \else
\ifcsname\??fontinstanceready \fontbody-\s!mm-\fontfamily \endcsname \settrue \c_font_auto_size
\csname\??fontinstanceready \fontbody-\s!mm-\fontfamily \endcsname \else
- \nullfont \settrue \c_font_auto_size
+ \settrue \c_font_auto_size
\fi\fi}
\def\font_helpers_set_math_family_indeed#mrtag#family% \fontface etc are also used later on
{\let\savedfontbody\fontbody
\let\fontfamily#family%
+ % the order is important as we depend on known id's when completing fonts
\let\fontface\!!plusthree\font_helpers_set_math_family_a\scriptscriptfont#mrtag\font
\let\fontface\!!plustwo \font_helpers_set_math_family_a\scriptfont #mrtag\font
\let\fontface\!!plusone \font_helpers_set_math_family_a\textfont #mrtag\font
@@ -377,19 +378,23 @@
% and inline text bold) so we will need explicit switches as well as
% an automatic one. (We will use lucida as an example.)
-\unexpanded\def\mr
+\ifdefined\mathdefault \else \let\mathdefault\relax \fi
+
+\unexpanded\def\mr % math regular
{\ifmmode
\font_helpers_synchronize_math_family_mr
\else
\font_helpers_set_current_font_alternative\s!mr
- \fi}
+ \fi
+ \mathdefault}
-\unexpanded\def\mb
+\unexpanded\def\mb % math bold
{\ifmmode
\font_helpers_synchronize_math_family_mb
\else
\font_helpers_set_current_font_alternative\s!mb
- \fi}
+ \fi
+ \mathdefault}
\appendtoks
\font_helpers_synchronize_math_family % auto bold
diff --git a/Master/texmf-dist/tex/context/base/font-mis.lua b/Master/texmf-dist/tex/context/base/font-mis.lua
index 9ca3577511f..247b0f641c9 100644
--- a/Master/texmf-dist/tex/context/base/font-mis.lua
+++ b/Master/texmf-dist/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.737
+otf.version = otf.version or 2.741
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
@@ -46,7 +46,7 @@ end
local featuregroups = { "gsub", "gpos" }
function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type
- local t = lower(t or (name and file.extname(name)) or "")
+ local t = lower(t or (name and file.suffix(name)) or "")
if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then
local filename = resolvers.findfile(name,t) or ""
if filename ~= "" then
diff --git a/Master/texmf-dist/tex/context/base/font-nod.lua b/Master/texmf-dist/tex/context/base/font-nod.lua
new file mode 100644
index 00000000000..f99130279d1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-nod.lua
@@ -0,0 +1,434 @@
+if not modules then modules = { } end modules ['font-nod'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>This is rather experimental. We need more control and some of this
+might become a runtime module instead. This module will be cleaned up!</p>
+--ldx]]--
+
+local tonumber, tostring = tonumber, tostring
+local utfchar = utf.char
+local concat = table.concat
+local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep
+
+local report_nodes = logs.reporter("fonts","tracing")
+
+fonts = fonts or { }
+nodes = nodes or { }
+
+local fonts, nodes, node, context = fonts, nodes, node, context
+
+local tracers = nodes.tracers or { }
+nodes.tracers = tracers
+
+local tasks = nodes.tasks or { }
+nodes.tasks = tasks
+
+local handlers = nodes.handlers or { }
+nodes.handlers = handlers
+
+local injections = nodes.injections or { }
+nodes.injections = injections
+
+local char_tracers = tracers.characters or { }
+tracers.characters = char_tracers
+
+local step_tracers = tracers.steppers or { }
+tracers.steppers = step_tracers
+
+local copy_node_list = node.copy_list
+local hpack_node_list = node.hpack
+local free_node_list = node.flush_list
+local traverse_nodes = node.traverse
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local spec_code = nodecodes.glue_spec
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+
+local nodepool = nodes.pool
+local new_glyph = nodepool.glyph
+
+local formatters = string.formatters
+local formatter = string.formatter
+
+local hashes = fonts.hashes
+
+local fontidentifiers = hashes.identifiers
+local fontdescriptions = hashes.descriptions
+local fontcharacters = hashes.characters
+local fontproperties = hashes.properties
+local fontparameters = hashes.parameters
+
+function char_tracers.collect(head,list,tag,n)
+ n = n or 0
+ local ok, fn = false, nil
+ while head do
+ local id = head.id
+ if id == glyph_code then
+ local f = head.font
+ if f ~= fn then
+ ok, fn = false, f
+ end
+ local c = head.char
+ local i = fontidentifiers[f].indices[c] or 0
+ if not ok then
+ ok = true
+ n = n + 1
+ list[n] = list[n] or { }
+ list[n][tag] = { }
+ end
+ local l = list[n][tag]
+ l[#l+1] = { c, f, i }
+ elseif id == disc_code then
+ -- skip
+ else
+ ok = false
+ end
+ head = head.next
+ end
+end
+
+function char_tracers.equal(ta, tb)
+ if #ta ~= #tb then
+ return false
+ else
+ for i=1,#ta do
+ local a, b = ta[i], tb[i]
+ if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
+ return false
+ end
+ end
+ end
+ return true
+end
+
+function char_tracers.string(t)
+ local tt = { }
+ for i=1,#t do
+ tt[i] = utfchar(t[i][1])
+ end
+ return concat(tt,"")
+end
+
+local f_unicode = formatters["%U"]
+
+function char_tracers.unicodes(t,decimal)
+ local tt = { }
+ for i=1,#t do
+ local n = t[i][1]
+ if n == 0 then
+ tt[i] = "-"
+ elseif decimal then
+ tt[i] = n
+ else
+ tt[i] = f_unicode(n)
+ end
+ end
+ return concat(tt," ")
+end
+
+function char_tracers.indices(t,decimal)
+ local tt = { }
+ for i=1,#t do
+ local n = t[i][3]
+ if n == 0 then
+ tt[i] = "-"
+ elseif decimal then
+ tt[i] = n
+ else
+ tt[i] = f_unicode(n)
+ end
+ end
+ return concat(tt," ")
+end
+
+function char_tracers.start()
+ local npc = handlers.characters
+ local list = { }
+ function handlers.characters(head)
+ local n = #list
+ char_tracers.collect(head,list,'before',n)
+ local h, d = npc(head)
+ char_tracers.collect(head,list,'after',n)
+ if #list > n then
+ list[#list+1] = { }
+ end
+ return h, d
+ end
+ function char_tracers.stop()
+ tracers.list['characters'] = list
+ local variables = {
+ ['title'] = 'ConTeXt Character Processing Information',
+ ['color-background-one'] = lmx.get('color-background-yellow'),
+ ['color-background-two'] = lmx.get('color-background-purple'),
+ }
+ lmx.show('context-characters.lmx',variables)
+ handlers.characters = npc
+ tasks.restart("processors", "characters")
+ end
+ tasks.restart("processors", "characters")
+end
+
+local stack = { }
+
+function tracers.start(tag)
+ stack[#stack+1] = tag
+ local tracer = tracers[tag]
+ if tracer and tracer.start then
+ tracer.start()
+ end
+end
+function tracers.stop()
+ local tracer = stack[#stack]
+ if tracer and tracer.stop then
+ tracer.stop()
+ end
+ stack[#stack] = nil
+end
+
+-- experimental
+
+local collection, collecting, messages = { }, false, { }
+
+function step_tracers.start()
+ collecting = true
+end
+
+function step_tracers.stop()
+ collecting = false
+end
+
+function step_tracers.reset()
+ for i=1,#collection do
+ local c = collection[i]
+ if c then
+ free_node_list(c)
+ end
+ end
+ collection, messages = { }, { }
+end
+
+function step_tracers.nofsteps()
+ return context(#collection)
+end
+
+function step_tracers.glyphs(n,i)
+ local c = collection[i]
+ if c then
+ tex.box[n] = hpack_node_list(copy_node_list(c))
+ end
+end
+
+function step_tracers.features()
+ -- we cannot use first_glyph here as it only finds characters with subtype < 256
+ local f = collection[1]
+ while f do
+ if f.id == glyph_code then
+ local tfmdata, t = fontidentifiers[f.font], { }
+ for feature, value in table.sortedhash(tfmdata.shared.features) do
+ if feature == "number" or feature == "features" then
+ -- private
+ elseif type(value) == "boolean" then
+ if value then
+ t[#t+1] = formatters["%s=yes"](feature)
+ else
+ -- skip
+ end
+ else
+ t[#t+1] = formatters["%s=%s"](feature,value)
+ end
+ end
+ if #t > 0 then
+ context(concat(t,", "))
+ else
+ context("no features")
+ end
+ return
+ end
+ f = f.next
+ end
+end
+
+function tracers.fontchar(font,char)
+ local n = new_glyph()
+ n.font, n.char, n.subtype = font, char, 256
+ context(n)
+end
+
+function step_tracers.font(command)
+ local c = collection[1]
+ while c do
+ local id = c.id
+ if id == glyph_code then
+ local font = c.font
+ local name = file.basename(fontproperties[font].filename or "unknown")
+ local size = fontparameters[font].size or 0
+ if command then
+ context[command](font,name,size) -- size in sp
+ else
+ context("[%s: %s @ %p]",font,name,size)
+ end
+ return
+ else
+ c = c.next
+ end
+ end
+end
+
+function step_tracers.codes(i,command)
+ local c = collection[i]
+ while c do
+ local id = c.id
+ if id == glyph_code then
+ if command then
+ local f, c = c.font,c.char
+ local d = fontdescriptions[f]
+ local d = d and d[c]
+ context[command](f,c,d and d.class or "")
+ else
+ context("[%s:U+%04X]",c.font,c.char)
+ end
+ elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
+ context("[%s]",c.dir)
+ else
+ context("[%s]",nodecodes[id])
+ end
+ c = c.next
+ end
+end
+
+function step_tracers.messages(i,command,split)
+ local list = messages[i] -- or { "no messages" }
+ if list then
+ for i=1,#list do
+ local l = list[i]
+ if not command then
+ context("(%s)",l)
+ elseif split then
+ local a, b = match(l,"^(.-)%s*:%s*(.*)$")
+ context[command](a or l or "",b or "")
+ else
+ context[command](l)
+ end
+ end
+ end
+end
+
+-- hooks into the node list processor (see otf)
+
+function step_tracers.check(head)
+ if collecting then
+ step_tracers.reset()
+ local n = copy_node_list(head)
+ injections.handler(n,nil,"trace",true)
+ handlers.protectglyphs(n) -- can be option
+ collection[1] = n
+ end
+end
+
+function step_tracers.register(head)
+ if collecting then
+ local nc = #collection+1
+ if messages[nc] then
+ local n = copy_node_list(head)
+ injections.handler(n,nil,"trace",true)
+ handlers.protectglyphs(n) -- can be option
+ collection[nc] = n
+ end
+ end
+end
+
+function step_tracers.message(str,...)
+ str = formatter(str,...)
+ if collecting then
+ local n = #collection + 1
+ local m = messages[n]
+ if not m then m = { } messages[n] = m end
+ m[#m+1] = str
+ end
+ return str -- saves an intermediate var in the caller
+end
+
+--
+
+local threshold = 65536
+
+local function toutf(list,result,nofresult,stopcriterium)
+ if list then
+ for n in traverse_nodes(list) do
+ local id = n.id
+ if id == glyph_code then
+ local components = n.components
+ if components then
+ result, nofresult = toutf(components,result,nofresult)
+ else
+ local c = n.char
+ local fc = fontcharacters[n.font]
+ if fc then
+ local u = fc[c].tounicode
+ if u then
+ for s in gmatch(u,"....") do
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(tonumber(s,16))
+ end
+ else
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ end
+ else
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ end
+ end
+ elseif id == disc_code then
+ result, nofresult = toutf(n.replace,result,nofresult) -- needed?
+ elseif id == hlist_code or id == vlist_code then
+ -- if nofresult > 0 and result[nofresult] ~= " " then
+ -- nofresult = nofresult + 1
+ -- result[nofresult] = " "
+ -- end
+ result, nofresult = toutf(n.list,result,nofresult)
+ elseif id == glue_code then
+ if nofresult > 0 and result[nofresult] ~= " " then
+ nofresult = nofresult + 1
+ result[nofresult] = " "
+ end
+ elseif id == kern_code and n.kern > threshold then
+ if nofresult > 0 and result[nofresult] ~= " " then
+ nofresult = nofresult + 1
+ result[nofresult] = " "
+ end
+ end
+ if n == stopcriterium then
+ break
+ end
+ end
+ end
+ if nofresult > 0 and result[nofresult] == " " then
+ result[nofresult] = nil
+ nofresult = nofresult - 1
+ end
+ return result, nofresult
+end
+
+function nodes.toutf(list,stopcriterium)
+ local result, nofresult = toutf(list,{},0,stopcriterium)
+ return concat(result)
+end
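
The nodes.toutf helper above walks a (copied) node list and reassembles readable text: glyph nodes are mapped through the per-font tounicode strings (runs of 4-hexdigit UTF-16 units, consumed by the gmatch(u,"....") loop), discretionaries contribute their replacement text, and glue or kerns above the threshold become single spaces. A small standalone sketch of decoding such a tounicode value, including surrogate pairs, is given below (editorial only; utf.char is LuaTeX's helper, with Lua 5.3's utf8.char assumed as a fallback):

-- Editorial sketch only: turning a font "tounicode" value (4-hexdigit UTF-16
-- units) back into a string, combining surrogate pairs where present.

local utfchar = (utf and utf.char) or utf8.char
local sub, tonumber, concat = string.sub, tonumber, table.concat

local function fromtounicode(u)
    local t, i, n = { }, 1, #u
    while i <= n do
        local c = tonumber(sub(u,i,i+3),16)
        i = i + 4
        if c >= 0xD800 and c <= 0xDBFF and i <= n then   -- high surrogate: combine with the next unit
            local c2 = tonumber(sub(u,i,i+3),16)
            i = i + 4
            c = 0x10000 + (c - 0xD800) * 0x400 + (c2 - 0xDC00)
        end
        t[#t+1] = utfchar(c)
    end
    return concat(t)
end

print(fromtounicode("006600660069")) -- ffi (a typical ligature expansion)
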
diff --git a/Master/texmf-dist/tex/context/base/font-odk.lua b/Master/texmf-dist/tex/context/base/font-odk.lua
new file mode 100644
index 00000000000..c34efc1203e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-odk.lua
@@ -0,0 +1,904 @@
+-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+-- We keep the original around for a while so that we can check it --
+-- when the above code does it wrong (data tables are not included). --
+-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+
+-- author : Kai Eigner, TAT Zetwerk
+-- copyright : TAT Zetwerk
+-- comment : see font-odv.lua for current implementation
+
+-- local state = attributes.private('state')
+-- local sylnr = attributes.private('syllabe')
+--
+-- local function install_dev(tfmdata)
+-- local features = tfmdata.resources.features
+-- local sequences = tfmdata.resources.sequences
+--
+-- local insertpos = 1
+-- for s=1,#sequences do -- classify chars
+-- for k in pairs(basic_shaping_forms) do
+-- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end
+-- end
+-- end
+--
+-- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } }
+--
+-- local sequence_dev2_reorder_matras = {
+-- chain = 0,
+-- features = { dev2_reorder_matras = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_matras",
+-- subtables = { "dev2_reorder_matras" },
+-- type = "dev2_reorder_matras",
+-- }
+-- local sequence_dev2_reorder_reph = {
+-- chain = 0,
+-- features = { dev2_reorder_reph = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_reph",
+-- subtables = { "dev2_reorder_reph" },
+-- type = "dev2_reorder_reph",
+-- }
+-- local sequence_dev2_reorder_pre_base_reordering_consonants = {
+-- chain = 0,
+-- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_pre_base_reordering_consonants",
+-- subtables = { "dev2_reorder_pre_base_reordering_consonants" },
+-- type = "dev2_reorder_pre_base_reordering_consonants",
+-- }
+-- local sequence_remove_joiners = {
+-- chain = 0,
+-- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "remove_joiners",
+-- subtables = { "remove_joiners" },
+-- type = "remove_joiners",
+-- }
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants)
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_reph)
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_matras)
+-- table.insert(sequences, insertpos, sequence_remove_joiners)
+-- end
+--
+-- local function deva_reorder(head,start,stop,font,attr)
+-- local tfmdata = fontdata[font]
+-- local lookuphash = tfmdata.resources.lookuphash
+-- local sequences = tfmdata.resources.sequences
+--
+-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
+--
+-- local sharedfeatures = tfmdata.shared.features
+-- sharedfeatures["remove_joiners"] = true
+-- local datasets = otf.dataset(tfmdata,font,attr)
+--
+-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
+--
+-- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false
+-- local reph, vattu = false, false
+-- for s=1,#sequences do
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] == "rphf" then reph = true end
+-- if featurevalue and dataset[4] == "blwf" then vattu = true end
+-- end
+-- if ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+-- if n == stop then return head, stop end
+-- if zwj[n.next.char] then
+-- current = start
+-- else
+-- current = n.next
+-- set_attribute(start,state,5) -- rphf
+-- end
+-- end
+--
+-- if nbsp[current.char] then --Stand Alone cluster
+-- if current == stop then
+-- stop = stop.prev
+-- head = node.remove(head, current)
+-- node.free(current)
+-- return head, stop
+-- else
+-- base, firstcons, lastcons = current, current, current
+-- current = current.next
+-- if current ~= stop then
+-- if nukta[current.char] then current = current.next end
+-- if zwj[current.char] then
+-- if current ~= stop and current.next ~= stop and halant[current.next.char] then
+-- current = current.next
+-- local tmp = current.next.next
+-- local changestop = current.next == stop
+-- local tempcurrent = node.copy(current.next)
+-- tempcurrent.next = node.copy(current)
+-- tempcurrent.next.prev = tempcurrent
+-- set_attribute(tempcurrent,state,8) --blwf
+-- tempcurrent = nodes.handlers.characters(tempcurrent)
+-- unset_attribute(tempcurrent,state)
+-- if current.next.char == tempcurrent.char then
+-- node.flush_list(tempcurrent)
+-- local n = node.copy(current)
+-- current.char = dotted_circle
+-- head = node.insert_after(head, current, n)
+-- else
+-- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
+-- local freenode = current.next
+-- current.next = tmp
+-- tmp.prev = current
+-- node.free(freenode)
+-- node.flush_list(tempcurrent)
+-- if changestop then stop = current end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- while not basefound do -- find base consonant
+-- if consonant[current.char] then
+-- set_attribute(current, state, 6) -- half
+-- if not firstcons then firstcons = current end
+-- lastcons = current
+-- if not base then
+-- base = current
+-- else --check whether consonant has below-base (or post-base) form
+-- local baseform = true
+-- for s=1,#sequences do
+-- local sequence = sequences[s]
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] == "blwf" then
+-- local subtables = sequence.subtables
+-- for i=1,#subtables do
+-- local lookupname = subtables[i]
+-- local lookupcache = lookuphash[lookupname]
+-- if lookupcache then
+-- local lookupmatch = lookupcache[current.char]
+-- if lookupmatch then
+-- set_attribute(current, state, 8) -- blwf
+-- baseform = false
+-- end
+-- end
+-- end
+-- end
+-- end
+-- if baseform then base = current end
+-- end
+-- end
+-- basefound = current == stop
+-- current = current.next
+-- end
+-- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one
+-- n = base.next
+-- if nukta[n.char] then n = n.next end
+-- if halant[n.char] then
+-- if lastcons ~= stop then
+-- local ln = lastcons.next
+-- if nukta[ln.char] then lastcons = ln end
+-- end
+-- local np, nn, ln = n.prev, n.next, lastcons.next
+-- np.next = n.next
+-- nn.prev = n.prev
+-- lastcons.next = n
+-- if ln then ln.prev = n end
+-- n.next = ln
+-- n.prev = lastcons
+-- if lastcons == stop then stop = n end
+-- end
+-- end
+--
+-- n = start.next
+-- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base consonant
+-- local matra = base
+-- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end
+-- local sp, nn, mn = start.prev, n.next, matra.next
+-- if sp then sp.next = nn end
+-- nn.prev = sp
+-- matra.next = start
+-- start.prev = matra
+-- n.next = mn
+-- if mn then mn.prev = n end
+-- if head == start then head = nn end
+-- start = nn
+-- if matra == stop then stop = n end
+-- end
+--
+-- local current = start
+-- while current ~= stop do
+-- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end
+-- current = current.next
+-- end
+--
+-- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end
+--
+-- local current, allreordered, moved = start, false, { [base] = true }
+-- local a, b, p, bn = base, base, base, base.next
+-- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end
+-- while not allreordered do
+-- local c, n, l = current, current.next, nil --current is always consonant
+-- if c ~= stop and nukta[n.char] then c = n n = n.next end
+-- if c ~= stop and halant[n.char] then c = n n = n.next end
+-- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end
+-- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end
+-- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end
+-- local bp, cn = firstcons.prev, current.next
+-- while cn ~= c.next do -- move pre-base matras...
+-- if pre_mark[cn.char] then
+-- if bp then bp.next = cn end
+-- cn.prev.next = cn.next
+-- if cn.next then cn.next.prev = cn.prev end
+-- if cn == stop then stop = cn.prev end
+-- cn.prev = bp
+-- cn.next = firstcons
+-- firstcons.prev = cn
+-- if firstcons == start then
+-- if head == start then head = cn end
+-- start = cn
+-- end
+-- break
+-- end
+-- cn = cn.next
+-- end
+-- allreordered = c == stop
+-- current = c.next
+-- end
+--
+-- if reph or vattu then
+-- local current, cns = start, nil
+-- while current ~= stop do
+-- local c, n = current, current.next
+-- if ra[current.char] and halant[n.char] then
+-- c, n = n, n.next
+-- local b, bn = base, base
+-- while bn ~= stop do
+-- if dependent_vowel[bn.next.char] then b = bn.next end
+-- bn = bn.next
+-- end
+-- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph
+-- if b ~= current then
+-- if current == start then
+-- if head == start then head = n end
+-- start = n
+-- end
+-- if b == stop then stop = c end
+-- if current.prev then current.prev.next = n end
+-- if n then n.prev = current.prev end
+-- c.next = b.next
+-- if b.next then b.next.prev = c end
+-- b.next = current
+-- current.prev = b
+-- end
+-- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
+-- local cp, cnsn = current.prev, cns.next
+-- if cp then cp.next = n end
+-- if n then n.prev = cp end
+-- cns.next = current
+-- current.prev = cns
+-- c.next = cnsn
+-- if cnsn then cnsn.prev = c end
+-- if c == stop then stop = cp break end
+-- current = n.prev
+-- end
+-- elseif consonant[current.char] or nbsp[current.char] then
+-- cns = current
+-- if halant[cns.next.char] then cns = cns.next end
+-- end
+-- current = current.next
+-- end
+-- end
+--
+-- if nbsp[base.char] then
+-- head = node.remove(head, base)
+-- node.free(base)
+-- end
+--
+-- return head, stop
+-- end
+--
+-- function dev2_reorder_matras(start,kind,lookupname,replacement)
+-- local current = start
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- local sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- return start, true
+-- end
+--
+-- function dev2_reorder_reph(start,kind,lookupname,replacement)
+-- local current, sn = start.next, nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- if not sn then
+-- current = start.next
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4
+-- if has_attribute(current, state) == 9 then --post-base
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = current.prev
+-- current.prev.next = start
+-- start.next = current
+-- current.prev = start
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- end
+-- if not sn then
+-- current = start.next
+-- local c = nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5
+-- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end
+-- current = current.next
+-- end
+-- if c then
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = c.prev
+-- c.prev.next = start
+-- start.next = c
+-- c.prev = start
+-- start = sn
+-- end
+-- end
+-- if not sn then
+-- current = start
+-- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6
+-- current = current.next
+-- end
+-- if start ~= current then
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- end
+-- end
+-- return start, true
+-- end
+--
+-- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- local current, sn = start, nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- if not sn then
+-- current = start.next
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if not consonant[current.char] and has_attribute(current, state) then --main
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = current.prev
+-- current.prev.next = start
+-- start.next = current
+-- current.prev = start
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- end
+-- return start, true
+-- end
+--
+-- function remove_joiners(start,kind,lookupname,replacement)
+-- local stop = start.next
+-- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end
+-- if stop then stop.prev.next = nil stop.prev = start.prev end
+-- if start.prev then start.prev.next = stop end
+-- node.flush_list(start)
+-- return stop, true
+-- end
+--
+-- local function dev2_reorder(head,start,stop,font,attr)
+-- local tfmdata = fontdata[font]
+-- local lookuphash = tfmdata.resources.lookuphash
+-- local sequences = tfmdata.resources.sequences
+--
+-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
+--
+-- local sharedfeatures = tfmdata.shared.features
+-- sharedfeatures["dev2_reorder_matras"] = true
+-- sharedfeatures["dev2_reorder_reph"] = true
+-- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true
+-- sharedfeatures["remove_joiners"] = true
+-- local datasets = otf.dataset(tfmdata,font,attr)
+--
+-- local reph, pre_base_reordering_consonants = false, nil
+-- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil
+-- local locl = { }
+--
+-- for s=1,#sequences do -- classify chars
+-- local sequence = sequences[s]
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] then
+-- local subtables = sequence.subtables
+-- for i=1,#subtables do
+-- local lookupname = subtables[i]
+-- local lookupcache = lookuphash[lookupname]
+-- if lookupcache then
+-- if dataset[4] == "rphf" then
+-- if dataset[3] ~= 0 then --rphf is result of chain
+-- else
+-- reph = lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"]
+-- end
+-- end
+-- if dataset[4] == "pref" and not pre_base_reordering_consonants then
+-- for k, v in pairs(lookupcache[0x094D]) do
+-- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
+-- end
+-- end
+-- local current = start
+-- while current ~= stop.next do
+-- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain
+-- if current ~= stop then
+-- local c, n = locl[current] or current.char, locl[current.next] or current.next.char
+-- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant
+-- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph
+-- current = current.next
+-- elseif current == start then
+-- set_attribute(current,state,5)
+-- end
+-- current = current.next
+-- end
+-- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant
+-- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevents creation of half
+-- current = current.next
+-- else
+-- set_attribute(current,state,6)
+-- if not halfpos then halfpos = current end
+-- end
+-- current = current.next
+-- end
+-- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant
+-- set_attribute(current,state,7)
+-- set_attribute(current.next,state,7)
+-- current = current.next
+-- end
+-- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant
+-- set_attribute(current,state,8)
+-- set_attribute(current.next,state,8)
+-- current = current.next
+-- subpos = current
+-- end
+-- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant
+-- set_attribute(current,state,9)
+-- set_attribute(current.next,state,9)
+-- current = current.next
+-- postpos = current
+-- end
+-- end
+-- current = current.next
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- lookuphash["dev2_reorder_matras"] = pre_mark
+-- lookuphash["dev2_reorder_reph"] = { [reph] = true }
+-- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { }
+-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
+--
+-- local current, base, firstcons = start, nil, nil
+-- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+--
+-- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster
+-- if current == stop then
+-- stop = stop.prev
+-- head = node.remove(head, current)
+-- node.free(current)
+-- return head, stop
+-- else
+-- base = current
+-- current = current.next
+-- if current ~= stop then
+-- if nukta[current.char] then current = current.next end
+-- if zwj[current.char] then
+-- if current ~= stop and current.next ~= stop and halant[current.next.char] then
+-- current = current.next
+-- local tmp = current.next.next
+-- local changestop = current.next == stop
+-- current.next.next = nil
+-- set_attribute(current,state,7) --pref
+-- current = nodes.handlers.characters(current)
+-- set_attribute(current,state,8) --blwf
+-- current = nodes.handlers.characters(current)
+-- set_attribute(current,state,9) --pstf
+-- current = nodes.handlers.characters(current)
+-- unset_attribute(current,state)
+-- if halant[current.char] then
+-- current.next.next = tmp
+-- local nc = node.copy(current)
+-- current.char = dotted_circle
+-- head = node.insert_after(head, current, nc)
+-- else
+-- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node)
+-- if changestop then stop = current end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- else --not Stand Alone cluster
+-- while current ~= stop.next do -- find base consonant
+-- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then
+-- if not firstcons then firstcons = current end
+-- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra
+-- end
+-- current = current.next
+-- end
+-- if not base then
+-- base = firstcons
+-- end
+-- end
+--
+-- if not base then
+-- if has_attribute(start, state) == 5 then unset_attribute(start, state) end
+-- return head, stop
+-- else
+-- if has_attribute(base, state) then unset_attribute(base, state) end
+-- basepos = base
+-- end
+-- if not halfpos then halfpos = base end
+-- if not subpos then subpos = base end
+-- if not postpos then postpos = subpos or base end
+--
+-- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
+-- local moved = { }
+-- current = start
+-- while current ~= stop.next do
+-- local char, target, cn = locl[current] or current.char, nil, current.next
+-- if not moved[current] and dependent_vowel[char] then
+-- if pre_mark[char] then -- Before first half form in the syllable
+-- moved[current] = true
+-- if current.prev then current.prev.next = current.next end
+-- if current.next then current.next.prev = current.prev end
+-- if current == stop then stop = current.prev end
+-- if halfpos == start then
+-- if head == start then head = current end
+-- start = current
+-- end
+-- if halfpos.prev then halfpos.prev.next = current end
+-- current.prev = halfpos.prev
+-- halfpos.prev = current
+-- current.next = halfpos
+-- halfpos = current
+-- elseif above_mark[char] then -- After main consonant
+-- target = basepos
+-- if subpos == basepos then subpos = current end
+-- if postpos == basepos then postpos = current end
+-- basepos = current
+-- elseif below_mark[char] then -- After subjoined consonants
+-- target = subpos
+-- if postpos == subpos then postpos = current end
+-- subpos = current
+-- elseif post_mark[char] then -- After post-form consonant
+-- target = postpos
+-- postpos = current
+-- end
+-- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then
+-- if current.prev then current.prev.next = current.next end
+-- if current.next then current.next.prev = current.prev end
+-- if current == stop then stop = current.prev end
+-- if target.next then target.next.prev = current end
+-- current.next = target.next
+-- target.next = current
+-- current.prev = target
+-- end
+-- end
+-- current = cn
+-- end
+--
+-- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
+-- local current, c = start, nil
+-- while current ~= stop do
+-- if halant[current.char] or stress_tone_mark[current.char] then
+-- if not c then c = current end
+-- else
+-- c = nil
+-- end
+-- if c and nukta[current.next.char] then
+-- if head == c then head = current.next end
+-- if stop == current.next then stop = current end
+-- if c.prev then c.prev.next = current.next end
+-- current.next.prev = c.prev
+-- current.next = current.next.next
+-- if current.next.next then current.next.next.prev = current end
+-- c.prev = current.next
+-- current.next.next = c
+-- end
+-- if stop == current then break end
+-- current = current.next
+-- end
+--
+-- if nbsp[base.char] then
+-- head = node.remove(head, base)
+-- node.free(base)
+-- end
+--
+-- return head, stop
+-- end
+--
+-- function fonts.analyzers.methods.deva(head,font,attr)
+-- local orighead = head
+-- local current, start, done = head, true, false
+-- while current do
+-- if current.id == glyph and current.subtype<256 and current.font == font then
+-- done = true
+-- local syllablestart, syllableend = current, nil
+--
+-- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2)
+-- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end
+-- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
+-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
+-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
+-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- local n = c.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then
+-- local ni = n.next
+-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
+-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
+-- end
+-- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c.next
+-- syllableend = c
+-- if syllablestart ~= syllableend then
+-- head, current = deva_reorder(head, syllablestart,syllableend,font,attr)
+-- current = current.next
+-- end
+-- elseif consonant[current.char] then -- syllable containing consonant
+-- prevc = true
+-- while prevc do
+-- prevc = false
+-- local n = current.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then
+-- local n = n.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then
+-- prevc = true
+-- current = n
+-- end
+-- end
+-- end
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsft Devanagari OpenType specification)
+-- syllableend = current
+-- current = current.next
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- syllableend = current
+-- current = current.next
+-- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- end
+-- if syllablestart ~= syllableend then
+-- head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
+-- current = current.next
+-- end
+-- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM]
+-- syllableend = current
+-- current = current.next
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- else -- Syntax error
+-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
+-- local n = node.copy(current)
+-- if pre_mark[current.char] then
+-- n.char = dotted_circle
+-- else
+-- current.char = dotted_circle
+-- end
+-- head, current = node.insert_after(head, current, n)
+-- end
+-- current = current.next
+-- end
+-- else
+-- current = current.next
+-- end
+-- start = false
+-- end
+--
+-- return head, done
+-- end
+--
+-- function fonts.analyzers.methods.dev2(head,font,attr)
+-- local current, start, done, syl_nr = head, true, false, 0
+-- while current do
+-- local syllablestart, syllableend = nil, nil
+-- if current.id == glyph and current.subtype<256 and current.font == font then
+-- syllablestart = current
+-- done = true
+-- local c, n = current, current.next
+-- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end
+-- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- n = c.next
+-- local ni, nii = nil, nil
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then local ni = n.next end
+-- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then
+-- nii = ni.next
+-- if zwj[ni.char] and consonant[nii.char] then
+-- c = nii
+-- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then
+-- c = nii.next
+-- end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
+-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
+-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
+-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- n = c.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then
+-- local ni = n.next
+-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
+-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- elseif consonant[current.char] then --Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
+-- c = current
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- n = c
+-- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do
+-- if halant[n.next.char] then
+-- n = n.next
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end
+-- else
+-- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end
+-- end
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then
+-- n = n.next
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end
+-- c = n
+-- else
+-- break
+-- end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then
+-- c = c.next
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end
+-- else
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- end
+-- end
+--
+-- if syllableend then
+-- syl_nr = syl_nr + 1
+-- c = syllablestart
+-- while c ~= syllableend.next do
+-- set_attribute(c,sylnr,syl_nr)
+-- c = c.next
+-- end
+-- end
+-- if syllableend and syllablestart ~= syllableend then
+-- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr)
+-- end
+--
+-- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error
+-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
+-- local n = node.copy(current)
+-- if pre_mark[current.char] then
+-- n.char = dotted_circle
+-- else
+-- current.char = dotted_circle
+-- end
+-- head, current = node.insert_after(head, current, n)
+-- end
+-- end
+--
+-- start = false
+-- current = current.next
+-- end
+--
+-- return head, done
+-- end
+--
+-- function otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement)
+-- return dev2_reorder_matras(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement)
+-- return dev2_reorder_reph(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.remove_joiners(start,kind,lookupname,replacement)
+-- return remove_joiners(start,kind,lookupname,replacement)
+-- end
diff --git a/Master/texmf-dist/tex/context/base/font-odv.lua b/Master/texmf-dist/tex/context/base/font-odv.lua
new file mode 100644
index 00000000000..b518fb79c4f
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-odv.lua
@@ -0,0 +1,2074 @@
+if not modules then modules = { } end modules ['font-odv'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Kai Eigner, TAT Zetwerk / Hans Hagen, PRAGMA ADE",
+ copyright = "TAT Zetwerk / PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A few remarks:
+--
+-- This code is a partial rewrite of the code that deals with devanagari. The data and logic
+-- are by Kai Eigner and based on Microsoft's OpenType specifications for specific
+-- scripts, but with a few improvements. More information can be found at:
+--
+-- deva: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/introO.mspx
+-- dev2: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/intro.mspx
+--
+-- As I touched nearly all code, reshuffled it, optimized a lot, etc. etc. (imagine how
+-- much can get messed up in over a week's work) it could be that I introduced bugs. There
+-- is more to gain (esp in the functions applied to a range) but I'll do that when
+-- everything works as expected. Kai's original code is kept in font-odk.lua as a reference
+-- so blame me (HH) for bugs.
+--
+-- It is interesting that Kai managed to write this on top of the existing otf handler. Only a
+-- few extensions were needed, like a few more analyzing states and dealing with changed
+-- head nodes in the core scanner as that only happens here. There's a lot going on here
+-- and it's only because I touched nearly all code that I got a bit of a picture of what
+-- happens. For in-depth knowledge one needs to consult Kai.
+--
+-- The rewrite mostly deals with efficiency, both in terms of speed and code. We also made
+-- sure that it suits generic use as well as use in ConTeXt. I removed some buglets but may
+-- as well have messed up the logic by doing this. For this reason we keep the original
+-- around, as it serves as a reference. Due to all the reshuffling of glyphs quite some
+-- leaks occur(red), but once I'm satisfied with the rewrite I'll weed them out. I also
+-- integrated initialization etc. into the regular mechanisms.
+--
+-- In the meantime, we're down from 25.5-3.5=22 seconds to 17.7-3.5=14.2 seconds for a 100
+-- page sample (mid 2012) with both variants so it's worth the effort. Some more speedup is
+-- to be expected. Due to the method chosen it will never be real fast. If I ever become a
+-- power user I'll have a go at some further speed up. I will rename some functions (and
+-- features) once we don't need to check the original code. We now use a special subset
+-- sequence for use inside the analyzer (after all we can store this in the dataset
+-- and save redundant analysis).
+--
+-- I might go for an array approach with respect to attributes (and reshuffling). Easier.
+--
+-- Some data will move to char-def.lua (some day).
+--
+-- Hans Hagen, PRAGMA-ADE, Hasselt NL
+
+-- Matras: according to Microsoft typography specifications "up to one of each type:
+-- pre-, above-, below- or post- base", but that does not seem to be right. It could
+-- become an option.
+--
+-- The next code looks weird anyway: the "and boolean" should move inside the if
+-- or we should check differently (case vs successive).
+--
+-- local function ms_matra(c)
+-- local prebase, abovebase, belowbase, postbase = true, true, true, true
+-- local n = c.next
+-- while n and n.id == glyph_code and n.subtype<256 and n.font == font do
+-- local char = n.char
+-- if not dependent_vowel[char] then
+-- break
+-- elseif pre_mark[char] and prebase then
+-- prebase = false
+-- elseif above_mark[char] and abovebase then
+-- abovebase = false
+-- elseif below_mark[char] and belowbase then
+-- belowbase = false
+-- elseif post_mark[char] and postbase then
+-- postbase = false
+-- else
+-- return c
+-- end
+-- c = c.next
+-- end
+-- return c
+-- end
+
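+-- A non-authoritative sketch of what the "check differently" remark above might amount
+-- to: test the mark class first and keep one 'seen' slot per class, so the boolean check
+-- ends up inside the if. This assumes the mark_four hash defined further down (mapping
+-- each matra to its pre/above/below/post class table) plus the usual glyph_code, subtype
+-- and font guards; like the original above it is kept commented, for reference only.
+--
+-- local function ms_matra_alt(c,font)
+--     local seen = { } -- at most one matra per class
+--     local n = c.next
+--     while n and n.id == glyph_code and n.subtype < 256 and n.font == font do
+--         local char = n.char
+--         if not dependent_vowel[char] then
+--             break
+--         end
+--         local class = mark_four[char] -- pre_mark, above_mark, below_mark or post_mark
+--         if not class or seen[class] then
+--             return c -- a second matra of the same class: stop here
+--         end
+--         seen[class] = true
+--         c = n
+--         n = c.next
+--     end
+--     return c
+-- end
+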
+-- todo: first test for font then for subtype
+
+local insert, imerge = table.insert, table.imerge
+local next = next
+
+local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+local report_devanagari = logs.reporter("otf","devanagari")
+
+fonts = fonts or { }
+fonts.analyzers = fonts.analyzers or { }
+fonts.analyzers.methods = fonts.analyzers.methods or { node = { otf = { } } }
+
+local otf = fonts.handlers.otf
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
+local handlers = otf.handlers
+local methods = fonts.analyzers.methods
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local processcharacters = nodes.handlers.characters
+
+local insert_node_after = node.insert_after
+local copy_node = node.copy
+local free_node = node.free
+local remove_node = node.remove
+local flush_list = node.flush_list
+
+local unsetvalue = attributes.unsetvalue
+
+local fontdata = fonts.hashes.identifiers
+
+local a_state = attributes.private('state')
+local a_syllabe = attributes.private('syllabe')
+
+local dotted_circle = 0x25CC
+
+local states = fonts.analyzers.states -- not features
+
+local s_rphf = states.rphf
+local s_half = states.half
+local s_pref = states.pref
+local s_blwf = states.blwf
+local s_pstf = states.pstf
+
+-- In due time there will be entries here for scripts like Bengali, Gujarati,
+-- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the
+-- code points.
+
+local consonant = {
+ [0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true,
+ [0x0919] = true, [0x091A] = true, [0x091B] = true, [0x091C] = true,
+ [0x091D] = true, [0x091E] = true, [0x091F] = true, [0x0920] = true,
+ [0x0921] = true, [0x0922] = true, [0x0923] = true, [0x0924] = true,
+ [0x0925] = true, [0x0926] = true, [0x0927] = true, [0x0928] = true,
+ [0x0929] = true, [0x092A] = true, [0x092B] = true, [0x092C] = true,
+ [0x092D] = true, [0x092E] = true, [0x092F] = true, [0x0930] = true,
+ [0x0931] = true, [0x0932] = true, [0x0933] = true, [0x0934] = true,
+ [0x0935] = true, [0x0936] = true, [0x0937] = true, [0x0938] = true,
+ [0x0939] = true, [0x0958] = true, [0x0959] = true, [0x095A] = true,
+ [0x095B] = true, [0x095C] = true, [0x095D] = true, [0x095E] = true,
+ [0x095F] = true, [0x0979] = true, [0x097A] = true,
+}
+
+local independent_vowel = {
+ [0x0904] = true, [0x0905] = true, [0x0906] = true, [0x0907] = true,
+ [0x0908] = true, [0x0909] = true, [0x090A] = true, [0x090B] = true,
+ [0x090C] = true, [0x090D] = true, [0x090E] = true, [0x090F] = true,
+ [0x0910] = true, [0x0911] = true, [0x0912] = true, [0x0913] = true,
+ [0x0914] = true, [0x0960] = true, [0x0961] = true, [0x0972] = true,
+ [0x0973] = true, [0x0974] = true, [0x0975] = true, [0x0976] = true,
+ [0x0977] = true,
+}
+
+local dependent_vowel = { -- matra
+ [0x093A] = true, [0x093B] = true, [0x093E] = true, [0x093F] = true,
+ [0x0940] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
+ [0x0944] = true, [0x0945] = true, [0x0946] = true, [0x0947] = true,
+ [0x0948] = true, [0x0949] = true, [0x094A] = true, [0x094B] = true,
+ [0x094C] = true, [0x094E] = true, [0x094F] = true, [0x0955] = true,
+ [0x0956] = true, [0x0957] = true, [0x0962] = true, [0x0963] = true,
+}
+
+local vowel_modifier = {
+ [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x0903] = true,
+ -- A8E0 - A8F1 are cantillation marks for the Samaveda and may not belong here.
+ [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
+ [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
+ [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
+ [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
+ [0xA8F0] = true, [0xA8F1] = true,
+}
+
+local stress_tone_mark = {
+ [0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true,
+}
+
+local c_nukta = 0x093C -- used to be tables
+local c_halant = 0x094D -- used to be tables
+local c_ra = 0x0930 -- used to be tables
+local c_anudatta = 0x0952 -- used to be tables
+local c_nbsp = 0x00A0 -- used to be tables
+local c_zwnj = 0x200C -- used to be tables
+local c_zwj = 0x200D -- used to be tables
+
+local zw_char = { -- could also be inlined
+ [0x200C] = true,
+ [0x200D] = true,
+}
+
+local pre_mark = {
+ [0x093F] = true, [0x094E] = true,
+}
+
+local above_mark = {
+ [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x093A] = true,
+ [0x0945] = true, [0x0946] = true, [0x0947] = true, [0x0948] = true,
+ [0x0951] = true, [0x0953] = true, [0x0954] = true, [0x0955] = true,
+ [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
+ [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
+ [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
+ [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
+ [0xA8F0] = true, [0xA8F1] = true,
+}
+
+local below_mark = {
+ [0x093C] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
+ [0x0944] = true, [0x094D] = true, [0x0952] = true, [0x0956] = true,
+ [0x0957] = true, [0x0962] = true, [0x0963] = true,
+}
+
+local post_mark = {
+ [0x0903] = true, [0x093B] = true, [0x093E] = true, [0x0940] = true,
+ [0x0949] = true, [0x094A] = true, [0x094B] = true, [0x094C] = true,
+ [0x094F] = true,
+}
+
+local mark_four = { } -- As we access these frequently an extra hash is used.
+
+for k, v in next, pre_mark do mark_four[k] = pre_mark end
+for k, v in next, above_mark do mark_four[k] = above_mark end
+for k, v in next, below_mark do mark_four[k] = below_mark end
+for k, v in next, post_mark do mark_four[k] = post_mark end
+
+local mark_above_below_post = { }
+
+for k, v in next, above_mark do mark_above_below_post[k] = above_mark end
+for k, v in next, below_mark do mark_above_below_post[k] = below_mark end
+for k, v in next, post_mark do mark_above_below_post[k] = post_mark end
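+
+-- A single lookup like mark_four[char] (or mark_above_below_post[char]) thus answers two
+-- questions at once: whether char is such a mark at all and, via the returned class table
+-- (pre_mark, above_mark, below_mark or post_mark), which kind it is.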
+
+-- Again, this table can be extended for scripts other than devanagari. Actually,
+-- for ConTeXt this kind of data is kept elsewhere so eventually we might move
+-- tables to someplace else.
+
+local reorder_class = {
+ [0x0930] = "before postscript",
+ [0x093F] = "before half",
+ [0x0940] = "after subscript",
+ [0x0941] = "after subscript",
+ [0x0942] = "after subscript",
+ [0x0943] = "after subscript",
+ [0x0944] = "after subscript",
+ [0x0945] = "after subscript",
+ [0x0946] = "after subscript",
+ [0x0947] = "after subscript",
+ [0x0948] = "after subscript",
+ [0x0949] = "after subscript",
+ [0x094A] = "after subscript",
+ [0x094B] = "after subscript",
+ [0x094C] = "after subscript",
+ [0x0962] = "after subscript",
+ [0x0963] = "after subscript",
+ [0x093E] = "after subscript",
+}
+
+-- We use some pseudo features as we need to manipulate the nodelist based
+-- on information in the font as well as already applied features.
+
+local dflt_true = {
+ dflt = true
+}
+
+local dev2_defaults = {
+ dev2 = dflt_true,
+}
+
+local deva_defaults = {
+ dev2 = dflt_true,
+ deva = dflt_true,
+}
+
+local false_flags = { false, false, false, false }
+
+local both_joiners_true = {
+ [0x200C] = true,
+ [0x200D] = true,
+}
+
+local sequence_reorder_matras = {
+ chain = 0,
+ features = { dv01 = dev2_defaults },
+ flags = false_flags,
+ name = "dv01_reorder_matras",
+ subtables = { "dv01_reorder_matras" },
+ type = "devanagari_reorder_matras",
+}
+
+local sequence_reorder_reph = {
+ chain = 0,
+ features = { dv02 = dev2_defaults },
+ flags = false_flags,
+ name = "dv02_reorder_reph",
+ subtables = { "dv02_reorder_reph" },
+ type = "devanagari_reorder_reph",
+}
+
+local sequence_reorder_pre_base_reordering_consonants = {
+ chain = 0,
+ features = { dv03 = dev2_defaults },
+ flags = false_flags,
+ name = "dv03_reorder_pre_base_reordering_consonants",
+ subtables = { "dv03_reorder_pre_base_reordering_consonants" },
+ type = "devanagari_reorder_pre_base_reordering_consonants",
+}
+
+local sequence_remove_joiners = {
+ chain = 0,
+ features = { dv04 = deva_defaults },
+ flags = false_flags,
+ name = "dv04_remove_joiners",
+ subtables = { "dv04_remove_joiners" },
+ type = "devanagari_remove_joiners",
+}
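+
+-- The type fields of these sequences match the handlers.devanagari_* functions defined
+-- further down in this file; presumably that is how the generic otf node processor finds
+-- the code to run for these pseudo features.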
+
+-- Looping over the features is twice as efficient as looping over basic forms (some
+-- 350 checks instead of 750 for one font). This is something to keep an eye on
+-- as it might depend on the font. Not that it's a bottleneck.
+
+local basic_shaping_forms = {
+ nukt = true,
+ akhn = true,
+ rphf = true,
+ pref = true,
+ rkrf = true,
+ blwf = true,
+ half = true,
+ pstf = true,
+ vatu = true,
+ cjct = true,
+}
+
+local function initializedevanagi(tfmdata)
+ local script, language = otf.scriptandlanguage(tfmdata,attr) -- todo: take fast variant
+ if script == "deva" or script == "dev2" then
+ local resources = tfmdata.resources
+ local lookuphash = resources.lookuphash
+ if not lookuphash["dv01"] then
+ report_devanagari("adding devanagari features to font")
+ --
+ local features = resources.features
+ local gsubfeatures = features.gsub
+ local sequences = resources.sequences
+ local sharedfeatures = tfmdata.shared.features
+ --
+ local lastmatch = 0
+ for s=1,#sequences do -- classify chars
+ local features = sequences[s].features
+ if features then
+ for k, v in next, features do
+ if basic_shaping_forms[k] then
+ lastmatch = s
+ end
+ end
+ end
+ end
+ local insertindex = lastmatch + 1
+ --
+ lookuphash["dv04_remove_joiners"] = both_joiners_true
+ --
+ gsubfeatures["dv01"] = dev2_defaults -- reorder matras
+ gsubfeatures["dv02"] = dev2_defaults -- reorder reph
+ gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
+ gsubfeatures["dv04"] = deva_defaults -- remove joiners
+ --
+ insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
+ insert(sequences,insertindex,sequence_reorder_reph)
+ insert(sequences,insertindex,sequence_reorder_matras)
+ insert(sequences,insertindex,sequence_remove_joiners)
+ --
+ if script == "deva" then
+ sharedfeatures["dv04"] = true -- dv04_remove_joiners
+ end
+ --
+ if script == "dev2" then
+ sharedfeatures["dv01"] = true -- dv01_reorder_matras
+ sharedfeatures["dv02"] = true -- dv02_reorder_reph
+ sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
+ sharedfeatures["dv04"] = true -- dv04_remove_joiners
+ end
+ --
+ end
+ end
+end
+
+registerotffeature {
+ name = "devanagari",
+ description = "inject additional features",
+ default = true,
+ initializers = {
+ node = initializedevanagi,
+ },
+}
+
+-- hm, this is applied to one character:
+
+local function deva_initialize(font,attr)
+
+ local tfmdata = fontdata[font]
+ local resources = tfmdata.resources
+ local lookuphash = resources.lookuphash
+
+ local datasets = otf.dataset(tfmdata,font,attr)
+ local devanagaridata = datasets.devanagari
+
+ if devanagaridata then -- maybe also check for e.g. reph
+
+ return lookuphash, devanagaridata.reph, devanagaridata.vattu, devanagaridata.blwfcache
+
+ else
+
+ devanagaridata = { }
+ datasets.devanagari = devanagaridata
+
+ local reph = false
+ local vattu = false
+ local blwfcache = { }
+
+ local sequences = resources.sequences
+
+ for s=1,#sequences do -- triggers creation of dataset
+ -- local sequence = sequences[s]
+ local dataset = datasets[s]
+ if dataset and dataset[1] then -- value
+ local kind = dataset[4]
+ if kind == "rphf" then
+ -- deva
+ reph = true
+ elseif kind == "blwf" then
+ -- deva
+ vattu = true
+ -- dev2
+ -- local subtables = sequence.subtables -- dataset[5].subtables
+ local subtables = dataset[5].subtables
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ for k, v in next, lookupcache do
+ blwfcache[k] = blwfcache[k] or v
+ end
+ end
+ end
+ end
+ end
+ end
+
+ devanagaridata.reph = reph
+ devanagaridata.vattu = vattu
+ devanagaridata.blwfcache = blwfcache
+
+ return lookuphash, reph, vattu, blwfcache
+
+ end
+
+end
+
+local function deva_reorder(head,start,stop,font,attr)
+
+    local lookuphash, reph, vattu, blwfcache = deva_initialize(font,attr) -- could be inlined but ugly
+
+ local current = start
+ local n = start.next
+ local base = nil
+ local firstcons = nil
+ local lastcons = nil
+ local basefound = false
+
+ if start.char == c_ra and n.char == c_halant and reph then
+ -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
+ -- from candidates for base consonants
+ if n == stop then
+ return head, stop
+ end
+ if n.next.char == c_zwj then
+ current = start
+ else
+ current = n.next
+ start[a_state] = s_rphf
+ end
+ end
+
+ if current.char == c_nbsp then
+ -- Stand Alone cluster
+ if current == stop then
+ stop = stop.prev
+ head = remove_node(head,current)
+ free_node(current)
+ return head, stop
+ else
+ base, firstcons, lastcons = current, current, current
+ current = current.next
+ if current ~= stop then
+ if current.char == c_nukta then
+ current = current.next
+ end
+ if current.char == c_zwj then
+ if current ~= stop then
+ local next = current.next
+ if next ~= stop and next.char == c_halant then
+ current = next
+ next = current.next
+ local tmp = next.next
+ local changestop = next == stop
+ local tempcurrent = copy_node(next)
+ local nextcurrent = copy_node(current)
+ tempcurrent.next = nextcurrent
+ nextcurrent.prev = tempcurrent
+ tempcurrent[a_state] = s_blwf
+ tempcurrent = processcharacters(tempcurrent)
+ tempcurrent[a_state] = unsetvalue
+ if next.char == tempcurrent.char then
+ flush_list(tempcurrent)
+ local n = copy_node(current)
+ current.char = dotted_circle
+ head = insert_node_after(head, current, n)
+ else
+ current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
+ local freenode = current.next
+ current.next = tmp
+ tmp.prev = current
+ free_node(freenode)
+ flush_list(tempcurrent)
+ if changestop then
+ stop = current
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ while not basefound do
+ -- find base consonant
+ if consonant[current.char] then
+ current[a_state] = s_half
+ if not firstcons then
+ firstcons = current
+ end
+ lastcons = current
+ if not base then
+ base = current
+ elseif blwfcache[current.char] then
+ -- consonant has below-base (or post-base) form
+ current[a_state] = s_blwf
+ else
+ base = current
+ end
+ end
+ basefound = current == stop
+ current = current.next
+ end
+
+ if base ~= lastcons then
+ -- if base consonant is not last one then move halant from base consonant to last one
+ local np = base
+ local n = base.next
+ if n.char == c_nukta then
+ np = n
+ n = n.next
+ end
+ if n.char == c_halant then
+ if lastcons ~= stop then
+ local ln = lastcons.next
+ if ln.char == c_nukta then
+ lastcons = ln
+ end
+ end
+ -- local np = n.prev
+ local nn = n.next
+ local ln = lastcons.next -- what if lastcons is nn ?
+ np.next = nn
+ nn.prev = np
+ lastcons.next = n
+ if ln then
+ ln.prev = n
+ end
+ n.next = ln
+ n.prev = lastcons
+ if lastcons == stop then
+ stop = n
+ end
+ end
+ end
+
+ n = start.next
+ if start.char == c_ra and n.char == c_halant and not (n ~= stop and zw_char[n.next.char]) then
+ -- if syllable starts with Ra + H then move this combination so that it follows either:
+ -- the post-base 'matra' (if any) or the base consonant
+ local matra = base
+ if base ~= stop then
+ local next = base.next
+ if dependent_vowel[next.char] then
+ matra = next
+ end
+ end
+ -- [sp][start][n][nn] [matra|base][?]
+ -- [matra|base][start] [n][?] [sp][nn]
+ local sp = start.prev
+ local nn = n.next
+ local mn = matra.next
+ if sp then
+ sp.next = nn
+ end
+ nn.prev = sp
+ matra.next = start
+ start.prev = matra
+ n.next = mn
+ if mn then
+ mn.prev = n
+ end
+ if head == start then
+ head = nn
+ end
+ start = nn
+ if matra == stop then
+ stop = n
+ end
+ end
+
+ local current = start
+ while current ~= stop do
+ local next = current.next
+ if next ~= stop and next.char == c_halant and next.next.char == c_zwnj then
+ current[a_state] = unsetvalue
+ end
+ current = next
+ end
+
+ if base ~= stop and base[a_state] then
+ local next = base.next
+ if next.char == c_halant and not (next ~= stop and next.next.char == c_zwj) then
+ base[a_state] = unsetvalue
+ end
+ end
+
+ -- ToDo: split two- or three-part matras into their parts. Then, move the left 'matra' part to the beginning of the syllable.
+    -- Not necessary for Devanagari. However, it is necessary for other scripts, such as Tamil (e.g. TAMIL VOWEL SIGN O - 0BCA)
+
+ -- classify consonants and 'matra' parts as pre-base, above-base (Reph), below-base or post-base, and group elements of the syllable (consonants and 'matras') according to this classification
+
+ local current, allreordered, moved = start, false, { [base] = true }
+ local a, b, p, bn = base, base, base, base.next
+ if base ~= stop and bn.char == c_nukta then
+ a, b, p = bn, bn, bn
+ end
+ while not allreordered do
+ -- current is always consonant
+ local c = current
+ local n = current.next
+ local l = nil -- used ?
+ if c ~= stop then
+ if n.char == c_nukta then
+ c = n
+ n = n.next
+ end
+ if c ~= stop then
+ if n.char == c_halant then
+ c = n
+ n = n.next
+ end
+ while c ~= stop and dependent_vowel[n.char] do
+ c = n
+ n = n.next
+ end
+ if c ~= stop then
+ if vowel_modifier[n.char] then
+ c = n
+ n = n.next
+ end
+ if c ~= stop and stress_tone_mark[n.char] then
+ c = n
+ n = n.next
+ end
+ end
+ end
+ end
+ local bp = firstcons.prev
+ local cn = current.next
+ local last = c.next
+ while cn ~= last do
+ -- move pre-base matras...
+ if pre_mark[cn.char] then
+ if bp then
+ bp.next = cn
+ end
+ local next = cn.next
+ local prev = cn.prev
+ if next then
+ next.prev = prev
+ end
+ prev.next = next
+ if cn == stop then
+ stop = prev
+ end
+ cn.prev = bp
+ cn.next = firstcons
+ firstcons.prev = cn
+ if firstcons == start then
+ if head == start then
+ head = cn
+ end
+ start = cn
+ end
+ break
+ end
+ cn = cn.next
+ end
+ allreordered = c == stop
+ current = c.next
+ end
+
+ if reph or vattu then
+ local current, cns = start, nil
+ while current ~= stop do
+ local c = current
+ local n = current.next
+ if current.char == c_ra and n.char == c_halant then
+ c = n
+ n = n.next
+ local b, bn = base, base
+ while bn ~= stop do
+ local next = bn.next
+ if dependent_vowel[next.char] then
+ b = next
+ end
+ bn = next
+ end
+ if current[a_state] == s_rphf then
+ -- position Reph (Ra + H) after post-base 'matra' (if any) since these
+ -- become marks on the 'matra', not on the base glyph
+ if b ~= current then
+ if current == start then
+ if head == start then
+ head = n
+ end
+ start = n
+ end
+ if b == stop then
+ stop = c
+ end
+ local prev = current.prev
+ if prev then
+ prev.next = n
+ end
+ if n then
+ n.prev = prev
+ end
+ local next = b.next
+ c.next = next
+ if next then
+ next.prev = c
+ end
+ c.next = next
+ b.next = current
+ current.prev = b
+ end
+ elseif cns and cns.next ~= current then
+ -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
+ local cp, cnsn = current.prev, cns.next
+ if cp then
+ cp.next = n
+ end
+ if n then
+ n.prev = cp
+ end
+ cns.next = current
+ current.prev = cns
+ c.next = cnsn
+ if cnsn then
+ cnsn.prev = c
+ end
+ if c == stop then
+ stop = cp
+ break
+ end
+ current = n.prev
+ end
+ else
+ local char = current.char
+ if consonant[char] or char == c_nbsp then -- maybe combined hash
+ cns = current
+ local next = cns.next
+ if next.char == c_halant then
+ cns = next
+ end
+ end
+ end
+ current = current.next
+ end
+ end
+
+ if base.char == c_nbsp then
+ head = remove_node(head,base)
+ free_node(base)
+ end
+
+ return head, stop
+end
+
+-- If a pre-base matra character had been reordered before applying basic features,
+-- the glyph can be moved closer to the main consonant based on whether half-forms had been formed.
+-- Actual position for the matra is defined as “after last standalone halant glyph,
+-- after initial matra position and before the main consonant”.
+-- If ZWJ or ZWNJ follow this halant, position is moved after it.
+
+-- so we break out ... this is only done for the first 'word' (if we feed words we can as
+-- well test for non-glyph nodes).
+
+function handlers.devanagari_reorder_matras(head,start,kind,lookupname,replacement) -- no leak
+ local current = start -- we could cache attributes here
+ local startfont = start.font
+ local startattr = start[a_syllabe]
+ -- can be fast loop
+    while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
+ local next = current.next
+ if current.char == c_halant and not current[a_state] then
+            if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr and zw_char[next.char] then
+ current = next
+ end
+ local startnext = start.next
+ head = remove_node(head,start)
+ local next = current.next
+ if next then
+ next.prev = start
+ end
+ start.next = next
+ current.next = start
+ start.prev = current
+ start = startnext
+ break
+ end
+ current = next
+ end
+ return head, start, true
+end
+
+-- todo: way more caching of attributes and font
+
+-- Reph’s original position is always at the beginning of the syllable (i.e. it is not reordered at the character reordering stage).
+-- However, it will be reordered according to the basic-forms shaping results.
+-- Possible positions for reph, depending on the script, are: after main, before post-base consonant forms,
+-- and after post-base consonant forms.
+
+-- 1 If reph should be positioned after post-base consonant forms, proceed to step 5.
+-- 2 If the reph repositioning class is not after post-base: target position is after the first explicit halant glyph between
+-- the first post-reph consonant and last main consonant. If ZWJ or ZWNJ are following this halant, position is moved after it.
+-- If such position is found, this is the target position. Otherwise, proceed to the next step.
+-- Note: in old-implementation fonts, where classifications were fixed in shaping engine,
+-- there was no case where reph position will be found on this step.
+-- 3 If reph should be repositioned after the main consonant: find the first consonant not ligated with main,
+-- or find the first consonant that is not a potential pre-base reordering Ra.
+-- 4 If reph should be positioned before post-base consonant, find first post-base classified consonant not ligated with main.
+-- If no consonant is found, the target position should be before the first matra, syllable modifier sign or vedic sign.
+-- 5 If no consonant is found in steps 3 or 4, move reph to a position immediately before the first post-base matra,
+-- syllable modifier sign or vedic sign that has a reordering class after the intended reph position.
+-- For example, if the reordering position for reph is post-main, it will skip above-base matras that also have a post-main position.
+-- 6 Otherwise, reorder reph to the end of the syllable.
+
+-- hm, this only looks at the start of a nodelist ... is this supposed to be line based?
+
+function handlers.devanagari_reorder_reph(head,start,kind,lookupname,replacement)
+    -- since in Devanagari reph has reordering position 'before postscript', dev2 only follows steps 2, 4 and 6;
+    -- the other steps are still ToDo (required for scripts other than dev2)
+ local current = start.next
+ local startnext = nil
+ local startprev = nil
+ local startfont = start.font
+ local startattr = start[a_syllabe]
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 2
+ if current.char == c_halant and not current[a_state] then
+ local next = current.next
+ if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr and zw_char[next.char] then
+ current = next
+ end
+ startnext = start.next
+ head = remove_node(head,start)
+ local next = current.next
+ if next then
+ next.prev = start
+ end
+ start.next = next
+ current.next = start
+ start.prev = current
+ start = startnext
+ startattr = start[a_syllabe]
+ break
+ end
+ current = current.next
+ end
+ if not startnext then
+ current = start.next
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 4
+ if current[a_state] == s_pstf then --post-base
+ startnext = start.next
+ head = remove_node(head,start)
+ local prev = current.prev
+ start.prev = prev
+ prev.next = start
+ start.next = current
+ current.prev = start
+ start = startnext
+ startattr = start[a_syllabe]
+ break
+ end
+ current = current.next
+ end
+ end
+ -- ToDo: determine position for reph with reordering position other than 'before postscript'
+ -- (required for scripts other than dev2)
+ -- leaks
+ if not startnext then
+ current = start.next
+ local c = nil
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 5
+ if not c then
+ local char = current.char
+ -- todo: combine in one
+ if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
+ c = current
+ end
+ end
+ current = current.next
+ end
+        -- here we can lose the old start node: maybe best split cases
+ if c then
+ startnext = start.next
+ head = remove_node(head,start)
+ local prev = c.prev
+ start.prev = prev
+ prev.next = start
+ start.next = c
+ c.prev = start
+ -- end
+ start = startnext
+ startattr = start[a_syllabe]
+ end
+ end
+ -- leaks
+ if not startnext then
+ current = start
+ local next = current.next
+ while next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr do --step 6
+ current = next
+ next = current.next
+ end
+ if start ~= current then
+ startnext = start.next
+ head = remove_node(head,start)
+ local next = current.next
+ if next then
+ next.prev = start
+ end
+ start.next = next
+ current.next = start
+ start.prev = current
+ start = startnext
+ end
+ end
+ --
+ return head, start, true
+end
+
+-- we can cache some checking (v)
+
+-- If a pre-base reordering consonant is found, reorder it according to the following rules:
+--
+-- 1 Only reorder a glyph produced by substitution during application of the feature.
+-- (Note that a font may shape a Ra consonant with the feature generally but block it in certain contexts.)
+-- 2 Try to find a target position the same way as for pre-base matra. If it is found, reorder pre-base consonant glyph.
+-- 3 If position is not found, reorder immediately before main consonant.
+
+-- UNTESTED: NOT CALLED IN EXAMPLE
+
+function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start,kind,lookupname,replacement)
+ local current = start
+ local startnext = nil
+ local startprev = nil
+ local startfont = start.font
+ local startattr = start[a_syllabe]
+ -- can be fast for loop + caching state
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
+ local next = current.next
+ if current.char == c_halant and not current[a_state] then
+            if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr then
+ local char = next.char
+ if char == c_zwnj or char == c_zwj then
+ current = next
+ end
+ end
+ startnext = start.next
+            head = remove_node(head,start)
+ local next = current.next
+ if next then
+ next.prev = start
+ end
+ start.next = next
+ current.next = start
+ start.prev = current
+ start = startnext
+ break
+ end
+ current = next
+ end
+ if not startnext then
+ current = start.next
+ startattr = start[a_syllabe]
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
+ if not consonant[current.char] and current[a_state] then --main
+ startnext = start.next
+                head = remove_node(head,start)
+ local prev = current.prev
+ start.prev = prev
+ prev.next = start
+ start.next = current
+ current.prev = start
+ start = startnext
+ break
+ end
+ current = current.next
+ end
+ end
+ return head, start, true
+end
+
+function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
+ local stop = start.next
+ local startfont = start.font
+ while stop and stop.id == glyph_code and stop.subtype<256 and stop.font == startfont do
+ local char = stop.char
+ if char == c_zwnj or char == c_zwj then
+ stop = stop.next
+ else
+ break
+ end
+ end
+ if stop then
+ stop.prev.next = nil
+ stop.prev = start.prev
+ end
+ local prev = start.prev
+ if prev then
+ prev.next = stop
+ end
+ flush_list(start)
+ return head, stop, true
+end
+
+local valid = {
+ rphf = true,
+ pref = true,
+ half = true,
+ blwf = true,
+ pstf = true,
+}
+
+local function dev2_initialize(font,attr)
+
+ local tfmdata = fontdata[font]
+ local resources = tfmdata.resources
+ local lookuphash = resources.lookuphash
+
+ local datasets = otf.dataset(tfmdata,font,attr)
+ local devanagaridata = datasets.devanagari
+
+ if devanagaridata then -- maybe also check for e.g. seqsubset
+
+ return lookuphash, devanagaridata.seqsubset
+
+ else
+
+ devanagaridata = { }
+ datasets.devanagari = devanagaridata
+
+ local seqsubset = { }
+ devanagaridata.seqsubset = seqsubset
+
+ local sequences = resources.sequences
+
+ for s=1,#sequences do
+ -- local sequence = sequences[s]
+ local dataset = datasets[s]
+ if dataset and dataset[1] then -- featurevalue
+ local kind = dataset[4]
+ if kind and valid[kind] then
+ -- could become a function call
+ -- local subtables = sequence.subtables
+ local subtables = dataset[5].subtables
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local reph = false
+ local chain = dataset[3]
+                            if chain ~= 0 then --rphf is the result of a chain
+ --ToDo: rphf might be result of other handler/chainproc
+ else
+ reph = lookupcache[0x0930]
+ if reph then
+ reph = reph[0x094D]
+ if reph then
+ reph = reph["ligature"]
+ end
+ end
+                                --ToDo: rphf actually acts on consonant + halant. This consonant might not necessarily be 0x0930 ... (but for dev2 it is)
+ end
+ seqsubset[#seqsubset+1] = { kind, lookupcache, reph }
+ end
+ end
+ end
+ end
+ end
+
+ lookuphash["dv01_reorder_matras"] = pre_mark -- move to initializer ?
+
+ return lookuphash, seqsubset
+
+ end
+
+end
+
+-- this one will be merged into the caller: it saves a call, but we will then make functions
+-- of the actions
+
+local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over (determine stop in sweep)
+
+ local lookuphash, seqsubset = dev2_initialize(font,attr)
+
+ local reph, pre_base_reordering_consonants = false, { } -- was nil ... probably went unnoticed because never assigned
+ local halfpos, basepos, subpos, postpos = nil, nil, nil, nil
+ local locl = { }
+
+ for i=1,#seqsubset do
+
+ -- maybe quit if start == stop
+
+ local subset = seqsubset[i]
+ local kind = subset[1]
+ local lookupcache = subset[2]
+ if kind == "rphf" then
+ if subset[3] then
+ reph = true
+ end
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or current.char
+ local found = lookupcache[c]
+ if found then
+ local next = current.next
+ local n = locl[next] or next.char
+ if found[n] then --above-base: rphf Consonant + Halant
+ local afternext = next ~= stop and next.next
+ if afternext and zw_char[afternext.char] then -- ZWJ and ZWNJ prevent creation of reph
+ current = next
+ current = current.next
+ elseif current == start then
+ current[a_state] = s_rphf
+ current = next
+ else
+ current = next
+ end
+ end
+ end
+ end
+ current = current.next
+ end
+ elseif kind == "pref" then
+            -- why not global? pretty inefficient this way
+ -- this will move to the initializer and we will store the hash in dataset
+            for k, v in next, lookupcache[0x094D] do
+ pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
+ end
+ --
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or current.char
+ local found = lookupcache[c]
+ if found then
+ local next = current.next
+ local n = locl[next] or next.char
+ if found[n] then
+ current[a_state] = s_pref
+ next[a_state] = s_pref
+ current = next
+ end
+ end
+ end
+ current = current.next
+ end
+ elseif kind == "half" then -- half forms: half / Consonant + Halant
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or current.char
+ local found = lookupcache[c]
+ if found then
+ local next = current.next
+ local n = locl[next] or next.char
+ if found[n] then
+ if next ~= stop and next.next.char == c_zwnj then --ZWNJ prevent creation of half
+ current = current.next
+ else
+ current[a_state] = s_half
+ if not halfpos then
+ halfpos = current
+ end
+ end
+ current = next
+ end
+ end
+ end
+ current = current.next
+ end
+ elseif kind == "blwf" then -- below-base: blwf / Halant + Consonant
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or current.char
+ local found = lookupcache[c]
+ if found then
+ local next = current.next
+ local n = locl[next] or next.char
+ if found[n] then
+ current[a_state] = s_blwf
+ next[a_state] = s_blwf
+ current = next
+ subpos = current
+ end
+ end
+ end
+ current = current.next
+ end
+ elseif kind == "pstf" then -- post-base: pstf / Halant + Consonant
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or current.char
+ local found = lookupcache[c]
+ if found then
+ local next = current.next
+ local n = locl[next] or next.char
+ if found[n] then
+ current[a_state] = s_pstf
+ next[a_state] = s_pstf
+ current = next
+ postpos = current
+ end
+ end
+ end
+ current = current.next
+ end
+ end
+ end
+
+ -- this one changes per word
+
+ lookuphash["dv02_reorder_reph"] = { [reph] = true }
+ lookuphash["dv03_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants
+
+ local current, base, firstcons = start, nil, nil
+
+ if start[a_state] == s_rphf then
+ -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+ current = start.next.next
+ end
+
+ if current ~= stop.next and current.char == c_nbsp then
+ -- Stand Alone cluster
+ if current == stop then
+ stop = stop.prev
+ head = remove_node(head,current)
+ free_node(current)
+ return head, stop
+ else
+ base = current
+ current = current.next
+ if current ~= stop then
+ local char = current.char
+ if char == c_nukta then
+ current = current.next
+ char = current.char
+ end
+ if char == c_zwj then
+ local next = current.next
+ if current ~= stop and next ~= stop and next.char == c_halant then
+ current = next
+ next = current.next
+ local tmp = next.next
+ local changestop = next == stop
+ next.next = nil
+ current[a_state] = s_pref
+ current = processcharacters(current)
+ current[a_state] = s_blwf
+ current = processcharacters(current)
+ current[a_state] = s_pstf
+ current = processcharacters(current)
+ current[a_state] = unsetvalue
+ if current.char == c_halant then
+ current.next.next = tmp
+ local nc = copy_node(current)
+ current.char = dotted_circle
+ head = insert_node_after(head,current,nc)
+ else
+ current.next = tmp -- assumes that result of pref, blwf, or pstf consists of one node
+ if changestop then
+ stop = current
+ end
+ end
+ end
+ end
+ end
+ end
+ else -- not Stand Alone cluster
+ local last = stop.next
+ while current ~= last do -- find base consonant
+ local next = current.next
+ if consonant[current.char] then
+ if not (current ~= stop and next ~= stop and next.char == c_halant and next.next.char == c_zwj) then
+ if not firstcons then
+ firstcons = current
+ end
+ -- check whether consonant has below-base or post-base form or is pre-base reordering Ra
+ local a = current[a_state]
+                    if not (a == s_pref or a == s_blwf or a == s_pstf) then
+ base = current
+ end
+ end
+ end
+ current = next
+ end
+ if not base then
+ base = firstcons
+ end
+ end
+
+ if not base then
+ if start[a_state] == s_rphf then
+ start[a_state] = unsetvalue
+ end
+ return head, stop
+ else
+ if base[a_state] then
+ base[a_state] = unsetvalue
+ end
+ basepos = base
+ end
+ if not halfpos then
+ halfpos = base
+ end
+ if not subpos then
+ subpos = base
+ end
+ if not postpos then
+ postpos = subpos or base
+ end
+
+ -- Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
+
+ local moved = { }
+ local current = start
+ local last = stop.next
+ while current ~= last do
+ local char, target, cn = locl[current] or current.char, nil, current.next
+ if not moved[current] and dependent_vowel[char] then
+ if pre_mark[char] then -- Before first half form in the syllable
+ moved[current] = true
+ local prev = current.prev
+ local next = current.next
+ if prev then
+ prev.next = next
+ end
+ if next then
+ next.prev = prev
+ end
+ if current == stop then
+ stop = current.prev
+ end
+ if halfpos == start then
+ if head == start then
+ head = current
+ end
+ start = current
+ end
+ local prev = halfpos.prev
+ if prev then
+ prev.next = current
+ end
+ current.prev = prev
+ halfpos.prev = current
+ current.next = halfpos
+ halfpos = current
+ elseif above_mark[char] then -- After main consonant
+ target = basepos
+ if subpos == basepos then
+ subpos = current
+ end
+ if postpos == basepos then
+ postpos = current
+ end
+ basepos = current
+ elseif below_mark[char] then -- After subjoined consonants
+ target = subpos
+ if postpos == subpos then
+ postpos = current
+ end
+ subpos = current
+ elseif post_mark[char] then -- After post-form consonant
+ target = postpos
+ postpos = current
+ end
+ if mark_above_below_post[char] then
+ local prev = current.prev
+ if prev ~= target then
+ local next = current.next
+ if prev then -- not needed, already tested with target
+ prev.next = next
+ end
+ if next then
+ next.prev = prev
+ end
+ if current == stop then
+ stop = prev
+ end
+ local next = target.next
+ if next then
+ next.prev = current
+ end
+ current.next = next
+ target.next = current
+ current.prev = target
+ end
+ end
+ end
+ current = cn
+ end
+
+ -- Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
+
+ local current, c = start, nil
+ while current ~= stop do
+ local char = current.char
+ if char == c_halant or stress_tone_mark[char] then
+ if not c then
+ c = current
+ end
+ else
+ c = nil
+ end
+ local next = current.next
+ if c and next.char == c_nukta then
+ if head == c then
+ head = next
+ end
+ if stop == next then
+ stop = current
+ end
+ local prev = c.prev
+ if prev then
+ prev.next = next
+ end
+ next.prev = prev
+ local nextnext = next.next
+ current.next = nextnext
+ local nextnextnext = nextnext.next
+ if nextnextnext then
+ nextnextnext.prev = current
+ end
+ c.prev = nextnext
+ nextnext.next = c
+ end
+ if stop == current then break end
+ current = current.next
+ end
+
+ if base.char == c_nbsp then
+ head = remove_node(head, base)
+ free_node(base)
+ end
+
+ return head, stop
+end
+
+-- cleaned up and optimized ... needs checking (local, check order, fixes, extra hash, etc)
+
+local separator = { }
+
+imerge(separator,consonant)
+imerge(separator,independent_vowel)
+imerge(separator,dependent_vowel)
+imerge(separator,vowel_modifier)
+imerge(separator,stress_tone_mark)
+
+separator[0x093C] = true -- nukta
+separator[0x094D] = true -- halant
+
+local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel
+ -- why two variants ... the comment suggests that it's the same ruleset
+ local n = c.next
+ if not n then
+ return c
+ end
+ if variant == 1 then
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if v and n.char == c_nukta then
+ n = n.next
+ if n then
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ end
+ end
+ if n and v then
+ local nn = n.next
+ if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and nnn.subtype<256 and nnn.font == font then
+ local nnc = nn.char
+ local nnnc = nnn.char
+ if nnc == c_zwj and consonant[nnnc] then
+ c = nnn
+ elseif (nnc == c_zwnj or nnc == c_zwj) and nnnc == c_halant then
+ local nnnn = nnn.next
+ if nnnn and nnnn.id == glyph_code and consonant[nnnn.char] and nnnn.subtype<256 and nnnn.font == font then
+ c = nnnn
+ end
+ end
+ end
+ end
+ end
+ elseif variant == 2 then
+ if n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then
+ c = n
+ end
+ n = c.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ local nn = n.next
+ if nn then
+ local nv = nn.id == glyph_code and nn.subtype<256 and nn.font == font
+ if nv and zw_char[n.char] then
+ n = nn
+ nn = nn.next
+ nv = nn.id == glyph_code and nn.subtype<256 and nn.font == font
+ end
+ if nn and nv and n.char == c_halant and consonant[nn.char] then
+ c = nn
+ end
+ end
+ end
+ end
+ -- c = ms_matra(c)
+ local n = c.next
+ if not n then
+ return c
+ end
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ local char = n.char
+ if dependent_vowel[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if char == c_nukta then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if char == c_halant then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if vowel_modifier[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if stress_tone_mark[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if stress_tone_mark[char] then
+ return n
+ else
+ return c
+ end
+end
+
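+-- The scanners in this file keep repeating the same guard: the next node
+-- must be a glyph of the current font with subtype < 256. As a standalone
+-- helper that guard would look like this (illustration only, not used: the
+-- real code inlines the test to avoid extra function calls in a hot loop):
+
+local function is_font_glyph(n,font)
+ return n and n.id == glyph_code and n.subtype < 256 and n.font == font
+end
+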
+local function analyze_next_chars_two(c,font)
+ local n = c.next
+ if not n then
+ return c
+ end
+ if n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then
+ c = n
+ end
+ n = c
+ while true do
+ local nn = n.next
+ if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
+ local char = nn.char
+ if char == c_halant then
+ n = nn
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and zw_char[nnn.char] and nnn.subtype<256 and nnn.font == font then
+ n = nnn
+ end
+ elseif char == c_zwnj or char == c_zwj then
+ -- n = nn -- not here (?)
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and nnn.char == c_halant and nnn.subtype<256 and nnn.font == font then
+ n = nnn
+ end
+ else
+ break
+ end
+ local nn = n.next
+ if nn and nn.id == glyph_code and consonant[nn.char] and nn.subtype<256 and nn.font == font then
+ n = nn
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and nnn.char == c_nukta and nnn.subtype<256 and nnn.font == font then
+ n = nnn
+ end
+ c = n
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ --
+ if not c then
+ -- This shouldn't happen I guess.
+ return
+ end
+ local n = c.next
+ if not n then
+ return c
+ end
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ local char = n.char
+ if char == c_anudatta then
+ c = n
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if char == c_halant then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ if char == c_zwnj or char == c_zwj then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ else
+ -- c = ms_matra(c)
+ -- same as one
+ if dependent_vowel[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if char == c_nukta then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if char == c_halant then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ end
+ -- same as one
+ if vowel_modifier[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if stress_tone_mark[char] then
+ c = c.next
+ n = c.next
+ if not n then
+ return c
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ return c
+ end
+ char = n.char
+ end
+ if stress_tone_mark[char] then
+ return n
+ else
+ return c
+ end
+end
+
+local function inject_syntax_error(head,current,mark)
+ local signal = copy_node(current)
+ if mark == pre_mark then
+ signal.char = dotted_circle
+ else
+ current.char = dotted_circle
+ end
+ return insert_node_after(head,current,signal)
+end
+
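+-- For reference, inject_syntax_error pairs an orphaned mark with a dotted
+-- circle (U+25CC): for a pre-base mark the copied node becomes the circle,
+-- for the other classes the current node does, so the resulting stream is
+--
+-- pre_mark : [mark] -> [mark][dotted circle]
+-- others : [mark] -> [dotted circle][mark]
+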
+-- It looks like these two analyzers were written independently but they share
+-- a lot. Common code has been synced.
+
+function methods.deva(head,font,attr)
+ local current, start, done = head, true, false
+ while current do
+ if current.id == glyph_code and current.subtype<256 and current.font == font then
+ done = true
+ local syllablestart = current
+ local syllableend = nil
+ local c = current
+ local n = c.next
+ if n and c.char == c_ra and n.id == glyph_code and n.char == c_halant and n.subtype<256 and n.font == font then
+ local n = n.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ c = n
+ end
+ end
+ local standalone = c.char == c_nbsp
+ if standalone then
+ local prev = current.prev
+ if not prev then
+ -- begin of paragraph or box
+ elseif prev.id ~= glyph_code or prev.subtype>=256 or prev.font ~= font then
+ -- different font or language so quite certainly a different word
+ elseif not separator[prev.char] then
+ -- something that separates words
+ else
+ standalone = false
+ end
+ end
+ if standalone then
+ -- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ syllableend = analyze_next_chars_one(c,font,2)
+ current = syllableend.next -- watch out, here we set current to next
+ if syllablestart ~= syllableend then
+ head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
+ current = current.next
+ end
+ else
+ -- we can delay the n.subtype and n.font and test for say halant first
+ -- as a table access is faster than two function calls (subtype and font are
+ -- pseudo fields) but the code becomes messy (unless we make it a function)
+ local char = current.char
+ if consonant[char] then
+ -- syllable containing consonant
+ local prevc = true
+ while prevc do
+ prevc = false
+ local n = current.next
+ if not n then
+ break
+ end
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ break
+ end
+ local c = n.char
+ if c == c_nukta then
+ n = n.next
+ if not n then
+ break
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ break
+ end
+ c = n.char
+ end
+ if c == c_halant then
+ n = n.next
+ if not n then
+ break
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ break
+ end
+ c = n.char
+ if c == c_zwnj or c == c_zwj then
+ n = n.next
+ if not n then
+ break
+ end
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if not v then
+ break
+ end
+ c = n.char
+ end
+ if consonant[c] then
+ prevc = true
+ current = n
+ end
+ end
+ end
+ local n = current.next
+ if n and n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then
+ -- nukta (not specified in the Microsoft Devanagari OpenType specification)
+ current = n
+ n = current.next
+ end
+ syllableend = current
+ current = n
+ if current then
+ local v = current.id == glyph_code and current.subtype<256 and current.font == font
+ if v then
+ if current.char == c_halant then
+ -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
+ local n = current.next
+ if n and n.id == glyph_code and zw_char[n.char] and n.subtype<256 and n.font == font then
+ -- code collapsed, probably needs checking against the original intention
+ syllableend = n
+ current = n.next
+ else
+ syllableend = current
+ current = n
+ end
+ else
+ -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
+ local c = current.char
+ if dependent_vowel[c] then
+ syllableend = current
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ if v then
+ c = current.char
+ end
+ end
+ if v and vowel_modifier[c] then
+ syllableend = current
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ if v then
+ c = current.char
+ end
+ end
+ if v and stress_tone_mark[c] then
+ syllableend = current
+ current = current.next
+ end
+ end
+ end
+ end
+ if syllablestart ~= syllableend then
+ head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
+ current = current.next
+ end
+ elseif independent_vowel[char] then
+ -- syllable without consonants: VO + [VM] + [SM]
+ syllableend = current
+ current = current.next
+ if current then
+ local v = current.id == glyph_code and current.subtype<256 and current.font == font
+ if v then
+ local c = current.char
+ if vowel_modifier[c] then
+ syllableend = current
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ if v then
+ c = current.char
+ end
+ end
+ if v and stress_tone_mark[c] then
+ syllableend = current
+ current = current.next
+ end
+ end
+ end
+ else
+ local mark = mark_four[char]
+ if mark then
+ head, current = inject_syntax_error(head,current,mark)
+ end
+ current = current.next
+ end
+ end
+ else
+ current = current.next
+ end
+ start = false
+ end
+
+ return head, done
+end
+
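+-- Both handlers return head plus a done flag and are selected by script
+-- tag via the analyzer dispatch in font-ota.lua (see the patch to that
+-- file below), roughly:
+--
+-- local action = methods[script] -- methods.deva or methods.dev2
+-- if type(action) == "function" then
+-- head, done = action(head,font,attr)
+-- end
+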
+-- there is a good chance that when we run into one with subtype < 256 the rest is also done
+-- so maybe we can omit this check (it's pretty hard to get glyphs in the stream out of the blue)
+
+-- handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+
+function methods.dev2(head,font,attr)
+ local current = head
+ local start = true
+ local done = false
+ local syllabe = 0
+ while current do
+ local syllablestart, syllableend = nil, nil
+ if current.id == glyph_code and current.subtype<256 and current.font == font then
+ done = true
+ syllablestart = current
+ local c = current
+ local n = current.next
+ if n and c.char == c_ra and n.id == glyph_code and n.char == c_halant and n.subtype<256 and n.font == font then
+ local n = n.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ c = n
+ end
+ end
+ local char = c.char
+ if independent_vowel[char] then
+ -- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ current = analyze_next_chars_one(c,font,1)
+ syllableend = current
+ else
+ local standalone = char == c_nbsp
+ if standalone then
+ local p = current.prev
+ if not p then
+ -- begin of paragraph or box
+ elseif p.id ~= glyph_code or p.subtype>=256 or p.font ~= font then
+ -- different font or language so quite certainly a different word
+ elseif not separator[p.char] then
+ -- something that separates words
+ else
+ standalone = false
+ end
+ end
+ if standalone then
+ -- Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ current = analyze_next_chars_one(c,font,2)
+ syllableend = current
+ elseif consonant[current.char] then
+ -- WHY current INSTEAD OF c ?
+
+ -- Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
+ current = analyze_next_chars_two(current,font) -- not c !
+ syllableend = current
+ end
+ end
+ end
+ if syllableend then
+ syllabe = syllabe + 1
+ local c = syllablestart
+ local n = syllableend.next
+ while c ~= n do
+ c[a_syllabe] = syllabe
+ c = c.next
+ end
+ end
+ if syllableend and syllablestart ~= syllableend then
+ head, current = dev2_reorder(head,syllablestart,syllableend,font,attr)
+ end
+ if not syllableend and current.id == glyph_code and current.subtype<256 and current.font == font and not current[a_state] then
+ local mark = mark_four[current.char]
+ if mark then
+ head, current = inject_syntax_error(head,current,mark)
+ end
+ end
+ start = false
+ current = current.next
+ end
+
+ return head, done
+end
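+
+-- dev2 tags every node of a recognized syllable with the same a_syllabe
+-- value, which later passes can use to decide whether two nodes belong to
+-- the same syllable. An illustrative check (not used here):
+
+local function same_syllable(a,b)
+ return a[a_syllabe] == b[a_syllabe]
+end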
diff --git a/Master/texmf-dist/tex/context/base/font-ota.lua b/Master/texmf-dist/tex/context/base/font-ota.lua
index edf5996b3df..79fcf3fa200 100644
--- a/Master/texmf-dist/tex/context/base/font-ota.lua
+++ b/Master/texmf-dist/tex/context/base/font-ota.lua
@@ -8,11 +8,11 @@ if not modules then modules = { } end modules ['font-ota'] = {
-- this might become scrp-*.lua
-local type, tostring, match, format, concat = type, tostring, string.match, string.format, table.concat
+local type = type
if not trackers then trackers = { register = function() end } end
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
local fonts, nodes, node = fonts, nodes, node
@@ -28,23 +28,19 @@ analyzers.initializers = initializers
analyzers.methods = methods
analyzers.useunicodemarks = false
+local a_state = attributes.private('state')
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
+local math_code = nodecodes.math
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
local traverse_node_list = node.traverse
+local end_of_math = node.end_of_math
local fontdata = fonts.hashes.identifiers
-local state = attributes.private('state')
local categories = characters and characters.categories or { } -- sorry, only in context
-local tracers = nodes.tracers
-local colortracers = tracers and tracers.colors
-local setnodecolor = colortracers and colortracers.set or function() end
-local resetnodecolor = colortracers and colortracers.reset or function() end
-
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
@@ -53,59 +49,94 @@ local registerotffeature = otffeatures.register
process features right.</p>
--ldx]]--
+-- never use these numbers directly
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local states = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ mark = s_mark,
+ rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+local features = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ -- mark = s_mark,
+}
+
+analyzers.states = states
+analyzers.features = features
+
-- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-local state = attributes.private('state')
-
function analyzers.setstate(head,font)
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
- local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
while current do
local id = current.id
if id == glyph_code and current.font == font then
+ done = true
local char = current.char
local d = descriptions[char]
if d then
if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
done = true
- set_attribute(current,state,5) -- mark
+ current[a_state] = s_mark
elseif n == 0 then
first, last, n = current, current, 1
- set_attribute(current,state,1) -- init
+ current[a_state] = s_init
else
last, n = current, n+1
- set_attribute(current,state,2) -- medi
+ current[a_state] = s_medi
end
else -- finish
if first and first == last then
- set_attribute(last,state,4) -- isol
+ last[a_state] = s_isol
elseif last then
- set_attribute(last,state,3) -- fina
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
end
elseif id == disc_code then
-- always in the middle
- set_attribute(current,state,2) -- midi
+ current[a_state] = s_medi
last = current
else -- finish
if first and first == last then
- set_attribute(last,state,4) -- isol
+ last[a_state] = s_isol
elseif last then
- set_attribute(last,state,3) -- fina
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
end
current = current.next
end
if first and first == last then
- set_attribute(last,state,4) -- isol
+ last[a_state] = s_isol
elseif last then
- set_attribute(last,state,3) -- fina
+ last[a_state] = s_fina
end
return head, done
end
@@ -117,14 +148,14 @@ end
local function analyzeinitializer(tfmdata,value) -- attr
local script, language = otf.scriptandlanguage(tfmdata) -- attr
local action = initializers[script]
- if action then
- if type(action) == "function" then
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
end
end
end
@@ -133,14 +164,14 @@ local function analyzeprocessor(head,font,attr)
local tfmdata = fontdata[font]
local script, language = otf.scriptandlanguage(tfmdata,attr)
local action = methods[script]
- if action then
- if type(action) == "function" then
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
end
end
return head, false
@@ -163,19 +194,36 @@ registerotffeature {
methods.latn = analyzers.setstate
--- this info eventually will go into char-def and we will have a state
--- table for generic then
-
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isol = {
+-- This info eventually can go into char-def and we will then have a state
+-- table for the generic case (unicode recognizes all these states but in
+-- practice only uses
+--
+-- isolated : isol
+-- final : isol_fina
+-- medial : isol_fina_medi_init
+--
+-- so in practice, without an analyzer it's rather useless info, which is
+-- why having it in char-def only makes sense for special purposes, like
+-- tracing cq. visualizing.
+
+local tatweel = 0x0640
+local zwnj = 0x200C
+local zwj = 0x200D
+
+local isolated = { -- isol
[0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
+ [0x0604] = true,
[0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true, [zwnj] = true,
+ [0x06DD] = true,
+ -- mandaic
+ [0x0856] = true, [0x0858] = true, [0x0857] = true,
+ -- n'ko
+ [0x07FA] = true,
+ -- also here:
+ [zwnj] = true,
}
-local isol_fina = {
+local final = { -- isol_fina
[0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
[0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
[0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
@@ -191,23 +239,26 @@ local isol_fina = {
[0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
[0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
[0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
- [0xFEF9] = true, [0xFEFB] = true,
-
+ [0x0778] = true, [0x0779] = true,
+ [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
+ [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
-- syriac
-
[0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
[0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
[0x072C] = true, [0x071E] = true,
+ [0x072F] = true, [0x074D] = true,
+ -- mandaic
+ [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
+ [0x084F] = true
}
-local isol_fina_medi_init = {
+local medial = { -- isol_fina_medi_init
[0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
[0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
[0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
[0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
[0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0640] = true, [0x0641] = true, [0x0642] = true, [0x0643] = true,
+ [0x0641] = true, [0x0642] = true, [0x0643] = true,
[0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
[0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
[0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
@@ -236,141 +287,147 @@ local isol_fina_medi_init = {
[0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
[0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
[0x077E] = true, [0x077F] = true,
-
+ [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
+ [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
+ [0x08A7] = true, [0x08A3] = true,
-- syriac
-
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
- [0x0729] = true, [0x072B] = true,
-
- -- also
-
- [zwj] = true,
+ [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
+ [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
+ [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
+ [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
+ [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
+ [0x074E] = true, [0x074F] = true,
+ -- mandaic
+ [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
+ [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
+ [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
+ [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
+ [0x0853] = true,
+ -- n'ko
+ [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
+ [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
+ [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
+ [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
+ [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
+ [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
+ [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
+ [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
+ [0x07E6] = true,
+ -- also here:
+ [tatweel]= true,
+ [zwj] = true,
}
local arab_warned = { }
-
-- todo: gref
local function warning(current,what)
local char = current.char
if not arab_warned[char] then
- log.report("analyze","arab: character %s (U+%05X) has no %s class", char, char, what)
+ log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char] = true
end
end
-function methods.nocolor(head,font,attr)
- for n in traverse_id(glyph_code,head) do
- if not font or n.font == font then
- resetnodecolor(n)
- end
- end
- return head, true
-end
+-- potential optimization: local medial_final = table.merged(medial,final)
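+--
+-- a sketch of that optimization (not applied here): merge the two hashes
+-- once so that the hot path only tests a single table
+--
+-- local medial_or_final = table.merged(medial,final)
+-- ...
+-- if medial_or_final[fc] then ... else warning(first,"isol") ... end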
local function finish(first,last)
if last then
if first == last then
local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
+ if medial[fc] or final[fc] then
+ first[a_state] = s_isol
else
warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
+ first[a_state] = s_error
end
else
local lc = last.char
- if isol_fina_medi_init[lc] or isol_fina[lc] then -- why isol here ?
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- set_attribute(last,state,3) -- fina
- if trace_analyzing then setnodecolor(last,"font:fina") end
+ if medial[lc] or final[lc] then
+ -- if laststate == 1 or laststate == 2 or laststate == 4 then
+ last[a_state] = s_fina
else
warning(last,"fina")
- set_attribute(last,state,0) -- error
- if trace_analyzing then resetnodecolor(last) end
+ last[a_state] = s_error
end
end
first, last = nil, nil
elseif first then
-- first and last are either both set so we never come here
local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
+ if medial[fc] or final[fc] then
+ first[a_state] = s_isol
else
warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
+ first[a_state] = s_error
end
first = nil
end
return first, last
end
-function methods.arab(head,font,attr) -- maybe make a special version with no trace
+function methods.arab(head,font,attr)
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local marks = tfmdata.resources.marks
local first, last, current, done = nil, nil, head, false
while current do
- if current.id == glyph_code and current.subtype<256 and current.font == font and not has_attribute(current,state) then
+ local id = current.id
+ if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
done = true
local char = current.char
if marks[char] or (useunicodemarks and categories[char] == "mn") then
- set_attribute(current,state,5) -- mark
- if trace_analyzing then setnodecolor(current,"font:mark") end
- elseif isol[char] then -- can be zwj or zwnj too
+ current[a_state] = s_mark
+ elseif isolated[char] then -- can be zwj or zwnj too
first, last = finish(first,last)
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
+ current[a_state] = s_isol
first, last = nil, nil
elseif not first then
- if isol_fina_medi_init[char] then
- set_attribute(current,state,1) -- init
- if trace_analyzing then setnodecolor(current,"font:init") end
+ if medial[char] then
+ current[a_state] = s_init
first, last = first or current, current
- elseif isol_fina[char] then
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
+ elseif final[char] then
+ current[a_state] = s_isol
first, last = nil, nil
else -- no arab
first, last = finish(first,last)
end
- elseif isol_fina_medi_init[char] then
+ elseif medial[char] then
first, last = first or current, current
- set_attribute(current,state,2) -- medi
- if trace_analyzing then setnodecolor(current,"font:medi") end
- elseif isol_fina[char] then
- if not has_attribute(last,state,1) then
+ current[a_state] = s_medi
+ elseif final[char] then
+ if last[a_state] ~= s_init then
-- tricky, we need to check what last may be !
- set_attribute(last,state,2) -- medi
- if trace_analyzing then setnodecolor(last,"font:medi") end
+ last[a_state] = s_medi
end
- set_attribute(current,state,3) -- fina
- if trace_analyzing then setnodecolor(current,"font:fina") end
+ current[a_state] = s_fina
first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then
- if trace_analyzing then setnodecolor(current,"font:rest") end
+ elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
+ current[a_state] = s_rest
first, last = finish(first,last)
- else --no
+ else -- no
first, last = finish(first,last)
end
else
- first, last = finish(first,last)
+ if first or last then
+ first, last = finish(first,last)
+ end
+ if id == math_code then
+ current = end_of_math(current)
+ end
end
current = current.next
end
- first, last = finish(first,last)
+ if first or last then
+ finish(first,last)
+ end
return head, done
end
methods.syrc = methods.arab
+methods.mand = methods.arab
+methods.nko = methods.arab
directives.register("otf.analyze.useunicodemarks",function(v)
analyzers.useunicodemarks = v
diff --git a/Master/texmf-dist/tex/context/base/font-otb.lua b/Master/texmf-dist/tex/context/base/font-otb.lua
index 44639a80aa1..6abe7c6f4e1 100644
--- a/Master/texmf-dist/tex/context/base/font-otb.lua
+++ b/Master/texmf-dist/tex/context/base/font-otb.lua
@@ -11,70 +11,94 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local utfchar = utf.char
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
+local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
+local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
+local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-local report_prepare = logs.reporter("fonts","otf prepare")
+local report_prepare = logs.reporter("fonts","otf prepare")
-local fonts = fonts
-local otf = fonts.handlers.otf
+local fonts = fonts
+local otf = fonts.handlers.otf
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
-otf.defaultbasealternate = "none" -- first last
+otf.defaultbasealternate = "none" -- first last
-local wildcard = "*"
-local default = "dflt"
+local wildcard = "*"
+local default = "dflt"
+
+local formatters = string.formatters
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
local function gref(descriptions,n)
if type(n) == "number" then
local name = descriptions[n].name
if name then
- return format("U+%05X (%s)",n,name)
+ return f_uniname(n,name)
else
- return format("U+%05X")
+ return f_unicode(n)
end
elseif n then
local num, nam = { }, { }
- for i=2,#n do -- first is likely a key
+ for i=2,#n do
local ni = n[i]
- num[i] = format("U+%05X",ni)
- nam[i] = descriptions[ni].name or "?"
+ if tonumber(ni) then -- first is likely a key
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
end
- return format("%s (%s)",concat(num," "), concat(nam," "))
+ return f_unilist(num,nam)
else
- return "?"
+ return "<error in base mode tracing>"
end
end
local function cref(feature,lookupname)
if lookupname then
- return format("feature %s, lookup %s",feature,lookupname)
+ return formatters["feature %a, lookup %a"](feature,lookupname)
else
- return format("feature %s",feature)
+ return formatters["feature %a"](feature)
end
end
local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%s => %s)",cref(feature,lookupname),
- gref(descriptions,unicode),replacement and gref(descriptions,replacement) or "-",
- tostring(value),comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
end
local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %s",cref(feature,lookupname),
- gref(descriptions,unicode),gref(descriptions,substitution))
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
end
local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %s",cref(feature,lookupname),
- gref(descriptions,ligature),gref(descriptions,unicode))
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
end
local basemethods = { }
@@ -107,7 +131,7 @@ local function registerbasehash(tfmdata)
end
properties.basehash = base
properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.properties.fullname,hash)
+ -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
applied = { }
end
@@ -165,7 +189,7 @@ local function finalize_ligatures(tfmdata,ligatures)
if ligature then
local unicode, lookupdata = ligature[1], ligature[2]
if trace then
- print("BUILDING",concat(lookupdata," "),unicode)
+ trace_ligatures_detail("building % a into %a",lookupdata,unicode)
end
local size = #lookupdata
local firstcode = lookupdata[1] -- [2]
@@ -178,7 +202,7 @@ local function finalize_ligatures(tfmdata,ligatures)
if not firstdata then
firstcode = private
if trace then
- print(" DEFINING",firstname,firstcode)
+ trace_ligatures_detail("defining %a as %a",firstname,firstcode)
end
unicodes[firstname] = firstcode
firstdata = { intermediate = true, ligatures = { } }
@@ -202,7 +226,7 @@ local function finalize_ligatures(tfmdata,ligatures)
end
end
if trace then
- print("CODES",firstname,firstcode,secondname,secondcode,target)
+ trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
end
local firstligs = firstdata.ligatures
if firstligs then
@@ -354,15 +378,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
newkerns = { [otherunicode] = value }
done = true
if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
end
elseif not newkerns[otherunicode] then -- first wins
newkerns[otherunicode] = value
done = true
if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
end
end
end
@@ -412,7 +434,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
local character = characters[preceding]
if not character then
if trace_baseinit then
- report_prepare("weird ligature in lookup %s: U+%05X (%s), preceding U+%05X (%s)",lookupname,v,utfchar(v),preceding,utfchar(preceding))
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
end
character = makefake(tfmdata,name,present)
end
@@ -543,8 +565,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
for otherunicode, kern in next, data do
if not kerns[otherunicode] and kern ~= 0 then
kerns[otherunicode] = kern
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),kern)
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,kern)
end
end
else
@@ -611,7 +632,7 @@ local function featuresinitializer(tfmdata,value)
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.properties.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/font-otc.lua b/Master/texmf-dist/tex/context/base/font-otc.lua
index ae463e750a2..a87dcadf88d 100644
--- a/Master/texmf-dist/tex/context/base/font-otc.lua
+++ b/Master/texmf-dist/tex/context/base/font-otc.lua
@@ -17,8 +17,7 @@ local report_otf = logs.reporter("fonts","otf loading")
local fonts = fonts
local otf = fonts.handlers.otf
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
+local registerotffeature = otf.features.register
local setmetatableindex = table.setmetatableindex
-- In the userdata interface we can no longer tweak the loaded font as
@@ -166,7 +165,7 @@ local function addfeature(data,feature,specifications)
end
end
if trace_loading then
- report_otf("enhance: registering feature '%s', %s glyphs affected, %s glyphs skipped",feature,done,skip)
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
end
end
end
@@ -312,14 +311,14 @@ end
local anum_specification = {
{
type = "substitution",
- features = { arab = { URD = true, dflt = true } },
+ features = { arab = { urd = true, dflt = true } },
data = anum_arabic,
flags = noflags, -- { },
valid = valid,
},
{
type = "substitution",
- features = { arab = { URD = true } },
+ features = { arab = { urd = true } },
data = anum_persian,
flags = noflags, -- { },
valid = valid,
diff --git a/Master/texmf-dist/tex/context/base/font-otd.lua b/Master/texmf-dist/tex/context/base/font-otd.lua
index ebd4879599d..a9d093d6d35 100644
--- a/Master/texmf-dist/tex/context/base/font-otd.lua
+++ b/Master/texmf-dist/tex/context/base/font-otd.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['font-otd'] = {
license = "see context related readme files"
}
+local type = type
local match = string.match
local sequenced = table.sequenced
@@ -15,6 +16,8 @@ local trace_applied = false trackers.register("otf.applied", function(v)
local report_otf = logs.reporter("fonts","otf loading")
local report_process = logs.reporter("fonts","otf process")
+local allocate = utilities.storage.allocate
+
local fonts = fonts
local otf = fonts.handlers.otf
local hashes = fonts.hashes
@@ -22,8 +25,10 @@ local definers = fonts.definers
local constructors = fonts.constructors
local specifiers = fonts.specifiers
-local fontdata = hashes.identifiers
------ fontresources = hashes.resources -- not yet defined
+local fontidentifiers = hashes.identifiers
+local fontresources = hashes.resources
+local fontproperties = hashes.properties
+local fontdynamics = hashes.dynamics
local contextsetups = specifiers.contextsetups
local contextnumbers = specifiers.contextnumbers
@@ -34,27 +39,28 @@ local setmetatableindex = table.setmetatableindex
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
-local fontdynamics = { }
-hashes.dynamics = fontdynamics
-
local a_to_script = { }
local a_to_language = { }
-setmetatableindex(fontdynamics, function(t,font)
- local d = fontdata[font].shared.dynamics or false
- t[font] = d
- return d
-end)
+-- we can have a scripts hash in fonts.hashes
function otf.setdynamics(font,attribute)
- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
+ -- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
+ local features = contextsetups[attribute]
if features then
local dynamics = fontdynamics[font]
- local script = features.script or 'dflt'
- local language = features.language or 'dflt'
+ local dynamic = contextmerged[attribute] or 0
+ local script, language
+ if dynamic == 2 then -- merge
+ language = features.language or fontproperties[font].language or "dflt"
+ script = features.script or fontproperties[font].script or "dflt"
+ else -- if dynamic == 1 then -- replace
+ language = features.language or "dflt"
+ script = features.script or "dflt"
+ end
if script == "auto" then
- -- checkedscript and resources are defined later so we cannot shortcut them
- script = definers.checkedscript(fontdata[font],hashes.resources[font],features)
+ -- checkedscript and resources are defined later so we cannot shortcut them -- todo: make installer
+ script = definers.checkedscript(fontidentifiers[font],fontresources[font],features)
end
local ds = dynamics[script] -- can be metatable magic (less testing)
if not ds then
@@ -68,10 +74,10 @@ function otf.setdynamics(font,attribute)
end
local dsla = dsl[attribute]
if not dsla then
- local tfmdata = fontdata[font]
+ local tfmdata = fontidentifiers[font]
a_to_script [attribute] = script
a_to_language[attribute] = language
- -- we need to save some values
+ -- we need to save some values .. quite messy
local properties = tfmdata.properties
local shared = tfmdata.shared
local s_script = properties.script
@@ -88,7 +94,7 @@ function otf.setdynamics(font,attribute)
set.mode = "node" -- really needed
dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
- report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,sequenced(set))
+ report_otf("setting dynamics %s: attribute %a, script %a, language %a, set %a",contextnumbers[attribute],attribute,script,language,set)
end
-- we need to restore some values
properties.script = s_script
@@ -98,7 +104,7 @@ function otf.setdynamics(font,attribute)
-- end of restore
dynamics[script][language][attribute] = dsla -- cache
elseif trace_dynamics then
- -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %a, script %a, language %a",contextnumbers[attribute],attribute,script,language)
end
return dsla
end
@@ -115,51 +121,58 @@ end
-- we reimplement the dataset resolver
-local special_attributes = {
- init = 1,
- medi = 2,
- fina = 3,
- isol = 4
-}
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local resolved = { } -- we only resolve a font,script,language,attribute pair once
+local wildcard = "*"
+local default = "dflt"
-local resolved = { } -- we only resolve a font,script,language,attribute pair once
-local wildcard = "*"
-local default = "dflt"
+-- what about analyze in local and not in font
local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
local features = sequence.features
if features then
for kind, scripts in next, features do
- local s_e = s_enabled and s_enabled[kind] -- the value
- local a_e = a_enabled and a_enabled[kind] -- the value
- local e_e = s_e or a_e -- todo: when one of them is true and the other is a value
+ local e_e
+ local a_e = a_enabled and a_enabled[kind] -- the value (location)
+ if a_e ~= nil then
+ e_e = a_e
+ else
+ e_e = s_enabled and s_enabled[kind] -- the value (font)
+ end
if e_e then
local languages = scripts[script] or scripts[wildcard]
if languages then
- local valid, what = false
+ -- local valid, what = false
+ local valid = false
-- not languages[language] or languages[default] or languages[wildcard] because we want tracing
-- only first attribute match check, so we assume simple fina's
-- default can become a font feature itself
if languages[language] then
valid = e_e -- was true
- what = language
+ -- what = language
-- elseif languages[default] then
-- valid = true
-- what = default
elseif languages[wildcard] then
valid = e_e -- was true
- what = wildcard
+ -- what = wildcard
end
if valid then
- local attribute = special_attributes[kind] or false
- if a_e and dynamic < 0 then
- valid = false
- end
+ local attribute = autofeatures[kind] or false
+ -- if a_e and dynamic < 0 then
+ -- valid = false
+ -- end
+ -- if trace_applied then
+ -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
+ -- report_process(
+ -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
+ -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
+ -- end
if trace_applied then
- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
report_process(
- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
+ "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
+ font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
end
return { valid, attribute, sequence.chain or 0, kind, sequence }
end
@@ -172,28 +185,32 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr
end
end
--- local contextresolved = { }
---
--- setmetatableindex(contextresolved, function(t,k)
--- local v = contextsetups[contextnumbers[k]]
--- t[k] = v
--- return v
--- end)
+-- there is some fuzzy language/script state stuff in properties (temporary)
-function otf.dataset(tfmdata,sequences,font,attr) -- attr only when explicit (as in special parbuilder)
+function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in special parbuilder)
local script, language, s_enabled, a_enabled, dynamic
if attr and attr ~= 0 then
- local features = contextsetups[contextnumbers[attr]] -- could be a direct list
- -- local features = contextresolved[attr]
- language = features.language or "dflt"
- script = features.script or "dflt"
- a_enabled = features
- dynamic = contextmerged[attr] or 0
- if dynamic == 2 or dynamic == -2 then
- -- font based
- s_enabled = tfmdata.shared.features
+ dynamic = contextmerged[attr] or 0
+ -- local features = contextsetups[contextnumbers[attr]] -- could be a direct list
+ local features = contextsetups[attr]
+ a_enabled = features -- location based
+ if dynamic == 1 then -- or dynamic == -1 then
+ -- replace
+ language = features.language or "dflt"
+ script = features.script or "dflt"
+ elseif dynamic == 2 then -- or dynamic == -2 then
+ -- merge
+ local properties = tfmdata.properties
+ s_enabled = tfmdata.shared.features -- font based
+ language = features.language or properties.language or "dflt"
+ script = features.script or properties.script or "dflt"
+ else
+ -- error
+ local properties = tfmdata.properties
+ language = properties.language or "dflt"
+ script = properties.script or "dflt"
end
else
local properties = tfmdata.properties
@@ -220,15 +237,25 @@ function otf.dataset(tfmdata,sequences,font,attr) -- attr only when explicit (as
end
local ra = rl[attr]
if ra == nil then -- attr can be false
- ra = { }
+ ra = {
+ -- indexed but we can also add specific data by key in:
+ }
rl[attr] = ra
- setmetatableindex(ra, function(t,k)
- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
- t[k] = v or false
- return v
- end)
+ local sequences = tfmdata.resources.sequences
+-- setmetatableindex(ra, function(t,k)
+-- if type(k) == "number" then
+-- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
+-- t[k] = v or false
+-- return v
+-- end
+-- end)
+for s=1,#sequences do
+ local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
+ if v then
+ ra[#ra+1] = v
+ end
+end
end
-
return ra
end
diff --git a/Master/texmf-dist/tex/context/base/font-otf.lua b/Master/texmf-dist/tex/context/base/font-otf.lua
index e1339aec9a9..b8abd590a3e 100644
--- a/Master/texmf-dist/tex/context/base/font-otf.lua
+++ b/Master/texmf-dist/tex/context/base/font-otf.lua
@@ -14,8 +14,6 @@ if not modules then modules = { } end modules ['font-otf'] = {
-- more checking against low level calls of functions
-local utf = unicode.utf8
-
local utfbyte = utf.byte
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
@@ -49,7 +47,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.737 -- beware: also sync font-mis.lua
+otf.version = 2.741 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -91,7 +89,7 @@ registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef =
local function load_featurefile(raw,featurefile)
if featurefile and featurefile ~= "" then
if trace_loading then
- report_otf("featurefile: %s", featurefile)
+ report_otf("using featurefile %a", featurefile)
end
fontloader.apply_featurefile(raw, featurefile)
end
@@ -101,7 +99,7 @@ local function showfeatureorder(rawdata,filename)
local sequences = rawdata.resources.sequences
if sequences and #sequences > 0 then
if trace_loading then
- report_otf("font %s has %s sequences",filename,#sequences)
+ report_otf("font %a has %s sequences",filename,#sequences)
report_otf(" ")
end
for nos=1,#sequences do
@@ -111,7 +109,7 @@ local function showfeatureorder(rawdata,filename)
local subtables = sequence.subtables or { "no-subtables" }
local features = sequence.features
if trace_loading then
- report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
end
if features then
for feature, scripts in next, features do
@@ -122,14 +120,14 @@ local function showfeatureorder(rawdata,filename)
for language, _ in next, languages do
ttt[#ttt+1] = language
end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
+ tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
end
if trace_loading then
- report_otf(" %s: %s",feature,concat(tt," "))
+ report_otf(" %s: % t",feature,tt)
end
else
if trace_loading then
- report_otf(" %s: %s",feature,tostring(scripts))
+ report_otf(" %s: %S",feature,scripts)
end
end
end
@@ -139,7 +137,7 @@ local function showfeatureorder(rawdata,filename)
report_otf("\n")
end
elseif trace_loading then
- report_otf("font %s has no sequences",filename)
+ report_otf("font %a has no sequences",filename)
end
end
@@ -254,19 +252,19 @@ local function enhance(name,data,filename,raw)
local enhancer = actions[name]
if enhancer then
if trace_loading then
- report_otf("enhance: %s (%s)",name,filename)
+ report_otf("apply enhancement %a to file %a",name,filename)
ioflush()
end
enhancer(data,filename,raw)
- elseif trace_loading then
- -- report_otf("enhance: %s is undefined",name)
+ else
+ -- no message as we can have private ones
end
end
function enhancers.apply(data,filename,raw)
local basename = file.basename(lower(filename))
if trace_loading then
- report_otf("start enhancing: %s",filename)
+ report_otf("%s enhancing file %a","start",filename)
end
ioflush() -- we want instant messages
for e=1,#ordered_enhancers do
@@ -291,7 +289,7 @@ function enhancers.apply(data,filename,raw)
ioflush() -- we want instant messages
end
if trace_loading then
- report_otf("stop enhancing")
+ report_otf("%s enhancing file %a","stop",filename)
end
ioflush() -- we want instant messages
end
@@ -312,7 +310,7 @@ end
function patches.report(fmt,...)
if trace_loading then
- report_otf("patching: " ..fmt,...)
+ report_otf("patching: %s",formatters[fmt](...))
end
end
@@ -321,7 +319,8 @@ function enhancers.register(what,action) -- only already registered can be overl
end
function otf.load(filename,format,sub,featurefile)
- local name = file.basename(file.removesuffix(filename))
+ local base = file.basename(file.removesuffix(filename))
+ local name = file.removesuffix(base)
local attr = lfs.attributes(filename)
local size = attr and attr.size or 0
local time = attr and attr.modification or 0
@@ -342,7 +341,7 @@ function otf.load(filename,format,sub,featurefile)
for s in gmatch(featurefile,"[^,]+") do
local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
if name == "" then
- report_otf("loading: no featurefile '%s'",s)
+ report_otf("loading error, no featurefile %a",s)
else
local attr = lfs.attributes(name)
featurefiles[#featurefiles+1] = {
@@ -359,7 +358,7 @@ function otf.load(filename,format,sub,featurefile)
local data = containers.read(otf.cache,hash)
local reload = not data or data.size ~= size or data.time ~= time
if forceload then
- report_otf("loading: forced reload due to hard coded flag")
+ report_otf("forced reload of %a due to hard coded flag",filename)
reload = true
end
if not reload then
@@ -380,11 +379,11 @@ function otf.load(filename,format,sub,featurefile)
reload = true
end
if reload then
- report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
end
end
if reload then
- report_otf("loading: %s (hash: %s)",filename,hash)
+ report_otf("loading %a, hash %a",filename,hash)
local fontdata, messages
if sub then
fontdata, messages = fontloader.open(filename,sub)
@@ -399,11 +398,11 @@ function otf.load(filename,format,sub,featurefile)
report_otf("warning: %s",messages)
else
for m=1,#messages do
- report_otf("warning: %s",tostring(messages[m]))
+ report_otf("warning: %S",messages[m])
end
end
else
- report_otf("font loaded okay")
+ report_otf("loading done")
end
if fontdata then
if featurefiles then
@@ -455,41 +454,41 @@ function otf.load(filename,format,sub,featurefile)
starttiming(data)
report_otf("file size: %s", size)
enhancers.apply(data,filename,fontdata)
+ local packtime = { }
if packdata then
if cleanup > 0 then
collectgarbage("collect")
---~ lua.collectgarbage()
end
+ starttiming(packtime)
enhance("pack",data,filename,nil)
+ stoptiming(packtime)
end
- report_otf("saving in cache: %s",filename)
+ report_otf("saving %a in cache",filename)
data = containers.write(otf.cache, hash, data)
if cleanup > 1 then
collectgarbage("collect")
---~ lua.collectgarbage()
end
stoptiming(data)
if elapsedtime then -- not in generic
- report_otf("preprocessing and caching took %s seconds",elapsedtime(data))
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
end
fontloader.close(fontdata) -- free memory
if cleanup > 3 then
collectgarbage("collect")
---~ lua.collectgarbage()
end
data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
if cleanup > 2 then
collectgarbage("collect")
---~ lua.collectgarbage()
end
else
data = nil
- report_otf("loading failed (file read error)")
+ report_otf("loading failed due to read error")
end
end
if data then
if trace_defining then
- report_otf("loading from cache: %s",hash)
+ report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
enhance("add dimensions",data,filename,nil,false)
@@ -529,13 +528,14 @@ actions["add dimensions"] = function(data,filename)
local defaultwidth = resources.defaultwidth or 0
local defaultheight = resources.defaultheight or 0
local defaultdepth = resources.defaultdepth or 0
+ local basename = trace_markwidth and file.basename(filename)
if usemetatables then
for _, d in next, descriptions do
local wd = d.width
if not wd then
d.width = defaultwidth
elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
-- d.width = -wd
end
setmetatable(d,mt)
@@ -546,7 +546,7 @@ actions["add dimensions"] = function(data,filename)
if not wd then
d.width = defaultwidth
elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
-- d.width = -wd
end
-- if forcenotdef and not d.name then
@@ -650,7 +650,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
unicode = private
unicodes[name] = private
if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
end
private = private + 1
nofnames = nofnames + 1
@@ -674,7 +674,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
descriptions[unicode] = description
else
- -- report_otf("potential problem: glyph 0x%04X is used but empty",index)
+ -- report_otf("potential problem: glyph %U is used but empty",index)
end
end
end
@@ -682,10 +682,10 @@ actions["prepare glyphs"] = function(data,filename,raw)
report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
end
elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %s",filename)
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
end
elseif trace_loading then
- report_otf("font %s has no glyphs",filename)
+ report_otf("font %a has no glyphs",filename)
end
else
@@ -699,7 +699,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
unicode = private
unicodes[name] = private
if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
end
private = private + 1
else
@@ -742,7 +742,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
end
end
else
- report_otf("potential problem: glyph 0x%04X is used but empty",index)
+ report_otf("potential problem: glyph %U is used but empty",index)
end
end
@@ -774,22 +774,22 @@ actions["check encoding"] = function(data,filename,raw)
if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
if trace_loading then
- report_otf("checking embedded unicode map '%s'",encname)
+ report_otf("checking embedded unicode map %a",encname)
end
for unicode, index in next, unicodetoindex do -- altuni already covers this
if unicode <= criterium and not descriptions[unicode] then
local parent = indices[index] -- why nil?
if parent then
- report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index)
+ report_otf("weird, unicode %U points to %U with index %H",unicode,parent,index)
else
- report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index)
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
end
end
end
elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?")
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
- report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
end
if mapdata then
@@ -831,7 +831,7 @@ actions["add duplicates"] = function(data,filename,raw)
-- todo: lookups etc
end
if trace_loading then
- report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n)
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
end
end
end
@@ -1058,7 +1058,10 @@ actions["reorganize subtables"] = function(data,filename,raw)
--
local name = gk.name
--
- if features then
+ if not name then
+ -- in fact an error
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
-- scripts, tag, ismac
local f = { }
for i=1,#features do
@@ -1114,7 +1117,7 @@ actions["prepare lookups"] = function(data,filename,raw)
end
-- The reverse handler does a bit redundant splitting but it's seldom
--- seen so we don' tbother too much. We could store the replacement
+-- seen so we don't bother too much. We could store the replacement
-- in the current list (value instead of true) but it makes other code
-- uglier. Maybe some day.
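-- The uncover helpers added and reshuffled below all follow the same split-and-cache
-- pattern: a cover string of glyph names is split once (in the real code by an lpeg
-- splitter) and the result is memoized per cover string. A minimal sketch of that
-- pattern, with a plain function standing in for the lpeg splitter (illustration only):
--
--   local function uncover(split, cache, cover)
--       -- split a cover string like "a b c" once and reuse the result afterwards
--       local uncovered = cache[cover]
--       if not uncovered then
--           uncovered = split(cover)
--           cache[cover] = uncovered
--       end
--       return uncovered
--   end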
@@ -1132,6 +1135,22 @@ local function t_uncover(splitter,cache,covers)
return result
end
+local function s_uncover(splitter,cache,cover)
+ if cover == "" then
+ return nil
+ else
+ local uncovered = cache[cover]
+ if not uncovered then
+ uncovered = lpegmatch(splitter,cover)
+-- for i=1,#uncovered do
+-- uncovered[i] = { [uncovered[i]] = true }
+-- end
+ cache[cover] = uncovered
+ end
+ return { uncovered }
+ end
+end
+
local function t_hashed(t,cache)
if t then
local ht = { }
@@ -1153,22 +1172,6 @@ local function t_hashed(t,cache)
end
end
-local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- for i=1,#uncovered do
- uncovered[i] = { [uncovered[i]] = true }
- end
- cache[cover] = uncovered
- end
- return uncovered
- end
-end
-
local s_hashed = t_hashed
local function r_uncover(splitter,cache,cover,replacements)
@@ -1193,11 +1196,15 @@ local function r_uncover(splitter,cache,cover,replacements)
end
end
-actions["reorganize lookups"] = function(data,filename,raw)
+actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
-- we prefer the before lookups in a normal order
if data.lookups then
local splitter = data.helpers.tounicodetable
- local cache, h_cache = { }, { }
+ local t_u_cache = { }
+ local s_u_cache = t_u_cache -- string keys
+ local t_h_cache = { }
+ local s_h_cache = t_h_cache -- table keys (so we could use one cache)
+ local r_u_cache = { } -- maybe shared
for _, lookup in next, data.lookups do
local rules = lookup.rules
if rules then
@@ -1205,15 +1212,15 @@ actions["reorganize lookups"] = function(data,filename,raw)
if format == "class" then
local before_class = lookup.before_class
if before_class then
- before_class = t_uncover(splitter,cache,reversed(before_class))
+ before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
end
local current_class = lookup.current_class
if current_class then
- current_class = t_uncover(splitter,cache,current_class)
+ current_class = t_uncover(splitter,t_u_cache,current_class)
end
local after_class = lookup.after_class
if after_class then
- after_class = t_uncover(splitter,cache,after_class)
+ after_class = t_uncover(splitter,t_u_cache,after_class)
end
for i=1,#rules do
local rule = rules[i]
@@ -1223,7 +1230,7 @@ actions["reorganize lookups"] = function(data,filename,raw)
for i=1,#before do
before[i] = before_class[before[i]] or { }
end
- rule.before = t_hashed(before,h_cache)
+ rule.before = t_hashed(before,t_h_cache)
end
local current = class.current
local lookups = rule.lookups
@@ -1231,17 +1238,17 @@ actions["reorganize lookups"] = function(data,filename,raw)
for i=1,#current do
current[i] = current_class[current[i]] or { }
if lookups and not lookups[i] then
- lookups[i] = false -- e.g. we can have two lookups and one replacement
+ lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
end
end
- rule.current = t_hashed(current,h_cache)
+ rule.current = t_hashed(current,t_h_cache)
end
local after = class.after
if after then
for i=1,#after do
after[i] = after_class[after[i]] or { }
end
- rule.after = t_hashed(after,h_cache)
+ rule.after = t_hashed(after,t_h_cache)
end
rule.class = nil
end
@@ -1256,18 +1263,18 @@ actions["reorganize lookups"] = function(data,filename,raw)
if coverage then
local before = coverage.before
if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
end
local current = coverage.current
if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
+ current = t_uncover(splitter,t_u_cache,current)
+ rule.current = t_hashed(current,t_h_cache)
end
local after = coverage.after
if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
end
rule.coverage = nil
end
@@ -1279,22 +1286,22 @@ actions["reorganize lookups"] = function(data,filename,raw)
if reversecoverage then
local before = reversecoverage.before
if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
end
local current = reversecoverage.current
if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
+ current = t_uncover(splitter,t_u_cache,current)
+ rule.current = t_hashed(current,t_h_cache)
end
local after = reversecoverage.after
if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
end
local replacements = reversecoverage.replacements
if replacements then
- rule.replacements = r_uncover(splitter,cache,current,replacements)
+ rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
end
rule.reversecoverage = nil
end
@@ -1305,19 +1312,19 @@ actions["reorganize lookups"] = function(data,filename,raw)
local glyphs = rule.glyphs
if glyphs then
local fore = glyphs.fore
- if fore then
- fore = s_uncover(splitter,cache,fore)
- rule.before = s_hashed(fore,h_cache)
+ if fore and fore ~= "" then
+ fore = s_uncover(splitter,s_u_cache,fore)
+ rule.before = s_hashed(fore,s_h_cache)
end
local back = glyphs.back
if back then
- back = s_uncover(splitter,cache,back)
- rule.after = s_hashed(back,h_cache)
+ back = s_uncover(splitter,s_u_cache,back)
+ rule.after = s_hashed(back,s_h_cache)
end
local names = glyphs.names
if names then
- names = s_uncover(splitter,cache,names)
- rule.current = s_hashed(names,h_cache)
+ names = s_uncover(splitter,s_u_cache,names)
+ rule.current = s_hashed(names,s_h_cache)
end
rule.glyphs = nil
end
@@ -1337,7 +1344,7 @@ local function check_variants(unicode,the_variants,splitter,unicodes)
for i=1,#glyphs do
local g = glyphs[i]
if done[g] then
- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",g,unicode)
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
else
if n == 0 then
n = 1
@@ -1453,7 +1460,7 @@ actions["reorganize glyph kerns"] = function(data,filename,raw)
end
end
elseif trace_loading then
- report_otf("problems with unicode %s of kern %s of glyph U+%05X",name,k,unicode)
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
end
end
end
@@ -1487,6 +1494,9 @@ actions["merge kern classes"] = function(data,filename,raw)
if type(lookups) ~= "table" then
lookups = { lookups }
end
+ -- if offsets[1] == nil then
+ -- offsets[1] = ""
+ -- end
-- we can check the max in the loop
-- local maxseconds = getn(seconds)
for n, s in next, firsts do
@@ -1507,9 +1517,9 @@ actions["merge kern classes"] = function(data,filename,raw)
if splt then
local extrakerns = { }
local baseoffset = (fk-1) * maxseconds
- -- for sk=2,maxseconds do
- -- local sv = seconds[sk]
- for sk, sv in next, seconds do
+ for sk=2,maxseconds do -- will become 1 based in future luatex
+ local sv = seconds[sk]
+ -- for sk, sv in next, seconds do
local splt = split[sv]
if splt then -- redundant test
local offset = offsets[baseoffset + sk]
@@ -1538,7 +1548,7 @@ actions["merge kern classes"] = function(data,filename,raw)
lookupkerns[second_unicode] = kern
end
elseif trace_loading then
- report_otf("no glyph data for U+%05X", first_unicode)
+ report_otf("no glyph data for %U", first_unicode)
end
end
end
@@ -1616,7 +1626,7 @@ actions["reorganize glyph lookups"] = function(data,filename,raw)
if not lt then
lookuptypes[tag] = lookuptype
elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes: %s => %s and %s",tag,lt,lookuptype)
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
end
if lookuptype == "ligature" then
lookuplist[l] = { lpegmatch(splitter,specification.components) }
@@ -1773,7 +1783,7 @@ local function copytotfm(data,cache_id)
for i=1,#variants do
local un = variants[i]
-- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
-- else
c.next = un
c = characters[un]
@@ -1792,7 +1802,7 @@ local function copytotfm(data,cache_id)
for i=1,#variants do
local un = variants[i]
-- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
-- else
c.next = un
c = characters[un]
@@ -1972,7 +1982,7 @@ local function read_from_otf(specification)
local allfeatures = tfmdata.shared.features or specification.features.normal
constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.extname(specification.filename),specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
end
return tfmdata
end
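Most of the message changes in this file swap explicit string.format style directives for the directive set understood by the report_otf formatters: %a for a quoted value, %U for a U+ style code point, %H for a hexadecimal index, %p for scaled points. A rough, purely illustrative approximation in plain Lua of the three directives that dominate these hunks (this is not the actual ConTeXt formatter implementation and only handles %a, %U and %H):

    local function sketchformat(fmt, ...)
        local args, i = { ... }, 0
        return (fmt:gsub("%%([aUH])", function(d)
            i = i + 1
            local v = args[i]
            if d == "a" then
                return v == nil and "nil" or string.format("%q", tostring(v))
            elseif d == "U" then
                return string.format("U+%05X", v)
            else -- d == "H"
                return string.format("0x%04X", v)
            end
        end))
    end

    print(sketchformat("glyph %a at index %H is moved to private unicode slot %U",
        "f_f_i", 0x0123, 0xF0000))
    -- glyph "f_f_i" at index 0x0123 is moved to private unicode slot U+F0000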
diff --git a/Master/texmf-dist/tex/context/base/font-oti.lua b/Master/texmf-dist/tex/context/base/font-oti.lua
index d6853db31f3..06c2a42fac2 100644
--- a/Master/texmf-dist/tex/context/base/font-oti.lua
+++ b/Master/texmf-dist/tex/context/base/font-oti.lua
@@ -8,15 +8,16 @@ if not modules then modules = { } end modules ['font-oti'] = {
local lower = string.lower
-local allocate = utilities.storage.allocate
-
local fonts = fonts
-local otf = { }
-fonts.handlers.otf = otf
+local constructors = fonts.constructors
-local otffeatures = fonts.constructors.newfeatures("otf")
+local otf = constructors.newhandler("otf")
+local otffeatures = constructors.newfeatures("otf")
+local otftables = otf.tables
local registerotffeature = otffeatures.register
+local allocate = utilities.storage.allocate
+
registerotffeature {
name = "features",
description = "initialization of feature handler",
@@ -25,8 +26,6 @@ registerotffeature {
-- these are later hooked into node and base initializaters
-local otftables = otf.tables -- not always defined
-
local function setmode(tfmdata,value)
if value then
tfmdata.properties.mode = lower(value)
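The font-oti.lua change above stops building the otf handler table by hand and asks constructors.newhandler for it, so the namespace (including otf.tables) exists before features are registered. A hypothetical sketch of what such a create-or-reuse factory could look like (an assumption for illustration; the real constructor lives elsewhere in the font code):

    local handlers = { }

    local function newhandler(what)
        local handler = handlers[what]
        if not handler then
            handler = {
                tables   = { },   -- e.g. otf.tables, available right away
                features = { },
            }
            handlers[what] = handler
        end
        return handler
    end

    local otf = newhandler("otf")
    local otftables = otf.tables  -- now always defined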
diff --git a/Master/texmf-dist/tex/context/base/font-otn.lua b/Master/texmf-dist/tex/context/base/font-otn.lua
index d97ef7363b7..39a3f77dda0 100644
--- a/Master/texmf-dist/tex/context/base/font-otn.lua
+++ b/Master/texmf-dist/tex/context/base/font-otn.lua
@@ -3,9 +3,11 @@ if not modules then modules = { } end modules ['font-otn'] = {
comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
+-- preprocessors = { "nodes" }
+
-- this is still somewhat preliminary and it will get better in due time;
-- much functionality could only be implemented thanks to the husayni font
-- of Idris Samawi Hamid to whom we dedicate this module.

@@ -28,6 +30,8 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- handle positions (we need example fonts)
-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+
--[[ldx--
<p>This module is a bit more split up than I'd like but since we also want to test
@@ -116,11 +120,14 @@ results in different tables.</p>
-- we now use only one hash. If needed we can have multiple again but in that
-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
local concat, insert, remove = table.concat, table.insert, table.remove
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local random = math.random
+local formatters = string.formatters
local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
@@ -151,6 +158,7 @@ local report_subchain = logs.reporter("fonts","otf subchain")
local report_chain = logs.reporter("fonts","otf chain")
local report_process = logs.reporter("fonts","otf process")
local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -166,9 +174,8 @@ local insert_node_after = node.insert_after
local delete_node = nodes.delete
local copy_node = node.copy
local find_node_tail = node.tail or node.slide
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
local flush_node_list = node.flush_list
+local end_of_math = node.end_of_math
local setmetatableindex = table.setmetatableindex
@@ -185,6 +192,7 @@ local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
local disc_code = nodecodes.disc
local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
local dir_code = whatcodes.dir
local localpar_code = whatcodes.localpar
@@ -199,15 +207,15 @@ local privateattribute = attributes.private
-- more complex than an average font) but I need proper examples of all cases, not
-- of only some.
-local state = privateattribute('state')
-local markbase = privateattribute('markbase')
-local markmark = privateattribute('markmark')
-local markdone = privateattribute('markdone') -- assigned at the injection end
-local cursbase = privateattribute('cursbase')
-local curscurs = privateattribute('curscurs')
-local cursdone = privateattribute('cursdone')
-local kernpair = privateattribute('kernpair')
-local ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+local a_state = privateattribute('state')
+local a_markbase = privateattribute('markbase')
+local a_markmark = privateattribute('markmark')
+local a_markdone = privateattribute('markdone') -- assigned at the injection end
+local a_cursbase = privateattribute('cursbase')
+local a_curscurs = privateattribute('curscurs')
+local a_cursdone = privateattribute('cursdone')
+local a_kernpair = privateattribute('kernpair')
+local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
local injections = nodes.injections
local setmark = injections.setmark
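The renaming to a_state, a_ligacomp and so on goes together with dropping node.set_attribute and node.has_attribute in favour of indexing nodes by attribute number, as later hunks show (start[a_ligacomp] = ...). A minimal sketch of that access pattern, with a made-up attribute number (it assumes a LuaTeX where glyph nodes accept this kind of indexing):

    local a_ligacomp = 42 -- hypothetical attribute number, for this sketch only

    local function setligacomp(n, index)
        n[a_ligacomp] = index          -- was: node.set_attribute(n, a_ligacomp, index)
    end

    local function getligacomp(n)
        return n[a_ligacomp]           -- was: node.has_attribute(n, a_ligacomp)
    end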
@@ -229,8 +237,7 @@ local onetimemessage = fonts.loggers.onetimemessage
otf.defaultnodealternate = "none" -- first last
--- we share some vars here, after all, we have no nested lookups and
--- less code
+-- we share some vars here, after all, we have no nested lookups and less code
local tfmdata = false
local characters = false
@@ -245,9 +252,6 @@ local handlers = { }
local rlmode = 0
local featurevalue = false
--- we cannot optimize with "start = first_glyph(head)" because then we don't
--- know which rlmode we're in which messes up cursive handling later on
---
-- head is always a whatsit so we can safely assume that head is not changed
-- we use this for special testing and documentation
@@ -267,203 +271,238 @@ local function logwarning(...)
report_direct(...)
end
-local function gref(n)
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
if type(n) == "number" then
local description = descriptions[n]
local name = description and description.name
if name then
- return format("U+%05X (%s)",n,name)
+ return f_uniname(n,name)
else
- return format("U+%05X",n)
+ return f_unicode(n)
end
- elseif not n then
- return "<error in tracing>"
- else
+ elseif n then
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
if tonumber(ni) then -- later we will start at 2
local di = descriptions[ni]
- num[i] = format("U+%05X",ni)
- nam[i] = di and di.name or "?"
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
end
end
- return format("%s (%s)",concat(num," "), concat(nam," "))
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
end
end
-local function cref(kind,chainname,chainlookupname,lookupname,index)
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
if index then
- return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
elseif lookupname then
- return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
elseif chainlookupname then
- return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
elseif chainname then
- return format("feature %s, chain %s",kind,chainname)
+ return formatters["feature %a, chain %a"](kind,chainname)
else
- return format("feature %s",kind)
+ return formatters["feature %a"](kind)
end
end
local function pref(kind,lookupname)
- return format("feature %s, lookup %s",kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
end
--- we can assume that languages that use marks are not hyphenated
--- we can also assume that at most one discretionary is present
-
-local function markstoligature(kind,lookupname,start,stop,char)
- local n = copy_node(start)
- local keep = start
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = g.components
+ if components then
+ g.components = nil
+ local n = copy_node(g)
+ g.components = components
+ return n
+ else
+ return copy_node(g)
end
- start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, ligature_code, start
- return keep
end
-local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
- if start == stop then
- start.char = char
- return start
- elseif discfound then
- -- print("start->stop",nodes.tosequence(start,stop))
- local components = start.components
- if components then
- flush_node_list(components)
- start.components = nil
- end
- local lignode = copy_node(start)
- lignode.font = start.font
- lignode.char = char
- lignode.subtype = ligature_code
- local next = stop.next
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and start.char == char then
+ return head, start
+ else
local prev = start.prev
- stop.next = nil
+ local next = stop.next
start.prev = nil
- lignode.components = start
- -- print("lignode",nodes.tosequence(lignode))
- -- print("components",nodes.tosequence(lignode.components))
- prev.next = lignode
+ stop.next = nil
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start
+ if prev then
+ prev.next = base
+ end
if next then
- next.prev = lignode
+ next.prev = base
end
- lignode.next = next
- lignode.prev = prev
- -- print("start->end",nodes.tosequence(start))
- return lignode
+ base.next = next
+ base.prev = prev
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in, for instance,
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In a next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
+
+local function getcomponentindex(start)
+ if start.id ~= glyph_code then
+ return 0
+ elseif start.subtype == ligature_code then
+ local i = 0
+ local components = start.components
+ while components do
+ i = i + getcomponentindex(components)
+ components = components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
else
- -- start is the ligature
+ return 0
+ end
+end
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and start.char == char then
+ start.char = char
+ return head, start
+ end
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start -- start can have components
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ if not discfound then
local deletemarks = markflag ~= "mark"
- local n = copy_node(start)
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev = nil
- stop.next = nil
- current.char = char
- current.subtype = ligature_code
- current.components = start
- local head = current
- -- this is messy ... we should get rid of the components eventually
- local i = 0 -- is index of base
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
while start do
- if not marks[start.char] then
- i = i + 1
+ local char = start.char
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
elseif not deletemarks then -- quite fishy
- set_attribute(start,ligacomp,i)
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
end
- head, current = insert_node_after(head,current,copy_node(start))
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
end
start = start.next
end
- start = current.next
- while start and start.id == glyph_code do
- if marks[start.char] then
- set_attribute(start,ligacomp,i)
+ local start = components
+ while start and start.id == glyph_code do -- hm, is id test needed ?
+ local char = start.char
+ if marks[char] then
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
end
else
break
end
start = start.next
end
- --
- -- we do need components in funny kerning mode but maybe I can better reconstruct then
- -- as we do have the font components info available; removing components makes the
- -- previous code much simpler
- --
- -- flush_node_list(head.components)
- return head
end
+ return head, base
end
-function handlers.gsub_single(start,kind,lookupname,replacement)
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
end
start.char = replacement
- return start, true
+ return head, start, true
end
-local function get_alternative_glyph(start,alternatives,value)
- -- needs checking: (global value, brrr)
- local choice = nil
- local n = #alternatives
- local char = start.char
- --
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
if value == "random" then
local r = random(1,n)
- value, choice = format("random, choice %s",r), alternatives[r]
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
elseif value == "first" then
- value, choice = format("first, choice %s",1), alternatives[1]
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
elseif value == "last" then
- value, choice = format("last, choice %s",n), alternatives[n]
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
else
value = tonumber(value)
if type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
elseif value > n then
local defaultalt = otf.defaultnodealternate
if defaultalt == "first" then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
+ return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
elseif defaultalt == "last" then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
else
- value, choice = format("no %s variants, ignoring",value), false
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value == 0 then
- value, choice = format("choice %s (no change)",value), char
+ return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
- value, choice = format("choice %s",value), alternatives[value]
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
end
end
- return choice
end
-local function multiple_glyphs(start,multiple) -- marks ?
+local function multiple_glyphs(head,start,multiple) -- marks ?
local nofmultiples = #multiple
if nofmultiples > 0 then
start.char = multiple[1]
if nofmultiples > 1 then
local sn = start.next
for k=2,nofmultiples do -- todo: use insert_node
- local n = copy_node(start)
+ local n = copy_node(start) -- ignore components
n.char = multiple[k]
n.next = sn
n.prev = start
@@ -474,45 +513,45 @@ local function multiple_glyphs(start,multiple) -- marks ?
start = n
end
end
- return start, true
+ return head, start, true
else
if trace_multiples then
logprocess("no multiple for %s",gref(start.char))
end
- return start, false
+ return head, start, false
end
end
-function handlers.gsub_alternate(start,kind,lookupname,alternative,sequence)
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice = get_alternative_glyph(start,alternative,value)
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(char),gref(choice),choice)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
end
start.char = choice
else
if trace_alternatives then
- logwarning("%s: no variant %s for %s",pref(kind,lookupname),tostring(value),gref(char))
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
end
end
- return start, true
+ return head, start, true
end
-function handlers.gsub_multiple(start,kind,lookupname,multiple)
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
if trace_multiples then
logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
end
- return multiple_glyphs(start,multiple)
+ return multiple_glyphs(head,start,multiple)
end
-function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence)
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local s, stop, discfound = start.next, nil, false
local startchar = start.char
if marks[startchar] then
while s do
local id = s.id
- if id == glyph_code and s.subtype<256 and s.font == currentfont then
+ if id == glyph_code and s.font == currentfont and s.subtype<256 then
local lg = ligature[s.char]
if lg then
stop = s
@@ -530,12 +569,12 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence)
if lig then
if trace_ligatures then
local stopchar = stop.char
- start = markstoligature(kind,lookupname,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
- start = markstoligature(kind,lookupname,start,stop,lig)
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
end
- return start, true
+ return head, start, true
else
-- ok, goto next lookup
end
@@ -574,18 +613,18 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence)
if lig then
if trace_ligatures then
local stopchar = stop.char
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ head, start = toligature(kind,lookupname,head, start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
+ head, start = toligature(kind,lookupname,head, start,stop,lig,skipmark,discfound)
end
- return start, true
+ return head, start, true
else
-- ok, goto next lookup
end
end
end
- return start, false
+ return head, start, false
end
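A pattern that runs through all of these handler changes: every handler now receives the list head and returns it, because building a ligature can replace the first glyph of the list with a freshly copied base node. A sketch of how a caller has to thread the head through (illustrative only; this is not the actual font-otn feature loop and the simplified handler signature is an assumption):

    local function featureloop(head, steps)
        -- steps: ordered list of functions(head, start) -> head, start, done
        local start = head
        while start do
            for i = 1, #steps do
                local done
                head, start, done = steps[i](head, start)  -- head may have been replaced
                if done then
                    break
                end
            end
            start = start and start.next
        end
        return head
    end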
--[[ldx--
@@ -593,16 +632,16 @@ end
we need to explicitly test for basechar, baselig and basemark entries.</p>
--ldx]]--
-function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
local markchar = start.char
if marks[markchar] then
local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
basechar = base.char
if not marks[basechar] then
break
@@ -611,7 +650,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
if trace_bugs then
logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
end
end
@@ -629,10 +668,10 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
if ma then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- return start, true
+ return head, start, true
end
end
end
@@ -650,20 +689,20 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
elseif trace_bugs then
logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
-function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
local markchar = start.char
if marks[markchar] then
local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
basechar = base.char
if not marks[basechar] then
break
@@ -672,11 +711,11 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if trace_bugs then
logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
end
end
- local index = has_attribute(start,ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -692,10 +731,10 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if ba then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
end
- return start, true
+ return head, start, true
end
end
end
@@ -715,20 +754,17 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
elseif trace_bugs then
logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
-function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
local markchar = start.char
if marks[markchar] then
local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = has_attribute(base,ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
base = base.prev
else
@@ -736,7 +772,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
end
end
end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
local basechar = base.char
local baseanchors = descriptions[basechar]
if baseanchors then
@@ -751,10 +787,10 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
if ma then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- return start,true
+ return head, start, true
end
end
end
@@ -773,11 +809,11 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
elseif trace_bugs then
logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
end
- return start,false
+ return head, start, false
end
-function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and has_attribute(start,cursbase)
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
local done = false
local startchar = start.char
@@ -787,7 +823,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
else
local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
@@ -806,7 +842,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
if exit then
local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
done = true
break
@@ -823,35 +859,35 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
end
end
- return start, done
+ return head, start, done
else
if trace_cursive and trace_details then
logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
- return start, false
+ return head, start, false
end
end
-function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
local startchar = start.char
local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
- return start, false
+ return head, start, false
end
-function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
-- todo: kerns in components of ligatures
local snext = start.next
if not snext then
- return start, false
+ return head, start, false
else
local prev, done = start, false
local factor = tfmdata.parameters.factor
local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
@@ -868,14 +904,14 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
else -- wrong ... position has different entries
@@ -902,7 +938,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
break
end
end
- return start, done
+ return head, start, done
end
end
@@ -935,21 +971,21 @@ local logwarning = report_chain
-- We could share functions but that would lead to extra function calls with many
-- arguments, redundant tests and confusing messages.
-function chainprocs.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
+ return head, start, false
end
-function chainmores.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
+ return head, start, false
end
-- The reversesub is a special case, which is why we need to store the replacements
-- in a bit weird way. There is no lookup and the replacement comes from the lookup
-- itself. It is meant mostly for dealing with Urdu.
-function chainprocs.reversesub(start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
local char = start.char
local replacement = replacements[char]
if replacement then
@@ -957,9 +993,9 @@ function chainprocs.reversesub(start,stop,kind,chainname,currentcontext,lookupha
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
start.char = replacement
- return start, true
+ return head, start, true
else
- return start, false
+ return head, start, false
end
end
@@ -987,6 +1023,10 @@ local function delete_till_stop(start,stop,ignoremarks) -- keeps start
repeat -- start x x m x x stop => start m
local next = start.next
if not marks[next.char] then
+ local components = next.components
+ if components then -- probably not needed
+ flush_node_list(components)
+ end
delete_node(start,next)
end
n = n + 1
@@ -994,6 +1034,10 @@ local function delete_till_stop(start,stop,ignoremarks) -- keeps start
else -- start x x x stop => start
repeat
local next = start.next
+ local components = next.components
+ if components then -- probably not needed
+ flush_node_list(components)
+ end
delete_node(start,next)
n = n + 1
until next == stop
@@ -1006,7 +1050,7 @@ end
match.</p>
--ldx]]--
-function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
-- todo: marks ?
local current = start
local subtables = currentlookup.subtables
@@ -1024,7 +1068,7 @@ function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuph
end
else
replacement = replacement[currentchar]
- if not replacement then
+ if not replacement or replacement == "" then
if trace_bugs then
logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
end
@@ -1035,14 +1079,14 @@ function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuph
current.char = replacement
end
end
- return start, true
+ return head, start, true
elseif current == stop then
break
else
current = current.next
end
end
- return start, false
+ return head, start, false
end
chainmores.gsub_single = chainprocs.gsub_single
@@ -1052,7 +1096,7 @@ chainmores.gsub_single = chainprocs.gsub_single
the match.</p>
--ldx]]--
-function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
delete_till_stop(start,stop) -- we could pass ignoremarks as #3 ..
local startchar = start.char
local subtables = currentlookup.subtables
@@ -1064,7 +1108,7 @@ function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,looku
end
else
replacements = replacements[startchar]
- if not replacements then
+ if not replacements or replacements == "" then
if trace_bugs then
logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
end
@@ -1072,17 +1116,12 @@ function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,looku
if trace_multiples then
logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
end
- return multiple_glyphs(start,replacements)
+ return multiple_glyphs(head,start,replacements)
end
end
- return start, false
+ return head, start, false
end
--- function chainmores.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_multiple not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
chainmores.gsub_multiple = chainprocs.gsub_multiple
--[[ldx--
@@ -1097,7 +1136,7 @@ chainmores.gsub_multiple = chainprocs.gsub_multiple
-- marks come last anyway
-- are there cases where we need to delete the mark
-function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local current = start
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
@@ -1113,36 +1152,31 @@ function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,look
else
alternatives = alternatives[currentchar]
if alternatives then
- local choice = get_alternative_glyph(current,alternatives,value)
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(char),gref(choice),choice)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
start.char = choice
else
if trace_alternatives then
- logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),tostring(value),gref(char))
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
end
end
elseif trace_bugs then
- logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
end
end
- return start, true
+ return head, start, true
elseif current == stop then
break
else
current = current.next
end
end
- return start, false
+ return head, start, false
end
--- function chainmores.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_alternate not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
chainmores.gsub_alternate = chainprocs.gsub_alternate
--[[ldx--
@@ -1151,7 +1185,7 @@ this function (move code inline and handle the marks by a separate function). We
assume rather stupid ligatures (no complex disc nodes).</p>
--ldx]]--
-function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1203,13 +1237,13 @@ function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,looku
end
if trace_ligatures then
if start == stop then
- logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
end
end
- start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
- return start, true, nofreplacements
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
elseif trace_bugs then
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
@@ -1219,12 +1253,12 @@ function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,looku
end
end
end
- return start, false, 0
+ return head, start, false, 0
end
chainmores.gsub_ligature = chainprocs.gsub_ligature
-function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
@@ -1235,12 +1269,12 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,look
end
if markanchors then
local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
basechar = base.char
if not marks[basechar] then
break
@@ -1249,7 +1283,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,look
if trace_bugs then
logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
end
end
@@ -1264,10 +1298,10 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,look
if ma then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- return start, true
+ return head, start, true
end
end
end
@@ -1285,10 +1319,10 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,look
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
-function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
@@ -1299,12 +1333,12 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
end
if markanchors then
local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
basechar = base.char
if not marks[basechar] then
break
@@ -1313,12 +1347,12 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if trace_bugs then
logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
end
- return start, false
+ return head, start, false
end
end
end
-- todo: like marks a ligatures hash
- local index = has_attribute(start,ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1332,10 +1366,10 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if ba then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
end
- return start, true
+ return head, start, true
end
end
end
@@ -1354,13 +1388,13 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
-function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local markchar = start.char
if marks[markchar] then
---~ local alreadydone = markonce and has_attribute(start,markmark)
+--~ local alreadydone = markonce and start[a_markmark]
--~ if not alreadydone then
-- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
local subtables = currentlookup.subtables
@@ -1371,13 +1405,10 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,look
end
if markanchors then
local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while (base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp)) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = has_attribute(base,ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
base = base.prev
else
@@ -1385,7 +1416,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,look
end
end
end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
local basechar = base.char
local baseanchors = descriptions[basechar].anchors
if baseanchors then
@@ -1398,10 +1429,10 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,look
if ma then
local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- return start, true
+ return head, start, true
end
end
end
@@ -1422,13 +1453,11 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,look
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
- return start, false
+ return head, start, false
end
--- ! ! ! untested ! ! !
-
-function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and has_attribute(start,cursbase)
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
local startchar = start.char
local subtables = currentlookup.subtables
@@ -1445,7 +1474,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookup
end
else
local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
@@ -1464,7 +1493,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookup
if exit then
local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
done = true
break
@@ -1481,18 +1510,18 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookup
end
end
end
- return start, done
+ return head, start, done
else
if trace_cursive and trace_details then
logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
- return start, false
+ return head, start, false
end
end
- return start, false
+ return head, start, false
end
-function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- untested .. needs checking for the new model
local startchar = start.char
local subtables = currentlookup.subtables
@@ -1503,16 +1532,16 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,lookuph
if kerns then
local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
end
end
- return start, false
+ return head, start, false
end
-- when machines become faster i will make a shared function
-function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
local snext = start.next
if snext then
@@ -1526,7 +1555,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphas
local lookuptype = lookuptypes[lookupname]
local prev, done = start, false
local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
@@ -1542,14 +1571,14 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphas
local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
else
@@ -1576,11 +1605,11 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphas
break
end
end
- return start, done
+ return head, start, done
end
end
end
- return start, false
+ return head, start, false
end
-- what pointer to return, spec says stop
@@ -1593,13 +1622,13 @@ end
local function show_skip(kind,chainname,char,ck,class)
if ck[9] then
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
else
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
end
end
-local function normal_handle_contextchain(start,kind,chainname,contexts,sequence,lookuphash)
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
local flags = sequence.flags
local done = false
@@ -1619,7 +1648,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph_code and current.subtype<256 and current.font == currentfont and seq[1][current.char]
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -1639,7 +1668,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
if last then
local id = last.id
if id == glyph_code then
- if last.subtype<256 and last.font == currentfont then
+ if last.font == currentfont and last.subtype<256 then
local char = last.char
local ccd = descriptions[char]
if ccd then
@@ -1689,7 +1718,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
if prev then
local id = prev.id
if id == glyph_code then
- if prev.subtype<256 and prev.font == currentfont then -- normal char
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
local char = prev.char
local ccd = descriptions[char]
if ccd then
@@ -1722,7 +1751,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
break
end
prev = prev.prev
- elseif seq[n][32] then -- somehat special, as zapfino can have many preceding spaces
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
match = false
@@ -1750,7 +1779,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
if current then
local id = current.id
if id == glyph_code then
- if current.subtype<256 and current.font == currentfont then -- normal char
+ if current.font == currentfont and current.subtype<256 then -- normal char
local char = current.char
local ccd = descriptions[char]
if ccd then
@@ -1808,10 +1837,10 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
local char = start.char
if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
end
end
@@ -1825,7 +1854,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
if chainlookup then
local cp = chainprocs[chainlookup.type]
if cp then
- start, done = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ head, start, done = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
else
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
end
@@ -1856,7 +1885,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local cp = chainlookup and chainmores[chainlookup.type]
if cp then
local ok, n
- start, ok, n = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
-- messy since last can be changed !
if ok then
done = true
@@ -1870,13 +1899,17 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup and chainlookup.type or "?")
i = i + 1
end
- start = start.next
+ if start then
+ start = start.next
+ else
+ -- weird
+ end
until i > nofchainlookups
end
else
local replacements = ck[7]
if replacements then
- start, done = chainprocs.reversesub(start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
else
done = true -- can be meant to be skipped
if trace_contexts then
@@ -1886,7 +1919,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
end
end
- return start, done
+ return head, start, done
end
-- Because we want to keep this elsewhere (an because speed is less an issue) we
@@ -1899,7 +1932,7 @@ local verbose_handle_contextchain = function(font,...)
end
otf.chainhandlers = {
- normal = normal_handle_contextchain,
+ normal = normal_handle_contextchain,
verbose = verbose_handle_contextchain,
}
@@ -1910,7 +1943,7 @@ function otf.setcontextchain(method)
end
handlers.contextchain = normal_handle_contextchain
else
- logwarning("installing contextchain handler '%s'",method)
+ logwarning("installing contextchain handler %a",method)
local handler = otf.chainhandlers[method]
handlers.contextchain = function(...)
return handler(currentfont,...) -- hm, get rid of ...
@@ -1941,7 +1974,7 @@ local function report_missing_cache(typ,lookup)
local t = f[typ] if not t then t = { } f[typ] = t end
if not t[lookup] then
t[lookup] = true
- logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.properties.fullname)
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
end
end
@@ -1962,12 +1995,7 @@ end)
-- fonts.hashes.lookups = lookuphashes
-local special_attributes = {
- init = 1,
- medi = 2,
- fina = 3,
- isol = 4
-}
+local autofeatures = fonts.analyzers.features -- was: constants
local function initialize(sequence,script,language,enabled)
local features = sequence.features
@@ -1977,7 +2005,7 @@ local function initialize(sequence,script,language,enabled)
if valid then
local languages = scripts[script] or scripts[wildcard]
if languages and (languages[language] or languages[wildcard]) then
- return { valid, special_attributes[kind] or false, sequence.chain or 0, kind, sequence }
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
end
end
end
@@ -1985,7 +2013,7 @@ local function initialize(sequence,script,language,enabled)
return false
end
-function otf.dataset(tfmdata,sequences,font) -- generic variant, overloaded in context
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
local shared = tfmdata.shared
local properties = tfmdata.properties
local language = properties.language or "dflt"
@@ -2003,13 +2031,24 @@ function otf.dataset(tfmdata,sequences,font) -- generic variant, overloaded in c
end
local rl = rs[language]
if not rl then
- rl = { }
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
rs[language] = rl
- setmetatableindex(rl, function(t,k)
- local v = enabled and initialize(sequences[k],script,language,enabled)
- t[k] = v
- return v
- end)
+ local sequences = tfmdata.resources.sequences
+-- setmetatableindex(rl, function(t,k)
+-- if type(k) == "number" then
+-- local v = enabled and initialize(sequences[k],script,language,enabled)
+-- t[k] = v
+-- return v
+-- end
+-- end)
+for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+end
end
return rl
end
@@ -2030,6 +2069,8 @@ end
-- start = start.next
-- end
+-- there will be a new direction parser (pre-parsed etc)
+
local function featuresprocessor(head,font,attr)
local lookuphash = lookuphashes[font] -- we can also check sequences here
@@ -2057,7 +2098,7 @@ local function featuresprocessor(head,font,attr)
local sequences = resources.sequences
local done = false
- local datasets = otf.dataset(tfmdata,sequences,font,attr)
+ local datasets = otf.dataset(tfmdata,font,attr)
local dirstack = { } -- could move outside function
@@ -2066,12 +2107,20 @@ local function featuresprocessor(head,font,attr)
-- to keep track of directions anyway. Also at some point I want to play with
-- font interactions and then we do need the full sweeps.
- for s=1,#sequences do
- local dataset = datasets[s]
- if dataset then
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- if featurevalue then
- local sequence = sequences[s] -- also dataset[5]
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+ -- so that multiple cases are also covered.)
+
+-- for s=1,#sequences do
+-- local dataset = datasets[s]
+-- if dataset then
+-- featurevalue = dataset[1] -- todo: pass to function instead of using a global
+-- if featurevalue then -- never false
+
+for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
local rlparmode = 0
local topstack = 0
local success = false
@@ -2087,8 +2136,8 @@ local function featuresprocessor(head,font,attr)
while start do
local id = start.id
if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
a = a == attr
else
@@ -2101,7 +2150,7 @@ local function featuresprocessor(head,font,attr)
if lookupcache then
local lookupmatch = lookupcache[start.char]
if lookupmatch then
- start, success = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if success then
break
end
@@ -2135,19 +2184,19 @@ local function featuresprocessor(head,font,attr)
while start do
local id = start.id
if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or has_attribute(start,state,attribute)
+ a = not attribute or start[a_state] == attribute
end
if a then
local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- sequence kan weg
local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
success = true
end
@@ -2156,6 +2205,8 @@ local function featuresprocessor(head,font,attr)
else
start = start.next
end
+ elseif id == math_code then
+ start = end_of_math(start).next
else
start = start.next
end
@@ -2178,7 +2229,7 @@ local function featuresprocessor(head,font,attr)
rlmode = rlparmode
end
if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
local dir = start.dir
@@ -2191,10 +2242,12 @@ local function featuresprocessor(head,font,attr)
end
rlmode = rlparmode
if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
else
start = start.next
end
@@ -2204,12 +2257,12 @@ local function featuresprocessor(head,font,attr)
while start do
local id = start.id
if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or has_attribute(start,state,attribute)
+ a = not attribute or start[a_state] == attribute
end
if a then
for i=1,ns do
@@ -2220,7 +2273,7 @@ local function featuresprocessor(head,font,attr)
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
success = true
break
@@ -2256,7 +2309,7 @@ local function featuresprocessor(head,font,attr)
rlmode = rlparmode
end
if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
local dir = start.dir
@@ -2269,10 +2322,12 @@ local function featuresprocessor(head,font,attr)
end
rlmode = rlparmode
if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
else
start = start.next
end
@@ -2285,8 +2340,12 @@ local function featuresprocessor(head,font,attr)
if trace_steps then -- ?
registerstep(head)
end
- end
- end
+
+-- end
+-- else
+-- -- report_process("warning, no dataset %a",s)
+-- end
+
end
return head, done
end
@@ -2450,10 +2509,10 @@ local function prepare_contextchains(tfmdata)
local format = lookupdata.format
local validformat = valid[format]
if not validformat then
- report_prepare("unsupported format %s",format)
+ report_prepare("unsupported format %a",format)
elseif not validformat[lookuptype] then
-- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
else
local contexts = lookuphash[lookupname]
if not contexts then
@@ -2469,7 +2528,7 @@ local function prepare_contextchains(tfmdata)
local replacements = rule.replacements
local sequence = { }
local nofsequences = 0
- -- Wventually we can store start, stop and sequence in the cached file
+ -- Eventually we can store start, stop and sequence in the cached file
-- but then less sharing takes place so best not do that without a lot
-- of profiling so let's forget about it.
if before then
@@ -2509,7 +2568,7 @@ local function prepare_contextchains(tfmdata)
-- no rules
end
else
- report_prepare("missing lookuptype for %s",lookupname)
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
end
end
end
@@ -2530,7 +2589,7 @@ local function featuresinitializer(tfmdata,value)
prepare_lookups(tfmdata)
properties.initialized = true
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-starttime,tfmdata.properties.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
end
end
end
@@ -2548,3 +2607,7 @@ registerotffeature {
node = featuresprocessor,
}
}
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
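The hunks above switch every chain handler to a head-threading convention: each handler now receives the node-list head as its first argument and returns head, start, done, so a handler that replaces the first node of the list (ligature building, reordering) can hand the new head back to the caller. A minimal sketch of an extra handler written against that convention — the lookup type name "gsub_example" is purely hypothetical and not part of the patch:

local otf = fonts.handlers.otf

-- hedged sketch only: follows the handler signature used in featuresprocessor above
otf.handlers.gsub_example = function(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,step)
    local done = false
    -- inspect or rewrite nodes starting at 'start'; if the first node of the
    -- list is replaced, assign the replacement to 'head' before returning
    return head, start, done
end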
diff --git a/Master/texmf-dist/tex/context/base/font-otp.lua b/Master/texmf-dist/tex/context/base/font-otp.lua
index f019ade7f48..8a37c5cdf5c 100644
--- a/Master/texmf-dist/tex/context/base/font-otp.lua
+++ b/Master/texmf-dist/tex/context/base/font-otp.lua
@@ -7,86 +7,236 @@ if not modules then modules = { } end modules ['font-otp'] = {
}
-- todo: pack math (but not that much to share)
+-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
local next, type = next, type
local sort, concat = table.sort, table.concat
+local sortedhash = table.sortedhash
+local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+
local report_otf = logs.reporter("fonts","otf loading")
-- also used in other scripts so we need to check some tables:
fonts = fonts or { }
-fonts.handlers = fonts.handlers or { }
-local handlers = fonts.handlers
-handlers.otf = handlers.otf or { }
-local otf = handlers.otf
-otf.enhancers = otf.enhancers or { }
-local enhancers = otf.enhancers
-otf.glists = otf.glists or { "gsub", "gpos" }
-local glists = otf.glists
+
+local handlers = fonts.handlers or { }
+fonts.handlers = handlers
+
+local otf = handlers.otf or { }
+handlers.otf = otf
+
+local enhancers = otf.enhancers or { }
+otf.enhancers = enhancers
+
+local glists = otf.glists or { "gsub", "gpos" }
+otf.glists = glists
local criterium = 1
local threshold = 0
-local function tabstr(t)
- local s, n = { }, 0
+local function tabstr_normal(t)
+ local s = { }
+ local n = 0
for k, v in next, t do
n = n + 1
if type(v) == "table" then
- s[n] = k .. "={" .. tabstr(v) .. "}"
+ s[n] = k .. ">" .. tabstr_normal(v)
elseif v == true then
- s[n] = k .. "=true"
+ s[n] = k .. "+" -- "=true"
elseif v then
s[n] = k .. "=" .. v
else
- s[n] = k .. "=false"
+ s[n] = k .. "-" -- "=false"
end
end
- sort(s)
- return concat(s,",")
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_flat(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ s[n] = k .. "=" .. v
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
end
+local function tabstr_mixed(t) -- indexed
+ local s = { }
+ local n = #t
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ local k = t[1]
+ if k == true then
+ return "++" -- we need to distinguish from "true"
+ elseif k == false then
+ return "--" -- we need to distinguish from "false"
+ else
+ return tostring(k) -- number or string
+ end
+ else
+ for i=1,n do
+ local k = t[i]
+ if k == true then
+ s[i] = "++" -- we need to distinguish from "true"
+ elseif k == false then
+ s[i] = "--" -- we need to distinguish from "false"
+ else
+ s[i] = k -- number or string
+ end
+ end
+ return concat(s,",")
+ end
+end
+
+local function tabstr_boolean(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if v then
+ s[n] = k .. "+"
+ else
+ s[n] = k .. "-"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+-- tabstr_boolean_x = tabstr_boolean
+
+-- tabstr_boolean = function(t)
+-- local a = tabstr_normal(t)
+-- local b = tabstr_boolean_x(t)
+-- print(a)
+-- print(b)
+-- return b
+-- end
+
local function packdata(data)
if data then
+ -- stripdata(data)
local h, t, c = { }, { }, { }
local hh, tt, cc = { }, { }, { }
local nt, ntt = 0, 0
- local function pack_1(v,indexed)
- -- v == table
- local tag = indexed and concat(v," ") or tabstr(v)
+ local function pack_normal(v)
+ local tag = tabstr_normal(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag = tabstr_flat(v)
local ht = h[tag]
- if not ht then
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
nt = nt + 1
- ht = nt
- t[ht] = v
- h[tag] = ht
- c[ht] = 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag = tabstr_boolean(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag = concat(v," ")
+ local ht = h[tag]
+ if ht then
c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag = tabstr_mixed(v)
+-- print(">>>",tag)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
end
- return ht
end
- local function pack_2(v,indexed)
+ local function pack_final(v)
-- v == number
if c[v] <= criterium then
return t[v]
else
-- compact hash
local hv = hh[v]
- if not hv then
+ if hv then
+ return hv
+ else
ntt = ntt + 1
- hv = ntt
- tt[hv] = t[v]
- hh[v] = hv
- cc[hv] = c[v]
+ tt[ntt] = t[v]
+ hh[v] = ntt
+ cc[ntt] = c[v]
+ return ntt
end
- return hv
end
end
local function success(stage,pass)
if nt == 0 then
- if trace_loading then
+ if trace_loading or trace_packing then
report_otf("pack quality: nothing to pack")
end
return false
@@ -114,35 +264,45 @@ local function packdata(data)
end
data.tables = tt
end
- if trace_loading then
+ if trace_loading or trace_packing then
report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
end
return true
else
- if trace_loading then
+ if trace_loading or trace_packing then
report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
end
return false
end
end
+ local function packers(pass)
+ if pass == 1 then
+ return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
+ else
+ return pack_final, pack_final, pack_final, pack_final, pack_final
+ end
+ end
local resources = data.resources
local lookuptypes = resources.lookuptypes
for pass=1,2 do
- local pack = (pass == 1 and pack_1) or pack_2
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
for unicode, description in next, data.descriptions do
local boundingbox = description.boundingbox
if boundingbox then
- description.boundingbox = pack(boundingbox,true)
+ description.boundingbox = pack_indexed(boundingbox)
end
local slookups = description.slookups
if slookups then
for tag, slookup in next, slookups do
local what = lookuptypes[tag]
if what == "pair" then
- local t = slookup[2] if t then slookup[2] = pack(t,true) end
- local t = slookup[3] if t then slookup[3] = pack(t,true) end
+ local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
+ local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
elseif what ~= "substitution" then
- slookups[tag] = pack(slookup)
+ slookups[tag] = pack_indexed(slookup) -- true is new
end
end
end
@@ -153,12 +313,12 @@ local function packdata(data)
if what == "pair" then
for i=1,#mlookup do
local lookup = mlookup[i]
- local t = lookup[2] if t then lookup[2] = pack(t,true) end
- local t = lookup[3] if t then lookup[3] = pack(t,true) end
+ local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
+ local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
end
elseif what ~= "substitution" then
for i=1,#mlookup do
- mlookup[i] = pack(mlookup[i]) -- true
+ mlookup[i] = pack_indexed(mlookup[i]) -- true is new
end
end
end
@@ -166,7 +326,7 @@ local function packdata(data)
local kerns = description.kerns
if kerns then
for tag, kern in next, kerns do
- kerns[tag] = pack(kern)
+ kerns[tag] = pack_flat(kern)
end
end
local math = description.math
@@ -174,7 +334,7 @@ local function packdata(data)
local kerns = math.kerns
if kerns then
for tag, kern in next, kerns do
- kerns[tag] = pack(kern)
+ kerns[tag] = pack_normal(kern)
end
end
end
@@ -184,12 +344,14 @@ local function packdata(data)
if what == "baselig" then
for _, a in next, anchor do
for k=1,#a do
- a[k] = pack(a[k])
+-- a[k] = pack_normal(a[k])
+ a[k] = pack_indexed(a[k])
end
end
else
for k, v in next, anchor do
- anchor[k] = pack(v)
+-- anchor[k] = pack_normal(v)
+ anchor[k] = pack_indexed(v)
end
end
end
@@ -200,38 +362,28 @@ local function packdata(data)
for _, lookup in next, lookups do
local rules = lookup.rules
if rules then
- for i=1,#rules do -- was next loop
+ for i=1,#rules do
local rule = rules[i]
---~ local r = rule.before if r then for i=1,#r do r[i] = pack(r[i],true) end end
---~ local r = rule.after if r then for i=1,#r do r[i] = pack(r[i],true) end end
---~ local r = rule.current if r then for i=1,#r do r[i] = pack(r[i],true) end end
---~ local r = rule.replacements if r then rule.replacements = pack(r, true) end
---~ local r = rule.fore if r then rule.fore = pack(r, true) end
---~ local r = rule.back if r then rule.back = pack(r, true) end
---~ local r = rule.names if r then rule.names = pack(r, true) end
---~ local r = rule.lookups if r then rule.lookups = pack(r) end
- local r = rule.before if r then for i=1,#r do r[i] = pack(r[i]) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack(r[i]) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack(r[i]) end end
- local r = rule.replacements if r then rule.replacements = pack(r) end
- -- local r = rule.fore if r then rule.fore = pack(r) end
- -- local r = rule.back if r then rule.back = pack(r) end
- -- local r = rule.names if r then rule.names = pack(r) end
- local r = rule.lookups if r then rule.lookups = pack(r) end
+ local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
+ -- local r = rule.lookups if r then rule.lookups = pack_mixed (r) end -- can have false
+ local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
end
end
end
end
- local anchor_to_lookup = resources.anchor_to_lookup
+ local anchor_to_lookup = resources.anchor_to_lookup
if anchor_to_lookup then
for anchor, lookup in next, anchor_to_lookup do
- anchor_to_lookup[anchor] = pack(lookup)
+ anchor_to_lookup[anchor] = pack_normal(lookup)
end
end
local lookup_to_anchor = resources.lookup_to_anchor
if lookup_to_anchor then
for lookup, anchor in next, lookup_to_anchor do
- lookup_to_anchor[lookup] = pack(anchor)
+ lookup_to_anchor[lookup] = pack_normal(anchor)
end
end
local sequences = resources.sequences
@@ -239,16 +391,16 @@ local function packdata(data)
for feature, sequence in next, sequences do
local flags = sequence.flags
if flags then
- sequence.flags = pack(flags)
+ sequence.flags = pack_normal(flags)
end
local subtables = sequence.subtables
if subtables then
- sequence.subtables = pack(subtables)
+ sequence.subtables = pack_normal(subtables)
end
local features = sequence.features
if features then
for script, feature in next, features do
- features[script] = pack(feature)
+ features[script] = pack_normal(feature)
end
end
end
@@ -258,11 +410,11 @@ local function packdata(data)
for name, lookup in next, lookups do
local flags = lookup.flags
if flags then
- lookup.flags = pack(flags)
+ lookup.flags = pack_normal(flags)
end
local subtables = lookup.subtables
if subtables then
- lookup.subtables = pack(subtables)
+ lookup.subtables = pack_normal(subtables)
end
end
end
@@ -272,7 +424,7 @@ local function packdata(data)
local list = features[what]
if list then
for feature, spec in next, list do
- list[feature] = pack(spec)
+ list[feature] = pack_normal(spec)
end
end
end
@@ -283,27 +435,30 @@ local function packdata(data)
end
if nt > 0 then
for pass=1,2 do
- local pack = (pass == 1 and pack_1) or pack_2
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
for unicode, description in next, data.descriptions do
local kerns = description.kerns
if kerns then
- description.kerns = pack(kerns)
+ description.kerns = pack_normal(kerns)
end
local math = description.math
if math then
local kerns = math.kerns
if kerns then
- math.kerns = pack(kerns)
+ math.kerns = pack_normal(kerns)
end
end
local anchors = description.anchors
if anchors then
- description.anchors = pack(anchors)
+ description.anchors = pack_normal(anchors)
end
local mlookups = description.mlookups
if mlookups then
for tag, mlookup in next, mlookups do
- mlookups[tag] = pack(mlookup)
+ mlookups[tag] = pack_normal(mlookup)
end
end
end
@@ -314,9 +469,9 @@ local function packdata(data)
if rules then
for i=1,#rules do -- was next loop
local rule = rules[i]
- local r = rule.before if r then rule.before = pack(r) end
- local r = rule.after if r then rule.after = pack(r) end
- local r = rule.current if r then rule.current = pack(r) end
+ local r = rule.before if r then rule.before = pack_normal(r) end
+ local r = rule.after if r then rule.after = pack_normal(r) end
+ local r = rule.current if r then rule.current = pack_normal(r) end
end
end
end
@@ -324,7 +479,7 @@ local function packdata(data)
local sequences = resources.sequences
if sequences then
for feature, sequence in next, sequences do
- sequence.features = pack(sequence.features)
+ sequence.features = pack_normal(sequence.features)
end
end
if not success(2,pass) then
@@ -333,15 +488,15 @@ local function packdata(data)
end
for pass=1,2 do
- local pack = (pass == 1 and pack_1) or pack_2
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
for unicode, description in next, data.descriptions do
local slookups = description.slookups
if slookups then
- description.slookups = pack(slookups)
+ description.slookups = pack_normal(slookups)
end
local mlookups = description.mlookups
if mlookups then
- description.mlookups = pack(mlookups)
+ description.mlookups = pack_normal(mlookups)
end
end
end
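The packers introduced above all share one interning step: a table is serialized to a string tag, and either the slot already registered for that tag is reused (bumping its use count) or a new slot is created; only the serializer differs between the normal, flat, boolean, indexed and mixed variants. A stripped-down sketch of that pattern, with illustrative names that are not taken from the patch:

local concat = table.concat

local function makepacker(serialize)
    local hash, list, count = { }, { }, { }
    local n = 0
    return function(v)
        local tag  = serialize(v)
        local slot = hash[tag]
        if slot then
            count[slot] = count[slot] + 1 -- already seen, share the slot
        else
            n = n + 1
            list[n]   = v
            hash[tag] = n
            count[n]  = 1
            slot      = n
        end
        return slot
    end
end

-- e.g. a packer for indexed tables, mirroring pack_indexed above:
local pack_indexed = makepacker(function(v) return concat(v," ") end)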
diff --git a/Master/texmf-dist/tex/context/base/font-ott.lua b/Master/texmf-dist/tex/context/base/font-ott.lua
index 6c671d76ff4..8a5579c0212 100644
--- a/Master/texmf-dist/tex/context/base/font-ott.lua
+++ b/Master/texmf-dist/tex/context/base/font-ott.lua
@@ -1,9 +1,10 @@
-if not modules then modules = { } end modules ['font-otf'] = {
+if not modules then modules = { } end modules ['font-ott'] = {
version = 1.001,
comment = "companion to font-otf.lua (tables)",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ -- dataonly = true,
}
local type, next, tonumber, tostring, rawget, rawset = type, next, tonumber, tostring, rawget, rawset
@@ -16,11 +17,14 @@ local allocate = utilities.storage.allocate
local fonts = fonts
local otf = fonts.handlers.otf
-local tables = { }
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
+
+local tables = otf.tables or { }
otf.tables = tables
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
+local statistics = otf.statistics or { }
+otf.statistics = statistics
local scripts = allocate {
['arab'] = 'arabic',
@@ -577,26 +581,26 @@ local features = allocate {
['size'] = 'optical size',
['smcp'] = 'small capitals',
['smpl'] = 'simplified forms',
- ['ss01'] = 'stylistic set 1',
- ['ss02'] = 'stylistic set 2',
- ['ss03'] = 'stylistic set 3',
- ['ss04'] = 'stylistic set 4',
- ['ss05'] = 'stylistic set 5',
- ['ss06'] = 'stylistic set 6',
- ['ss07'] = 'stylistic set 7',
- ['ss08'] = 'stylistic set 8',
- ['ss09'] = 'stylistic set 9',
- ['ss10'] = 'stylistic set 10',
- ['ss11'] = 'stylistic set 11',
- ['ss12'] = 'stylistic set 12',
- ['ss13'] = 'stylistic set 13',
- ['ss14'] = 'stylistic set 14',
- ['ss15'] = 'stylistic set 15',
- ['ss16'] = 'stylistic set 16',
- ['ss17'] = 'stylistic set 17',
- ['ss18'] = 'stylistic set 18',
- ['ss19'] = 'stylistic set 19',
- ['ss20'] = 'stylistic set 20',
+ -- ['ss01'] = 'stylistic set 1',
+ -- ['ss02'] = 'stylistic set 2',
+ -- ['ss03'] = 'stylistic set 3',
+ -- ['ss04'] = 'stylistic set 4',
+ -- ['ss05'] = 'stylistic set 5',
+ -- ['ss06'] = 'stylistic set 6',
+ -- ['ss07'] = 'stylistic set 7',
+ -- ['ss08'] = 'stylistic set 8',
+ -- ['ss09'] = 'stylistic set 9',
+ -- ['ss10'] = 'stylistic set 10',
+ -- ['ss11'] = 'stylistic set 11',
+ -- ['ss12'] = 'stylistic set 12',
+ -- ['ss13'] = 'stylistic set 13',
+ -- ['ss14'] = 'stylistic set 14',
+ -- ['ss15'] = 'stylistic set 15',
+ -- ['ss16'] = 'stylistic set 16',
+ -- ['ss17'] = 'stylistic set 17',
+ -- ['ss18'] = 'stylistic set 18',
+ -- ['ss19'] = 'stylistic set 19',
+ -- ['ss20'] = 'stylistic set 20',
['ssty'] = 'script style', -- math
['subs'] = 'subscript',
['sups'] = 'superscript',
@@ -622,7 +626,11 @@ local features = allocate {
['trep'] = 'traditional tex replacements',
['tlig'] = 'traditional tex ligatures',
- ['ss'] = 'stylistic set %s',
+ ['ss..'] = 'stylistic set ..',
+ ['cv..'] = 'character variant ..',
+ ['js..'] = 'justification ..',
+
+ ["dv.."] = "devanagari ..",
}
local baselines = allocate {
@@ -635,16 +643,16 @@ local baselines = allocate {
['romn'] = 'roman baseline'
}
-local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end)
-local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end)
-
-local report_checks = logs.reporter("fonts","checks")
-
tables.scripts = scripts
tables.languages = languages
tables.features = features
tables.baselines = baselines
+local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end)
+local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end)
+
+local report_checks = logs.reporter("fonts","checks")
+
-- hm, we overload the metatables
if otffeatures.features then
@@ -684,6 +692,10 @@ setmetatableindex(verboselanguages, resolve)
setmetatableindex(verbosefeatures, resolve)
setmetatableindex(verbosebaselines, resolve)
+-- We could optimize the next lookups by using an extra metatable and storing
+-- already found values but in practice there are not that many lookups so
+-- it's never a bottleneck.
+
setmetatableindex(scripts, function(t,k)
if k then
k = lower(k)
@@ -699,7 +711,7 @@ setmetatableindex(scripts, function(t,k)
if v then
return v
elseif acceptscripts then
- report_checks("registering extra script: %s",k)
+ report_checks("registering extra script %a",k)
rawset(t,k,k)
return k
end
@@ -722,7 +734,7 @@ setmetatableindex(languages, function(t,k)
if v then
return v
elseif acceptlanguages then
- report_checks("registering extra languages: %s",k)
+ report_checks("registering extra language %a",k)
rawset(t,k,k)
return k
end
@@ -750,7 +762,12 @@ local function resolve(t,k)
if tag and dd then
local v = rawget(t,tag)
if v then
- return format(v,tonumber(dd))
+ return v -- return format(v,tonumber(dd)) -- old way
+ else
+ local v = rawget(t,tag.."..") -- nicer in overview
+ if v then
+ return (gsub(v,"%.%.",tonumber(dd))) -- new way
+ end
end
end
end
@@ -760,10 +777,10 @@ end
setmetatableindex(features, resolve)
local function assign(t,k,v)
- if k then
+ if k and v then
v = lower(v)
- rawset(t,k,v)
- rawset(features,gsub(v,"[^a-z0-9]",""),k)
+ rawset(t,k,v) -- rawset ?
+ -- rawset(features,gsub(v,"[^a-z0-9]",""),k) -- why ? old code
end
end
@@ -771,35 +788,93 @@ setmetatablenewindex(features, assign)
local checkers = {
rand = function(v)
- return v and "random"
+ return v == true and "random" or v
end
}
-function otf.features.normalize(features) -- no longer 'lang'
+-- Keep this:
+--
+-- function otf.features.normalize(features)
+-- if features then
+-- local h = { }
+-- for k, v in next, features do
+-- k = lower(k)
+-- if k == "language" then
+-- v = gsub(lower(v),"[^a-z0-9]","")
+-- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
+-- elseif k == "script" then
+-- v = gsub(lower(v),"[^a-z0-9]","")
+-- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
+-- else
+-- if type(v) == "string" then
+-- local b = is_boolean(v)
+-- if type(b) == "nil" then
+-- v = tonumber(v) or lower(v)
+-- else
+-- v = b
+-- end
+-- end
+-- if not rawget(features,k) then
+-- k = rawget(verbosefeatures,k) or k
+-- end
+-- local c = checkers[k]
+-- h[k] = c and c(v) or v
+-- end
+-- end
+-- return h
+-- end
+-- end
+
+-- inspect(fonts.handlers.otf.statistics.usedfeatures)
+
+if not storage then
+ return
+end
+
+local usedfeatures = statistics.usedfeatures or { }
+statistics.usedfeatures = usedfeatures
+
+table.setmetatableindex(usedfeatures, function(t,k) if k then local v = { } t[k] = v return v end end) -- table.autotable
+
+storage.register("fonts/otf/usedfeatures", usedfeatures, "fonts.handlers.otf.statistics.usedfeatures" )
+
+function otf.features.normalize(features)
if features then
local h = { }
- for k, v in next, features do
- k = lower(k)
+ for key, value in next, features do
+ local k = lower(key)
if k == "language" then
- v = gsub(lower(v),"[^a-z0-9]","")
+ local v = gsub(lower(value),"[^a-z0-9]","")
h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
elseif k == "script" then
- v = gsub(lower(v),"[^a-z0-9]","")
+ local v = gsub(lower(value),"[^a-z0-9]","")
h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
else
- if type(v) == "string" then
- local b = is_boolean(v)
- if type(b) == "nil" then
- v = tonumber(v) or lower(v)
+ local uk = usedfeatures[key]
+ local uv = uk[value]
+ if uv then
+ -- report_checks("feature value %a first seen at %a",value,key)
+ else
+ if type(value) == "string" then
+ local b = is_boolean(value)
+ if type(b) == "nil" then
+ uv = tonumber(value) or lower(value)
+ else
+ uv = b
+ end
else
- v = b
+                        uv = value
end
+ if not rawget(features,k) then
+ k = rawget(verbosefeatures,k) or k
+ end
+ local c = checkers[k]
+ if c then
+                        uv = c(uv) or uv
+ end
+ uk[value] = uv
end
- if not rawget(features,k) then
- k = rawget(verbosefeatures,k) or k
- end
- local c = checkers[k]
- h[k] = c and c(v) or v
+ h[k] = uv
end
end
return h
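The rewritten resolver above drops the twenty hard-coded stylistic-set names: a numbered tag such as ss03 or cv12 now falls back to the generic "ss.." or "cv.." entry, with the digits substituted into the description. A hedged sketch of that resolution step, reduced to two illustrative entries:

local gsub, match = string.gsub, string.match

local descriptions = {
    ["ss.."] = "stylistic set ..",
    ["cv.."] = "character variant ..",
}

local function describe(tag)
    local prefix, digits = match(tag,"^(%a+)(%d+)$")
    if prefix and digits then
        local template = descriptions[prefix..".."]
        if template then
            return (gsub(template,"%.%.",tostring(tonumber(digits))))
        end
    end
    return descriptions[tag]
end

-- describe("ss03") returns "stylistic set 3"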
diff --git a/Master/texmf-dist/tex/context/base/font-otx.lua b/Master/texmf-dist/tex/context/base/font-otx.lua
new file mode 100644
index 00000000000..d276b8a1524
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-otx.lua
@@ -0,0 +1,393 @@
+if not modules then modules = { } end modules ['font-otx'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (analysing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- context only
+
+local type = type
+
+if not trackers then trackers = { register = function() end } end
+
+----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local fonts, nodes, node = fonts, nodes, node
+
+local allocate = utilities.storage.allocate
+
+local otf = fonts.handlers.otf
+
+local analyzers = fonts.analyzers
+local initializers = allocate()
+local methods = allocate()
+
+analyzers.initializers = initializers
+analyzers.methods = methods
+---------.useunicodemarks = false
+
+local a_state = attributes.private('state')
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local math_code = nodecodes.math
+
+local traverse_id = node.traverse_id
+local traverse_node_list = node.traverse
+local end_of_math = node.end_of_math
+
+local fontdata = fonts.hashes.identifiers
+local categories = characters and characters.categories or { } -- sorry, only in context
+local chardata = characters and characters.data
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+--[[ldx--
+<p>Analyzers run per script and/or language and are needed in order to
+process features right.</p>
+--ldx]]--
+
+-- never use these numbers directly
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local states = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ mark = s_mark,
+ rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+local features = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ -- mark = s_mark,
+}
+
+analyzers.states = states
+analyzers.features = features
+
+-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
+-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
+
+function analyzers.setstate(head,font)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local descriptions = tfmdata.descriptions
+ local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ while current do
+ local id = current.id
+ if id == glyph_code and current.font == font then
+ done = true
+ local char = current.char
+ local d = descriptions[char]
+ if d then
+ if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
+ done = true
+ current[a_state] = s_mark
+ elseif n == 0 then
+ first, last, n = current, current, 1
+ current[a_state] = s_init
+ else
+ last, n = current, n+1
+ current[a_state] = s_medi
+ end
+ else -- finish
+ if first and first == last then
+ last[a_state] = s_isol
+ elseif last then
+ last[a_state] = s_fina
+ end
+ first, last, n = nil, nil, 0
+ end
+ elseif id == disc_code then
+ -- always in the middle
+            current[a_state] = s_medi
+ last = current
+ else -- finish
+ if first and first == last then
+ last[a_state] = s_isol
+ elseif last then
+ last[a_state] = s_fina
+ end
+ first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = current.next
+ end
+ if first and first == last then
+ last[a_state] = s_isol
+ elseif last then
+ last[a_state] = s_fina
+ end
+ return head, done
+end
+
+-- in the future we will use language/script attributes instead of the
+-- font related value, but then we also need dynamic features which is
+-- somewhat slower; and .. we need a chain of them
+
+local function analyzeinitializer(tfmdata,value) -- attr
+ local script, language = otf.scriptandlanguage(tfmdata) -- attr
+ local action = initializers[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head, false
+end
+
+registerotffeature {
+ name = "analyze",
+ description = "analysis of (for instance) character classes",
+ default = true,
+ initializers = {
+ node = analyzeinitializer,
+ },
+ processors = {
+ position = 1,
+ node = analyzeprocessor,
+ }
+}
+
+-- latin
+
+methods.latn = analyzers.setstate
+
+local arab_warned = { }
+
+local function warning(current,what)
+ local char = current.char
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char] = true
+ end
+end
+
+local mappers = {
+ l = s_init, -- left
+ d = s_medi, -- double
+ c = s_medi, -- joiner
+ r = s_fina, -- right
+ u = s_isol, -- nonjoiner
+}
+
+local classifiers = { } -- we can also use this trick for devanagari
+
+local first_arabic, last_arabic = characters.blockrange("arabic")
+local first_syriac, last_syriac = characters.blockrange("syriac")
+local first_mandiac, last_mandiac = characters.blockrange("mandiac")
+local first_nko, last_nko = characters.blockrange("nko")
+
+table.setmetatableindex(classifiers,function(t,k)
+ local c = chardata[k]
+ local v = false
+ if c then
+ local arabic = c.arabic
+ if arabic then
+ v = mappers[arabic]
+ if not v then
+ log.report("analyze","error in mapping arabic %C",k)
+ -- error
+ v = false
+ end
+ elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
+ k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
+ if categories[k] == "mn" then
+ v = s_mark
+ else
+ v = s_rest
+ end
+ else
+ end
+ end
+ t[k] = v
+ return v
+end)
+
+function methods.arab(head,font,attr)
+ local first, last = nil, nil
+ local c_first, c_last = nil, nil
+ local current, done = head, false
+ while current do
+ local id = current.id
+ if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
+ done = true
+ local char = current.char
+ local classifier = classifiers[char]
+ if not classifier then
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ last[a_state] = s_fina
+ else
+ warning(last,"fina")
+ last[a_state] = s_error
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ first[a_state] = s_isol
+ else
+ warning(first,"isol")
+ first[a_state] = s_error
+ end
+ first = nil
+ end
+ elseif classifier == s_mark then
+ current[a_state] = s_mark
+ elseif classifier == s_isol then
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ last[a_state] = s_fina
+ else
+ warning(last,"fina")
+ last[a_state] = s_error
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ first[a_state] = s_isol
+ else
+ warning(first,"isol")
+ first[a_state] = s_error
+ end
+ first = nil
+ end
+ current[a_state] = s_isol
+ elseif classifier == s_medi then
+ if first then
+ last = current
+ c_last = classifier
+ current[a_state] = s_medi
+ else
+ current[a_state] = s_init
+ first = current
+ c_first = classifier
+ end
+ elseif classifier == s_fina then
+ if last then
+ if last[a_state] ~= s_init then
+ last[a_state] = s_medi
+ end
+ current[a_state] = s_fina
+ first, last = nil, nil
+ elseif first then
+ -- if first[a_state] ~= s_init then
+ -- -- needs checking
+ -- first[a_state] = s_medi
+ -- end
+ current[a_state] = s_fina
+ first = nil
+ else
+ current[a_state] = s_isol
+ end
+ else -- classifier == s_rest
+ current[a_state] = s_rest
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ last[a_state] = s_fina
+ else
+ warning(last,"fina")
+ last[a_state] = s_error
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ first[a_state] = s_isol
+ else
+ warning(first,"isol")
+ first[a_state] = s_error
+ end
+ first = nil
+ end
+ end
+ else
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ last[a_state] = s_fina
+ else
+ warning(last,"fina")
+ last[a_state] = s_error
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ first[a_state] = s_isol
+ else
+ warning(first,"isol")
+ first[a_state] = s_error
+ end
+ first = nil
+ end
+ if id == math_code then -- a bit duplicate as we test for glyphs twice
+ current = end_of_math(current)
+ end
+ end
+ current = current.next
+ end
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ last[a_state] = s_fina
+ else
+ warning(last,"fina")
+ last[a_state] = s_error
+ end
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ first[a_state] = s_isol
+ else
+ warning(first,"isol")
+ first[a_state] = s_error
+ end
+ end
+ return head, done
+end
+
+methods.syrc = methods.arab
+methods.mand = methods.arab
+methods.nko = methods.arab
+
+-- directives.register("otf.analyze.useunicodemarks",function(v)
+-- analyzers.useunicodemarks = v
+-- end)
diff --git a/Master/texmf-dist/tex/context/base/font-pat.lua b/Master/texmf-dist/tex/context/base/font-pat.lua
index b91502c74e5..3ad37641ce3 100644
--- a/Master/texmf-dist/tex/context/base/font-pat.lua
+++ b/Master/texmf-dist/tex/context/base/font-pat.lua
@@ -6,11 +6,13 @@ if not modules then modules = { } end modules ['font-pat'] = {
license = "see context related readme files"
}
+-- This functionality is nowadays provided via lfg files, so what you see here
+-- is more an example.
+
local match, lower = string.match, string.lower
--- older versions of latin modern didn't have the designsize set
--- so for them we get it from the name
--- reporter moved to elsewhere
+-- Older versions of Latin Modern didn't have the design size set, so for them we
+-- get it from the file name. The reporter has moved elsewhere.
local fonts = fonts
local otf = fonts.handlers.otf
@@ -22,7 +24,7 @@ local function patch(data,filename)
if data.design_size == 0 then
local ds = match(file.basename(lower(filename)),"(%d+)")
if ds then
- report("design size (%s)",ds)
+ report("font %a has design size %a",filename,ds)
data.design_size = tonumber(ds) * 10
end
end
@@ -32,9 +34,9 @@ register("after","migrate metadata","^lmroman", patch)
register("after","migrate metadata","^lmsans", patch)
register("after","migrate metadata","^lmtypewriter",patch)
--- for some reason (either it's a bug in the font, or it's
--- a problem in the library) the palatino arabic fonts don't
--- have the mkmk features properly set up
+-- For some reason (either it's a bug in the font, or it's a problem in the
+-- library) the palatino arabic fonts don't have the mkmk features properly
+-- set up.
local function patch(data,filename)
local gpos = data.gpos
@@ -42,14 +44,14 @@ local function patch(data,filename)
for k=1,#gpos do
local v = gpos[k]
if not v.features and v.type == "gpos_mark2mark" then
- report("mkmk feature (name: %s)", v.name or "?")
+ report("mkmk feature, name %a", v.name)
v.features = {
{
scripts = {
arab = {
- ["ARA "] = true,
- ["FAR "] = true,
- ["URD "] = true,
+ ["ara "] = true,
+ ["far "] = true,
+ ["urd "] = true,
["dflt"] = true,
}
},
diff --git a/Master/texmf-dist/tex/context/base/font-pre.mkiv b/Master/texmf-dist/tex/context/base/font-pre.mkiv
index 141bfd2ff49..410f2dcef51 100644
--- a/Master/texmf-dist/tex/context/base/font-pre.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-pre.mkiv
@@ -48,6 +48,10 @@
tlig=yes,
trep=yes] % texligatures=yes,texquotes=yes
+% \definefontfeature
+% [newstyle]
+% [onum=no]
+
\definefontfeature % == default unless redefined
[ligatures]
[always]
@@ -91,6 +95,42 @@
[simplearabic]
[script=hebr]
+% \definefont [DevaOne] [file:chandas.ttf*devanagari-one at 12pt]
+% \definefont [DevaTwo] [file:kokila.ttf*devanagari-two at 18pt]
+
+\definefontfeature
+ [devanagari-one]
+ [mode=node,
+ language=dflt,
+ script=deva,
+ nukt=yes,
+ akhn=yes,
+ rphf=yes,
+ blwf=yes,
+ half=yes,
+ vatu=yes,
+ pres=yes,
+ abvs=yes,
+ blws=yes,
+ psts=yes,
+ haln=yes,
+ abvm=yes,
+ blwm=yes,
+ dist=yes]
+
+\definefontfeature
+ [devanagari-two]
+ [devanagari-one]
+ [script=dev2,
+ locl=yes,
+ pref=yes,
+ rkrf=yes,
+ pstf=yes,
+ cjct=yes,
+ init=yes,
+ calt=yes,
+ kern=yes]
+
% symbols:
\definefontfeature
@@ -169,7 +209,35 @@
\definecolor[font:fina][b=.75]
\definecolor[font:isol][r=.75,g=.75] % [y=.75]
\definecolor[font:mark][r=.75,b=.75] % [m=.75]
-\definecolor[font:rest][g=.75,b=.75] % [c=.75]
+\definecolor[font:rest][b=.75,g=.75] % [c=.75]
+
+\definecolor[trace:r][r=.75,t=.5,a=1]
+\definecolor[trace:g][g=.75,t=.5,a=1]
+\definecolor[trace:b][b=.75,t=.5,a=1]
+\definecolor[trace:c][c=.75,t=.5,a=1]
+\definecolor[trace:m][m=.75,t=.5,a=1]
+\definecolor[trace:y][y=.75,t=.5,a=1]
+\definecolor[trace:s][s=.75,t=.5,a=1]
+\definecolor[trace:o][r=1,g=.6,b=.1,t=.5,a=1]
+
+\definecolor[trace:dr][r=.75,t=.75,a=1]
+\definecolor[trace:dg][g=.75,t=.75,a=1]
+\definecolor[trace:db][b=.75,t=.75,a=1]
+\definecolor[trace:dc][c=.75,t=.75,a=1]
+\definecolor[trace:dm][m=.75,t=.75,a=1]
+\definecolor[trace:dy][y=.75,t=.75,a=1]
+\definecolor[trace:ds][s=.75,t=.75,a=1]
+\definecolor[trace:do][r=1,g=.6,b=.1,t=.75,a=1]
+
+\definecolor[font:1] [r=.75]
+\definecolor[font:2] [g=.75]
+\definecolor[font:3] [b=.75]
+\definecolor[font:4] [r=.75,g=.75]
+\definecolor[font:5] [r=.75,b=.75]
+\definecolor[font:6] [b=.75,g=.75]
+\definecolor[font:7] [r=.75]
+\definecolor[font:8] [g=.75]
+\definecolor[font:9] [b=.75]
%D Now we're up to some definitions.
@@ -415,8 +483,6 @@
%D We treat {\sc Small Caps} and \cap {Pseudo Caps} a bit
%D different. We also provide an \WORD {uppercase} style.
-\definealternativestyle [\v!smallcaps] [\sc] [\sc]
-
\definealternativestyle [\v!WORD] [{\setcharactercasing[\v!WORD ]}] [{\setcharactercasing[\v!WORD ]}]
\definealternativestyle [\v!word] [{\setcharactercasing[\v!word ]}] [{\setcharactercasing[\v!word ]}]
\definealternativestyle [\v!Word] [{\setcharactercasing[\v!Word ]}] [{\setcharactercasing[\v!Word ]}]
@@ -427,6 +493,14 @@
\definealternativestyle [\v!cap] [{\setcharactercasing[\v!cap ]}] [{\setcharactercasing[\v!cap ]}]
\definealternativestyle [\v!Cap] [{\setcharactercasing[\v!Cap ]}] [{\setcharactercasing[\v!Cap ]}]
+%D From now (2013-03-17) on we have:
+
+\definefontfeature[f:smallcaps][smcp=yes]
+\definefontfeature[f:oldstyle] [onum=yes]
+
+\definealternativestyle [\v!smallcaps] [\doaddfeature{f:smallcaps}] [\doaddfeature{f:smallcaps}]
+\definealternativestyle [\v!oldstyle] [\doaddfeature{f:oldstyle}] [\doaddfeature{f:oldstyle}]
+
%D \macros
%D {tinyfont}
%D
diff --git a/Master/texmf-dist/tex/context/base/font-run.mkiv b/Master/texmf-dist/tex/context/base/font-run.mkiv
index b346de5e1c6..66f1acc919c 100644
--- a/Master/texmf-dist/tex/context/base/font-run.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-run.mkiv
@@ -13,7 +13,7 @@
%C details.
%D [This code is hooked into the core macros and saves some
-%D format space.]
+%D format space. It needs a cleanup.]
\unprotect
@@ -31,7 +31,7 @@
{\doifnot{[\fontclass]}\title
{\edef\title{[\fontclass]\space\title}}}}
-\gdef\showbodyfont
+\unexpanded\gdef\showbodyfont
{\dosingleempty\doshowbodyfont}
\gdef\doshowbodyfont[#1]%
@@ -40,9 +40,9 @@
{\dosetshowfonttitle{#1}%
\def\bigstrut##1##2%
{\hbox{\vrule
- \!!height##1\strutht
- \!!depth ##2\strutdp
- \!!width \zeropoint}}
+ \s!height##1\strutht
+ \s!depth ##2\strutdp
+ \s!width \zeropoint}}
\def\next##1##2##3%
{&&##1&&##2\tf##3&&##2\sc##3%
&&##2\sl##3&&##2\it##3&&##2\bf##3&&##2\bs##3&&##2\bi##3%
@@ -50,7 +50,7 @@
&&##2\tfa##3&&##2\tfb##3&&##2\tfc##3&&##2\tfd##3&\cr}%
\halign to \localhsize
{\bigstrut{1.5}{2}##&\vrule##
- \tabskip=\!!zeropoint \!!plus 1fill
+ \tabskip\zeropoint \s!plus 1\s!fill
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
@@ -58,7 +58,7 @@
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&##\vrule
- \tabskip=\!!zeropoint\cr
+ \tabskip=\zeropoint\cr
\noalign{\hrule}
&\multispan{29}{\vrule\hfil\tttf\strut\title\hfil
\llap{\string\mr\hbox to 1em{\hss:\hss}$\mr \languageparameter\c!text$\quad}\vrule}\cr
@@ -69,7 +69,7 @@
\noalign{\hrule}}}
\ifinsidefloat\else\stopbaselinecorrection\fi}
-\gdef\showbodyfontenvironment
+\unexpanded\gdef\showbodyfontenvironment
{\dosingleempty\doshowbodyfontenvironment}
\gdef\doshowbodyfontenvironment[#1]%
@@ -89,7 +89,7 @@
\noalign{\hrule}}
\halign to \localhsize
{##&\vrule##\strut
- \tabskip=\!!zeropoint \!!plus 1fill
+ \tabskip=\zeropoint \s!plus 1\s!fill
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##&\hfil##\hfil&\vrule##
&\hfil##\hfil&\vrule##&\hfil##\hfil&##\vrule
@@ -104,7 +104,7 @@
\@EA\globalprocesscommalist\@EA[\bodyfontenvironmentlist]\next}}
\ifinsidefloat\else\stopbaselinecorrection\fi}
-\gdef\showfont
+\unexpanded\gdef\showfont
{\dodoubleempty\doshowfont}
\gdef\doshowfont[#1][#2]%
@@ -132,7 +132,6 @@
\gdef\dodoshowfont#1#2%
{\bgroup
\edef\charplane{\number#2}%
- \boxrulewidth=.1pt
\iffirstargument
% \definefont[\s!dummy][#1]\dummy
\else\ifdim2.5em>.05\hsize \tx
@@ -159,7 +158,7 @@
\donetrue \else \donefalse
\fi
\else
- \setbox\scratchbox\hbox{\gray\vrule\!!width1ex\!!height.5ex\!!depth.5ex}%
+ \setbox\scratchbox\hbox{\gray\vrule\s!width1ex\s!height.5ex\s!depth.5ex}%
\fi
\startoverlay
{\tf\vbox to 2.5em
@@ -206,9 +205,9 @@
% \showfontstyle[modern][rm][tf][all] % #2, #3 and #4 can be 'all'
-\gdef\showfontstyle {\dodoubleempty \dodoshowfontstyle}
-\gdef\redoshowfontstyle {\dotripleempty \dododoshowfontstyle}
-\gdef\redodoshowfontstyle{\doquadrupleempty\dodododoshowfontstyle}
+\unexpanded\gdef\showfontstyle {\dodoubleempty \dodoshowfontstyle}
+\unexpanded\gdef\redoshowfontstyle {\dotripleempty \dododoshowfontstyle}
+\unexpanded\gdef\redodoshowfontstyle{\doquadrupleempty\dodododoshowfontstyle}
% no lists any more, so no 'all' any longer
@@ -254,10 +253,10 @@
\processcommalist[#2]\docommand
\egroup}
-\gdef\showligature#1%
+\unexpanded\gdef\showligature#1%
{\hbox{\type{#1}\enspace\red\ruledhbox{\black#1}}}
-\gdef\showligatures[#1]%
+\unexpanded\gdef\showligatures[#1]%
{\ifx\starttabulate\undefined
\@@onlyenglish\showligatures
\else
@@ -273,7 +272,7 @@
\egroup
\fi}
-\gdef\showfontstrip
+\unexpanded\gdef\showfontstrip
{\dosingleempty\doshowfontstrip}
\gdef\doshowfontstrip[#1]%
@@ -310,9 +309,9 @@
\egroup
\fi}
-\ifx\databox\undefined \newbox\databox \fi
+\ifdefined\databox \else \newbox\databox \fi
-\gdef\testminimalbaseline#1%
+\unexpanded\gdef\testminimalbaseline#1%
{\setbox\databox\ruledhbox{#1}%
\scratchdimen\ht\databox
\advance\scratchdimen\dp\databox
@@ -332,7 +331,7 @@
\noexpand \NC (\ifdim\scratchdimen>\baselineskip not \fi ok)
\noexpand \NC \noexpand \NR }}
-\gdef\showminimalbaseline
+\unexpanded\gdef\showminimalbaseline
{\ifx\starttabulate\undefined
\@@onlyenglish\showminimalbaseline
\else
@@ -343,20 +342,20 @@
\stoptabulate
\fi}
-\gdef\showkerning#1%
+\unexpanded\gdef\showkerning#1%
{\bgroup
\let\MPfshowcommand\ruledhbox
\setMPtext\s!dummy{#1}%
\startMPcode draw textext(\MPstring\s!dummy);\stopMPcode
\egroup}
-\gdef\showcharratio
+\unexpanded\gdef\showcharratio
{\dowithnextboxcontent
{\switchtobodyfont[10pt]}%
{(\withoutpt\the\nextboxht,\withoutpt\the\nextboxdp)}%
\hbox}
-\gdef\showfontparameters
+\unexpanded\gdef\showfontparameters
{\starttabulate[|tl|l|]
\NC slantperpoint \NC \the\fontslantperpoint \font \NC\NR
\NC interwordspace \NC \the\fontinterwordspace \font \NC\NR
diff --git a/Master/texmf-dist/tex/context/base/font-set.mkvi b/Master/texmf-dist/tex/context/base/font-set.mkvi
index 8e404426723..0e2058c1888 100644
--- a/Master/texmf-dist/tex/context/base/font-set.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-set.mkvi
@@ -37,7 +37,7 @@
%D runtime we have to load the default bodyfont size just before
%D we start typesetting.
-\def\fallbacktypeface{modern-designsize} % at some point we might prefer just modern
+% \enablemode[lmmath]
\def\font_preloads_reset_nullfont % this is needed because some macro packages (tikz) misuse \nullfont
{\dorecurse\plusseven{\fontdimen\recurselevel\nullfont\zeropoint}% keep an eye on this as:
@@ -46,7 +46,9 @@
\def\font_preload_default_fonts
{\font_preloads_reset
- \setupbodyfont[\fallbacktypeface,\fontstyle,\fontbody]%
+ \doifmodeelse{lmmath}
+ {\setupbodyfont[modern-designsize-virtual,\fontstyle,\fontbody]}% this will stay
+ {\setupbodyfont[modern-designsize,\fontstyle,\fontbody]}% % this might become 'modern'
\showmessage\m!fonts6{fallback modern \fontstyle\normalspace\normalizedbodyfontsize}}
\def\font_preload_default_fonts_mm
@@ -121,7 +123,7 @@
\unexpanded\def\font_preloads_fourth_stage
{\begingroup
%ifzeropt\fontcharwd\font\number`!\relax
- \setbox\scratchbox\hbox{context}%
+ \setbox\scratchbox\hbox{checking fonts}%
\ifzeropt\wd\scratchbox
\writeline
\writestatus\m!fonts{!! No bodyfont has been defined and no defaults have been}%
diff --git a/Master/texmf-dist/tex/context/base/font-sol.lua b/Master/texmf-dist/tex/context/base/font-sol.lua
new file mode 100644
index 00000000000..db2dd24c248
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-sol.lua
@@ -0,0 +1,884 @@
+if not modules then modules = { } end modules ['font-sol'] = { -- this was: node-spl
+ version = 1.001,
+ comment = "companion to font-sol.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module is dedicated to the oriental tex project and for
+-- the moment is too experimental to be publicly supported.
+--
+-- We could cache solutions: say that we store the featureset and
+-- all 'words' -> replacement ... so we create a large solution
+-- database (per font)
+--
+-- This module can be optimized by using a dedicated dynamics handler
+-- but I'll only do that when the rest of the code is stable.
+--
+-- Todo: bind setups to paragraph.
+
+local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove
+local next, tostring, tonumber = next, tostring, tonumber
+local insert, remove = table.insert, table.remove
+local utfchar = utf.char
+local random = math.random
+
+local utilities, logs, statistics, fonts, trackers = utilities, logs, statistics, fonts, trackers
+local interfaces, commands, attributes = interfaces, commands, attributes
+local nodes, node, tex = nodes, node, tex
+
+local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end)
+local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end)
+local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end)
+local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
+
+local report_solutions = logs.reporter("fonts","solutions")
+local report_splitters = logs.reporter("fonts","splitters")
+local report_optimizers = logs.reporter("fonts","optimizers")
+
+local variables = interfaces.variables
+
+local v_normal = variables.normal
+local v_reverse = variables.reverse
+local v_preroll = variables.preroll
+local v_random = variables.random
+local v_split = variables.split
+
+local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_nodelist = node.copy_list
+local traverse_nodes = node.traverse
+local traverse_ids = node.traverse_id
+local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
+local hpack_nodes = node.hpack
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local repack_hlist = nodes.repackhlist
+local nodes_to_utf = nodes.listtoutf
+
+local setnodecolor = nodes.tracers.colors.set
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+local hlist_code = nodecodes.hlist
+local whatsit_code = nodecodes.whatsit
+
+local fontkern_code = kerncodes.fontkern
+
+local localpar_code = whatsitcodes.localpar
+local dir_code = whatsitcodes.dir
+local userdefined_code = whatsitcodes.userdefined
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+local usernodeids = nodepool.userids
+
+local new_textdir = nodepool.textdir
+local new_usernumber = nodepool.usernumber
+local new_glue = nodepool.glue
+local new_leftskip = nodepool.leftskip
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local process_characters = nodes.handlers.characters
+local inject_kerns = nodes.injections.handler
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local setfontdynamics = fonthashes.setdynamics
+local fontprocesses = fonthashes.processes
+
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local parbuilders = builders.paragraphs
+parbuilders.solutions = parbuilders.solutions or { }
+local parsolutions = parbuilders.solutions
+parsolutions.splitters = parsolutions.splitters or { }
+local splitters = parsolutions.splitters
+
+local solutions = { } -- attribute sets
+local registered = { } -- backmapping
+splitters.registered = registered
+
+local a_split = attributes.private('splitter')
+
+local preroll = true
+local criterium = 0
+local randomseed = nil
+local optimize = nil -- set later
+local variant = v_normal
+local splitwords = true
+
+local cache = { }
+local variants = { }
+local max_less = 0
+local max_more = 0
+
+local stack = { }
+
+local dummy = {
+ attribute = unsetvalue,
+ randomseed = 0,
+ criterium = 0,
+ preroll = false,
+ optimize = nil,
+ splitwords = false,
+ variant = v_normal,
+}
+
+local function checksettings(r,settings)
+ local s = r.settings
+ local method = settings_to_hash(settings.method or "")
+ local optimize, preroll, splitwords
+ for k, v in next, method do
+ if k == v_preroll then
+ preroll = true
+ elseif k == v_split then
+ splitwords = true
+ elseif variants[k] then
+ variant = k
+ optimize = variants[k] -- last one wins
+ end
+ end
+ r.randomseed = tonumber(settings.randomseed) or s.randomseed or r.randomseed or 0
+ r.criterium = tonumber(settings.criterium ) or s.criterium or r.criterium or 0
+ r.preroll = preroll or false
+ r.splitwords = splitwords or false
+ r.optimize = optimize or s.optimize or r.optimize or variants[v_normal]
+end
+
+local function pushsplitter(name,settings)
+ local r = name and registered[name]
+ if r then
+ if settings then
+ checksettings(r,settings)
+ end
+ else
+ r = dummy
+ end
+ insert(stack,r)
+ -- brr
+ randomseed = r.randomseed or 0
+ criterium = r.criterium or 0
+ preroll = r.preroll or false
+ optimize = r.optimize or nil
+ splitwords = r.splitwords or nil
+ --
+ texsetattribute(a_split,r.attribute)
+ return #stack
+end
+
+local function popsplitter()
+ remove(stack)
+ local n = #stack
+ local r = stack[n] or dummy
+ --
+ randomseed = r.randomseed or 0
+ criterium = r.criterium or 0
+ preroll = r.preroll or false
+ optimize = r.optimize or nil
+ --
+ texsetattribute(a_split,r.attribute)
+ return n
+end
+
+local contextsetups = fonts.specifiers.contextsetups
+
+local function convert(featuresets,name,list)
+ if list then
+ local numbers = { }
+ local nofnumbers = 0
+ for i=1,#list do
+ local feature = list[i]
+ local fs = featuresets[feature]
+ local fn = fs and fs.number
+ if not fn then
+ -- fall back on global features
+ fs = contextsetups[feature]
+ fn = fs and fs.number
+ end
+ if fn then
+ nofnumbers = nofnumbers + 1
+ numbers[nofnumbers] = fn
+ if trace_goodies or trace_optimize then
+ report_solutions("solution %a of %a uses feature %a with number %s",i,name,feature,fn)
+ end
+ else
+ report_solutions("solution %a of %a has an invalid feature reference %a",i,name,feature)
+ end
+ end
+ return nofnumbers > 0 and numbers
+ end
+end
+
+local function initialize(goodies)
+ local solutions = goodies.solutions
+ if solutions then
+ local featuresets = goodies.featuresets
+ local goodiesname = goodies.name
+ if trace_goodies or trace_optimize then
+ report_solutions("checking solutions in %a",goodiesname)
+ end
+ for name, set in next, solutions do
+ set.less = convert(featuresets,name,set.less)
+ set.more = convert(featuresets,name,set.more)
+ end
+ end
+end
+
+fonts.goodies.register("solutions",initialize)
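+
+-- Hedged sketch of what this initializer expects from a goodies (lfg) file; the
+-- names 'demo', 'tight', 'wide' and 'experimental' are made up, only the shape
+-- matters:
+--
+-- return {
+--    name        = "demo",
+--    featuresets = { tight = { }, wide = { } },
+--    solutions   = { experimental = { less = { "tight" }, more = { "wide" } } },
+-- }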
+
+function splitters.define(name,settings)
+ local goodies = settings.goodies
+ local solution = settings.solution
+ local less = settings.less
+ local more = settings.more
+ local less_set, more_set
+ local l = less and settings_to_array(less)
+ local m = more and settings_to_array(more)
+ if goodies then
+ goodies = fonts.goodies.load(goodies) -- also in tfmdata
+ if goodies then
+ local featuresets = goodies.featuresets
+ local solution = solution and goodies.solutions[solution]
+ if l and #l > 0 then
+ less_set = convert(featuresets,name,less) -- take from settings
+ else
+ less_set = solution and solution.less -- take from goodies
+ end
+ if m and #m > 0 then
+ more_set = convert(featuresets,name,more) -- take from settings
+ else
+ more_set = solution and solution.more -- take from goodies
+ end
+ end
+ else
+ if l then
+ less_set = { } -- no goodies given, so the set is built from scratch
+ local n = #less_set
+ for i=1,#l do
+ local ss = contextsetups[l[i]]
+ if ss then
+ n = n + 1
+ less_set[n] = ss.number
+ end
+ end
+ end
+ if m then
+ more_set = { } -- idem for the 'more' set
+ local n = #more_set
+ for i=1,#m do
+ local ss = contextsetups[m[i]]
+ if ss then
+ n = n + 1
+ more_set[n] = ss.number
+ end
+ end
+ end
+ end
+ if trace_optimize then
+ report_solutions("defining solutions %a, less %a, more %a",name,concat(less_set or {}," "),concat(more_set or {}," "))
+ end
+ local nofsolutions = #solutions + 1
+ local t = {
+ solution = solution,
+ less = less_set or { },
+ more = more_set or { },
+ settings = settings, -- for tracing
+ attribute = nofsolutions,
+ }
+ solutions[nofsolutions] = t
+ registered[name] = t
+ return nofsolutions
+end
+
+local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
+
+local splitter_one = usernodeids["splitters.one"]
+local splitter_two = usernodeids["splitters.two"]
+
+local a_word = attributes.private('word')
+local a_fontkern = attributes.private('fontkern')
+
+local encapsulate = false
+
+directives.register("builders.paragraphs.solutions.splitters.encapsulate", function(v)
+ encapsulate = v
+end)
+
+function splitters.split(head)
+ -- quite fast
+ local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
+ cache, max_less, max_more = { }, 0, 0
+ local function flush() -- we can move this
+ local font = start.font
+ local last = stop.next
+ local list = last and copy_nodelist(start,last) or copy_nodelist(start)
+ local n = #cache + 1
+ if encapsulate then
+ local user_one = new_usernumber(splitter_one,n)
+ local user_two = new_usernumber(splitter_two,n)
+ head, start = insert_node_before(head,start,user_one)
+ insert_node_after(head,stop,user_two)
+ else
+ local current = start
+ while true do
+ current[a_word] = n
+ if current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ end
+ if rlmode == "TRT" or rlmode == "+TRT" then
+ local dirnode = new_textdir("+TRT")
+ list.prev = dirnode
+ dirnode.next = list
+ list = dirnode
+ end
+ local c = {
+ original = list,
+ attribute = attribute,
+ direction = rlmode,
+ font = font
+ }
+ if trace_split then
+ report_splitters("cached %4i: font %a, attribute %a, direction %a, word %a",
+ n, font, attribute, rlmode and "r2l" or "l2r", nodes_to_utf(list,true))
+ end
+ cache[n] = c
+ local solution = solutions[attribute]
+ local l, m = #solution.less, #solution.more
+ if l > max_less then max_less = l end
+ if m > max_more then max_more = m end
+ start, stop, done = nil, nil, true
+ end
+ while current do -- also nextid
+ local next = current.next
+ local id = current.id
+ if id == glyph_code then
+ if current.subtype < 256 then
+ local a = current[a_split]
+ if not a then
+ start, stop = nil, nil
+ elseif not start then
+ start, stop, attribute = current, current, a
+ elseif a ~= attribute then
+ start, stop = nil, nil
+ else
+ stop = current
+ end
+ end
+ elseif id == disc_code then
+ if splitwords then
+ if start then
+ flush()
+ end
+ elseif start and next and next.id == glyph_code and next.subtype < 256 then
+ -- beware: we can cross future lines
+ stop = next
+ else
+ start, stop = nil, nil
+ end
+ elseif id == whatsit_code then
+ if start then
+ flush()
+ end
+ local subtype = current.subtype
+ if subtype == dir_code or subtype == localpar_code then
+ rlmode = current.dir
+ end
+ else
+ if start then
+ flush()
+ end
+ end
+ current = next
+ end
+ if start then
+ flush()
+ end
+ nofparagraphs = nofparagraphs + 1
+ nofwords = nofwords + #cache
+ return head, done
+end
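+
+-- When 'encapsulate' is off, every glyph of a word carries the same a_word value
+-- and collect_words later regroups them. A toy version of that grouping in plain
+-- Lua ('glyphs' is a hypothetical flat list of { char = ..., word = ... } records,
+-- not real nodes):
+--
+-- local words, current = { }, nil
+-- for _, g in ipairs(glyphs) do
+--    if not current or current.index ~= g.word then
+--       current = { index = g.word, glyphs = { } }
+--       words[#words+1] = current
+--    end
+--    current.glyphs[#current.glyphs+1] = g.char
+-- end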
+
+local function collect_words(list) -- can be made faster for attributes
+ local words, w, word = { }, 0, nil
+ if encapsulate then
+ for current in traverse_ids(whatsit_code,list) do
+ if current.subtype == userdefined_code then -- hm
+ local user_id = current.user_id
+ if user_id == splitter_one then
+ word = { current.value, current, current }
+ w = w + 1
+ words[w] = word
+ elseif user_id == splitter_two then
+ if word then
+ word[3] = current
+ else
+ -- something is wrong
+ end
+ end
+ end
+ end
+ else
+ local current, first, last, index = list, nil, nil, nil
+ while current do
+ -- todo: disc and kern
+ local id = current.id
+ if id == glyph_code or id == disc_code then
+ local a = current[a_word]
+ if a then
+ if a == index then
+ -- same word
+ last = current
+ elseif index then
+ w = w + 1
+ words[w] = { index, first, last }
+ first = current
+ last = current
+ index = a
+ elseif first then
+ last = current
+ index = a
+ else
+ first = current
+ last = current
+ index = a
+ end
+ elseif index then
+ if first then
+ w = w + 1
+ words[w] = { index, first, last }
+ end
+ index = nil
+ first = nil
+ elseif trace_split then
+ if id == disc_code then
+ report_splitters("skipped: disc node")
+ else
+ report_splitters("skipped: %C",current.char)
+ end
+ end
+ elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
+ if first then
+ last = current
+ else
+ first = current
+ last = current
+ end
+ elseif index then
+ w = w + 1
+ words[w] = { index, first, last }
+ index = nil
+ first = nil
+ if id == disc_code then
+ if trace_split then
+ report_splitters("skipped: disc node")
+ end
+ end
+ end
+ current = current.next
+ end
+ if index then
+ w = w + 1
+ words[w] = { index, first, last }
+ end
+ if trace_split then
+ for i=1,#words do
+ local w = words[i]
+ local n, f, l = w[1], w[2], w[3]
+ local c = cache[n]
+ if c then
+ report_splitters("found %4i: word %a, cached %a",n,nodes_to_utf(f,true,true,l),nodes_to_utf(c.original,true))
+ else
+ report_splitters("found %4i: word %a, not in cache",n,nodes_to_utf(f,true,true,l))
+ end
+ end
+ end
+ end
+ return words, list -- check for empty (elsewhere)
+end
+
+-- we could avoid a hpack but hpack is not that slow
+
+local function doit(word,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ local n = word[1]
+ local found = cache[n]
+ if found then
+ local h, t
+ if encapsulate then
+ h = word[2].next -- head of current word
+ t = word[3].prev -- tail of current word
+ else
+ h = word[2]
+ t = word[3]
+ end
+ if splitwords then
+ -- there are no lines crossed in a word
+ else
+ local ok = false
+ local c = h
+ while c do
+ if c == t then
+ ok = true
+ break
+ else
+ c = c.next
+ end
+ end
+ if not ok then
+ report_solutions("skipping hyphenated word (for now)")
+ -- todo: mark in words as skipped, saves a bit runtime
+ return false, changed
+ end
+ end
+ local original, attribute, direction = found.original, found.attribute, found.direction
+ local solution = solutions[attribute]
+ local features = solution and solution[set]
+ if features then
+ local featurenumber = features[best] -- not ok probably
+ if featurenumber then
+ noftries = noftries + 1
+ local first = copy_nodelist(original)
+ if not trace_colors then
+ for n in traverse_nodes(first) do -- maybe fast force so no attr needed
+ n[0] = featurenumber -- this forces dynamics
+ end
+ elseif set == "less" then
+ for n in traverse_nodes(first) do
+ setnodecolor(n,"font:isol") -- yellow
+ n[0] = featurenumber
+ end
+ else
+ for n in traverse_nodes(first) do
+ setnodecolor(n,"font:medi") -- green
+ n[0] = featurenumber
+ end
+ end
+ local font = found.font
+ local setdynamics = setfontdynamics[font]
+ if setdynamics then
+ local processes = setdynamics(font,featurenumber)
+ for i=1,#processes do -- often more than 1
+ first = processes[i](first,font,featurenumber)
+ end
+ else
+ report_solutions("fatal error, no dynamics for font %a",font)
+ end
+ first = inject_kerns(first)
+ if first.id == whatsit_code then
+ local temp = first
+ first = first.next
+ free_node(temp)
+ end
+ local last = find_node_tail(first)
+ -- replace [u]h->t by [u]first->last
+ local prev = h.prev
+ local next = t.next
+ prev.next = first
+ first.prev = prev
+ if next then
+ last.next = next
+ next.prev = last
+ end
+ -- check new pack
+ local temp, b = repack_hlist(list,width,'exactly',listdir)
+ if b > badness then
+ if trace_optimize then
+ report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
+ end
+ -- remove last insert
+ prev.next = h
+ h.prev = prev
+ if next then
+ t.next = next
+ next.prev = t
+ else
+ t.next = nil
+ end
+ last.next = nil
+ free_nodelist(first)
+ else
+ if trace_optimize then
+ report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
+ end
+ -- free old h->t
+ t.next = nil
+ free_nodelist(h) -- somehow fails
+ if not encapsulate then
+ word[2] = first
+ word[3] = last
+ end
+ changed, badness = changed + 1, b
+ end
+ if b <= criterium then
+ return true, changed
+ end
+ end
+ end
+ end
+ return false, changed
+end
+
+-- We repeat some code here, but adding yet another layer of indirection would
+-- not make things better.
+
+variants[v_normal] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=1,#words do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[v_reverse] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=#words,1,-1 do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ while #words > 0 do
+ local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
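+
+-- The three strategies only differ in visiting order; the random one boils down
+-- to this plain Lua loop ('try' is a hypothetical stand-in for the doit call
+-- above):
+--
+-- while #words > 0 do
+--    local word = remove(words,random(1,#words))
+--    if try(word) then break end
+-- end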
+
+local function show_quality(current,what,line)
+ local set = current.glue_set
+ local sign = current.glue_sign
+ local order = current.glue_order
+ local amount = set * ((sign == 2 and -1) or 1)
+ report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
+end
+
+function splitters.optimize(head)
+ if not optimize then
+ report_optimizers("no optimizer set")
+ return
+ end
+ local nc = #cache
+ if nc == 0 then
+ return
+ end
+ starttiming(splitters)
+ local listdir = nil -- todo ! ! !
+ if randomseed then
+ math.setrandomseedi(randomseed)
+ randomseed = nil
+ end
+ local line = 0
+ local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
+ tex.hbadness, tex.hfuzz = 10000, number.maxdimen
+ if trace_optimize then
+ report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
+ end
+ for current in traverse_ids(hlist_code,head) do
+ -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
+ line = line + 1
+ local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
+ if not encapsulate and list.id == glyph_code then
+ -- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
+ -- current.list, list = insert_node_before(list,list,new_glue(0))
+ current.list, list = insert_node_before(list,list,new_leftskip(0))
+ end
+ local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
+ if badness > 0 then
+ if sign == 0 then
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"okay","okay")
+ end
+ else
+ local set, max
+ if sign == 1 then
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"underfull","trying more")
+ end
+ set, max = "more", max_more
+ else
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"overfull","trying less")
+ end
+ set, max = "less", max_less
+ end
+ -- we can keep the best variants
+ local lastbest, lastbadness = nil, badness
+ if preroll then
+ local bb, base
+ for i=1,max do
+ if base then
+ free_nodelist(base)
+ end
+ base = copy_nodelist(list)
+ local words = collect_words(base) -- beware: words is adapted
+ for j=i,max do
+ local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir)
+ base = temp
+ if trace_optimize then
+ report_optimizers("line %a, alternative %a.%a, changes %a, badness %a",line,i,j,changes,b)
+ end
+ bb = b
+ if b <= criterium then
+ break
+ end
+ -- if done then
+ -- break
+ -- end
+ end
+ if bb and bb > criterium then -- needs checking
+ if not lastbest then
+ lastbest, lastbadness = i, bb
+ elseif bb > lastbadness then
+ lastbest, lastbadness = i, bb
+ end
+ else
+ break
+ end
+ end
+ free_nodelist(base)
+ end
+ local words = collect_words(list)
+ for best=lastbest or 1,max do
+ local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
+ current.list = temp
+ if trace_optimize then
+ report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
+ end
+ if done then
+ if b <= criterium then -- was == 0
+ protect_glyphs(list)
+ break
+ end
+ end
+ end
+ end
+ else
+ if trace_optimize then
+ report_optimizers("line %a, verdict %a",line,"not bad enough")
+ end
+ end
+ -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
+ current.list = hpack_nodes(current.list,width,'exactly',listdir)
+ -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
+ end
+ for i=1,nc do
+ local ci = cache[i]
+ free_nodelist(ci.original)
+ end
+ cache = { }
+ tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
+ stoptiming(splitters)
+end
+
+statistics.register("optimizer statistics", function()
+ if nofwords > 0 then
+ local elapsed = statistics.elapsedtime(splitters)
+ local average = noftries/elapsed
+ return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second",
+ nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average)
+ end
+end)
+
+-- we could use a stack
+
+local enableaction = tasks.enableaction
+local disableaction = tasks.disableaction
+
+local function enable()
+ enableaction("processors", "builders.paragraphs.solutions.splitters.split")
+ enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
+end
+
+local function disable()
+ disableaction("processors", "builders.paragraphs.solutions.splitters.split")
+ disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
+end
+
+function splitters.start(name,settings)
+ if pushsplitter(name,settings) == 1 then
+ enable()
+ end
+end
+
+function splitters.stop()
+ if popsplitter() == 0 then
+ disable()
+ end
+end
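+
+-- Hedged usage sketch: start/stop nest, and the split/optimize actions are only
+-- active while the stack is non-empty ('demo' is a made-up solution name):
+--
+-- splitters.start("demo") -- depth 1: processors/finalizers enabled
+-- splitters.start("demo") -- depth 2
+-- splitters.stop()        -- depth 1
+-- splitters.stop()        -- depth 0: disabled again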
+
+function splitters.set(name,settings)
+ if #stack > 0 then
+ stack = { }
+ else
+ enable()
+ end
+ pushsplitter(name,settings) -- sets attribute etc
+end
+
+function splitters.reset()
+ if #stack > 0 then
+ stack = { }
+ popsplitter() -- resets attribute etc
+ disable()
+ end
+end
+
+-- interface
+
+commands.definefontsolution = splitters.define
+commands.startfontsolution = splitters.start
+commands.stopfontsolution = splitters.stop
+commands.setfontsolution = splitters.set
+commands.resetfontsolution = splitters.reset
diff --git a/Master/texmf-dist/tex/context/base/node-spl.mkiv b/Master/texmf-dist/tex/context/base/font-sol.mkvi
index 3630212af3b..b40e37ceddf 100644
--- a/Master/texmf-dist/tex/context/base/node-spl.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-sol.mkvi
@@ -1,8 +1,8 @@
%D \module
-%D [ file=node-spl,
+%D [ file=font-sol,
%D version=2009.05.19,
-%D title=\CONTEXT\ Node Macros,
-%D subtitle=Splitters,
+%D title=\CONTEXT\ Font Macros,
+%D subtitle=Solutions,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -13,9 +13,7 @@
\writestatus{loading}{ConTeXt Node Support / Splitters}
-\registerctxluafile{node-spl}{1.001}
-
-\definesystemattribute[splitter][public]
+% todo: pass color for tracing
%D This module is specially made for the oriental \TEX\ project. The working is as
%D follows (and tuned for fonts like Idris' Husayni. The following method came to
@@ -74,36 +72,47 @@
%D \disabletrackers[parbuilders.solutions.splitters.colors]
%D \stoptyping
+\registerctxluafile{font-sol}{1.001}
+
\unprotect
-\newtoks\everysetupfontsolutions
+\definesystemattribute[splitter][public]
-\unexpanded\def\definefontsolution
- {\dodoubleargument\dodefinefontsolution}
+\installcorenamespace{fontsolution}
-\def\dodefinefontsolution[#1][#2]% we could set the attribute at the lua end
- {\setxvalue{\??fu:#1}{\attribute\splitterattribute\ctxlua{builders.paragraphs.solutions.splitters.define("#1","#2")}\relax}}
+\installcommandhandler \??fontsolution {fontsolution} \??fontsolution
-\unexpanded\def\setfontsolution[#1]%
- {\ctxlua{builders.paragraphs.solutions.splitters.enable()}%
- \csname\??fu:#1\endcsname}
+\let\setupfontsolutions\setupfontsolution
-\unexpanded\def\resetfontsolution
- {\ctxlua{builders.paragraphs.solutions.splitters.disable()}%
- \attribute\splitterattribute\attributeunsetvalue}
+\appendtoks
+ \ctxcommand{definefontsolution("\currentfontsolution",{ % these are frozen
+ goodies = "\fontsolutionparameter\s!goodies",
+ solution = "\fontsolutionparameter\c!solution",
+ less = "\fontsolutionparameter\c!less",
+ more = "\fontsolutionparameter\c!more",
+ })}
+\to \everydefinefontsolution
-\letvalue{\??fu:\v!reset}\resetfontsolution
+\unexpanded\def\setfontsolution[#solution]% just one
+ {\edef\currentfontsolution{#solution}%
+ \ctxcommand{setfontsolution("\currentfontsolution",{
+ method = "\fontsolutionparameter\c!method",
+ criterium = "\fontsolutionparameter\c!criterium",
+ % randomseed = "\fontsolutionparameter\c!random",
+ })}}
-\unexpanded\def\setupfontsolutions[#1]%
- {\getparameters[\??fu][#1]%
- \the\everysetupfontsolutions}
+\unexpanded\def\resetfontsolution % resets all
+ {\ctxcommand{resetfontsolution()}%
+ \let\currentfontsolution\empty}
-\appendtoks
- \ctxlua{builders.paragraphs.solutions.splitters.setup {
- method = "\@@fumethod",
- criterium = "\@@fucriterium",
- }}%
-\to \everysetupfontsolutions
+\unexpanded\def\startfontsolution % [#1]
+ {\pushmacro\currentfontsolution
+ \setfontsolution}
+
+\unexpanded\def\stopfontsolution
+ {\ifhmode\par\fi
+ \ctxcommand{stopfontsolution()}%
+ \popmacro\currentfontsolution}
% We initialize this module at the \LUA\ end.
%
diff --git a/Master/texmf-dist/tex/context/base/font-sty.mkvi b/Master/texmf-dist/tex/context/base/font-sty.mkvi
index d8f01afa700..3caa944887c 100644
--- a/Master/texmf-dist/tex/context/base/font-sty.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-sty.mkvi
@@ -84,7 +84,7 @@
% \appendtoks
% \let\groupedcommand\thirdofthreearguments
-% \to \simplifiedcommands
+% \to \everysimplifycommands
%D This command also defines the keyword as command. This means
%D that the example definition of \type{bold} we gave before,
@@ -216,11 +216,19 @@
\installcommandhandler \??style {style} \??style
+\setupstyle
+ [%\c!style=,
+ %\c!color=,
+ \c!method=\v!command]
+
\appendtoks
\letvalue{\??stylecheck\currentstyle}\relax
- \setuevalue{\e!start\currentstyle}{\font_styles_apply_start{\currentstyle}}%
- \setuevalue{\e!stop \currentstyle}{\font_styles_apply_stop}%
- \setuevalue {\currentstyle}{\font_styles_apply_grouped{\currentstyle}}% no longer groupedcommand here
+ \edef\p_method{\styleparameter\c!method}%
+ \ifx\p_method\v!command
+ \setuevalue{\e!start\currentstyle}{\font_styles_apply_start{\currentstyle}}%
+ \setuevalue{\e!stop \currentstyle}{\font_styles_apply_stop}%
+ \setuevalue {\currentstyle}{\font_styles_apply_grouped{\currentstyle}}% no longer groupedcommand here
+ \fi
\to \everydefinestyle
\unexpanded\def\font_styles_apply_start#name%
@@ -242,7 +250,9 @@
\unexpanded\def\font_styles_use_generic#specification%
{\let\currentstyle\s!unknown % reasonable generic tag
- \setupcurrentstyle[\c!style=,\c!color=,#specification]%
+ \letstyleparameter\c!style\empty
+ \letstyleparameter\c!color\empty
+ \setupcurrentstyle[#specification]%
\usestylestyleandcolor\c!style\c!color}
% commands
@@ -251,14 +261,20 @@
\unexpanded\def\style[#name]% as this can be a switch we use groupedcommand
{\csname\??styleargument
- \ifcsname#name\endcsname1\else\ifcsname\??stylecheck#name\endcsname2\else3\fi\fi
+ \ifcsname\??stylecheck#name\endcsname
+ 2% defined as style
+ \else\ifcsname#name\endcsname
+ 1% defined as command
+ \else
+ 3% specification
+ \fi\fi
\endcsname{#name}}
\setvalue{\??styleargument1}#name%
- {\csname#name\endcsname}
+ {\groupedcommand{\csname#name\endcsname}{}}
\setvalue{\??styleargument2}#name%
- {\groupedcommand{\font_styles_use_defined{#name}}{}}
+ {\groupedcommand{\font_styles_use_defined{#name}}{}} % or {\font_styles_apply_grouped{#name}}
\setvalue{\??styleargument3}#specification%
{\doifassignmentelse{#specification}\font_styles_assignment\font_styles_direct{#specification}}
@@ -273,7 +289,13 @@
\unexpanded\def\startstyle[#name]%
{\begingroup
\csname\??styleenvironment
- \ifcsname#name\endcsname1\else\ifcsname\??stylecheck#name\endcsname2\else3\fi\fi
+ \ifcsname\??stylecheck#name\endcsname
+ 2% defined as style
+ \else\ifcsname#name\endcsname
+ 1% defined as command
+ \else
+ 3% specification
+ \fi\fi
\endcsname{#name}}
\unexpanded\def\stopstyle
@@ -289,7 +311,7 @@
\setvalue{\??styleenvironment3}#specification%
{\doifassignmentelse{#specification}\font_styles_start_assignment\font_styles_start_direct{#specification}}
-\def\font_styles_start_assignment#specification{\usegenericstyle{#specification}}
+\def\font_styles_start_assignment#specification{\font_styles_use_generic{#specification}}
\def\font_styles_start_direct #specification{\definedfont[#specification]\relax}
%D Still experimental (might even go away).
@@ -358,12 +380,12 @@
%D Variant selectors
%D
%D \starttyping
-%D \mathematics {\vsone{\utfchar{"2229}}}
-%D \mathematics {\utfchar{"2229}\vsone{}}
+%D \mathematics {\vsone{\utfchar{0x2229}}}
+%D \mathematics {\utfchar{0x2229}\vsone{}}
%D \stoptyping
-\unexpanded\edef\vsone#character{#character\utfchar{"FE00}} % used
-\unexpanded\edef\vstwo#character{#character\utfchar{"FE01}} % not used but handy for testing
+\unexpanded\edef\vsone#character{#character\normalUchar"FE00 } % used
+\unexpanded\edef\vstwo#character{#character\normalUchar"FE01 } % not used but handy for testing
%D For historic reasons we keep the following around but they are no longer
%D that relevant for \MKIV.
diff --git a/Master/texmf-dist/tex/context/base/font-sym.mkvi b/Master/texmf-dist/tex/context/base/font-sym.mkvi
index 68237c07337..e1d5332c479 100644
--- a/Master/texmf-dist/tex/context/base/font-sym.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-sym.mkvi
@@ -165,8 +165,8 @@
\unexpanded\def\getnamedglyphstyled#fontname#character{{\setstyledsymbolicfont{#fontname}\ctxcommand{fontchar("#character")}}}
\unexpanded\def\getnamedglyphdirect#fontname#character{{\setdirectsymbolicfont{#fontname}\ctxcommand{fontchar("#character")}}}
-\unexpanded\def\getglyphstyled #fontname#character{{\setstyledsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#2}}
-\unexpanded\def\getglyphdirect #fontname#character{{\setdirectsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#2}}
+\unexpanded\def\getglyphstyled #fontname#character{{\setstyledsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#character}}
+\unexpanded\def\getglyphdirect #fontname#character{{\setdirectsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#character}}
% this one is wrong:
diff --git a/Master/texmf-dist/tex/context/base/font-syn.lua b/Master/texmf-dist/tex/context/base/font-syn.lua
index c4f0e948b87..81e27afd603 100644
--- a/Master/texmf-dist/tex/context/base/font-syn.lua
+++ b/Master/texmf-dist/tex/context/base/font-syn.lua
@@ -8,18 +8,30 @@ if not modules then modules = { } end modules ['font-syn'] = {
-- todo: subs in lookups requests
-local utf = unicode.utf8
-local next, tonumber = next, tonumber
-local gsub, lower, match, find, lower, upper = string.gsub, string.lower, string.match, string.find, string.lower, string.upper
+local next, tonumber, type, tostring = next, tonumber, type, tostring
+local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper
local find, gmatch = string.find, string.gmatch
local concat, sort, format = table.concat, table.sort, string.format
local serialize = table.serialize
local lpegmatch = lpeg.match
-local utfgsub, utflower = utf.gsub, utf.lower
local unpack = unpack or table.unpack
+local formatters = string.formatters
-local allocate = utilities.storage.allocate
-local sparse = utilities.storage.sparse
+local allocate = utilities.storage.allocate
+local sparse = utilities.storage.sparse
+
+local removesuffix = file.removesuffix
+local splitbase = file.splitbase
+local splitname = file.splitname
+local basename = file.basename
+local nameonly = file.nameonly
+local pathpart = file.pathpart
+local filejoin = file.join
+local is_qualified_path = file.is_qualified_path
+
+local findfile = resolvers.findfile
+local cleanpath = resolvers.cleanpath
+local resolveresolved = resolvers.resolve
local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
@@ -34,13 +46,13 @@ using a table that has keys filtered from the font related files.</p>
fonts = fonts or { } -- also used elsewhere
-local names = { }
+local names = font.names or allocate { }
fonts.names = names
-names.filters = names.filters or { }
-local filters = names.filters
+local filters = names.filters or { }
+names.filters = filters
-names.data = names.data or { }
+names.data = names.data or allocate { }
names.version = 1.110
names.basename = "names"
@@ -121,6 +133,50 @@ local variants = Cs( -- fax casual
local normalized_variants = sparse()
+names.knownweights = {
+ "black",
+ "bold",
+ "demi",
+ "demibold",
+ "extrabold",
+ "heavy",
+ "light",
+ "medium",
+ "mediumbold",
+ "normal",
+ "regular",
+ "semi",
+ "semibold",
+ "ultra",
+ "ultrabold",
+ "ultralight",
+}
+
+names.knownstyles = {
+ "italic",
+ "normal",
+ "oblique",
+ "regular",
+ "reverseitalic",
+ "reverseoblique",
+ "roman",
+ "slanted",
+}
+
+names.knownwidths = {
+ "book",
+ "condensed",
+ "expanded",
+ "normal",
+ "thin",
+}
+
+names.knownvariants = {
+ "normal",
+ "oldstyle",
+ "smallcaps",
+}
+
local any = P(1)
local analyzed_table
@@ -144,13 +200,13 @@ function names.splitspec(askedname)
width = width and lpegmatch(widths, width) or width
variant = variant and lpegmatch(variants,variant) or variant
if trace_names then
- report_names("requested name '%s' split in name '%s', weight '%s', style '%s', width '%s' and variant '%s'",
- askedname,name or '',weight or '',style or '',width or '',variant or '')
+ report_names("requested name %a split in name %a, weight %a, style %a, width %a and variant %a",
+ askedname,name,weight,style,width,variant)
end
if not weight or not style or not width or not variant then
weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
if trace_names then
- report_names("request '%s' normalized to '%s-%s-%s-%s-%s'",
+ report_names("request %a normalized to '%s-%s-%s-%s-%s'",
askedname,name,weight,style,width,variant)
end
end
@@ -175,7 +231,7 @@ filters.ttf = fontloader.info
filters.ttc = fontloader.info
filters.dfont = fontloader.info
-function fontloader.fullinfo(...)
+function fontloader.fullinfo(...) -- check with taco what we get / could get
local ff = fontloader.open(...)
if ff then
local d = ff and fontloader.to_table(ff)
@@ -192,9 +248,9 @@ filters.otf = fontloader.fullinfo
function filters.afm(name)
-- we could parse the afm file as well, and then report an error but
-- it's not worth the trouble
- local pfbname = resolvers.findfile(file.removesuffix(name)..".pfb","pfb") or ""
+ local pfbname = findfile(removesuffix(name)..".pfb","pfb") or ""
if pfbname == "" then
- pfbname = resolvers.findfile(file.removesuffix(file.basename(name))..".pfb","pfb") or ""
+ pfbname = findfile(nameonly(name)..".pfb","pfb") or ""
end
if pfbname ~= "" then
local f = io.open(name)
@@ -228,7 +284,7 @@ for combination with the weight of a font.</p>
filters.list = {
"otf", "ttf", "ttc", "dfont", "afm",
---~ "ttc", "otf", "ttf", "dfont", "afm",
+ -- "ttc", "otf", "ttf", "dfont", "afm",
}
names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
@@ -240,11 +296,11 @@ filters.names = { }
function names.getpaths(trace)
local hash, result, r = { }, { }, 0
local function collect(t,where)
- for i=1, #t do
- local v = resolvers.cleanpath(t[i])
+ for i=1,#t do
+ local v = cleanpath(t[i])
v = gsub(v,"/+$","") -- not needed any more
local key = lower(v)
- report_names("adding path from %s: %s",where,v)
+ report_names("%a specifies path %a",where,v)
if not hash[key] then
r = r + 1
result[r] = v
@@ -263,34 +319,34 @@ function names.getpaths(trace)
end
if confname ~= "" then
-- first look in the tex tree
- local name = resolvers.findfile(confname,"fontconfig files") or ""
+ local name = findfile(confname,"fontconfig files") or ""
if name == "" then
-- after all, fontconfig is a unix thing
- name = file.join("/etc",confname)
+ name = filejoin("/etc",confname)
if not lfs.isfile(name) then
name = "" -- force quit
end
end
if name ~= "" and lfs.isfile(name) then
if trace_names then
- report_names("loading fontconfig file: %s",name)
+ report_names("%s fontconfig file %a","loading",name)
end
local xmldata = xml.load(name)
-- begin of untested mess
xml.include(xmldata,"include","",true,function(incname)
- if not file.is_qualified_path(incname) then
- local path = file.dirname(name) -- main name
+ if not is_qualified_path(incname) then
+ local path = pathpart(name) -- main name
if path ~= "" then
- incname = file.join(path,incname)
+ incname = filejoin(path,incname)
end
end
if lfs.isfile(incname) then
if trace_names then
- report_names("merging included fontconfig file: %s",incname)
+ report_names("%s fontconfig file %a","merging included",incname)
end
return io.loaddata(incname)
elseif trace_names then
- report_names("ignoring included fontconfig file: %s",incname)
+ report_names("%s fontconfig file: %a","ignoring included",incname)
end
end)
-- end of untested mess
@@ -310,12 +366,10 @@ end
local function cleanname(name)
return (gsub(lower(name),"[^%a%d]",""))
- -- once we can load files with utf names, we can play with the following:
- -- return (utfgsub(utfgsub(lower(str),"[^%a%A%d]",""),"%s",""))
end
local function cleanfilename(fullname,defaultsuffix)
- local path, name, suffix = file.splitname(fullname)
+ local path, name, suffix = splitname(fullname)
name = gsub(lower(name),"[^%a%d]","")
if suffix and suffix ~= "" then
return name .. ".".. suffix
@@ -345,33 +399,32 @@ local function walk_tree(pathlist,suffix,identify)
if pathlist then
for i=1,#pathlist do
local path = pathlist[i]
- path = resolvers.cleanpath(path .. "/")
+ path = cleanpath(path .. "/")
path = gsub(path,"/+","/")
local pattern = path .. "**." .. suffix -- ** forces recurse
- report_names( "globbing path %s",pattern)
+ report_names("globbing path %a",pattern)
local t = dir.glob(pattern)
sort(t,sorter)
for j=1,#t do
local completename = t[j]
- identify(completename,file.basename(completename),suffix,completename)
+ identify(completename,basename(completename),suffix,completename)
end
end
end
end
-local function check_name(data,result,filename,suffix,subfont)
+local function check_name(data,result,filename,modification,suffix,subfont)
-- shortcuts
local specifications = data.specifications
- local families = data.families
-- prepare
local names = check_names(result)
-- fetch
- local familyname = (names and names.preffamilyname) or result.familyname
- local fullname = (names and names.fullname) or result.fullname
+ local familyname = names and names.preffamilyname or result.familyname
+ local fullname = names and names.fullname or result.fullname
local fontname = result.fontname
- local subfamily = (names and names.subfamily)
- local modifiers = (names and names.prefmodifiers)
- local weight = (names and names.weight) or result.weight
+ local subfamily = names and names.subfamily
+ local modifiers = names and names.prefmodifiers
+ local weight = names and names.weight or result.weight
local italicangle = tonumber(result.italicangle)
local subfont = subfont or nil
local rawname = fullname or fontname or familyname
@@ -382,7 +435,7 @@ local function check_name(data,result,filename,suffix,subfont)
subfamily = subfamily and cleanname(subfamily)
modifiers = modifiers and cleanname(modifiers)
weight = weight and cleanname(weight)
- italicangle = (italicangle == 0) and nil
+ italicangle = italicangle == 0 and nil
-- analyze
local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname)
-- check
@@ -404,34 +457,38 @@ local function check_name(data,result,filename,suffix,subfont)
if not familyname then
familyname = a_name
end
- fontname = fontname or fullname or familyname or file.basename(filename)
+ fontname = fontname or fullname or familyname or basename(filename)
fullname = fullname or fontname
familyname = familyname or fontname
specifications[#specifications + 1] = {
- filename = filename, -- unresolved
- format = lower(suffix),
- subfont = subfont,
- rawname = rawname,
- familyname = familyname,
- fullname = fullname,
- fontname = fontname,
- subfamily = subfamily,
- modifiers = modifiers,
- weight = weight,
- style = style,
- width = width,
- variant = variant,
- minsize = result.design_range_bottom or 0,
- maxsize = result.design_range_top or 0,
- designsize = result.design_size or 0,
+ filename = filename, -- unresolved
+ format = lower(suffix),
+ subfont = subfont,
+ rawname = rawname,
+ familyname = familyname,
+ fullname = fullname,
+ fontname = fontname,
+ subfamily = subfamily,
+ modifiers = modifiers,
+ weight = weight,
+ style = style,
+ width = width,
+ variant = variant,
+ minsize = result.design_range_bottom or 0,
+ maxsize = result.design_range_top or 0,
+ designsize = result.design_size or 0,
+ modification = modification or 0,
}
end
local function cleanupkeywords()
- local data = names.data
+ local data = names.data
local specifications = names.data.specifications
if specifications then
- local weights, styles, widths, variants = { }, { }, { }, { }
+ local weights = { }
+ local styles = { }
+ local widths = { }
+ local variants = { }
for i=1,#specifications do
local s = specifications[i]
-- fix (sofar styles are taken from the name, and widths from the specification)
@@ -463,29 +520,40 @@ local function cleanupkeywords()
end
local function collectstatistics()
- local data = names.data
+ local data = names.data
local specifications = data.specifications
if specifications then
- local weights, styles, widths, variants = { }, { }, { }, { }
+ local weights = { }
+ local styles = { }
+ local widths = { }
+ local variants = { }
for i=1,#specifications do
- local s = specifications[i]
- local weight, style, width, variant = s.weight, s.style, s.width, s.variant
+ local s = specifications[i]
+ local weight = s.weight
+ local style = s.style
+ local width = s.width
+ local variant = s.variant
if weight then weights [weight ] = (weights [weight ] or 0) + 1 end
if style then styles [style ] = (styles [style ] or 0) + 1 end
if width then widths [width ] = (widths [width ] or 0) + 1 end
if variant then variants[variant] = (variants[variant] or 0) + 1 end
end
- local stats = data.statistics
- stats.weights, stats.styles, stats.widths, stats.variants, stats.fonts = weights, styles, widths, variants, #specifications
+ local stats = data.statistics
+ stats.weights = weights
+ stats.styles = styles
+ stats.widths = widths
+ stats.variants = variants
+ stats.fonts = #specifications
end
end
local function collecthashes()
- local data = names.data
+ local data = names.data
local mappings = data.mappings
local fallbacks = data.fallbacks
local specifications = data.specifications
- local nofmappings, noffallbacks = 0, 0
+ local nofmappings = 0
+ local noffallbacks = 0
if specifications then
-- maybe multiple passes
for index=1,#specifications do
@@ -498,22 +566,20 @@ local function collecthashes()
if fontname and not mf[fontname] then
mf[fontname], nofmappings = index, nofmappings + 1
end
- if familyname and weight then
+ if familyname and weight and weight ~= sub(familyname,#familyname-#weight+1,#familyname) then
local madename = familyname .. weight
if not mf[madename] and not ff[madename] then
ff[madename], noffallbacks = index, noffallbacks + 1
end
end
- if familyname and subfamily then
+ if familyname and subfamily and subfamily ~= sub(familyname,#familyname-#subfamily+1,#familyname) then
local extraname = familyname .. subfamily
if not mf[extraname] and not ff[extraname] then
ff[extraname], noffallbacks = index, noffallbacks + 1
end
end
- if familyname then
- if not mf[familyname] and not ff[familyname] then
- ff[familyname], noffallbacks = index, noffallbacks + 1
- end
+ if familyname and not mf[familyname] and not ff[familyname] then
+ ff[familyname], noffallbacks = index, noffallbacks + 1
end
end
end
@@ -521,12 +587,12 @@ local function collecthashes()
end
local function collectfamilies()
- local data = names.data
+ local data = names.data
local specifications = data.specifications
- local families = data.families
+ local families = data.families
for index=1,#specifications do
local familyname = specifications[index].familyname
- local family = families[familyname]
+ local family = families[familyname]
if not family then
families[familyname] = { index }
else
@@ -536,14 +602,15 @@ local function collectfamilies()
end
local function checkduplicate(where) -- fails on "Romantik" but that's a border case anyway
- local data = names.data
- local mapping = data[where]
- local specifications, loaded = data.specifications, { }
+ local data = names.data
+ local mapping = data[where]
+ local specifications = data.specifications
+ local loaded = { }
if specifications and mapping then
for _, m in next, mapping do
for k, v in next, m do
local s = specifications[v]
- local hash = format("%s-%s-%s-%s-%s",s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*")
+ local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*")
local h = loaded[hash]
if h then
local ok = true
@@ -569,12 +636,12 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border
local nv = #v
if nv > 1 then
if trace_warnings then
- report_names( "double lookup: %s => %s",k,concat(v," | "))
+ report_names("lookup %a clashes with %a",k,v)
end
n = n + nv
end
end
- report_names( "%s double lookups in %s",n,where)
+ report_names("%a double lookups in %a",n,where)
end
local function checkduplicates()
@@ -587,13 +654,19 @@ local sorter = function(a,b)
end
local function sorthashes()
- local data, list = names.data, filters.list
- local mappings, fallbacks, sorted_mappings, sorted_fallbacks = data.mappings, data.fallbacks, { }, { }
- data.sorted_mappings, data.sorted_fallbacks = sorted_mappings, sorted_fallbacks
+ local data = names.data
+ local list = filters.list
+ local mappings = data.mappings
+ local fallbacks = data.fallbacks
+ local sorted_mappings = { }
+ local sorted_fallbacks = { }
+ data.sorted_mappings = sorted_mappings
+ data.sorted_fallbacks = sorted_fallbacks
for i=1,#list do
local l = list[i]
- sorted_mappings[l], sorted_fallbacks[l] = table.keys(mappings[l]), table.keys(fallbacks[l])
- sort(sorted_mappings[l],sorter)
+ sorted_mappings [l] = table.keys(mappings[l])
+ sorted_fallbacks[l] = table.keys(fallbacks[l])
+ sort(sorted_mappings [l],sorter)
sort(sorted_fallbacks[l],sorter)
end
data.sorted_families = table.keys(data.families)
@@ -601,7 +674,7 @@ local function sorthashes()
end
local function unpackreferences()
- local data = names.data
+ local data = names.data
local specifications = data.specifications
if specifications then
for k, v in next, data.families do
@@ -628,40 +701,52 @@ local function unpackreferences()
end
end
-local function analyzefiles()
- local data = names.data
- local done, totalnofread, totalnofskipped, totalnofduplicates, nofread, nofskipped, nofduplicates = { }, 0, 0, 0, 0, 0, 0
- local skip_paths, skip_names = filters.paths, filters.names
---~ local trace_warnings = true
+local function analyzefiles(olddata)
+ if not trace_warnings then
+ report_names("warnings are disabled (tracker 'fonts.warnings')")
+ end
+ local data = names.data
+ local done = { }
+ local totalnofread = 0
+ local totalnofskipped = 0
+ local totalnofduplicates = 0
+ local nofread = 0
+ local nofskipped = 0
+ local nofduplicates = 0
+ local skip_paths = filters.paths
+ local skip_names = filters.names
+ local specifications = data.specifications
+ local oldindices = olddata and olddata.indices or { }
+ local oldspecifications = olddata and olddata.specifications or { }
+ local oldrejected = olddata and olddata.rejected or { }
local function identify(completename,name,suffix,storedname)
- local basename = file.basename(completename)
- local basepath = file.dirname(completename)
+ local pathpart, basepart = splitbase(completename)
nofread = nofread + 1
if done[name] then
-- already done (avoid otf afm clash)
if trace_names then
- report_names("%s font %s already done",suffix,completename)
+ report_names("%s font %a already done",suffix,completename)
end
nofduplicates = nofduplicates + 1
nofskipped = nofskipped + 1
elseif not io.exists(completename) then
-- weird error
if trace_names then
- report_names("%s font %s does not really exist",suffix,completename)
+ report_names("%s font %a does not really exist",suffix,completename)
end
nofskipped = nofskipped + 1
- elseif not file.is_qualified_path(completename) and resolvers.findfile(completename,suffix) == "" then
- -- not locateble by backend anyway
+ elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then
+ -- not locatable by backend anyway
if trace_names then
- report_names("%s font %s cannot be found by backend",suffix,completename)
+ report_names("%s font %a cannot be found by backend",suffix,completename)
end
nofskipped = nofskipped + 1
else
if #skip_paths > 0 then
for i=1,#skip_paths do
- if find(basepath,skip_paths[i]) then
+ if find(pathpart,skip_paths[i]) then
if trace_names then
- report_names("rejecting path of %s font %s",suffix,completename)
+ report_names("rejecting path of %s font %a",suffix,completename)
end
nofskipped = nofskipped + 1
return
@@ -670,10 +755,10 @@ local function analyzefiles()
end
if #skip_names > 0 then
for i=1,#skip_paths do
- if find(basename,skip_names[i]) then
+ if find(basepart,skip_names[i]) then
done[name] = true
if trace_names then
- report_names("rejecting name of %s font %s",suffix,completename)
+ report_names("rejecting name of %s font %a",suffix,completename)
end
nofskipped = nofskipped + 1
return
@@ -681,29 +766,50 @@ local function analyzefiles()
end
end
if trace_names then
- report_names("identifying %s font %s",suffix,completename)
+ report_names("identifying %s font %a",suffix,completename)
end
- local result, message = filters[lower(suffix)](completename)
- if result then
- if result[1] then
- for r=1,#result do
- local ok = check_name(data,result[r],storedname,suffix,r-1) -- subfonts start at zero
+ local result = nil
+ local modification = lfs.attributes(completename,"modification")
+ if olddata and modification and modification > 0 then
+ local oldindex = oldindices[storedname] -- index into specifications
+ if oldindex then
+ local oldspecification = oldspecifications[oldindex]
+ if oldspecification and oldspecification.filename == storedname then -- double check for out of sync
+ local oldmodification = oldspecification.modification
+ if oldmodification == modification then
+ result = oldspecification
+ specifications[#specifications + 1] = result
+ else
+ end
+ else
+ end
+ elseif oldrejected[storedname] == modification then
+ result = false
+ end
+ end
+ if result == nil then
+ local result, message = filters[lower(suffix)](completename)
+ if result then
+ if result[1] then
+ for r=1,#result do
+ local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero
+ -- if not ok then
+ -- nofskipped = nofskipped + 1
+ -- end
+ end
+ else
+ local ok = check_name(data,result,storedname,modification,suffix)
-- if not ok then
-- nofskipped = nofskipped + 1
-- end
end
- else
- local ok = check_name(data,result,storedname,suffix)
- -- if not ok then
- -- nofskipped = nofskipped + 1
- -- end
- end
- if trace_warnings and message and message ~= "" then
- report_names("warning when identifying %s font %s: %s",suffix,completename,message)
+ if trace_warnings and message and message ~= "" then
+ report_names("warning when identifying %s font %a, %s",suffix,completename,message)
+ end
+ elseif trace_warnings then
+ nofskipped = nofskipped + 1
+ report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown")
end
- elseif trace_warnings then
- nofskipped = nofskipped + 1
- report_names("error when identifying %s font %s: %s",suffix,completename,message or "unknown")
end
done[name] = true
end
@@ -716,73 +822,85 @@ local function analyzefiles()
local t = os.gettimeofday() -- use elapser
nofread, nofskipped, nofduplicates = 0, 0, 0
suffix = lower(suffix)
- report_names( "identifying %s font files with suffix %s",what,suffix)
+ report_names("identifying %s font files with suffix %a",what,suffix)
method(suffix)
suffix = upper(suffix)
- report_names( "identifying %s font files with suffix %s",what,suffix)
+ report_names("identifying %s font files with suffix %a",what,suffix)
method(suffix)
totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates
local elapsed = os.gettimeofday() - t
- report_names( "%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed)
+ report_names("%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed)
end
logs.flush()
end
- if not trace_warnings then
- report_names( "warnings are disabled (tracker 'fonts.warnings')")
- end
- traverse("tree", function(suffix) -- TEXTREE only
+ -- problem .. this will not take care of duplicates
+ local function withtree(suffix)
resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
if method == "file" or method == "tree" then
local completename = root .."/" .. path .. "/" .. name
- completename = resolvers.resolve(completename) -- no shortcut
+ completename = resolveresolved(completename) -- no shortcut
identify(completename,name,suffix,name)
return true
end
end, function(blobtype,blobpath,pattern)
- blobpath = resolvers.resolve(blobpath) -- no shortcut
- report_names( "scanning %s for %s files",blobpath,suffix)
+ blobpath = resolveresolved(blobpath) -- no shortcut
+ report_names("scanning path %a for %s files",blobpath,suffix)
end, function(blobtype,blobpath,pattern,total,checked,done)
- blobpath = resolvers.resolve(blobpath) -- no shortcut
- report_names( "%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
+ blobpath = resolveresolved(blobpath) -- no shortcut
+ report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
end)
- end)
- if texconfig.kpse_init then
+ end
+ local function withlsr(suffix) -- all trees
-- we do this only for a stupid names run, not used for context itself,
- -- using the vars is to clumsy so we just stick to a full scan instead
- traverse("lsr", function(suffix) -- all trees
- local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
- walk_tree(pathlist,suffix,identify)
- end)
+ -- using the vars is too clumsy so we just stick to a full scan instead
+ local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
+ walk_tree(pathlist,suffix,identify)
+ end
+ local function withsystem(suffix) -- OSFONTDIR cum suis
+ walk_tree(names.getpaths(trace),suffix,identify)
+ end
+ traverse("tree",withtree) -- TEXTREE only
+ if texconfig.kpse_init then
+ traverse("lsr", withlsr)
else
- traverse("system", function(suffix) -- OSFONTDIR cum suis
- walk_tree(names.getpaths(trace),suffix,identify)
- end)
+ traverse("system", withsystem)
end
- data.statistics.readfiles, data.statistics.skippedfiles, data.statistics.duplicatefiles = totalnofread, totalnofskipped, totalnofduplicates
+ data.statistics.readfiles = totalnofread
+ data.statistics.skippedfiles = totalnofskipped
+ data.statistics.duplicatefiles = totalnofduplicates
end
local function addfilenames()
- local data = names.data
+ local data = names.data
local specifications = data.specifications
- local files = { }
+ local indices = { }
+ local files = { }
for i=1,#specifications do
local fullname = specifications[i].filename
files[cleanfilename(fullname)] = fullname
+ indices[fullname] = i
end
- data.files = files
+ data.files = files
+ data.indices = indices
end
local function rejectclashes() -- just to be sure, so no explicit afm will be found then
- local specifications, used, okay, o = names.data.specifications, { }, { }, 0
+ local specifications = names.data.specifications
+ local used = { }
+ local okay = { }
+ local rejected = { } -- only keep modification
+ local o = 0
for i=1,#specifications do
local s = specifications[i]
local f = s.fontname
if f then
- local fnd, fnm = used[f], s.filename
+ local fnd = used[f]
+ local fnm = s.filename
if fnd then
if trace_warnings then
- report_names( "fontname '%s' clashes, rejecting '%s' in favor of '%s'",f,fnm,fnd)
+ report_names("fontname %a clashes, %a rejected in favor of %a",f,fnm,fnd)
end
+ rejected[f] = s.modification
else
used[f] = fnm
o = o + 1
@@ -795,30 +913,37 @@ local function rejectclashes() -- just to be sure, so no explicit afm will be fo
end
local d = #specifications - #okay
if d > 0 then
- report_names( "%s files rejected due to clashes",d)
+ report_names("%s files rejected due to clashes",d)
end
names.data.specifications = okay
+ names.data.rejected = rejected
end
local function resetdata()
- local mappings, fallbacks = { }, { }
+ local mappings = { }
+ local fallbacks = { }
for _, k in next, filters.list do
- mappings[k], fallbacks[k] = { }, { }
+ mappings [k] = { }
+ fallbacks[k] = { }
end
names.data = {
- version = names.version,
- mappings = mappings,
- fallbacks = fallbacks,
+ version = names.version,
+ mappings = mappings,
+ fallbacks = fallbacks,
specifications = { },
- families = { },
- statistics = { },
- datastate = resolvers.datastate(),
+ families = { },
+ statistics = { },
+ names = { },
+ indices = { },
+ rejected = { },
+ datastate = resolvers.datastate(),
}
end
-function names.identify()
+function names.identify(force)
+ local starttime = os.gettimeofday() -- use elapser
resetdata()
- analyzefiles()
+ analyzefiles(not force and names.readdata(names.basename))
rejectclashes()
collectfamilies()
collectstatistics()
@@ -827,6 +952,7 @@ function names.identify()
checkduplicates()
addfilenames()
-- sorthashes() -- will be resorted when saved
+ report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime)
end
function names.is_permitted(name)
@@ -839,11 +965,11 @@ function names.readdata(name)
return containers.read(names.cache,name)
end
-function names.load(reload,verbose)
+function names.load(reload,force)
if not names.loaded then
if reload then
if names.is_permitted(names.basename) then
- names.identify(verbose)
+ names.identify(force)
names.writedata(names.basename,names.data)
else
report_names("unable to access database cache")
@@ -890,9 +1016,11 @@ function names.list(pattern,reload,all) -- here?
local t = { }
local data = names.data
if data then
- local list = filters.list
- local mappings, sorted_mappings = data.mappings, data.sorted_mappings
- local fallbacks, sorted_fallbacks = data.fallbacks, data.sorted_fallbacks
+ local list = filters.list
+ local mappings = data.mappings
+ local sorted_mappings = data.sorted_mappings
+ local fallbacks = data.fallbacks
+ local sorted_fallbacks = data.sorted_fallbacks
for i=1,#list do
local format = list[i]
list_them(mappings[format],sorted_mappings[format],pattern,t,all)
@@ -954,10 +1082,12 @@ end
-- we could cache a lookup .. maybe some day ... (only when auto loaded!)
local function foundname(name,sub) -- sub is not used currently
- local data = names.data
- local mappings, sorted_mappings = data.mappings, data.sorted_mappings
- local fallbacks, sorted_fallbacks = data.fallbacks, data.sorted_fallbacks
- local list = filters.list
+ local data = names.data
+ local mappings = data.mappings
+ local sorted_mappings = data.sorted_mappings
+ local fallbacks = data.fallbacks
+ local sorted_fallbacks = data.sorted_fallbacks
+ local list = filters.list
-- dilemma: we lookup in the order otf ttf ttc ... afm but now an otf fallback
-- can come after an afm match ... well, one should provide nice names anyway
-- and having two lists is not an option
@@ -966,7 +1096,7 @@ local function foundname(name,sub) -- sub is not used currently
local found = mappings[l][name]
if found then
if trace_names then
- report_names("resolved via direct name match: '%s'",name)
+ report_names("resolved via direct name match: %a",name)
end
return found
end
@@ -976,7 +1106,7 @@ local function foundname(name,sub) -- sub is not used currently
local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub)
if found then
if trace_names then
- report_names("resolved via fuzzy name match: '%s' => '%s'",name,fname)
+ report_names("resolved via fuzzy name match: %a onto %a",name,fname)
end
return found
end
@@ -986,7 +1116,7 @@ local function foundname(name,sub) -- sub is not used currently
local found = fallbacks[l][name]
if found then
if trace_names then
- report_names("resolved via direct fallback match: '%s'",name)
+ report_names("resolved via direct fallback match: %a",name)
end
return found
end
@@ -996,13 +1126,13 @@ local function foundname(name,sub) -- sub is not used currently
local found, fname = fuzzy(sorted_mappings[l],sorted_fallbacks[l],name,sub)
if found then
if trace_names then
- report_names("resolved via fuzzy fallback match: '%s' => '%s'",name,fname)
+ report_names("resolved via fuzzy fallback match: %a onto %a",name,fname)
end
return found
end
end
if trace_names then
- report_names("font with name '%s' cannot be found",name)
+ report_names("font with name %a cannot be found",name)
end
end
@@ -1248,40 +1378,39 @@ end
local function collect(stage,found,done,name,weight,style,width,variant,all)
local data = names.data
- local families, sorted = data.families, data.sorted_families
- strictname = "^".. name -- to be checked
+ local families = data.families
+ local sorted = data.sorted_families
+ local strictname = "^".. name -- to be checked
local family = families[name]
if trace_names then
- report_names("resolving name '%s', weight '%s', style '%s', width '%s', variant '%s'",
- name or "?",tostring(weight),tostring(style),tostring(width),tostring(variant))
+ report_names("resolving name %a, weight %a, style %a, width %a, variant %a",name,weight,style,width,variant)
end
- --~ print(name,serialize(family))
if weight and weight ~= "" then
if style and style ~= "" then
if width and width ~= "" then
if variant and variant ~= "" then
if trace_names then
- report_names("resolving stage %s, name '%s', weight '%s', style '%s', width '%s', variant '%s'",stage,name,weight,style,width,variant)
+ report_names("resolving stage %s, name %a, weight %a, style %a, width %a, variant %a",stage,name,weight,style,width,variant)
end
s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
else
if trace_names then
- report_names("resolving stage %s, name '%s', weight '%s', style '%s', width '%s'",stage,name,weight,style,width)
+ report_names("resolving stage %s, name %a, weight %a, style %a, width %a",stage,name,weight,style,width)
end
s_collect_weight_style_width(found,done,all,weight,style,width,family)
m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
end
else
if trace_names then
- report_names("resolving stage %s, name '%s', weight '%s', style '%s'",stage,name,weight,style)
+ report_names("resolving stage %s, name %a, weight %a, style %a",stage,name,weight,style)
end
s_collect_weight_style(found,done,all,weight,style,family)
m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
end
else
if trace_names then
- report_names("resolving stage %s, name '%s', weight '%s'",stage,name,weight)
+ report_names("resolving stage %s, name %a, weight %a",stage,name,weight)
end
s_collect_weight(found,done,all,weight,family)
m_collect_weight(found,done,all,weight,families,sorted,strictname)
@@ -1289,26 +1418,26 @@ local function collect(stage,found,done,name,weight,style,width,variant,all)
elseif style and style ~= "" then
if width and width ~= "" then
if trace_names then
- report_names("resolving stage %s, name '%s', style '%s', width '%s'",stage,name,style,width)
+ report_names("resolving stage %s, name %a, style %a, width %a",stage,name,style,width)
end
s_collect_style_width(found,done,all,style,width,family)
m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
else
if trace_names then
- report_names("resolving stage %s, name '%s', style '%s'",stage,name,style)
+ report_names("resolving stage %s, name %a, style %a",stage,name,style)
end
s_collect_style(found,done,all,style,family)
m_collect_style(found,done,all,style,families,sorted,strictname)
end
elseif width and width ~= "" then
if trace_names then
- report_names("resolving stage %s, name '%s', width '%s'",stage,name,width)
+ report_names("resolving stage %s, name %a, width %a",stage,name,width)
end
s_collect_width(found,done,all,width,family)
m_collect_width(found,done,all,width,families,sorted,strictname)
else
if trace_names then
- report_names("resolving stage %s, name '%s'",stage,name)
+ report_names("resolving stage %s, name %a",stage,name)
end
s_collect(found,done,all,family)
m_collect(found,done,all,families,sorted,strictname)
@@ -1344,11 +1473,11 @@ local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
if nf then
local t = { }
for i=1,nf do
- t[i] = format("'%s'",found[i].fontname)
+ t[i] = formatters["%a"](found[i].fontname)
end
- report_names("name '%s' resolved to %s instances: %s",name,nf,concat(t," "))
+ report_names("name %a resolved to %s instances: % t",name,nf,t)
else
- report_names("name '%s' unresolved",name)
+ report_names("name %a unresolved",name)
end
end
if all then
@@ -1402,7 +1531,6 @@ function names.collectfiles(askedname,reload) -- no all
askedname = cleanname(askedname) -- or cleanname
names.load(reload)
local list = { }
- local basename = file.basename
local specifications = names.data.specifications
for i=1,#specifications do
local s = specifications[i]
@@ -1414,77 +1542,25 @@ function names.collectfiles(askedname,reload) -- no all
end
end
---~ --[[ldx--
---~ <p>Fallbacks, not permanent but a transition thing.</p>
---~ --ldx]]--
---~
---~ names.new_to_old = allocate {
---~ ["lmroman10-capsregular"] = "lmromancaps10-oblique",
---~ ["lmroman10-capsoblique"] = "lmromancaps10-regular",
---~ ["lmroman10-demi"] = "lmromandemi10-oblique",
---~ ["lmroman10-demioblique"] = "lmromandemi10-regular",
---~ ["lmroman8-oblique"] = "lmromanslant8-regular",
---~ ["lmroman9-oblique"] = "lmromanslant9-regular",
---~ ["lmroman10-oblique"] = "lmromanslant10-regular",
---~ ["lmroman12-oblique"] = "lmromanslant12-regular",
---~ ["lmroman17-oblique"] = "lmromanslant17-regular",
---~ ["lmroman10-boldoblique"] = "lmromanslant10-bold",
---~ ["lmroman10-dunhill"] = "lmromandunh10-oblique",
---~ ["lmroman10-dunhilloblique"] = "lmromandunh10-regular",
---~ ["lmroman10-unslanted"] = "lmromanunsl10-regular",
---~ ["lmsans10-demicondensed"] = "lmsansdemicond10-regular",
---~ ["lmsans10-demicondensedoblique"] = "lmsansdemicond10-oblique",
---~ ["lmsansquotation8-bold"] = "lmsansquot8-bold",
---~ ["lmsansquotation8-boldoblique"] = "lmsansquot8-boldoblique",
---~ ["lmsansquotation8-oblique"] = "lmsansquot8-oblique",
---~ ["lmsansquotation8-regular"] = "lmsansquot8-regular",
---~ ["lmtypewriter8-regular"] = "lmmono8-regular",
---~ ["lmtypewriter9-regular"] = "lmmono9-regular",
---~ ["lmtypewriter10-regular"] = "lmmono10-regular",
---~ ["lmtypewriter12-regular"] = "lmmono12-regular",
---~ ["lmtypewriter10-italic"] = "lmmono10-italic",
---~ ["lmtypewriter10-oblique"] = "lmmonoslant10-regular",
---~ ["lmtypewriter10-capsoblique"] = "lmmonocaps10-oblique",
---~ ["lmtypewriter10-capsregular"] = "lmmonocaps10-regular",
---~ ["lmtypewriter10-light"] = "lmmonolt10-regular",
---~ ["lmtypewriter10-lightoblique"] = "lmmonolt10-oblique",
---~ ["lmtypewriter10-lightcondensed"] = "lmmonoltcond10-regular",
---~ ["lmtypewriter10-lightcondensedoblique"] = "lmmonoltcond10-oblique",
---~ ["lmtypewriter10-dark"] = "lmmonolt10-bold",
---~ ["lmtypewriter10-darkoblique"] = "lmmonolt10-boldoblique",
---~ ["lmtypewritervarwd10-regular"] = "lmmonoproplt10-regular",
---~ ["lmtypewritervarwd10-oblique"] = "lmmonoproplt10-oblique",
---~ ["lmtypewritervarwd10-light"] = "lmmonoprop10-regular",
---~ ["lmtypewritervarwd10-lightoblique"] = "lmmonoprop10-oblique",
---~ ["lmtypewritervarwd10-dark"] = "lmmonoproplt10-bold",
---~ ["lmtypewritervarwd10-darkoblique"] = "lmmonoproplt10-boldoblique",
---~ }
---~
---~ names.old_to_new = allocate(table.swapped(names.new_to_old))
-
---~ todo:
---~
---~ blacklisted = {
---~ ["cmr10.ttf"] = "completely messed up",
---~ }
+-- todo:
+--
+-- blacklisted = {
+-- ["cmr10.ttf"] = "completely messed up",
+-- }
function names.exists(name)
local found = false
local list = filters.list
for k=1,#list do
local v = list[k]
- found = (resolvers.findfile(name,v) or "") ~= ""
+ found = (findfile(name,v) or "") ~= ""
if found then
return found
end
end
- return ((resolvers.findfile(name,"tfm") or "") ~= "") or ((names.resolve(name) or "") ~= "")
+ return (findfile(name,"tfm") or "") ~= "" or (names.resolve(name) or "") ~= ""
end
--- for i=1,fonts.names.lookup(pattern) do
--- texio.write_nl(fonts.names.getkey("filename",i))
--- end
-
local lastlookups, lastpattern = { }, ""
function names.lookup(pattern,name,reload) -- todo: find
@@ -1499,7 +1575,7 @@ function names.lookup(pattern,name,reload) -- todo: find
lookups = families[pattern]
end
if trace_names then
- report_names("starting with %s lookups for '%s'",#lookups,pattern)
+ report_names("starting with %s lookups for %a",#lookups,pattern)
end
if lookups then
for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
@@ -1523,7 +1599,7 @@ function names.lookup(pattern,name,reload) -- todo: find
end
end
if trace_names then
- report_names("%s matches for key '%s' with value '%s'",#t,key,value)
+ report_names("%s matches for key %a with value %a",#t,key,value)
end
lookups = t
end
@@ -1617,19 +1693,19 @@ end
function names.resolvespec(askedname,sub) -- overloads previous definition
local name, weight, style, width, variant = names.splitspec(askedname)
if trace_specifications then
- report_names("resolving specification: %s -> name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant)
+ report_names("resolving specification: %a to name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant)
end
local found = names.registered(name,weight,style,width,variant)
if found and found.filename then
if trace_specifications then
- report_names("resolved by registered names: %s -> %s",askedname,found.filename)
+ report_names("resolved by registered names: %a to %s",askedname,found.filename)
end
return found.filename, found.subname, found.rawname
else
found = names.specification(name,weight,style,width,variant)
if found and found.filename then
if trace_specifications then
- report_names("resolved by font database: %s -> %s",askedname,found.filename)
+ report_names("resolved by font database: %a to %s",askedname,found.filename)
end
return found.filename, found.subfont and found.rawname
end
diff --git a/Master/texmf-dist/tex/context/base/font-tfm.lua b/Master/texmf-dist/tex/context/base/font-tfm.lua
index b1ec7c1dec2..316b947a36b 100644
--- a/Master/texmf-dist/tex/context/base/font-tfm.lua
+++ b/Master/texmf-dist/tex/context/base/font-tfm.lua
@@ -15,22 +15,21 @@ local trace_features = false trackers.register("tfm.features", func
local report_defining = logs.reporter("fonts","defining")
local report_tfm = logs.reporter("fonts","tfm loading")
+local findbinfile = resolvers.findbinfile
+
local fonts = fonts
local handlers = fonts.handlers
local readers = fonts.readers
local constructors = fonts.constructors
local encodings = fonts.encodings
-local tfm = { }
-handlers.tfm = tfm
-
-constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua
-
-local findbinfile = resolvers.findbinfile
+local tfm = constructors.newhandler("tfm")
-local tfmfeatures = fonts.constructors.newfeatures("tfm")
+local tfmfeatures = constructors.newfeatures("tfm")
local registertfmfeature = tfmfeatures.register
+constructors.resolvevirtualtoo = false -- will be set in font-ctx.lua
+
fonts.formats.tfm = "type1" -- we need to have at least a value here
--[[ldx--
@@ -58,7 +57,7 @@ local function read_from_tfm(specification)
local filename = specification.filename
local size = specification.size
if trace_defining then
- report_defining("loading tfm file %s at size %s",filename,size)
+ report_defining("loading tfm file %a at size %s",filename,size)
end
local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough
if tfmdata then
@@ -92,7 +91,7 @@ local function read_from_tfm(specification)
constructors.enhanceparameters(parameters) -- official copies for us
--
if constructors.resolvevirtualtoo then
- fonts.loggers.register(tfmdata,file.extname(filename),specification) -- strange, why here
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here
local vfname = findbinfile(specification.name, 'ovf')
if vfname and vfname ~= "" then
local vfdata = font.read_vf(vfname,size) -- not cached, fast enough
@@ -133,7 +132,7 @@ local function check_tfm(specification,fullname) -- we could split up like afm/o
specification.format = "ofm"
return read_from_tfm(specification)
elseif trace_defining then
- report_defining("loading tfm with name %s fails",specification.name)
+ report_defining("loading tfm with name %a fails",specification.name)
end
end
diff --git a/Master/texmf-dist/tex/context/base/font-tra.mkiv b/Master/texmf-dist/tex/context/base/font-tra.mkiv
index 06b8613785d..1877c4904d7 100644
--- a/Master/texmf-dist/tex/context/base/font-tra.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-tra.mkiv
@@ -13,6 +13,8 @@
\unprotect
+\writestatus{loading}{ConTeXt Font Macros / Tracing}
+
%D \macros
%D {showbodyfont}
%D
@@ -82,10 +84,8 @@
\fetchruntimecommand \showcharratio {\f!fontprefix\s!run}
\fetchruntimecommand \showfontparameters {\f!fontprefix\s!run}
-\unexpanded\def\checkcharactersinfont {\ctxcommand{checkcharactersinfont()}}
-\unexpanded\def\removemissingcharacters{\ctxcommand{removemissingcharacters()}}
-\unexpanded\def\showchardata #1{\ctxcommand{showchardata("#1")}}
-\unexpanded\def\showfontdata {\ctxcommand{showfontparameters()}}
+\unexpanded\def\showchardata #1{\ctxcommand{showchardata("#1")}}
+\unexpanded\def\showfontdata {\ctxcommand{showfontparameters()}}
%D \macros
%D {doiffontpresentelse}
@@ -95,7 +95,7 @@
%D \doiffontpresentelse{adam-lindsay-modern-serif}{YES}{NO}
%D \stoptyping
-\def\doiffontpresentelse#1{\ctxcommand{doifelse(fonts.names.exists("#1"))}}
+\unexpanded\def\doiffontpresentelse#1{\ctxcommand{doifelse(fonts.names.exists("#1"))}}
% experimental, maybe this becomes a module
@@ -103,69 +103,140 @@
\unexpanded\def\startotfcollecting{\ctxlua{nodes.tracers.steppers.start()}}
\unexpanded\def\stopotfcollecting {\ctxlua{nodes.tracers.steppers.stop()}}
-\def\resetotfcollecting{\ctxlua{nodes.tracers.steppers.reset()}}
+\unexpanded\def\resetotfcollecting{\ctxlua{nodes.tracers.steppers.reset()}}
% Rather experimental:
%
% \page \showotfcomposition{arabtype*arab-default at 48pt}{-1}{الضَّرَّ} \page
% \page \showotfcomposition{arabtype*arab-default at 48pt}{-1}{لِلّٰهِ} \page
-\def\showotfstepglyphs#1%
+\unexpanded\def\showotfstepglyphs#1%
{\ctxlua{nodes.tracers.steppers.glyphs(\number\otfcollector,#1)}%
\unhbox\otfcollector}
-\def\otfstepcharcommand#1#2#3% font char class
+\unexpanded\def\otfstepcharcommand#1#2#3% font char class
{\removeunwantedspaces
- \hskip.5em plus .125em\relax
+ \hskip.5\emwidth \s!plus .125\emwidth\relax
\doif{#3}{mark}{\underbar}{U+\hexnumber{#2}}:\ruledhbox{\ctxlua{nodes.tracers.fontchar(#1,#2)}}%
- \hskip.5em plus .125em\relax}
+ \hskip.5\emwidth \s!plus .125\emwidth\relax}
+
+\unexpanded\def\otfstepfontcommand#1#2#3% id font size
+ {\begingroup
+ \tttf #1: #2 @ \the\dimexpr#3\scaledpoint\relax
+ \endgroup}
-\def\otfstepmessagecommand#1#2%
+\unexpanded\def\otfstepmessagecommand#1#2%
{\begingroup
\tttf\language\minusone
\veryraggedright
\forgetparindent
\forgeteverypar
- \hangindent1em
+ \hangindent\emwidth
\hangafter\plusone
- \dontleavehmode\hbox{\detokenize{#1}}\removeunwantedspaces
- \doifsomething{#2}{\break\detokenize{#2}}\endgraf
+ \dontleavehmode\detokenize{#1}\removeunwantedspaces
+ \doifsomething{#2}{,\space\detokenize{#2}}\endgraf
\endgroup
\blank}
-\def\showotfstepchars#1%
+\unexpanded\def\showotfstepfont
+ {\ctxlua{nodes.tracers.steppers.font("otfstepfontcommand")}}
+
+\unexpanded\def\showotfstepchars#1%
{\ctxlua{nodes.tracers.steppers.codes(#1,"otfstepcharcommand")}}
-\def\showotfstepmessages#1%
+\unexpanded\def\showotfstepmessages#1%
{\ctxlua{nodes.tracers.steppers.messages(#1,"otfstepmessagecommand",true)}}
-\def\showotfstepfeatures
+\unexpanded\def\showotfstepfeatures
{\ctxlua{nodes.tracers.steppers.features()}}
-\def\showotfsteps
+\unexpanded\def\otfnoffeaturesteps
+ {\ctxlua{nodes.tracers.steppers.nofsteps()}}
+
+% \unexpanded\def\showotfsteps
+% {\begingroup
+% \veryraggedright
+% \forgetparindent
+% \forgeteverypar
+% \dontleavehmode\bgroup\tttf \language\minusone features: \showotfstepfeatures\egroup
+% \blank
+% \dontleavehmode\bgroup\tttf result:\egroup
+% \blank
+% \startlinecorrection
+% \ruledhbox\bgroup\box\otfcompositionbox\egroup
+% \stoplinecorrection
+% \dorecurse\otfnoffeaturesteps
+% {\blank
+% \showotfstepmessages\recurselevel
+% \blank
+% \startlinecorrection
+% \dontleavehmode\bgroup\resetallattributes\pardir TLT\textdir TLT\relax\tttf\recurselevel: \showotfstepchars\recurselevel\egroup
+% \stoplinecorrection
+% \blank
+% \startlinecorrection
+% \ruledhbox % can be mode
+% \bgroup\resetallattributes\showotfstepglyphs\recurselevel\egroup % reset is new, we don't want additional processing
+% \stoplinecorrection
+% \blank}%
+% \endgroup}
+
+\unexpanded\def\showotfsteps
{\begingroup
\veryraggedright
\forgetparindent
\forgeteverypar
- \dontleavehmode\bgroup\tttf \language\minusone features: \showotfstepfeatures\egroup
+ \tt
+ \hbox to \hsize \bgroup
+ \hbox to 6\emwidth{\bf font\hss}%
+ \vtop \bgroup
+ \hsize\dimexpr\hsize-6\emwidth\relax
+ \language\minusone
+ \bf
+ \showotfstepfont
+ \egroup
+ \egroup
\blank
- \dontleavehmode\bgroup\tttf result:\egroup
+ \hbox to \hsize \bgroup
+ \hbox to 6\emwidth{\bf features\hss}%
+ \vtop \bgroup
+ \hsize\dimexpr\hsize-6\emwidth\relax
+ \language\minusone
+ \showotfstepfeatures
+ \egroup
+ \egroup
\blank
- \startlinecorrection
- \ruledhbox\bgroup\box\otfcompositionbox\egroup
- \stoplinecorrection
- \dorecurse{\ctxlua{nodes.tracers.steppers.nofsteps()}}
+ \scratchcounter\otfnoffeaturesteps\relax
+ \dorecurse\scratchcounter
{\blank
- \showotfstepmessages\recurselevel
- \blank
- \startlinecorrection
- \dontleavehmode\bgroup\resetallattributes\pardir TLT\textdir TLT\relax\tttf\recurselevel: \showotfstepchars\recurselevel\egroup
- \stoplinecorrection
+ \begingroup
+ \advance\leftskip6\emwidth
+ \showotfstepmessages\recurselevel
+ \par
+ \endgroup
\blank
- \startlinecorrection
- \ruledhbox % can be mode
- \bgroup\resetallattributes\showotfstepglyphs\recurselevel\egroup % reset is new, we don't want additional processing
- \stoplinecorrection
+ \dontleavehmode
+ \hbox to \hsize \bgroup
+ \hbox to 6\emwidth \bgroup
+ \bf
+ \ifnum\recurselevel=\scratchcounter result\else step \recurselevel\fi
+ \hss
+ \egroup
+ \vtop \bgroup
+ \hsize\dimexpr\hsize-6\emwidth\relax
+ \resetallattributes
+ \pardir TLT\textdir TLT\relax
+ \dontleavehmode
+ \ifnum\recurselevel=\scratchcounter
+ \ruledhbox{\box\otfcompositionbox}%
+ \else
+ \ruledhbox{\showotfstepglyphs\recurselevel}%
+ \fi
+ \quad
+ \showotfstepchars\recurselevel
+ \hfill
+ \par
+ \egroup
+ \egroup
\blank}%
\endgroup}
@@ -186,14 +257,24 @@
\newbox\otfcompositionbox
-\def\showotfcomposition#1#2#3% {font*features at size}, rl=-1, text
+% this should go in spac-ali:
+
+\installcorenamespace{otfcompositiondir}
+
+\letvalue{\??otfcompositiondir -1}\righttoleft
+\letvalue{\??otfcompositiondir r2l}\righttoleft
+\letvalue{\??otfcompositiondir l2r}\lefttoright
+\letvalue{\??otfcompositiondir +1}\lefttoright
+\letvalue{\??otfcompositiondir 1}\lefttoright
+
+\unexpanded\def\showotfcomposition#1#2#3% {font*features at size}, rl=-1, text
{\begingroup
- \veryraggedright
\forgetparindent
\forgeteverypar
- \setupcolors[\c!state=\v!start]% can be option
+ % \setupcolors[\c!state=\v!start]%
+ \setupalign[\v!verytolerant,\v!flushleft]%
\startotfsample
- \global\setbox\otfcompositionbox\hbox{\definedfont[#1]\ifnum#2<0 \textdir TRT\else\ifnum#2>0 \textdir TLT\fi\fi\relax#3}%
+ \global\setbox\otfcompositionbox\hbox{\definedfont[#1]\relax\getvalue{\??otfcompositiondir#2}\relax#3}%
\stopotfsample
\endgroup}
diff --git a/Master/texmf-dist/tex/context/base/font-vf.lua b/Master/texmf-dist/tex/context/base/font-vf.lua
index 287d073d6bd..1fe6dd71c1d 100644
--- a/Master/texmf-dist/tex/context/base/font-vf.lua
+++ b/Master/texmf-dist/tex/context/base/font-vf.lua
@@ -7,10 +7,13 @@ if not modules then modules = { } end modules ['font-vf'] = {
}
--[[ldx--
-<p>This is very experimental code! Not yet adapted to recent
-changes. This will change.</p>
+<p>This is very experimental code! Not yet adapted to recent changes. This will change.</p>
--ldx]]--
+-- present in the backend but unspecified:
+--
+-- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop
+
local next = next
local allocate = utilities.storage.allocate
@@ -19,8 +22,7 @@ local fastcopy = table.fastcopy
local fonts = fonts
local constructors = fonts.constructors
-local vf = { }
-fonts.handlers.vf = vf
+local vf = constructors.newhandler("vf")
-- general code
@@ -30,18 +32,18 @@ function vf.find(name)
local format = fonts.loggers.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
- report_defining("locating vf for %s",name)
+ report_defining("locating vf for %a",name)
end
return findbinfile(name,"ovf")
else
if trace_defining then
- report_defining("vf for %s is already taken care of",name)
+ report_defining("vf for %a is already taken care of",name)
end
return nil -- ""
end
else
if trace_defining then
- report_defining("locating vf for %s",name)
+ report_defining("locating vf for %a",name)
end
return findbinfile(name,"ovf")
end
diff --git a/Master/texmf-dist/tex/context/base/grph-epd.lua b/Master/texmf-dist/tex/context/base/grph-epd.lua
index 130907d4e6a..4f9d4609765 100644
--- a/Master/texmf-dist/tex/context/base/grph-epd.lua
+++ b/Master/texmf-dist/tex/context/base/grph-epd.lua
@@ -22,5 +22,4 @@ function figures.mergegoodies(optionlist)
if all or options[variables.layer] then
codeinjections.mergeviewerlayers()
end
-
end
diff --git a/Master/texmf-dist/tex/context/base/grph-epd.mkiv b/Master/texmf-dist/tex/context/base/grph-epd.mkiv
index 947128b6b5b..58526fd44dd 100644
--- a/Master/texmf-dist/tex/context/base/grph-epd.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-epd.mkiv
@@ -17,23 +17,29 @@
\registerctxluafile{grph-epd}{1.001}
-\def\figurereference{\ctxlua{figures.tprint("status","reference")}} % might become private
+\def\figurereference{\ctxcommand{figurestatus("reference")}} % might become private
\defineoverlay[system:graphics:epdf][\directsetup{system:graphics:epdf}]
\startsetups system:graphics:epdf
- \ctxlua{figures.mergegoodies("\@@efinteraction")}%
+ \ctxlua{figures.mergegoodies("\externalfigureparameter\c!interaction")}%
\reference[\figurereference]{}% todo: dest area
\stopsetups
+\defineframed
+ [system_graphics_epdf]
+ [\c!frame=\v!off,
+ \c!offset=\v!overlay,
+ \c!background={\v!foreground,system:graphics:epdf}]
+
\def\grph_epdf_add_overlay
{\global\setbox\foundexternalfigure\vbox\bgroup
- \framed[\c!offset=\v!overlay,\c!background={\v!foreground,system:graphics:epdf}]{\box\foundexternalfigure}%
+ \system_graphics_epdf{\box\foundexternalfigure}%
\egroup}
\appendtoks
\iflocation
- \doif\figurefiletype{pdf}{\doifnot\@@efinteraction\v!none\grph_epdf_add_overlay}%
+ \doif\figurefiletype{pdf}{\doifnot{\externalfigureparameter\c!interaction}\v!none\grph_epdf_add_overlay}%
\fi
\to \externalfigurepostprocessors
diff --git a/Master/texmf-dist/tex/context/base/grph-fig.mkiv b/Master/texmf-dist/tex/context/base/grph-fig.mkiv
index 1908e644522..80b8e35d932 100644
--- a/Master/texmf-dist/tex/context/base/grph-fig.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-fig.mkiv
@@ -11,351 +11,198 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% ex => ef with proper parent
-
-\writestatus{loading}{ConTeXt Graphic Macros / Figure Handling}
-
\unprotect
-\unexpanded\def\setupexternalfigures
- {\dosingleempty\dosetupexternalfigures}
-
-\def\dosetupexternalfigures[#1]%
- {\getparameters[\??ef][#1]% local settings
- \getparameters[\??ex][#1]% global settings
- \setfigurepathlist} % the path may be used elsewhere too (as in x-res-04)
-
-\presetlocalframed[\??ef]
-
-\newconditional\externalfigurelevel % true=background false=normal
-\newconditional\externalfigureflush % true=place false=ignore
+%D Used in the styledesign manual:
+%
+% beware in mkiv we don't have the typeset- prefix
+%
+% \setbuffer[typeset-b]\endbuffer
+% \setbuffer[typeset-a]\endbuffer
+%
+% todo:
+%
+% \appendtoks \setbuffer[typeset-b]\endbuffer\to \everystarttext
+% \appendtoks \setbuffer[typeset-a]\endbuffer\to \everystarttext
-\setfalse\externalfigurelevel
-\settrue \externalfigureflush
+\newcount\c_grph_buffers_n
-\installcorenamespace{externalfigures}
+\let\m_grph_buffers_filename\empty
-\def\doplaceexternalfigure[#1][#2][#3][#4][#5]%
- {\doifsomething{#2}% catches \defineexternalfigure dummies
- {\ifcsname\??externalfigures#2\endcsname
- \doifelse{#1}{#2}
- {\dodoplaceexternalfigure[#1][#2][#3][#4][#5]}
- {\getvalue{\??externalfigures#2}[#5]}%
- \else
- \dodoplaceexternalfigure[#1][#2][#3][#4][#5]%
- \fi}}
+\unexpanded\def\typesetbuffer
+ {\dodoubleempty\grph_buffers_typeset}
-\ifdefined\dotagfigure \else \let\dotagfigure\relax \fi
+\def\grph_buffers_typeset[#1][#2]% beware: this will mix up the mp graphics
+ {\ifsecondargument
+ \grph_buffers_typeset_indeed[#1][#2]%
+ \else\iffirstargument
+ \doifassignmentelse{#1}
+ {\grph_buffers_typeset_indeed[\jobname][#1]}%
+ {\grph_buffers_typeset_indeed[#1][]}%
+ \else
+ \grph_buffers_typeset_indeed[\jobname][]
+ \fi\fi}
-\def\dodoplaceexternalfigure[#1][#2][#3][#4][#5]%
+\def\grph_buffers_typeset_indeed[#1][#2]% we could use the via files
{\bgroup
- \dostarttagged\t!image\empty
- \let\textunderscore\letterunderscore % {\string _} % space needed as _ is now letter in unprotected mode
- \calculateexternalfigure[][#1][#2][#3][#4][#5]% [] is dummy dwcomp
- \dotagfigure
- \naturalvbox attr \imageattribute 2 {\box\foundexternalfigure}%
- \dostoptagged
+ \global\advance\c_grph_buffers_n\plusone
+ \edef\m_grph_buffers_filename{\jobname-buffer-\the\c_grph_buffers_n}%
+ \ctxcommand{runbuffer("\m_grph_buffers_filename.tmp","#1",true)}%
+ \externalfigure[\m_grph_buffers_filename.pdf][#2]%
\egroup}
-\def\externalfigurereplacement#1#2#3%
- {\normalexpanded{\localframed
- [\??ef]
- [\c!width=\figurewidth,
- \c!height=\figureheight,
- \c!background=\v!color,
- \c!backgroundcolor=missingfigurecolor,
- \c!frame=\@@efframe]}% we need to expand this in order to prevent a loop
- {\tt\tfxx \nohyphens
- name: \expanded{\verbatimstring{#1}}\\%
- file: \expanded{\verbatimstring{#2}}\\%
- state: \expanded{\verbatimstring{#3}}}}
-
-\definecolor[missingfigurecolor][s=.8]
-
-\def\externalfigureplaceholder#1#2#3%
- {\localframed
- [\??ef]
- [\c!width=#2,
- \c!height=#3,
- \c!frame=\v!on]%
- {\tt\tfxx \nohyphens
- name: \expanded{\verbatimstring{#1}}\\%
- state: \expanded{\verbatimstring{placeholder}}}}
-
-% new: more convenient/efficient than
-%
-% \use..[a][a][setting] \externalfigure[b][a]
-%
-% is equivalent to:
+% For manuals and such:
%
-% \def..[a][setting] \externalfigure[b][a]
-%
-% see x-res modules for usage:
+% \definetypesetting [name] [options] [settings-a]
%
-% \defineexternalfigure[name][settings]
-
-\unexpanded\def\defineexternalfigure
- {\dodoubleargument\dodefineexternalfigure}
-
-\def\dodefineexternalfigure[#1][#2]%
- {\setvalue{\??externalfigures#1}{\doplaceexternalfigure[#1][][][#2]}}
-
-% \useexternalfigure[alpha][koe]
-% \useexternalfigure[beta] [koe] [breedte=1cm]
-% \useexternalfigure[gamma][koe][alpha]
-% \useexternalfigure[delta][koe][alpha][breedte=2cm]
+% \typesetfile [name] [file] [settings-b]
+% \typesetfile [file] [options] [settings-b]
+% \typesetfile [file] [settings-b]
+% \typesetfile [file]
%
-% volle breedte: \externalfigure[koe] \par
-% 3cm breed: \externalfigure[koe] [breedte=3cm] \par
-% volle breedte: \externalfigure[alpha] \par
-% 1cm breed: \externalfigure[beta] \par
-% volle breedte: \externalfigure[gamma] \par
-% 2cm breed: \externalfigure[delta] \par
-% 4cm breed: \externalfigure[beta] [breedte=4cm] \par
-% 5cm breed: \externalfigure[gamma][breedte=5cm] \par
-
-% \defineexternalfigure[a][width=10cm]
-% \defineexternalfigure[b][width=5cm]
-% \externalfigure[cow][a]
-% \externalfigure[cow][b][height=8cm]
-
-% \useexternalfigure[x][cow][width=10cm,height=1cm]
-% \externalfigure[x]
-% \externalfigure[x][width=3cm]
-
-\def\useexternalfigure
- {\doquadrupleempty\douseexternalfigure}
-
-% [label] [filename]
-% [label] [filename] [parent]
-% [label] [filename] [parent] [settings]
-% [label] [filename] [settings]
-
-\def\useexternalfigure
- {\doquadrupleempty\douseexternalfigure}
-
-\def\douseexternalfigure[#1][#2][#3][#4]%
- {\doifelsenothing{#1}
- {\doifsomething{#2}
- {\doifassignmentelse{#3}
- {\dodouseexternalfigure{#2}{#2}{#3}{#4}}
- {\dodouseexternalfigure{#2}{#2}{}{#4}}}}
- {\doifelsenothing{#2}
- {\doifassignmentelse{#3}
- {\dodouseexternalfigure{#1}{#1}{}{#3}}
- {\dodouseexternalfigure{#1}{#1}{#3}{#4}}}
- {\doifassignmentelse{#3}
- {\dodouseexternalfigure{#1}{#2}{}{#3}}
- {\dodouseexternalfigure{#1}{#2}{#3}{#4}}}}}
-
-\def\dodouseexternalfigure#1#2#3#4%
- {\setvalue{\??externalfigures#1}{\doplaceexternalfigure[#1][#2][#3][#4]}%
- \doanalyzeexternalfigurecollection[#2][#4]}
-
-\newconditional\inexternalfigurecollection
-\newdimen\xexternalfigurecollectionminwidth
-\newdimen\xexternalfigurecollectionmaxwidth
-\newdimen\xexternalfigurecollectionminheight
-\newdimen\xexternalfigurecollectionmaxheight
-
-\def\doanalyzeexternalfigurecollection[#1][#2]%
- {\ifconditional\inexternalfigurecollection
- \setfalse\inexternalfigurecollection
- \getfiguredimensions[#1][#2]%
- \settrue\inexternalfigurecollection
- \scratchdimen\naturalfigurewidth
- \ifdim\scratchdimen>\xexternalfigurecollectionmaxwidth \xexternalfigurecollectionmaxwidth \scratchdimen \fi
- \ifdim\scratchdimen<\xexternalfigurecollectionminwidth \xexternalfigurecollectionminwidth \scratchdimen \fi
- \scratchdimen\naturalfigureheight
- \ifdim\scratchdimen>\xexternalfigurecollectionmaxheight \xexternalfigurecollectionmaxheight\scratchdimen \fi
- \ifdim\scratchdimen<\xexternalfigurecollectionminheight \xexternalfigurecollectionminheight\scratchdimen \fi
- \fi}
-
-\unexpanded\def\startexternalfigurecollection[#1]%
- {\begingroup
- \def\currentexternalfigurecollection{#1}%
- \settrue\inexternalfigurecollection
- \xexternalfigurecollectionminwidth \maxdimen
- \xexternalfigurecollectionmaxwidth \zeropoint
- \xexternalfigurecollectionminheight\maxdimen
- \xexternalfigurecollectionmaxheight\zeropoint}
-
-\unexpanded\def\stopexternalfigurecollection
- {\setxvalue{\??ef:c:\currentexternalfigurecollection:\c!minwidth }{\the\xexternalfigurecollectionminwidth }%
- \setxvalue{\??ef:c:\currentexternalfigurecollection:\c!maxwidth }{\the\xexternalfigurecollectionmaxwidth }%
- \setxvalue{\??ef:c:\currentexternalfigurecollection:\c!minheight}{\the\xexternalfigurecollectionminheight}%
- \setxvalue{\??ef:c:\currentexternalfigurecollection:\c!maxheight}{\the\xexternalfigurecollectionmaxheight}%
- \endgroup}
-
-\def\externalfigurecollectionparameter#1#2%
- {\csname\ifcsname\??ef:c:#1:#2\endcsname\??ef:c:#1:#2\else\s!empty\fi\endcsname}
-
-\def\externalfigurecollectionminwidth #1{\externalfigurecollectionparameter{#1}\c!minwidth }
-\def\externalfigurecollectionmaxwidth #1{\externalfigurecollectionparameter{#1}\c!maxwidth }
-\def\externalfigurecollectionminheight#1{\externalfigurecollectionparameter{#1}\c!minheight}
-\def\externalfigurecollectionmaxheight#1{\externalfigurecollectionparameter{#1}\c!maxheight}
-
-\let\efcparameter\externalfigurecollectionparameter
-\let\efcminwidth \externalfigurecollectionminwidth
-\let\efcmaxwidth \externalfigurecollectionmaxwidth
-\let\efcminheight\externalfigurecollectionminheight
-\let\efcmaxheight\externalfigurecollectionmaxheight
-
-% \startexternalfigurecollection[name]
-% \useexternalfigure[cow] [cow.pdf]
-% \useexternalfigure[mill][mill.png]
-% \stopexternalfigurecollection
+% \enabletrackers[files.run]
% \starttext
-% \bTABLE
-% \bTR
-% \bTD \externalfigure[cow] [height=\externalfigurecollectionmaxheight{name}] \eTD
-% \bTD \externalfigure[mill][height=\externalfigurecollectionmaxheight{name}] \eTD
-% \eTR
-% \eTABLE
+% \typesetfile[oepsoeps.tex][width=10cm,frame=on]
% \stoptext
-\def\dosetefparameters#1#2#3% parent_id use_settings current_settings
- {\doifelsenothing{#1} % inherit from parent
- {\getparameters[\??ef][#2,#3]}
- {\ifcsname\??externalfigures#1\endcsname
- \pushmacro\doplaceexternalfigure
- \def\doplaceexternalfigure[##1][##2][##3][##4]{\getparameters[\??ef][##4,#2,#3]}%
- \getvalue{\??externalfigures#1}%
- \popmacro\doplaceexternalfigure
- \else
- \getparameters[\??ef][#2,#3]%
- \fi}}
-
-\unexpanded\def\externalfigure
- {\dotripleempty\doexternalfigure}
-
-% \def\doexternalfigure[#1][#2][#3]% [label][file][settings] | [file][settings] | [file][parent][settings]
-% {\bgroup
-% \doifelsenothing{#1}
-% {\framed[\c!width=\defaultfigurewidth,\c!height=\defaultfigureheight]{external\\figure\\no name}}
-% {\ifcsname\??externalfigures#1\endcsname
-% \doifassignmentelse{#2}
-% {\getvalue{\??externalfigures#1}[#2]}%
-% {\getvalue{\??externalfigures#1}[#3]}%
-% \else
-% \useexternalfigure[\s!dummy][#1][#2][#3]%
-% \getvalue{\??externalfigures\s!dummy}[]% [] is dummy arg 5
-% \fi}%
-% \globallet\currentresourcecomment\empty
-% \egroup}
-
-\def\doexternalfigure[#1][#2][#3]% [label][file][settings] | [file][settings] | [file][parent][settings]
- {\bgroup % also catched #1 == empty ... scales nicer now
- \ifcsname\??externalfigures#1\endcsname
- \doifassignmentelse{#2}
- {\getvalue{\??externalfigures#1}[#2]}%
- {\getvalue{\??externalfigures#1}[#3]}%
- \else
- \useexternalfigure[\s!dummy][#1][#2][#3]%
- \getvalue{\??externalfigures\s!dummy}[]% [] is dummy arg 5
- \fi
- \globallet\currentresourcecomment\empty
- \egroup}
-
-\def\resourcecomment#1%
- {\gdef\currentresourcecomment{#1}}
+\installcorenamespace{typesettingfile}
-\unexpanded\def\startresourcecomment#1\stopresourcecomment
- {\gdef\currentresourcecomment{#1}}
+\unexpanded\def\definetypesetting{\dotripleempty\grph_typesetting_define}
+\unexpanded\def\typesetfile {\dotripleempty\grph_typesetting_process}
-\let\currentresourcecomment\empty
+\def\grph_typesetting_define[#1][#2][#3]% <name> options settings-a
+ {\doifsomething{#1}{\setuvalue{\??typesettingfile#1}{\grph_typesetting_process_indeed{#2}{#3}}}}
-\unexpanded\def\showexternalfigures % maybe run time command is better, but no core-run, unless figs-run ...
- {%\writestatus\m!system{for \string\showexternalfigures\space see \truefilename{x-res-20}.tex}
- \usemodule[res-20]%
- \showexternalfigures} % so for the moment we do it this way
+\def\grph_typesetting_process[#1][#2][#3]% <name> filename settings-b | filename options settings
+ {\ifcsname\??typesettingfile#1\endcsname
+ \csname\??typesettingfile#1\endcsname{#2}{#3}%
+ \else\ifthirdargument % filename options settings
+ \grph_typesetting_process_indeed{#2}{#3}{#1}{}%
+ \else\ifsecondargument % filename settings
+ \grph_typesetting_process_indeed{}{#2}{#1}{}%
+ \fi\fi\fi}
-\unexpanded\def\overlayfigure#1%
- {\externalfigure[#1][\c!width=\overlaywidth,\c!height=\overlayheight]}
+\def\grph_typesetting_process_indeed#1#2#3#4% options settings-a filename settings-b
+ {\begingroup
+ \edef\m_typesetting_name{\cldcontext{job.files.context("#3","#1")}}%
+ \ifx\m_typesetting_name\empty \else
+ \expanded{\externalfigure[\m_typesetting_name]}[#2,#4]%
+ \fi
+ \endgroup}
-%D Whatever
+%D Whatever ... hardly used ... but historic ... needs checking ...
+%D will probably become m-fig-nn.mkiv .. or I will extend it c.q. clean
+%D it up when I need it. After all, it's documented in old manuals.
-\newbox\colorbarbox
+\newcount\c_grph_steps_reference
+\newdimen\d_grph_steps_x
+\newdimen\d_grph_steps_y
+\newbox \b_grph_steps_colorbar
-\unexpanded\def\makecolorbar[#1]%
- {\def\docommand##1%
- {\color[##1]
- {\blackrule
- [\c!width=2em,
- \c!height=1ex,
- \c!depth=\!!zeropoint]}%
- \endgraf}%
- \global\setbox\colorbarbox\vbox
- {\forgetall
- \processcommalist[#1]\docommand}%
- \global\setbox\colorbarbox\vbox
- {\hskip2em\box\colorbarbox}%
- \wd\colorbarbox\zeropoint}
+\let\placestopfigure\relax
\unexpanded\def\placestartfigure[#1][#2][#3]#4\placestopfigure[#5]%
{\hbox
- {\setbox0\hbox
+ {\setbox\scratchbox\hbox
{\useexternalfigure[\s!dummy][#2][#3,#5]%
\externalfigure[\s!dummy]}%
- \calculateefsteps
+ \grph_steps_calculate
\startpositioning
- \def\referring(##1,##2)##3(##4,##5)##6[##7]%
- {\position(##1,##2){\efgoto(##4,##5){\@@exframes}[##7]}}%
- \def\marking(##1,##2)##3(##4,##5)##6[##7]%
- {\position(##1,##2){\efthisis(##4,##5){\@@exframes}[##7]}}%
- \def\remark{\efnocomment}%
- \def\colorbar##1[##2]{}%
- \position(0,0){\box0}%
+ \let\referring\grph_steps_one_referring
+ \let\marking \grph_steps_one_marking
+ \let\remark \grph_steps_one_remark
+ \let\colorbar \grph_steps_one_colorbar
+ \position(0,0){\box\scratchbox}%
\linewidth\onepoint
\setuppositioning
[\c!unit=pt,
- \c!xscale=\withoutpt\the\efxsteps,
- \c!yscale=\withoutpt\the\efysteps,
+ \c!xscale=\withoutpt\the\d_grph_steps_x,
+ \c!yscale=\withoutpt\the\d_grph_steps_y,
\c!factor=1]%
\ignorespaces#4%
- \def\referring(##1,##2)##3(##4,##5)##6[##7]%
- {}%
- \let\marking\referring
- \def\remark{\efcomment\v!no}%
- \def\colorbar##1[##2]{\makecolorbar[##2]}%
+ \let\referring\grph_steps_two_referring
+ \let\marking \grph_steps_two_marking
+ \let\remark \grph_steps_two_remark
+ \let\colorbar \grph_steps_two_colorbar
\ignorespaces#4%
\stoppositioning
- \box\colorbarbox}}
+ \ifvoid\b_grph_steps_colorbar\else\box\b_grph_steps_colorbar\fi}} % not really needed
+
+\unexpanded\def\grph_steps_one_referring(#1,#2)#3(#4,#5)#6[#7]%
+ {\position(#1,#2){\grph_steps_goto(#4,#5){\externalfigureparameter\c!frames}[#7]}}
+
+\unexpanded\def\grph_steps_one_marking(#1,#2)#3(#4,#5)#6[#7]%
+ {\position(#1,#2){\grph_steps_this_is(#4,#5){\externalfigureparameter\c!frames}[#7]}}
+
+\unexpanded\def\grph_steps_one_remark(#1,#2)#3(#4,#5)% (x,y)(h,b)[...]{text}
+ {\def\grph_steps_no_comment_indeed[##1]##2{}%
+ \dosingleempty\grph_steps_no_comment_indeed}
+
+\unexpanded\def\grph_steps_one_colorbar#1[#2]%
+ {}
+
+\unexpanded\def\grph_steps_two_referring(#1,#2)#3(#4,#5)#6[#7]%
+ {}
+
+\let\grph_steps_two_marking\grph_steps_two_referring
+
+\unexpanded\def\grph_steps_two_remark
+ {\grph_steps_comment\v!no}
+
+\unexpanded\def\grph_steps_two_colorbar#1[#2]%
+ {\begingroup
+ \global\setbox\b_grph_steps_colorbar\vbox
+ {\forgetall
+ \processcommalist[#2]\grph_colorbar_make_step}%
+ \global\setbox\b_grph_steps_colorbar\vbox
+ {\hskip2\emwidth\box\b_grph_steps_colorbar}%
+ \global\wd\b_grph_steps_colorbar\zeropoint
+ \endgroup}
-\def\dodostartfigure[#1][#2][#3]#4\stopfigure
- {\doifelse\v!test\@@exoption
+\unexpanded\def\grph_colorbar_make_step#1%
+ {\blackrule[\c!color=#1,\c!width=2\emwidth,\c!height=\exheight,\c!depth=\zeropoint]%
+ \endgraf}
+
+\unexpanded\def\startfigure % could be done nicer
+ {\grabuntil{\e!stop\v!figure}\grph_steps_figure_process}
+
+\let\stopfigure\relax
+
+\def\grph_steps_figure_process#1%
+ {\dotripleargument\grph_steps_figure_process_indeed#1\stopfigure}
+
+\def\grph_steps_figure_process_indeed[#1][#2][#3]#4\stopfigure
+ {\doifelse{\externalfigureparameter\c!option}\v!test
{\teststartfigure[#1][#2][#3]#4\teststopfigure
- \let\@@exframes\v!on}
- {\let\@@exframes\v!off}%
- \setvalue{\??externalfigures#1}%
- {\dosingleempty{\placestartfigure[#1][#2][#3]#4\placestopfigure}}%
- }% no longer \doifundefined{#1}{\setvalue{#1}{\getexternalfigure{#1}}}}
+ \letexternalfigureparameter\c!frames\v!on}
+ {\letexternalfigureparameter\c!frames\v!off}%
+ \setvalue{\??externalfigureinstance#1}%
+ {\dosingleempty{\placestartfigure[#1][#2][#3]#4\placestopfigure}}}
-% The macro below should not be modified without due care;
-% it is tuned for use in the manual.
+\let\teststopfigure\relax
-\def\teststartfigure[#1][#2][#3]#4\teststopfigure%
+\unexpanded\def\teststartfigure[#1][#2][#3]#4\teststopfigure
{\begingroup
- \setbox0\hbox
+ \setbox\scratchbox\hbox
{\useexternalfigure[\s!dummy][#2][\c!wfactor=\v!max]%
\externalfigure[\s!dummy]}%
- \def\referring{\efmark}%
- \def\marking{\efmark}%
- \def\remark{\efcomment\v!yes}%
- \def\colorbar##1[##2]{}%
- \efreference\zerocount
+ \let\referring\grph_steps_three_referring
+ \let\marking \grph_steps_three_marking
+ \let\remark \grph_steps_three_remark
+ \let\colorbar \grph_steps_three_colorbar
+ \c_grph_steps_reference\zerocount
\setbox0\vbox
- {\hsize240pt
+ {\hsize240\points
\startpositioning
- \calculateefsteps
+ \grph_steps_calculate
\position(0,0)
- {\box0}%
+ {\box\scratchbox}%
\position(0,0)
{\basegrid
- [\c!nx=\@@exxmax,
- \c!dx=\withoutpt\the\efxsteps,
- \c!ny=\@@exymax,
- \c!dy=\withoutpt\the\efysteps,
+ [\c!nx=\externalfigureparameter\c!xmax,
+ \c!dx=\withoutpt\the\d_grph_steps_x,
+ \c!ny=\externalfigureparameter\c!ymax,
+ \c!dy=\withoutpt\the\d_grph_steps_y,
\c!xstep=1,
\c!ystep=1,
\c!scale=1,
@@ -363,18 +210,18 @@
\c!unit=pt]}%
\setuppositioning
[\c!unit=pt,
- \c!xscale=\withoutpt\the\efxsteps,
- \c!yscale=\withoutpt\the\efysteps,
+ \c!xscale=\withoutpt\the\d_grph_steps_x,
+ \c!yscale=\withoutpt\the\d_grph_steps_y,
\c!factor=1]%
\linewidth\onepoint
\ignorespaces#4\relax
\stoppositioning
\vfill}%
- \efreference\zerocount
- \def\referring{\eftext{$\rightarrow$}}%
- \def\marking{\eftext{$\leftarrow$}}%
- \def\remark{\efnocomment}%
- \def\colorbar##1[##2]{}%
+ \c_grph_steps_reference\zerocount
+ \let\referring\grph_steps_four_referring
+ \let\marking \grph_steps_four_marking
+ \let\remark \grph_steps_four_remark
+ \let\colorbar \grph_steps_four_colorbar
\setbox2\vbox
{{\tfa\doifelsenothing{#1}{#2}{#1}}
\blank
@@ -386,165 +233,149 @@
\ht0\ht2
\fi
\hbox
- {\hskip3em
- \vtop{\vskip12pt\box0\vskip6pt}%
- \vtop{\vskip12pt\box2\vskip6pt}}%
+ {\hskip3\emwidth
+ \vtop{\vskip12\points\box0\vskip6\points}%
+ \vtop{\vskip12\points\box2\vskip6\points}}%
\endgroup}
-\def\dodostartfigure[#1][#2][#3]#4\stopfigure
- {\doifelse\v!test\@@exoption
- {\teststartfigure[#1][#2][#3]#4\teststopfigure
- \let\@@exframe\v!on}
- {\let\@@exframe\v!off}%
- \setvalue{\??externalfigures#1}%
- {\def\next{\placestartfigure[#1][#2][#3]#4\placestopfigure}%
- \dosingleempty\next}%
- }% no longer: \doifundefined{#1}{\setvalue{#1}{\getexternalfigure{#1}}}}
+\unexpanded\def\grph_steps_three_referring(#1,#2)#3(#4,#5)#6[#7]%
+ {\advance\c_grph_steps_reference\plusone
+ \position(#1,#2)
+ {\hbox{\the\c_grph_steps_reference}}%
+ \position(#1,#2)
+ {\gotosomeinternal\s!vwb{#7}\realfolio
+ {\grph_steps_marker(#4,#5)\v!on{\thisissomeinternal\s!vwa{#7}}}}}
-\long\def\dostartfigure#1%
- {\dotripleargument\dodostartfigure#1\stopfigure}
+\unexpanded\def\grph_steps_three_remark
+ {\grph_steps_comment\v!yes}
-\unexpanded\def\startfigure
- {\grabuntil{\e!stop\v!figure}\dostartfigure}
-
-%D NEW: used in styledesign manual
+\let\grph_steps_three_marking \grph_steps_three_referring
+\let\grph_steps_three_colorbar\grph_steps_one_colorbar
-% beware in mkiv we don't have the typeset- prefix
-%
-% \setbuffer[typeset-b]\endbuffer
-% \setbuffer[typeset-a]\endbuffer
-%
-% todo:
-%
-% \appendtoks \setbuffer[typeset-b]\endbuffer\to \everystarttext
-% \appendtoks \setbuffer[typeset-a]\endbuffer\to \everystarttext
+\unexpanded\def\grph_steps_four_referring
+ {\grph_steps_text{$\rightarrow$}}
-\newcount\noftypesetbuffers
+\unexpanded\def\grph_steps_four_marking
+ {\grph_steps_text{$\leftarrow$}}
-\def\typesetbuffer
- {\dodoubleempty\dotypesetbuffer}
+\let\grph_steps_four_remark \grph_steps_one_remark
+\let\grph_steps_four_colorbar\grph_steps_one_colorbar
-\def\dotypesetbuffer[#1][#2]% beware: this will mix up the mp graphics
- {\ifsecondargument
- \dodotypesetbuffer[#1][#2]%
- \else\iffirstargument
- \doifassignmentelse{#1}
- {\dodotypesetbuffer[\jobname][#1]}%
- {\dodotypesetbuffer[#1][]}%
+% Helpers:
+
+\def\grph_steps_calculate
+ {\ifnum0\externalfigureparameter\c!xmax=\zerocount
+ \ifnum0\externalfigureparameter\c!ymax=\zerocount
+ \setexternalfigureparameter\c!ymax{24}%
+ \fi
+ \d_grph_steps_y\figureheight
+ \divide\d_grph_steps_y \externalfigureparameter\c!ymax
+ \d_grph_steps_x\d_grph_steps_y
+ \scratchdimen\figurewidth
+ \advance\scratchdimen\d_grph_steps_y
+ \divide \scratchdimen\d_grph_steps_y
+ \setexternalfigureparameter\c!xmax{\number\scratchdimen}%
\else
- \dodotypesetbuffer[\jobname][]
- \fi\fi}
+ \d_grph_steps_x\figurewidth \divide\d_grph_steps_x \externalfigureparameter\c!xmax\relax
+ \d_grph_steps_y\figureheight \divide\d_grph_steps_y \externalfigureparameter\c!ymax\relax
+ \fi}
-\def\dodotypesetbuffer[#1][#2]%
+\def\grph_steps_comment#1(#2,#3)#4(#5,#6)% {frame}(x,y)(h,b)[...]{text}
+ {\def\grph_steps_comment_indeed[##1]##2%
+ {\position(#2,#3)%
+ {\setnostrut
+ \framed
+ [\c!width=#5\d_grph_steps_x,
+ \c!height=#6\d_grph_steps_y,
+ \c!offset=\v!none,
+ \c!frame=#1,
+ ##1]%
+ {##2}}}%
+ \dosingleempty\grph_steps_comment_indeed}
+
+% \def\grph_steps_figure#1%
+% {\position(0,0){\getvalue{#1}}}
+
+\def\grph_steps_goto(#1,#2)#3[#4]% (h,b)frame[ref]
+ {\gotobox{\vbox{\grph_steps_area(#1,#2)#3{}}}[#4]}
+
+\def\grph_steps_text#1(#2,#3)#4(#5,#6)#7[#8]%
+ {\advance\c_grph_steps_reference\plusone
+ \hbox
+ {\quad
+ \thisissomeinternal\s!vwb
+ {#8}%
+ \gotosomeinternal\s!vwa{#8}\realfolio
+ {\hbox to 1.5em{\the\c_grph_steps_reference\presetgoto\hfill}}%
+ \quad#1 (#2,#3) (#5,#6) [#8]\hfill}%
+ \endgraf}
+
+\def\grph_steps_this_is(#1,#2)#3[#4]%
+ {\grph_steps_area(#1,#2){#3}{\dosetdirectpagereference{#4}}}
+
+\def\grph_steps_area(#1,#2)#3#4% (h,b){frame}{text}
{\bgroup
- \global\advance\noftypesetbuffers\plusone
- \edef\bufferfilename{\jobname-buffer-\the\noftypesetbuffers}%
- \ctxcommand{runbuffer("\bufferfilename.tmp","#1",true)}%
- \externalfigure[\bufferfilename.pdf][#2]%
+ \setnostrut
+ \framed
+ [\c!width=#1\d_grph_steps_x,
+ \c!height=#2\d_grph_steps_y,
+ \c!offset=\zeropoint,
+ \c!frame=#3]
+ {#4}%
\egroup}
-\def\dodotypesetbufferindeed#1%
- {}
+\def\grph_steps_marker(#1,#2)#3#4% (h,b){frame}{text}
+ {\framed
+ [\c!width=#1\d_grph_steps_x,
+ \c!height=#2\d_grph_steps_y,
+ \c!offset=\v!none,
+ \c!frame=#3]%
+ {#4}}
-% for me only (manuals and such)
+\protect \endinput
+
+% \startbuffer
+% \definecolor [blue] [c=1,m=.38,y=0,k=.64]
+% \definecolor [yellow] [c=0,m=.28,y=1,k=.06]
%
-% \definetypesetting [name] [options] [settings-a]
+% \definespotcolor [blue-100] [blue] [p=1]
+% \definespotcolor [yellow-100] [yellow] [p=1]
%
-% \typesetfile [name] [file] [settings-b]
-% \typesetfile [file] [options] [settings-b]
-% \typesetfile [file] [settings-b]
-% \typesetfile [file]
+% \definemultitonecolor [combicolor] [blue=.12,yellow=.28] [c=.1,m=.1,y=.3,k=.1]
%
-% \enabletrackers[files.run]
-% \starttext
-% \typesetfile[oepsoeps.tex][width=10cm,frame=on]
-% \stoptext
-
-\installcorenamespace{typesettingfile}
-
-\unexpanded\def\definetypesetting{\dotripleempty\dodefinetypesetting}
-\unexpanded\def\typesetfile {\dotripleempty\dotypesetfile}
-
-\def\dodefinetypesetting[#1][#2][#3]% <name> options settings-a
- {\doifsomething{#1}{\setvalue{\??typesettingfile#1}{\dodotypesetfile{#2}{#3}}}}
-
-\def\dotypesetfile[#1][#2][#3]% <name> filename settings-b | filename options settings
- {\ifcsname\??typesettingfile#1\endcsname
- \csname\??typesettingfile#1\endcsname{#2}{#3}%
- \else\ifthirdargument % filename options settings
- \dodotypesetfile{#2}{#3}{#1}{}%
- \else\ifsecondargument % filename settings
- \dodotypesetfile{}{#2}{#1}{}%
- \fi\fi\fi}
-
-\def\dodotypesetfile#1#2#3#4% options settings-a filename settings-b
- {\edef\typesetfilename{\ctxlua{tex.write(job.files.context("#3","#1"))}}%
- \expanded{\externalfigure[\typesetfilename]}[#2,#4]}
-
-\setupexternalfigures
- [\c!option=,
- \c!object=\v!yes, % we only check for no
- \c!reset=\v!no,
- \c!interaction=\v!none,
- \c!maxwidth=\@@efwidth,
- \c!maxheight=\@@efheight,
- \c!bodyfont=\bodyfontsize,
- \c!directory=,
- \c!radius=.5\bodyfontsize,
- \c!corner=\v!rectangular,
- \c!frame=\v!off,
- \c!background=, % new
- \c!splitcolor=\s!white,
- \c!conversion=,
- \c!resolution=,
- \c!prefix=,
- \c!cache=,
-% \c!grid=,
- \c!equalwidth=,
- \c!equalheight=,
- \c!location={\v!local,\v!global}]
-
-\setupexternalfigures
- [\c!frames=\v!off,
- \c!ymax=24,
- \c!xmax=]
-
-\useexternalfigure
- [buffer] [\jobname.buffer] [\c!object=\v!no]
-
-\protect \endinput
-
-% alternative for positioning
-
-% \definelayer[figure][width=\overlaywidth,height=\overlayheight]
-% \defineoverlay[figure][{\directsetup{figure}\tightlayer[figure]}]
-
-% \setupcolors[state=start]
-
-% \starttext
-
-% \startsetups figure
-% \setlayerframed[figure][preset=rightbottom,x=.25\layerwidth,y=.25\layerheight]{HERE}
-% \setlayerframed[figure][preset=leftbottom, x=.15\layerwidth,y=.35\layerheight]{THERE}
-% \stopsetups
-
-% \externalfigure[cow][background={foreground,figure},width=4cm,height=8cm]
-
-% \startsetups figure
-% \setlayerframed[figure][preset=righttop,x=.25\layerwidth,y=.25\layerheight]{MORE}
-% \setlayerframed[figure][preset=middle,foregroundcolor=green]{EVEN MORE}
-% \stopsetups
-
-% \externalfigure[cow][background={foreground,figure},width=14cm,height=2cm]
-
-% \defineexternalfigure[whatever][background={foreground,figure}]
-
-% \startsetups figure
-% \setlayerframed[figure][preset=righttop,x=.25\layerwidth,y=.25\layerheight]{\red MORE}
-% \setlayerframed[figure][preset=middle,foregroundcolor=green]{EVEN MORE}
-% \stopsetups
-
-% \externalfigure[cow][whatever][width=14cm,height=4cm]
-
-% \stoptext
-
+% \definemultitonecolor [combicolor-b] [blue=1] [c=1,m=.38,y=0,k=.64] % force multitone
+% \definemultitonecolor [combicolor-y] [yellow=1] [c=0,m=.28,y=1,k=.06] % force multitone
+%
+% \useexternalfigure[demo-a][mill.png] [object=no,width=.2\textwidth]
+% \useexternalfigure[demo-b][hacker-bw.jpg][object=no,width=.2\textwidth]
+%
+% \startbaselinecorrection \startcombination[4*1]
+% {\externalfigure[demo-a]} {no color}
+% {\externalfigure[demo-a][color=combicolor]} {indexed duotone}
+% {\externalfigure[demo-a][color=combicolor-b]} {spot color}
+% {\externalfigure[demo-a][color=combicolor-y]} {spot color}
+% \stopcombination \stopbaselinecorrection
+%
+% \startbaselinecorrection \startcombination[4*1]
+% {\externalfigure[demo-b]} {no color}
+% {\externalfigure[demo-b][color=combicolor]} {indexed duotone}
+% {\externalfigure[demo-b][color=combicolor-b]} {spot color}
+% {\externalfigure[demo-b][color=combicolor-y]} {spot color}
+% \stopcombination \stopbaselinecorrection
+%
+% \startbaselinecorrection \startcombination[4*1]
+% {\externalfigure[demo-a]} {no color}
+% {\externalfigure[demo-a][color=combicolor]} {indexed duotone}
+% {\externalfigure[demo-a][color=blue-100]} {spot color}
+% {\externalfigure[demo-a][color=yellow-100]} {spot color}
+% \stopcombination \stopbaselinecorrection
+%
+% \startbaselinecorrection \startcombination[4*1]
+% {\externalfigure[demo-b]} {no color}
+% {\externalfigure[demo-b][color=combicolor]} {indexed duotone}
+% {\externalfigure[demo-b][color=blue-100]} {spot color}
+% {\externalfigure[demo-b][color=yellow-100]} {spot color}
+% \stopcombination \stopbaselinecorrection
+% \stopbuffer
+%
+% \getbuffer \typebuffer
diff --git a/Master/texmf-dist/tex/context/base/grph-fil.lua b/Master/texmf-dist/tex/context/base/grph-fil.lua
index 1ec88b487fe..3449f1779c2 100644
--- a/Master/texmf-dist/tex/context/base/grph-fil.lua
+++ b/Master/texmf-dist/tex/context/base/grph-fil.lua
@@ -6,13 +6,13 @@ if not modules then modules = { } end modules ['grph-fil'] = {
license = "see context related readme files"
}
-local format, concat = string.format, table.concat
-
-local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end)
+local type = type
+local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end)
local report_run = logs.reporter("graphics","run")
--- this code will move
+-- Historically running files is part of graphics processing, so this is why it
+-- sits here but is part of the job namespace.
local allocate = utilities.storage.allocate
@@ -22,6 +22,7 @@ local tobesaved = allocate()
local jobfiles = {
collected = collected,
tobesaved = tobesaved,
+ forcerun = false, -- maybe a directive some day
}
job.files = jobfiles
@@ -33,14 +34,12 @@ end
job.register('job.files.collected', tobesaved, initializer)
-jobfiles.forcerun = false
-
function jobfiles.run(name,action)
local oldchecksum = collected[name]
local newchecksum = file.checksum(name)
if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then
if trace_run then
- report_run("processing file, changes in '%s', processing forced",name)
+ report_run("processing file, changes in %a, processing forced",name)
end
local ta = type(action)
if ta == "function" then
@@ -48,10 +47,10 @@ function jobfiles.run(name,action)
elseif ta == "string" and action ~= "" then
os.execute(action)
else
- report_run("processing file, no action given for processing '%s'",name)
+ report_run("processing file, no action given for processing %a",name)
end
elseif trace_run then
- report_run("processing file, no changes in '%s', not processed",name)
+ report_run("processing file, no changes in %a, not processed",name)
end
tobesaved[name] = newchecksum
end
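
Editorial aside, not part of the patch: job.files.run takes a file name plus either a shell command string or a Lua callback, and only acts when the stored checksum differs from the current one (or forcerun is set). A minimal hypothetical sketch, with an invented file name and command:

-- hypothetical usage of job.files.run; "illustration.tex" and the command are invented
local jobfiles = job.files

-- re-run the external command only when the file's checksum changed since the last run
jobfiles.run("illustration.tex", "context illustration.tex")

-- alternatively pass a Lua function; it is called only when a rerun is needed
jobfiles.run("illustration.tex", function()
    -- any Lua action goes here
end)
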
diff --git a/Master/texmf-dist/tex/context/base/grph-inc.lua b/Master/texmf-dist/tex/context/base/grph-inc.lua
index 33dd0766d2b..9603419ae2e 100644
--- a/Master/texmf-dist/tex/context/base/grph-inc.lua
+++ b/Master/texmf-dist/tex/context/base/grph-inc.lua
@@ -6,19 +6,15 @@ if not modules then modules = { } end modules ['grph-inc'] = {
license = "see context related readme files"
}
--- figures -> managers.figures
-
-- todo: empty filename or only suffix always false (not found)
-
-- lowercase types
-- mps tex tmp svg
-- partly qualified
-- dimensions
--- consult rlx
-
+-- use metatables
-- figures.boxnumber can go as we now can use names
-
-- avoid push
+-- move some to command namespace
--[[
The ConTeXt figure inclusion mechanisms are among the oldest code
@@ -46,13 +42,18 @@ local texbox = tex.box
local contains = table.contains
local concat, insert, remove = table.concat, table.insert, table.remove
local todimen = string.todimen
+local collapsepath = file.collapsepath
+local formatters = string.formatters
+local longtostring = string.longtostring
+local expandfilename = dir.expandname
-local P = lpeg.P
+local P, lpegmatch = lpeg.P, lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local replacetemplate = utilities.templates.replace
local variables = interfaces.variables
local codeinjections = backends.codeinjections
@@ -68,7 +69,39 @@ local report_inclusion = logs.reporter("graphics","inclusion")
local context, img = context, img
---- some extra img functions ---
+local f_hash_part = formatters["%s->%s->%s"]
+local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
+
+local v_yes = variables.yes
+local v_low = variables.low
+local v_medium = variables.medium
+local v_high = variables.high
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_default = variables.default
+
+local maxdimen = 2^30-1
+
+function img.check(figure)
+ if figure then
+ local width = figure.width
+ local height = figure.height
+ if height > width then
+ if height > maxdimen then
+ figure.height = maxdimen
+ figure.width = width * maxdimen/height
+ report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height")
+ end
+ elseif width > maxdimen then
+ figure.width = maxdimen
+ figure.height = height * maxdimen/width
+ report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width")
+ end
+ return figure
+ end
+end
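
A hedged note on the new img.check helper above, not taken from the patch itself: it clamps a figure's natural dimensions to TeX's maximum dimension (2^30 - 1 sp) while keeping the aspect ratio, so an oversized scan cannot overflow. Illustrative values only:

-- illustrative only: an oversized height is clamped and the width scaled along
local fig = img.check { filename = "huge.png", width = 2^29, height = 2^31 }
-- fig.height is now 2^30 - 1 and fig.width is reduced proportionally
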
+
+--- some extra img functions --- can become luat-img.lua
local imgkeys = img.keys()
@@ -102,7 +135,7 @@ local validtypes = table.tohash(img.types())
function img.checksize(size)
if size then
size = gsub(size,"box","")
- return (validsizes[size] and size) or "crop"
+ return validsizes[size] and size or "crop"
else
return "crop"
end
@@ -116,54 +149,57 @@ end
--- we can consider an grph-ini file
-figures = figures or { }
-local figures = figures
-
-figures.loaded = allocate()
-figures.used = allocate()
-figures.found = allocate()
-figures.suffixes = allocate()
-figures.patterns = allocate()
-figures.resources = allocate()
-
-
-figures.boxnumber = figures.boxnumber or 0
-figures.defaultsearch = true
-figures.defaultwidth = 0
-figures.defaultheight = 0
-figures.defaultdepth = 0
-figures.nofprocessed = 0
-figures.preferquality = true -- quality over location
-
-local existers = allocate() figures.existers = existers
-local checkers = allocate() figures.checkers = checkers
-local includers = allocate() figures.includers = includers
-local converters = allocate() figures.converters = converters
-local identifiers = allocate() figures.identifiers = identifiers
-local programs = allocate() figures.programs = programs
+figures = figures or { }
+local figures = figures
+
+figures.boxnumber = figures.boxnumber or 0
+figures.defaultsearch = true
+figures.defaultwidth = 0
+figures.defaultheight = 0
+figures.defaultdepth = 0
+figures.nofprocessed = 0
+figures.preferquality = true -- quality over location
+
+local figures_loaded = allocate() figures.loaded = figures_loaded
+local figures_used = allocate() figures.used = figures_used
+local figures_found = allocate() figures.found = figures_found
+local figures_suffixes = allocate() figures.suffixes = figures_suffixes
+local figures_patterns = allocate() figures.patterns = figures_patterns
+local figures_resources = allocate() figures.resources = figures_resources
+
+local existers = allocate() figures.existers = existers
+local checkers = allocate() figures.checkers = checkers
+local includers = allocate() figures.includers = includers
+local converters = allocate() figures.converters = converters
+local identifiers = allocate() figures.identifiers = identifiers
+local programs = allocate() figures.programs = programs
+
+local defaultformat = "pdf"
+local defaultprefix = "m_k_i_v_"
figures.localpaths = allocate {
".", "..", "../.."
}
figures.cachepaths = allocate {
- prefix = "",
- path = ".",
+ prefix = "",
+ path = ".",
subpath = ".",
}
-figures.paths = allocate(table.copy(figures.localpaths))
+local figure_paths = allocate(table.copy(figures.localpaths))
+figures.paths = figure_paths
-figures.order = allocate{
+local figures_order = allocate {
"pdf", "mps", "jpg", "png", "jp2", "jbig", "svg", "eps", "tif", "gif", "mov", "buffer", "tex", "cld", "auto",
}
-local formats = allocate {
+local figures_formats = allocate { -- magic and order will move here
["pdf"] = { list = { "pdf" } },
["mps"] = { patterns = { "mps", "%d+" } },
["jpg"] = { list = { "jpg", "jpeg" } },
- ["jp2"] = { list = { "jp2" } },
["png"] = { list = { "png" } },
+ ["jp2"] = { list = { "jp2" } },
["jbig"] = { list = { "jbig", "jbig2", "jb2" } },
["svg"] = { list = { "svg", "svgz" } },
["eps"] = { list = { "eps", "ai" } },
@@ -176,7 +212,7 @@ local formats = allocate {
["auto"] = { list = { "auto" } },
}
-local magics = allocate {
+local figures_magics = allocate {
{ format = "png", pattern = P("\137PNG\013\010\026\010") }, -- 89 50 4E 47 0D 0A 1A 0A,
{ format = "jpg", pattern = P("\255\216\255") }, -- FF D8 FF
{ format = "jp2", pattern = P("\000\000\000\012\106\080\032\032\013\010"), }, -- 00 00 00 0C 6A 50 20 20 0D 0A },
@@ -184,62 +220,92 @@ local magics = allocate {
{ format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") },
}
-figures.formats = formats -- frozen
-figures.magics = magics -- frozen
+figures.formats = figures_formats -- frozen
+figures.magics = figures_magics -- frozen
+figures.order = figures_order -- frozen
+
+-- We can set the order but only indirectly so that we can check for support.
+
+function figures.setorder(list) -- can be table or string
+ if type(list) == "string" then
+ list = settings_to_array(list)
+ end
+ if list and #list > 0 then
+ figures_order = allocate()
+ figures.order = figures_order
+ local done = { } -- just to be sure in case the list is generated
+ for i=1,#list do
+ local l = lower(list[i])
+ if figures_formats[l] and not done[l] then
+ figures_order[#figures_order+1] = l
+ done[l] = true
+ end
+ end
+ report_inclusion("lookup order % a",figures_order)
+ else
+ -- invalid list
+ end
+end
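
A short illustration (editorial, not in the patch) of the new figures.setorder: it accepts a comma-separated string or a table, lowercases the entries, and silently drops anything not present in figures.formats:

-- illustrative calls; only formats known in figures.formats end up in the order
figures.setorder("png,pdf,jpg")            -- string form
figures.setorder { "pdf", "mps", "png" }   -- table form
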
function figures.guess(filename)
local f = io.open(filename,'rb')
if f then
local str = f:read(100)
f:close()
- for i=1,#magics do
- local pattern = magics[i]
- if pattern.pattern:match(str) then
- local format = pattern.format
- if trace_figures then
- report_inclusion("file %q has format %s",filename,format)
+ if str then
+ for i=1,#figures_magics do
+ local pattern = figures_magics[i]
+ if lpegmatch(pattern.pattern,str) then
+ local format = pattern.format
+ if trace_figures then
+ report_inclusion("file %a has format %a",filename,format)
+ end
+ return format
end
- return format
end
end
end
end
-function figures.setlookups() -- tobe redone .. just set locals
- local fs, fp = allocate(), allocate()
- figures.suffixes, figures.patterns = fs, fp
- for _, format in next, figures.order do
- local data = formats[format]
+local function setlookups() -- to be redone .. just set locals
+ figures_suffixes = allocate()
+ figures_patterns = allocate()
+ for _, format in next, figures_order do
+ local data = figures_formats[format]
local list = data.list
if list then
for i=1,#list do
- fs[list[i]] = format -- hash
+ figures_suffixes[list[i]] = format -- hash
end
else
- fs[format] = format
+ figures_suffixes[format] = format
end
local patterns = data.patterns
if patterns then
for i=1,#patterns do
- fp[#fp+1] = { patterns[i], format } -- array
+ figures_patterns[#figures_patterns+1] = { patterns[i], format } -- array
end
end
end
+ figures.suffixes = figures_suffixes
+ figures.patterns = figures_patterns
end
-figures.setlookups()
+setlookups()
+
+figures.setlookups = setlookups
function figures.registerresource(t)
- local n = #figures.resources + 1
- figures.resources[n] = t
+ local n = #figures_resources + 1
+ figures_resources[n] = t
return n
end
local function register(tag,target,what)
- local data = formats[target] -- resolver etc
+ local data = figures_formats[target] -- resolver etc
if not data then
data = { }
- formats[target] = data
+ figures_formats[target] = data
end
local d = data[tag] -- list or pattern
if d and not contains(d,what) then
@@ -247,34 +313,35 @@ local function register(tag,target,what)
else
data[tag] = { what }
end
- if not contains(figures.order,target) then
- figures.order[#figures.order+1] = target
+ if not contains(figures_order,target) then
+ figures_order[#figures_order+1] = target
end
- figures.setlookups()
+ setlookups()
end
function figures.registersuffix (suffix, target) register('list', target,suffix ) end
function figures.registerpattern(pattern,target) register('pattern',target,pattern) end
-local last_locationset, last_pathlist = last_locationset or nil, last_pathlist or nil
+local last_locationset = last_locationset or nil
+local last_pathlist = last_pathlist or nil
function figures.setpaths(locationset,pathlist)
if last_locationset == locationset and last_pathlist == pathlist then
-- this function can be called each graphic so we provide this optimization
return
end
- local iv, t, h = interfaces.variables, figures.paths, settings_to_hash(locationset)
+ local t, h = figure_paths, settings_to_hash(locationset)
if last_locationset ~= locationset then
-- change == reset (actually, a 'reset' would indeed reset
- if h[iv["local"]] then
+ if h[v_local] then
t = table.fastcopy(figures.localpaths or { })
else
t = { }
end
- figures.defaultsearch = h[iv["default"]]
+ figures.defaultsearch = h[v_default]
last_locationset = locationset
end
- if h[iv["global"]] then
+ if h[v_global] then
local list = settings_to_array(pathlist)
for i=1,#list do
local s = list[i]
@@ -283,18 +350,20 @@ function figures.setpaths(locationset,pathlist)
end
end
end
- figures.paths, last_pathlist = t, pathlist
+ figure_paths = t
+ last_pathlist = pathlist
+ figures.paths = figure_paths
if trace_figures then
- report_inclusion("locations: %s",last_locationset)
- report_inclusion("path list: %s",concat(figures.paths, " "))
+ report_inclusion("using locations %a",last_locationset)
+ report_inclusion("using paths % a",figure_paths)
end
end
-- check conversions and handle it here
function figures.hash(data)
- return data.status.hash or tostring(data.status.private) -- the <img object>
--- return data.status.fullname .. "+".. (data.status.page or data.request.page or 1) -- img is still not perfect
+ local status = data and data.status
+ return (status and status.hash or tostring(status.private)) or "nohash" -- the <img object>
end
-- interfacing to tex
@@ -353,19 +422,21 @@ local callstack = { }
function figures.initialize(request)
local figuredata = new()
if request then
- -- request.width/height are strings and are only used when no natural dimensions
- -- can be determined; at some point the handlers might set them to numbers instead
- -- local w, h = tonumber(request.width), tonumber(request.height)
- request.page = math.max(tonumber(request.page) or 1,1)
- request.size = img.checksize(request.size)
- request.object = request.object == variables.yes
- request["repeat"] = request["repeat"] == variables.yes
- request.preview = request.preview == variables.yes
- request.cache = request.cache ~= "" and request.cache
- request.prefix = request.prefix ~= "" and request.prefix
- request.format = request.format ~= "" and request.format
- -- request.width = (w and w > 0) or false
- -- request.height = (h and h > 0) or false
+ -- request.width/height are strings and are only used when no natural dimensions
+ -- can be determined; at some point the handlers might set them to numbers instead
+ local w = tonumber(request.width) or 0
+ local h = tonumber(request.height) or 0
+ request.width = w > 0 and w or nil
+ request.height = h > 0 and h or nil
+ --
+ request.page = math.max(tonumber(request.page) or 1,1)
+ request.size = img.checksize(request.size)
+ request.object = request.object == v_yes
+ request["repeat"] = request["repeat"] == v_yes
+ request.preview = request.preview == v_yes
+ request.cache = request.cache ~= "" and request.cache
+ request.prefix = request.prefix ~= "" and request.prefix
+ request.format = request.format ~= "" and request.format
table.merge(figuredata.request,request)
end
return figuredata
@@ -388,7 +459,7 @@ function figures.current()
return callstack[#callstack] or lastfiguredata
end
-function figures.get(category,tag,default)
+local function get(category,tag,default)
local value = lastfiguredata and lastfiguredata[category]
value = value and value[tag]
if not value or value == "" or value == true then
@@ -398,14 +469,19 @@ function figures.get(category,tag,default)
end
end
---
+figures.get = get
-function figures.tprint(category,tag,default)
- context(figures.get(category,tag,default))
+function commands.figurevariable(category,tag,default)
+ context(get(category,tag,default))
end
-local defaultformat = "pdf"
-local defaultprefix = "m_k_i_v_"
+function commands.figurestatus (tag,default) context(get("status", tag,default)) end
+function commands.figurerequest(tag,default) context(get("request",tag,default)) end
+function commands.figureused (tag,default) context(get("used", tag,default)) end
+
+function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end
+function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end
+function commands.figurefiletype() context(file.extname (get("used","fullname"))) end
-- todo: local path or cache path
@@ -413,217 +489,249 @@ local function forbiddenname(filename)
if not filename or filename == "" then
return false
end
- local expandedfullname = file.collapsepath(filename,true)
- local expandedinputname = file.collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true)
+ local expandedfullname = collapsepath(filename,true)
+ local expandedinputname = collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true)
if expandedfullname == expandedinputname then
- report_inclusion("skipping graphic with same name as input filename (%s), enforce suffix",expandedinputname)
+ report_inclusion("skipping graphic with same name as input filename %a, enforce suffix",expandedinputname)
return true
end
- local expandedoutputname = file.collapsepath(codeinjections.getoutputfilename(),true)
+ local expandedoutputname = collapsepath(codeinjections.getoutputfilename(),true)
if expandedfullname == expandedoutputname then
- report_inclusion("skipping graphic with same name as output filename (%s), enforce suffix",expandedoutputname)
+ report_inclusion("skipping graphic with same name as output filename %a, enforce suffix",expandedoutputname)
return true
end
end
local function register(askedname,specification)
- if specification then
- if forbiddenname(specification.fullname) then
- specification = { }
- else
- local format = specification.format
- if format then
- local conversion = specification.conversion
- local resolution = specification.resolution
- if conversion == "" then
- conversion = nil
- end
- if resolution == "" then
- resolution = nil
- end
- local newformat = conversion
- if not newformat or newformat == "" then
+ if not specification then
+ specification = { }
+ elseif forbiddenname(specification.fullname) then
+ specification = { }
+ else
+ local format = specification.format
+ if format then
+ local conversion = specification.conversion
+ local resolution = specification.resolution
+ if conversion == "" then
+ conversion = nil
+ end
+ if resolution == "" then
+ resolution = nil
+ end
+ local newformat = conversion
+ if not newformat or newformat == "" then
+ newformat = defaultformat
+ end
+ if trace_conversion then
+ report_inclusion("checking conversion of %a, fullname %a, old format %a, new format %a, conversion %a, resolution %a",
+ askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default")
+ end
+ -- quick hack
+ local converter = (newformat ~= format or resolution) and converters[format]
+ if converter then
+ if converter[newformat] then
+ converter = converter[newformat]
+ else
newformat = defaultformat
- end
- if trace_conversion then
- report_inclusion("checking conversion of '%s' (%s): old format '%s', new format '%s', conversion '%s', resolution '%s'",
- askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default")
- end
- -- quick hack
- -- local converter = (newformat ~= format) and converters[format]
- local converter = (newformat ~= format or resolution) and converters[format]
- if converter then
if converter[newformat] then
converter = converter[newformat]
else
+ converter = nil
newformat = defaultformat
- if converter[newformat] then
- converter = converter[newformat]
- else
- converter = nil
- newformat = defaultformat
- end
end
- elseif trace_conversion then
- report_inclusion("no converter for '%s' -> '%s'",format,newformat)
end
- if converter then
- local oldname = specification.fullname
- local newpath = file.dirname(oldname)
- local oldbase = file.basename(oldname)
- --
- -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif
- -- so we cannot safely remove a suffix (unless we do that for known suffixes)
- --
- -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix
- --
- -- so we now have (also see *):
- --
- local newbase = oldbase
- --
- local fc = specification.cache or figures.cachepaths.path
- if fc and fc ~= "" and fc ~= "." then
- newpath = fc
- else
- newbase = defaultprefix .. newbase
- end
- if not file.is_writable(newpath) then
- if trace_conversion then
- report_inclusion("path '%s'is not writable, forcing conversion path '.' ",newpath)
- end
- newpath = "."
- end
- local subpath = specification.subpath or figures.cachepaths.subpath
- if subpath and subpath ~= "" and subpath ~= "." then
- newpath = newpath .. "/" .. subpath
- end
- local prefix = specification.prefix or figures.cachepaths.prefix
- if prefix and prefix ~= "" then
- newbase = prefix .. newbase
+ elseif trace_conversion then
+ report_inclusion("no converter for %a to %a",format,newformat)
+ end
+ if converter then
+ local oldname = specification.fullname
+ local newpath = file.dirname(oldname)
+ local oldbase = file.basename(oldname)
+ --
+ -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif
+ -- so we cannot safely remove a suffix (unless we do that for known suffixes)
+ --
+ -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix
+ --
+ -- so we now have (also see *):
+ --
+ local newbase = oldbase
+ --
+ local fc = specification.cache or figures.cachepaths.path
+ if fc and fc ~= "" and fc ~= "." then
+ newpath = fc
+ else
+ newbase = defaultprefix .. newbase
+ end
+ if not file.is_writable(newpath) then
+ if trace_conversion then
+ report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
end
- if resolution and resolution ~= "" then -- the order might change
- newbase = newbase .. "_" .. resolution
+ newpath = "."
+ end
+ local subpath = specification.subpath or figures.cachepaths.subpath
+ if subpath and subpath ~= "" and subpath ~= "." then
+ newpath = newpath .. "/" .. subpath
+ end
+ local prefix = specification.prefix or figures.cachepaths.prefix
+ if prefix and prefix ~= "" then
+ newbase = prefix .. newbase
+ end
+ if resolution and resolution ~= "" then -- the order might change
+ newbase = newbase .. "_" .. resolution
+ end
+ --
+ -- see *, we had:
+ --
+ -- local newbase = file.addsuffix(newbase,newformat)
+ --
+ -- but now have (result of Aditya's web image testing):
+ --
+ -- as a side effect we can now have multiple fetches with different
+ -- original figures_formats, not that it matters much (apart from older conversions
+ -- sticking around)
+ --
+ local newbase = newbase .. "." .. newformat
+ --
+ local newname = file.join(newpath,newbase)
+ dir.makedirs(newpath)
+ oldname = collapsepath(oldname)
+ newname = collapsepath(newname)
+ local oldtime = lfs.attributes(oldname,'modification') or 0
+ local newtime = lfs.attributes(newname,'modification') or 0
+ if newtime == 0 or oldtime > newtime then
+ if trace_conversion then
+ report_inclusion("converting %a (%a) from %a to %a",askedname,oldname,format,newformat)
end
- --
- -- see *, we had:
- --
- -- local newbase = file.addsuffix(newbase,newformat)
- --
- -- but now have (result of Aditya's web image testing):
- --
- -- as a side effect we can now have multiple fetches with different
- -- original formats, not that it matters much (apart from older conversions
- -- sticking around)
- --
- local newbase = newbase .. "." .. newformat
- --
- local newname = file.join(newpath,newbase)
- dir.makedirs(newpath)
- oldname = file.collapsepath(oldname)
- newname = file.collapsepath(newname)
- local oldtime = lfs.attributes(oldname,'modification') or 0
- local newtime = lfs.attributes(newname,'modification') or 0
- if newtime == 0 or oldtime > newtime then
- if trace_conversion then
- report_inclusion("converting '%s' (%s) from '%s' to '%s'",askedname,oldname,format,newformat)
- end
- converter(oldname,newname,resolution or "")
- else
- if trace_conversion then
- report_inclusion("no need to convert '%s' (%s) from '%s' to '%s'",askedname,oldname,format,newformat)
- end
+ converter(oldname,newname,resolution or "")
+ else
+ if trace_conversion then
+ report_inclusion("no need to convert %a (%a) from %a to %a",askedname,oldname,format,newformat)
end
- if io.exists(newname) and io.size(newname) > 0 then
- specification.foundname = oldname
- specification.fullname = newname
- specification.prefix = prefix
- specification.subpath = subpath
- specification.converted = true
- format = newformat
- if not figures.suffixes[format] then
- -- maybe the new format is lowres.png (saves entry in suffixes)
- -- so let's do thsi extra check
- local suffix = file.suffix(newformat)
- if figures.suffixes[suffix] then
- if trace_figures then
- report_inclusion("using suffix '%s' as format for '%s'",suffix,format)
- end
- format = suffix
+ end
+ if io.exists(newname) and io.size(newname) > 0 then
+ specification.foundname = oldname
+ specification.fullname = newname
+ specification.prefix = prefix
+ specification.subpath = subpath
+ specification.converted = true
+ format = newformat
+ if not figures_suffixes[format] then
+ -- maybe the new format is lowres.png (saves entry in suffixes)
+ -- so let's do this extra check
+ local suffix = file.suffix(newformat)
+ if figures_suffixes[suffix] then
+ if trace_figures then
+ report_inclusion("using suffix %a as format for %a",suffix,format)
end
+ format = suffix
end
- elseif io.exists(oldname) then
- specification.fullname = oldname -- was newname
- specification.converted = false
end
+ elseif io.exists(oldname) then
+ specification.fullname = oldname -- was newname
+ specification.converted = false
end
end
- local found = figures.suffixes[format] -- validtypes[format]
- if not found then
- specification.found = false
- if trace_figures then
- report_inclusion("format not supported: %s",format)
- end
- else
- specification.found = true
- if trace_figures then
- if validtypes[format] then -- format?
- report_inclusion("format natively supported by backend: %s",format)
- else
- report_inclusion("format supported by output file format: %s",format)
- end
+ end
+ local found = figures_suffixes[format] -- validtypes[format]
+ if not found then
+ specification.found = false
+ if trace_figures then
+ report_inclusion("format %a is not supported",format)
+ end
+ else
+ specification.found = true
+ if trace_figures then
+ if validtypes[format] then -- format?
+ report_inclusion("format %a natively supported by backend",format)
+ else
+ report_inclusion("format %a supported by output file format",format)
end
end
end
- else
- specification = { }
end
specification.foundname = specification.foundname or specification.fullname
- figures.found[askedname .. "->" .. (specification.conversion or "default") .. "->" .. (specification.resolution or "default")] = specification
+ local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default")
+ figures_found[askedhash] = specification
return specification
end
-local resolve_too = true -- urls
+local resolve_too = false -- true
+
+local internalschemes = {
+ file = true,
+}
local function locate(request) -- name, format, cache
-- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf
-- todo: more restricted cleanpath
local askedname = request.name
- local foundname = figures.found[askedname .. "->" .. (request.conversion or "default") .. "->" .. (request.resolution or "default")]
+ local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default")
+ local foundname = figures_found[askedhash]
if foundname then
return foundname
end
+ --
+ local askedcache = request.cache
+ local askedconversion = request.conversion
+ local askedresolution = request.resolution
+ --
+ if request.format == "" or request.format == "unknown" then
+ request.format = nil
+ end
-- protocol check
local hashed = url.hashed(askedname)
- if hashed then
- if hashed.scheme == "file" then
- local path = hashed.path
- if path and path ~= "" then
- askedname = path
+ if not hashed then
+ -- go on
+ elseif internalschemes[hashed.scheme] then
+ local path = hashed.path
+ if path and path ~= "" then
+ askedname = path
+ end
+ else
+ local foundname = resolvers.findbinfile(askedname)
+ if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy
+ if trace_figures then
+ report_inclusion("unknown url %a",askedname)
end
+ -- url not found
+ return register(askedname)
+ end
+ local askedformat = request.format or file.suffix(askedname) or ""
+ local guessedformat = figures.guess(foundname)
+ if askedformat ~= guessedformat then
+ if trace_figures then
+ report_inclusion("url %a has unknown format",askedname)
+ end
+ -- url found, but wrong format
+ return register(askedname)
else
- local foundname = resolvers.findbinfile(askedname)
- if foundname then
- askedname = foundname
+ if trace_figures then
+ report_inclusion("url %a is resolved to %a",askedname,foundname)
end
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname,
+ format = askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
end
end
-- we could use the hashed data instead
local askedpath= file.is_rootbased_path(askedname)
local askedbase = file.basename(askedname)
- local askedformat = (request.format ~= "" and request.format ~= "unknown" and request.format) or file.extname(askedname) or ""
- local askedcache = request.cache
- local askedconversion = request.conversion
- local askedresolution = request.resolution
+ local askedformat = request.format or file.suffix(askedname) or ""
if askedformat ~= "" then
askedformat = lower(askedformat)
if trace_figures then
- report_inclusion("strategy: forced format %s",askedformat)
+ report_inclusion("forcing format %a",askedformat)
end
- local format = figures.suffixes[askedformat]
+ local format = figures_suffixes[askedformat]
if not format then
- local figurepatterns = figures.patterns
- for i=1,#figurepatterns do
- local pattern = figurepatterns[i]
+ for i=1,#figures_patterns do
+ local pattern = figures_patterns[i]
if find(askedformat,pattern[1]) then
format = pattern[2]
break
@@ -646,7 +754,7 @@ local function locate(request) -- name, format, cache
return register(askedname)
end
elseif trace_figures then
- report_inclusion("strategy: unknown format %s",askedformat)
+ report_inclusion("unknown format %a",askedformat)
end
if askedpath then
-- path and type given, todo: strip pieces of path
@@ -663,9 +771,8 @@ local function locate(request) -- name, format, cache
end
else
-- type given
- local figurepaths = figures.paths
- for i=1,#figurepaths do
- local path = figurepaths[i]
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
local check = path .. "/" .. askedname
-- we pass 'true' as it can be an url as well, as the type
-- is given we don't waste much time
@@ -697,12 +804,11 @@ local function locate(request) -- name, format, cache
end
elseif askedpath then
if trace_figures then
- report_inclusion("strategy: rootbased path")
+ report_inclusion("using rootbased path")
end
- local figureorder = figures.order
- for i=1,#figureorder do
- local format = figureorder[i]
- local list = formats[format].list or { format }
+ for i=1,#figures_order do
+ local format = figures_order[i]
+ local list = figures_formats[format].list or { format }
for j=1,#list do
local suffix = list[j]
local check = file.addsuffix(askedname,suffix)
@@ -722,27 +828,25 @@ local function locate(request) -- name, format, cache
else
if figures.preferquality then
if trace_figures then
- report_inclusion("strategy: unknown format, prefer quality")
+ report_inclusion("unknown format, quality preferred")
end
- local figurepaths = figures.paths
- local figureorder = figures.order
- for j=1,#figureorder do
- local format = figureorder[j]
- local list = formats[format].list or { format }
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
for k=1,#list do
local suffix = list[k]
-- local name = file.replacesuffix(askedbase,suffix)
local name = file.replacesuffix(askedname,suffix)
- for i=1,#figurepaths do
- local path = figurepaths[i]
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
local check = path .. "/" .. name
local isfile = url.hashed(check).scheme == "file"
if not isfile then
if trace_figures then
- report_inclusion("warning: skipping path %s",path)
+ report_inclusion("warning: skipping path %a",path)
end
else
- local foundname, quitscanning, forcedformat = figures.exists(check,format,true)
+ local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true)
if foundname then
return register(askedname, {
askedname = askedname,
@@ -759,15 +863,13 @@ local function locate(request) -- name, format, cache
end
else -- 'location'
if trace_figures then
- report_inclusion("strategy: unknown format, prefer path")
+ report_inclusion("unknown format, using path strategy")
end
- local figurepaths = figures.paths
- local figureorder = figures.order
- for i=1,#figurepaths do
- local path = figurepaths[i]
- for j=1,#figureorder do
- local format = figureorder[j]
- local list = formats[format].list or { format }
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
for k=1,#list do
local suffix = list[k]
local check = path .. "/" .. file.replacesuffix(askedbase,suffix)
@@ -788,12 +890,11 @@ local function locate(request) -- name, format, cache
end
if figures.defaultsearch then
if trace_figures then
- report_inclusion("strategy: default tex path")
+ report_inclusion("using default tex path")
end
- local figureorder = figures.order
- for j=1,#figureorder do
- local format = figureorder[j]
- local list = formats[format].list or { format }
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
for k=1,#list do
local suffix = list[k]
local check = resolvers.findfile(file.replacesuffix(askedname,suffix))
@@ -835,7 +936,7 @@ function identifiers.default(data)
end
function figures.identify(data)
- data = data or figures.current()
+ data = data or callstack[#callstack] or lastfiguredata
local list = identifiers.list -- defined at the end
for i=1,#list do
local identifier = list[i]
@@ -852,12 +953,12 @@ function figures.exists(askedname,format,resolve)
end
function figures.check(data)
- data = data or figures.current()
+ data = data or callstack[#callstack] or lastfiguredata
return (checkers[data.status.format] or checkers.generic)(data)
end
function figures.include(data)
- data = data or figures.current()
+ data = data or callstack[#callstack] or lastfiguredata
return (includers[data.status.format] or includers.generic)(data)
end
@@ -868,8 +969,7 @@ end
function figures.done(data)
figures.nofprocessed = figures.nofprocessed + 1
- data = data or figures.current()
---~ print(table.serialize(figures.current()))
+ data = data or callstack[#callstack] or lastfiguredata
local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber
local box = texbox[nr]
ds.width = box.width
@@ -877,18 +977,17 @@ function figures.done(data)
ds.xscale = ds.width /(du.width or 1)
ds.yscale = ds.height/(du.height or 1)
ds.page = ds.page or du.page or dr.page -- sort of redundant but can be limited
---~ print(table.serialize(figures.current()))
return data
end
function figures.dummy(data)
- data = data or figures.current()
+ data = data or callstack[#callstack] or lastfiguredata
local dr, du, nr = data.request, data.used, figures.boxnumber
- local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
- du.width = du.width or figures.defaultwidth
- du.height = du.height or figures.defaultheight
- du.depth = du.depth or figures.defaultdepth
- -- box.dir = "TLT"
+ local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
+ du.width = du.width or figures.defaultwidth
+ du.height = du.height or figures.defaultheight
+ du.depth = du.depth or figures.defaultdepth
+ -- box.dir = "TLT"
box.width = du.width
box.height = du.height
box.depth = du.depth
@@ -908,9 +1007,9 @@ function existers.generic(askedname,resolve)
end
if trace_figures then
if result then
- report_inclusion("found: %s -> %s",askedname,result)
+ report_inclusion("%a resolved to %a",askedname,result)
else
- report_inclusion("not found: %s",askedname)
+ report_inclusion("%a cannot be resolved",askedname)
end
end
return result
@@ -931,8 +1030,8 @@ function checkers.generic(data)
if not resolution or resolution == "" then
resolution = "unknown"
end
- local hash = name .. "->" .. page .. "->" .. size .. "->" .. color .. "->" .. conversion .. "->" .. resolution .. "->" .. mask
- local figure = figures.loaded[hash]
+ local hash = f_hash_full(name,page,size,color,conversion,resolution,mask)
+ local figure = figures_loaded[hash]
if figure == nil then
figure = img.new {
filename = name,
@@ -942,30 +1041,30 @@ function checkers.generic(data)
}
codeinjections.setfigurecolorspace(data,figure)
codeinjections.setfiguremask(data,figure)
- figure = (figure and img.scan(figure)) or false
+ figure = figure and img.check(img.scan(figure)) or false
local f, d = codeinjections.setfigurealternative(data,figure)
figure, data = f or figure, d or data
- figures.loaded[hash] = figure
+ figures_loaded[hash] = figure
if trace_conversion then
- report_inclusion("new graphic, hash: %s",hash)
+ report_inclusion("new graphic, using hash %a",hash)
end
else
if trace_conversion then
- report_inclusion("existing graphic, hash: %s",hash)
+ report_inclusion("existing graphic, using hash %a",hash)
end
end
if figure then
- du.width = figure.width
- du.height = figure.height
- du.pages = figure.pages
- du.depth = figure.depth or 0
- du.colordepth = figure.colordepth or 0
+ du.width = figure.width
+ du.height = figure.height
+ du.pages = figure.pages
+ du.depth = figure.depth or 0
+ du.colordepth = figure.colordepth or 0
du.xresolution = figure.xres or 0
du.yresolution = figure.yres or 0
- du.xsize = figure.xsize or 0
- du.ysize = figure.ysize or 0
- ds.private = figure
- ds.hash = hash
+ du.xsize = figure.xsize or 0
+ du.ysize = figure.ysize or 0
+ ds.private = figure
+ ds.hash = hash
end
return data
end
@@ -976,19 +1075,19 @@ function includers.generic(data)
dr.width = du.width
dr.height = du.height
local hash = figures.hash(data)
- local figure = figures.used[hash]
---~ figures.registerresource {
---~ filename = du.fullname,
---~ width = dr.width,
---~ height = dr.height,
---~ }
+ local figure = figures_used[hash]
+ -- figures.registerresource {
+ -- filename = du.fullname,
+ -- width = dr.width,
+ -- height = dr.height,
+ -- }
if figure == nil then
figure = ds.private
if figure then
figure = img.copy(figure)
- figure = (figure and img.clone(figure,data.request)) or false
+ figure = figure and img.clone(figure,data.request) or false
end
- figures.used[hash] = figure
+ figures_used[hash] = figure
end
if figure then
local nr = figures.boxnumber
@@ -1005,7 +1104,7 @@ end
-- -- -- nongeneric -- -- --
-function checkers.nongeneric(data,command) -- todo: macros and context.*
+local function checkers_nongeneric(data,command) -- todo: macros and context.*
local dr, du, ds = data.request, data.used, data.status
local name = du.fullname or "unknown nongeneric"
local hash = name
@@ -1024,10 +1123,13 @@ function checkers.nongeneric(data,command) -- todo: macros and context.*
return data
end
-function includers.nongeneric(data)
+local function includers_nongeneric(data)
return data
end
+checkers.nongeneric = checkers_nongeneric
+includers.nongeneric = includers_nongeneric
+
-- -- -- mov -- -- --
function checkers.mov(data)
@@ -1038,7 +1140,7 @@ function checkers.mov(data)
dr.width, dr.height = width, height
du.width, du.height, du.foundname = width, height, foundname
if trace_inclusion then
- report_inclusion("including movie '%s': width %s, height %s",foundname,width,height)
+ report_inclusion("including movie %a, width %p, height %p",foundname,width,height)
end
-- we need to push the node.write in between ... we could make a shared helper for this
context.startfoundexternalfigure(width .. "sp",height .. "sp")
@@ -1062,9 +1164,11 @@ includers.mov = includers.nongeneric
-- -- -- mps -- -- --
+internalschemes.mprun = true
+
local function internal(askedname)
- local spec, mprun, mpnum = match(lower(askedname),"mprun(:?)(.-)%.(%d+)")
- if spec == ":" then
+ local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)")
+ if spec ~= "" then
return mprun, mpnum
else
return "", mpnum
@@ -1083,9 +1187,9 @@ end
function checkers.mps(data)
local mprun, mpnum = internal(data.used.fullname)
if mpnum then
- return checkers.nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end)
+ return checkers_nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end)
else
- return checkers.nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end)
+ return checkers_nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end)
end
end
@@ -1095,11 +1199,11 @@ includers.mps = includers.nongeneric
function existers.tex(askedname)
askedname = resolvers.findfile(askedname)
- return (askedname ~= "" and askedname) or false
+ return askedname ~= "" and askedname or false
end
function checkers.tex(data)
- return checkers.nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end)
+ return checkers_nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end)
end
includers.tex = includers.nongeneric
@@ -1113,7 +1217,7 @@ function existers.buffer(askedname)
end
function checkers.buffer(data)
- return checkers.nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end)
+ return checkers_nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end)
end
includers.buffers = includers.nongeneric
@@ -1124,9 +1228,9 @@ function existers.auto(askedname)
local name = gsub(askedname, ".auto$", "")
local format = figures.guess(name)
if format then
- report_inclusion("format guess for %q: %s",name,format)
+ report_inclusion("format guess %a for %a",format,name)
else
- report_inclusion("format guess for %q is not possible",name)
+ report_inclusion("format guess for %a is not possible",name)
end
return format and name, true, format
end
@@ -1139,7 +1243,7 @@ includers.auto = includers.generic
existers.cld = existers.tex
function checkers.cld(data)
- return checkers.nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end)
+ return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end)
end
includers.cld = includers.nongeneric
@@ -1151,182 +1255,231 @@ local function makeoptions(options)
return (to == "table" and concat(options," ")) or (to == "string" and options) or ""
end
-local function runprogram(template,binary,...)
- local command = format(template,binary,...)
+-- programs.makeoptions = makeoptions
+
+local function runprogram(binary,argument,variables)
local binary = match(binary,"[%S]+") -- to be sure
- if os.which(binary) then
+ if type(argument) == "table" then
+ argument = concat(argument," ") -- for old times sake
+ end
+ if not os.which(binary) then
+ report_inclusion("program %a is not installed, not running command: %s",binary,command)
+ elseif not argument or argument == "" then
+ report_inclusion("nothing to run, unknown program %a",binary)
+ else
+ local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables))
if trace_conversion or trace_programs then
- report_inclusion("running: %s",command)
+ report_inclusion("running command: %s",command)
end
os.spawn(command)
- else
- report_inclusion("program '%s' is not installed, not running: %s",binary,command)
end
end
--- -- -- eps -- -- --
+programs.run = runprogram
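The calling convention changes with this hunk: runprogram(binary,argument,variables) fills %key% placeholders in the argument template before spawning. As a rough standalone illustration, with a simple gsub as a stand-in for the real replacetemplate/longtostring helpers and a hypothetical output name:

    local function expand(template,variables)           -- stand-in for the template helpers
        return (template:gsub("%%(%w+)%%",variables))
    end
    local argument = [[-q -sDEVICE=pdfwrite -sOutputFile=%newname% %oldname% -c quit]]
    local command  = string.format([["%s" %s]], "gs", expand(argument, {
        oldname = "cow.eps",
        newname = "cow-converted.pdf",                   -- hypothetical name, just for the sketch
    }))
    print(command) -- "gs" -q -sDEVICE=pdfwrite -sOutputFile=cow-converted.pdf cow.eps -c quit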
-local epsconverter = { }
-converters.eps = epsconverter
+-- -- -- eps & pdf -- -- --
+--
+-- \externalfigure[cow.eps]
+-- \externalfigure[cow.pdf][conversion=stripped]
+
+local epsconverter = converters.eps or { }
+converters.eps = epsconverter
+converters.ps = epsconverter
-programs.gs = {
+local epstopdf = {
resolutions = {
- [variables.low] = "screen",
- [variables.medium] = "ebook",
- [variables.high] = "prepress",
+ [v_low] = "screen",
+ [v_medium] = "ebook",
+ [v_high] = "prepress",
},
- options = {
- "-dAutoRotatePages=/None",
- "-dPDFSETTINGS=/%s",
- "-dEPSCrop",
- },
- command = (os.type == "windows" and "gswin32c") or "gs"
+ command = os.type == "windows" and "gswin32c" or "gs",
+ -- -dProcessDSCComments=false
+ argument = [[
+ -q
+ -sDEVICE=pdfwrite
+ -dNOPAUSE
+ -dNOCACHE
+ -dBATCH
+ -dAutoRotatePages=/None
+ -dPDFSETTINGS=/%presets%
+ -dEPSCrop
+ -sOutputFile=%newname%
+ %oldname%
+ -c quit
+ ]],
}
+programs.epstopdf = epstopdf
+programs.gs = epstopdf
+
function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change
- local gs = programs.gs
- runprogram (
- '%s -q -sDEVICE=pdfwrite -dNOPAUSE -dNOCACHE -dBATCH %s -sOutputFile="%s" "%s" -c quit',
- gs.command,
- format(makeoptions(gs.options),gs.resolutions[resolution or ""] or "prepress"),
- newname,
- oldname
- )
+ local epstopdf = programs.epstopdf -- can be changed
+ local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+ runprogram(epstopdf.command, epstopdf.argument, {
+ newname = newname,
+ oldname = oldname,
+ presets = presets,
+ } )
end
epsconverter.default = epsconverter.pdf
+local pdfconverter = converters.pdf or { }
+converters.pdf = pdfconverter
+
+programs.pdftoeps = {
+ command = "pdftops",
+ argument = [[-eps "%oldname%" "%newname%"]],
+}
+
+pdfconverter.stripped = function(oldname,newname)
+ local pdftoeps = programs.pdftoeps -- can be changed
+ local epstopdf = programs.epstopdf -- can be changed
+ local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+ local tmpname = newname .. ".tmp"
+ runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
+ runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
+ os.remove(tmpname)
+end
+
+figures.registersuffix("stripped","pdf")
+
-- -- -- svg -- -- --
-local svgconverter = { }
-converters.svg = svgconverter
-converters.svgz = svgconverter
+local svgconverter = { }
+converters.svg = svgconverter
+converters.svgz = svgconverter
-- inkscape on windows only works with complete paths
programs.inkscape = {
- options = {
- "--export-dpi=600"
- },
- command = "inkscape"
+ command = "inkscape",
+ pdfargument = [[
+ "%oldname%"
+ --export-dpi=600
+ -A
+ "%newname%"
+ ]],
+ pngargument = [[
+ "%oldname%"
+ --export-dpi=600
+ --export-png="%newname%"
+ ]],
}
function svgconverter.pdf(oldname,newname)
- local inkscape = programs.inkscape
- local oldname = dir.expandname(oldname)
- local newname = dir.expandname(newname)
- runprogram (
- '%s "%s" %s -A "%s"',
- inkscape.command, oldname, makeoptions(inkscape.options), newname
- )
+ local inkscape = programs.inkscape -- can be changed
+ runprogram(inkscape.command, inkscape.pdfargument, {
+ newname = expandfilename(newname),
+ oldname = expandfilename(oldname),
+ } )
end
function svgconverter.png(oldname,newname)
local inkscape = programs.inkscape
- runprogram (
- '%s "%s" --export-png="%s" %s',
- inkscape.command, oldname, newname, makeoptions(inkscape.options)
- )
+ runprogram(inkscape.command, inkscape.pngargument, {
+ newname = expandfilename(newname),
+ oldname = expandfilename(oldname),
+ } )
end
svgconverter.default = svgconverter.pdf
-- -- -- gif -- -- --
-
-local gifconverter = { }
-converters.gif = gifconverter
-
-programs.convert = {
- command = "gm convert" -- graphicmagick
-}
-
-function gifconverter.pdf(oldname,newname)
- local convert = programs.convert
- runprogram (
- "%s %s %s %s",
- convert.command, makeoptions(convert.options), oldname, newname
- )
-end
-
-gifconverter.default = gifconverter.pdf
-
-- -- -- tif -- -- --
--- http://sourceforge.net/projects/gnuwin32/files/tiff/3.8.2-1/tiff-3.8.2-1-bin.zip/download
+local gifconverter = converters.gif or { }
+local tifconverter = converters.tif or { }
+local bmpconverter = converters.bmp or { }
-local tifconverter = { }
-converters.tif = tifconverter
+converters.gif = gifconverter
+converters.tif = tifconverter
+converters.bmp = bmpconverter
programs.convert = {
---~ command = "convert" -- imagemagick
- command = "gm convert" -- graphicmagick
---~ command = "tiff2pdf"
+ command = "gm", -- graphicmagick
+ argument = [[convert "%oldname%" "%newname%"]],
}
-function tifconverter.pdf(oldname,newname)
+local function converter(oldname,newname)
local convert = programs.convert
- runprogram (
- "%s %s %s %s",
- convert.command, makeoptions(convert.options), oldname, newname
---~ "%s %s -z -o %s %s",
---~ convert.command, makeoptions(convert.options), newname, oldname
- )
+ runprogram(convert.command, convert.argument, {
+ newname = newname,
+ oldname = oldname,
+ } )
end
-tifconverter.default = tifconverter.pdf
+tifconverter.pdf = converter
+gifconverter.pdf = converter
+bmpconverter.pdf = converter
+
+gifconverter.default = converter
+tifconverter.default = converter
+bmpconverter.default = converter
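With this change gif, tif and bmp all funnel through the same GraphicsMagick invocation, and runprogram reports and refuses to spawn anything whose binary cannot be found. A quick hedged way to see which helper programs are actually available (os.which is ConTeXt's l-os helper, also used in runprogram above):

    for _, binary in ipairs { "gs", "pdftops", "inkscape", "gm" } do
        print(binary, os.which(binary) and "found" or "not installed")
    end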
-- todo: lowres
-- -- -- bases -- -- --
-figures.bases = { }
-figures.bases.list = { } -- index => { basename, fullname, xmlroot }
-figures.bases.used = { } -- [basename] => { basename, fullname, xmlroot } -- pointer to list
-figures.bases.found = { }
-figures.bases.enabled = false
+local bases = allocate()
+figures.bases = bases
+
+local bases_list = nil -- index => { basename, fullname, xmlroot }
+local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list
+local bases_found = nil
+local bases_enabled = false
+
+local function reset()
+ bases_list = allocate()
+ bases_used = allocate()
+ bases_found = allocate()
+ bases_enabled = false
+ bases.list = bases_list
+ bases.used = bases_used
+ bases.found = bases_found
+end
-local bases = figures.bases
+reset()
function bases.use(basename)
if basename == "reset" then
- bases.list, bases.used, bases.found, bases.enabled = { }, { }, { }, false
+ reset()
else
basename = file.addsuffix(basename,"xml")
- if not bases.used[basename] then
+ if not bases_used[basename] then
local t = { basename, nil, nil }
- bases.used[basename] = t
- bases.list[#bases.list+1] = t
- if not bases.enabled then
- bases.enabled = true
+ bases_used[basename] = t
+ bases_list[#bases_list+1] = t
+ if not bases_enabled then
+ bases_enabled = true
xml.registerns("rlx","http://www.pragma-ade.com/schemas/rlx") -- we should be able to do this per xml file
end
if trace_bases then
- report_inclusion("registering base '%s'",basename)
+ report_inclusion("registering base %a",basename)
end
end
end
end
-function bases.find(basename,askedlabel)
+local function bases_find(basename,askedlabel)
if trace_bases then
- report_inclusion("checking for '%s' in base '%s'",askedlabel,basename)
+ report_inclusion("checking for %a in base %a",askedlabel,basename)
end
basename = file.addsuffix(basename,"xml")
- local t = bases.found[askedlabel]
+ local t = bases_found[askedlabel]
if t == nil then
- local base = bases.used[basename]
+ local base = bases_used[basename]
local page = 0
if base[2] == nil then
-- not yet located
- local figurepaths = figures.paths
- for i=1,#figurepaths do
- local path = figurepaths[i]
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
local xmlfile = path .. "/" .. basename
if io.exists(xmlfile) then
base[2] = xmlfile
base[3] = xml.load(xmlfile)
if trace_bases then
- report_inclusion("base '%s' loaded",xmlfile)
+ report_inclusion("base %a loaded",xmlfile)
end
break
end
@@ -1343,15 +1496,15 @@ function bases.find(basename,askedlabel)
name = xml.text(e,"../*:file"), -- to be checked
page = page,
}
- bases.found[askedlabel] = t
+ bases_found[askedlabel] = t
if trace_bases then
- report_inclusion("figure '%s' found in base '%s'",askedlabel,base[2])
+ report_inclusion("figure %a found in base %a",askedlabel,base[2])
end
return t
end
end
if trace_bases and not t then
- report_inclusion("figure '%s' not found in base '%s'",askedlabel,base[2])
+ report_inclusion("figure %a not found in base %a",askedlabel,base[2])
end
end
end
@@ -1360,11 +1513,10 @@ end
-- we can access sequential or by name
-function bases.locate(askedlabel)
- local list = bases.list
- for i=1,#list do
- local entry = list[i]
- local t = bases.find(entry[1],askedlabel)
+local function bases_locate(askedlabel)
+ for i=1,#bases_list do
+ local entry = bases_list[i]
+ local t = bases_find(entry[1],askedlabel)
if t then
return t
end
@@ -1373,9 +1525,9 @@ function bases.locate(askedlabel)
end
function identifiers.base(data)
- if bases.enabled then
+ if bases_enabled then
local dr, du, ds = data.request, data.used, data.status
- local fbl = bases.locate(dr.name or dr.label)
+ local fbl = bases_locate(dr.name or dr.label)
if fbl then
du.page = fbl.page
du.format = fbl.format
@@ -1389,6 +1541,9 @@ function identifiers.base(data)
return data
end
+bases.locate = bases_locate
+bases.find = bases_find
+
identifiers.list = {
identifiers.base,
identifiers.default
@@ -1433,7 +1588,7 @@ function figures.applyratio(width,height,w,h) -- width and height are strings an
end
end
--- example of a simple plugin:
+-- example of simple plugins:
--
-- figures.converters.png = {
-- png = function(oldname,newname,resolution)
@@ -1443,9 +1598,12 @@ end
-- end,
-- }
+-- local fig = figures.push { name = pdffile }
+-- figures.identify()
+-- figures.check()
+-- local nofpages = fig.used.pages
+-- figures.pop()
+
+-- interfacing
--- local fig = figures.push { name = pdffile }
--- figures.identify()
--- figures.check()
--- local nofpages = fig.used.pages
--- figures.pop()
+commands.setfigurelookuporder = figures.setorder
diff --git a/Master/texmf-dist/tex/context/base/grph-inc.mkiv b/Master/texmf-dist/tex/context/base/grph-inc.mkiv
index 633130ea939..8557bbb0b85 100644
--- a/Master/texmf-dist/tex/context/base/grph-inc.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-inc.mkiv
@@ -11,15 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\writestatus{loading}{ConTeXt Graphic Macros / Figure Inclusion}
-
-%D todo:
-%D
-%D - color conversion
-%D - alternative images
-%D - a few more obscure things
+% todo: messages
-% use framedcommandhandler
+\writestatus{loading}{ConTeXt Graphic Macros / Figure Inclusion}
\registerctxluafile{grph-inc}{1.001}
\registerctxluafile{grph-fil}{1.001}
@@ -28,25 +22,386 @@
\unprotect
-%D The following registers are used (if only to be downward compatible).
+%D Including graphics is complicated by the fact that we need to locate them first,
+%D optionally manipulate them and scale them next. Lookups are to be done as efficiently
+%D as possible and inclusion of the data might happen only once. In \MKIV\ much of this
+%D is delegated to the \LUA\ end. There is not that much less code than in \MKII\ but it's
+%D more powerful, flexible, pluggable and some of the extended functionality has been
+%D moved from modules to the core. The overall functionality is rather stable and has
+%D not changed much over the years.
+
+\ifdefined\dotagfigure \else \let\dotagfigure\relax \fi
+
+\installcorenamespace{externalfigure}
+\installcorenamespace{externalfigureinstance}
+\installcorenamespace{externalfigurecollection}
+
+\installframedcommandhandler \??externalfigure {externalfigure} \??externalfigure
+
+\let\setupexternalfigures\setupexternalfigure
+
+\setupexternalfigures[% we really need the defaults
+ \c!method =,
+ \c!label =,
+ \c!size =,
+ \c!conversion =,
+ \c!resolution =,
+ \c!prefix =,
+ \c!cache =,
+ \c!page =\zerocount,
+ \c!display =,
+ \c!mask =,
+ \c!preset =\v!yes,
+ \c!split =,
+ \c!color =,
+ \c!symbol =\v!no,
+ \c!controls =\v!no,
+ \c!resources =,
+ \c!preview =\v!no,
+ \c!repeat =\v!no,
+ \c!foregroundcolor=,
+ \c!interaction =\v!none,
+ \c!hfactor =,
+ \c!wfactor =,
+ \c!factor =,
+ \c!maxwidth =\externalfigureparameter\c!width,
+ \c!maxheight =\externalfigureparameter\c!height,
+ \c!xscale =,
+ \c!yscale =,
+ \c!scale =,
+ \c!sx =\externalfigureparameter\c!s,
+ \c!sy =\externalfigureparameter\c!s,
+ \c!s =1,
+ \c!width =,
+ \c!height =,
+ \c!lines =,
+ \c!grid =,
+ \c!bodyfont =\bodyfontsize,
+ \c!object =\v!yes,
+ \c!corner =\v!rectangular,
+ \c!frame =\v!off,
+ \c!option =,
+ \c!reset =\v!no,
+ \c!directory =,
+ \c!radius =.5\bodyfontsize,
+ \c!background =,
+ \c!splitcolor =\s!white,
+ \c!order =,
+ \c!equalwidth =,
+ \c!equalheight =,
+ \c!location ={\v!local,\v!global},
+ \c!frames =\v!off,
+ \c!ymax =24,
+ \c!xmax =,
+ \c!align =\v!none, % New, for Taco's extremely large graphics.
+ ]
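These defaults are also reachable from the Lua (cld) end. As a hedged illustration only, assuming the usual automatic context.* wrappers that a cld run provides for TeX commands, the same setup and a placement would look roughly like:

    context.setupexternalfigures { directory = "images", location = "global" }
    context.externalfigure( { "cow.pdf" }, { width = "3cm", conversion = "stripped" } )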
+
+%D Defining figures.
+
+\newcount\c_grph_include_nesting
+
+\newtoks \everyexternalfigureresets % for the moment still public
+\newtoks \everyexternalfigurechecks % for the moment still public
+
+% \useexternalfigure[alpha][koe]
+% \useexternalfigure[beta] [koe] [breedte=1cm]
+% \useexternalfigure[gamma][koe][alpha]
+% \useexternalfigure[delta][koe][alpha][breedte=2cm]
+%
+% full width: \externalfigure[koe] \par
+% 3cm wide: \externalfigure[koe] [breedte=3cm] \par
+% full width: \externalfigure[alpha] \par
+% 1cm wide: \externalfigure[beta] \par
+% full width: \externalfigure[gamma] \par
+% 2cm wide: \externalfigure[delta] \par
+% 4cm wide: \externalfigure[beta] [breedte=4cm] \par
+% 5cm wide: \externalfigure[gamma][breedte=5cm] \par
+%
+% \defineexternalfigure[a][width=10cm]
+% \defineexternalfigure[b][width=5cm]
+% \externalfigure[cow][a]
+% \externalfigure[cow][b][height=8cm]
+%
+% \useexternalfigure[x][cow][width=10cm,height=1cm]
+% \externalfigure[x]
+% \externalfigure[x][width=3cm]
+%
+% [label] [filename]
+% [label] [filename] [parent]
+% [label] [filename] [parent] [settings]
+% [label] [filename] [settings]
+%
+% new: more convenient/efficient than
+%
+% \use..[a][a][setting] \externalfigure[b][a]
+%
+% is equivalent to:
+%
+% \def..[a][setting] \externalfigure[b][a]
+%
+% see x-res modules for usage:
+%
+% \defineexternalfigure[name][settings]
+
+%D Defining is persistent, i.e.\ when you redefine an instance,
+%D parameters that were already set need to be set again, otherwise
+%D the old values will be used.
+
+\newconditional\c_grph_include_trace_inheritance
+
+\installtextracker
+ {graphics.inheritance}
+ {\settrue \c_grph_include_trace_inheritance}
+ {\setfalse\c_grph_include_trace_inheritance}
+
+\installcorenamespace{externalfiguredefinition}
+
+% \unexpanded\def\defineexternalfigure
+% {\dodoubleargument\grph_include_define}
+%
+% \def\grph_include_define[#1][#2]%
+% {\setvalue{\??externalfiguredefinition#1}{\setupcurrentexternalfigure[#2]}}
+
+\let\defineexternalfigures\defineexternalfigure
+
+\unexpanded\def\useexternalfigure
+ {\doquadrupleempty\grph_include_use}
+
+% label file parent settings
+% label file settings
+% label file parent
+
+\def\grph_include_use[#1][#2][#3][#4]%
+ {\doifelsenothing{#1}
+ {\doifsomething{#2}
+ {\doifassignmentelse{#3}
+ {\grph_include_use_indeed{#2}{#2}{#3}{#4}}
+ {\grph_include_use_indeed{#2}{#2}\empty{#4}}}}
+ {\doifelsenothing{#2}
+ {\doifassignmentelse{#3}
+ {\grph_include_use_indeed{#1}{#1}\empty{#3}}
+ {\grph_include_use_indeed{#1}{#1}{#3}{#4}}}
+ {\doifassignmentelse{#3}
+ {\grph_include_use_indeed{#1}{#2}\empty{#3}}
+ {\grph_include_use_indeed{#1}{#2}{#3}{#4}}}}}
+
+\def\grph_include_use_indeed#1#2#3#4%
+ {\setvalue{\??externalfigureinstance#1}{\grph_include_setup{#2}{#3}{#4}}%
+ \grph_include_analyze_collection[#2][#4]}
+
+% inclusion
+
+\unexpanded\def\externalfigure
+ {\dotripleempty\grph_include_figure}
+
+\def\grph_include_figure[#1][#2][#3]%
+ {\docheckassignment{#2}%
+ \ifassignment
+ \grph_include_place[#1][][#2]%
+ \else
+ \grph_include_place[#1][#2][#3]%
+ \fi}
-\newbox \foundexternalfigure
-\newif \ifskipexternalfigures
-\newtoks \everyexternalfigureresets
-\newtoks \everyexternalfigurechecks
-\newtoks \externalfigurepostprocessors
+% todo: chain them
-\def\resetfigurevariables {\the\everyexternalfigureresets}
-\def\checkfigurevariables {\the\everyexternalfigurechecks}
+\def\grph_include_setup#1#2#3% name parent settings
+ {\edef\m_grph_include_name {#1}%
+ \edef\m_grph_include_parent{#2}%
+ \ifx\m_grph_include_name\empty \else
+ \let\p_grph_include_name\m_grph_include_name
+ \fi
+ \ifx\m_grph_include_parent\empty \else
+ \grph_include_inherit_from_parent\m_grph_include_parent
+ \fi
+ \setupcurrentexternalfigure[#3]}
+
+% \def\grph_include_inherit_from_parent#1%
+% {\ifcsname\??externalfiguredefinition#1\endcsname
+% \ifconditional\c_grph_include_trace_inheritance\c_grph_include_trace_inheritance\writestatus\m!figures{inheriting from definition: #1}\fi
+% \csname\??externalfiguredefinition#1\endcsname
+% \fi
+% \ifcsname\??externalfigureinstance#1\endcsname
+% \ifconditional\c_grph_include_trace_inheritance\c_grph_include_trace_inheritance\writestatus\m!figures{inheriting from instance: #1}\fi
+% \csname\??externalfigureinstance#1\endcsname
+% \fi}
+
+\def\grph_include_inherit_from_parent#1%
+ {%\ifcsname\??externalfiguredefinition#1\endcsname
+ % \ifconditional\c_grph_include_trace_inheritance\writestatus\m!figures{inheriting from definition: #1}\fi
+ % \csname\??externalfiguredefinition#1\endcsname
+ %\fi
+ \ifcsname\??externalfigure#1:\s!parent\endcsname
+ \let\currentexternalfigure#1%
+ \fi
+ \ifcsname\??externalfigureinstance#1\endcsname
+ \ifconditional\c_grph_include_trace_inheritance\writestatus\m!figures{inheriting from instance: #1}\fi
+ \csname\??externalfigureinstance#1\endcsname
+ \fi}
-%D Historic feature:
+\newtoks\t_grph_include_local_settings
\appendtoks
- \global\let\externalfigurelog\empty
-\to \everyexternalfigureresets
+ \let\textunderscore\letterunderscore % {\string _} % space needed as _ is now letter in unprotected mode (probably no longer needed)
+ %
+ \dontcomplain
+ \restorecatcodes
+ \forgetall
+\to \t_grph_include_local_settings
-\let\runutilityfiletrue \relax \let\runutilityfilefalse \relax
-\let\consultutilityfiletrue\relax \let\consultutilityfilefalse\relax
+\def\grph_include_place_inherit
+ {\ifconditional\c_grph_include_trace_inheritance
+ \writestatus\m!figures{label: \p_grph_include_label, name: \p_grph_include_name, parent: \p_grph_include_parent}%
+ \fi
+ \ifx\p_grph_include_parent\empty
+ % nothing to be done
+ \else\ifx\p_grph_include_parent\p_grph_include_label
+ % redundant
+ \else
+ \grph_include_inherit_from_parent\p_grph_include_parent
+ \fi\fi
+ \ifx\p_grph_include_label\empty
+ % nothing to be done
+ \else
+ \grph_include_inherit_from_parent\p_grph_include_label
+ \fi}
+
+\def\grph_include_place[#1][#2][#3]% [label][file][settings] | [file][settings] | [file][parent][settings]
+ {\bgroup
+ \advance\c_grph_include_nesting\plusone
+ \edef\currentexternalfigure{\the\c_grph_include_nesting}%
+ \checkexternalfigureparent % each inherits from the root
+ %
+ \the\everyexternalfigureresets
+ %
+ \edef\p_grph_include_label{#1}%
+ \let\p_grph_include_name\p_grph_include_label
+ \docheckassignment{#2}%
+ \ifassignment
+ % [label] [settings]
+ \let\p_grph_include_parent\p_grph_include_label
+ \grph_include_place_inherit
+ \setupcurrentexternalfigure[#2]%
+ \else
+ % [label] [parent] [settings]
+ \edef\p_grph_include_parent{#2}%
+ \ifx\p_grph_include_parent\empty
+ \let\p_grph_include_parent\p_grph_include_label
+ \fi
+ \grph_include_place_inherit
+ \setupcurrentexternalfigure[#3]%
+ \fi
+ %
+ \the\everyexternalfigurechecks
+ %
+ \the\t_grph_include_local_settings
+ %
+ \edef\p_width {\externalfigureparameter\c!width}%
+ \edef\p_height{\externalfigureparameter\c!height}%
+ %
+ \dostarttagged\t!image\empty
+ \ctxlua{figures.push {
+ name = "\p_grph_include_name",
+ label = "\p_grph_include_label",
+ page = "\externalfigureparameter\c!page",
+ size = "\externalfigureparameter\c!size",
+ object = "\externalfigureparameter\c!object",
+ prefix = "\externalfigureparameter\c!prefix",
+ cache = "\externalfigureparameter\c!cache",
+ format = "\externalfigureparameter\c!method",
+ preset = "\externalfigureparameter\c!prefix",
+ controls = "\externalfigureparameter\c!controls",
+ resources = "\externalfigureparameter\c!resources",
+ preview = "\externalfigureparameter\c!preview",
+ display = "\externalfigureparameter\c!display",
+ mask = "\externalfigureparameter\c!mask",
+ conversion = "\externalfigureparameter\c!conversion",
+ resolution = "\externalfigureparameter\c!resolution",
+ color = "\internalspotcolorparent{\externalfigureparameter\c!color}", % hack is needed
+ ["repeat"] = "\externalfigureparameter\c!repeat",
+ \ifx\p_width\empty \else
+ width = \number\dimexpr\p_width,
+ \fi
+ \ifx\p_height\empty \else
+ height = \number\dimexpr\p_height,
+ \fi
+ } }%
+ \ctxlua{figures.identify()}%
+ % also mode: check presence only
+ \ifconditional\c_grph_include_test_only
+ \ifcase\figurestatus \else
+ \ctxlua{figures.check()}%
+ \ctxlua{figures.dummy()}%
+ \ctxlua{figures.scale()}%
+ \ctxlua{figures.done()}%
+ \fi
+ \grph_include_set_mode
+ \else
+ \ifcase\figurestatus
+ \ctxlua{figures.dummy()}%
+ \ctxlua{figures.scale()}%
+ \else
+ \ctxlua{figures.check()}%
+ \ctxlua{figures.include()}%
+ \ctxlua{figures.scale()}%
+ \fi
+ \ctxlua{figures.done()}%
+ \grph_include_set_mode
+ \grph_include_finalize
+ \fi
+ \ctxlua{figures.pop()}%
+ \dotagfigure
+ \naturalvbox attr \imageattribute 2 {\box\foundexternalfigure}%
+ \dostoptagged
+ \egroup}
+
+%D Scaling:
+
+\let\dowithfigure\relax % name might change (into a proper hook)
+
+\unexpanded\def\doscalefigure % used at lua end
+ {\global\setbox\foundexternalfigure\vbox{\scale[\v!figure]{\dowithfigure{\box\foundexternalfigure}}}}
+
+\definescale % some day we will inherit
+ [\v!figure]
+ [\c!hfactor =\externalfigureparameter\c!hfactor,
+ \c!wfactor =\externalfigureparameter\c!wfactor,
+ \c!factor =\externalfigureparameter\c!factor,
+ \c!maxwidth =\externalfigureparameter\c!maxwidth ,
+ \c!maxheight =\externalfigureparameter\c!maxheight,
+ \c!equalwidth =\externalfigureparameter\c!equalwidth ,
+ \c!equalheight=\externalfigureparameter\c!equalheight,
+ \c!xscale =\externalfigureparameter\c!xscale,
+ \c!yscale =\externalfigureparameter\c!yscale,
+ \c!scale =\externalfigureparameter\c!scale,
+ \c!sx =\externalfigureparameter\c!sx,
+ \c!sy =\externalfigureparameter\c!sy,
+ \c!s =\externalfigureparameter\c!s,
+ \c!width =\externalfigureparameter\c!width,
+ \c!height =\externalfigureparameter\c!height,
+ \c!lines =\externalfigureparameter\c!lines]
+
+% % this will become:
+%
+% \unexpanded\def\doscalefigure % used at lua end
+% {\global\setbox\foundexternalfigure\vbox\bgroup
+% \bgroup
+% \let\currentscale\currentexternalfigure
+% \let\scaleparameter\externalfigureparameter
+% \dowithnextboxcs\grph_scale_finish\hbox{\dowithfigure{\box\foundexternalfigure}}%
+% \egroup}
+%
+% % or even better:
+%
+% \def\grph_scale_inherited#1%
+% {\bgroup
+% \expandafter\let\expandafter\currentscale \csname current#1\endcsname
+% \expandafter\let\expandafter\scaleparameter\csname #1parameter\endcsname
+% \dowithnextboxcs\grph_scale_finish\hbox}
+%
+% \unexpanded\def\doscalefigure % used at lua end
+% {\global\setbox\foundexternalfigure\vbox\bgroup
+% \grph_scale_inherited{externalfigure}{\dowithfigure{\box\foundexternalfigure}}%
+% \egroup}
%D You can register additional suffixes with the following command:
%D
@@ -57,52 +412,55 @@
%D \stoptyping
\unexpanded\def\definegraphictypesynonym
- {\dodoubleargument\dodefinegraphictypesynonym}
+ {\dodoubleargument\grph_include_set_type_synonym}
-\def\dodefinegraphictypesynonym[#1][#2]%
+\def\grph_include_set_type_synonym[#1][#2]%
{\ctxlua{figures.registersuffix("#1","#2")}}
%D Additional paths can be installed with the regular setup command. The next
%D macro picks up the list.
-\def\setfigurepathlist
- {\ctxlua{figures.setpaths("\@@exlocation",\!!bs\@@exdirectory\!!es)}}
+\unexpanded\def\setfigurepathlist
+ {\ctxlua{figures.setpaths("\externalfigureparameter\c!location",\!!bs\externalfigureparameter\c!directory\!!es)}}
%D Variables:
+\newbox \foundexternalfigure
+\newtoks\externalfigurepostprocessors
+
\def\defaultfigurewidth {8\lineheight}
\def\defaultfigureheight {6\lineheight}
-\def\figurestatus {\numexpr\ctxlua{figures.tprint("status","status",0)}\relax} % number: 0 = not found
-\def\figurewidth {\ctxlua{figures.tprint("status","width",0)}sp}
-\def\figureheight {\ctxlua{figures.tprint("status","height",0)}sp}
-\def\figurexscale {\ctxlua{figures.tprint("status","xscale",1)}}
-\def\figureyscale {\ctxlua{figures.tprint("status","yscale",1)}}
-
-\def\figuresize {\ctxlua{figures.tprint("request","size")}}
-\def\figurelabel {\ctxlua{figures.tprint("request","label")}}
-\def\figurefileoriginal {\ctxlua{figures.tprint("request","name")}}
-\def\figurefilepage {\ctxlua{figures.tprint("request","page",1)}}
-\def\figurefileoptions {\ctxlua{figures.tprint("request","options")}}
-\def\figurefileconversion{\ctxlua{figures.tprint("request","conversion")}}
-\def\figurefilecache {\ctxlua{figures.tprint("request","cache")}}
-\def\figurefileprefix {\ctxlua{figures.tprint("request","prefix")}}
-
-\def\figurenaturalwidth {\ctxlua{figures.tprint("used","width", \number\dimexpr\defaultfigurewidth \relax)}sp}
-\def\figurenaturalheight {\ctxlua{figures.tprint("used","height",\number\dimexpr\defaultfigureheight\relax)}sp}
-\def\figurexresolution {\ctxlua{figures.tprint("used","xresolution",0)}}
-\def\figureyresolution {\ctxlua{figures.tprint("used","yresolution",0)}}
-\def\figurexsize {\ctxlua{figures.tprint("used","xsize",0)}}
-\def\figureysize {\ctxlua{figures.tprint("used","ysize",0)}}
-\def\figurecolordepth {\ctxlua{figures.tprint("used","colordepth",0)}}
-\def\figuredepth {\ctxlua{figures.tprint("used","depth",0)}}
-
-\def\figurefullname {\ctxlua{figures.tprint("used","fullname")}}
-\def\noffigurepages {\ctxlua{figures.tprint("used","pages",0)}}
-
-\def\figurefilepath {\cldcontext{file.dirname (figures.get("used","fullname"))}}
-\def\figurefilename {\cldcontext{file.nameonly(figures.get("used","fullname"))}}
-\def\figurefiletype {\cldcontext{file.extname (figures.get("used","fullname"))}}
+\def\figurestatus {\numexpr\ctxcommand{figurestatus("status",0)}\relax} % number: 0 = not found
+\def\figurewidth {\ctxcommand{figurestatus("width",0)}sp}
+\def\figureheight {\ctxcommand{figurestatus("height",0)}sp}
+\def\figurexscale {\ctxcommand{figurestatus("xscale",1)}}
+\def\figureyscale {\ctxcommand{figurestatus("yscale",1)}}
+
+\def\figuresize {\ctxcommand{figurerequest("size")}}
+\def\figurelabel {\ctxcommand{figurerequest("label")}}
+\def\figurefileoriginal {\ctxcommand{figurerequest("name")}}
+\def\figurefilepage {\ctxcommand{figurerequest("page",1)}}
+\def\figurefileoptions {\ctxcommand{figurerequest("options")}}
+\def\figurefileconversion{\ctxcommand{figurerequest("conversion")}}
+\def\figurefilecache {\ctxcommand{figurerequest("cache")}}
+\def\figurefileprefix {\ctxcommand{figurerequest("prefix")}}
+
+\def\figurenaturalwidth {\ctxcommand{figureused("width", \number\dimexpr\defaultfigurewidth \relax)}sp}
+\def\figurenaturalheight {\ctxcommand{figureused("height",\number\dimexpr\defaultfigureheight\relax)}sp}
+\def\figurexresolution {\ctxcommand{figureused("xresolution",0)}}
+\def\figureyresolution {\ctxcommand{figureused("yresolution",0)}}
+\def\figurexsize {\ctxcommand{figureused("xsize",0)}}
+\def\figureysize {\ctxcommand{figureused("ysize",0)}}
+\def\figurecolordepth {\ctxcommand{figureused("colordepth",0)}}
+\def\figuredepth {\ctxcommand{figureused("depth",0)}}
+
+\def\figurefullname {\ctxcommand{figureused("fullname")}}
+\def\noffigurepages {\ctxcommand{figureused("pages",0)}}
+
+\def\figurefilepath {\ctxcommand{figurefilepath()}}
+\def\figurefilename {\ctxcommand{figurefilename()}}
+\def\figurefiletype {\ctxcommand{figurefiletype()}}
\let\naturalfigurewidth \figurenaturalwidth
\let\naturalfigureheight \figurenaturalheight
@@ -112,209 +470,84 @@
\let\figurescalexscale \figurexscale
\let\figurescaleyscale \figureyscale
+%D Abuse:
+%D
+%D \starttyping
+%D \externalfigure[rubbish.pdf] \ifcase\figurestatus\relax \ctxlua{os.exit(999)} \fi
+%D \stoptyping
+
+%D Calculating:
+
+% \enabletrackers[figures.conversion]
+% \externalfigure[demo.svg]
+% \externalfigure[demo.svg][conversion=png]
+
+%D The following registers are used (if only to be downward compatible).
+
+\newconditional\c_grph_include_skip
+\newconditional\c_grph_include_test_only
+\newconditional\c_grph_include_level \setfalse\c_grph_include_level % true=background false=normal
+\newconditional\c_grph_include_flush \settrue \c_grph_include_flush % true=place false=ignore
+
+\newsystemmode\v!figure
+
+\def\grph_include_set_mode
+ {\ifcase\figurestatus
+ \global\resetsystemmode\v!figure % todo, also: \v!resource
+ \else
+ \global\setsystemmode \v!figure % todo, also: \v!resource
+ \fi}
+
\appendtoks
- \ctxlua { % figures.defaultwidth .. why not dimen
- figures.setpaths("\@@exlocation","\@@exdirectory") ;
+ \ctxlua { % figures.defaultwidth .. maybe a dimen some day
+ figures.setpaths("\externalfigureparameter\c!location","\externalfigureparameter\c!directory") ;
figures.defaultwidth = \number\dimexpr\defaultfigurewidth \relax ;
figures.defaultheight = \number\dimexpr\defaultfigureheight\relax ;
figures.boxnumber = \number\foundexternalfigure ;
}%
\to \everyexternalfigureresets
-%D In some situations we need to make sure that the figure related variables
-%D are reset. This is especially important when we are nesting. Is this still
-%D needed in \MKIV.
-
-\def\resetexternalfigures
- {\let\@@efoption \empty % \let\@@efprefix\empty
- \let\@@efmaxwidth \empty % \let\@@efcache \empty
- \let\@@efmaxheight \empty % \let\@@efframe \v!off
- \let\@@efforegroundcolor\empty
- \let\@@efcolor \empty
- \let\@@efconversion \empty
- \let\@@efbackground \empty
- \let\@@efresolution \empty}
-
-\appendtoks \resetexternalfigures \to \everyoverlay
-\appendtoks \resetexternalfigures \to \everybeforepagebody % not really needed
-
-\def\resetfigureusersettings % if we use a command handler we can simply define a new instance
- {%
- \let\@@efmethod \empty
- \let\@@eflabel \empty
- \let\@@efsize \empty
- \let\@@efconversion\@@exconversion
- \let\@@efresolution\@@exresolution
- \let\@@efprefix \@@exprefix
- \let\@@efcache \@@excache
- \let\@@efpage \!!zerocount
- \let\@@efobject \@@exobject
- \let\@@efdisplay \empty
- \let\@@efmask \empty
- %
- \let\@@efpreset \v!yes
- \let\@@efsplit \empty
- \let\@@efcolor \empty
- %
- \let\@@efsymbol \v!no
- %
- \let\@@efcontrols \v!no
- \let\@@efresources \empty
- \let\@@efpreview \v!no
- \let\@@efrepeat \v!no
- %
- \let\@@efforegroundcolor\empty
- %
- \let\@@efinteraction\@@exinteraction
- %
- \let\@@efhfactor \empty
- \let\@@efwfactor \empty
- \let\@@effactor \empty
- \let\@@efmaxwidth \@@exmaxwidth
- \let\@@efmaxheight \@@exmaxheight
- \let\@@efxscale \empty
- \let\@@efyscale \empty
- \let\@@efscale \empty
- \let\@@efsx \!!plusone
- \let\@@efsy \!!plusone
- \let\@@efwidth \empty
- \let\@@efheight \empty
- \let\@@eflines \empty
- \let\@@efgrid \empty}
-
-\resetfigureusersettings
-
\appendtoks
- \resetfigureusersettings
-\to \everyexternalfigureresets
-
-\def\checkfigureusersettings
- {% old features
- \doif\@@exoption\v!frame
- {\let\@@efframe\v!on}%
- \doif\@@exoption\v!empty
- {\skipexternalfigurestrue
- \let\@@efframe\v!off}%
- \doifsomething\@@efwidth {\doifdimensionelse\@@efwidth {\edef\@@efwidth {\the\dimexpr\@@efwidth }}\donothing}%
- \doifsomething\@@efheight{\doifdimensionelse\@@efheight{\edef\@@efheight{\the\dimexpr\@@efheight}}\donothing}%
- % fake color in gray bitmaps, assumes that
- % a transparent color is used
- \doifsomething\@@efforegroundcolor
- {\def\@@efbackground{\v!foreground,\v!color}%
- \def\@@efbackgroundcolor{\@@efforegroundcolor}}}
-
-\appendtoks
- \checkfigureusersettings
+ \edef\p_option{\externalfigureparameter\c!option}%
+ \ifx\p_option\v!frame
+ \setfalse\c_grph_include_skip
+ \letexternalfigureparameter\c!frame\v!on
+ \else\ifx\p_option\v!empty
+ \settrue\c_grph_include_skip
+ \letexternalfigureparameter\c!frame\v!off
+ \else
+ \setfalse\c_grph_include_skip
+ \fi\fi
+ % fake color in gray bitmaps, assumes that
+ % a transparent color is used
+ \edef\p_foregroundcolor{\externalfigureparameter\c!foregroundcolor}%
+ \ifx\p_foregroundcolor\empty \else
+ \setexternalfigureparameter\c!background{\v!foreground,\v!color}%
+ \letexternalfigureparameter\c!backgroundcolor\p_foregroundcolor
+ \fi
\to \everyexternalfigurechecks
%D Internal graphics are handled at the \TEX\ end:
-\def\doprocesstexlikefigure#1% retrofit into mkii
+\def\grph_include_process_tex#1%
{\global\setbox\foundexternalfigure\vbox\framed
[\c!strut=\v!no,\c!align=\v!normal,\c!frame=\v!off,
\c!offset=\v!overlay,\c!width=\v!fit,\c!height=\v!fit]
{\blank[\v!disable]#1\endgraf\removelastskip}} % disable should stay here!
-\def\doprocessmpslikefigure#1% retrofit into mkii
+\def\grph_include_process_mps#1% retrofit into mkii
{\global\setbox\foundexternalfigure\vbox{\convertMPtoPDF{#1}11}}
-\def\doprocesscldlikefigure#1%
+\def\grph_include_process_cld#1%
{\global\setbox\foundexternalfigure\vbox{\cldprocessfile{#1}}}
-\def\docheckfigurebuffer #1{\doprocesstexlikefigure{\getbuffer[#1]}}
-\def\docheckfiguretex #1{\doprocesstexlikefigure{\input{#1}}}
-\def\docheckfigurecld #1{\doprocesscldlikefigure{#1}} % we can always add cldrun
-\def\docheckfiguremps #1{\doprocessmpslikefigure{#1}}
-\def\docheckfiguremprun #1#2{\doprocesstexlikefigure{\useMPrun{#1}{#2}}}
-
-% \def\doscalefigure
-% {\global\setbox\foundexternalfigure\vbox{\doscalebox\??ef{\dowithfigure{\box\foundexternalfigure}}}}
-
-\def\doscalefigure
- {\global\setbox\foundexternalfigure\vbox{\scale[\v!figure]{\dowithfigure{\box\foundexternalfigure}}}}
-
-% quick hack: chaining \??ef via \s!parent happens when this is also a commandhandler
-
-\definescale
- [\v!figure]
- [\c!hfactor =\@@efhfactor ,
- \c!wfactor =\@@efwfactor ,
- \c!factor =\@@effactor ,
- \c!maxwidth =\@@efmaxwidth ,
- \c!maxheight=\@@efmaxheight,
- \c!xscale =\@@efxscale ,
- \c!yscale =\@@efyscale ,
- \c!scale =\@@efscale ,
- \c!sx =\@@efsx ,
- \c!sy =\@@efsy ,
- \c!width =\@@efwidth ,
- \c!height =\@@efheight ,
- \c!lines =\@@eflines ]
-
-\newconditional\testexternalfigureonly
-
-% \enabletrackers[figures.conversion]
-% \externalfigure[demo.svg]
-% \externalfigure[demo.svg][conversion=png]
-
-\unexpanded\def\calculateexternalfigure[#1][#2][#3][#4][#5][#6]% \cmd label filename parent_id preset current
- {\dontcomplain
- \restorecatcodes
- \forgetall
- \resetfigurevariables
- \dosetefparameters{#4}{#5}{#6}%
- \checkfigurevariables
-% \begingroup
-% \color[\@@efcolor]{\xdef\globcolorattr{\internalspotcolorname}}
-% \endgroup
- \ctxlua{figures.push {
- name = "#3",
- label = "#2", % todo: \@eflabel
- page = "\@@efpage",
- size = "\@@efsize",
- object = "\@@efobject",
- prefix = "\@@efprefix",
- cache = "\@@efcache",
- format = "\@@efmethod",
- preset = "\@@efprefix",
- controls = "\@@efcontrols",
- resources = "\@@efresources",
- preview = "\@@efpreview",
- display = "\@@efdisplay",
- mask = "\@@efmask",
- conversion = "\@@efconversion",
- resolution = "\@@efresolution",
- color = "\internalspotcolorparent\@@efcolor", % hack is needed
- ["repeat"] = "\@@efrepeat",
- width = "\@@efwidth", % can be crap
- height = "\@@efheight", % can be crap
- } }%
- \ctxlua{figures.identify()}%
- % also mode: checkpresense only
- \ifconditional\testexternalfigureonly
- \ifcase\figurestatus \else
- \ctxlua{figures.check()}%
- \ctxlua{figures.dummy()}%
- \ctxlua{figures.scale()}%
- \ctxlua{figures.done()}%
- \fi
- \signalexternalfigure
- \else
- \ifcase\figurestatus
- \ctxlua{figures.dummy()}%
- \ctxlua{figures.scale()}%
- \else
- \ctxlua{figures.check()}%
- \ctxlua{figures.include()}%
- \ctxlua{figures.scale()}%
- \fi
- \ctxlua{figures.done()}%
- \signalexternalfigure
- \finishexternalfigure
- \fi
- \ctxlua{figures.pop()}}
+\unexpanded\def\docheckfigurebuffer #1{\grph_include_process_tex{\getbuffer[#1]}} % used at lua end
+\unexpanded\def\docheckfiguretex #1{\grph_include_process_tex{\input{#1}}} % used at lua end
+\unexpanded\def\docheckfigurecld #1{\grph_include_process_cld{#1}} % used at lua end
+\unexpanded\def\docheckfiguremps #1{\grph_include_process_mps{#1}} % used at lua end
+\unexpanded\def\docheckfiguremprun #1#2{\grph_include_process_tex{\useMPrun{#1}{#2}}} % used at lua end
-\def\relocateexternalfigure % easier here than in lua
+\unexpanded\def\relocateexternalfigure % easier here than in lua
{\global\setbox\foundexternalfigure\vbox to \ht\foundexternalfigure\bgroup
\vss
\ht\foundexternalfigure\zeropoint
@@ -324,33 +557,28 @@
\egroup
\egroup}
-\newsystemmode\v!figure
-
-\def\signalexternalfigure % global (dup call to status)
- {\ifcase\figurestatus
- \global\resetsystemmode\v!figure % todo, also: \v!resource
- \else
- \global\setsystemmode \v!figure % todo, also: \v!resource
- \fi}
-
\unexpanded\def\startfoundexternalfigure#1#2% ht wd
{\global\setbox\foundexternalfigure\vbox to #2\bgroup\vss\hbox to #1\bgroup}
\unexpanded\def\stopfoundexternalfigure
{\hss\egroup\egroup}
-\def\emptyfoundexternalfigure
+\unexpanded\def\emptyfoundexternalfigure % sort of obsolete
{\startfoundexternalfigure\defaultfigurewidth\defaultfigureheight
\stopfoundexternalfigure}
-\def\finishexternalfigure % here we use \figurevariables
+% \doifmodeelse{*\v!last}
+% {\settrue \c_grph_include_flush}
+% {\setfalse\c_grph_include_flush}%
+
+\def\grph_include_finalize
{\global\setbox\foundexternalfigure\vbox
{\ifcase\figurestatus
- \let\@@efframe\v!on
+ \letexternalfigureparameter\c!frame\v!on
\fi
- \ifconditional\externalfigureflush
- \ifconditional\externalfigurelevel % probably background
- \ifskipexternalfigures
+ \ifconditional\c_grph_include_flush
+ \ifconditional\c_grph_include_level % probably background
+ \ifconditional\c_grph_include_skip
% nothing
\fakebox\foundexternalfigure
\else\ifcase\figurestatus
@@ -361,28 +589,29 @@
\fi\fi
\else
\iftrialtypesetting \else \feedbackexternalfigure \fi
- \settrue\externalfigurelevel
- \ifskipexternalfigures
+ \settrue\c_grph_include_level
+ \ifconditional\c_grph_include_skip
\ifcase\figurestatus
- \externalfigurereplacement\figurelabel\figurefileoriginal{unknown}%
+ \grph_include_replacement\figurelabel\figurefileoriginal{unknown}%
\else
- \externalfigurereplacement\figurelabel\figurefullname{skipped}%
+ \grph_include_replacement\figurelabel\figurefullname{skipped}%
\fi
\else\ifcase\figurestatus
- \externalfigurereplacement\figurelabel\figurefileoriginal{unknown}%
+ \grph_include_replacement\figurelabel\figurefileoriginal{unknown}%
\else
\the\externalfigurepostprocessors
- \doifelse\@@efreset\v!yes
- {\wd\foundexternalfigure\figurewidth
- \ht\foundexternalfigure\figureheight
- \dp\foundexternalfigure\zeropoint
- \box\foundexternalfigure}
- {\localframed % should also be applied to high res !
- [\??ef]
- [\c!offset=\v!overlay,
- \c!width=\figurewidth,
- \c!height=\figureheight]
- {\vfilll\box\foundexternalfigure}}%
+ \edef\p_reset{\externalfigureparameter\c!reset}%
+ \ifx\p_reset\v!yes
+ \wd\foundexternalfigure\figurewidth
+ \ht\foundexternalfigure\figureheight
+ \dp\foundexternalfigure\zeropoint
+ \box\foundexternalfigure
+ \else
+ \letexternalfigureparameter\c!offset\v!overlay
+ \letexternalfigureparameter\c!width \figurewidth
+ \letexternalfigureparameter\c!height\figureheight
+ \inheritedexternalfigureframed{\vfilll\box\foundexternalfigure}%
+ \fi
\fi\fi
\fi
\else
@@ -390,35 +619,17 @@
\iftrialtypesetting \else \feedbackexternalfigure \fi
\fi}}
-\ifx\externalfigurereplacement\undefined\let\externalfigurereplacement\gobblethreearguments\fi
-\ifx\externalfigureplaceholder\undefined\let\externalfigureplaceholder\gobblethreearguments\fi
+\let\feedbackexternalfigure\relax % hook
-\let\feedbackexternalfigure\relax % \gobblefourarguments
-\let\dowithfigure \relax
+\unexpanded\def\getfiguredimensions
+ {\dodoubleempty\grph_include_get_dimensions}
-\def\getfiguredimensions
- {\dodoubleempty\dogetfiguredimensions}
-
-\def\dogetfiguredimensions[#1][#2]%
+\def\grph_include_get_dimensions[#1][#2]%
{\startnointerference
- \settrue\testexternalfigureonly
+ \settrue\c_grph_include_test_only
\externalfigure[#1][#2,\c!display=,\c!mask=,\c!object=\v!no]%
\stopnointerference}
-\let\getfiguredimensionsonly\getfiguredimensions
-
-% \unexpanded\def\doiffigureelse#1%
-% {\getfiguredimensions[#1]% so data is available !
-% \ifdim\figurewidth=\zeropoint % todo: \figurestatus
-% \expandafter\secondoftwoarguments
-% \else
-% \expandafter\firstoftwoarguments
-% \fi}
-
-% \unexpanded\def\doiffigureelse#1% just low level call
-% {\getfiguredimensions[#1]% so data is available ! ... grouped so status is local
-% \doifmodeelse{*\v!figure}\firstoftwoarguments\secondoftwoarguments} % mode is set global
-
\unexpanded\def\doiffigureelse#1%
{\getfiguredimensions[#1]% so data is available !
\ifcase\figurestatus
@@ -427,74 +638,138 @@
\expandafter\firstoftwoarguments
\fi}
-\def\registerexternalfigure % no placement, handy for preprocessing
- {\dotripleempty\doregisterexternalfigure}
+% No placement, handy for preprocessing:
-\def\doregisterexternalfigure[#1][#2][#3]%
+\unexpanded\def\registerexternalfigure
+ {\dotripleempty\grph_include_register}
+
+\def\grph_include_register[#1][#2][#3]%
{\startnointerference
- \testexternalfigureonly
- \setfalse\externalfigureflush % == test ?
- \externalfigure[#1][#2][#3]% or \doexternalfigure
+ \c_grph_include_test_only
+ \setfalse\c_grph_include_flush % == test ?
+ \externalfigure[#1][#2][#3]% or
\externalfigure[#1][#2,\c!display=,\c!mask=,\c!object=\v!no]%
\stopnointerference}
-% helpers (will be replaced when xforms are accessible at the lua end)
+% Helpers (will be replaced when xforms are accessible at the lua end)
-\def\dosetfigureobject#1%
+\unexpanded\def\dosetfigureobject#1%
{\setobject{FIG}{#1}\vbox{\box\foundexternalfigure}}
-\def\doboxfigureobject#1%
+\unexpanded\def\doboxfigureobject#1%
{\global\setbox\foundexternalfigure\vbox{\getobject{FIG}{#1}}} % probably one vbox too many
-% figurebases
+% Figure bases
-\def\usefigurebase[#1]%
+\unexpanded\def\usefigurebase[#1]%
{\ctxlua{figures.bases.use("#1")}}
-\protect \endinput
+\appendtoks
+ \setfigurepathlist % the path may be used elsewhere too (as in x-res-04)
+\to \everysetupexternalfigure
-% \startbuffer
-% \definecolor [blue] [c=1,m=.38,y=0,k=.64]
-% \definecolor [yellow] [c=0,m=.28,y=1,k=.06]
-%
-% \definespotcolor [blue-100] [blue] [p=1]
-% \definespotcolor [yellow-100] [yellow] [p=1]
-%
-% \definemultitonecolor [combicolor] [blue=.12,yellow=.28] [c=.1,m=.1,y=.3,k=.1]
-%
-% \definemultitonecolor [combicolor-b] [blue=1] [c=1,m=.38,y=0,k=.64] % force multitone
-% \definemultitonecolor [combicolor-y] [yellow=1] [c=0,m=.28,y=1,k=.06] % force multitone
-%
-% \useexternalfigure[demo-a][mill.png] [object=no,width=.2\textwidth]
-% \useexternalfigure[demo-b][hacker-bw.jpg][object=no,width=.2\textwidth]
-%
-% \startbaselinecorrection \startcombination[4*1]
-% {\externalfigure[demo-a]} {no color}
-% {\externalfigure[demo-a][color=combicolor]} {indexed duotone}
-% {\externalfigure[demo-a][color=combicolor-b]} {spot color}
-% {\externalfigure[demo-a][color=combicolor-y]} {spot color}
-% \stopcombination \stopbaselinecorrection
-%
-% \startbaselinecorrection \startcombination[4*1]
-% {\externalfigure[demo-b]} {no color}
-% {\externalfigure[demo-b][color=combicolor]} {indexed duotone}
-% {\externalfigure[demo-b][color=combicolor-b]} {spot color}
-% {\externalfigure[demo-b][color=combicolor-y]} {spot color}
-% \stopcombination \stopbaselinecorrection
-%
-% \startbaselinecorrection \startcombination[4*1]
-% {\externalfigure[demo-a]} {no color}
-% {\externalfigure[demo-a][color=combicolor]} {indexed duotone}
-% {\externalfigure[demo-a][color=blue-100]} {spot color}
-% {\externalfigure[demo-a][color=yellow-100]} {spot color}
-% \stopcombination \stopbaselinecorrection
-%
-% \startbaselinecorrection \startcombination[4*1]
-% {\externalfigure[demo-b]} {no color}
-% {\externalfigure[demo-b][color=combicolor]} {indexed duotone}
-% {\externalfigure[demo-b][color=blue-100]} {spot color}
-% {\externalfigure[demo-b][color=yellow-100]} {spot color}
-% \stopcombination \stopbaselinecorrection
-% \stopbuffer
-%
-% \getbuffer \typebuffer
+\appendtoks
+ \ctxcommand{setfigurelookuporder("\externalfigureparameter\c!order")}%
+\to \everysetupexternalfigure
+
+\definecolor[missingfigurecolor][s=.8]
+
+\def\grph_include_replacement#1#2#3%
+ {\bgroup
+ \letexternalfigureparameter\c!width\figurewidth
+ \letexternalfigureparameter\c!height\figureheight
+ \letexternalfigureparameter\c!background\v!color
+ \setexternalfigureparameter\c!backgroundcolor{missingfigurecolor}%
+ \setexternalfigureparameter\c!align{\v!middle,\v!lohi}% we default to \v!none
+ \inheritedexternalfigureframed
+ {\tt\tfxx \nohyphens
+ name: \expanded{\verbatimstring{#1}}\\%
+ file: \expanded{\verbatimstring{#2}}\\%
+ state: \expanded{\verbatimstring{#3}}}%
+ \egroup}
+
+% maybe setuphandler
+
+\newconditional\c_grph_include_in_collection
+
+\newdimen\d_grph_include_collection_minwidth
+\newdimen\d_grph_include_collection_maxwidth
+\newdimen\d_grph_include_collection_minheight
+\newdimen\d_grph_include_collection_maxheight
+
+\def\grph_include_analyze_collection[#1][#2]%
+ {\ifconditional\c_grph_include_in_collection
+ \setfalse\c_grph_include_in_collection
+ \getfiguredimensions[#1][#2]%
+ \settrue\c_grph_include_in_collection
+ \scratchdimen\naturalfigurewidth
+ \ifdim\scratchdimen>\d_grph_include_collection_maxwidth \d_grph_include_collection_maxwidth \scratchdimen \fi
+ \ifdim\scratchdimen<\d_grph_include_collection_minwidth \d_grph_include_collection_minwidth \scratchdimen \fi
+ \scratchdimen\naturalfigureheight
+ \ifdim\scratchdimen>\d_grph_include_collection_maxheight \d_grph_include_collection_maxheight\scratchdimen \fi
+ \ifdim\scratchdimen<\d_grph_include_collection_minheight \d_grph_include_collection_minheight\scratchdimen \fi
+ \fi}
+
+\unexpanded\def\startexternalfigurecollection[#1]%
+ {\begingroup
+ \def\currentexternalfigurecollection{#1}%
+ \settrue\c_grph_include_in_collection
+ \d_grph_include_collection_minwidth \maxdimen
+ \d_grph_include_collection_maxwidth \zeropoint
+ \d_grph_include_collection_minheight\maxdimen
+ \d_grph_include_collection_maxheight\zeropoint}
+
+\unexpanded\def\stopexternalfigurecollection
+ {\setxvalue{\??externalfigurecollection\currentexternalfigurecollection:\c!minwidth }{\the\d_grph_include_collection_minwidth }%
+ \setxvalue{\??externalfigurecollection\currentexternalfigurecollection:\c!maxwidth }{\the\d_grph_include_collection_maxwidth }%
+ \setxvalue{\??externalfigurecollection\currentexternalfigurecollection:\c!minheight}{\the\d_grph_include_collection_minheight}%
+ \setxvalue{\??externalfigurecollection\currentexternalfigurecollection:\c!maxheight}{\the\d_grph_include_collection_maxheight}%
+ \endgroup}
+
+\def\externalfigurecollectionparameter#1#2%
+ {\csname
+ \ifcsname\??externalfigurecollection#1:#2\endcsname
+ \??externalfigurecollection#1:#2%
+ \else
+ \s!empty
+ \fi
+ \endcsname}
+
+\def\externalfigurecollectionminwidth #1{\externalfigurecollectionparameter{#1}\c!minwidth }
+\def\externalfigurecollectionmaxwidth #1{\externalfigurecollectionparameter{#1}\c!maxwidth }
+\def\externalfigurecollectionminheight#1{\externalfigurecollectionparameter{#1}\c!minheight}
+\def\externalfigurecollectionmaxheight#1{\externalfigurecollectionparameter{#1}\c!maxheight}
+
+\let\efcparameter\externalfigurecollectionparameter % still needed ?
+\let\efcminwidth \externalfigurecollectionminwidth % still needed ?
+\let\efcmaxwidth \externalfigurecollectionmaxwidth % still needed ?
+\let\efcminheight\externalfigurecollectionminheight % still needed ?
+\let\efcmaxheight\externalfigurecollectionmaxheight % still needed ?
+
+% \startexternalfigurecollection[name]
+% \useexternalfigure[cow] [cow.pdf]
+% \useexternalfigure[mill][mill.png]
+% \stopexternalfigurecollection
+% \starttext
+% \bTABLE
+% \bTR
+% \bTD \externalfigure[cow] [height=\externalfigurecollectionmaxheight{name}] \eTD
+% \bTD \externalfigure[mill][height=\externalfigurecollectionmaxheight{name}] \eTD
+% \eTR
+% \eTABLE
+% \stoptext
+
+\unexpanded\def\showexternalfigures
+ {\writestatus\m!system{the \string\showexternalfigures\space command is not (yet) implemented in mkiv}}
+
+\unexpanded\def\overlayfigure#1%
+ {\externalfigure[#1][\c!width=\overlaywidth,\c!height=\overlayheight]}
+
+% Bonus:
+
+\useexternalfigure
+ [buffer]
+ [\jobname.buffer]
+ [\c!object=\v!no]
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/grph-raw.lua b/Master/texmf-dist/tex/context/base/grph-raw.lua
index 361f6944d78..4c5b031ea98 100644
--- a/Master/texmf-dist/tex/context/base/grph-raw.lua
+++ b/Master/texmf-dist/tex/context/base/grph-raw.lua
@@ -10,9 +10,12 @@ if not modules then modules = { } end modules ['grph-raw'] = {
-- her gnuplot project. It's somewhat preliminary code but it
-- works ok for that purpose.
+local tonumber = tonumber
+
local report_bitmap = logs.reporter("graphics","bitmaps")
-local texsp = tex.sp
+local context = context
+local texsp = tex.sp
function figures.bitmapimage(t)
local data = t.data
diff --git a/Master/texmf-dist/tex/context/base/grph-raw.mkiv b/Master/texmf-dist/tex/context/base/grph-raw.mkiv
index 5b488cf5837..1c683556412 100644
--- a/Master/texmf-dist/tex/context/base/grph-raw.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-raw.mkiv
@@ -45,14 +45,14 @@
\unexpanded\def\bitmapimage[#1]#2%
{\hbox\bgroup
- \getparameters[\??gb][\c!color=rgb,\c!width=,\c!height=,\c!x=,\c!y=,#1]%
- \ctxlua{figures.bitmapimage {
+ \getdummyparameters[\c!color=rgb,\c!width=,\c!height=,\c!x=,\c!y=,#1]%
+ \ctxlua{figures.bitmapimage { % we could pass #1 directly ... todo
data = \!!bs#2\!!es,
- xresolution = "\@@gbx",
- yresolution = "\@@gby",
- colorspace = "\@@gbcolor",
- width = "\@@gbwidth",
- height = "\@@gbheight"
+ colorspace = "\directdummyparameter\c!color",
+ width = "\directdummyparameter\c!width",
+ height = "\directdummyparameter\c!height",
+ xresolution = "\directdummyparameter\c!x",
+ yresolution = "\directdummyparameter\c!y",
}}%
\egroup}
diff --git a/Master/texmf-dist/tex/context/base/grph-swf.lua b/Master/texmf-dist/tex/context/base/grph-swf.lua
index 88eed021a22..8c28b76afae 100644
--- a/Master/texmf-dist/tex/context/base/grph-swf.lua
+++ b/Master/texmf-dist/tex/context/base/grph-swf.lua
@@ -6,19 +6,20 @@ if not modules then modules = { } end modules ['grph-swf'] = {
license = "see context related readme files"
}
+-- maybe: backends.codeinjections.insertswf
+
local sub, format, match, byte = string.sub, string.format, string.match, string.byte
-local readstring, readnumber = io.readstring, io.readnumber
local concat = table.concat
local floor = math.floor
local tonumber = tonumber
-local tobitstring = number.tobitstring
-
-local todimen = number.todimen
+local readstring = io.readstring
+local readnumber = io.readnumber
+local tobitstring = number.tobitstring
+local todimen = number.todimen
local nodeinjections = backends.nodeinjections
-
-local figures = figures
-local context = context
+local figures = figures
+local context = context
local function getheader(name)
local f = io.open(name,"rb")
@@ -74,16 +75,16 @@ function figures.checkers.swf(data)
dr.width, dr.height = width, height
du.width, du.height, du.foundname = width, height, foundname
context.startfoundexternalfigure(todimen(width),todimen(height))
- nodeinjections.insertswf {
- foundname = foundname,
- width = width,
- height = height,
- -- factor = number.dimenfactors.bp,
- display = dr.display,
- controls = dr.controls,
- -- label = dr.label,
- resources = dr.resources,
- }
+ nodeinjections.insertswf {
+ foundname = foundname,
+ width = width,
+ height = height,
+ -- factor = number.dimenfactors.bp,
+ display = dr.display,
+ controls = dr.controls,
+ -- label = dr.label,
+ resources = dr.resources,
+ }
context.stopfoundexternalfigure()
return data
end
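The getheader helper used by checkers.swf above is outside this hunk; it essentially reads the fixed swf header fields before the dimensions rectangle. For orientation only, a minimal self-contained probe in plain Lua (not the io.readstring/io.readnumber helpers imported in the patch):

    local function swfsignature(name)
        local f = io.open(name,"rb")
        if not f then
            return nil
        end
        local signature = f:read(3)              -- "FWS" = uncompressed, "CWS" = zlib compressed body
        local version   = string.byte(f:read(1))
        local length    = 0                      -- total file length, little endian 32 bit
        for i=0,3 do
            length = length + string.byte(f:read(1)) * 256^i
        end
        f:close()
        return signature, version, length
    end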
diff --git a/Master/texmf-dist/tex/context/base/grph-trf.mkiv b/Master/texmf-dist/tex/context/base/grph-trf.mkiv
index 5984c8a0630..d907c1b0ca3 100644
--- a/Master/texmf-dist/tex/context/base/grph-trf.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-trf.mkiv
@@ -52,6 +52,9 @@
\newcount\c_grph_scale_used_x_scale
\newcount\c_grph_scale_used_y_scale
+\let \m_grph_scale_used_x_scale\!!plusone
+\let \m_grph_scale_used_y_scale\!!plusone
+
\newdimen\d_grph_scale_outer_v_size % we cannot manipulate any global vsize !
% scratch:
@@ -66,8 +69,6 @@
\let\finalscaleboxyscale \!!plusone
\let\finalscaleboxwidth \!!zeropoint
\let\finalscaleboxheight \!!zeropoint
-\let\finalscaleboxxfactor\!!hundred
-\let\finalscaleboxyfactor\!!hundred
% we can let sx/sy win (first check)
@@ -78,8 +79,9 @@
\installcommandhandler \??scale {scale} \??scale % we can have instances
\setupscale
- [\c!sx=1,
- \c!sy=1,
+ [\c!sx=\scaleparameter\c!s,
+ \c!sy=\scaleparameter\c!s,
+ \c!s=1,
%\c!scale=,
%\c!xscale=,
%\c!yscale=,
@@ -149,8 +151,6 @@
\global\let\finalscaleboxyscale \!!plusone
\xdef \finalscaleboxwidth {\the\d_grph_scale_wd}%
\xdef \finalscaleboxheight{\the\d_grph_scale_ht}%
- \global\let\finalscaleboxxfactor\!!hundred
- \global\let\finalscaleboxyfactor\!!hundred
%
\forgetall
\dontcomplain
@@ -208,7 +208,7 @@
\def\grph_scale_rounded#1%
{\expandafter\expandafter\expandafter\grph_scale_rounded_indeed
- \expandafter\WITHOUTPT\the\dimexpr#1\points*100+32768sp\relax.\relax}
+ \expandafter\WITHOUTPT\the\dimexpr#1\points*100+32768\scaledpoint\relax.\relax}
\def\grph_scale_rounded_indeed#1.#2\relax{#1}
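The change in this hunk is only notational (32768sp written as 32768\scaledpoint), but the helper is easy to misread: it yields the scale as an integer percentage, rounded rather than truncated, because 32768sp is half a point. A plain Lua equivalent, not part of the patch:

    local function rounded(scale)
        return math.floor(scale * 100 + 0.5)   -- adding half a unit before truncating = rounding
    end
    print(rounded(1.4142)) -- 141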
@@ -219,9 +219,45 @@
\glet\finalscaleboxxscale\p_sx
\glet\finalscaleboxyscale\p_sy
\ifx\finalscaleboxxscale\empty\let\finalscaleboxxscale\!!plusone\fi
- \ifx\finalscaleboxyscale\empty\let\finalscaleboxyscale\!!plusone\fi
- \xdef\finalscaleboxxfactor{\grph_scale_rounded\finalscaleboxxscale}%
- \xdef\finalscaleboxyfactor{\grph_scale_rounded\finalscaleboxyscale}}
+ \ifx\finalscaleboxyscale\empty\let\finalscaleboxyscale\!!plusone\fi}
+
+\let\grph_scale_calculations_report\relax
+
+% \def\grph_scale_calculations_report
+% {\writestatus
+% {scaled}%
+% {wd:\finalscaleboxwidth,ht:\finalscaleboxheight
+% ,xscale:\finalscaleboxxscale,yscale:\finalscaleboxyscale}}
+
+% \def\grph_scale_calculations_yes
+% {\settrue\c_grph_scale_done
+% % initial values
+% \d_grph_scale_x_offset\zeropoint
+% \d_grph_scale_y_offset\zeropoint
+% \d_grph_scale_x_size \d_grph_scale_wd
+% \d_grph_scale_y_size \d_grph_scale_ht % only the height (ht) gets scaled!
+% % final values
+% \global\d_grph_scale_used_x_size \zeropoint % see note * (core-fig)
+% \global\d_grph_scale_used_y_size \zeropoint % see note * (core-fig)
+% \c_grph_scale_used_x_scale \plusone % see note * (core-fig)
+% \c_grph_scale_used_y_scale \plusone % see note * (core-fig)
+% \let\m_grph_scale_used_x_scale \!!plusone
+% \let\m_grph_scale_used_y_scale \!!plusone
+% % preparations
+% \setfalse\c_grph_scale_scaling_done
+% \grph_scale_check_parameters
+% % calculators
+% % beware, they operate in sequence, and calculate missing dimensions / messy
+% % grph_scale_by_nature % when? needed?
+% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_factor \fi
+% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_scale \fi
+% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_dimension\fi
+% % used in actual scaling
+% \xdef\finalscaleboxwidth {\the\d_grph_scale_used_x_size}%
+% \xdef\finalscaleboxheight {\the\d_grph_scale_used_y_size}%
+% \xdef\finalscaleboxxscale {\luaexpr{\number\c_grph_scale_used_x_scale/1000}}%
+% \xdef\finalscaleboxyscale {\luaexpr{\number\c_grph_scale_used_y_scale/1000}}%
+% \grph_scale_calculations_report}
\def\grph_scale_calculations_yes
{\settrue\c_grph_scale_done
@@ -235,25 +271,23 @@
\global\d_grph_scale_used_y_size \zeropoint % see note * (core-fig)
\c_grph_scale_used_x_scale \plusone % see note * (core-fig)
\c_grph_scale_used_y_scale \plusone % see note * (core-fig)
+ \let\m_grph_scale_used_x_scale \!!plusone
+ \let\m_grph_scale_used_y_scale \!!plusone
% preparations
\setfalse\c_grph_scale_scaling_done
\grph_scale_check_parameters
% calculators
% beware, they operate in sequence, and calculate missing dimensions / messy
- %grph_scale_by_nature % when? needed?
+ % grph_scale_by_nature % when? needed?
\ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_factor \fi
\ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_scale \fi
\ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_dimension\fi
- % finalizers / to be done (no longer needed this way, clean up)
- \grph_scale_convert_large_scale\d_grph_scale_h_size\figx\c_grph_scale_used_x_scale\m_grph_scale_temp_x
- \grph_scale_convert_large_scale\d_grph_scale_v_size\figy\c_grph_scale_used_y_scale\m_grph_scale_temp_y
% used in actual scaling
\xdef\finalscaleboxwidth {\the\d_grph_scale_used_x_size}%
\xdef\finalscaleboxheight {\the\d_grph_scale_used_y_size}%
- \xdef\finalscaleboxxfactor{\the\c_grph_scale_used_x_scale}%
- \xdef\finalscaleboxyfactor{\the\c_grph_scale_used_y_scale}%
- \xdef\finalscaleboxxscale {\withoutpt\the\dimexpr\m_grph_scale_temp_x\points/\plushundred\relax}%
- \xdef\finalscaleboxyscale {\withoutpt\the\dimexpr\m_grph_scale_temp_y\points/\plushundred\relax}}
+ \glet\finalscaleboxxscale \m_grph_scale_used_x_scale
+ \glet\finalscaleboxyscale \m_grph_scale_used_y_scale
+ \grph_scale_calculations_report}
\setvalue{\??scalegrid\v!yes }{\getnoflines \d_grph_scale_used_y_size\edef\p_height{\the\noflines\lineheight}}
\setvalue{\??scalegrid\v!height }{\getrawnoflines\d_grph_scale_used_y_size\edef\p_height{\the\dimexpr\noflines\lineheight+\strutdepth}}
@@ -292,27 +326,59 @@
\grph_scale_by_factor_c
\grph_scale_by_factor_d}}}
+% \def\grph_scale_by_factor_a
+% {\grph_scale_apply_size
+% \ifdim\d_grph_scale_x_size>\d_grph_scale_y_size
+% \grph_scale_calculate_norm \d_grph_scale_used_x_size\p_factor\p_maxwidth\hsize\d_grph_scale_h_size
+% \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
+% \else
+% \grph_scale_calculate_norm \d_grph_scale_used_y_size\p_factor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
+% \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
+% \fi
+% \grph_scale_by_factor_indeed}
+%
+% \def\grph_scale_by_factor_b
+% {\grph_scale_apply_size
+% \grph_scale_calculate_norm \d_grph_scale_used_y_size\p_hfactor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
+% \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
+% \grph_scale_by_factor_indeed}
+%
+% \def\grph_scale_by_factor_c
+% {\grph_scale_apply_size
+% \grph_scale_calculate_norm \d_grph_scale_used_x_size\p_wfactor\p_maxwidth\hsize\d_grph_scale_h_size
+% \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
+% \grph_scale_by_factor_indeed}
+%
+% \def\grph_scale_by_factor_d
+% {\grph_scale_calculate_norm\d_grph_scale_used_y_size\p_factor \p_height \textheight\d_grph_scale_v_size
+% \grph_scale_calculate_norm\d_grph_scale_used_y_size\p_hfactor\p_height \textheight\d_grph_scale_v_size
+% \grph_scale_calculate_norm\d_grph_scale_used_x_size\p_wfactor\p_width \hsize \hsize}
+
\def\grph_scale_by_factor_a
{\grph_scale_apply_size
\ifdim\d_grph_scale_x_size>\d_grph_scale_y_size
\grph_scale_calculate_norm \d_grph_scale_used_x_size\p_factor\p_maxwidth\hsize\d_grph_scale_h_size
- \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size
\else
\grph_scale_calculate_norm \d_grph_scale_used_y_size\p_factor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
- \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size
\fi
\grph_scale_by_factor_indeed}
\def\grph_scale_by_factor_b
{\grph_scale_apply_size
\grph_scale_calculate_norm \d_grph_scale_used_y_size\p_hfactor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
- \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size
\grph_scale_by_factor_indeed}
\def\grph_scale_by_factor_c
{\grph_scale_apply_size
\grph_scale_calculate_norm \d_grph_scale_used_x_size\p_wfactor\p_maxwidth\hsize\d_grph_scale_h_size
- \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size
\grph_scale_by_factor_indeed}
\def\grph_scale_by_factor_d
@@ -333,13 +399,38 @@
\fi
\grph_scale_by_dimension}
+% \def\grph_scale_by_scale
+% {\edef\m_grph_scale_temp{\p_scale\p_xscale\p_yscale}%
+% \ifx\m_grph_scale_temp\empty \else
+% \grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
+% \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale
+% \global\d_grph_scale_used_x_size\zeropoint
+% \global\d_grph_scale_used_y_size\zeropoint
+% \ifx\p_maxwidth\empty
+% \ifx\p_maxheight\empty
+% \else
+% \ifdim\d_grph_scale_y_size>\p_maxheight\relax
+% \global\d_grph_scale_used_y_size\p_maxheight
+% \fi
+% \fi
+% \else
+% \ifdim\d_grph_scale_x_size>\p_maxwidth\relax
+% \global\d_grph_scale_used_x_size\p_maxwidth
+% \fi
+% \fi
+% \fi}
+
\def\grph_scale_by_scale
{\edef\m_grph_scale_temp{\p_scale\p_xscale\p_yscale}%
\ifx\m_grph_scale_temp\empty \else
- \grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
- \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale
+ \grph_scale_apply_scale\m_grph_scale_used_x_scale\p_xscale
+ \grph_scale_apply_scale\m_grph_scale_used_y_scale\p_yscale
+ % \global\d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size\relax % no global needed here
+ % \global\d_grph_scale_used_y_size\m_grph_scale_used_y_scale\d_grph_scale_y_size\relax % no global needed here
+ % wrong: we need to recalculate the scale
\global\d_grph_scale_used_x_size\zeropoint
\global\d_grph_scale_used_y_size\zeropoint
+ %
\ifx\p_maxwidth\empty
\ifx\p_maxheight\empty
\else
@@ -369,33 +460,71 @@
\fi
\fi}
+% \def\grph_scale_by_dimension_a
+% {\grph_scale_by_dimension_indeed
+% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
+% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
+% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
+% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
+% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
+% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}}
+%
+% \def\grph_scale_by_dimension_b
+% {\grph_scale_by_dimension_indeed
+% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}}
+%
+% \def\grph_scale_by_dimension_c
+% {\grph_scale_by_dimension_indeed
+% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
+%
+% \def\grph_scale_by_dimension_d
+% {\grph_scale_by_dimension_indeed
+% {\grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
+% \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
+% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
+
\def\grph_scale_by_dimension_a
{\grph_scale_by_dimension_indeed
- {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
- \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
- {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
- \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
- {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
- \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}}
+ {\grph_scale_calculate_scale\m_grph_scale_used_y_scale\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \grph_scale_calculate_scale\m_grph_scale_used_x_scale\d_grph_scale_used_x_size\d_grph_scale_x_size}%
+ {\grph_scale_calculate_scale\m_grph_scale_used_y_scale\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \grph_scale_calculate_scale\m_grph_scale_used_x_scale\d_grph_scale_used_x_size\d_grph_scale_x_size}%
+ {\grph_scale_calculate_scale\m_grph_scale_used_y_scale\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \grph_scale_calculate_scale\m_grph_scale_used_x_scale\d_grph_scale_used_x_size\d_grph_scale_x_size}}
\def\grph_scale_by_dimension_b
{\grph_scale_by_dimension_indeed
- {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
- {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
- {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}}
+ {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size}}
\def\grph_scale_by_dimension_c
- {\grph_scale_by_dimension_indeed
- {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
- {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
- {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
+ {\grph_scale_by_dimension_indeed % weird .. three same cases
+ {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size}}
\def\grph_scale_by_dimension_d
{\grph_scale_by_dimension_indeed
- {\grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
- \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale}%
- {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
- {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
+ {\grph_scale_apply_scale\m_grph_scale_used_x_scale\p_xscale
+ \grph_scale_apply_scale\m_grph_scale_used_y_scale\p_yscale
+ \global\d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size
+ \global\d_grph_scale_used_y_size\m_grph_scale_used_y_scale\d_grph_scale_y_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
+ \d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size}%
+ {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size
+ \d_grph_scale_used_x_size\m_grph_scale_used_x_scale\d_grph_scale_x_size}}
\def\grph_scale_by_dimension_indeed#1#2#3%
{#1\relax
@@ -417,34 +546,63 @@
\setvalue{\??scalenorm\v!max }#1#2#3#4#5{\global#1#4}
\setvalue{\??scalenorm\v!fit }#1#2#3#4#5{\global#1#5}
-\setvalue{\??scalenorm\v!broad }#1#2#3#4#5{\global#1\dimexpr#5-4\@@exbodyfont\relax}
-\setvalue{\??scalenorm\s!unknown}#1#2#3#4#5{\global#1\dimexpr#2\dimexpr\@@exbodyfont/10\relax\relax} % brr ex
+\setvalue{\??scalenorm\v!broad }#1#2#3#4#5{\global#1\dimexpr#5-4\externalfigureparameter\c!bodyfont\relax}
+\setvalue{\??scalenorm\s!unknown}#1#2#3#4#5{\global#1\dimexpr#2\dimexpr\externalfigureparameter\c!bodyfont/10\relax\relax} % brr ex
\setvalue{\??scalenorm\v!auto }#1#2#3#4#5{\ifx#3\empty\else\global#1#3\fi}
\setvalue{\??scalenorm\empty }#1#2#3#4#5{\ifx#3\empty\else\global#1#3\fi}
\setvalue{\??scalenorm\s!default}#1#2#3#4#5{\ifx#3\empty\else\global#1#3\fi}
-\def\grph_scale_calculate_scales#1#2#3#4%
- {\scratchdimen\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax
- \c_grph_scale_used_x_scale\scratchdimen
- \c_grph_scale_used_y_scale\scratchdimen
- #3\dimexpr\c_grph_scale_used_x_scale\dimexpr#4/\plusthousand\relax\relax}
+% \def\grph_scale_calculate_scales#1#2#3#4%
+% {\scratchdimen\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax
+% \c_grph_scale_used_x_scale\scratchdimen
+% \c_grph_scale_used_y_scale\scratchdimen
+% %\writestatus{!!!!}{\the#3/\luaexpr{\number\dimexpr#1\relax/\number\dimexpr#2/1000}}%
+% #3\dimexpr\c_grph_scale_used_x_scale\dimexpr#4/\plusthousand\relax\relax}
+%
+% \def\grph_scale_calculate_scale#1#2#3%
+% {#3\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax}
+%
+% \def\grph_scale_apply_scale#1#2#3#4% #4 = parameter / scale can be empty
+% {\ifcase0#4\relax
+% \ifcase0\p_scale\relax
+% #3=\plusthousand
+% \else
+% #3=\p_scale
+% \fi
+% \else
+% #3=#4%
+% \fi
+% \relax % important ! still ?
+% \global#1\ifnum#3=\plusthousand#2\else\dimexpr#3\dimexpr#2/\plusthousand\relax\relax\fi
+% \relax}
+
+\def\grph_scale_calculate_scales#1#2%
+ {\edef\m_grph_scale_used_x_scale{\luaexpr{\number#1/\number#2}}%
+ \let\m_grph_scale_used_y_scale\m_grph_scale_used_x_scale}
+
+% we could inline this:
+%
+% \grph_scale_calculate_scale\m_grph_scale_used_x_scale#1#2
+% \let\m_grph_scale_used_y_scale\m_grph_scale_used_x_scale
\def\grph_scale_calculate_scale#1#2#3%
- {#3\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax}
-
-\def\grph_scale_apply_scale#1#2#3#4% #4 = parameter / scale can be empty
- {\ifcase0#4\relax
- \ifcase0\p_scale\relax
- #3=\plusthousand
- \else
- #3=\p_scale
- \fi
- \else
- #3=#4%
- \fi
- \relax % important ! still ?
- \global#1\ifnum#3=\plusthousand#2\else\dimexpr#3\dimexpr#2/\plusthousand\relax\relax\fi
- \relax}
+ {\edef#1{\luaexpr{\number#2/\number#3}}}
+
+\def\grph_scale_apply_scale#1#2% #1 = target macro, #2 = parameter / scale can be empty
+ {\edef#1{\luaexpr
+ {\number
+ \ifx#2\empty
+ \ifx \p_scale \empty \plusthousand \else
+ \ifnum\p_scale=\zerocount \plusthousand \else
+ \p_scale \fi\fi
+ \else\ifnum#2=\zerocount
+ \ifx \p_scale \empty \plusthousand \else
+ \ifnum\p_scale=\zerocount \plusthousand \else
+ \p_scale \fi\fi
+ \else
+ #2%
+ \fi\fi
+ /1000}}}
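For readers of the patch: the new \grph_scale_calculate_scales simply divides the used size by the natural size via \luaexpr (both taken as scaled-point numbers), and \grph_scale_apply_scale only decides which thousandths value to use before dividing by 1000. A minimal Lua sketch of that resolution logic, with illustrative names (resolvescale and its string arguments are not part of the patch):

local function resolvescale(specific,general)
    -- use the explicit sx/sy value when it is set and nonzero, else fall back
    -- to the generic scale, else to 1000 (no scaling), then turn the
    -- thousandths into a real factor
    if specific == "" or tonumber(specific) == 0 then
        specific = general
        if specific == "" or tonumber(specific) == 0 then
            specific = "1000"
        end
    end
    return tonumber(specific) / 1000
end

-- resolvescale("","")     --> 1.0  (neither xscale nor scale given)
-- resolvescale("","1200") --> 1.2  (only scale given)
-- resolvescale("800","")  --> 0.8  (explicit xscale wins)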
\def\grph_scale_apply_size
{\ifx\p_maxheight\empty
@@ -484,30 +642,34 @@
\d_grph_scale_h_size\p_width
\fi}
-\def\grph_scale_convert_large_scale#1#2#3#4%
- {\scratchdimen#1\relax
- \ifnum#3=\plusthousand
- % == scale 1
- \else
- % better 1000 100 10 ranges, evt round 2sp
- \divide\scratchdimen \plusthousand
- \multiply\scratchdimen #3\relax
- \fi
- \scratchdimen-\scratchdimen % beter hier - dan in driver
- \edef#2{\the\scratchdimen}%
- \scratchcounter#3\relax
- \ifnum\scratchcounter>\plustenthousand
- \divide\scratchcounter\plusten
- \scratchdimen\the\scratchcounter\points
- \else
- \scratchdimen\the\scratchcounter\points
- \divide\scratchdimen\plusten
- \fi
- \edef#4{\withoutpt\the\scratchdimen}}
+%\def\grph_scale_convert_large_scale#1#2#3#4%
+% {\scratchdimen#1\relax
+% \ifnum#3=\plusthousand
+% % == scale 1
+% \else
+% % better 1000 100 10 ranges, possibly round to 2sp
+% \divide\scratchdimen \plusthousand
+% \multiply\scratchdimen #3\relax
+% \fi
+% \scratchdimen-\scratchdimen % better to negate here than in the driver
+% \edef#2{\the\scratchdimen}%
+% \scratchcounter#3\relax
+% \ifnum\scratchcounter>\plustenthousand
+% \divide\scratchcounter\plusten
+% \scratchdimen\the\scratchcounter\points % \the ?
+% \else
+% \scratchdimen\the\scratchcounter\points % \the ?
+% \divide\scratchdimen\plusten
+% \fi
+% \edef#4{\withoutpt\the\scratchdimen}}
+%
+% \def\grph_scale_convert_large_scale#1#2#3#4% less overflow
+% {\edef#2{\ifnum#3=\plusthousand-\the\dimexpr#1\else\luaexpr{-\number#3*\number\dimexpr#1/1000}sp\fi}%
+% \edef#4{\luaexpr{\number#3/10}}}
% \startcombination
-% {\externalfigure[cow.pdf] [frame=on,height=3cm,equalwidth=6cm]} {}
-% {\externalfigure[mill.png][frame=on,height=3cm,equalwidth=6cm]} {}
+% {\externalfigure[cow.pdf] [frame=on,height=3cm,equalwidth=6cm]} {a cow}
+% {\externalfigure[mill.png][frame=on,height=3cm,equalwidth=6cm]} {a mill}
% \stopcombination
\def\grph_scale_position
@@ -535,7 +697,7 @@
{\hbox}
\def\grph_scale_fast_yes#1%
- {\edef\finalscaleboxxscale{\withoutpt\the\dimexpr#1pt/1000\relax}% brrr
+ {\edef\finalscaleboxxscale{\withoutpt\the\dimexpr#1\onepoint/1000\relax}% brrr
\let\finalscaleboxyscale\finalscaleboxxscale
\dowithnextboxcs\grph_scale_fast_finish\hbox}
@@ -544,6 +706,19 @@
\box\nextbox
\endgroup}
+\unexpanded\def\fastsxsy#1#2%
+ {\bgroup
+ \edef\p_sx{#1}%
+ \edef\p_sy{#2}%
+ \dowithnextboxcs\grph_scale_fast_sx_xy_finish\hbox}
+
+\def\grph_scale_fast_sx_xy_finish
+ {\grph_scale_check_sx_sy
+ \grph_scale_calculations_nop
+ \grph_scale_apply
+ \box\nextbox
+ \egroup}
+
%D \macros
%D {clip, setupclipping}
%D
@@ -575,8 +750,9 @@
%D
%D \showsetup{setupclipping}
-\unexpanded\def\setupclipping
- {\dodoubleargument\getparameters[\??cp]}
+\installcorenamespace{clipping}
+
+\installdirectcommandhandler \??clipping {clipping}
\unexpanded\def\clip
{\dosingleempty\grph_clip}
@@ -584,62 +760,106 @@
\def\grph_clip[#1]% nb top->bottom left->right
{\bgroup
\iffirstargument
- \getparameters[\??cp][#1]%
+ \setupcurrentclipping[#1]%
\fi
\dowithnextboxcs\grph_clip_finish\hbox}
\def\grph_clip_finish
- {\doifelse\@@cpstate\v!start
+ {\doifelse{\clippingparameter\c!state}\v!start
\grph_clip_yes_finish
\grph_clip_nop_finish}
+% \def\grph_clip_yes_finish
+% {\ifdim\@@cpwidth>\zeropoint
+% \!!dimena\@@cpwidth
+% \!!dimenc\@@cphoffset
+% \else
+% \!!dimena\wd\nextbox
+% \divide\!!dimena \@@cpnx
+% \!!dimenc\@@cpx\!!dimena
+% \advance\!!dimenc -\!!dimena
+% \!!dimena\@@cpsx\!!dimena
+% \fi
+% \relax % sure
+% \ifdim\@@cpheight>\zeropoint
+% \!!dimenb\@@cpheight
+% \!!dimend\ht\nextbox
+% \advance\!!dimend -\@@cpvoffset
+% \advance\!!dimend -\!!dimenb
+% \else
+% \!!dimenb\ht\nextbox
+% \divide\!!dimenb \@@cpny
+% \!!dimend-\@@cpy\!!dimenb
+% \advance\!!dimend -\@@cpsy\!!dimenb
+% \advance\!!dimend \!!dimenb
+% \!!dimenb\@@cpsy\!!dimenb
+% \advance\!!dimend \ht\nextbox % dimend !
+% \fi
+% \setbox\nextbox\hbox % old
+% {\advance\!!dimenc -\@@cpleftoffset % new !
+% \advance\!!dimend -\@@cpbottomoffset % new ! % - added
+% \hskip-\!!dimenc\lower\!!dimend\box\nextbox}% old
+% \wd\nextbox\zeropoint
+% \ht\nextbox\zeropoint
+% \dp\nextbox\zeropoint
+% \setbox\nextbox\hbox
+% {\advance\!!dimena \@@cpleftoffset % new !
+% \advance\!!dimena \@@cprightoffset % new !
+% \advance\!!dimenb \@@cpbottomoffset % new !
+% \advance\!!dimenb \@@cptopoffset % new !
+% \dostartclipping\@@cpmp\!!dimena\!!dimenb % old
+% \box\nextbox
+% \dostopclipping}%
+% \setbox\nextbox\hbox % new !
+% {\!!dimena-\@@cpleftoffset % new !
+% \!!dimenb \@@cpbottomoffset % new ! % - removed
+% \hskip\!!dimena\lower\!!dimenb\box\nextbox}% new !
+% \wd\nextbox\!!dimena
+% \ht\nextbox\!!dimenb
+% \dp\nextbox\zeropoint
+% \box\nextbox
+% \egroup}
+
\def\grph_clip_yes_finish
- {\ifdim\@@cpwidth>\zeropoint
- \!!dimena\@@cpwidth
- \!!dimenc\@@cphoffset
+ {\ifdim\clippingparameter\c!width>\zeropoint
+ \scratchwidth \clippingparameter\c!width
+ \scratchxoffset\clippingparameter\c!hoffset
\else
- \!!dimena\wd\nextbox
- \divide\!!dimena \@@cpnx
- \!!dimenc\@@cpx\!!dimena
- \advance\!!dimenc -\!!dimena
- \!!dimena\@@cpsx\!!dimena
+ \scratchwidth\dimexpr\wd\nextbox/\clippingparameter\c!nx\relax
+ \scratchxoffset\dimexpr\clippingparameter\c!x\scratchwidth-\scratchwidth\relax
+ \scratchwidth\clippingparameter\c!sx\scratchwidth
\fi
\relax % sure
- \ifdim\@@cpheight>\zeropoint
- \!!dimenb\@@cpheight
- \!!dimend\ht\nextbox
- \advance\!!dimend -\@@cpvoffset
- \advance\!!dimend -\!!dimenb
+ \ifdim\clippingparameter\c!height>\zeropoint
+ \scratchheight\clippingparameter\c!height
+ \scratchyoffset\dimexpr\ht\nextbox-\clippingparameter\c!voffset-\scratchheight\relax
\else
- \!!dimenb\ht\nextbox
- \divide\!!dimenb \@@cpny
- \!!dimend-\@@cpy\!!dimenb
- \advance\!!dimend -\@@cpsy\!!dimenb
- \advance\!!dimend \!!dimenb
- \!!dimenb\@@cpsy\!!dimenb
- \advance\!!dimend \ht\nextbox % dimend !
+ \scratchheight\dimexpr\ht\nextbox/\clippingparameter\c!ny\relax
+ \scratchyoffset\dimexpr-\clippingparameter\c!y\scratchheight-\clippingparameter\c!sy\scratchheight+\scratchheight\relax
+ \scratchheight\clippingparameter\c!sy\scratchheight
+ \advance\scratchyoffset \ht\nextbox
\fi
- \setbox\nextbox\hbox % old
- {\advance\!!dimenc -\@@cpleftoffset % new !
- \advance\!!dimend -\@@cpbottomoffset % new ! % - added
- \hskip-\!!dimenc\lower\!!dimend\box\nextbox}% old
+ \setbox\nextbox\hbox
+ {\advance\scratchxoffset -\clippingparameter\c!leftoffset \relax
+ \advance\scratchyoffset -\clippingparameter\c!bottomoffset\relax
+ \hskip-\scratchxoffset
+ \lower\scratchyoffset
+ \box\nextbox}%
\wd\nextbox\zeropoint
\ht\nextbox\zeropoint
\dp\nextbox\zeropoint
\setbox\nextbox\hbox
- {\advance\!!dimena \@@cpleftoffset % new !
- \advance\!!dimena \@@cprightoffset % new !
- \advance\!!dimenb \@@cpbottomoffset % new !
- \advance\!!dimenb \@@cptopoffset % new !
- \dostartclipping\@@cpmp\!!dimena\!!dimenb % old
+ {\advance\scratchwidth \dimexpr\clippingparameter\c!leftoffset +\clippingparameter\c!rightoffset\relax
+ \advance\scratchheight\dimexpr\clippingparameter\c!bottomoffset+\clippingparameter\c!topoffset \relax
+ \dostartclipping{\clippingparameter\c!mp}\scratchwidth\scratchheight
\box\nextbox
\dostopclipping}%
- \setbox\nextbox\hbox % new !
- {\!!dimena-\@@cpleftoffset % new !
- \!!dimenb \@@cpbottomoffset % new ! % - removed
- \hskip\!!dimena\lower\!!dimenb\box\nextbox}% new !
- \wd\nextbox\!!dimena
- \ht\nextbox\!!dimenb
+ \setbox\nextbox\hbox
+ {\hskip-\clippingparameter\c!leftoffset
+ \lower \clippingparameter\c!bottomoffset
+ \box\nextbox}%
+ \wd\nextbox\scratchwidth
+ \ht\nextbox\scratchheight
\dp\nextbox\zeropoint
\box\nextbox
\egroup}
@@ -650,18 +870,18 @@
\setupclipping
[\c!state=\v!start,
- \c!n=1, % was 2
- \c!nx=\@@cpn,\c!x=1,\c!sx=1,
- \c!ny=\@@cpn,\c!y=1,\c!sy=1,
- \c!width=\!!zeropoint,
- \c!height=\!!zeropoint,
- \c!hoffset=\!!zeropoint,
- \c!voffset=\!!zeropoint,
+ \c!n=\plusone, % was \plustwo
+ \c!nx=\clippingparameter\c!n,\c!x=\plusone,\c!sx=\plusone,
+ \c!ny=\clippingparameter\c!n,\c!y=\plusone,\c!sy=\plusone,
+ \c!width=\zeropoint,
+ \c!height=\zeropoint,
+ \c!hoffset=\zeropoint,
+ \c!voffset=\zeropoint,
\c!offset=\zeropoint,
- \c!leftoffset=\@@cpoffset, % \zeropoint,
- \c!rightoffset=\@@cpoffset, % \zeropoint,
- \c!topoffset=\@@cpoffset, % \zeropoint,
- \c!bottomoffset=\@@cpoffset,% \zeropoint,
+ \c!leftoffset=\clippingparameter\c!offset,
+ \c!rightoffset=\clippingparameter\c!offset,
+ \c!topoffset=\clippingparameter\c!offset,
+ \c!bottomoffset=\clippingparameter\c!offset,
\c!mp=]
%D \startbuffer
diff --git a/Master/texmf-dist/tex/context/base/grph-u3d.lua b/Master/texmf-dist/tex/context/base/grph-u3d.lua
index f2baac2f316..6961c55037b 100644
--- a/Master/texmf-dist/tex/context/base/grph-u3d.lua
+++ b/Master/texmf-dist/tex/context/base/grph-u3d.lua
@@ -8,17 +8,17 @@ if not modules then modules = { } end modules ['grph-u3d'] = {
-- see lpdf-u3d.lua for comment
+-- maybe: backends.codeinjections.insertu3d
+
local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end)
local report_u3d = logs.reporter("graphics","u3d")
-local figures = figures
-
+local figures = figures
+local context = context
local nodeinjections = backends.nodeinjections
local todimen = string.todimen
--- maybe todo: backends.codeinjections.insertu3d
-
function figures.checkers.u3d(data)
local dr, du, ds = data.request, data.used, data.status
local width = todimen(dr.width or figures.defaultwidth)
@@ -27,7 +27,7 @@ function figures.checkers.u3d(data)
dr.width, dr.height = width, height
du.width, du.height, du.foundname = width, height, foundname
if trace_inclusion then
- report_u3d("including u3d '%s': width %s, height %s",foundname,width,height)
+ report_u3d("including u3d %a, width %p, height %p",foundname,width,height)
end
context.startfoundexternalfigure(width .. "sp",height .. "sp")
context(function()
diff --git a/Master/texmf-dist/tex/context/base/grph-wnd.lua b/Master/texmf-dist/tex/context/base/grph-wnd.lua
index 0ea3f8c43d9..ebb9b11691c 100644
--- a/Master/texmf-dist/tex/context/base/grph-wnd.lua
+++ b/Master/texmf-dist/tex/context/base/grph-wnd.lua
@@ -20,7 +20,7 @@ local function togray(oldname,newname)
if lfs.isfile(oldname) then
require("gmwand")
if trace_conversion then
- report_wand("converting '%s' to '%s' using gmwand",oldname,newname)
+ report_wand("converting %a to %a using gmwand",oldname,newname)
end
gmwand.InitializeMagick("./") -- What does this path do?
local wand = gmwand.NewMagickWand()
@@ -29,7 +29,7 @@ local function togray(oldname,newname)
gmwand.MagickWriteImages(wand,newname,1)
gmwand.DestroyMagickWand(wand)
else
- report_wand("unable to convert '%s' to '%s' using gmwand",oldname,newname)
+ report_wand("unable to convert %a to %a using gmwand",oldname,newname)
end
end
@@ -39,7 +39,7 @@ for i=1,#formats do
local oldformat = formats[i]
local newformat = "gray." .. oldformat
if trace_conversion then
- report_wand("installing converter: %s -> %s",oldformat,newformat)
+ report_wand("installing converter for %a to %a",oldformat,newformat)
end
converters[oldformat] = converters[oldformat] or { }
converters[oldformat][newformat] = togray
diff --git a/Master/texmf-dist/tex/context/base/java-imp-fld.mkiv b/Master/texmf-dist/tex/context/base/java-imp-fld.mkiv
index 625bfce115a..aaec257f259 100644
--- a/Master/texmf-dist/tex/context/base/java-imp-fld.mkiv
+++ b/Master/texmf-dist/tex/context/base/java-imp-fld.mkiv
@@ -266,10 +266,14 @@ function ForgetChanges() {
\startJSpreamble FieldStack used later
+function Field_Name(FieldSet,i) {
+ return this.getField(FieldSet + ":" + i)
+}
+
function Reset_Fields(FieldSet) {
var i = 1 ;
while (true) {
- v = this.getField(FieldSet+":"+i) ;
+ v = Field_Name(FieldSet,i) ;
if (!v) {
break ;
} else {
@@ -277,12 +281,13 @@ function Reset_Fields(FieldSet) {
}
i++ ;
}
+ this.dirty = false ;
}
function Set_Fields(FieldSet) {
var i = 1 ;
while (true) {
- v = this.getField(FieldSet+":"+i) ;
+ v = Field_Name(FieldSet,i) ;
if (!v) {
break ;
} else {
@@ -290,36 +295,39 @@ function Set_Fields(FieldSet) {
}
i++ ;
}
+ this.dirty = false ;
}
function Set_Field(FieldSet, FieldName) {
Reset_Fields(FieldSet) ;
- v = this.getField(FieldSet+":"+FieldName) ;
+ v = Field_Name(FieldSet,FieldName) ;
if (v) {
v.value = "Yes" ;
+ this.dirty = false ;
}
}
function Reset_Field(FieldSet, FieldName) {
Set_Fields(FieldSet) ;
- v = this.getField(FieldSet+":"+FieldName) ;
+ v = Field_Name(FieldSet,FieldName) ;
if (v) {
v.value = "Off" ;
+ this.dirty = false ;
}
}
function Walk_Field(FieldSet) {
var i = 1 ;
while (true) {
- v = this.getField(FieldSet + ":" + i) ;
+ v = Field_Name(FieldSet,i) ;
if (v) {
if (v.value != "Off") {
v.value = "Off" ;
var ii = i ;
ii++ ;
- v = this.getField(FieldSet + ":" + ii) ;
+ v = Field_Name(FieldSet,ii) ;
if (! v) {
- v = this.getField(FieldSet + ":" + 1) ;
+ v = Field_Name(FieldSet,1) ;
}
if (v) {
v.value = "Yes" ;
@@ -331,6 +339,7 @@ function Walk_Field(FieldSet) {
break ;
}
}
+ this.dirty = false ;
}
var FieldSets = new Array() ;
@@ -338,36 +347,67 @@ var FieldSets = new Array() ;
function Do_Get_Check_Walk_Field(FieldSet) {
var f = FieldSets[FieldSet]
if (! f) {
- f = new Array() ;
+ f = new Array() ;
f.number = 0 ;
f.delay = 500 ;
f.paused = false ;
f.running = false ;
f.name = FieldSet ;
f.timeout = null ;
+ f.repeat = true ;
+ f.total = 0 ;
+ f.pauseset = new Array() ;
FieldSets[FieldSet] = f ;
+ for (var i=1; i>0; i++) {
+ var v = Field_Name(FieldSet,i) ;
+ if (! v) {
+ f.total = i - 1 ;
+ break ;
+ }
+ }
+ f.start = 0 ;
+ f.stop = f.total ;
+ f.pause = 0 ;
}
+ this.dirty = false ;
return f
}
+function Do_Next_Auto_Walk_Field_Step(FieldSet,fieldset,n) {
+ var v = Field_Name(FieldSet,fieldset.number) ;
+ if (v) {
+ if (v.value != "Off") {
+ v.value = "Off" ;
+ }
+ }
+ v = Field_Name(FieldSet,n) ;
+ if (v) {
+ fieldset.number = n ;
+ v.value = "Yes" ;
+ if (fieldset.pauseset[n]) {
+ // fieldset.pause = n ;
+ // Do_Stop_Auto_Walk_Field(Fieldset) ;
+ Do_Stop_Auto_Walk_Field(FieldSet) ;
+ fieldset.paused = true ;
+ }
+ }
+ this.dirty = false ;
+}
+
function Do_Next_Auto_Walk_Field(FieldSet) {
var fieldset = Do_Get_Check_Walk_Field(FieldSet) ;
if (fieldset) {
- var v = this.getField(FieldSet + ":" + fieldset.number) ;
- if (v) {
- if (v.value != "Off") {
- v.value = "Off" ;
+ if (fieldset.number >= fieldset.stop) {
+ if (fieldset.repeat == false) {
+ fieldset.running = false ;
+ Do_Stop_Auto_Walk_Field(fieldset) ;
+ } else {
+ Do_Next_Auto_Walk_Field_Step(FieldSet,fieldset,fieldset.start) ;
}
+ } else {
+ Do_Next_Auto_Walk_Field_Step(FieldSet,fieldset,fieldset.number+1) ;
}
- fieldset.number++ ;
- v = this.getField(FieldSet + ":" + fieldset.number) ;
- if (! v) {
- fieldset.number = 1 ;
- v = this.getField(FieldSet + ":" + fieldset.number) ;
- }
- if (v) {
- v.value = "Yes" ;
- }
+ this.dirty = false ;
}
}
@@ -379,6 +419,7 @@ function Do_Stop_Auto_Walk_Field(FieldSet) {
app.clearTimeOut(fieldset.timeout) ;
} catch (e) {
}
+ this.dirty = false ;
}
}
@@ -386,7 +427,9 @@ function Do_Start_Auto_Walk_Field(FieldSet) {
var fieldset = Do_Get_Check_Walk_Field(FieldSet) ;
if (fieldset) {
Do_Stop_Auto_Walk_Field(FieldSet) ;
+ Do_Next_Auto_Walk_Field(FieldSet) ;
fieldset.timeout = app.setInterval("Do_Next_Auto_Walk_Field('"+FieldSet+"')", fieldset.delay) ;
+ this.dirty = false ;
}
}
@@ -419,9 +462,48 @@ function Pause_Walk_Field(FieldSet) {
}
}
-function Start_Pause_Walk_Field(FieldSet, Delay) {
+function Start_Pause_Walk_Field(FieldSet, Delay, Option) {
var fieldset = Do_Get_Check_Walk_Field(FieldSet) ;
if (fieldset) {
+ if (Option == "once") {
+ fieldset.repeat = false ;
+ } else if (Option == "pause") {
+ fieldset.repeat = false ;
+ } else {
+ fieldset.repeat = true ;
+ }
+ if (Option == "pause") {
+ for (i=3; i<arguments.length; i++) {
+ fieldset.pauseset[parseInt(arguments[i])] = true ;
+ }
+ if (fieldset.pause == fieldset.stop) {
+ fieldset.start = 1 ;
+ fieldset.pause = 0 ;
+ } else if (fieldset.pause > 1) {
+ fieldset.start = fieldset.pause - 1 ;
+ } else {
+ fieldset.start = 1 ;
+ }
+ fieldset.stop = fieldset.total ;
+ } else {
+ var Start = arguments[3] ;
+ var Stop = arguments[4] ;
+ if (typeof Start == "string") {
+ fieldset.start = parseInt(Start) ;
+ } else if (typeof Start == "number") {
+ fieldset.start = Start ;
+ } else {
+ fieldset.start = 1 ;
+ }
+ if (typeof Stop == "string") {
+ fieldset.stop = parseInt(Stop) ;
+ } else if (typeof Stop == "number") {
+ fieldset.stop = Stop ;
+ } else {
+ fieldset.stop = fieldset.total ;
+ }
+ fieldset.pause = 0 ;
+ }
if (fieldset.running) {
if (fieldset.paused) {
Do_Start_Auto_Walk_Field(FieldSet) ;
@@ -431,7 +513,7 @@ function Start_Pause_Walk_Field(FieldSet, Delay) {
fieldset.paused = true ;
}
} else {
- fieldset.number = 0 ;
+ fieldset.number = fieldset.start - 1 ;
if (Delay) {
fieldset.delay = Delay ;
}
@@ -470,17 +552,18 @@ function Previous_Walk_Field(FieldSet) {
fieldset.running = false ;
fieldset.paused = false ;
if (fieldset.number>0) {
- var v = this.getField(FieldSet + ":" + fieldset.number) ;
+ var v = Field_Name(FieldSet,fieldset.number) ;
if (v) {
if (v.value != "Off") {
v.value = "Off" ;
}
}
fieldset.number-- ;
- v = this.getField(FieldSet + ":" + fieldset.number) ;
+ v = Field_Name(FieldSet,fieldset.number) ;
if (v) {
v.value = "Yes" ;
}
+ this.dirty = false ;
}
}
}
@@ -492,19 +575,20 @@ function Next_Walk_Field(FieldSet) {
fieldset.running = false ;
fieldset.paused = false ;
var f = fieldset.number + 1 ;
- var v = this.getField(FieldSet + ":" + f) ;
+ var v = Field_Name(FieldSet,f) ;
if (v) {
- var v = this.getField(FieldSet + ":" + fieldset.number) ;
+ var v = Field_Name(FieldSet,fieldset.number) ;
if (v) {
if (v.value != "Off") {
v.value = "Off" ;
}
}
fieldset.number++ ;
- v = this.getField(FieldSet + ":" + fieldset.number) ;
+ v = Field_Name(FieldSet,fieldset.number) ;
if (v) {
v.value = "Yes" ;
}
+ this.dirty = false ;
}
}
}
diff --git a/Master/texmf-dist/tex/context/base/java-ini.lua b/Master/texmf-dist/tex/context/base/java-ini.lua
index 1e98de7173f..321e4e24d22 100644
--- a/Master/texmf-dist/tex/context/base/java-ini.lua
+++ b/Master/texmf-dist/tex/context/base/java-ini.lua
@@ -8,10 +8,12 @@ if not modules then modules = { } end modules ['java-ini'] = {
local format = string.format
local concat = table.concat
-local lpegmatch, lpegP, lpegR, lpegS, lpegC, lpegCarg = lpeg.match, lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Carg
+local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc
local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
local variables = interfaces.variables
+local formatters = string.formatters
-- todo: don't flush scripts if no JS key
@@ -33,29 +35,29 @@ local preambled = { }
local function storefunction(s,preamble)
if trace_javascript then
- report_javascripts("found: function '%s'",s)
+ report_javascripts("found function %a",s)
end
functions[s] = preamble
end
-local uses = lpegP("uses")
-local used = lpegP("used")
-local left = lpegP("{")
-local right = lpegP("}")
-local space = lpegS(" \r\n")
+local uses = P("uses")
+local used = P("used")
+local left = P("{")
+local right = P("}")
+local space = S(" \r\n")
local spaces = space^0
-local braced = left * lpegC((1-right-space)^1) * right
-local unbraced = lpegC((1-space)^1)
+local braced = left * C((1-right-space)^1) * right
+local unbraced = C((1-space)^1)
local name = spaces * (braced + unbraced) * spaces
-local any = lpegP(1)
-local script = lpegC(any^1)
-local funct = lpegP("function")
-local leftp = lpegP("(")
-local rightp = lpegP(")")
-local fname = spaces * funct * spaces * (lpegC((1-space-left-leftp)^1) * lpegCarg(1) / storefunction) * spaces * leftp
-
-local parsecode = name * ((uses * name) + lpeg.Cc("")) * spaces * script
-local parsepreamble = name * ((used * name) + lpeg.Cc("")) * spaces * script
+local any = P(1)
+local script = C(any^1)
+local funct = P("function")
+local leftp = P("(")
+local rightp = P(")")
+local fname = spaces * funct * spaces * (C((1-space-left-leftp)^1) * Carg(1) / storefunction) * spaces * leftp
+
+local parsecode = name * ((uses * name) + Cc("")) * spaces * script
+local parsepreamble = name * ((used * name) + Cc("")) * spaces * script
local parsefunctions = (fname + any)^0
function javascripts.storecode(str)
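The renaming of the lpeg shortcuts above does not change the grammar: parsecode and parsepreamble still pick up an optional (braced) name, an optional uses/used clause and the script body, and parsefunctions scans the script for function heads and hands them to storefunction. A hypothetical illustration of what that scan records for a stored preamble n:

-- lpegmatch(parsefunctions,[[
--     function Reset_Fields(FieldSet) { ... }
--     function Set_Fields(FieldSet)   { ... }
-- ]],1,n)
--
-- afterwards functions["Reset_Fields"] == n and functions["Set_Fields"] == n,
-- which is what javascripts.code later uses to map a bare function name back
-- to the preamble that defines it.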
@@ -72,7 +74,7 @@ function javascripts.storepreamble(str) -- now later
preambles[n] = { name, used, script }
preambled[name] = n
if trace_javascript then
- report_javascripts("stored: preamble '%s', state '%s', order '%s'",name,used,n)
+ report_javascripts("stored preamble %a, state %a, order %a",name,used,n)
end
lpegmatch(parsefunctions,script,1,n)
end
@@ -84,7 +86,7 @@ function javascripts.setpreamble(name,script) -- now later
preambles[n] = { name, "now", script }
preambled[name] = n
if trace_javascript then
- report_javascripts("adapted: preamble '%s', state '%s', order '%s'",name,"now",n)
+ report_javascripts("adapted preamble %a, state %a, order %a",name,"now",n)
end
lpegmatch(parsefunctions,script,1,n)
end
@@ -96,14 +98,14 @@ function javascripts.addtopreamble(name,script)
if p then
preambles[p] = { "now", preambles[p] .. " ;\n" .. script }
if trace_javascript then
- report_javascripts("extended: preamble '%s', state '%s', order '%s'",name,"now",p)
+ report_javascripts("extended preamble %a, state %a, order %a",name,"now",p)
end
else
local n = #preambles + 1
preambles[n] = { name, "now", script }
preambled[name] = n
if trace_javascript then
- report_javascripts("stored: preamble '%s', state '%s', order '%s'",name,"now",n)
+ report_javascripts("stored preamble %a, state %a, order %a",name,"now",n)
end
lpegmatch(parsefunctions,script,1,n)
end
@@ -112,13 +114,13 @@ end
function javascripts.usepreamblenow(name) -- now later
if name and name ~= "" and name ~= variables.reset then -- todo: reset
- local names = utilities.parsers.settings_to_array(name)
+ local names = settings_to_array(name)
for i=1,#names do
local somename = names[i]
if not preambled[somename] then
preambles[preambled[somename]][2] = "now"
if trace_javascript then
- report_javascripts("used: preamble '%s', state '%s', order '%s'",somename,"now","auto")
+ report_javascripts("used preamble %a, state %a, order %a",somename,"now","auto")
end
end
end
@@ -139,15 +141,15 @@ function javascripts.code(name,arguments)
preambles[p][2] = "now"
if trace_javascript and not reported[name] then
reported[name] = true
- report_javascripts("used: code '%s', preamble '%s'",name,u)
+ report_javascripts("used code %a, preamble %a",name,u)
end
elseif trace_javascript and not reported[name] then
reported[name] = true
- report_javascripts("used: code '%s'",name)
+ report_javascripts("used code %a",name)
end
elseif trace_javascript and not reported[name] then
reported[name] = true
- report_javascripts("used: code '%s'",name)
+ report_javascripts("used code %a",name)
end
used = true
return code
@@ -157,17 +159,17 @@ function javascripts.code(name,arguments)
used = true
if trace_javascript and not reported[name] then
reported[name] = true
- report_javascripts("used: function '%s'",name)
+ report_javascripts("used function %a",name)
end
preambles[f][2] = "now" -- automatically tag preambles that define the function (as later)
if arguments then
local args = lpegmatch(splitter,arguments)
for i=1,#args do -- can be a helper
- args[i] = format("%q",args[i])
+ args[i] = formatters["%q"](args[i])
end
- return format("%s(%s)",name,concat(args,","))
+ return formatters["%s(%s)"](name,concat(args,","))
else
- return format("%s()",name)
+ return formatters["%s()"](name)
end
end
end
@@ -179,7 +181,7 @@ function javascripts.flushpreambles()
local preamble = preambles[i]
if preamble[2] == "now" then
if trace_javascript then
- report_javascripts("flushed: preamble '%s'",preamble[1])
+ report_javascripts("flushed preamble %a",preamble[1])
end
t[#t+1] = { preamble[1], preamble[3] }
end
@@ -194,13 +196,13 @@ local function action(name,foundname)
context.startnointerference()
context.startreadingfile()
context.input(foundname)
- status_javascripts("loaded: library '%s'",name)
+ status_javascripts("loaded: library %a",name)
context.stopreadingfile()
context.stopnointerference()
end
local function failure(name)
- report_javascripts("unknown: library '%s'",name)
+ report_javascripts("unknown library %a",name)
end
function javascripts.usescripts(name)
diff --git a/Master/texmf-dist/tex/context/base/l-boolean.lua b/Master/texmf-dist/tex/context/base/l-boolean.lua
index 2d502f164e8..f087f1a4ce2 100644
--- a/Master/texmf-dist/tex/context/base/l-boolean.lua
+++ b/Master/texmf-dist/tex/context/base/l-boolean.lua
@@ -15,29 +15,48 @@ function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
+function toboolean(str,tolerant) -- global
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
elseif str == "true" then
return true
elseif str == "false" then
return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
else
- return str
+ return str == "yes" or str == "on" or str == "t"
end
end
string.toboolean = toboolean
+function string.booleanstring(str)
+ if str == "0" then
+ return false
+ elseif str == "1" then
+ return true
+ elseif str == "" then
+ return false
+ elseif str == "false" then
+ return false
+ elseif str == "true" then
+ return true
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
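Read back from the new definitions, the behaviour of the rewritten boolean helpers comes down to the following (expected values inferred from the code above, assuming l-boolean.lua is loaded):

-- toboolean: strict unless asked to be tolerant
-- toboolean(nil)         --> false
-- toboolean("true")      --> true
-- toboolean("false")     --> false
-- toboolean("yes")       --> false   (not tolerant)
-- toboolean("yes",true)  --> true    (tolerant)
-- toboolean(0,true)      --> false
-- toboolean(2,true)      --> true

-- string.booleanstring: for string values such as "0", "1", "on", "yes"
-- string.booleanstring("0")   --> false
-- string.booleanstring("1")   --> true
-- string.booleanstring("on")  --> true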
diff --git a/Master/texmf-dist/tex/context/base/l-dir.lua b/Master/texmf-dist/tex/context/base/l-dir.lua
index 71de3114e97..00cda389913 100644
--- a/Master/texmf-dist/tex/context/base/l-dir.lua
+++ b/Master/texmf-dist/tex/context/base/l-dir.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['l-dir'] = {
-- dir.expandname will be merged with cleanpath and collapsepath
-local type = type
+local type, select = type, select
local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local lpegmatch = lpeg.match
@@ -24,6 +24,25 @@ local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+local chdir = lfs.chdir
+
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
-- handy
@@ -243,15 +262,15 @@ local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
if onwindows then
function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
end
end
local first, middle, last
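The mkdirs rewrite walks the varargs with select() instead of first collecting them in a table, so that (as the non-windows variant notes) nil and false arguments no longer truncate the list the way #t would at the first nil hole. A simplified sketch of that walk, modelled on the non-windows branch (joinparts is illustrative, not part of the patch):

local function joinparts(...)
    local str = ""
    for i=1,select("#",...) do      -- counts every argument, nil included
        local s = select(i,...)
        if s and s ~= "" then       -- skip empty, nil and false parts
            str = str == "" and s or (str .. "/" .. s)
        end
    end
    return str
end

-- joinparts("texmf","fonts","","map") --> "texmf/fonts/map"
-- joinparts("a",nil,"b")              --> "a/b"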
@@ -311,9 +330,9 @@ if onwindows then
else
function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
@@ -367,7 +386,7 @@ if onwindows then
function dir.expandname(str) -- will be merged with cleanpath and collapsepath
local first, nothing, last = match(str,"^(//)(//*)(.*)$")
if first then
- first = dir.current() .. "/"
+ first = dir.current() .. "/" -- dir.current sanitizes
end
if not first then
first, last = match(str,"^(//)/*(.*)$")
@@ -376,10 +395,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if lfs.chdir(first) then
+ if chdir(first) then
first = dir.current()
end
- lfs.chdir(d)
+ chdir(d)
end
end
if not first then
@@ -415,13 +434,16 @@ file.expandname = dir.expandname -- for convenience
local stack = { }
function dir.push(newdir)
- insert(stack,lfs.currentdir())
+ insert(stack,currentdir())
+ if newdir and newdir ~= "" then
+ chdir(newdir)
+ end
end
function dir.pop()
local d = remove(stack)
if d then
- lfs.chdir(d)
+ chdir(d)
end
return d
end
diff --git a/Master/texmf-dist/tex/context/base/l-file.lua b/Master/texmf-dist/tex/context/base/l-file.lua
index edfbc0f92b5..af86f93976d 100644
--- a/Master/texmf-dist/tex/context/base/l-file.lua
+++ b/Master/texmf-dist/tex/context/base/l-file.lua
@@ -11,62 +11,228 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
local file = file
+if not lfs then
+ lfs = optionalrequire("lfs")
+end
+
+if not lfs then
+
+ lfs = {
+ getcurrentdir = function()
+ return "."
+ end,
+ attributes = function()
+ return nil
+ end,
+ isfile = function(name)
+ local f = io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir = function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+
+elseif not lfs.isfile then
+
+ local attributes = lfs.attributes
+
+ function lfs.isdir(name)
+ return attributes(name,"mode") == "directory"
+ end
+
+ function lfs.isfile(name)
+ return attributes(name,"mode") == "file"
+ end
+
+ -- function lfs.isdir(name)
+ -- local a = attributes(name)
+ -- return a and a.mode == "directory"
+ -- end
+
+ -- function lfs.isfile(name)
+ -- local a = attributes(name)
+ -- return a and a.mode == "file"
+ -- end
+
+end
+
local insert, concat = table.insert, table.concat
-local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
+local match = string.match
local lpegmatch = lpeg.match
local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
+local checkedsplit = string.checkedsplit
+
+-- local patterns = file.patterns or { }
+-- file.patterns = patterns
+
+local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
+
+local colon = P(":")
+local period = P(".")
+local periods = P("..")
+local fwslash = P("/")
+local bwslash = P("\\")
+local slashes = S("\\/")
+local noperiod = 1-period
+local noslashes = 1-slashes
+local name = noperiod^1
+local suffix = period/"" * (1-period-slashes)^1 * -1
-local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
+----- pattern = C((noslashes^0 * slashes^1)^1)
+local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way
-local function dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
end
+local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
+
local function basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+ return name and lpegmatch(pattern,name) or name
end
--- local function nameonly(name)
--- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
--- end
+-- print(pathpart("file"))
+-- print(pathpart("dir/file"))
+-- print(pathpart("/dir/file"))
+-- print(basename("file"))
+-- print(basename("dir/file"))
+-- print(basename("/dir/file"))
+
+local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0
local function nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
+ return name and lpegmatch(pattern,name) or name
end
-local function extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
+
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+
+file.pathpart = pathpart
+file.basename = basename
+file.nameonly = nameonly
+file.suffixonly = suffixonly
+file.suffix = suffixonly
+
+file.dirname = pathpart -- obsolete
+file.extname = suffixonly -- obsolete
+
+-- actually these are schemes
+
+local drive = C(R("az","AZ")) * colon
+local path = C((noslashes^0 * slashes)^0)
+local suffix = period * C(P(1-period)^0 * P(-1))
+local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
+
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
+
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
+
+function file.splitname(str,splitdrive)
+ if not str then
+ -- error
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
end
-local function splitname(name)
- local n, s = match(name,"^(.+)%.([^/\\]-)$")
- return n or name, s or ""
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str) -- returns path, base+suffix
end
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
+---- stripslash = C((1 - P("/")^1*P(-1))^0)
+
+function file.nametotable(str,splitdrive)
+ if str then
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ -- if path ~= "" then
+ -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default
+ -- end
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
+ end
+end
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+
+----- pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1)
+local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1)
+
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
end
-function file.addsuffix(filename, suffix, criterium)
- if not suffix or suffix == "" then
+-- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
+--
+-- function file.addsuffix(name, suffix)
+-- local p = lpegmatch(pattern,name)
+-- if p then
+-- return name
+-- else
+-- return name .. "." .. suffix
+-- end
+-- end
+
+local suffix = period/"" * (1-period-slashes)^1 * -1
+local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix)
+
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix == "" then
return filename
elseif criterium == true then
return filename .. "." .. suffix
elseif not criterium then
- local n, s = splitname(filename)
+ local n, s = lpegmatch(pattern,filename)
if not s or s == "" then
return filename .. "." .. suffix
else
return filename
end
else
- local n, s = splitname(filename)
+ local n, s = lpegmatch(pattern,filename)
if s and s ~= "" then
local t = type(criterium)
if t == "table" then
@@ -83,84 +249,54 @@ function file.addsuffix(filename, suffix, criterium)
end
end
end
- return n .. "." .. suffix
+ return (n or filename) .. "." .. suffix
end
end
---~ print("1 " .. file.addsuffix("name","new") .. " -> name.new")
---~ print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
---~ print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
---~ print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
---~ print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
---~ print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
---~ print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
---~ print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
-end
-
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...) -- rather dirty
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if not a or a == "" then -- not a added
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
+-- print("1 " .. file.addsuffix("name","new") .. " -> name.new")
+-- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
+-- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
+-- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
+-- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
+-- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
+-- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
+-- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
+
+local suffix = period * (1-period-slashes)^1 * -1
+local pattern = Cs((1-suffix)^0)
+
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix ~= "" then
+ return lpegmatch(pattern,name) .. "." .. suffix
+ else
+ return name
end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
end
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
+--
+
+local reslasher = lpeg.replacer(P("\\"),"/")
+
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
-- We should be able to use:
--
+-- local writable = P(1) * P("w") * Cc(true)
+--
-- function file.is_writable(name)
--- local a = attributes(name) or attributes(dirname(name,"."))
--- return a and sub(a.permissions,2,2) == "w"
+-- local a = attributes(name) or attributes(pathpart(name,"."))
+-- return a and lpegmatch(writable,a.permissions) or false
-- end
--
--- But after some testing Taco and I came up with:
+-- But after some testing Taco and I came up with the more robust
+-- variant:
function file.is_writable(name)
- if lfs.isdir(name) then
+ if not name then
+ -- error
+ elseif lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
local f = io.open(name,"wb")
if f then
@@ -185,72 +321,108 @@ function file.is_writable(name)
return false
end
+local readable = P("r") * Cc(true)
+
function file.is_readable(name)
- local a = attributes(name)
- return a and sub(a.permissions,1,1) == "r"
+ if name then
+ local a = attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
end
 file.isreadable = file.is_readable -- deprecated
 file.iswritable = file.is_writable -- deprecated
--- todo: lpeg \\ / .. does not save much
-
-local checkedsplit = string.checkedsplit
+function file.size(name)
+ if name then
+ local a = attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
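+
+-- for instance (the file name is just an example, results depend on the file system):
+--
+-- print(file.is_readable("l-file.lua")) -- true when present and readable, false otherwise
+-- print(file.size("l-file.lua"))        -- size in bytes, 0 when the file is absent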
-function file.splitpath(str,separator) -- string
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
+function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
end
function file.joinpath(tab,separator) -- table
- return concat(tab,separator or io.pathseparator) -- can have trailing //
+ return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
+end
+
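+-- a quick sketch (a unix path separator ":" is assumed, the table is shown informally):
+--
+-- local t = file.splitpath("/usr/bin:/usr/local/bin") -- { "/usr/bin", "/usr/local/bin" }
+-- print(file.joinpath(t))                             -- /usr/bin:/usr/local/bin
+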
+local stripper = Cs(P(fwslash)^0/"" * reslasher)
+local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon
+local isroot = fwslash^1 * -1
+local hasroot = fwslash^1
+
+local deslasher = lpeg.replacer(S("\\/")^1,"/")
+
+-- If we have a network or prefix then there is a chance that we end up with two
+-- // in the middle ... we could prevent this if we (1) expand prefixes: and (2)
+-- split and rebuild as url. Of course we could assume no network paths (which
+-- makes sense) and assume either mapped drives (windows) or mounts (unix) but
+-- then we still have to deal with urls ... anyhow, multiple // are never a real
+-- problem but just ugly.
+
+function file.join(...)
+ local lst = { ... }
+ local one = lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two = lpegmatch(deslasher,concat(lst,"/",2))
+ return one .. "/" .. two
+ elseif lpegmatch(isroot,one) then
+ local two = lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/" .. two
+ end
+ elseif one == "" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
end
--- we can hash them weakly
-
---~ function file.collapsepath(str) -- fails on b.c/..
---~ str = gsub(str,"\\","/")
---~ if find(str,"/") then
---~ str = gsub(str,"^%./",(gsub(getcurrentdir(),"\\","/")) .. "/") -- ./xx in qualified
---~ str = gsub(str,"/%./","/")
---~ local n, m = 1, 1
---~ while n > 0 or m > 0 do
---~ str, n = gsub(str,"[^/%.]+/%.%.$","")
---~ str, m = gsub(str,"[^/%.]+/%.%./","")
---~ end
---~ str = gsub(str,"([^/])/$","%1")
---~ -- str = gsub(str,"^%./","") -- ./xx in qualified
---~ str = gsub(str,"/%.$","")
---~ end
---~ if str == "" then str = "." end
---~ return str
---~ end
---~
---~ The previous one fails on "a.b/c" so Taco came up with a split based
---~ variant. After some skyping we got it sort of compatible with the old
---~ one. After that the anchoring to currentdir was added in a better way.
---~ Of course there are some optimizations too. Finally we had to deal with
---~ windows drive prefixes and things like sys://.
+-- print(file.join("c:/whatever","name"))
+-- print(file.join("//","/y"))
+-- print(file.join("/","/y"))
+-- print(file.join("","/y"))
+-- print(file.join("/x/","/y"))
+-- print(file.join("x/","/y"))
+-- print(file.join("http://","/y"))
+-- print(file.join("http://a","/y"))
+-- print(file.join("http:///a","/y"))
+-- print(file.join("//nas-1","/y"))
+
+-- The previous one fails on "a.b/c" so Taco came up with a split based
+-- variant. After some skyping we got it sort of compatible with the old
+-- one. After that the anchoring to currentdir was added in a better way.
+-- Of course there are some optimizations too. Finally we had to deal with
+-- windows drive prefixes and things like sys://. Eventually gsubs and
+-- finds were replaced by lpegs.
+
+local drivespec = R("az","AZ")^1 * colon
+local anchors = fwslash + drivespec
+local untouched = periods + (1-period)^1 * P(-1)
+local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1))
+local absolute = fwslash
function file.collapsepath(str,anchor)
- if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
str = getcurrentdir() .. "/" .. str
end
if str == "" or str =="." then
return "."
- elseif find(str,"^%.%.") then
- str = gsub(str,"\\","/")
- return str
- elseif not find(str,"%.") then
- str = gsub(str,"\\","/")
- return str
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
end
- str = gsub(str,"\\","/")
- local starter, rest = match(str,"^(%a+:/*)(.-)$")
- if starter then
- str = rest
- end
- local oldelements = checkedsplit(str,"/")
+ local starter, oldelements = lpegmatch(splitstarter,str)
local newelements = { }
local i = #oldelements
while i > 0 do
@@ -280,29 +452,37 @@ function file.collapsepath(str,anchor)
return starter or "."
elseif starter then
return starter .. concat(newelements, '/')
- elseif find(str,"^/") then
+ elseif lpegmatch(absolute,str) then
return "/" .. concat(newelements,'/')
else
return concat(newelements, '/')
end
end
---~ local function test(str)
---~ print(string.format("%-20s %-15s %-15s",str,file.collapsepath(str),file.collapsepath(str,true)))
---~ end
---~ test("a/b.c/d") test("b.c/d") test("b.c/..")
---~ test("/") test("c:/..") test("sys://..")
---~ test("") test("./") test(".") test("..") test("./..") test("../..")
---~ test("a") test("./a") test("/a") test("a/../..")
---~ test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
---~ test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
+-- local function test(str)
+-- print(string.format("%-20s %-15s %-15s",str,file.collapsepath(str),file.collapsepath(str,true)))
+-- end
+-- test("a/b.c/d") test("b.c/d") test("b.c/..")
+-- test("/") test("c:/..") test("sys://..")
+-- test("") test("./") test(".") test("..") test("./..") test("../..")
+-- test("a") test("./a") test("/a") test("a/../..")
+-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
+-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
+
+local validchars = R("az","09","AZ","--","..")
+local pattern_a = lpeg.replacer(1-validchars)
+local pattern_a = Cs((validchars + P(1)/"-")^1)
+local whatever = P("-")^0 / ""
+local pattern_b = Cs(whatever * (1 - whatever * -1)^1)
function file.robustname(str,strict)
- str = gsub(str,"[^%a%d%/%-%.\\]+","-")
- if strict then
- return lower(gsub(str,"^%-*(.-)%-*$","%1"))
- else
- return str
+ if str then
+ str = lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking)
+ else
+ return str
+ end
end
end
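+
+-- a small illustration (the expected output is an assumption, not a test run):
+--
+-- print(file.robustname("foo bar+baz.tex"))  -- foo-bar-baz.tex
+-- print(file.robustname("--foo bar--",true)) -- foo-bar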
@@ -310,105 +490,25 @@ file.readdata = io.loaddata
file.savedata = io.savedata
function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
+ if oldname and newname then
+ local data = io.loaddata(oldname)
+ if data and data ~= "" then
+ file.savedata(newname,data)
+ end
+ end
end
--- lpeg variants, slightly faster, not always
-
---~ local period = P(".")
---~ local slashes = S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * Cp() * ((noperiod^1 * period)^1 * Cp() + P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
-
-- also rewrite previous
local letter = R("az","AZ") + S("_-+")
local separator = P("://")
-local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
-local rootbased = P("/") + letter*P(":")
+local qualified = period^0 * fwslash
+ + letter * colon
+ + letter^1 * separator
+ + letter^1 * fwslash
+local rootbased = fwslash
+ + letter * colon
lpeg.patterns.qualified = qualified
lpeg.patterns.rootbased = rootbased
@@ -416,58 +516,11 @@ lpeg.patterns.rootbased = rootbased
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename) ~= nil
end
function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
-end
-
--- actually these are schemes
-
-local slash = S("\\/")
-local period = P(".")
-local drive = C(R("az","AZ")) * P(":")
-local path = C(((1-slash)^0 * slash)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
-
-function file.splitname(str,splitdrive)
- if splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
-end
-
-function file.nametotable(str,splitdrive) -- returns table
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
+ return filename and lpegmatch(rootbased,filename) ~= nil
end
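+
+-- for example (the true/false values are assumed):
+--
+-- print(file.is_qualified_path("./foo/bar.tex")) -- true
+-- print(file.is_qualified_path("bar.tex"))       -- false
+-- print(file.is_rootbased_path("c:/foo.tex"))    -- true
+-- print(file.is_rootbased_path("foo/bar.tex"))   -- false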
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -477,18 +530,35 @@ end
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = getcurrentdir
---~ function getcurrentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
+-- -- maybe:
+--
+-- if os.type == "windows" then
+-- local currentdir = getcurrentdir
+-- function getcurrentdir()
+-- return lpegmatch(reslasher,currentdir())
+-- end
+-- end
-- for myself:
function file.strip(name,dir)
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
+ if name then
+ local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
+ return a ~= "" and a or name
+ end
end
+
+-- local debuglist = {
+-- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname",
+-- "addsuffix", "removesuffix", "replacesuffix", "join",
+-- "strip","collapsepath", "joinpath", "splitpath",
+-- }
+
+-- for i=1,#debuglist do
+-- local name = debuglist[i]
+-- local f = file[name]
+-- file[name] = function(...)
+-- print(name,f(...))
+-- return f(...)
+-- end
+-- end
diff --git a/Master/texmf-dist/tex/context/base/l-function.lua b/Master/texmf-dist/tex/context/base/l-function.lua
new file mode 100644
index 00000000000..7ded8ceecd5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/l-function.lua
@@ -0,0 +1,11 @@
+if not modules then modules = { } end modules ['l-functions'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+functions = functions or { }
+
+function functions.dummy() end
diff --git a/Master/texmf-dist/tex/context/base/l-io.lua b/Master/texmf-dist/tex/context/base/l-io.lua
index 4f27dc1dcd1..06e1fb5efc8 100644
--- a/Master/texmf-dist/tex/context/base/l-io.lua
+++ b/Master/texmf-dist/tex/context/base/l-io.lua
@@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['l-io'] = {
local io = io
local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
local concat = table.concat
+local floor = math.floor
local type = type
if string.find(os.getenv("PATH"),";") then
@@ -17,14 +18,53 @@ else
io.fileseparator, io.pathseparator = "/" , ":"
end
-function io.loaddata(filename,textmode)
+local function readall(f)
+ return f:read("*all")
+end
+
+-- The next one is up to 50% faster on large files and uses less memory due to
+-- fewer large intermediate allocations. This phenomenon was discussed on the
+-- luatex dev list.
+
+local function readall(f)
+ local size = f:seek("end")
+ if size == 0 then
+ return ""
+ elseif size < 1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done = f:seek("set",0)
+ if size < 1024*1024 then
+ step = 1024 * 1024
+ elseif size > 16*1024*1024 then
+ step = 16*1024*1024
+ else
+ step = floor(size/(1024*1024)) * 1024 * 1024 / 8
+ end
+ local data = { }
+ while true do
+ local r = f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1] = r
+ end
+ end
+ end
+end
+
+io.readall = readall
+
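+-- a usage sketch (the file name is made up):
+--
+-- local f = io.open("texmf.log","rb")
+-- if f then
+--     local data = io.readall(f) -- chunked reading kicks in for large files
+--     f:close()
+--     print(#data)
+-- end
+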
+function io.loaddata(filename,textmode) -- return nil if empty
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
- local data = f:read('*all')
+-- local data = f:read('*all')
+ local data = readall(f)
f:close()
- return data
- else
- return nil
+ if #data > 0 then
+ return data
+ end
end
end
@@ -46,12 +86,53 @@ function io.savedata(filename,data,joiner)
end
end
+-- we can also chunk this one if needed: io.lines(filename,chunksize,"*l")
+
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if not f then
+ -- no file
+ elseif n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ f:close()
+ if #line > 0 then
+ return line
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+ if #data > 0 then
+ return data
+ end
+ end
+end
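+-- for instance (file names made up, nil is returned for empty or missing files):
+--
+-- print(io.loadlines("somefile.txt"))   -- the first line
+-- print(io.loadlines("somefile.txt",3)) -- the first three lines, joined by newlines
+-- print(io.loadchunk("somefile.bin",8)) -- the first 8 bytes
+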
+
function io.exists(filename)
local f = io.open(filename)
if f == nil then
return false
else
- assert(f:close())
+ f:close()
return true
end
end
@@ -62,7 +143,7 @@ function io.size(filename)
return 0
else
local s = f:seek("end")
- assert(f:close())
+ f:close()
return s
end
end
@@ -70,9 +151,13 @@ end
function io.noflines(f)
if type(f) == "string" then
 		local f = io.open(f)
- local n = f and io.noflines(f) or 0
- assert(f:close())
- return n
+ if f then
+ local n = f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
else
local n = 0
for _ in f:lines() do
@@ -249,7 +334,7 @@ function io.readstring(f,n,m)
f:seek("set",n)
n = m
end
- local str = gsub(f:read(n),"%z","")
+ local str = gsub(f:read(n),"\000","")
return str
end
@@ -257,3 +342,21 @@ end
if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
+-- This works quite ok:
+--
+-- function io.piped(command,writer)
+-- local pipe = io.popen(command)
+-- -- for line in pipe:lines() do
+-- -- print(line)
+-- -- end
+-- while true do
+-- local line = pipe:read(1)
+-- if not line then
+-- break
+-- elseif line ~= "\n" then
+-- writer(line)
+-- end
+-- end
+-- return pipe:close() -- ok, status, (error)code
+-- end
diff --git a/Master/texmf-dist/tex/context/base/l-lpeg.lua b/Master/texmf-dist/tex/context/base/l-lpeg.lua
index 13294ab0d63..323c73b6967 100644
--- a/Master/texmf-dist/tex/context/base/l-lpeg.lua
+++ b/Master/texmf-dist/tex/context/base/l-lpeg.lua
@@ -6,14 +6,16 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-
-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-local lpeg = require("lpeg")
+-- move utf -> l-unicode
+-- move string -> l-string or keep it here
+
+lpeg = require("lpeg")
-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-local report = texio and texio.write_nl or print
+-- some code will move to unicode and string
-- local lpmatch = lpeg.match
-- local lpprint = lpeg.print
@@ -32,25 +34,34 @@ local report = texio and texio.write_nl or print
-- local lpcmt = lpeg.Cmt
-- local lpcarg = lpeg.Carg
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
+-- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
+
+-- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end
+-- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end
+-- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end
+-- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end
+-- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end
+-- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end
+-- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end
+-- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end
+-- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end
+-- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end
+-- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end
+-- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end
+-- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end
+-- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end
+
+local type, next, tostring = type, next, tostring
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
+----- mod, div = math.mod, math.div
+local floor = math.floor
+
+local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt
+local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
+
+-- let's start with an inspector:
+
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -58,12 +69,6 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
local anything = P(1)
local endofstring = P(-1)
@@ -90,9 +95,12 @@ local utfbom_8 = P('\239\187\191')
local utfbom = utfbom_32_be + utfbom_32_le
+ utfbom_16_be + utfbom_16_le
+ utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
+local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
+ + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
+ + utfbom_8 * Cc(3) + Cc(0)
local utf8next = R("\128\191")
@@ -102,21 +110,47 @@ patterns.utf8three = R("\224\239") * utf8next * utf8next
patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
patterns.utfbom = utfbom
patterns.utftype = utftype
+patterns.utfoffset = utfoffset
local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast
+
patterns.utf8 = utf8char
patterns.utf8char = utf8char
+patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one
patterns.validutf8 = validutf8char
patterns.validutf8char = validutf8char
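+
+-- a quick check (results assumed): the bom decides the type, no bom means utf-8
+--
+-- print(lpegmatch(utftype,"\239\187\191some text"))   -- utf-8
+-- print(lpegmatch(utfoffset,"\239\187\191some text")) -- 3
+-- print(lpegmatch(utfoffset,"some text"))             -- 0
+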
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+local nonspacer = 1 - spacer
+local nonwhitespace = 1 - whitespace
+
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
+patterns.nonspacer = nonspacer
+patterns.nonwhitespace = nonwhitespace
+
+local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
+
+----- collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0)
+local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
+
+patterns.stripper = stripper
+patterns.collapser = collapser
+
patterns.digit = digit
patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.unsigned = digit^0 * P('.') * digit^1
+patterns.float = sign^0 * patterns.unsigned
+patterns.cunsigned = digit^0 * P(',') * digit^1
+patterns.cfloat = sign^0 * patterns.cunsigned
patterns.number = patterns.float + patterns.integer
patterns.cnumber = patterns.cfloat + patterns.integer
patterns.oct = P("0") * R("07")^1
@@ -130,16 +164,11 @@ patterns.letter = patterns.lowercase + patterns.uppercase
patterns.space = space
patterns.tab = P("\t")
patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
patterns.newline = newline
patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
patterns.equal = P("=")
patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.commaspacer = P(",") * spacer^0
patterns.period = P(".")
patterns.colon = P(":")
patterns.semicolon = P(";")
@@ -149,23 +178,33 @@ patterns.squote = squote
patterns.dquote = dquote
patterns.nosquote = (escaped + (1-squote))^0
patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle
patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
patterns.unspacer = ((patterns.spacer^1)/"")^0
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
+patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1)
+
patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
patterns.beginline = #(1-newline)
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
+patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0))
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
+local function anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
end
function lpeg.splitter(pattern, action)
@@ -214,13 +253,13 @@ function string.splitup(str,separator)
if not separator then
separator = ","
end
- return match(splitters_m[separator] or splitat(separator),str)
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
---~ local p = splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = splitat("->",true) print(match(p,"oeps")) -- oeps
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps
local cache = { }
@@ -230,16 +269,20 @@ function lpeg.split(separator,str)
c = tsplitat(separator)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
end
- return match(c,str)
end
local spacing = patterns.spacer^0 * newline -- sort of strip
@@ -249,29 +292,15 @@ local content = (empty + nonempty)^1
patterns.textline = content
---~ local linesplitter = Ct(content^0)
---~
---~ function string.splitlines(str)
---~ return match(linesplitter,str)
---~ end
-
local linesplitter = tsplitat(newline)
patterns.linesplitter = linesplitter
function string.splitlines(str)
- return match(linesplitter,str)
+ return lpegmatch(linesplitter,str)
end
-local utflinesplitter = utfbom^-1 * tsplitat(newline)
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
-end
-
---~ lpeg.splitters = cache -- no longer public
+-- lpeg.splitters = cache -- no longer public
local cache = { }
@@ -283,7 +312,7 @@ function lpeg.checkedsplit(separator,str)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.checkedsplit(str,separator)
@@ -294,10 +323,10 @@ function string.checkedsplit(str,separator)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
---~ from roberto's site:
+-- from roberto's site:
local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
@@ -309,10 +338,10 @@ patterns.utf8byte = utf8byte
--~ local str = " a b c d "
---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.stripper("ab") print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.keeper("ab") print("["..lpeg.match(s,str).."]")
+--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]")
local cache = { }
@@ -345,38 +374,83 @@ function lpeg.keeper(str)
end
function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
+ return (P(str) + P(true)) * Cs(anything^0)
end
function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
+ return Cs((1 - P(str) * endofstring)^0)
end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(one,two)
+-- todo: cache when string
+
+function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys
+ local pattern
+ local u = isutf and utf8char or 1
if type(one) == "table" then
local no = #one
- if no > 0 then
- local p
+ local p = P(false)
+ if no == 0 then
+ for k, v in next, one do
+ p = p + P(k) / v
+ end
+ pattern = Cs((p + u)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ -- pattern = Cs(((1-one)^1 + one/two)^0)
+ pattern = Cs((one/two + u)^0)
+ else
for i=1,no do
local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
+ p = p + P(o[1]) / o[2]
+ end
+ pattern = Cs((p + u)^0)
+ end
+ else
+ pattern = Cs((P(one)/(two or "") + u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst) == "table" then
+ pattern = P(false)
+ if #lst == 0 then
+ for k, v in next, lst do
+ pattern = pattern + P(k) -- ignore key, so we can use a replacer table
+ end
+ else
+ for i=1,#lst do
+ pattern = pattern + P(lst[i])
end
- return Cs((p + 1)^0)
end
else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
+ pattern = P(lst)
+ end
+ pattern = (1-pattern)^0 * pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
end
end
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
+
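+-- and for the finder (the positions are assumptions):
+--
+-- print(lpegmatch(lpeg.finder("ef"),"abcdefgh"))      -- 7 (the slot after the match)
+-- print(lpeg.finder({ "cd", "ef" },true)("abcdefgh")) -- 5
+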
local splitters_f, splitters_s = { }, { }
function lpeg.firstofsplit(separator) -- always return value
@@ -404,102 +478,47 @@ function lpeg.balancer(left,right)
return P { left * ((1 - left - right) + V(1))^0 * right }
end
---~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
---~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
---~ print(3,match(lpeg.firstofsplit(":"),"bc"))
---~ print(4,match(lpeg.secondofsplit(":"),"bc:de"))
---~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty
---~ print(6,match(lpeg.secondofsplit(":",""),"bc"))
---~ print(7,match(lpeg.secondofsplit(":"),"bc"))
---~ print(9,match(lpeg.secondofsplit(":","123"),"bc"))
-
---~ -- slower:
---~
---~ function lpeg.counter(pattern)
---~ local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
---~ return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
---~ end
+-- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de"))
+-- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty
+-- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc"))
+-- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de"))
+-- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty
+-- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc"))
+-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
+-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
+
+-- -- slower:
+--
+-- function lpeg.counter(pattern)
+-- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
+-- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
+-- end
local nany = utf8char/""
function lpeg.counter(pattern)
pattern = Cs((P(pattern)/" " + nany)^0)
return function(str)
- return #match(pattern,str)
+ return #lpegmatch(pattern,str)
end
end
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+-- utf extensions
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
+utf = utf or (unicode and unicode.utf8) or { }
--- utf extensies
+local utfcharacters = utf and utf.characters or string.utfcharacters
+local utfgmatch = utf and utf.gmatch
+local utfchar = utf and utf.char
lpeg.UP = lpeg.P
if utfcharacters then
function lpeg.US(str)
- local p
+ local p = P(false)
for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
+ p = p + P(uc)
end
return p
end
@@ -508,13 +527,9 @@ if utfcharacters then
elseif utfgmatch then
function lpeg.US(str)
- local p
+ local p = P(false)
for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
+ p = p + P(uc)
end
return p
end
@@ -522,23 +537,17 @@ elseif utfgmatch then
else
function lpeg.US(str)
- local p
+ local p = P(false)
local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
+ p = p + P(uc)
end
- match((utf8char/f)^0,str)
+ lpegmatch((utf8char/f)^0,str)
return p
end
end
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture
function lpeg.UR(str,more)
local first, last
@@ -546,47 +555,50 @@ function lpeg.UR(str,more)
first = str
last = more or first
else
- first, last = match(range,str)
+ first, last = lpegmatch(range,str)
if not last then
return P(str)
end
end
if first == last then
return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
+ elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterium
+ local p = P(false)
for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
+ p = p + P(utfchar(i))
end
return p -- nil when invalid range
else
local f = function(b)
return b >= first and b <= last
end
+ -- tricky, these nested captures
return utf8byte / f -- nil when invalid range
end
end
---~ lpeg.print(lpeg.R("ab","cd","gh"))
---~ lpeg.print(lpeg.P("a","b","c"))
---~ lpeg.print(lpeg.S("a","b","c"))
+-- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω"))
+
+-- lpeg.print(lpeg.R("ab","cd","gh"))
+-- lpeg.print(lpeg.P("a","b","c"))
+-- lpeg.print(lpeg.S("a","b","c"))
+
+-- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à")))
+-- print(lpeg.count("äáàa",lpeg.UP("áà")))
+-- print(lpeg.count("äáàa",lpeg.US("àá")))
+-- print(lpeg.count("äáàa",lpeg.UR("aá")))
+-- print(lpeg.count("äáàa",lpeg.UR("àá")))
+-- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF)))
---~ print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à")))
---~ print(lpeg.count("äáàa",lpeg.UP("áà")))
---~ print(lpeg.count("äáàa",lpeg.US("àá")))
---~ print(lpeg.count("äáàa",lpeg.UR("aá")))
---~ print(lpeg.count("äáàa",lpeg.UR("àá")))
---~ print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF)))
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
if type(list) ~= "table" then
list = { list, ... }
end
- -- sort(list) -- longest match first
+ -- table.sort(list) -- longest match first
local p = P(list[1])
for l=2,#list do
p = p + P(list[l])
@@ -594,20 +606,34 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-- For the moment here, but it might move to utilities. Beware, we need to
 -- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
-- loop back from the end cq. prepend.
-local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+local sort = table.sort
+
+local function copyindexed(old)
+ local new = { }
+ for i=1,#old do
+        new[i] = old[i]
+ end
+ return new
+end
+
+local function sortedkeys(tab)
+ local keys, s = { }, 0
+ for key,_ in next, tab do
+ s = s + 1
+ keys[s] = key
+ end
+ sort(keys)
+ return keys
+end
function lpeg.append(list,pp,delayed,checked)
local p = pp
if #list > 0 then
- local keys = fastcopy(list)
+ local keys = copyindexed(list)
sort(keys)
for i=#keys,1,-1 do
local k = keys[i]
@@ -704,8 +730,10 @@ end
local function make(t)
local p
--- for k, v in next, t do
- for k, v in table.sortedhash(t) do
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = t[k]
if not p then
if next(v) then
p = P(k) * make(v)
@@ -723,7 +751,7 @@ local function make(t)
return p
end
-function lpeg.utfchartabletopattern(list)
+function lpeg.utfchartabletopattern(list) -- goes to util-lpg
local tree = { }
for i=1,#list do
local t = tree
@@ -754,3 +782,71 @@ end
-- utfchar(0x202F), -- narrownobreakspace
-- utfchar(0x205F), -- math thinspace
-- } )
+
+-- a few handy ones:
+--
+-- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3
+
+patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol
+
+-- The next pattern^n variant is based on an approach suggested
+-- by Roberto: constructing a big repetition in chunks.
+--
+-- Being sparse is not needed, and only complicates matters, and
+-- the number of redundant entries is not that large.
+
+local function nextstep(n,step,result)
+ local m = n % step -- mod(n,step)
+ local d = floor(n/step) -- div(n,step)
+ if d > 0 then
+ local v = V(tostring(step))
+ local s = result.start
+ for i=1,d do
+ if s then
+ s = v * s
+ else
+ s = v
+ end
+ end
+ result.start = s
+ end
+ if step > 1 and result.start then
+ local v = V(tostring(step/2))
+ result[tostring(step)] = v * v
+ end
+ if step > 0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start", ["1"] = pattern }))
+end
+
+-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1)
+-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56"
+-- inspect(p)
+-- print(lpeg.match(p,s))
+
+-- moved here (before util-str)
+
+local digit = R("09")
+local period = P(".")
+local zero = P("0")
+local trailingzeros = zero^0 * -digit -- suggested by Roberto R
+local case_1 = period * trailingzeros / ""
+local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
+local number = digit^1 * (case_1 + case_2)
+local stripper = Cs((number + 1)^0)
+
+lpeg.patterns.stripzeros = stripper
+
+-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
+-- collectgarbage("collect")
+-- str = string.rep(sample,10000)
+-- local ts = os.clock()
+-- lpegmatch(stripper,str)
+-- print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+
diff --git a/Master/texmf-dist/tex/context/base/l-lua.lua b/Master/texmf-dist/tex/context/base/l-lua.lua
new file mode 100644
index 00000000000..538c65d7ea1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/l-lua.lua
@@ -0,0 +1,393 @@
+if not modules then modules = { } end modules ['l-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- compatibility hacks ... try to avoid usage
+
+local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+
+_MAJORVERSION = tonumber(major) or 5
+_MINORVERSION = tonumber(minor) or 1
+_LUAVERSION = _MAJORVERSION + _MINORVERSION/10
+
+-- lpeg
+
+if not lpeg then
+ lpeg = require("lpeg")
+end
+
+-- basics:
+
+if loadstring then
+
+ local loadnormal = load
+
+ function load(first,...)
+ if type(first) == "string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+
+else
+
+ loadstring = load
+
+end
+
+-- table:
+
+-- At some point it was announced that ipairs would be dropped, which makes
+-- sense. As we already used the for loop and # in most places the impact on
+-- ConTeXt was not that large; the remaining ipairs have already been replaced.
+-- Hm, actually ipairs was retained, but we no longer use it anyway (nor
+-- pairs).
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- The unpack function has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+
+ table.unpack = _G.unpack
+
+elseif not unpack then
+
+ _G.unpack = table.unpack
+
+end
+
+-- package:
+
+-- if not package.searchers then
+--
+-- package.searchers = package.loaders -- 5.2
+--
+-- elseif not package.loaders then
+--
+-- package.loaders = package.searchers
+--
+-- end
+
+if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type
+
+ package.loaders = package.searchers
+
+end
+
+-- moved from util-deb to here:
+
+local print, select, tostring = print, select, tostring
+
+local inspectors = { }
+
+function setinspector(inspector) -- global function
+ inspectors[#inspectors+1] = inspector
+end
+
+function inspect(...) -- global function
+ for s=1,select("#",...) do
+ local value = select(s,...)
+ local done = false
+ for i=1,#inspectors do
+ done = inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+
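+-- for instance (what gets pretty printed depends on the inspectors registered later):
+--
+-- inspect("just a string") -- typically falls back to print
+-- inspect(lpeg.P("a"))     -- handled by the lpeg inspector registered in l-lpeg.lua
+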
+--
+
+local dummy = function() end
+
+function optionalrequire(...)
+ local ok, result = xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+
+-- Code moved from data-lua and changed into a plug-in.
+
+-- We overload the regular loader. We do so because we operate mostly in
+-- tds and use our own loader code. Alternatively we could use a more
+-- extensive definition of package.path and package.cpath but even then
+-- we're not done. Also, we now have better tracing.
+--
+-- -- local mylib = require("libtest")
+-- -- local mysql = require("luasql.mysql")
+
+local type = type
+local gsub, format = string.gsub, string.format
+
+local package = package
+local searchers = package.searchers or package.loaders
+
+local libpaths = nil
+local clibpaths = nil
+local libhash = { }
+local clibhash = { }
+local libextras = { }
+local clibextras = { }
+
+-- dummies
+
+local filejoin = file and file.join or function(path,name) return path .. "/" .. name end
+local isreadable = file and file.is_readable or function(name) local f = io.open(name) if f then f:close() return true end end
+local addsuffix = file and file.addsuffix or function(name,suffix) return name .. "." .. suffix end
+
+--
+
+local function cleanpath(path) -- hm, don't we have a helper for this?
+ return path
+end
+
+local helpers = package.helpers or {
+ libpaths = function() return { } end,
+ clibpaths = function() return { } end,
+ cleanpath = cleanpath,
+ trace = false,
+ report = function(...) print(format(...)) end,
+}
+package.helpers = helpers
+
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+
+package.libpaths = getlibpaths
+package.clibpaths = getclibpaths
+
+local function addpath(what,paths,extras,hash,...)
+ local pathlist = { ... }
+ local cleanpath = helpers.cleanpath
+ local trace = helpers.trace
+ local report = helpers.report
+ --
+ local function add(path)
+ local path = cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths +1] = path
+ extras[#extras+1] = path
+ end
+ end
+ --
+ for p=1,#pathlist do
+ local path = pathlist[p]
+ if type(path) == "table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths, extras
+end
+
+function package.extralibpath(...)
+ libpaths, libextras = addpath("lua", getlibpaths(), libextras, libhash,...)
+end
+
+function package.extraclibpath(...)
+ clibpaths, clibextras = addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
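+
+-- a usage sketch (the paths and the module name are made up):
+--
+-- package.extralibpath("extra/lua","more/lua")
+-- package.extraclibpath("extra/lib")
+-- local mymodule = require("mymodule") -- the extra paths are now searched too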
+
+-- function package.extralibpath(...)
+-- libpaths = getlibpaths()
+-- local pathlist = { ... }
+-- local cleanpath = helpers.cleanpath
+-- local trace = helpers.trace
+-- local report = helpers.report
+-- --
+-- local function add(path)
+-- local path = cleanpath(path)
+-- if not libhash[path] then
+-- if trace then
+-- report("extra lua path: %s",path)
+-- end
+-- libextras[#libextras+1] = path
+-- libpaths [#libpaths +1] = path
+-- end
+-- end
+-- --
+-- for p=1,#pathlist do
+-- local path = pathlist[p]
+-- if type(path) == "table" then
+-- for i=1,#path do
+-- add(path[i])
+-- end
+-- else
+-- add(path)
+-- end
+-- end
+-- end
+
+-- function package.extraclibpath(...)
+-- clibpaths = getclibpaths()
+-- local pathlist = { ... }
+-- local cleanpath = helpers.cleanpath
+-- local trace = helpers.trace
+-- local report = helpers.report
+-- --
+-- local function add(path)
+-- local path = cleanpath(path)
+-- if not clibhash[path] then
+-- if trace then
+-- report("extra lib path: %s",path)
+-- end
+-- clibextras[#clibextras+1] = path
+-- clibpaths [#clibpaths +1] = path
+-- end
+-- end
+-- --
+-- for p=1,#pathlist do
+-- local path = pathlist[p]
+-- if type(path) == "table" then
+-- for i=1,#path do
+-- add(path[i])
+-- end
+-- else
+-- add(path)
+-- end
+-- end
+-- end
+
+if not searchers[-2] then
+ -- use package-path and package-cpath
+ searchers[-2] = searchers[2]
+end
+
+searchers[2] = function(name)
+ return helpers.loaded(name)
+end
+
+searchers[3] = nil -- get rid of the built in one
+
+local function loadedaslib(resolved,rawname)
+ -- local init = "luaopen_" .. string.match(rawname,".-([^%.]+)$")
+ local init = "luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true, searchers[-2](name) -- the original
+end
+
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace = helpers.trace
+ local report = helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = filejoin(path,name)
+ if trace then -- mode detail
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true, loadedaslib(resolved,rawname)
+ else
+ return true, loadfile(resolved)
+ end
+ end
+ end
+end
+
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+
+helpers.loadedaslib = loadedaslib
+helpers.loadedbylua = loadedbylua
+helpers.loadedbypath = loadedbypath
+helpers.notloaded = notloaded
+
+-- alternatively we could set the package.searchers
+
+function helpers.loaded(name)
+ local thename = gsub(name,"%.","/")
+ local luaname = addsuffix(thename,"lua")
+ local libname = addsuffix(thename,os.libsuffix or "so") -- brrr
+ local libpaths = getlibpaths()
+ local clibpaths = getclibpaths()
+ local done, result = loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done, result = loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done, result = loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done, result = loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
diff --git a/Master/texmf-dist/tex/context/base/l-md5.lua b/Master/texmf-dist/tex/context/base/l-md5.lua
index 1d471c966ca..8ac20a5a5fb 100644
--- a/Master/texmf-dist/tex/context/base/l-md5.lua
+++ b/Master/texmf-dist/tex/context/base/l-md5.lua
@@ -7,39 +7,85 @@ if not modules then modules = { } end modules ['l-md5'] = {
-- This also provides file checksums and checkers.
+if not md5 then
+ md5 = optionalrequire("md5")
+end
+
+if not md5 then
+ md5 = {
+ sum = function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa = function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+
local md5, file = md5, file
local gsub, format, byte = string.gsub, string.format, string.byte
+local md5sum = md5.sum
local function convert(str,fmt)
- return (gsub(md5.sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
end
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
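+
+-- a quick sanity check (the value is the well known md5 of "test", assuming the md5
+-- library is present):
+--
+-- print(md5.hex("test")) -- 098f6bcd4621d373cade4e832627b4f6
+-- print(md5.HEX("test")) -- 098F6BCD4621D373CADE4E832627B4F6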
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
+-- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match
+--
+-- if not md5.HEX then
+-- local function remap(chr) return format("%02X",byte(chr)) end
+-- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+--
+-- if not md5.hex then
+-- local function remap(chr) return format("%02x",byte(chr)) end
+-- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+--
+-- if not md5.dec then
+-- local function remap(chr) return format("%03i",byte(chr)) end
+-- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+
+-- if not md5.HEX then
+-- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 )
+-- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end
+-- end
+--
+-- if not md5.hex then
+-- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 )
+-- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end
+-- end
+--
+-- if not md5.dec then
+-- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 )
+-- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end
+-- end
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold) -- size modification access change
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime = lfs.attributes(newname,"modification")
+ if not newtime then
+			return true -- no new file, so updating is needed
+		elseif newtime >= oldtime then
+			return false -- new file is up to date, so no updating needed
+ elseif oldtime - newtime < (threshold or 1) then
+ return false -- new file is probably still okay
+ else
+ return true -- new file has to be updated
+ end
else
- return true
+ return false -- no old file, so no updating needed
+ end
+end
+
+file.needs_updating = file.needsupdating
+
+function file.syncmtimes(oldname,newname)
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
end
end
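+
+-- for instance (illustrative, the file names are made up):
+--
+-- if file.needsupdating("music.mp","music.pdf",2) then
+--     -- regenerate music.pdf here
+--     file.syncmtimes("music.mp","music.pdf")
+-- end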
@@ -61,7 +107,7 @@ function file.loadchecksum(name)
return nil
end
-function file.savechecksum(name, checksum)
+function file.savechecksum(name,checksum)
if not checksum then checksum = file.checksum(name) end
if checksum then
io.savedata(name .. ".md5",checksum)
diff --git a/Master/texmf-dist/tex/context/base/l-number.lua b/Master/texmf-dist/tex/context/base/l-number.lua
index b1ffb9ca990..001ca31f773 100644
--- a/Master/texmf-dist/tex/context/base/l-number.lua
+++ b/Master/texmf-dist/tex/context/base/l-number.lua
@@ -6,9 +6,10 @@ if not modules then modules = { } end modules ['l-number'] = {
license = "see context related readme files"
}
--- this module will be replaced when we have the bit library
+-- this module will be replaced when we have the bit library .. the number based sets
+-- might go away
-local tostring = tostring
+local tostring, tonumber = tostring, tonumber
local format, floor, match, rep = string.format, math.floor, string.match, string.rep
local concat, insert = table.concat, table.insert
local lpegmatch = lpeg.match
@@ -16,10 +17,129 @@ local lpegmatch = lpeg.match
number = number or { }
local number = number
--- a,b,c,d,e,f = number.toset(100101)
+if bit32 then -- I wonder if this is faster
+
+ local btest, bor = bit32.btest, bit32.bor
+
+ function number.bit(p)
+ return 2 ^ (p - 1) -- 1-based indexing
+ end
+
+ number.hasbit = btest
+ number.setbit = bor
+
+ function number.setbit(x,p) -- why not bor?
+ return btest(x,p) and x or x + p
+ end
+
+ function number.clearbit(x,p)
+ return btest(x,p) and x - p or x
+ end
+
+else
+
+ -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
+
+ function number.bit(p)
+ return 2 ^ (p - 1) -- 1-based indexing
+ end
+
+ function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
+ return x % (p + p) >= p
+ end
+
+ function number.setbit(x, p)
+ return (x % (p + p) >= p) and x or x + p
+ end
+
+ function number.clearbit(x, p)
+ return (x % (p + p) >= p) and x - p or x
+ end
-function number.toset(n)
- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+end
+
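+-- a small usage sketch (positions are 1-based, so number.bit(3) == 4):
+--
+-- local b3 = number.bit(3)
+-- print(number.hasbit(5,b3))   -- true (101)
+-- print(number.setbit(1,b3))   -- 5
+-- print(number.clearbit(5,b3)) -- 1
+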
+-- print(number.tobitstring(8))
+-- print(number.tobitstring(14))
+-- print(number.tobitstring(66))
+-- print(number.tobitstring(0x00))
+-- print(number.tobitstring(0xFF))
+-- print(number.tobitstring(46260767936,4))
+
+if bit32 then
+
+ local bextract = bit32.extract
+
+ local t = {
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ }
+
+ function number.tobitstring(b,m)
+ -- if really needed we can speed this one up
+ -- because small numbers need less extraction
+ local n = 32
+ for i=0,31 do
+ local v = bextract(b,i)
+ local k = 32 - i
+ if v == 1 then
+ n = k
+ t[k] = "1"
+ else
+ t[k] = "0"
+ end
+ end
+ if m then
+ m = 33 - m * 8
+ if m < 1 then
+ m = 1
+ end
+ return concat(t,"",m)
+ elseif n < 8 then
+ return concat(t)
+ elseif n < 16 then
+ return concat(t,"",9)
+ elseif n < 24 then
+ return concat(t,"",17)
+ else
+ return concat(t,"",25)
+ end
+ end
+
+else
+
+ function number.tobitstring(n,m)
+ if n > 0 then
+ local t = { }
+ while n > 0 do
+ insert(t,1,n % 2 > 0 and 1 or 0)
+ n = floor(n/2)
+ end
+ local nn = 8 - #t % 8
+ if nn > 0 and nn < 8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m = m * 8 - #t
+ if m > 0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+ rep("00000000",m)
+ else
+ return "00000000"
+ end
+ end
+
+end
+
+function number.valid(str,default)
+ return tonumber(str) or default or nil
end
function number.toevenhex(n)
@@ -31,100 +151,57 @@ function number.toevenhex(n)
end
end
--- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- on
+-- a,b,c,d,e,f = number.toset(100101)
+--
+-- function number.toset(n)
+-- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+-- end
+--
+-- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
+-- -- on
--
-- for i=1,1000000 do
-- local a,b,c,d,e,f,g,h = number.toset(12345678)
-- local a,b,c,d = number.toset(1234)
-- local a,b,c = number.toset(123)
+-- local a,b,c = number.toset("123")
-- end
---
--- of course dedicated "(.)(.)(.)(.)" matches are even faster
-local one = lpeg.C(1-lpeg.S(''))^1
+local one = lpeg.C(1-lpeg.S('')/tonumber)^1
function number.toset(n)
return lpegmatch(one,tostring(n))
end
-function number.bits(n,zero)
- local t, i = { }, (zero and 0) or 1
- while n > 0 do
+-- function number.bits(n,zero)
+-- local t, i = { }, (zero and 0) or 1
+-- while n > 0 do
+-- local m = n % 2
+-- if m > 0 then
+-- insert(t,1,i)
+-- end
+-- n = floor(n/2)
+-- i = i + 1
+-- end
+-- return t
+-- end
+--
+-- -- a bit faster
+
+local function bits(n,i,...)
+ if n > 0 then
local m = n % 2
+ local n = floor(n/2)
if m > 0 then
- insert(t,1,i)
- end
- n = floor(n/2)
- i = i + 1
- end
- return t
-end
-
---~ http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
-
-function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
-end
-
-function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
-end
-
-function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
-end
-
-function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
-end
-
---~ function number.tobitstring(n)
---~ if n == 0 then
---~ return "0"
---~ else
---~ local t = { }
---~ while n > 0 do
---~ insert(t,1,n % 2 > 0 and 1 or 0)
---~ n = floor(n/2)
---~ end
---~ return concat(t)
---~ end
---~ end
-
-function number.tobitstring(n,m)
- if n == 0 then
- if m then
- rep("00000000",m)
+ return bits(n, i+1, i, ...)
else
- return "00000000"
+ return bits(n, i+1, ...)
end
else
- local t = { }
- while n > 0 do
- insert(t,1,n % 2 > 0 and 1 or 0)
- n = floor(n/2)
- end
- local nn = 8 - #t % 8
- if nn > 0 and nn < 8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m = m * 8 - #t
- if m > 0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
+ return ...
end
end
---~ print(number.tobitstring(8))
---~ print(number.tobitstring(14))
---~ print(number.tobitstring(66))
---~ print(number.tobitstring(0x00))
---~ print(number.tobitstring(0xFF))
---~ print(number.tobitstring(46260767936,8))
---~ print(#number.tobitstring(46260767936,6))
+function number.bits(n)
+ return { bits(n,1) }
+end
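+
+-- an illustrative call:
+--
+-- print(table.concat(number.bits(13)," ")) -- 4 3 1 (set bit positions of 1101, highest first)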
diff --git a/Master/texmf-dist/tex/context/base/l-os.lua b/Master/texmf-dist/tex/context/base/l-os.lua
index f9bd46e51c8..42f3e4862c9 100644
--- a/Master/texmf-dist/tex/context/base/l-os.lua
+++ b/Master/texmf-dist/tex/context/base/l-os.lua
@@ -22,17 +22,28 @@ if not modules then modules = { } end modules ['l-os'] = {
-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
-- os.platform : extended os.name with architecture
+-- os.sleep() => socket.sleep()
+-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
+
-- maybe build io.flush in os.execute
local os = os
+local date, time = os.date, os.time
local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+local random, ceil, randomseed = math.random, math.ceil, math.randomseed
+local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
-- The following code permits traversing the environment table, at least
-- in luatex. Internally all environment names are uppercase.
+-- The randomseed in Lua is not that random, although this depends on the operating system as well
+-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
+
+math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+
+randomseed(math.initialseed)
+
if not os.__getenv__ then
os.__getenv__ = os.getenv
@@ -136,15 +147,17 @@ else
os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
+local launchers = {
+ windows = "start %s",
+ macosx = "open %s",
+ unix = "$BROWSER %s &> /dev/null &",
+}
+
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
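+
+-- e.g. os.launch("whatever.pdf") -- start / open / $BROWSER, depending on os.name (illustrative name)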
-if not os.times then
+if not os.times then -- ?
-- utime = user time
-- stime = system time
-- cutime = children user time
@@ -180,14 +193,10 @@ os.resolvers = os.resolvers or { } -- will become private
local resolvers = os.resolvers
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
-local osix = osmt.__index
-
-osmt.__index = function(t,k)
- return (resolvers[k] or osix)(t,k)
-end
-
-setmetatable(os,osmt)
+setmetatable(os, { __index = function(t,k)
+ local r = resolvers[k]
+ return r and r(t,k) or nil -- no memoize
+end })
-- we can use HOSTTYPE on some platforms
@@ -357,7 +366,7 @@ end
local d
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
+ d = d or tonumber(tonumber(date("%H")-date("!%H")))
if delta then
if d > 0 then
return format("+%02i:00",d)
@@ -369,6 +378,44 @@ function os.timezone(delta)
end
end
+local timeformat = format("%%s%s",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function os.fulltime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+
+local dateformat = "%Y-%m-%d %H:%M:%S"
+
+function os.localtime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return date(dateformat,t)
+end
+
+function os.converttime(t,default)
+ local t = tonumber(t)
+ if t and t > 0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+
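+-- a few illustrative calls:
+--
+-- print(os.fulltime())           -- the current utc time plus the timezone suffix
+-- print(os.localtime(0,"-"))     -- "-" (zero is not accepted as a valid time)
+-- print(os.converttime("0","-")) -- "-"
+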
local memory = { }
local function which(filename)
@@ -397,6 +444,18 @@ end
os.which = which
os.where = which
+function os.today()
+ return date("!*t") -- table with values
+end
+
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12
+end
+
+if not os.sleep and socket then
+ os.sleep = socket.sleep
+end
+
-- print(os.which("inkscape.exe"))
-- print(os.which("inkscape"))
-- print(os.which("gs.exe"))
diff --git a/Master/texmf-dist/tex/context/base/l-pdfview.lua b/Master/texmf-dist/tex/context/base/l-pdfview.lua
index 3f4a8bea5cf..e42462a3d1d 100644
--- a/Master/texmf-dist/tex/context/base/l-pdfview.lua
+++ b/Master/texmf-dist/tex/context/base/l-pdfview.lua
@@ -22,24 +22,26 @@ local opencalls, closecalls, allcalls, runner
if os.type == "windows" then
opencalls = {
- ['default'] = "pdfopen --ax --file", -- --back --file --ax
- ['acrobat'] = "pdfopen --ax --file", -- --back --file --ax
- ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique' -- todo!
+ ['default'] = "pdfopen --rxi --file",
+ ['acrobat'] = "pdfopen --rxi --file",
+ ['fullacrobat'] = "pdfopen --axi --file",
+ ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique' -- todo!
}
closecalls= {
- ['default'] = "pdfclose --ax --file", -- --ax
- ['acrobat'] = "pdfclose --ax --file", -- --ax
+ ['default'] = "pdfclose --file",
+ ['acrobat'] = "pdfclose --file",
['okular'] = false,
}
allcalls = {
- ['default'] = "pdfclose --ax --all", -- --ax
- ['acrobat'] = "pdfclose --ax --all", -- --ax
+ ['default'] = "pdfclose --all",
+ ['acrobat'] = "pdfclose --all",
['okular'] = false,
}
pdfview.method = "acrobat"
runner = function(...)
+-- os.spawn(...)
os.execute(...)
end
@@ -84,7 +86,7 @@ function pdfview.status()
return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method))
end
-local openedfiles = { }
+-- local openedfiles = { }
local function fullname(name)
return file.addsuffix(name,"pdf")
@@ -98,7 +100,7 @@ function pdfview.open(...)
local name = fullname(t[i])
if io.exists(name) then
runner(format('%s "%s"', opencall, name))
- openedfiles[name] = true
+ -- openedfiles[name] = true
end
end
end
@@ -110,13 +112,13 @@ function pdfview.close(...)
local t = { ... }
for i=1,#t do
local name = fullname(t[i])
- if openedfiles[name] then
+ -- if openedfiles[name] then
runner(format('%s "%s"', closecall, name))
- openedfiles[name] = nil
- else
- pdfview.closeall()
- break
- end
+ -- openedfiles[name] = nil
+ -- else
+ -- pdfview.closeall()
+ -- break
+ -- end
end
end
end
@@ -126,7 +128,7 @@ function pdfview.closeall()
if allcall then
runner(format('%s', allcall))
end
- openedfiles = { }
+ -- openedfiles = { }
end
--~ pdfview.open("t:/document/show-exa.pdf")
diff --git a/Master/texmf-dist/tex/context/base/l-string.lua b/Master/texmf-dist/tex/context/base/l-string.lua
index 989e57f753b..77c076cc53e 100644
--- a/Master/texmf-dist/tex/context/base/l-string.lua
+++ b/Master/texmf-dist/tex/context/base/l-string.lua
@@ -7,43 +7,49 @@ if not modules then modules = { } end modules ['l-string'] = {
}
local string = string
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
-
--- some functions may disappear as they are not used anywhere
-
-if not string.split then
-
- -- this will be overloaded by a faster lpeg variant
-
- function string.split(str,pattern)
- local t = { }
- if #str > 0 then
- local n = 1
- for s in gmatch(str..pattern,"(.-)"..pattern) do
- t[n] = s
- n = n + 1
- end
- end
- return t
- end
-
-end
+local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
+
+-- Some functions are already defined in l-lpeg and maybe some from here will
+-- move there (unless we also expose caches).
+
+-- if not string.split then
+--
+-- function string.split(str,pattern)
+-- local t = { }
+-- if #str > 0 then
+-- local n = 1
+-- for s in gmatch(str..pattern,"(.-)"..pattern) do
+-- t[n] = s
+-- n = n + 1
+-- end
+-- end
+-- return t
+-- end
+--
+-- end
+
+-- function string.unquoted(str)
+-- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern
+-- end
+
+local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote
+ + patterns.dquote * C(patterns.nodquote) * patterns.dquote
function string.unquoted(str)
- return (gsub(str,"^([\"\'])(.*)%1$","%2"))
+ return lpegmatch(unquoted,str) or str
end
---~ function stringunquoted(str)
---~ if find(str,"^[\'\"]") then
---~ return sub(str,2,-2)
---~ else
---~ return str
---~ end
---~ end
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
function string.quoted(str)
- return format("%q",str) -- always "
+ return format("%q",str) -- always double quote
end
function string.count(str,pattern) -- variant 3
@@ -63,55 +69,136 @@ function string.limit(str,n,sentinel) -- not utf proof
end
end
-local space = S(" \t\v\n")
-local nospace = 1 - space
-local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
+local stripper = patterns.stripper
+local collapser = patterns.collapser
+local longtostring = patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.is_empty(str)
- return not find(str,"%S")
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
end
-local patterns_escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
+-- function string.is_empty(str)
+-- return not find(str,"%S")
+-- end
+
+local pattern = P(" ")^0 * P(-1)
+
+function string.is_empty(str)
+ if str == "" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+
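+-- print(string.is_empty(""))    -- true
+-- print(string.is_empty("  "))  -- true (spaces only)
+-- print(string.is_empty(" x ")) -- false
+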
+-- if not string.escapedpattern then
+--
+-- local patterns_escapes = {
+-- ["%"] = "%%",
+-- ["."] = "%.",
+-- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+-- ["["] = "%[", ["]"] = "%]",
+-- ["("] = "%(", [")"] = "%)",
+-- -- ["{"] = "%{", ["}"] = "%}"
+-- -- ["^"] = "%^", ["$"] = "%$",
+-- }
+--
+-- local simple_escapes = {
+-- ["-"] = "%-",
+-- ["."] = "%.",
+-- ["?"] = ".",
+-- ["*"] = ".*",
+-- }
+--
+-- function string.escapedpattern(str,simple)
+-- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+-- end
+--
+-- function string.topattern(str,lowercase,strict)
+-- if str == "" then
+-- return ".*"
+-- else
+-- str = gsub(str,".",simple_escapes)
+-- if lowercase then
+-- str = lower(str)
+-- end
+-- if strict then
+-- return "^" .. str .. "$"
+-- else
+-- return str
+-- end
+-- end
+-- end
+--
+-- end
+
+--- needs checking
+
+local anything = patterns.anything
+local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ?
+local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ?
+local matchescapes = Cc(".") * S("*?") -- wildcard and single match
+
+local pattern_a = Cs ( ( allescapes + anything )^0 )
+local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 )
+local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") )
function string.escapedpattern(str,simple)
- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+ return lpegmatch(simple and pattern_b or pattern_a,str)
end
function string.topattern(str,lowercase,strict)
- if str == "" then
+ if str=="" or type(str) ~= "string" then
return ".*"
+ elseif strict then
+ str = lpegmatch(pattern_c,str)
else
- str = gsub(str,".",simple_escapes)
- if lowercase then
- str = lower(str)
- end
- if strict then
- return "^" .. str .. "$"
- else
- return str
- end
+ str = lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
end
end
+-- print(string.escapedpattern("12+34*.tex",false))
+-- print(string.escapedpattern("12+34*.tex",true))
+-- print(string.topattern ("12+34*.tex",false,false))
+-- print(string.topattern ("12+34*.tex",false,true))
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
+
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern = Ct(C(1)^0) -- string and not utf !
+
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+
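+-- print(table.concat(string.totable("abc"),"+")) -- a+b+c
+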
+-- handy from within tex:
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+
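+-- an illustrative call (only the @ to % mapping is used here):
+--
+-- print(string.tformat("value: @s","ok")) -- value: ok
+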
-- obsolete names:
string.quote = string.quoted
diff --git a/Master/texmf-dist/tex/context/base/l-table.lua b/Master/texmf-dist/tex/context/base/l-table.lua
index 337ce054a02..640bbbb283f 100644
--- a/Master/texmf-dist/tex/context/base/l-table.lua
+++ b/Master/texmf-dist/tex/context/base/l-table.lua
@@ -6,65 +6,23 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
+local format, lower, dump = string.format, string.lower, string.dump
local getmetatable, setmetatable = getmetatable, setmetatable
local getinfo = debug.getinfo
-
--- Starting with version 5.2 Lua no longer provide ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashion we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
-
-if not ipairs then
-
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
-
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
-
- function ipairs(a)
- return iterate, a, 0
- end
-
-end
-
-if not pairs then
-
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
-
- function pairs(t)
- return next, t -- , nil
- end
-
-end
-
--- Also, unpack has been moved to the table table, and for compatiility
--- reasons we provide both now.
-
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
-end
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local floor = math.floor
-- extra functions, some might go (when not used)
+local stripper = patterns.stripper
+
function table.strip(tab)
local lst, l = { }, 0
for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
+ local s = lpegmatch(stripper,tab[i]) or ""
if s == "" then
-- skip this one
else
@@ -76,12 +34,16 @@ function table.strip(tab)
end
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
end
- return keys
end
local function compare(a,b)
@@ -94,41 +56,59 @@ local function compare(a,b)
end
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
else
- category = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
end
end
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
end
- if category == 0 or category == 3 then
- sort(srt,compare)
+end
+
+local function sortedhashkeys(tab,cmp) -- fast one
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
+ end
+ sort(srt,cmp)
+ return srt
else
- sort(srt)
+ return { }
end
- return srt
end
-local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
+function table.allkeys(t)
+ local keys = { }
+ for k, v in next, t do
+ for k, v in next, v do
+ keys[k] = true
end
end
- sort(srt)
- return srt
+ return sortedkeys(keys)
end
table.sortedkeys = sortedkeys
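+
+-- e.g. table.allkeys { one = { a = 1 }, two = { b = 2, a = 3 } } -- { "a", "b" }, the union of subtable keys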
@@ -136,9 +116,16 @@ table.sortedhashkeys = sortedhashkeys
local function nothing() end
-local function sortedhash(t)
+local function sortedhash(t,cmp)
if t then
- local n, s = 0, sortedkeys(t) -- the robust one
+ local s
+ if cmp then
+ -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
+ s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s = sortedkeys(t) -- the robust one
+ end
+ local n = 0
local function kv(s)
n = n + 1
local k = s[n]
@@ -151,9 +138,9 @@ local function sortedhash(t)
end
table.sortedhash = sortedhash
-table.sortedpairs = sortedhash
+table.sortedpairs = sortedhash -- obsolete
-function table.append(t, list)
+function table.append(t,list)
local n = #t
for i=1,#list do
n = n + 1
@@ -175,31 +162,63 @@ function table.prepend(t, list)
return t
end
+-- function table.merge(t, ...) -- first one is target
+-- t = t or { }
+-- local lst = { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- t[k] = v
+-- end
+-- end
+-- return t
+-- end
+
function table.merge(t, ...) -- first one is target
t = t or { }
- local lst = { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
t[k] = v
end
end
return t
end
+-- function table.merged(...)
+-- local tmp, lst = { }, { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- tmp[k] = v
+-- end
+-- end
+-- return tmp
+-- end
+
function table.merged(...)
- local tmp, lst = { }, { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
+ local t = { }
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
+ t[k] = v
end
end
- return tmp
+ return t
end
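+
+-- e.g. table.merged({ a = 1 },{ b = 2 },{ a = 3 }) -- { a = 3, b = 2 }, later tables win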
+-- function table.imerge(t, ...)
+-- local lst, nt = { ... }, #t
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- nt = nt + 1
+-- t[nt] = nst[j]
+-- end
+-- end
+-- return t
+-- end
+
function table.imerge(t, ...)
- local lst, nt = { ... }, #t
- for i=1,#lst do
- local nst = lst[i]
+ local nt = #t
+ for i=1,select("#",...) do
+ local nst = select(i,...)
for j=1,#nst do
nt = nt + 1
t[nt] = nst[j]
@@ -208,10 +227,22 @@ function table.imerge(t, ...)
return t
end
+-- function table.imerged(...)
+-- local tmp, ntmp, lst = { }, 0, {...}
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- ntmp = ntmp + 1
+-- tmp[ntmp] = nst[j]
+-- end
+-- end
+-- return tmp
+-- end
+
function table.imerged(...)
- local tmp, ntmp, lst = { }, 0, {...}
- for i=1,#lst do
- local nst = lst[i]
+ local tmp, ntmp = { }, 0
+ for i=1,select("#",...) do
+ local nst = select(i,...)
for j=1,#nst do
ntmp = ntmp + 1
tmp[ntmp] = nst[j]
@@ -223,7 +254,7 @@ end
local function fastcopy(old,metatabletoo) -- fast one
if old then
local new = { }
- for k,v in next, old do
+ for k, v in next, old do
if type(v) == "table" then
new[k] = fastcopy(v,metatabletoo) -- was just table.copy
else
@@ -277,7 +308,7 @@ end
table.fastcopy = fastcopy
table.copy = copy
-function table.derive(parent)
+function table.derive(parent) -- for the moment not public
local child = { }
if parent then
setmetatable(child,{ __index = parent })
@@ -358,6 +389,15 @@ end
-- problem: there no good number_to_string converter with the best resolution
+-- probably using .. is faster than format
+-- maybe split in a few cases (yes/no hexify)
+
+-- todo: %g faster on numbers than %s
+
+-- we can speed this up with repeaters and formatters (is indeed faster)
+
+local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
+
local function dummy() end
local function do_serialize(root,name,depth,level,indexed)
@@ -367,14 +407,14 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s{",depth))
else
local tn = type(name)
- if tn == "number" then -- or find(k,"^%d+$") then
+ if tn == "number" then
if hexify then
handle(format("%s[0x%04X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
elseif tn == "string" then
- if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
handle(format("%s%s={",depth,name))
else
handle(format("%s[%q]={",depth,name))
@@ -388,12 +428,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -434,7 +487,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s %s,",depth,tostring(v)))
elseif t == "function" then
if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
+ handle(format('%s load(%q),',depth,dump(v)))
else
handle(format('%s "function",',depth))
end
@@ -446,7 +499,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s __p__=nil,",depth))
end
elseif t == "number" then
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
@@ -458,7 +511,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
handle(format("%s %s=0x%04X,",depth,k,v))
else
@@ -473,7 +526,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "string" then
if reduce and tonumber(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,v))
else
@@ -481,13 +534,13 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk == "boolean" then
handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%s,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
else
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,v))
else
@@ -495,7 +548,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk == "boolean" then
handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,v))
else
handle(format("%s [%q]=%q,",depth,k,v))
@@ -503,7 +556,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "table" then
if not next(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]={},",depth,k))
else
@@ -511,7 +564,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk == "boolean" then
handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={},",depth,k))
else
handle(format("%s [%q]={},",depth,k))
@@ -519,15 +572,15 @@ local function do_serialize(root,name,depth,level,indexed)
elseif inline then
local st = simple_table(v)
if st then
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ elseif tk == "boolean" then
handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
@@ -539,15 +592,15 @@ local function do_serialize(root,name,depth,level,indexed)
do_serialize(v,k,depth,level+1)
end
elseif t == "boolean" then
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%s,",depth,k,tostring(v)))
end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ elseif tk == "boolean" then
handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%s,",depth,k,tostring(v)))
else
handle(format("%s [%q]=%s,",depth,k,tostring(v)))
@@ -556,30 +609,30 @@ local function do_serialize(root,name,depth,level,indexed)
if functions then
local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
-- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,f))
+ handle(format("%s [%s]=load(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,f))
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
else
- handle(format("%s [%q]=loadstring(%q),",depth,k,f))
+ handle(format("%s [%q]=load(%q),",depth,k,f))
end
end
else
- if tk == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ elseif tk == "boolean" then
handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,tostring(v)))
else
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
@@ -660,14 +713,330 @@ local function serialize(_handle,root,name,specification) -- handle wins
handle("}")
end
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
+-- -- This is some 20% faster than using format (because formatters are much faster) but
+-- -- of course, inlining the format using .. is then again faster .. anyway, as we do
+-- -- some pretty printing as well there is not that much to gain unless we make a 'fast'
+-- -- ugly variant as well. But, we would have to move the formatter to l-string then.
+
+-- local formatters = string.formatters
+
+-- local function do_serialize(root,name,level,indexed)
+-- if level > 0 then
+-- if indexed then
+-- handle(formatters["%w{"](level))
+-- else
+-- local tn = type(name)
+-- if tn == "number" then
+-- if hexify then
+-- handle(formatters["%w[%04H]={"](level,name))
+-- else
+-- handle(formatters["%w[%s]={"](level,name))
+-- end
+-- elseif tn == "string" then
+-- if noquotes and not reserved[name] and lpegmatch(propername,name) then
+-- handle(formatters["%w%s={"](level,name))
+-- else
+-- handle(formatters["%w[%q]={"](level,name))
+-- end
+-- elseif tn == "boolean" then
+-- handle(formatters["%w[%S]={"](level,name))
+-- else
+-- handle(formatters["%w{"](level))
+-- end
+-- end
+-- end
+-- -- we could check for k (index) being number (cardinal)
+-- if root and next(root) then
+-- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+-- -- if compact then
+-- -- -- NOT: for k=1,#root do (we need to quit at nil)
+-- -- for k,v in ipairs(root) do -- can we use next?
+-- -- if not first then first = k end
+-- -- last = last + 1
+-- -- end
+-- -- end
+-- local first, last = nil, 0
+-- if compact then
+-- last = #root
+-- for k=1,last do
+-- if root[k] == nil then
+-- last = k - 1
+-- break
+-- end
+-- end
+-- if last > 0 then
+-- first = 1
+-- end
+-- end
+-- local sk = sortedkeys(root)
+-- for i=1,#sk do
+-- local k = sk[i]
+-- local v = root[k]
+-- --~ if v == root then
+-- -- circular
+-- --~ else
+-- local t, tk = type(v), type(k)
+-- if compact and first and tk == "number" and k >= first and k <= last then
+-- if t == "number" then
+-- if hexify then
+-- handle(formatters["%w %04H,"](level,v))
+-- else
+-- handle(formatters["%w %s,"](level,v)) -- %.99g
+-- end
+-- elseif t == "string" then
+-- if reduce and tonumber(v) then
+-- handle(formatters["%w %s,"](level,v))
+-- else
+-- handle(formatters["%w %q,"](level,v))
+-- end
+-- elseif t == "table" then
+-- if not next(v) then
+-- handle(formatters["%w {},"](level))
+-- elseif inline then -- and #t > 0
+-- local st = simple_table(v)
+-- if st then
+-- handle(formatters["%w { %, t },"](level,st))
+-- else
+-- do_serialize(v,k,level+1,true)
+-- end
+-- else
+-- do_serialize(v,k,level+1,true)
+-- end
+-- elseif t == "boolean" then
+-- handle(formatters["%w %S,"](level,v))
+-- elseif t == "function" then
+-- if functions then
+-- handle(formatters['%w load(%q),'](level,dump(v)))
+-- else
+-- handle(formatters['%w "function",'](level))
+-- end
+-- else
+-- handle(formatters["%w %Q,"](level,v))
+-- end
+-- elseif k == "__p__" then -- parent
+-- if false then
+-- handle(formatters["%w __p__=nil,"](level))
+-- end
+-- elseif t == "number" then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g
+-- end
+-- elseif tk == "boolean" then
+-- if hexify then
+-- handle(formatters["%w [%S]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g
+-- end
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- if hexify then
+-- handle(formatters["%w %s=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g
+-- end
+-- else
+-- if hexify then
+-- handle(formatters["%w [%q]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g
+-- end
+-- end
+-- elseif t == "string" then
+-- if reduce and tonumber(v) then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%s,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%s,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%s,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%s,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%s,"](level,k,v))
+-- end
+-- else
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%q,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%q,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%q,"](level,k,v))
+-- end
+-- end
+-- elseif t == "table" then
+-- if not next(v) then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]={},"](level,k))
+-- else
+-- handle(formatters["%w [%s]={},"](level,k))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]={},"](level,k))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s={},"](level,k))
+-- else
+-- handle(formatters["%w [%q]={},"](level,k))
+-- end
+-- elseif inline then
+-- local st = simple_table(v)
+-- if st then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]={ %, t },"](level,k,st))
+-- else
+-- handle(formatters["%w [%s]={ %, t },"](level,k,st))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]={ %, t },"](level,k,st))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s={ %, t },"](level,k,st))
+-- else
+-- handle(formatters["%w [%q]={ %, t },"](level,k,st))
+-- end
+-- else
+-- do_serialize(v,k,level+1)
+-- end
+-- else
+-- do_serialize(v,k,level+1)
+-- end
+-- elseif t == "boolean" then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%S,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%S,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%S,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%S,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%S,"](level,k,v))
+-- end
+-- elseif t == "function" then
+-- if functions then
+-- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+-- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=load(%q),"](level,k,f))
+-- else
+-- handle(formatters["%w [%s]=load(%q),"](level,k,f))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=load(%q),"](level,k,f))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=load(%q),"](level,k,f))
+-- else
+-- handle(formatters["%w [%q]=load(%q),"](level,k,f))
+-- end
+-- end
+-- else
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%Q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%Q,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%Q,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%Q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%Q,"](level,k,v))
+-- end
+-- end
+-- --~ end
+-- end
+-- end
+-- if level > 0 then
+-- handle(formatters["%w}"](level))
+-- end
+-- end
+
+-- local function serialize(_handle,root,name,specification) -- handle wins
+-- local tname = type(name)
+-- if type(specification) == "table" then
+-- noquotes = specification.noquotes
+-- hexify = specification.hexify
+-- handle = _handle or specification.handle or print
+-- reduce = specification.reduce or false
+-- functions = specification.functions
+-- compact = specification.compact
+-- inline = specification.inline and compact
+-- if functions == nil then
+-- functions = true
+-- end
+-- if compact == nil then
+-- compact = true
+-- end
+-- if inline == nil then
+-- inline = compact
+-- end
+-- else
+-- noquotes = false
+-- hexify = false
+-- handle = _handle or print
+-- reduce = false
+-- compact = true
+-- inline = true
+-- functions = true
+-- end
+-- if tname == "string" then
+-- if name == "return" then
+-- handle("return {")
+-- else
+-- handle(name .. "={")
+-- end
+-- elseif tname == "number" then
+-- if hexify then
+-- handle(format("[0x%04X]={",name))
+-- else
+-- handle("[" .. name .. "]={")
+-- end
+-- elseif tname == "boolean" then
+-- if name then
+-- handle("return {")
+-- else
+-- handle("{")
+-- end
+-- else
+-- handle("t={")
+-- end
+-- if root then
+-- -- The dummy access will initialize a table that has a delayed initialization
+-- -- using a metatable. (maybe explicitly test for metatable)
+-- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+-- local dummy = root._w_h_a_t_e_v_e_r_
+-- root._w_h_a_t_e_v_e_r_ = nil
+-- end
+-- -- Let's forget about empty tables.
+-- if next(root) then
+-- do_serialize(root,name,0)
+-- end
+-- end
+-- handle("}")
+-- end
+
+-- name:
+--
+-- true : return { }
+-- false : { }
+-- nil : t = { }
+-- string : string = { }
+-- "return" : return { }
+-- number : [number] = { }
function table.serialize(root,name,specification)
local t, n = { }, 0
@@ -679,6 +1048,13 @@ function table.serialize(root,name,specification)
return concat(t,"\n")
end
+-- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } }
+-- local t = os.clock()
+-- for i=1,10000 do
+-- table.serialize(a)
+-- end
+-- print(os.clock()-t,table.serialize(a))
+
table.tohandle = serialize
-- sometimes tables are real use (zapfino extra pro is some 85M) in which
@@ -723,7 +1099,7 @@ local function flattened(t,f,depth)
f = { }
depth = 0xFFFF
elseif tonumber(f) then
- -- assume then only two arguments are given
+ -- assume that only two arguments are given
depth = f
f = { }
elseif not depth then
@@ -756,7 +1132,7 @@ table.flattened = flattened
local function unnest(t,f) -- only used in mk, for old times sake
if not f then -- and only relevant for token lists
- f = { }
+ f = { } -- this one can become obsolete
end
for i=1,#t do
local v = t[i]
@@ -785,7 +1161,7 @@ local function are_equal(a,b,n,m) -- indexed
local ai, bi = a[i], b[i]
if ai==bi then
-- same
- elseif type(ai)=="table" and type(bi)=="table" then
+ elseif type(ai) == "table" and type(bi) == "table" then
if not are_equal(ai,bi) then
return false
end
@@ -820,10 +1196,10 @@ table.are_equal = are_equal
-- maybe also make a combined one
-function table.compact(t)
+function table.compact(t) -- remove empty tables, assumes subtables
if t then
- for k,v in next, t do
- if not next(v) then
+ for k, v in next, t do
+ if not next(v) then -- no type checking
t[k] = nil
end
end
@@ -852,25 +1228,25 @@ end
function table.swapped(t,s) -- hash
local n = { }
if s then
---~ for i=1,#s do
---~ n[i] = s[i]
---~ end
for k, v in next, s do
n[k] = v
end
end
---~ for i=1,#t do
---~ local ti = t[i] -- don't ask but t[i] can be nil
---~ if ti then
---~ n[ti] = i
---~ end
---~ end
for k, v in next, t do
n[v] = k
end
return n
end
+function table.mirrored(t) -- hash
+ local n = { }
+ for k, v in next, t do
+ n[v] = k
+ n[k] = v
+ end
+ return n
+end
+
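+-- e.g. table.mirrored { a = "x" } -- { a = "x", x = "a" }
+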
function table.reversed(t)
if t then
local tt, tn = { }, #t
@@ -885,33 +1261,59 @@ function table.reversed(t)
end
end
+function table.reverse(t)
+ if t then
+ local n = #t
+ for i=1,floor(n/2) do
+ local j = n - i + 1
+ t[i], t[j] = t[j], t[i]
+ end
+ return t
+ end
+end
+
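+-- e.g. table.reverse { 1, 2, 3, 4 } -- becomes { 4, 3, 2, 1 } (in place)
+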
function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
+ if not t then
+ return ""
+ end
+ local n = #t
+ local s = { }
+ if n > 0 then
+ -- indexed
+ for i=1,n do
+ s[i] = tostring(t[i])
+ end
+ else
+ -- hashed
+ n = 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
n = n + 1
s[n] = k .. "=" .. tostring(v)
end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
end
end
- return concat(s, sep or " | ")
+ return concat(s,sep or " | ")
end
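+
+-- e.g. table.sequenced({ a = 1, b = true }," ",true) -- "a=1 b"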
function table.print(t,...)
if type(t) ~= "table" then
print(tostring(t))
else
- table.tohandle(print,t,...)
+ serialize(print,t,...)
end
end
+setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+
-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
@@ -957,7 +1359,7 @@ function table.unique(old)
return new
end
--- function table.sorted(t,...)
--- table.sort(t,...)
--- return t -- still sorts in-place
--- end
+function table.sorted(t,...)
+ sort(t,...)
+ return t -- still sorts in-place
+end
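+
+-- e.g. table.concat(table.sorted { "b", "a" }) -- "ab" (sorts in place and returns t)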
diff --git a/Master/texmf-dist/tex/context/base/l-unicode.lua b/Master/texmf-dist/tex/context/base/l-unicode.lua
index f4480e93c40..813ffd54b79 100644
--- a/Master/texmf-dist/tex/context/base/l-unicode.lua
+++ b/Master/texmf-dist/tex/context/base/l-unicode.lua
@@ -6,210 +6,624 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+-- we put these in the utf namespace:
+
+utf = utf or (unicode and unicode.utf8) or { }
+
+utf.characters = utf.characters or string.utfcharacters
+utf.values = utf.values or string.utfvalues
+
+-- string.utfvalues
+-- string.utfcharacters
+-- string.characters
+-- string.characterpairs
+-- string.bytes
+-- string.bytepairs
+
+local type = type
+local char, byte, format, sub = string.char, string.byte, string.format, string.sub
+local concat = table.concat
+local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+
+local bytepairs = string.bytepairs
+
+local finder = lpeg.finder
+local replacer = lpeg.replacer
+
+local utfvalues = utf.values
+local utfgmatch = utf.gmatch -- not always present
+
+local p_utftype = patterns.utftype
+local p_utfoffset = patterns.utfoffset
+local p_utf8char = patterns.utf8char
+local p_utf8byte = patterns.utf8byte
+local p_utfbom = patterns.utfbom
+local p_newline = patterns.newline
+local p_whitespace = patterns.whitespace
+
if not unicode then
- unicode = { utf8 = { } }
+ unicode = { utf = utf } -- for a while
+
+end
+
+if not utf.char then
local floor, char = math.floor, string.char
- function unicode.utf8.utfchar(n)
+ function utf.char(n)
if n < 0x80 then
+ -- 0aaaaaaa : 0x80
return char(n)
elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xC0 + floor(n/0x40),
0x80 + (n % 0x40)
)
elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xE0 + floor(n/0x1000),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
- elseif n < 0x40000 then
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
+ return ""
end
end
end
-local unicode = unicode
+if not utf.byte then
-utf = utf or unicode.utf8
+ local utf8byte = patterns.utf8byte
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
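+-- two illustrative calls:
+--
+-- print(utf.char(0xE9)) -- é (the two bytes 0xC3 0xA9)
+-- print(utf.byte("é"))  -- 233 (0xE9)
+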
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
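+-- print(utf.toentities("abcé")) -- abc&#E9;
+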
+-- local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
+--
+-- setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
+--
+-- collectgarbage("collect")
+-- local u = collectgarbage("count")*1024
+-- local t = os.clock()
+-- for i=1,1000 do
+-- for i=1,600 do
+-- local a = utfchr[i]
+-- end
+-- end
+-- print(os.clock()-t,collectgarbage("count")*1024-u)
+
+-- collectgarbage("collect")
+-- local t = os.clock()
+-- for i=1,1000 do
+-- for i=1,600 do
+-- local a = utfchar(i)
+-- end
+-- end
+-- print(os.clock()-t,collectgarbage("count")*1024-u)
+
+-- local byte = string.byte
+-- local utfchar = utf.char
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s) -- in string namespace
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
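+-- an illustrative conversion (utf-16 big endian with bom):
+--
+-- print(string.toutf("\254\255\0A\0b")) -- Ab
+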
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function utf.is_valid(str)
+ return type(str) == "string" and lpegmatch(validatedutf,str) or false
+end
+
+if not utf.len then
+
+ -- -- alternative 1: 0.77
+ --
+ -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0)
+ --
+ -- function utf.len(str)
+ -- return #lpegmatch(utfcharcounter,str or "")
+ -- end
+ --
+ -- -- alternative 2: 1.70
+ --
+ -- local n = 0
+ --
+ -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow
+ --
+ -- function utf.length(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+ --
+ -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+ -- local n = 0
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * (
+ -- -- patterns.utf8one ^1 * Cc(1)
+ -- -- + patterns.utf8two ^1 * Cc(2)
+ -- -- + patterns.utf8three^1 * Cc(3)
+ -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end
+ -- -- )^0 ) -- just as many captures as below
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( (
+ -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end))
+ -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end))
+ -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end))
+ -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end))
+ -- -- )^0 ) -- not interesting as it creates strings but sometimes faster
+ --
+ -- -- The best so far:
+ --
+ -- local utfcharcounter = utfbom^-1 * P ( (
+ -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+ -- )^0 )
+
+ -- function utf.len(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+
+ local n, f = 0, 1
+
+ local utfcharcounter = patterns.utfbom^-1 * Cmt (
+ Cc(1) * patterns.utf8one ^1
+ + Cc(2) * patterns.utf8two ^1
+ + Cc(3) * patterns.utf8three^1
+ + Cc(4) * patterns.utf8four ^1,
+ function(_,t,d) -- due to Cc no string captures, so faster
+ n = n + (t - f)/d
+ f = t
+ return true
+ end
+ )^0
+
+ function utf.len(str)
+ n, f = 0, 1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+
+ -- -- these are quite a bit slower:
+
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower
+
+end
+
+utf.length = utf.len
+
+if not utf.sub then
+
+ -- inefficient as lpeg just copies ^n
+
+ -- local function sub(str,start,stop)
+ -- local pattern = p_utf8char^-(start-1) * C(p_utf8char^-(stop-start+1))
+ -- inspect(pattern)
+ -- return lpegmatch(pattern,str) or ""
+ -- end
+
+ -- local b, e, n, first, last = 0, 0, 0, 0, 0
+ --
+ -- local function slide(s,p)
+ -- n = n + 1
+ -- if n == first then
+ -- b = p
+ -- if not last then
+ -- return nil
+ -- end
+ -- end
+ -- if n == last then
+ -- e = p
+ -- return nil
+ -- else
+ -- return p
+ -- end
+ -- end
+ --
+ -- local pattern = Cmt(p_utf8char,slide)^0
+ --
+ -- function utf.sub(str,start,stop) -- todo: from the end
+ -- if not start then
+ -- return str
+ -- end
+ -- b, e, n, first, last = 0, 0, 0, start, stop
+ -- lpegmatch(pattern,str)
+ -- if not stop then
+ -- return sub(str,b)
+ -- else
+ -- return sub(str,b,e-1)
+ -- end
+ -- end
+
+ -- print(utf.sub("Hans Hagen is my name"))
+ -- print(utf.sub("Hans Hagen is my name",5))
+ -- print(utf.sub("Hans Hagen is my name",5,10))
+
+ local utflength = utf.length
+
+    -- also handles negative indices, up to 10 times slower than a C variant
+
+ local b, e, n, first, last = 0, 0, 0, 0, 0
+
+ local function slide_zero(s,p)
+ n = n + 1
+ if n >= last then
+ e = p - 1
+ else
+ return p
+ end
+ end
+
+ local function slide_one(s,p)
+ n = n + 1
+ if n == first then
+ b = p
+ end
+ if n >= last then
+ e = p - 1
+ else
+ return p
+ end
+ end
+
+ local function slide_two(s,p)
+ n = n + 1
+ if n == first then
+ b = p
+ else
+ return true
+ end
+ end
+
+ local pattern_zero = Cmt(p_utf8char,slide_zero)^0
+ local pattern_one = Cmt(p_utf8char,slide_one )^0
+ local pattern_two = Cmt(p_utf8char,slide_two )^0
+
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start == 0 then
+ start = 1
+ end
+ if not stop then
+ if start < 0 then
+ local l = utflength(str) -- we can inline this function if needed
+ start = l + start
+ else
+ start = start - 1
+ end
+ b, n, first = 0, 0, start
+ lpegmatch(pattern_two,str)
+ if n >= first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start < 0 or stop < 0 then
+ local l = utf.length(str)
+ if start < 0 then
+ start = l + start
+ if start <= 0 then
+ start = 1
+ else
+ start = start + 1
+ end
+ end
+ if stop < 0 then
+ stop = l + stop
+ if stop == 0 then
+ stop = 1
+ else
+ stop = stop + 1
+ end
+ end
+ end
+ if start > stop then
+ return ""
+ elseif start > 1 then
+ b, e, n, first, last = 0, 0, 0, start - 1, stop
+ lpegmatch(pattern_one,str)
+ if n >= first and e == 0 then
+ e = #str
+ end
+ return sub(str,b,e)
+ else
+ b, e, n, last = 1, 0, 0, stop
+ lpegmatch(pattern_zero,str)
+ if e == 0 then
+ e = #str
+ end
+ return sub(str,b,e)
+ end
+ end
+
+ -- local n = 100000
+ -- local str = string.rep("123456àáâãäå",100)
+ --
+ -- for i=-15,15,1 do
+ -- for j=-15,15,1 do
+ -- if utf.xsub(str,i,j) ~= utf.sub(str,i,j) then
+ -- print("error",i,j,"l>"..utf.xsub(str,i,j),"s>"..utf.sub(str,i,j))
+ -- end
+ -- end
+ -- if utf.xsub(str,i) ~= utf.sub(str,i) then
+ -- print("error",i,"l>"..utf.xsub(str,i),"s>"..utf.sub(str,i))
+ -- end
+ -- end
+
+ -- print(" 1, 7",utf.xsub(str, 1, 7),utf.sub(str, 1, 7))
+ -- print(" 0, 7",utf.xsub(str, 0, 7),utf.sub(str, 0, 7))
+ -- print(" 0, 9",utf.xsub(str, 0, 9),utf.sub(str, 0, 9))
+ -- print(" 4 ",utf.xsub(str, 4 ),utf.sub(str, 4 ))
+ -- print(" 0 ",utf.xsub(str, 0 ),utf.sub(str, 0 ))
+ -- print(" 0, 0",utf.xsub(str, 0, 0),utf.sub(str, 0, 0))
+ -- print(" 4, 4",utf.xsub(str, 4, 4),utf.sub(str, 4, 4))
+ -- print(" 4, 0",utf.xsub(str, 4, 0),utf.sub(str, 4, 0))
+ -- print("-3, 0",utf.xsub(str,-3, 0),utf.sub(str,-3, 0))
+ -- print(" 0,-3",utf.xsub(str, 0,-3),utf.sub(str, 0,-3))
+    -- print("-5,-3",utf.xsub(str,-5,-3),utf.sub(str,-5,-3))
+ -- print("-3 ",utf.xsub(str,-3 ),utf.sub(str,-3 ))
+
+end
+
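+-- like string.sub but counting utf characters, with negative indices supported:
+--
+-- print(utf.sub("åäöåäö",2,3)) -- äö
+-- print(utf.sub("åäöåäö",-2))  -- äö (the last two characters)
+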
+-- a replacement for simple gsubs:
+
+function utf.remapper(mapping)
+ local pattern = Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
+
+--
+
+function utf.replacer(t) -- no precheck, always string builder
+ local r = replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+
+function utf.subtituter(t) -- with precheck and no building if no match
+ local f = finder (t)
+ local r = replacer(t,false,false,true)
+ return function(str)
+ local i = lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i > #str then
+ return str
+ else
+ -- return sub(str,1,i-2) .. lpegmatch(r,str,i-1) -- slower
+ return lpegmatch(r,str)
+ end
+ end
+end
-local utfsplitlines = string.utfsplitlines
+-- inspect(utf.split("a b c d"))
+-- inspect(utf.split("a b c d",true))
+
+local utflinesplitter = p_utfbom^-1 * lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows = p_utfbom^-1 * Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws = p_utfbom^-1 * Ct((p_whitespace^1 + C(p_utf8char))^0)
+local utfcharsplitter_raw = Ct(C(p_utf8char)^0)
+
+patterns.utflinesplitter = utflinesplitter
+
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+
+function utf.split(str,ignorewhitespace) -- new
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+
+function utf.totable(str) -- keeps bom
+ return lpegmatch(utfcharsplitter_raw,str)
+end
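+
+-- a sketch of the splitters defined above:
+--
+-- inspect(utf.splitlines("one\ntwo")) -- { "one", "two" }
+-- inspect(utf.totable("åäö"))         -- { "å", "ä", "ö" }
+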
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
-- 2 FE FF UTF-16-big-endian
-- 3 FF FE 00 00 UTF-32-little-endian
-- 4 00 00 FE FF UTF-32-big-endian
-
-unicode.utfname = {
- [0] = 'utf-8',
- [1] = 'utf-16-le',
- [2] = 'utf-16-be',
- [3] = 'utf-32-le',
- [4] = 'utf-32-be'
-}
-
+--
-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated
-function unicode.utftype(f)
- local str = f:read(4)
- if not str then
- f:seek('set')
- return 0
- -- elseif find(str,"^%z%z\254\255") then -- depricated
- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
- return 4
- -- elseif find(str,"^\255\254%z%z") then -- depricated
- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
- return 3
- elseif find(str,"^\254\255") then
- f:seek('set',2)
- return 2
- elseif find(str,"^\255\254") then
- f:seek('set',2)
- return 1
- elseif find(str,"^\239\187\191") then
- f:seek('set',3)
- return 0
- else
- f:seek('set')
- return 0
+-- utf.name = {
+-- [0] = 'utf-8',
+-- [1] = 'utf-16-le',
+-- [2] = 'utf-16-be',
+-- [3] = 'utf-32-le',
+-- [4] = 'utf-32-be'
+-- }
+--
+-- function utf.magic(f)
+-- local str = f:read(4)
+-- if not str then
+-- f:seek('set')
+-- return 0
+-- -- elseif find(str,"^%z%z\254\255") then -- depricated
+-- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+-- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+-- return 4
+-- -- elseif find(str,"^\255\254%z%z") then -- depricated
+-- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+-- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+-- return 3
+-- elseif find(str,"^\254\255") then
+-- f:seek('set',2)
+-- return 2
+-- elseif find(str,"^\255\254") then
+-- f:seek('set',2)
+-- return 1
+-- elseif find(str,"^\239\187\191") then
+-- f:seek('set',3)
+-- return 0
+-- else
+-- f:seek('set')
+-- return 0
+-- end
+-- end
+
+function utf.magic(f) -- not used
+ local str = f:read(4) or ""
+ local off = lpegmatch(p_utfoffset,str)
+ if off < 4 then
+ f:seek('set',off)
end
+ return lpegmatch(p_utftype,str)
end
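+
+-- usage sketch (hypothetical file handle); the return value is whatever the
+-- p_utftype pattern reports for the leading bytes:
+--
+-- local f = io.open("somefile.txt","rb")
+-- if f then print(utf.magic(f)) f:close() end
+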
---~ function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg
---~ local result, tmp, n, m, p, r, t = { }, { }, 0, 0, 0, 0, 0 -- we reuse tmp
---~ -- lf | cr | crlf / (cr:13, lf:10)
---~ local function doit() -- inline this
---~ if n == 10 then
---~ if p ~= 13 then
---~ if t > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t)
---~ t = 0
---~ end
---~ p = 0
---~ end
---~ elseif n == 13 then
---~ if t > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t)
---~ t = 0
---~ end
---~ p = n
---~ else
---~ t = t + 1
---~ tmp[t] = utfchar(n)
---~ p = 0
---~ end
---~ end
---~ for l,r in bytepairs(str) do
---~ if r then
---~ if endian then -- maybe make two loops
---~ n = 256*l + r
---~ else
---~ n = 256*r + l
---~ end
---~ if m > 0 then
---~ n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000
---~ m = 0
---~ doit()
---~ elseif n >= 0xD800 and n <= 0xDBFF then
---~ m = n
---~ else
---~ doit()
---~ end
---~ end
---~ end
---~ if t > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t) -- we reused tmp, hence t
---~ end
---~ return result
---~ end
-
---~ function unicode.utf32_to_utf8(str, endian)
---~ local result, tmp, n, m, p, r, t = { }, { }, 0, -1, 0, 0, 0
---~ -- lf | cr | crlf / (cr:13, lf:10)
---~ local function doit() -- inline this
---~ if n == 10 then
---~ if p ~= 13 then
---~ if t > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t)
---~ t = 0
---~ end
---~ p = 0
---~ end
---~ elseif n == 13 then
---~ if t > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t)
---~ t = 0
---~ end
---~ p = n
---~ else
---~ t = t + 1
---~ tmp[t] = utfchar(n)
---~ p = 0
---~ end
---~ end
---~ for a,b in bytepairs(str) do
---~ if a and b then
---~ if m < 0 then
---~ if endian then -- maybe make two loops
---~ m = 256*256*256*a + 256*256*b
---~ else
---~ m = 256*b + a
---~ end
---~ else
---~ if endian then -- maybe make two loops
---~ n = m + 256*a + b
---~ else
---~ n = m + 256*256*256*b + 256*256*a
---~ end
---~ m = -1
---~ doit()
---~ end
---~ else
---~ break
---~ end
---~ end
---~ if #tmp > 0 then
---~ r = r + 1
---~ result[r] = concat(tmp,"",1,t) -- we reused tmp, hence t
---~ end
---~ return result
---~ end
-
local function utf16_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = lpegmatch(utflinesplitter,t)
end
local result = { } -- we reuse result
for i=1,#t do
@@ -237,7 +651,7 @@ end
local function utf16_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = lpegmatch(utflinesplitter,t)
end
local result = { } -- we reuse result
for i=1,#t do
@@ -265,7 +679,7 @@ end
local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(t)
+ t = lpegmatch(utflinesplitter,t)
end
local result = { } -- we reuse result
for i=1,#t do
@@ -290,7 +704,7 @@ end
local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(t)
+ t = lpegmatch(utflinesplitter,t)
end
local result = { } -- we reuse result
for i=1,#t do
@@ -313,20 +727,20 @@ local function utf32_to_utf8_le(t)
return t
end
-unicode.utf32_to_utf8_be = utf32_to_utf8_be
-unicode.utf32_to_utf8_le = utf32_to_utf8_le
-unicode.utf16_to_utf8_be = utf16_to_utf8_be
-unicode.utf16_to_utf8_le = utf16_to_utf8_le
+utf.utf32_to_utf8_be = utf32_to_utf8_be
+utf.utf32_to_utf8_le = utf32_to_utf8_le
+utf.utf16_to_utf8_be = utf16_to_utf8_be
+utf.utf16_to_utf8_le = utf16_to_utf8_le
-function unicode.utf8_to_utf8(t)
- return type(t) == "string" and utfsplitlines(t) or t
+function utf.utf8_to_utf8(t)
+ return type(t) == "string" and lpegmatch(utflinesplitter,t) or t
end
-function unicode.utf16_to_utf8(t,endian)
+function utf.utf16_to_utf8(t,endian)
return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
end
-function unicode.utf32_to_utf8(t,endian)
+function utf.utf32_to_utf8(t,endian)
return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
end
@@ -352,132 +766,177 @@ local function big(c)
end
end
-function unicode.utf8_to_utf16(str,littleendian)
+-- function utf.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
+function utf.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
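+
+-- a quick check: the result starts with the proper byte order mark and each
+-- ascii character becomes two bytes:
+--
+-- print(#utf.utf8_to_utf16("AB",true)) -- 6 (2 byte bom plus 2 times 2 bytes)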
-function unicode.utfcodes(str)
- local t, n = { }, 0
- for u in utfvalues(str) do
- n = n + 1
- t[n] = format("0x%04X",u)
- end
- return concat(t,separator or " ")
+-- function utf.tocodes(str,separator) -- can be sped up with an lpeg
+-- local t, n = { }, 0
+-- for u in utfvalues(str) do
+-- n = n + 1
+-- t[n] = format("0x%04X",u)
+-- end
+-- return concat(t,separator or " ")
+-- end
+
+local pattern = Cs (
+ (p_utf8byte / function(unicode ) return format( "0x%04X", unicode) end) *
+ (p_utf8byte * Carg(1) / function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
end
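+
+-- a usage sketch:
+--
+-- print(utf.tocodes("abc"))     -- 0x0061 0x0062 0x0063
+-- print(utf.tocodes("abc","+")) -- 0x0061+0x0062+0x0063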
-function unicode.ustring(s)
+function utf.ustring(s)
return format("U+%05X",type(s) == "number" and s or utfbyte(s))
end
-function unicode.xstring(s)
+function utf.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
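+
+-- both accept a number or a (utf) character:
+--
+-- print(utf.ustring(65))  -- U+00041
+-- print(utf.xstring("A")) -- 0x00041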
---~ print(unicode.utfcodes(str))
+--
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
+local p_nany = p_utf8char / ""
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
+if utfgmatch then
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
+ function utf.count(str,what)
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
+ end
+ end
-patterns.toentities = toentities
+else
+
+ local cache = { }
+
+ function utf.count(str,what)
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + p_nany)^0)
+                cache[what] = p
+ end
+ return #lpegmatch(p,str)
+ else -- 4 times slower but still faster than / function
+ return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
+ end
+ end
-function utf.toentities(str)
- return lpegmatch(toentities,str)
end
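+
+-- counting either a substring or an lpeg pattern:
+--
+-- print(utf.count("abcabc","a"))          -- 2
+-- print(utf.count("abcabc",lpeg.P("ab"))) -- 2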
---~ local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
---~
---~ setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
---~
---~ collectgarbage("collect")
---~ local u = collectgarbage("count")*1024
---~ local t = os.clock()
---~ for i=1,1000 do
---~ for i=1,600 do
---~ local a = utfchr[i]
---~ end
---~ end
---~ print(os.clock()-t,collectgarbage("count")*1024-u)
-
---~ collectgarbage("collect")
---~ local t = os.clock()
---~ for i=1,1000 do
---~ for i=1,600 do
---~ local a = utfchar(i)
---~ end
---~ end
---~ print(os.clock()-t,collectgarbage("count")*1024-u)
-
---~ local byte = string.byte
---~ local utfchar = utf.char
---~ local lpegmatch = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+-- maybe also register as string.utf*
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
--- actually one of them is already utf ... sort of useless this one
+if not utf.characters then
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
+ -- New: this gmatch hack is taken from the Lua 5.2 book. It's about two times slower
+ -- than the built-in string.utfcharacters.
+
+ function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+
+ string.utfcharacters = utf.characters
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
end
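+
+-- iterating over characters instead of bytes:
+--
+-- for c in utf.characters("åäö") do print(c) end -- å ä ö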
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
+if not utf.values then
-patterns.validatedutf = validatedutf
+    -- So, a logical next step is to check for the values variant. It is over five times
+ -- slower than the built-in string.utfvalues. I optimized it a bit for n=0,1.
+
+ ----- wrap, yield, gmatch = coroutine.wrap, coroutine.yield, string.gmatch
+ local find = string.find
+
+ local dummy = function()
+ -- we share this one
+ end
+
+ -- function utf.values(str)
+ -- local n = #str
+ -- if n == 0 then
+ -- return wrap(dummy)
+ -- elseif n == 1 then
+ -- return wrap(function() yield(utfbyte(str)) end)
+ -- else
+ -- return wrap(function() for s in gmatch(str,".[\128-\191]*") do
+ -- yield(utfbyte(s))
+ -- end end)
+ -- end
+ -- end
+ --
+ -- faster:
+
+ function utf.values(str)
+ local n = #str
+ if n == 0 then
+ return dummy
+ elseif n == 1 then
+ return function() return utfbyte(str) end
+ else
+ local p = 1
+ -- local n = #str
+ return function()
+ -- if p <= n then -- slower than the last find
+ local b, e = find(str,".[\128-\191]*",p)
+ if b then
+ p = e + 1
+ return utfbyte(sub(str,b,e))
+ end
+ -- end
+ end
+ end
+ end
+
+ -- slower:
+ --
+ -- local pattern = C(patterns.utf8character) * Cp()
+ -- ----- pattern = patterns.utf8character/utfbyte * Cp()
+ -- ----- pattern = patterns.utf8byte * Cp()
+ --
+ -- function utf.values(str) -- one of the cases where a find is faster than an lpeg
+ -- local n = #str
+ -- if n == 0 then
+ -- return dummy
+ -- elseif n == 1 then
+ -- return function() return utfbyte(str) end
+ -- else
+ -- local p = 1
+ -- return function()
+ -- local s, e = lpegmatch(pattern,str,p)
+ -- if e then
+ -- p = e
+ -- return utfbyte(s)
+ -- -- return s
+ -- end
+ -- end
+ -- end
+ -- end
+
+ string.utfvalues = utf.values
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
end
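+
+-- iterating over unicode values instead of bytes:
+--
+-- for u in utf.values("åäö") do print(u) end -- 229 228 246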
diff --git a/Master/texmf-dist/tex/context/base/l-url.lua b/Master/texmf-dist/tex/context/base/l-url.lua
index 69f32cc3958..4624a05070d 100644
--- a/Master/texmf-dist/tex/context/base/l-url.lua
+++ b/Master/texmf-dist/tex/context/base/l-url.lua
@@ -6,10 +6,10 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
+local char, format, byte = string.char, string.format, string.byte
local concat = table.concat
local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
-- from wikipedia:
@@ -42,15 +42,21 @@ local endofstring = P(-1)
local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
+local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
+local escaped = (plus / " ") + escapedchar
+
+local noslash = P("/") / ""
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-- we also assume that when we have a scheme, we also have an authority
+--
+-- maybe we should already split the query (better for unescaping, as = and & can be part of a value)
local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
+----- querystr = Cs((escaped+(1- hash))^0)
+local querystr = Cs(( (1- hash))^0)
local fragmentstr = Cs((escaped+(1- endofstring))^0)
local scheme = schemestr * colon + nothing
@@ -65,11 +71,20 @@ local parser = Ct(validurl)
lpegpatterns.url = validurl
lpegpatterns.urlsplitter = parser
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
+local escapes = { }
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
-lpegpatterns.urlescaper = escaper
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
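+
+-- a small round trip sketch (tochar maps the two hex digits back to a byte):
+--
+-- print(lpegmatch(escaper,"a b/c"))       -- a%20b/c
+-- print(lpegmatch(unescaper,"a%20b%2Fc")) -- a b/c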
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -81,8 +96,12 @@ end
local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
+ if str then
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
+ else
+ return false
+ end
end
--~ print(hasscheme("home:"))
@@ -103,10 +122,32 @@ local rootbased = P("/")
local barswapper = replacer("|",":")
local backslashswapper = replacer("\\","/")
+-- queries:
+
+local equal = P("=")
+local amp = P("&")
+local key = Cs(((escapedchar+1)-equal )^0)
+local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+
+local splitquery = Cf ( Ct("") * P { "sequence",
+ sequence = V("pair") * (amp * V("pair"))^0,
+ pair = Cg(key * equal * value),
+}, rawset)
+
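+-- splitting a raw query string into a hash, with percent escapes resolved (keys and
+-- values are separated by = and pairs by &):
+--
+-- inspect(lpegmatch(splitquery,"a=1&b=2%203")) -- { a = "1", b = "2 3" }
+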
+-- hasher
+
local function hashed(str) -- not yet ok (/test?test)
+ if str == "" then
+ return {
+ scheme = "invalid",
+ original = str,
+ }
+ end
local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
+ local rawscheme = s[1]
+ local rawquery = s[4]
+ local somescheme = rawscheme ~= ""
+ local somequery = rawquery ~= ""
if not somescheme and not somequery then
s = {
scheme = "file",
@@ -122,14 +163,17 @@ local function hashed(str) -- not yet ok (/test?test)
local authority, path, filename = s[2], s[3]
if authority == "" then
filename = path
+ elseif path == "" then
+ filename = ""
else
filename = authority .. "/" .. path
end
s = {
- scheme = s[1],
+ scheme = rawscheme,
authority = authority,
path = path,
- query = s[4],
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
fragment = s[5],
original = str,
noscheme = false,
@@ -139,6 +183,8 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
+-- inspect(hashed("template://test"))
+
-- Here we assume:
--
-- files: /// = relative
@@ -189,23 +235,63 @@ function url.construct(hash) -- dodo: we need to escape !
return lpegmatch(escaper,concat(fullurl))
end
+local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0)
+
function url.filename(filename)
- local t = hashed(filename)
- return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
+ local spec = hashed(filename)
+ local path = spec.path
+ return (spec.scheme == "file" and path and lpegmatch(pattern,path)) or filename
+end
+
+-- print(url.filename("/c|/test"))
+-- print(url.filename("/c/test"))
+
+local function escapestring(str)
+ return lpegmatch(escaper,str)
end
+url.escape = escapestring
+
function url.query(str)
if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
+ return lpegmatch(splitquery,str) or ""
else
return str
end
end
+function url.toquery(data)
+ local td = type(data)
+ if td == "string" then
+        return #data > 0 and escapestring(data) or nil -- beware of double escaping
+ elseif td == "table" then
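+-- note that for a string argument this returns the validated string (with invalid
+-- bytes replaced by the replacement character) rather than a boolean:
+--
+-- print(utf.is_valid("abc"))     -- abc
+-- print(utf.is_valid("ab\255c")) -- ab�c
+-- print(utf.is_valid(123))       -- false
+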
+ if next(data) then
+ local t = { }
+ for k, v in next, data do
+ t[#t+1] = format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ -- nil is a signal that no query
+ end
+end
+
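+-- the inverse: building a query string from a table (key order is undefined):
+--
+-- print(url.toquery { a = "1", b = "2 3" }) -- a=1&b=2%203 (or the b pair first)
+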
+-- /test/ | /test | test/ | test => test
+
+local pattern = Cs(noslash^0 * (1 - noslash * P(-1))^0)
+
+function url.barepath(path)
+ if not path or path == "" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
+end
+
+-- print(url.barepath("/test"),url.barepath("test/"),url.barepath("/test/"),url.barepath("test"))
+-- print(url.barepath("/x/yz"),url.barepath("x/yz/"),url.barepath("/x/yz/"),url.barepath("x/yz"))
+
--~ print(url.filename("file:///c:/oeps.txt"))
--~ print(url.filename("c:/oeps.txt"))
--~ print(url.filename("file:///oeps.txt"))
@@ -220,6 +306,9 @@ end
--~ print(table.serialize(t))
--~ end
+--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
+--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
+
--~ test("sys:///./colo-rgb")
--~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg")
diff --git a/Master/texmf-dist/tex/context/base/lang-def.lua b/Master/texmf-dist/tex/context/base/lang-def.lua
index 80ff13beb6b..6656d8ed978 100644
--- a/Master/texmf-dist/tex/context/base/lang-def.lua
+++ b/Master/texmf-dist/tex/context/base/lang-def.lua
@@ -1,15 +1,18 @@
-if not modules then modules = { } end modules ['lang-ini'] = {
+if not modules then modules = { } end modules ['lang-def'] = {
version = 1.001,
comment = "companion to lang-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
+ -- dataonly = true, -- saves 10K
}
+local rawget = rawget
local lower = string.lower
languages = languages or { }
local languages = languages
+languages.data = languages.data or { }
local data = languages.data
local allocate = utilities.storage.allocate
@@ -359,7 +362,7 @@ local specifications = allocate {
{
["description"] = "Chinese, simplified",
["script"] = "hans",
- ["opentype-script"] = "hani",
+ ["opentypescript"] = "hani",
["bibliographical"] = "chi",
["terminological"] = "zho",
["context"] = "cn",
@@ -370,56 +373,85 @@ local specifications = allocate {
data.specifications = specifications
-local variants = { } data.variants = variants
-local opentypes = { } data.opentypes = opentypes
-local contexts = { } data.contexts = contexts
-local records = { } data.records = records
+local variants = { } data.variants = variants
+local contexts = { } data.contexts = contexts
+local records = { } data.records = records
+local scripts = { } data.scripts = scripts
+local opentypes = { } data.opentypes = opentypes
+local opentypescripts = { } data.opentypescripts = opentypescripts
for k=1,#specifications do
- local v = specifications[k]
- if v.variant then
- variants[v.variant] = v
+ local specification = specifications[k]
+ local variant = specification.variant
+ if variant then
+ variants[lower(variant)] = specification
end
- if v.opentype then
- opentypes[v.opentype] = v
+ local opentype = specification.opentype
+ if opentype then
+ opentypes[lower(opentype)] = specification
end
- local vc = v.context
- if vc then
- if type(vc) == "table" then
- for k=1,#vc do
- contexts[v] = vc[k]
+ local script = specification.script
+ if script then
+ scripts[lower(script)] = specification
+ end
+ local opentypescript = specification.opentypescript
+ if opentypescript then
+ opentypescripts[lower(opentypescript)] = specification
+ end
+    local context = specification.context
+ if context then
+ if type(context) == "table" then
+ for k=1,#context do
+ contexts[context[k]] = specification
end
else
- contexts[vc] = v
+ contexts[context] = specification
end
end
end
+local defaultvariant = variants["en-us"]
+
+local function get(k,key)
+ local v = rawget(variants,k) or rawget(opentypes,k) or rawget(contexts,k)
+ return v and v[key]
+end
+
setmetatableindex(variants, function(t,k)
- str = lower(str)
- local v = (l_variant[str] or l_opentype[str] or l_context[str] or l_variant.en).language
+ k = lower(k)
+ local v = get(k,"language") or defaultvariant.language
t[k] = v
return v
end)
setmetatableindex(opentypes, function(t,k)
- str = lower(str)
- local v = (l_variant[str] or l_opentype[str] or l_context[str] or l_variant.en).opentype
+ k = lower(k)
+ local v = get(k,"opentype") or "dflt"
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(opentypescripts, function(t,k)
+ k = lower(k)
+ local v = get(k,"opentypescript") or get(k,"script") or defaultvariant.opentypescript or defaultvariant.script
t[k] = v
return v
end)
setmetatableindex(contexts, function(t,k)
- str = lower(str)
- local v = (l_variant[str] or l_opentype[str] or l_context[str] or l_variant[languages.default]).context
- v = (type(v) == "table" and v[1]) or v
+    k = lower(k)
+ local v = get(k,"context") or defaultvariant.context
+ v = type(v) == "table" and v[1] or v
t[k] = v
return v
end)
setmetatableindex(records, function(t,k) -- how useful is this one?
- str = lower(str)
- local v = variants[str] or opentypes[str] or contexts[str] or variants.en
+ k = lower(k)
+ local v = get(k) or defaultvariant
t[k] = v
return v
end)
+
+-- print(opentypes.nl,opentypescripts.nl)
+-- print(opentypes.de,opentypescripts.de)
diff --git a/Master/texmf-dist/tex/context/base/lang-def.mkiv b/Master/texmf-dist/tex/context/base/lang-def.mkiv
index e838f60e1e9..d4e40dad1e7 100644
--- a/Master/texmf-dist/tex/context/base/lang-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-def.mkiv
@@ -211,28 +211,34 @@
\installlanguage
[\s!cs]
[\c!spacing=\v!packed,
- \c!leftsentence={\thickglue\endash\thickglue\penalty-20\relax},
- \c!rightsentence={\thickglue\endash\thickglue\penalty-20\relax},
- \c!leftsubsentence={\nobreakspace\emdash\nobreakspace\penalty-20\relax},
- \c!rightsubsentence={\nobreakspace\emdash\nobreakspace\penalty-20\relax},
+ \c!leftsentence={\nobreakspace\endash\thickglue\penalty\zerocount\relax},
+ \c!rightsentence=\c!leftsentence,
+ \c!leftsubsentence=\c!leftsentence,
+ \c!rightsubsentence=\c!rightsentence,
\c!leftquote=\lowerleftsingleninequote,
\c!rightquote=\upperrightsinglesixquote,
\c!leftquotation=\lowerleftdoubleninequote,
\c!rightquotation=\upperrightdoublesixquote,
- \c!date={\v!day,{.\thinspace},\v!month,\space,\v!year}]
+   \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year},
+ \s!lefthyphenmin=2,
+ \s!righthyphenmin=3
+]
\installlanguage
[\s!sk]
[\c!spacing=\v!packed,
- \c!leftsentence=\emdash,
- \c!rightsentence=\emdash,
- \c!leftsubsentence=\emdash,
- \c!rightsubsentence=\emdash,
- \c!leftquote=\upperleftsinglesixquote,
- \c!rightquote=\upperrightsingleninequote,
- \c!leftquotation=\upperleftdoublesixquote,
- \c!rightquotation=\upperrightdoubleninequote,
- \c!date={\v!day,{.\thinspace},\v!month,\space,\v!year}]
+ \c!leftsentence={\nobreakspace\endash\thickglue\penalty\zerocount\relax},
+ \c!rightsentence=\c!leftsentence,
+ \c!leftsubsentence=\c!leftsentence,
+ \c!rightsubsentence=\c!rightsentence,
+ \c!leftquote=\lowerleftsingleninequote,
+ \c!rightquote=\upperrightsinglesixquote,
+ \c!leftquotation=\lowerleftdoubleninequote,
+ \c!rightquotation=\upperrightdoublesixquote,
+   \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year},
+ \s!lefthyphenmin=2,
+ \s!righthyphenmin=3
+]
\installlanguage
[\s!hr]
diff --git a/Master/texmf-dist/tex/context/base/lang-frd.mkiv b/Master/texmf-dist/tex/context/base/lang-frd.mkiv
new file mode 100644
index 00000000000..352e5016dec
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-frd.mkiv
@@ -0,0 +1,143 @@
+%D \module
+%D [ file=lang-frd,
+%D version=2004.01.15,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Language Frequency Table Data,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\endinput
+
+%D This is experimental work! It is old stuff; whenever I need it I might
+%D redo it in \LUA.
+
+% http://www.onzetaal.nl/advies/letterfreq.html
+
+\startcharactertable[nl]
+ \charfreq a 7.47
+ \charfreq b 1.58
+ \charfreq c 1.24
+ \charfreq d 5.93
+ \charfreq e 18.91
+ \charfreq f .81
+ \charfreq g 3.4
+ \charfreq h 2.38
+ \charfreq i 6.5
+ \charfreq j 1.46
+ \charfreq k 2.25
+ \charfreq l 3.57
+ \charfreq m 2.21
+ \charfreq n 10.03
+ \charfreq o 6.06
+ \charfreq p 1.57
+ \charfreq q .009
+ \charfreq r 6.41
+ \charfreq s 3.73
+ \charfreq t 6.79
+ \charfreq u 1.99
+ \charfreq v 2.85
+ \charfreq w 1.52
+ \charfreq x .04
+ \charfreq y .035
+ \charfreq z 1.39
+\stopcharactertable
+
+% http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf
+
+% \startcharactertable[en]
+% \charfreq a 8.2
+% \charfreq b 1.5
+% \charfreq c 2.8
+% \charfreq d 4.3
+% \charfreq e 12.7
+% \charfreq f 2.2
+% \charfreq g 2
+% \charfreq h 6.1
+% \charfreq i 7
+% \charfreq j .2
+% \charfreq k .8
+% \charfreq l 4
+% \charfreq m 2.4
+% \charfreq n 6.7
+% \charfreq o 7.5
+% \charfreq p 1.9
+% \charfreq q .1
+% \charfreq r 6
+% \charfreq s 6.3
+% \charfreq t 9.1
+% \charfreq u 2.8
+% \charfreq v 1
+% \charfreq w 2.3
+% \charfreq x .1
+% \charfreq y 2
+% \charfreq z .1
+% \stopcharactertable
+
+% http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm
+
+\startcharactertable[en]
+ \charfreq a 8.04
+ \charfreq b 1.54
+ \charfreq c 3.06
+ \charfreq d 3.99
+ \charfreq e 12.51
+ \charfreq f 2.3
+ \charfreq g 1.96
+ \charfreq h 5.49
+ \charfreq i 7.26
+ \charfreq j .16
+ \charfreq k .67
+ \charfreq l 4.14
+ \charfreq m 2.53
+ \charfreq n 7.09
+ \charfreq o 7.6
+ \charfreq p 2
+ \charfreq q .11
+ \charfreq r 6.12
+ \charfreq s 6.54
+ \charfreq t 9.25
+ \charfreq u 2.71
+ \charfreq v .99
+ \charfreq w 1.92
+ \charfreq x .19
+ \charfreq y 1.73
+ \charfreq z .09
+\stopcharactertable
+
+% http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm
+
+\startcharactertable[de]
+ \charfreq a 6.47
+ \charfreq b 1.93
+ \charfreq c 2.68
+ \charfreq d 4.83
+ \charfreq e 17.48
+ \charfreq f 1.65
+ \charfreq g 3.06
+ \charfreq h 4.23
+ \charfreq i 7.73
+ \charfreq j .27
+ \charfreq k 1.46
+ \charfreq l 3.49
+ \charfreq m 2.58
+ \charfreq n 9.84
+ \charfreq o 2.98
+ \charfreq p .96
+ \charfreq q .02
+ \charfreq r 7.54
+ \charfreq s 6.83
+ \charfreq t 6.13
+ \charfreq u 4.17
+ \charfreq v .94
+ \charfreq w 1.48
+ \charfreq x .04
+ \charfreq y .08
+ \charfreq z 1.14
+\stopcharactertable
+
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/lang-frq-de.lua b/Master/texmf-dist/tex/context/base/lang-frq-de.lua
new file mode 100644
index 00000000000..3733f39f92e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-frq-de.lua
@@ -0,0 +1,12 @@
+return {
+ language = "de",
+ source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
+ frequencies = {
+ [0x0061] = 6.47, [0x0062] = 1.93, [0x0063] = 2.68, [0x0064] = 4.83, [0x0065] = 17.48,
+ [0x0066] = 1.65, [0x0067] = 3.06, [0x0068] = 4.23, [0x0069] = 7.73, [0x006A] = 0.27,
+ [0x006B] = 1.46, [0x006C] = 3.49, [0x006D] = 2.58, [0x006E] = 9.84, [0x006F] = 2.98,
+ [0x0070] = 0.96, [0x0071] = 0.02, [0x0072] = 7.54, [0x0073] = 6.83, [0x0074] = 6.13,
+ [0x0075] = 4.17, [0x0076] = 0.94, [0x0077] = 1.48, [0x0078] = 0.04, [0x0079] = 0.08,
+ [0x007A] = 1.14,
+ }
+}
diff --git a/Master/texmf-dist/tex/context/base/lang-frq-en.lua b/Master/texmf-dist/tex/context/base/lang-frq-en.lua
new file mode 100644
index 00000000000..9e18d716679
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-frq-en.lua
@@ -0,0 +1,26 @@
+-- return {
+-- language = "en",
+-- source = "http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf",
+-- frequencies = {
+-- [0x0061] = 8.2, [0x0062] = 1.5, [0x0063] = 2.8, [0x0064] = 4.3, [0x0065] = 12.7,
+-- [0x0066] = 2.2, [0x0067] = 2.0, [0x0068] = 6.1, [0x0069] = 7.0, [0x006A] = 0.2,
+-- [0x006B] = 0.8, [0x006C] = 4.0, [0x006D] = 2.4, [0x006E] = 6.7, [0x006F] = 7.5,
+-- [0x0070] = 1.9, [0x0071] = 0.1, [0x0072] = 6.0, [0x0073] = 6.3, [0x0074] = 9.1,
+-- [0x0075] = 2.8, [0x0076] = 1.0, [0x0077] = 2.3, [0x0078] = 0.1, [0x0079] = 2.0,
+-- [0x007A] = 0.1,
+-- }
+-- }
+
+return {
+ language = "en",
+ source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
+ frequencies = {
+ [0x0061] = 8.04, [0x0062] = 1.54, [0x0063] = 3.06, [0x0064] = 3.99, [0x0065] = 12.51,
+ [0x0066] = 2.30, [0x0067] = 1.96, [0x0068] = 5.49, [0x0069] = 7.26, [0x006A] = 0.16,
+ [0x006B] = 0.67, [0x006C] = 4.14, [0x006D] = 2.53, [0x006E] = 7.09, [0x006F] = 7.60,
+ [0x0070] = 2.00, [0x0071] = 0.11, [0x0072] = 6.12, [0x0073] = 6.54, [0x0074] = 9.25,
+ [0x0075] = 2.71, [0x0076] = 0.99, [0x0077] = 1.92, [0x0078] = 0.19, [0x0079] = 1.73,
+ [0x007A] = 0.09,
+ }
+}
+
diff --git a/Master/texmf-dist/tex/context/base/lang-frq-nl.lua b/Master/texmf-dist/tex/context/base/lang-frq-nl.lua
new file mode 100644
index 00000000000..7b640b77929
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-frq-nl.lua
@@ -0,0 +1,12 @@
+return {
+ language = "nl",
+ source = "http://www.onzetaal.nl/advies/letterfreq.html",
+ frequencies = {
+ [0x0061] = 7.47, [0x0062] = 1.58, [0x0063] = 1.24, [0x0064] = 5.93, [0x0065] = 18.91,
+ [0x0066] = 0.81, [0x0067] = 3.40, [0x0068] = 2.38, [0x0069] = 6.50, [0x006A] = 1.46,
+ [0x006B] = 2.25, [0x006C] = 3.57, [0x006D] = 2.21, [0x006E] = 10.03, [0x006F] = 6.06,
+ [0x0070] = 1.57, [0x0071] = 0.009, [0x0072] = 6.41, [0x0073] = 3.73, [0x0074] = 6.79,
+ [0x0075] = 1.99, [0x0076] = 2.85, [0x0077] = 1.52, [0x0078] = 0.04, [0x0079] = 0.035,
+ [0x007A] = 1.39,
+ }
+}
diff --git a/Master/texmf-dist/tex/context/base/lang-frq.mkiv b/Master/texmf-dist/tex/context/base/lang-frq.mkiv
new file mode 100644
index 00000000000..eaa93281a92
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-frq.mkiv
@@ -0,0 +1,235 @@
+%D \module
+%D [ file=lang-frq,
+%D version=2004.01.15,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Frequency Tables,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\endinput
+
+\writestatus{loading}{ConTeXt Language Macros / Frequency Tables}
+
+%D Some day I might redo this in \LUA. But anyway, who uses it? It's rather
+%D old code.
+
+\unprotect
+
+%M \usemodule[layout]
+
+%D \macros
+%D {charwidthmethod}
+%D
+%D This module implements a method for determining the width of an
+%D average character in a language. It uses the dimensions of the
+%D current fonts.
+%D
+%D \def\ShwChrWd#1#2#3%
+%D {\chardef\charwidthmethod#1\relax
+%D \mainlanguage[#2#3]\the\dimexpr(\averagecharwidth)}
+%D
+%D \starttabulate[|c|c|c|c|c|c|]
+%D \HL
+%D \NC \NC\bf0=amount\NC\bf1=.5em\NC2=ex\NC\bf3=frequency\NC\bf4=list\NC\NR
+%D \HL
+%D \NC\bf en\NC\ShwChrWd0en\NC\ShwChrWd1en\NC\ShwChrWd2en\NC\ShwChrWd3en\NC\ShwChrWd4en\NC\NR
+%D \NC\bf nl\NC\ShwChrWd0nl\NC\ShwChrWd1nl\NC\ShwChrWd2nl\NC\ShwChrWd3nl\NC\ShwChrWd4nl\NC\NR
+%D \NC\bf de\NC\ShwChrWd0de\NC\ShwChrWd1de\NC\ShwChrWd2de\NC\ShwChrWd3de\NC\ShwChrWd4de\NC\NR
+%D \HL
+%D \stoptabulate
+%D
+%D Method~1 ignores the widths and assumes that each character has a
+%D width of .5em, which is true for most monospaced fonts. Method~2
+%D takes the x as starting point, and assumes that its height kind of
+%D matches its width. Method~3 is the best one, and determines the
+%D average width based on the language specific character table.
+%D Method~4 is a mixture between the first two methods: character
+%D specific widths applied to an equal distribution. Method~0 reports
+%D the total count, which normally is~100.
+
+\chardef\charwidthmethod=3 % 0=amount 1=em 2=ex 3=frequency 4=flattened >4=ex
+
+%D \macros
+%D {charwidthlanguage}
+%D
+%D The language used for the calculations is defined as:
+
+\def\charwidthlanguage{\currentmainlanguage}
+
+%D \macros
+%D {charfreq}
+%D
+%D This method comes into action in the following macro:
+
+\def\charfreq#1 #2 % character fraction
+ {+(\ifcase\charwidthmethod
+ #2\dimexpr100\onepoint\relax
+ \or
+ #2\dimexpr\emwidth/2\relax
+ \or
+ #2\dimexpr\exheight\relax
+ \or
+ #2\fontcharwd\font`#1%
+ \or
+ \dimexpr100\fontcharwd\font`#1/\charactertsize\charwidthlanguage\relax % ugly hack
+ \else
+ #2\dimexpr\exheight\relax
+ \fi)}
+
+%D \macros
+%D {startcharactertable}
+%D
+%D A frequency table is defined with the following macro. The \type
+%D {charfreq} macro is used in this table.
+
+\installcorenamespace{frequencywidths}
+\installcorenamespace{frequencycounts}
+
+\let\stopcharactertable\relax
+
+\unexpanded\def\startcharactertable[#1]#2\stopcharactertable % \dimexpr has fuzzy lookahead
+ {\startnointerference
+ \setgvalue{\??frequencywidths#1}{#2}% the width vector
+ \scratchcounter\zerocount \def\charfreq##1 ##2 {\advance\scratchcounter\plusone} #2%
+ \setxvalue{\??frequencycounts#1}{\the\scratchcounter}% the character count
+ \stopnointerference}
+
+%D \macros
+%D {charactertable,charactertsize}
+%D
+%D The table content as well as the number of entries can be fetched with
+%D the following two macros. The architecture of the table and calling
+%D macro permits a fully expandable application.
+
+\def\charactertable#1%
+ {\csname\??frequencywidths\ifcsname\??frequencywidths#1\endcsname#1\else\s!en\fi\endcsname}
+
+\def\charactertsize#1%
+ {\csname\??frequencycounts\ifcsname\??frequencycounts#1\endcsname#1\else\s!en\fi\endcsname}
+
+%D Although it is of hardly any use, you can inherit a character table:
+%D
+%D \starttyping
+%D \startcharactertable[cz] \charactertable{en} \stopcharactertable
+%D \stoptyping
+
+\startcharactertable[en]
+ % empty
+\stopcharactertable % kind of default
+
+%D \macros
+%D {averagecharwidth}
+%D
+%D This macro reports the average width for the current main
+%D language (\the \dimexpr (\averagecharwidth)).
+
+\def\averagecharwidth{\dimexpr(\zeropoint\charactertable\charwidthlanguage)/100\relax}
+
+\unexpanded\def\showcharfreq
+ {\hbox\bgroup
+ \charwidthlanguage:%
+ \dostepwiserecurse\zerocount\plusfour\plusone
+ {\chardef\charwidthmethod\recurselevel\relax
+ \enspace\recurselevel/\the\dimexpr(\averagecharwidth)}%
+ \egroup}
+
+%D Just for fun, we show a few frequency tables as graphic (\in {figure}
+%D [fig:charfreq]).
+%D
+%D \startbuffer
+%D \definepalet [charfreq] [en=darkred, nl=darkgreen, de=darkblue]
+%D
+%D \def\charfreq#1 #2 %
+%D {\startMPdrawing
+%D interim linejoin := butt ;
+%D a := ASCII "#1" ;
+%D if (a >= (ASCII "a")) and (a <= (ASCII "z")) :
+%D draw ((0,#2*.25cm)--origin--(0,#2*.5cm))
+%D shifted (a*4mm+o,0)
+%D withpen pencircle scaled .5mm
+%D withcolor c;
+%D fi ;
+%D \stopMPdrawing}
+%D
+%D \resetMPdrawing
+%D \startMPdrawing
+%D numeric a, o ; a := o := 0 ;
+%D color c ; c := .5white ;
+%D string s ; s := "" ;
+%D \stopMPdrawing
+%D
+%D \startMPdrawing o := 0mm ; c := \MPcolor{charfreq:en} ; \stopMPdrawing
+%D \charactertable{en}
+%D
+%D \startMPdrawing o := 1mm ; c := \MPcolor{charfreq:nl} ; \stopMPdrawing
+%D \charactertable{nl}
+%D
+%D \startMPdrawing o := 2mm ; c := \MPcolor{charfreq:de} ; \stopMPdrawing
+%D \charactertable{de}
+%D
+%D \startMPdrawing
+%D for a := ASCII "a" upto ASCII "z" :
+%D draw textext.bot("\strut\tttf " & char a) shifted (a*4mm+1mm,-1mm) ;
+%D endfor ;
+%D \stopMPdrawing
+%D
+%D \MPdrawingdonetrue \getMPdrawing \resetMPdrawing
+%D \stopbuffer
+%D
+%D \placefigure
+%D [here]
+%D [fig:charfreq]
+%D {The character distributions for English, Dutch and German.}
+%D {\getbuffer}
+%D
+%D A few samples of usage of this mechanism are shown below:
+%D
+%D \startbuffer
+%D {\mainlanguage[en]\hsize65\averagecharwidth\mainlanguage[en]\input ward \blank}
+%D {\mainlanguage[nl]\hsize65\averagecharwidth\mainlanguage[en]\input ward \blank}
+%D {\mainlanguage[de]\hsize65\averagecharwidth\mainlanguage[en]\input ward \blank}
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D Although the widths differ, the consequences for breaking the paragraph
+%D into lines are minimal.
+
+%D \macros
+%D {freezeaveragecharacterwidth}
+%D
+%D This macro can be used to make sure that the width does not change during a
+%D page break when another font is used.
+
+\let\normalaveragecharacterwidth\averagecharacterwidth
+
+\unexpanded\def\freezeaveragecharacterwidth % global
+ {\xdef\averagecharacterwidth{\dimexpr\the\normalaveragecharacterwidth\relax}}
+
+%D Example:
+%D
+%D \starttyping
+%D \input lang-frq.mkiv
+%D \input lang-frd.mkiv
+%D
+%D \setupbodyfont
+%D [dejavu]
+%D
+%D \setemeasure{textwidth}{\the\dimexpr70\averagecharwidth}
+%D
+%D \setuplayout
+%D [width=\measure{textwidth}]
+%D
+%D \showframe
+%D
+%D \starttext
+%D \input ward
+%D \stoptext
+%D \stoptyping
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/lang-ini.lua b/Master/texmf-dist/tex/context/base/lang-ini.lua
index 97528097f15..4ae7656d354 100644
--- a/Master/texmf-dist/tex/context/base/lang-ini.lua
+++ b/Master/texmf-dist/tex/context/base/lang-ini.lua
@@ -17,7 +17,6 @@ if not modules then modules = { } end modules ['lang-ini'] = {
--~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation()
local type, tonumber = type, tonumber
-local utf = unicode.utf8
local utfbyte = utf.byte
local format, gsub = string.format, string.gsub
local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
@@ -29,12 +28,14 @@ local trace_patterns = false trackers.register("languages.patterns", function(v
local report_initialization = logs.reporter("languages","initialization")
-local prehyphenchar = lang.prehyphenchar -- global per language
+local prehyphenchar = lang.prehyphenchar -- global per language
local posthyphenchar = lang.posthyphenchar -- global per language
local lefthyphenmin = lang.lefthyphenmin
local righthyphenmin = lang.righthyphenmin
+local lang = lang
lang.exceptions = lang.hyphenation
+local new_langage = lang.new
languages = languages or {}
local languages = languages
@@ -65,7 +66,7 @@ local function resolve(tag)
if data then
instance = data.instance
if not instance then
- instance = lang.new(data.number)
+ instance = new_langage(data.number)
data.instance = instance
end
end
@@ -78,7 +79,7 @@ local function tolang(what) -- returns lang object
if data then
local instance = data.lang
if not instance then
- instance = lang.new(data.number)
+ instance = new_langage(data.number)
data.instance = instance
end
return instance
@@ -87,7 +88,8 @@ end
-- languages.tolang = tolang
--- todo: en+de => merge
+-- patterns=en
+-- patterns=en,de
local function loaddefinitions(tag,specification)
statistics.starttiming(languages)
@@ -95,44 +97,44 @@ local function loaddefinitions(tag,specification)
local definitions = settings_to_array(specification.patterns or "")
if #definitions > 0 then
if trace_patterns then
- report_initialization("pattern specification for language '%s': %s",tag,specification.patterns)
+ report_initialization("pattern specification for language %a: %s",tag,specification.patterns)
end
local dataused, ok = data.used, false
for i=1,#definitions do
local definition = definitions[i]
- if definition ~= "" then
- if definition == "reset" then -- interfaces.variables.reset
+ if definition == "" then
+ -- error
+ elseif definition == "reset" then -- interfaces.variables.reset
+ if trace_patterns then
+ report_initialization("clearing patterns for language %a",tag)
+ end
+ instance:clear_patterns()
+ elseif not dataused[definition] then
+ dataused[definition] = definition
+ local filename = "lang-" .. definition .. ".lua"
+ local fullname = resolvers.findfile(filename) or ""
+ if fullname ~= "" then
if trace_patterns then
- report_initialization("clearing patterns for language '%s'",tag)
+ report_initialization("loading definition %a for language %a from %a",definition,tag,fullname)
end
- instance:clear_patterns()
- elseif not dataused[definition] then
- dataused[definition] = definition
- local filename = "lang-" .. definition .. ".lua"
- local fullname = resolvers.findfile(filename) or ""
- if fullname ~= "" then
- if trace_patterns then
- report_initialization("loading definition '%s' for language '%s' from '%s'",definition,tag,fullname)
- end
- local defs = dofile(fullname) -- use regular loader instead
- if defs then -- todo: version test
- ok, nofloaded = true, nofloaded + 1
- instance:patterns (defs.patterns and defs.patterns.data or "")
- instance:hyphenation(defs.exceptions and defs.exceptions.data or "")
- else
- report_initialization("invalid definition '%s' for language '%s' in '%s'",definition,tag,filename)
- end
- elseif trace_patterns then
- report_initialization("invalid definition '%s' for language '%s' in '%s'",definition,tag,filename)
+ local defs = dofile(fullname) -- use regular loader instead
+ if defs then -- todo: version test
+ ok, nofloaded = true, nofloaded + 1
+ instance:patterns (defs.patterns and defs.patterns .data or "")
+ instance:hyphenation(defs.exceptions and defs.exceptions.data or "")
+ else
+ report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
end
elseif trace_patterns then
- report_initialization("definition '%s' for language '%s' already loaded",definition,tag)
+ report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
end
+ elseif trace_patterns then
+ report_initialization("definition %a for language %a already loaded",definition,tag)
end
end
return ok
elseif trace_patterns then
- report_initialization("no definitions for language '%s'",tag)
+ report_initialization("no definitions for language %a",tag)
end
statistics.stoptiming(languages)
end
@@ -144,7 +146,7 @@ local noflanguages = storage.shared.noflanguages
function languages.define(tag,parent)
noflanguages = noflanguages + 1
if trace_patterns then
- report_initialization("assigning number %s to %s",noflanguages,tag)
+ report_initialization("assigning number %a to %a",noflanguages,tag)
end
numbers[noflanguages] = tag
registered[tag] = {
@@ -161,7 +163,7 @@ function languages.define(tag,parent)
storage.shared.noflanguages = noflanguages
end
-function languages.synonym(synonym,tag) -- convenience function
+function languages.setsynonym(synonym,tag) -- convenience function
local l = registered[tag]
if l then
l.synonyms[synonym] = true -- maybe some day more info
@@ -169,7 +171,7 @@ function languages.synonym(synonym,tag) -- convenience function
end
function languages.installed(separator)
- context(concat(sortedkeys(registered),separator or ","))
+ return concat(sortedkeys(registered),separator or ",")
end
function languages.current(n)
@@ -187,8 +189,6 @@ function languages.association(tag) -- not yet used
local lat = tag and associated[tag]
if lat then
return lat[1], lat[2]
- else
- return nil, nil
end
end
@@ -204,7 +204,7 @@ end
-- a bit messy, we will do all language setting in lua as we can now assign
-- and 'patterns' will go away here.
-function languages.setdirty(tag)
+function languages.unload(tag)
local l = registered[tag]
if l then
l.dirty = true
@@ -217,25 +217,21 @@ if environment.initex then
return 0
end
- function commands.languagenumber()
- context(0)
- end
-
else
- local function getnumber(tag,default,patterns)
+ function languages.getnumber(tag,default,patterns)
local l = registered[tag]
if l then
if l.dirty then
if trace_patterns then
- report_initialization("checking patterns for %s (%s)",tag,default)
+ report_initialization("checking patterns for %a with default %a",tag,default)
end
-- patterns is already resolved to parent patterns if applicable
if patterns and patterns ~= "" then
if l.patterns ~= patterns then
l.patterns = patterns
if trace_patterns then
- report_initialization("loading patterns for '%s' using specification '%s'",tag,patterns)
+ report_initialization("loading patterns for %a using specification %a",tag,patterns)
end
loaddefinitions(tag,l)
else
@@ -244,13 +240,13 @@ else
elseif l.patterns == "" then
l.patterns = tag
if trace_patterns then
- report_initialization("loading patterns for '%s' using tag",tag)
+ report_initialization("loading patterns for %a using tag",tag)
end
local ok = loaddefinitions(tag,l)
if not ok and tag ~= default then
l.patterns = default
if trace_patterns then
- report_initialization("loading patterns for '%s' using default",tag)
+ report_initialization("loading patterns for %a using default",tag)
end
loaddefinitions(tag,l)
end
@@ -264,12 +260,6 @@ else
end
end
- languages.getnumber = getnumber
-
- function commands.languagenumber(tag,default,patterns)
- context(getnumber(tag,default,patterns))
- end
-
end
-- not that usefull, global values
@@ -294,7 +284,7 @@ function languages.loadwords(tag,filename)
end
end
-function languages.exceptions(tag,str)
+function languages.setexceptions(tag,str)
local data, instance = resolve(tag)
if data then
instance:hyphenation(string.strip(str)) -- we need to strip leading spaces
@@ -311,9 +301,9 @@ function languages.hyphenate(tag,str)
end
end
---~ hyphenation.define ("zerolanguage")
---~ hyphenation.loadpatterns ("zerolanguage") -- else bug
---~ hyphenation.loadexceptions("zerolanguage") -- else bug
+-- hyphenation.define ("zerolanguage")
+-- hyphenation.loadpatterns ("zerolanguage") -- else bug
+-- hyphenation.loadexceptions("zerolanguage") -- else bug
languages.logger = languages.logger or { }
@@ -322,13 +312,13 @@ function languages.logger.report()
for tag, l in sortedpairs(registered) do
if l.loaded then
r = r + 1
- result[r] = format("%s:%s:%s", tag, l.parent, l.number)
+ result[r] = format("%s:%s:%s",tag,l.parent,l.number)
end
end
- return (r > 0 and concat(result," ")) or "none"
+ return r > 0 and concat(result," ") or "none"
end
--- must happen at the tex end
+-- must happen at the tex end .. will use lang-def.lua
languages.associate('en','latn','eng')
languages.associate('uk','latn','eng')
@@ -347,60 +337,19 @@ statistics.register("language load time", function()
return statistics.elapsedseconds(languages, format(", nofpatterns: %s",nofloaded))
end)
---~ -- obsolete
---~ --
---~ -- loading the 26 languages that we normally load in mkiv, the string based variant
---~ -- takes .84 seconds (probably due to the sub's) while the lpeg variant takes .78
---~ -- seconds
---~ --
---~ -- the following lpeg can probably be improved (it was one of the first I made)
-
---~ local leftbrace = lpeg.P("{")
---~ local rightbrace = lpeg.P("}")
---~ local spaces = lpeg.S(" \r\n\t\f")
---~ local spacing = spaces^0
---~ local validchar = 1-(spaces+rightbrace+leftbrace)
---~ local validword = validchar^1
---~ local content = spacing * leftbrace * spacing * lpeg.C((spacing * validword)^0) * spacing * rightbrace * lpeg.P(true)
---~
---~ local command = lpeg.P("\\patterns")
---~ local parser = (1-command)^0 * command * content
---~
---~ local function filterpatterns(filename)
---~ return lpegmatch(parser,io.loaddata(resolvers.findfile(filename)) or "")
---~ end
---~
---~ local command = lpeg.P("\\hyphenation")
---~ local parser = (1-command)^0 * command * content
---~
---~ local function filterexceptions(filename)
---~ return lpegmatch(parser,io.loaddata(resolvers.findfile(filename)) or "") -- "" ?
---~ end
---~
---~ local function loadthem(tag, filename, filter, target)
---~ statistics.starttiming(languages)
---~ local data, instance = resolve(tag)
---~ local fullname = (filename and filename ~= "" and resolvers.findfile(filename)) or ""
---~ local ok = fullname ~= ""
---~ if ok then
---~ if trace_patterns then
---~ report_initialization("filtering %s for language '%s' from '%s'",target,tag,fullname)
---~ end
---~ lang[target](data,filter(fullname) or "")
---~ else
---~ if trace_patterns then
---~ report_initialization("no %s for language '%s' in '%s'",target,tag,filename or "?")
---~ end
---~ lang[target](instance,"")
---~ end
---~ statistics.stoptiming(languages)
---~ return ok
---~ end
---~
---~ function hyphenation.loadpatterns(tag, patterns)
---~ return loadthem(tag, patterns, filterpatterns, "patterns")
---~ end
---~
---~ function hyphenation.loadexceptions(tag, exceptions)
---~ return loadthem(tag, exceptions, filterexceptions, "exceptions")
---~ end
+-- interface
+
+local getnumber = languages.getnumber
+
+function commands.languagenumber(tag,default,patterns)
+ context(getnumber(tag,default,patterns))
+end
+
+function commands.installedlanguages(separator)
+ context(languages.installed(separator))
+end
+
+commands.definelanguage = languages.define
+commands.setlanguagesynonym = languages.setsynonym
+commands.unloadlanguage = languages.unload
+commands.setlanguageexceptions = languages.setexceptions
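
A minimal usage sketch at the Lua end (the tags "nl" and "en" are illustrative only,
assuming the usual commands/context bridge defined above):

    commands.languagenumber("nl","en")  -- prints the \language number for "nl", falling back to "en"
    commands.installedlanguages(", ")   -- prints the installed language tags, separated by ", "
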
diff --git a/Master/texmf-dist/tex/context/base/lang-ini.mkiv b/Master/texmf-dist/tex/context/base/lang-ini.mkiv
index 150b68f702f..3226c1255ce 100644
--- a/Master/texmf-dist/tex/context/base/lang-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-ini.mkiv
@@ -174,7 +174,7 @@
\newtoks \everysetuplanguage
-\def\installedlanguages{\ctxlua{languages.installed()}}
+\def\installedlanguages{\ctxcommand{installedlanguages()}}
\unexpanded\def\doiflanguageelse#1%
{\ifcsname\??language#1\c!state\endcsname
@@ -199,10 +199,10 @@
\lang_basics_install_indeed{#1}{#1}%
\getparameters[\??language#1][\c!state=\v!start,#2]}%
\edef\currentsetuplanguage{#1}%
- \ctxlua{languages.define("#1","\specificlanguageparameter{#1}\s!default")}%
+ \ctxcommand{definelanguage("#1","\specificlanguageparameter{#1}\s!default")}%
\the\everysetuplanguage}
{\setvalue{\??languagelinked#1}{#2}%
- \ctxlua{languages.synonym("#1","#2")}%
+ \ctxcommand{setlanguagesynonym("#1","#2")}%
\lang_basics_install_indeed{#1}{#2}}}
\def\lang_basics_install_indeed#1#2%
@@ -215,7 +215,7 @@
\unexpanded\def\doifpatternselse#1%
{\begingroup % will change
- \language[#1]%
+ \lang_basics_set_current[#1]%
\ifnum\normallanguage>\zerocount
\endgroup\expandafter\firstoftwoarguments
\else
@@ -254,7 +254,7 @@
\lang_basics_synchronize}
\appendtoks
- \ctxlua{languages.setdirty("\currentsetuplanguage")}%
+ \ctxcommand{unloadlanguage("\currentsetuplanguage")}%
\to \everysetuplanguage
\setuplanguage
@@ -286,7 +286,8 @@
\c!rightspeech=\languageparameter\c!rightquotation,
\c!limittext=\unknown,
\c!date={\v!year,\ ,\v!month,\ ,\v!day},
- \c!text=Ag]
+ \c!text=Ag,
+ \s!font=] % \v!auto : experimental !
% to be tested:
%
@@ -361,13 +362,40 @@
)}\relax
\the\everylanguage\relax}
+\newcount\hyphenminoffset
+
+\unexpanded\def\lesshyphens
+ {\advance\hyphenminoffset\plusone
+ \lang_basics_synchronize_min_max}
+
+\unexpanded\def\morehyphens
+ {\ifcase\hyphenminoffset \else
+ \advance\hyphenminoffset\minusone
+ \fi
+ \lang_basics_synchronize_min_max}
+
+\unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but suppress application
+ {\ifx\dohyphens\relax
+ \unexpanded\edef\dohyphens
+ {\hyphenpenalty \the\hyphenpenalty
+ \exhyphenpenalty\the\exhyphenpenalty
+ \relax}%
+ \fi
+ \hyphenpenalty \plustenthousand
+ \exhyphenpenalty\plustenthousand}
+
+\let\dohyphens\relax
+
+\unexpanded\def\lang_basics_synchronize_min_max
+ {% these values are stored along with glyph nodes
+ \lefthyphenmin \numexpr0\languageparameter\s!lefthyphenmin +\hyphenminoffset\relax
+ \righthyphenmin\numexpr0\languageparameter\s!righthyphenmin+\hyphenminoffset\relax
+ % these values are stored with the language (global!)
+ \prehyphenchar \languageparameter\s!righthyphenchar\relax
+ \posthyphenchar\languageparameter\s!lefthyphenchar \relax}
+
\appendtoks
- % these values are stored along with glyph nodes
- \lefthyphenmin \numexpr0\languageparameter\s!lefthyphenmin +\hyphenminoffset\relax
- \righthyphenmin\numexpr0\languageparameter\s!righthyphenmin+\hyphenminoffset\relax
- % these values are stored with the language (global!)
- \prehyphenchar \languageparameter\s!righthyphenchar\relax
- \posthyphenchar\languageparameter\s!lefthyphenchar \relax
+ \lang_basics_synchronize_min_max
\to \everylanguage
% \appendtoks
@@ -394,25 +422,50 @@
% \dorecurse{100}{dit is toch wel een heel\normalhyphendiscretionary lang\normalhyphendiscretionary woord \recurselevel\ }
% \dorecurse{100}{dit is toch wel een heellangwoord \recurselevel\ }
+% new experimental feature
+
+\unexpanded\def\setuplanguages
+ {\setuplanguage[\s!default]}
+
+% \setuplanguages[\s!font=\v!auto]
+% \setuplanguage[\s!default][\s!font=\v!auto]
+% \setuplanguage[nl][\s!font=\v!auto]
+
+\ifdefined\feature \else \let\feature\gobbleoneargument \fi
+
+\appendtoks
+ \edef\p_language_font{\languageparameter\s!font}%
+ \ifx\p_language_font\v!auto
+ \feature\currentlanguage
+ \fi
+\to \everylanguage
+
% The following may be a solution for the fact that one cannot
% change catcodes of characters like : and ; inside an environment.
% we will also permit access by the other names
-\unexpanded\def\lang_basics_set_current[#1]%
- {\edef\askedlanguage{#1}%
- \ifx\askedlanguage\empty \else
+%D Fast switcher
+
+\def\lang_basics_switch_asked
+ {\ifx\askedlanguage\empty \else
\ifcsname\??languagelinked\askedlanguage\endcsname
\edef\askedlanguage{\csname\??languagelinked\askedlanguage\endcsname}%
\ifx\currentlanguage\askedlanguage \else
\setcurrentlanguage\currentmainlanguage\askedlanguage
\lang_basics_synchronize
\fi
- \else
- \showmessage\m!languages6{#1}%
\fi
\fi}
+\unexpanded\def\uselanguageparameter#1%
+ {\edef\askedlanguage{#1\c!language}%
+ \lang_basics_switch_asked}
+
+\unexpanded\def\lang_basics_set_current[#1]%
+ {\edef\askedlanguage{#1}%
+ \lang_basics_switch_asked}
+
\unexpanded\def\language
{\doifnextoptionalelse\lang_basics_set_current\normallanguage}
@@ -570,15 +623,15 @@
\let\stopexceptions\relax
\unexpanded\def\startexceptions
- {\dosingleempty\dostartexceptions}
+ {\dosingleempty\lang_basics_start_exceptions}
-\def\dostartexceptions[#1]#2\stopexceptions % multilingual or not?
+\def\lang_basics_start_exceptions[#1]#2\stopexceptions % multilingual or not?
{\begingroup
\edef\askedlanguage{\reallanguagetag{#1}}%
\ifx\askedlanguage\empty
\let\askedlanguage\currentlanguage
\fi
- \ctxlua{languages.exceptions("\askedlanguage",\!!bs#2\!!es)}%
+ \ctxcommand{setlanguageexceptions("\askedlanguage",\!!bs#2\!!es)}%
\endgroup}
%D For the moment here:
diff --git a/Master/texmf-dist/tex/context/base/lang-lab.lua b/Master/texmf-dist/tex/context/base/lang-lab.lua
index 360f2aa11b5..91c258418c9 100644
--- a/Master/texmf-dist/tex/context/base/lang-lab.lua
+++ b/Master/texmf-dist/tex/context/base/lang-lab.lua
@@ -6,70 +6,21 @@ if not modules then modules = { } end modules ['lang-lab'] = {
license = "see context related readme files"
}
---~ local function complete()
---~ local function process(what)
---~ for tag, data in next, what do
---~ for k, v in next, data.labels do
---~ languages[k] = true
---~ end
---~ end
---~ end
---~ process(languages.labels.data.titles)
---~ process(languages.labels.data.texts)
---~ process(languages.labels.data.functions)
---~ process(languages.labels.data.tags)
---~ local function process(what)
---~ for tag, data in next, what do
---~ local labels = data.labels
---~ for k, v in next, languages do
---~ if not labels[k] then
---~ labels[k] = ""
---~ end
---~ end
---~ end
---~ end
---~ process(languages.data.labels.titles)
---~ process(languages.data.labels.texts)
---~ process(languages.data.labels.functions)
---~ process(languages.data.labels.tags)
---~ end
---~
---~ local function strip(default)
---~ local function process(what)
---~ for tag, data in next, what do
---~ local labels = data.labels
---~ for k, v in next, labels do
---~ if v == "" then
---~ labels[k] = default
---~ end
---~ end
---~ end
---~ end
---~ process(languages.data.labels.titles)
---~ process(languages.data.labels.texts)
---~ process(languages.data.labels.functions)
---~ process(languages.data.labels.tags)
---~ end
---~
---~ complete()
---~ strip(false)
---~ strip()
-
---~ table.print(languages.data.labels,"languages.data.labels",false,true,true)
-
--- this will move
-
local format, find = string.format, string.find
local next, rawget, type = next, rawget, type
-local prtcatcodes = tex.prtcatcodes
local lpegmatch = lpeg.match
+local formatters = string.formatters
-languages.labels = languages.labels or { }
+local prtcatcodes = catcodes.numbers.prtcatcodes -- todo: use different method
local trace_labels = false trackers.register("languages.labels", function(v) trace_labels = v end)
local report_labels = logs.reporter("languages","labels")
-local variables = interfaces.variables
+languages.labels = languages.labels or { }
+local labels = languages.labels
+
+local variables = interfaces.variables
+local settings_to_array = utilities.parsers.settings_to_array
local splitter = lpeg.splitat(":")
@@ -77,31 +28,32 @@ local function split(tag)
return lpegmatch(splitter,tag)
end
-languages.labels.split = split
+labels.split = split
-local function definelanguagelabels(data,command,tag,rawtag)
+local contextsprint = context.sprint
+
+local function definelanguagelabels(data,class,tag,rawtag)
for language, text in next, data.labels do
if text == "" then
-- skip
elseif type(text) == "table" then
- context("\\%s[%s][%s={{%s},{%s}}]",command,language,tag,text[1],text[2])
+ contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}")
if trace_labels then
- report_labels("language '%s', defining label '%s' as '%s' and '%s'",language,rawtag,text[1],text[2])
+ report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2])
end
else
- context("\\%s[%s][%s={{%s},}]",command,language,tag,text)
+ contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}")
if trace_labels then
- report_labels("language '%s', defining label '%s' as '%s'",language,rawtag,text)
+ report_labels("language %a, defining label %a as %a",language,rawtag,text)
end
end
end
end
-function languages.labels.define(command,name,prefixed)
+function labels.define(class,name,prefixed)
local list = languages.data.labels[name]
if list then
- report_labels("defining label set '%s'",name)
- context.pushcatcodes(prtcatcodes) -- context.unprotect
+ report_labels("defining label set %a",name)
for tag, data in next, list do
if data.hidden then
-- skip
@@ -110,45 +62,81 @@ function languages.labels.define(command,name,prefixed)
if second then
if rawget(variables,first) then
if rawget(variables,second) then
- definelanguagelabels(data,command,format("\\v!%s:\\v!%s",first,second),tag)
+ definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag)
else
- definelanguagelabels(data,command,format("\\v!%s:%s",first,second),tag)
+ definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag)
end
- elseif rawget(variables,second) then
- definelanguagelabels(data,command,format("%s:\\v!%s",first,second),tag)
+ elseif rawget(variables,second) then
+ definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag)
else
- definelanguagelabels(data,command,format("%s:%s",first,second),tag)
+ definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag)
end
elseif rawget(variables,rawtag) then
- definelanguagelabels(data,command,format("\\v!%s",tag),tag)
+ definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag)
else
- definelanguagelabels(data,command,tag,tag)
+ definelanguagelabels(data,class,tag,tag)
end
else
- definelanguagelabels(data,command,tag,tag)
+ definelanguagelabels(data,class,tag,tag)
end
end
- context.popcatcodes() -- context.protect
else
- report_labels("unknown label set '%s'",name)
+ report_labels("unknown label set %a",name)
end
end
---~ function languages.labels.check()
---~ for category, list in next, languages.data.labels do
---~ for tag, specification in next, list do
---~ for language, text in next, specification.labels do
---~ if type(text) == "string" and find(text,",") then
---~ report_labels("label with comma: category '%s', language '%s', tag '%s', text '%s'",
---~ category, language, tag, text)
---~ end
---~ end
---~ end
---~ end
---~ end
---~
---~ languages.labels.check()
+-- function labels.check()
+-- for category, list in next, languages.data.labels do
+-- for tag, specification in next, list do
+-- for language, text in next, specification.labels do
+-- if type(text) == "string" and find(text,",") then
+-- report_labels("warning: label with comma found, category %a, language %a, tag %a, text %a",
+-- category, language, tag, text)
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- labels.check()
+
+-- interface
+
+commands.definelabels = labels.define
-- function commands.setstrippedtextprefix(str)
-- context(string.strip(str))
-- end
+
+-- list : { "a", "b", "c" }
+-- separator : ", "
+-- last : " and "
+
+-- text : "a,b,c"
+-- separators : "{, },{ and }"
+
+function commands.concatcommalist(settings) -- it's too easy to forget that this one is there
+ local list = settings.list or settings_to_array(settings.text or "")
+ local size = #list
+ local command = settings.command and context[settings.command] or context
+ if size > 1 then
+ local separator, last = " ", " "
+ if settings.separators then
+ local set = settings_to_array(settings.separators)
+ separator = set[1] or settings.separator or separator
+ last = set[2] or settings.last or last
+ else
+ separator = settings.separator or separator
+ last = settings.last or last
+ end
+ command(list[1])
+ for i=2,size-1 do
+ context(separator)
+ command(list[i])
+ end
+ context(last)
+ end
+ if size > 0 then
+ command(list[size])
+ end
+end
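
A minimal sketch of a call from the Lua end, with illustrative values; it assumes that
settings_to_array splits the braced separators string as in the comment above:

    commands.concatcommalist {
        list       = { "aap", "noot", "mies" },
        separators = "{, },{ en }",
    }
    -- typesets: aap, noot en mies
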
diff --git a/Master/texmf-dist/tex/context/base/lang-lab.mkiv b/Master/texmf-dist/tex/context/base/lang-lab.mkiv
index a9744f69973..1ddb44cbb79 100644
--- a/Master/texmf-dist/tex/context/base/lang-lab.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-lab.mkiv
@@ -20,7 +20,9 @@
%D Left-overs:
-\ifdefined\sixperemspace \else \def\sixperemspace{ } \fi % \utfchar{2006"} % we could embed 0x2006 but it does not show up in a editor
+\ifdefined\sixperemspace \else
+ \def\sixperemspace{\normalUchar"2006}
+\fi
%D In this module we deal with language dependent labels and
%D prefixes, like in {\em Figure~12} and {\em Chapter 1}. In
@@ -37,8 +39,6 @@
%D in front as well as after a part number. This is why the
%D current implementation of labels supports two labels too.
-\ifdefined\simplifiedcommands \else \newtoks\simplifiedcommands \fi
-
%D \macros
%D {setupheadtext, setuplabeltext}
%D
@@ -135,8 +135,7 @@
\let#7\firstofoneargument % to be checked
\let#8\firstofoneargument % to be checked
\let#9\firstofoneargument % to be checked
- \to \simplifiedcommands
- }
+ \to \everysimplifycommands}
\let\flushleftlabelclass \firstoftwoarguments
\let\flushrightlabelclass\secondoftwoarguments
@@ -234,17 +233,18 @@
\def\lang_labels_text_prefix_assign_dumb#1[#2,#3]%
{\expandafter\def\csname\??label\currenttextprefixclass:\currenttextprefixtag:#1\endcsname{#2}}
+\unexpanded\def\setlabeltextpair#1#2#3#4#5% a fast one for usage at the Lua end
+ {\expandafter\def\csname\??label#1:\reallanguagetag{#2}:#3\endcsname{{#4}{#5}}} % class tag key left right
+
\definelabelclass [head] [0] % titles
\definelabelclass [label] [0] % texts
\definelabelclass [mathlabel] [0] % functions
\definelabelclass [taglabel] [2] % tags
-\ctxlua{
- languages.labels.define("setupheadtext","titles",true)%
- languages.labels.define("setuplabeltext","texts",true)%
- languages.labels.define("setupmathlabeltext","functions",false)%
- languages.labels.define("setuptaglabeltext","tags",false)%
-}
+\ctxcommand{definelabels("head", "titles", true )}
+\ctxcommand{definelabels("label", "texts", true )}
+\ctxcommand{definelabels("mathlabel", "functions",false)}
+\ctxcommand{definelabels("taglabel", "tags", false)}
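
For illustration, what one such label definition sends to TeX; the tag "chapter" and
the text "Chapter " are made-up examples, not entries taken from lang-txt.lua:

    -- at the Lua end (labels.define via definelanguagelabels):
    contextsprint(prtcatcodes,"\\setlabeltextpair{","head","}{","en","}{","chapter","}{","Chapter ","}{","","}")
    -- which reaches TeX as: \setlabeltextpair{head}{en}{chapter}{Chapter }{}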
%D \macros
%D {translate}
@@ -264,15 +264,17 @@
%D which expands to {\em something} or {\em iets}, depending on
%D the current language.
+\installcorenamespace{translation}
+
\unexpanded\def\translate
{\dosingleempty\lang_translate}
\def\lang_translate[#1]%
- {\getparameters[\??lg][#1]%
- \ifcsname\??lg\currentlanguage\endcsname
- \csname\??lg\currentlanguage\endcsname
- \else\ifcsname\??lg\s!en\endcsname
- \csname\??lg\s!en\endcsname
+ {\getparameters[\??translation][#1]%
+ \ifcsname\??translation\currentlanguage\endcsname
+ \csname\??translation\currentlanguage\endcsname
+ \else\ifcsname\??translation\s!en\endcsname
+ \csname\??translation\s!en\endcsname
\else
[translation #1]%
\fi\fi}
@@ -295,7 +297,43 @@
%D \stoptyping
\def\assigntranslation[#1]\to#2%
- {\getparameters[\??lg][#1]%
- \edef#2{\csname\??lg\currentlanguage\endcsname}}
+ {\getparameters[\??translation][#1]%
+ \edef#2{\csname\??translation\currentlanguage\endcsname}}
+
+%D \macros
+%D {commalistsentence}
+%D
+%D Redone in \LUA:
+%D
+%D \startbuffer
+%D \commalistsentence[aap,noot,mies]
+%D \commalistsentence[aap,noot]
+%D \commalistsentence[aap]
+%D \commalistsentence[a,b,c]
+%D \commalistsentence[a,b,c][{ \& },{ and }]
+%D \commalistsentence[a,b,c][+,-]
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D \startlines
+%D \getbuffer
+%D \stoplines
+
+\unexpanded\def\commalistsentence
+ {\dodoubleempty\typo_helpers_concat_comma_list}
+
+\def\typo_helpers_concat_comma_list[#1][#2]%
+ {\ctxcommand{concatcommalist{
+ text = \!!bs#1\!!es,
+ separators = \!!bs#2\!!es,
+ first = \!!bs\labeltext{and-1}\!!es,
+ second = \!!bs\labeltext{and-2}\!!es
+ }}}
+
+\setuplabeltext [\s!nl] [and-1={{, }}, and-2={{ en }}] % 1, 2 en 3
+\setuplabeltext [\s!en] [and-1={{, }}, and-2={{, }}] % 1, 2, 3
+\setuplabeltext [\s!de] [and-1={{, }}, and-2={{ und }}] % 1, 2 und 3
+\setuplabeltext [\s!hr] [and-1={{, }}, and-2={{ i }}] % 1, 2 i 3
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/lang-mis.mkiv b/Master/texmf-dist/tex/context/base/lang-mis.mkiv
index ce23c0f60e8..4d8b8e08aeb 100644
--- a/Master/texmf-dist/tex/context/base/lang-mis.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-mis.mkiv
@@ -664,4 +664,35 @@
\ifdefined\normalcompound \else \let\normalcompound=| \fi
+%D \macros
+%D {compound}
+%D
+%D We will overload the already active \type {|} so we have
+%D to save its meaning in order to be able to use this handy
+%D macro.
+%D
+%D \starttyping
+%D so test\compound{}test can be used instead of test||test
+%D \stoptyping
+
+\bgroup
+
+ \catcode\barasciicode\activecatcode
+
+ \unexpanded\gdef\compound#1{|#1|}
+
+ \doglobal \appendtoks
+ \def|#1|{\ifx#1\empty\empty-\else#1\fi}%
+ \to \everysimplifycommands
+
+\egroup
+
+%D Here we hook some code into the clean up mechanism needed
+%D for verbatim data.
+
+\appendtoks
+ \disablecompoundcharacters
+ \disablediscretionaries
+\to \everycleanupfeatures
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/lang-txt.lua b/Master/texmf-dist/tex/context/base/lang-txt.lua
index f28cb18f12f..4c3a3a9859c 100644
--- a/Master/texmf-dist/tex/context/base/lang-txt.lua
+++ b/Master/texmf-dist/tex/context/base/lang-txt.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['lang-txt'] = {
comment = "companion to lang-lab.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
-- The content of this file is derived from the mkii lang-* files and
@@ -23,7 +24,7 @@ if not modules then modules = { } end modules ['lang-txt'] = {
-- da Danish Arne Jorgensen
-- de German Tobias Burnus, ...
-- en English Hans Hagen, ...
--- es Spanish ...
+-- es Spanish Andrés Montoya, ...
-- fi Finnish ...
-- fr French Daniel Flipo, Arthur Reutenauer
-- gr Greek Apostolos Syropoulos, Thomas Schmitz
@@ -65,229 +66,351 @@ data.labels={
functions={
Pr={
labels={
+ cz="P",
en="Pr",
+ sk="P",
},
},
arccos={
labels={
+ cz="arccos",
en="arccos",
+ es="arc\\sixperemspace cos",
hr="arc\\sixperemspace cos",
pl="arc\\sixperemspace cos",
+ sk="arccos",
},
},
arcctg={
labels={
+ cz="arccotg",
en="arccot",
+ es="arc\\sixperemspace cot",
hr="arc\\sixperemspace ctg",
pl="arc\\sixperemspace ctg",
+ sk="arccotg",
},
},
arcsin={
labels={
+ cz="arcsin",
en="arcsin",
+ es="arc\\sixperemspace sen",
hr="arc\\sixperemspace sin",
pl="arc\\sixperemspace sin",
+ sk="arcsin",
},
},
arctan={
labels={
+ cz="arctg",
en="arctan",
+ es="arc\\sixperemspace tan",
hr="arc\\sixperemspace tg",
pl="arc\\sixperemspace tg",
+ sk="arctg",
},
},
arctg={
labels={
+ cz="arctg",
en="arctan",
+ es="arc\\sixperemspace tan",
hr="arc\\sixperemspace tg",
pl="arc\\sixperemspace tg",
+ sk="arctg",
},
},
arg={
labels={
+ cz="arg",
en="arg",
+ es="arg",
+ sk="arg",
},
},
cos={
labels={
+ cz="cos",
en="cos",
+ es="cos",
+ sk="cos",
},
},
cosh={
labels={
+ cz="cosh",
en="cosh",
+ es="cosh",
+ sk="cosh",
},
},
cot={
labels={
+ cz="cotg",
en="cot",
+ es="cot",
hr="ctg",
pl="ctg",
+ sk="cotg",
},
},
coth={
labels={
+ cz="cotgh",
en="coth",
+ es="coth",
+ sk="cotgh",
},
},
csc={
labels={
+ cz="cosec",
en="csc",
+ es="csc",
+ sk="cosec",
},
},
ctg={
labels={
+ cz="cotg",
en="cot",
+ es="cot",
hr="ctg",
pl="ctg",
+ sk="cotg",
},
},
deg={
labels={
+ cz="deg",
en="deg",
+ es="gr",
+ sk="deg",
},
},
det={
labels={
+ cz="det",
en="det",
+ es="det",
+ sk="det",
},
},
dim={
labels={
+ cz="dim",
en="dim",
+ es="dim",
+ sk="dim",
},
},
exp={
labels={
+ cz="exp",
en="exp",
+ es="exp",
+ sk="exp",
},
},
gcd={
labels={
+ cz="NSD",
en="gcd",
+ es="mcd",
hr="nzd",
nl="ggd",
+ sk="NSD",
},
},
hom={
labels={
+ cz="Hom",
en="hom",
+ es="hom",
+ sk="Hom",
},
},
inf={
labels={
+ cz="inf",
en="inf",
+ es="inf",
+ sk="inf",
},
},
injlim={
labels={
+ cz="inj\\sixperemspace lim",
en="inj\\sixperemspace lim",
+ es="lím\\sixperemspace iny",
+ sk="inj\\sixperemspace lim",
},
},
ker={
labels={
+ cz="ker",
en="ker",
+ es="Ker",
+ sk="ker",
},
},
lcm={
labels={
+ cz="NSN",
en="lcm",
+ es="MCM",
hr="nzv",
nl="kgv",
+ sk="NSN",
},
},
lg={
labels={
+ cz="log",
en="lg",
+ es="log",
+ sk="log",
},
},
lim={
labels={
+ cz="lim",
en="lim",
+ es="lím",
+ sk="lim",
},
},
liminf={
labels={
+ cz="lim\\sixperemspace inf",
en="lim\\sixperemspace inf",
+ es="lím\\sixperemspace inf",
+ sk="lim\\sixperemspace inf",
},
},
limsup={
labels={
+ cz="lim\\sixperemspace sup",
en="lim\\sixperemspace sup",
+ es="lím\\sixperemspace sup",
+ sk="lim\\sixperemspace sup",
},
},
ln={
labels={
+ cz="ln",
en="ln",
+ es="ln",
+ sk="ln",
},
},
log={
labels={
+ cz="log",
en="log",
+ es="log",
+ sk="log",
},
},
max={
labels={
+ cz="max",
en="max",
+ es="máx",
+ sk="max",
},
},
median={
labels={
+ cz="\\tilde",
en="median",
+ es="Mediana",
+ sk="\\tilde",
},
},
min={
labels={
+ cz="min",
en="min",
+ es="mín",
+ sk="min",
},
},
mod={
labels={
+ cz="mod",
en="mod",
+ es="mod",
+ sk="mod",
},
},
projlim={
labels={
+ cz="proj\\sixperemspace lim",
en="proj\\sixperemspace lim",
+ es="lím\\sixperemspace proy",
+ sk="proj\\sixperemspace lim",
},
},
sec={
labels={
+ cz="sec",
en="sec",
+ es="sec",
+ sk="sec",
},
},
sin={
labels={
+ cz="sin",
en="sin",
+ es="sen",
+ sk="sin",
},
},
sinh={
labels={
+ cz="sinh",
en="sinh",
+ es="senh",
+ sk="sinh",
},
},
sup={
labels={
+ cz="sup",
en="sup",
+ es="sup",
+ sk="sup",
},
},
tan={
labels={
+ cz="tg",
en="tan",
+ es="tan",
hr="tg",
pl="tg",
+ sk="tg",
},
},
tanh={
labels={
+ cz="tgh",
en="tanh",
+ es="tanh",
+ sk="tgh",
},
},
tg={
labels={
+ cz="tg",
en="tan",
+ es="tan",
hr="tg",
pl="tg",
+ sk="tg",
},
},
},
@@ -296,11 +419,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="a",
da="",
de="und",
en="and",
- es="",
+ es="y",
fi="",
fr="",
gr="",
@@ -316,7 +439,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="a",
sl="",
sv="",
tk="",
@@ -403,14 +526,14 @@ data.labels={
},
},
["april:mnem"]={
- labels={
+ labels={
af="",
ca="",
- cs="",
+ cs="dub.",
da="",
de="",
en="apr",
- es="",
+ es="abr.",
fi="",
fr="",
gr="",
@@ -426,7 +549,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="apr.",
sl="",
sv="",
tk="",
@@ -444,7 +567,7 @@ data.labels={
da="på side ",
de="auf Seite ",
en="at page ",
- es="",
+ es="en la página ",
fi="",
fr="à la page ",
gr="",
@@ -460,7 +583,7 @@ data.labels={
pt="",
ro="",
ru="на странице ",
- sk="",
+ sk="na strane ",
sl="na strani ",
sv="på sida ",
tk="",
@@ -513,11 +636,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="srp.",
da="",
de="",
en="aug",
- es="",
+ es="ago.",
fi="",
fr="",
gr="",
@@ -533,7 +656,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="aug.",
sl="",
sv="",
tk="",
@@ -584,11 +707,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs=" (pokračování)",
da="",
de="",
en=" (continued)",
- es="",
+ es=" (continúa)",
fi="",
fr="",
gr="",
@@ -604,7 +727,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk=" (pokračovanie)",
sl="",
sv="",
tk="",
@@ -656,11 +779,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="pros.",
da="",
de="",
en="dec",
- es="",
+ es="dic.",
fi="",
fr="",
gr="",
@@ -676,7 +799,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="dec.",
sl="",
sv="",
tk="",
@@ -728,11 +851,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="ún.",
da="",
de="",
en="feb",
- es="",
+ es="feb.",
fi="",
fr="",
gr="",
@@ -748,7 +871,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="feb.",
sl="",
sv="",
tk="",
@@ -767,7 +890,7 @@ data.labels={
da="Figur ",
de="Abbildung ",
en="Figure ",
- es="Ilustración ",
+ es="Figura ",
fi="Kuva ",
fr="Figure ",
gr="Σχήμα",
@@ -822,7 +945,7 @@ data.labels={
pt="sexta-feira",
ro="vineri",
ru="пятница",
- sk="",
+ sk="piatok",
sl="petek",
sv="fredag",
tk="bäşinji gün",
@@ -876,7 +999,7 @@ data.labels={
da="se foroven",
de="siehe oben",
en="as we show above",
- es="",
+ es="como se muestra arriba",
fi="",
fr="ci-dessus",
gr="",
@@ -893,7 +1016,7 @@ data.labels={
pt="",
ro="",
ru="см. выше",
- sk="",
+ sk="pozri hore",
sl="glej zgoraj",
sv="se ovan",
tk="",
@@ -911,7 +1034,7 @@ data.labels={
da="se forneden",
de="siehe unten",
en="as we show below",
- es="",
+ es="como se muestra abajo",
fi="",
fr="ci-dessous",
gr="",
@@ -927,7 +1050,7 @@ data.labels={
pt="",
ro="",
ru="см. ниже",
- sk="",
+ sk="pozri ďalej",
sl="glej spodaj",
sv="se nedan",
tk="",
@@ -1016,11 +1139,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="led.",
da="",
de="",
en="jan",
- es="",
+ es="ene.",
fi="",
fr="",
gr="",
@@ -1036,7 +1159,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="jan.",
sl="",
sv="",
tk="",
@@ -1089,11 +1212,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="čce",
da="",
de="",
en="jul",
- es="",
+ es="jul.",
fi="",
fr="",
gr="",
@@ -1109,7 +1232,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="júla",
sl="",
sv="",
tk="",
@@ -1161,11 +1284,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="čer.",
da="",
de="",
en="jun",
- es="",
+ es="jun.",
fi="",
fr="",
gr="",
@@ -1181,7 +1304,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="júna",
sl="",
sv="",
tk="",
@@ -1306,11 +1429,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="břez.",
da="",
de="",
en="mar",
- es="",
+ es="mar.",
fi="",
fr="",
gr="",
@@ -1326,7 +1449,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="mar.",
sl="",
sv="",
tk="",
@@ -1379,11 +1502,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="květ.",
da="",
de="",
en="may",
- es="",
+ es="may.",
fi="",
fr="",
gr="",
@@ -1399,7 +1522,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="mája",
sl="",
sv="",
tk="",
@@ -1436,7 +1559,7 @@ data.labels={
pt="segunda-feira",
ro="luni",
ru="понедельник",
- sk="",
+ sk="pondelok",
sl="ponedeljek",
sv="måndag",
tk="birinji gün",
@@ -1488,11 +1611,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="list.",
da="",
de="",
en="nov",
- es="",
+ es="nov.",
fi="",
fr="",
gr="",
@@ -1508,7 +1631,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="nov.",
sl="",
sv="",
tk="",
@@ -1559,11 +1682,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="říj.",
da="",
de="",
en="oct",
- es="",
+ es="oct.",
fi="",
fr="",
gr="",
@@ -1579,7 +1702,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="okt.",
sl="",
sv="",
tk="",
@@ -1597,7 +1720,7 @@ data.labels={
da="Side ",
de="Seite ",
en="page ",
- es="",
+ es="página ",
fi="",
fr="page ",
gr="",
@@ -1614,7 +1737,7 @@ data.labels={
pt="",
ro="",
ru="страница ",
- sk="",
+ sk="strana ",
sl="stran ",
sv="Sida ",
tk="",
@@ -1647,7 +1770,7 @@ data.labels={
nb="Del",
nl="Deel ",
nn="Del",
- pl="Część ",
+ pl="Część ",
pt="Parte ",
ro="Partea ",
ru="Часть ",
@@ -1688,7 +1811,7 @@ data.labels={
pt="sábado",
ro="sâmbătă",
ru="суббота",
- sk="",
+ sk="sobota",
sl="sobota",
sv="lördag",
tk="altynjy gün",
@@ -1744,7 +1867,7 @@ data.labels={
da="se ",
de="siehe ",
en="see ",
- es="",
+ es="ver: ",
fi="",
fr="cf. ",
gr="",
@@ -1761,7 +1884,7 @@ data.labels={
pt="",
ro="",
ru="см. ",
- sk="",
+ sk="pozri ",
sl="glej ",
sv="se ",
tk="",
@@ -1813,11 +1936,11 @@ data.labels={
labels={
af="",
ca="",
- cs="",
+ cs="září",
da="",
de="",
en="sep",
- es="",
+ es="sep.",
fi="",
fr="",
gr="",
@@ -1833,7 +1956,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="sept.",
sl="",
sv="",
tk="",
@@ -1981,7 +2104,7 @@ data.labels={
pt="domingo",
ro="duminică",
ru="воскресенье",
- sk="",
+ sk="nedeľa",
sl="nedelja",
sv="söndag",
tk="dynç gün",
@@ -2055,7 +2178,7 @@ data.labels={
pt="quinta-feira",
ro="joi",
ru="четверг",
- sk="",
+ sk="štvrtok",
sl="četrtek",
sv="torsdag",
tk="dördünji gün",
@@ -2092,7 +2215,7 @@ data.labels={
pt="terça-feira",
ro="marți",
ru="вторник",
- sk="",
+ sk="utorok",
sl="torek",
sv="tisdag",
tk="ikinji gün",
@@ -2129,7 +2252,7 @@ data.labels={
pt="quarta-feira",
ro="miercuri",
ru="среда",
- sk="",
+ sk="streda",
sl="sreda",
sv="onsdag",
tk="üçünji",
@@ -2187,7 +2310,7 @@ data.labels={
da="Indhold",
de="Inhalt",
en="Contents",
- es="Índice",
+ es="Contenido",
fi="Sisällys",
fr="Table des matières",
gr="Περιεχόμενα",
@@ -2220,11 +2343,11 @@ data.labels={
ar="الأشكال",
ca="Figures",
cn="图形",
- cs="Obrázky",
+ cs="Seznam obrázků",
da="Figurer",
de="Abbildungen",
en="Figures",
- es="Ilustraciones",
+ es="Figuras",
fi="Kuvi",
fr="Figures",
gr="Σχήματα",
@@ -2242,7 +2365,7 @@ data.labels={
pt="Figuras",
ro="Figuri",
ru="Список иллюстраций",
- sk="Obrázkov",
+ sk="Zoznam obrázkov",
sl="Slike",
sv="Figurer",
tk="Suratlar",
@@ -2257,7 +2380,7 @@ data.labels={
ar="الرسوم",
ca="Gràfiques",
cn="图",
- cs="Grafy",
+ cs="Seznam grafů",
da="Grafik",
de="Graphiken",
en="Graphics",
@@ -2279,7 +2402,7 @@ data.labels={
pt="Gráficos",
ro="Grafice",
ru="Список графиков",
- sk="Graf",
+ sk="Zoznam grafov",
sl="Slike",
sv="Grafik",
tk="Grafikler",
@@ -2298,7 +2421,7 @@ data.labels={
da="Indeks",
de="Index",
en="Index",
- es="Índice alfabético",
+ es="Índice",
fi="Indeksiluku",
fr="Index",
gr="Ευρετήριο",
@@ -2423,7 +2546,7 @@ data.labels={
pt="",
ro="",
ru="",
- sk="",
+ sk="Literatúra",
sl="Literatura",
sv="",
tk="",
@@ -2438,7 +2561,7 @@ data.labels={
ar="الجداول",
ca="Taules",
cn="表格",
- cs="Tabulky",
+ cs="Seznam tabulek",
da="Tabeller",
de="Tabellen",
en="Tables",
@@ -2460,7 +2583,7 @@ data.labels={
pt="Tabelas",
ro="Tabele",
ru="Список таблиц",
- sk="Tabuliek",
+ sk="Zoznam tabuliek",
sl="Tabele",
sv="Tabeller",
tk="Tablisalar",
diff --git a/Master/texmf-dist/tex/context/base/lang-url.lua b/Master/texmf-dist/tex/context/base/lang-url.lua
index 09a4e5d903f..35381e672c3 100644
--- a/Master/texmf-dist/tex/context/base/lang-url.lua
+++ b/Master/texmf-dist/tex/context/base/lang-url.lua
@@ -6,10 +6,7 @@ if not modules then modules = { } end modules ['lang-url'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
-
-local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
-local utfbyte, utfchar, utfgsub = utf.byte, utf.char, utf.gsub
+local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
context = context
@@ -63,7 +60,7 @@ local characters = utilities.storage.allocate {
}
local mapping = utilities.storage.allocate {
---~ [utfchar(0xA0)] = "~", -- nbsp (catch)
+ -- [utfchar(0xA0)] = "~", -- nbsp (catch)
}
hyphenatedurl.characters = characters
@@ -72,7 +69,8 @@ hyphenatedurl.lefthyphenmin = 2
hyphenatedurl.righthyphenmin = 3
hyphenatedurl.discretionary = nil
--- more fun is to write nodes
+-- more fun is to write nodes .. maybe it's nicer to do this
+-- in an attribute handler anyway
local function action(hyphenatedurl,str,left,right,disc)
local n = 0
diff --git a/Master/texmf-dist/tex/context/base/lang-wrd.lua b/Master/texmf-dist/tex/context/base/lang-wrd.lua
index 3d3cb6aec99..06a2311a63e 100644
--- a/Master/texmf-dist/tex/context/base/lang-wrd.lua
+++ b/Master/texmf-dist/tex/context/base/lang-wrd.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['lang-ini'] = {
+if not modules then modules = { } end modules ['lang-wrd'] = {
version = 1.001,
comment = "companion to lang-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['lang-ini'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
-local lower, utfchar = string.lower, utf.char
+local lower = string.lower
+local utfchar = utf.char
local concat = table.concat
local lpegmatch = lpeg.match
local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
@@ -26,13 +26,13 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local set_attribute = node.set_attribute
-local unset_attribute = node.unset_attribute
local traverse_nodes = node.traverse
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
+local unsetvalue = attributes.unsetvalue
+
local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
@@ -60,7 +60,7 @@ local loaded = { } -- we share lists
function words.load(tag,filename)
local fullname = resolvers.findfile(filename,'other text file') or ""
if fullname ~= "" then
- report_words("loading word file '%s'",fullname)
+ report_words("loading word file %a",fullname)
statistics.starttiming(languages)
local list = loaded[fullname]
if not list then
@@ -72,7 +72,7 @@ function words.load(tag,filename)
wordsdata[tag] = list
statistics.stoptiming(languages)
else
- report_words("missing word file '%s'",filename)
+ report_words("missing word file %a",filename)
end
end
@@ -90,76 +90,11 @@ function words.found(id, str)
end
end
--- The following code is an adaption of experimental code for
--- hyphenating and spell checking.
+-- The following code is an adaptation of experimental code for hyphenating and
+-- spell checking.
-- there is an n=1 problem somewhere in nested boxes
---~ local function mark_words(head,whenfound) -- can be optimized and shared
---~ local current, start, str, language, n, done = head, nil, "", nil, 0, false
---~ local function action()
---~ if #str > 0 then
---~ local f = whenfound(language,str)
---~ if f then
---~ done = true
---~ for i=1,n do
---~ f(start)
---~ start = start.next
---~ end
---~ end
---~ end
---~ str, start, n = "", nil, 0
---~ end
---~ while current do
---~ local id = current.id
---~ if id == glyph_code then
---~ local a = current.lang
---~ if a then
---~ if a ~= language then
---~ if start then
---~ action()
---~ end
---~ language = a
---~ end
---~ elseif start then
---~ action()
---~ language = a
---~ end
---~ local components = current.components
---~ if components then
---~ start = start or current
---~ n = n + 1
---~ for g in traverse_nodes(components) do
---~ str = str .. utfchar(g.char)
---~ end
---~ else
---~ local code = current.char
---~ local data = chardata[code]
---~ if is_letter[data.category] then
---~ start = start or current
---~ n = n + 1
---~ str = str .. utfchar(code) -- slow, maybe str should be a table (and given max)
---~ elseif start then
---~ action()
---~ end
---~ end
---~ elseif id == disc_code then
---~ if n > 0 then
---~ n = n + 1
---~ end
---~ elseif id == kern_code and current.subtype == kerning_code and start then
---~ -- ok
---~ elseif start then
---~ action()
---~ end
---~ current = current.next
---~ end
---~ if start then
---~ action()
---~ end
---~ return head, done
---~ end
-
local function mark_words(head,whenfound) -- can be optimized and shared
local current, language, done = head, nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
@@ -263,21 +198,19 @@ end
local cache = { } -- can also be done with method 1 -- frozen colors once used
-setmetatable(cache, {
- __index = function(t,k) -- k == language, numbers[k] == tag
- local c
- if type(k) == "string" then
- c = colist[k]
- elseif k < 0 then
- c = colist["word:unset"]
- else
- c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
- end
- local v = c and function(n) set_attribute(n,a_color,c) end or false
- t[k] = v
- return v
+table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
+ local c
+ if type(k) == "string" then
+ c = colist[k]
+ elseif k < 0 then
+ c = colist["word:unset"]
+ else
+ c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
-} )
+ local v = c and function(n) n[a_color] = c end or false
+ t[k] = v
+ return v
+end)
-- method 1
@@ -293,7 +226,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- unset_attribute(n,a_color) -- hm, not that selective (reset color)
+ n[a_color] = unsetvalue -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -374,7 +307,7 @@ local function dumpusedwords()
if dumpthem then
collected.threshold = words.threshold
dumpname = dumpname or file.addsuffix(tex.jobname,"words")
- report_words("saving list of used words in '%s'",dumpname)
+ report_words("saving list of used words in %a",dumpname)
io.savedata(dumpname,table.serialize(collected,true))
-- table.tofile(dumpname,list,true)
end
@@ -394,7 +327,7 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- unset_attribute(n,a_color)
+ n[a_color] = unsetvalue
end
return mark_words(head,sweep)
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ano.lua b/Master/texmf-dist/tex/context/base/lpdf-ano.lua
index 154296e9be2..e722446ee2c 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ano.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ano.lua
@@ -14,6 +14,7 @@ local next, tostring = next, tostring
local rep, format = string.rep, string.format
local texcount = tex.count
local lpegmatch = lpeg.match
+local formatters = string.formatters
local backends, lpdf = backends, lpdf
@@ -150,7 +151,7 @@ local function link(url,filename,destination,page,actions)
}
}
elseif trace_references then
- report_reference("invalid page reference: %s",tostring(page))
+ report_reference("invalid page reference %a",page)
end
end
return false
@@ -234,7 +235,7 @@ local function use_normal_annotations()
local function reference(width,height,depth,prerolled) -- keep this one
if prerolled then
if trace_references then
- report_reference("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
return pdfannotation_node(width,height,depth,prerolled)
end
@@ -251,6 +252,9 @@ end
local hashed, nofunique, nofused = { }, 0, 0
+local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
+local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
+
local function use_shared_annotations()
local factor = number.dimenfactors.bp
@@ -259,7 +263,7 @@ local function use_shared_annotations()
local h, v = pdf.h, pdf.v
local llx, lly = h*factor, (v - depth)*factor
local urx, ury = (h + width)*factor, (v + height)*factor
- local annot = format("<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>",prerolled,llx,lly,urx,ury)
+ local annot = f_annot(prerolled,llx,lly,urx,ury)
local n = hashed[annot]
if not n then
n = pdfdelayedobject(annot)
@@ -275,9 +279,9 @@ local function use_shared_annotations()
local function reference(width,height,depth,prerolled)
if prerolled then
if trace_references then
- report_reference("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- local luacode = format("_bpnf_(%s,%s,%s,'%s')",width,height,depth,prerolled)
+ local luacode = f_bpnf(width,height,depth,prerolled)
return latelua_node(luacode)
end
end
@@ -323,9 +327,9 @@ function nodeinjections.destination(width,height,depth,name,view)
if not done[name] then
done[name] = true
if trace_destinations then
- report_destination("w=%s, h=%s, d=%s, n=%s, v=%s",width,height,depth,name,view or "no view")
+ report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
end
- return pdfdestination_node(width,height,depth,name,view)
+ return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
end
end
@@ -414,7 +418,7 @@ function specials.internal(var,actions) -- better resolve in strc-ref
--~ inspect(v)
if not v then
-- error
- report_reference("no internal reference '%s'",i or "?")
+ report_reference("no internal reference %a",i)
elseif getinnermethod() == "names" then
-- named
return link(nil,nil,"aut:"..i,v.references.realpage,actions)
@@ -492,17 +496,17 @@ end
-- sections
---~ function specials.section(var,actions)
---~ local sectionname = var.operation
---~ local destination = var.arguments
---~ local internal = structures.sections.internalreference(sectionname,destination)
---~ if internal then
---~ var.special = "internal"
---~ var.operation = internal
---~ var.arguments = nil
---~ specials.internal(var,actions)
---~ end
---~ end
+-- function specials.section(var,actions)
+-- local sectionname = var.operation
+-- local destination = var.arguments
+-- local internal = structures.sections.internalreference(sectionname,destination)
+-- if internal then
+-- var.special = "internal"
+-- var.operation = internal
+-- var.arguments = nil
+-- specials.internal(var,actions)
+-- end
+-- end
specials.section = specials.internal -- specials.section just need to have a value as it's checked
@@ -607,8 +611,8 @@ function executers.submitform(arguments)
local flag = flags[formmethod] or flags.post
flag = (flag and (flag[formformat] or flag.xml)) or 32 -- default: post, xml
return pdfdictionary {
- S = pdfconstant("ResetForm"),
- F = fieldset(arguments[1]),
+ S = pdfconstant("SubmitForm"),
+ F = arguments[1],
Field = fieldset(arguments[2]),
Flags = flag,
-- \PDFsubmitfiller
@@ -687,7 +691,7 @@ local function build(levels,start,parent,method)
return i, n, first, last
elseif level == startlevel then
if trace_bookmarks then
- report_bookmark("%3i %s%s %s",reference.realpage,rep(" ",level-1),(open and "+") or "-",title)
+ report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title)
end
local prev = child
child = pdfreserveobject()
diff --git a/Master/texmf-dist/tex/context/base/lpdf-col.lua b/Master/texmf-dist/tex/context/base/lpdf-col.lua
index 0c2a49ebef2..b358d082098 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-col.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-col.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['lpdf-mis'] = {
+if not modules then modules = { } end modules ['lpdf-col'] = {
version = 1.001,
comment = "companion to lpdf-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,13 +6,17 @@ if not modules then modules = { } end modules ['lpdf-mis'] = {
license = "see context related readme files"
}
-local type, next, tostring = type, next, tostring
+local type, next, tostring, tonumber = type, next, tostring, tonumber
local char, byte, format, gsub, rep, gmatch = string.char, string.byte, string.format, string.gsub, string.rep, string.gmatch
local concat = table.concat
local round = math.round
+local formatters = string.formatters
local backends, lpdf, nodes = backends, lpdf, nodes
+local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
@@ -40,11 +44,24 @@ local forcedmodel = colors.forcedmodel
local c_transparency = pdfconstant("Transparency")
+local f_gray = formatters["%.3f g %.3f G"]
+local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
+local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
+local f_tr = formatters["Tr%s"]
+local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_effect = formatters["%s Tc %s w %s Tr"]
+local f_tr_gs = formatters["/Tr%s gs"]
+local f_num_1 = tostring
+local f_num_2 = formatters["%s %s"]
+local f_num_3 = formatters["%s %s %s"]
+local f_num_4 = formatters["%s %s %s %s"]
+
local report_color = logs.reporter("colors","backend")
-- page groups (might move to lpdf-ini.lua)
-local colorspaceconstants = { -- v_none is ignored
+local colorspaceconstants = allocate { -- v_none is ignored
gray = pdfconstant("DeviceGray"),
rgb = pdfconstant("DeviceRGB"),
cmyk = pdfconstant("DeviceCMYK"),
@@ -97,26 +114,26 @@ commands.synchronizecolormodel = synchronizecolormodel
-- color injection
function nodeinjections.rgbcolor(r,g,b)
- return register(pdfliteral(format("%s %s %s rg %s %s %s RG",r,g,b,r,g,b)))
+ return register(pdfliteral(f_rgb(r,g,b,r,g,b)))
end
function nodeinjections.cmykcolor(c,m,y,k)
- return register(pdfliteral(format("%s %s %s %s k %s %s %s %s K",c,m,y,k,c,m,y,k)))
+ return register(pdfliteral(f_cmyk(c,m,y,k,c,m,y,k)))
end
function nodeinjections.graycolor(s) -- caching 0/1 does not pay off
- return register(pdfliteral(format("%s g %s G",s,s)))
+ return register(pdfliteral(f_gray(s,s)))
end
function nodeinjections.spotcolor(n,f,d,p)
if type(p) == "string" then
p = gsub(p,","," ") -- brr misuse of spot
end
- return register(pdfliteral(format("/%s cs /%s CS %s SCN %s scn",n,n,p,p)))
+ return register(pdfliteral(f_spot(n,n,p,p)))
end
function nodeinjections.transparency(n)
- return register(pdfliteral(format("/Tr%s gs",n)))
+ return register(pdfliteral(f_tr_gs(n)))
end
-- a bit weird but let's keep it here for a while
@@ -135,7 +152,7 @@ function nodeinjections.effect(effect,stretch,rulethickness)
-- always, no zero test (removed)
rulethickness = bp * rulethickness
effect = effects[effect] or effects['normal']
- return register(pdfliteral(format("%s Tc %s w %s Tr",stretch,rulethickness,effect))) -- watch order
+ return register(pdfliteral(f_effect(stretch,rulethickness,effect))) -- watch order
end
-- spot- and indexcolors
@@ -152,9 +169,9 @@ local pdf_rbg_range = pdfarray { 0, 1, 0, 1, 0, 1 }
local pdf_cmyk_range = pdfarray { 0, 1, 0, 1, 0, 1, 0, 1 }
local pdf_gray_range = pdfarray { 0, 1 }
-local rgb_function = "dup %s mul exch dup %s mul exch %s mul"
-local cmyk_function = "dup %s mul exch dup %s mul exch dup %s mul exch %s mul"
-local gray_function = "%s mul"
+local f_rgb_function = formatters["dup %s mul exch dup %s mul exch %s mul"]
+local f_cmyk_function = formatters["dup %s mul exch dup %s mul exch dup %s mul exch %s mul"]
+local f_gray_function = formatters["%s mul"]
local documentcolorspaces = pdfdictionary()
@@ -191,12 +208,12 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
Range = range,
}
local calculations = pdfflushstreamobject(format("{ %s }",funct),dictionary)
- -- local calculations = pdfobject {
- -- type = "stream",
- -- immediate = true,
- -- string = format("{ %s }",funct),
- -- attr = dictionary(),
- -- }
+ -- local calculations = pdfobject {
+ -- type = "stream",
+ -- immediate = true,
+ -- string = format("{ %s }",funct),
+ -- attr = dictionary(),
+ -- }
local array = pdfarray {
pdf_separation,
pdfconstant(spotcolornames[name] or name),
@@ -227,7 +244,7 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
if sn then
colorants[name] = pdfreference(sn)
else
- report_color("unknown colorant %s, using black instead",name or n)
+ report_color("unknown colorant %a, using black instead",name or n)
name = "Black"
end
end
@@ -343,47 +360,47 @@ end
function registrations.rgbspotcolor(name,noffractions,names,p,r,g,b)
if noffractions == 1 then
- registersomespotcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rbg_range,format(rgb_function,r,g,b))
+ registersomespotcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rbg_range,f_rgb_function(r,g,b))
else
- registersomespotcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rbg_range,format("%s %s %s",r,g,b))
+ registersomespotcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rbg_range,f_num_3(r,g,b))
end
delayindexcolor(name,names,function()
- return registersomeindexcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rgb_range,format(rgb_function,r,g,b))
+ return registersomeindexcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rgb_range,f_rgb_function(r,g,b))
end)
end
function registrations.cmykspotcolor(name,noffractions,names,p,c,m,y,k)
if noffractions == 1 then
- registersomespotcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,format(cmyk_function,c,m,y,k))
+ registersomespotcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,f_cmyk_function(c,m,y,k))
else
- registersomespotcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,format("%s %s %s %s",c,m,y,k))
+ registersomespotcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,f_num_4(c,m,y,k))
end
delayindexcolor(name,names,function()
- return registersomeindexcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,format(cmyk_function,c,m,y,k))
+ return registersomeindexcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,f_cmyk_function(c,m,y,k))
end)
end
function registrations.grayspotcolor(name,noffractions,names,p,s)
if noffractions == 1 then
- registersomespotcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,format(gray_function,s))
+ registersomespotcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,f_gray_function(s))
else
- registersomespotcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,s)
+ registersomespotcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,f_num_1(s))
end
delayindexcolor(name,names,function()
- return registersomeindexcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,format(gray_function,s))
+ return registersomeindexcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,f_gray_function(s))
end)
end
function registrations.rgbindexcolor(name,noffractions,names,p,r,g,b)
- registersomeindexcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rgb_range,format(rgb_function,r,g,b))
+ registersomeindexcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rgb_range,f_rgb_function(r,g,b))
end
function registrations.cmykindexcolor(name,noffractions,names,p,c,m,y,k)
- registersomeindexcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,format(cmyk_function,c,m,y,k))
+ registersomeindexcolor(name,noffractions,names,p,pdf_device_cmyk,pdf_cmyk_range,f_cmyk_function(c,m,y,k))
end
function registrations.grayindexcolor(name,noffractions,names,p,s)
- registersomeindexcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,gray_function)
+ registersomeindexcolor(name,noffractions,names,p,pdf_device_gray,pdf_gray_range,f_gray_function(s))
end
function codeinjections.setfigurecolorspace(data,figure)
@@ -453,17 +470,16 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[n] = m
documenttransparencies[n] = mr
- lpdf.adddocumentextgstate(format("Tr%s",n),mr)
+ lpdf.adddocumentextgstate(f_tr(n),mr)
end
end
statistics.register("page group warning", function()
if done and not transparencygroups[currentgroupcolormodel] then
- return format("transparencies are used but no pagecolormodel is set")
+ return "transparencies are used but no pagecolormodel is set"
end
end)
-
-- Literals needed to inject code in the mp stream, we cannot use attributes there
-- since literals may have qQ's, much may go away once we have mplib code in place.
--
@@ -480,22 +496,22 @@ local function lpdfcolor(model,ca,default) -- todo: use gray when no color
model = forcedmodel(model)
if model == 2 then
local s = cv[2]
- return format("%s g %s G",s,s)
+ return f_gray(s,s)
elseif model == 3 then
local r, g, b = cv[3], cv[4], cv[5]
- return format("%s %s %s rg %s %s %s RG",r,g,b,r,g,b)
+ return f_rgb(r,g,b,r,g,b)
elseif model == 4 then
local c, m, y, k = cv[6],cv[7],cv[8],cv[9]
- return format("%s %s %s %s k %s %s %s %s K",c,m,y,k,c,m,y,k)
+ return f_cmyk(c,m,y,k,c,m,y,k)
else
local n,f,d,p = cv[10],cv[11],cv[12],cv[13]
if type(p) == "string" then
p = gsub(p,","," ") -- brr misuse of spot
end
- return format("/%s cs /%s CS %s SCN %s scn",n,n,p,p)
+ return f_spot(n,n,p,p)
end
else
- return format("%s g %s G",default or 0,default or 0)
+ return f_gray(default or 0,default or 0)
end
else
return ""
@@ -538,9 +554,9 @@ function lpdf.transparency(ct,default) -- kind of overlaps with transparencycode
if transparencies.supported then
local ct = transparenciesvalue(ct)
if ct then
- return format("/Tr%s gs",registertransparancy(nil,ct[1],ct[2],true))
+ return f_tr_gs(registertransparancy(nil,ct[1],ct[2],true))
else
- return "/Tr0 gs"
+ return f_tr_gs(0)
end
else
return ""
@@ -555,40 +571,19 @@ function lpdf.colorvalue(model,ca,default)
end
model = forcedmodel(model)
if model == 2 then
- return format("%s",cv[2])
+ return f_num_1(cv[2])
elseif model == 3 then
- return format("%s %s %s",cv[3],cv[4],cv[5])
+ return f_num_3(cv[3],cv[4],cv[5])
elseif model == 4 then
- return format("%s %s %s %s",cv[6],cv[7],cv[8],cv[9])
+ return f_num_4(cv[6],cv[7],cv[8],cv[9])
else
- return format("%s",cv[13])
+ return f_num_1(cv[13])
end
else
- return format("%s",default or 0)
+ return f_num_1(default or 0)
end
end
---~ function lpdf.fdfcolor(model,ca,default)
---~ local cv = colorsvalue(ca)
---~ if cv then
---~ if model == 1 then
---~ model = cv[1]
---~ end
---~ model = forcedmodel(model)
---~ if model == 2 then
---~ return format("[%s]",cv[2])
---~ elseif model == 3 then
---~ return format("[%s %s %s]",cv[3],cv[4],cv[5])
---~ elseif model == 4 then
---~ return format("[%s %s %s %s]",cv[6],cv[7],cv[8],cv[9])
---~ elseif model == 4 then
---~ return format("[%s]",cv[13])
---~ end
---~ else
---~ return format("[%s]",default or 0)
---~ end
---~ end
-
function lpdf.colorvalues(model,ca,default)
local cv = colorsvalue(ca)
if cv then
@@ -677,7 +672,7 @@ end
function lpdf.transparencycode(a,t)
if transparencies.supported then
intransparency = true
- return format("/Tr%s gs",registertransparancy(nil,a,t,true)) -- true forces resource
+ return f_tr_gs(registertransparancy(nil,a,t,true)) -- true forces resource
else
return ""
end
@@ -686,7 +681,7 @@ end
function lpdf.finishtransparencycode()
if transparencies.supported and intransparency then
intransparency = false
- return "/Tr0 gs" -- we happen to know this -)
+ return f_tr_gs(0) -- we happen to know this -)
else
return ""
end
@@ -694,7 +689,9 @@ end
-- this will move to lpdf-spe.lua
-backends.pdf.tables.vfspecials = { -- todo: distinguish between glyph and rule color
+local f_slant = formatters["pdf: q 1 0 %f 1 0 0 cm"]
+
+backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
red = { "special", 'pdf: 1 0 0 rg 1 0 0 RG' },
green = { "special", 'pdf: 0 1 0 rg 0 1 0 RG' },
@@ -714,7 +711,7 @@ backends.pdf.tables.vfspecials = { -- todo: distinguish between glyph and rule c
palegray = { "special", 'pdf: .75 g' },
},
- startslant = function(a) return { "special", format("pdf: q 1 0 %s 1 0 0 cm",a) } end,
+ startslant = function(a) return { "special", f_slant(a) } end,
stopslant = { "special", "pdf: Q" },
}
diff --git a/Master/texmf-dist/tex/context/base/lpdf-epa.lua b/Master/texmf-dist/tex/context/base/lpdf-epa.lua
index c8d23a61825..034e6d7e241 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-epa.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-epa.lua
@@ -11,6 +11,9 @@ if not modules then modules = { } end modules ['lpdf-epa'] = {
local type, tonumber = type, tonumber
local format, gsub = string.format, string.gsub
+local formatters = string.formatters
+
+----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
@@ -20,6 +23,9 @@ local backends, lpdf = backends, lpdf
local variables = interfaces.variables
local codeinjections = backends.pdf.codeinjections
+----- urlescaper = lpegpatterns.urlescaper
+----- utftohigh = lpegpatterns.utftohigh
+local escapetex = characters.filters.utf.private.escape
local layerspec = { -- predefining saves time
"epdflinks"
@@ -30,17 +36,21 @@ local function makenamespace(filename)
end
local function add_link(x,y,w,h,destination,what)
+ x = x .. "bp"
+ y = y .. "bp"
+ w = w .. "bp"
+ h = h .. "bp"
if trace_links then
- report_link("dx: % 4i, dy: % 4i, wd: % 4i, ht: % 4i, destination: %s, type: %s",x,y,w,h,destination,what)
+ report_link("destination %a, type %a, dx %s, dy %s, wd %s, ht %s",destination,what,x,y,w,h)
end
local locationspec = { -- predefining saves time
- x = x .. "bp",
- y = y .. "bp",
+ x = x,
+ y = y,
preset = "leftbottom",
}
local buttonspec = {
- width = w .. "bp",
- height = h .. "bp",
+ width = w,
+ height = h,
offset = variables.overlay,
frame = trace_links and variables.on or variables.off,
}
@@ -53,19 +63,22 @@ local function add_link(x,y,w,h,destination,what)
end
local function link_goto(x,y,w,h,document,annotation,pagedata,namespace)
- local destination = annotation.A.D -- [ 18 0 R /Fit ]
- local what = "page"
- if type(destination) == "string" then
- local destinations = document.destinations
- local wanted = destinations[destination]
- destination = wanted and wanted.D
- if destination then what = "named" end
- end
- local pagedata = destination and destination[1]
- if pagedata then
- local destinationpage = pagedata.number
- if destinationpage then
- add_link(x,y,w,h,namespace .. destinationpage,what)
+ local a = annotation.A
+ if a then
+ local destination = a.D -- [ 18 0 R /Fit ]
+ local what = "page"
+ if type(destination) == "string" then
+ local destinations = document.destinations
+ local wanted = destinations[destination]
+ destination = wanted and wanted.D
+ if destination then what = "named" end
+ end
+ local pagedata = destination and destination[1]
+ if pagedata then
+ local destinationpage = pagedata.number
+ if destinationpage then
+ add_link(x,y,w,h,namespace .. destinationpage,what)
+ end
end
end
end
@@ -73,24 +86,31 @@ end
local function link_uri(x,y,w,h,document,annotation)
local url = annotation.A.URI
if url then
- add_link(x,y,w,h,format("url(%s)",url),"url")
+ -- url = lpegmatch(urlescaper,url)
+ -- url = lpegmatch(utftohigh,url)
+ url = escapetex(url)
+ add_link(x,y,w,h,formatters["url(%s)"](url),"url")
end
end
local function link_file(x,y,w,h,document,annotation)
- local filename = annotation.A.F
- if filename then
- local destination = annotation.A.D
- if not destination then
- add_link(x,y,w,h,format("file(%s)",filename),"file")
- elseif type(destination) == "string" then
- add_link(x,y,w,h,format("%s::%s",filename,destination),"file (named)")
- else
- destination = destination[1] -- array
- if tonumber(destination) then
- add_link(x,y,w,h,format("%s::page(%s)",filename,destination),"file (page)")
+ local a = annotation.A
+ if a then
+ local filename = a.F
+ if filename then
+ filename = escapetex(filename)
+ local destination = a.D
+ if not destination then
+ add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
+ elseif type(destination) == "string" then
+ add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)")
else
- add_link(x,y,w,h,format("file(%s)",filename),"file")
+ destination = destination[1] -- array
+ if tonumber(destination) then
+ add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)")
+ else
+ add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
+ end
end
end
end
@@ -110,41 +130,50 @@ function codeinjections.mergereferences(specification)
local yscale = specification.yscale or 1
local size = specification.size or "crop" -- todo
local pagedata = document.pages[pagenumber]
- local annotations = pagedata.Annots
- local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
- local reference = namespace .. pagenumber
- if annotations.n > 0 then
+ local annotations = pagedata and pagedata.Annots
+ if annotations and annotations.n > 0 then
+ local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
+ local reference = namespace .. pagenumber
local mediabox = pagedata.MediaBox
local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]
local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
for i=1,annotations.n do
local annotation = annotations[i]
- local subtype = annotation.Subtype
- local rectangle = annotation.Rect
- local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
- local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
- local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
- if subtype == "Link" then
- local linktype = annotation.A.S
- if linktype == "GoTo" then
- link_goto(x,y,w,h,document,annotation,pagedata,namespace)
- elseif linktype == "GoToR" then
- link_file(x,y,w,h,document,annotation)
- elseif linktype == "URI" then
- link_uri(x,y,w,h,document,annotation)
+ if annotation then
+ local subtype = annotation.Subtype
+ local rectangle = annotation.Rect
+ local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
+ local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
+ local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
+ if subtype == "Link" then
+ local a = annotation.A
+ if a then
+ local linktype = a.S
+ if linktype == "GoTo" then
+ link_goto(x,y,w,h,document,annotation,pagedata,namespace)
+ elseif linktype == "GoToR" then
+ link_file(x,y,w,h,document,annotation)
+ elseif linktype == "URI" then
+ link_uri(x,y,w,h,document,annotation)
+ elseif trace_links then
+ report_link("unsupported link annotation %a",linktype)
+ end
+ else
+ report_link("mising link annotation")
+ end
elseif trace_links then
- report_link("unsupported link annotation '%s'",linktype)
+ report_link("unsupported annotation %a",subtype)
end
elseif trace_links then
- report_link("unsupported annotation '%s'",subtype)
+ report_link("broken annotation, index %a",i)
end
end
context.flushlayer { "epdflinks" }
-- context("\\gdef\\figurereference{%s}",reference) -- global
context.setgvalue("figurereference",reference) -- global
if trace_links then
- report_link("setting figure reference to '%s'",reference)
+ report_link("setting figure reference to %a",reference)
end
specification.reference = reference
return namespace
@@ -171,19 +200,24 @@ function codeinjections.mergeviewerlayers(specification)
local layers = document.layers
if layers then
for i=1,layers.n do
- local tag = namespace .. gsub(layers[i]," ",":")
- local title = tag
- if trace_links then
- report_link("using layer '%s'",tag)
+ local layer = layers[i]
+ if layer then
+ local tag = namespace .. gsub(layer," ",":")
+ local title = tag
+ if trace_links then
+ report_link("using layer %a",tag)
+ end
+ attributes.viewerlayers.define { -- also does some cleaning
+ tag = tag, -- todo: #3A or so
+ title = title,
+ visible = variables.start,
+ editable = variables.yes,
+ printable = variables.yes,
+ }
+ codeinjections.useviewerlayer(tag)
+ elseif trace_links then
+ report_link("broken layer, index %a",i)
end
- attributes.viewerlayers.define { -- also does some cleaning
- tag = tag, -- todo: #3A or so
- title = title,
- visible = variables.start,
- editable = variables.yes,
- printable = variables.yes,
- }
- codeinjections.useviewerlayer(tag)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-epd.lua b/Master/texmf-dist/tex/context/base/lpdf-epd.lua
index 76d258cefdf..b9f8cfc7ccc 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-epd.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-epd.lua
@@ -27,6 +27,8 @@ local lower, match, char, find, sub = string.lower, string.match, string.char, s
local concat = table.concat
local toutf = string.toutf
+local report_epdf = logs.reporter("epdf")
+
-- a bit of protection
local limited = false
@@ -59,9 +61,7 @@ local function prepare(document,d,t,n,k)
for i=1,n do
local v = d:getVal(i)
local r = d:getValNF(i)
- if r:getTypeName() ~= "ref" then
- t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
- else
+ if r:getTypeName() == "ref" then
r = r:getRef().num
local c = document.cache[r]
if c then
@@ -74,6 +74,8 @@ local function prepare(document,d,t,n,k)
end
end
t[d:getKey(i)] = c
+ else
+ t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
end
end
getmetatable(t).__index = nil
@@ -95,9 +97,9 @@ local function prepare(document,a,t,n,k)
for i=1,n do
local v = a:get(i)
local r = a:getNF(i)
- if r:getTypeName() ~= "ref" then
- t[i] = checked_access[v:getTypeName()](v,document)
- else
+ if v:getTypeName() == "null" then
+ -- TH: weird, but appears possible
+ elseif r:getTypeName() == "ref" then
r = r:getRef().num
local c = document.cache[r]
if c then
@@ -108,6 +110,8 @@ local function prepare(document,a,t,n,k)
document.xrefs[c] = r
end
t[i] = c
+ else
+ t[i] = checked_access[v:getTypeName()](v,document)
end
end
getmetatable(t).__index = nil
@@ -179,14 +183,17 @@ checked_access = {
ref = function(v)
return v:getRef()
end,
+ null = function()
+ return nil
+ end,
}
---~ checked_access.real = epdf.real
---~ checked_access.integer = epdf.integer
---~ checked_access.string = epdf.string
---~ checked_access.boolean = epdf.boolean
---~ checked_access.name = epdf.name
---~ checked_access.ref = epdf.ref
+-- checked_access.real = epdf.real
+-- checked_access.integer = epdf.integer
+-- checked_access.string = epdf.string
+-- checked_access.boolean = epdf.boolean
+-- checked_access.name = epdf.name
+-- checked_access.ref = epdf.ref
local function getnames(document,n,target) -- direct
if n then
@@ -264,10 +271,14 @@ local function getpages(document)
for pagenumber=1,nofpages do
local pagereference = cata:getPageRef(pagenumber).num
local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference)
- pagedata.number = pagenumber
- pages[pagenumber] = pagedata
- xrefs[pagedata] = pagereference
- cache[pagereference] = pagedata
+ if pagedata then
+ pagedata.number = pagenumber
+ pages[pagenumber] = pagedata
+ xrefs[pagedata] = pagereference
+ cache[pagereference] = pagedata
+ else
+ report_epdf("missing pagedata at slot %i",i)
+ end
end
pages.n = nofpages
return pages
@@ -323,6 +334,15 @@ function lpdf.epdf.load(filename)
return document
end
+-- for k, v in next, expand(t) do
+
+function lpdf.epdf.expand(t)
+ if type(t) == "table" then
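+ -- reading any key triggers the lazy __index metamethod, which expands the table in place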
+ local dummy = t.dummy
+ end
+ return t
+end
+
-- helpers
-- function lpdf.epdf.getdestinationpage(document,name)
diff --git a/Master/texmf-dist/tex/context/base/lpdf-fld.lua b/Master/texmf-dist/tex/context/base/lpdf-fld.lua
index 30052538d85..a9b9fd72db2 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-fld.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-fld.lua
@@ -302,8 +302,8 @@ local function fieldsurrounding(specification)
-- we could test for colorvalue being 1 (black) and omit it then
local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
if trace_fields then
- report_fields("fontcode : %s %s @ %s => %s => %s",fontstyle,fontalternative,fontsize,tag,fontcode)
- report_fields("colorcode: %s => %s",colorvalue,colorcode)
+ report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
+ report_fields("using color, value %a, code %a",colorvalue,colorcode)
end
local stream = pdfstream {
pdfconstant(tag),
@@ -685,7 +685,7 @@ function codeinjections.definefield(specification)
local fieldtype = specification.type
if not fieldtype then
if trace_fields then
- report_fields("invalid definition of '%s': unknown type",n)
+ report_fields("invalid definition for %a, unknown type",n)
end
elseif fieldtype == "radio" then
local values = specification.values
@@ -696,10 +696,10 @@ function codeinjections.definefield(specification)
end
fields[n] = specification
if trace_fields then
- report_fields("defining '%s' as radio",n or "?")
+ report_fields("defining %a as type %a",n,"radio")
end
elseif trace_fields then
- report_fields("invalid definition of radio '%s': missing values",n)
+ report_fields("invalid definition of radio %a, missing values",n)
end
elseif fieldtype == "sub" then
-- not in main field list !
@@ -711,16 +711,16 @@ function codeinjections.definefield(specification)
end
if trace_fields then
local p = radios[n] and radios[n].parent
- report_fields("defining '%s' as sub of radio '%s'",n or "?",p or "?")
+ report_fields("defining %a as type sub of radio %a",n,p)
end
elseif trace_fields then
- report_fields("invalid definition of radio sub '%s': no parent",n)
+ report_fields("invalid definition of radio sub %a, no parent given",n)
end
predefinesymbols(specification)
elseif fieldtype == "text" or fieldtype == "line" then
fields[n] = specification
if trace_fields then
- report_fields("defining '%s' as %s",n,fieldtype)
+ report_fields("defining %a as type %a",n,fieldtype)
end
if specification.values ~= "" and specification.default == "" then
specification.default, specification.values = specification.values, nil
@@ -728,12 +728,12 @@ function codeinjections.definefield(specification)
else
fields[n] = specification
if trace_fields then
- report_fields("defining '%s' as %s",n,fieldtype)
+ report_fields("defining %a as type %a",n,fieldtype)
end
predefinesymbols(specification)
end
elseif trace_fields then
- report_fields("invalid definition of '%s': already defined",n)
+ report_fields("invalid definition for %a, already defined",n)
end
end
@@ -741,14 +741,14 @@ function codeinjections.clonefield(specification) -- obsolete
local p, c, v = specification.parent, specification.children, specification.alternative
if not p or not c then
if trace_fields then
- report_fields("invalid clone: children: '%s', parent '%s', alternative: '%s'",c or "?",p or "?", v or "?")
+ report_fields("invalid clone, children %a, parent %a, alternative %a",c,p,v)
end
return
end
local x = fields[p] or radios[p]
if not x then
if trace_fields then
- report_fields("cloning: unknown parent '%s'",p)
+ report_fields("invalid clone, unknown parent %a",p)
end
return
end
@@ -756,11 +756,11 @@ function codeinjections.clonefield(specification) -- obsolete
local f, r, c = fields[n], radios[n], clones[n]
if f or r or c then
if trace_fields then
- report_fields("already cloned: child: '%s', parent '%s', alternative: '%s'",n,p,v or "?")
+ report_fields("already cloned, child %a, parent %a, alternative %a",n,p,v)
end
else
if trace_fields then
- report_fields("cloning: child: '%s', parent '%s', alternative: '%s'",n,p,v or "?")
+ report_fields("cloning, child %a, parent %a, alternative %a",n,p,v)
end
clones[n] = specification
predefinesymbols(specification)
@@ -879,7 +879,7 @@ local methods = { }
function nodeinjections.typesetfield(name,specification)
local field = fields[name] or radios[name] or clones[name]
if not field then
- report_fields( "unknown child '%s'",name)
+ report_fields( "unknown child %a",name)
-- unknown field
return
end
@@ -891,7 +891,7 @@ function nodeinjections.typesetfield(name,specification)
if method then
return method(name,specification,alternative)
else
- report_fields( "unknown method '%s' for child '%s'",field.type,name)
+ report_fields( "unknown method %a for child %a",field.type,name)
end
end
@@ -952,7 +952,7 @@ local function makelinechild(name,specification)
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("forcing parent text '%s'",parent.name)
+ report_fields("forcing parent text %a",parent.name)
end
makelineparent(parent,specification)
end
@@ -961,13 +961,13 @@ local function makelinechild(name,specification)
field = parent
if not parent.pobj then
if trace_fields then
- report_fields("using parent text '%s'",name)
+ report_fields("using parent text %a",name)
end
makelineparent(parent,specification)
end
end
if trace_fields then
- report_fields("using child text '%s'",name)
+ report_fields("using child text %a",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -1010,7 +1010,7 @@ local function makechoicechild(name,specification)
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("forcing parent choice '%s'",parent.name)
+ report_fields("forcing parent choice %a",parent.name)
end
makechoiceparent(parent,specification,extras)
end
@@ -1019,13 +1019,13 @@ local function makechoicechild(name,specification)
field = parent
if not parent.pobj then
if trace_fields then
- report_fields("using parent choice '%s'",name)
+ report_fields("using parent choice %a",name)
end
makechoiceparent(parent,specification,extras)
end
end
if trace_fields then
- report_fields("using child choice '%s'",name)
+ report_fields("using child choice %a",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -1068,7 +1068,7 @@ local function makecheckchild(name,specification)
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("forcing parent check '%s'",parent.name)
+ report_fields("forcing parent check %a",parent.name)
end
makecheckparent(parent,specification,extras)
end
@@ -1077,13 +1077,13 @@ local function makecheckchild(name,specification)
field = parent
if not parent.pobj then
if trace_fields then
- report_fields("using parent check '%s'",name)
+ report_fields("using parent check %a",name)
end
makecheckparent(parent,specification,extras)
end
end
if trace_fields then
- report_fields("using child check '%s'",name)
+ report_fields("using child check %a",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -1134,7 +1134,7 @@ local function makepushchild(name,specification)
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("forcing parent push '%s'",parent.name)
+ report_fields("forcing parent push %a",parent.name)
end
makepushparent(parent,specification)
end
@@ -1143,13 +1143,13 @@ local function makepushchild(name,specification)
field = parent
if not parent.pobj then
if trace_fields then
- report_fields("using parent push '%s'",name)
+ report_fields("using parent push %a",name)
end
makepushparent(parent,specification)
end
end
if trace_fields then
- report_fields("using child push '%s'",name)
+ report_fields("using child push %a",name)
end
local fontsymbol = specification.fontsymbol
local d = pdfdictionary {
@@ -1198,26 +1198,26 @@ end
-- parent = fields[field.parent]
-- if not parent.pobj then
-- if trace_fields then
--- report_fields("forcing parent radio '%s'",parent.name)
+-- report_fields("forcing parent radio %a",parent.name)
-- end
-- makeradioparent(parent,parent)
-- end
-- else
-- field = radios[name]
-- if not field then
--- report_fields("there is some problem with field '%s'",name)
+-- report_fields("there is some problem with field %a",name)
-- return nil
-- end
-- parent = fields[field.parent]
-- if not parent.pobj then
-- if trace_fields then
--- report_fields("using parent radio '%s'",name)
+-- report_fields("using parent radio %a",name)
-- end
-- makeradioparent(parent,parent)
-- end
-- end
-- if trace_fields then
--- report_fields("using child radio '%s' with values '%s' and default '%s'",name,field.values or "?",field.default or "?")
+-- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
-- end
-- local fontsymbol = specification.fontsymbol
-- fontsymbol="star"
@@ -1256,26 +1256,26 @@ local function makeradiochild(name,specification)
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("forcing parent radio '%s'",parent.name)
+ report_fields("forcing parent radio %a",parent.name)
end
makeradioparent(parent,parent)
end
else
field = radios[name]
if not field then
- report_fields("there is some problem with field '%s'",name)
+ report_fields("there is some problem with field %a",name)
return nil
end
parent = fields[field.parent]
if not parent.pobj then
if trace_fields then
- report_fields("using parent radio '%s'",name)
+ report_fields("using parent radio %a",name)
end
makeradioparent(parent,parent)
end
end
if trace_fields then
- report_fields("using child radio '%s' with values '%s' and default '%s'",name,field.values or "?",field.default or "?")
+ report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
end
local fontsymbol = specification.fontsymbol
-- fontsymbol = "circle"
diff --git a/Master/texmf-dist/tex/context/base/lpdf-fmt.lua b/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
index 8e118db1265..94c005f6570 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
@@ -36,6 +36,8 @@ local pdfstring = lpdf.string
local pdfverbose = lpdf.verbose
local pdfflushstreamfileobject = lpdf.flushstreamfileobject
+local texset = tex.set -- we could make tex.setglobal
+
local addtoinfo = lpdf.addtoinfo
local injectxmpinfo = lpdf.injectxmpinfo
local insertxmpinfo = lpdf.insertxmpinfo
@@ -360,7 +362,7 @@ local function loadprofile(name,filename)
for i=1,#databases do
local filename = locatefile(databases[i])
if filename and filename ~= "" then
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
local lname = lower(name)
if suffix == "xml" then
local xmldata = xml.load(filename) -- no need for caching it
@@ -385,16 +387,16 @@ local function loadprofile(name,filename)
end
if profile then
if next(profile) then
- report_backend("profile specification '%s' loaded from '%s'",name,filename)
+ report_backend("profile specification %a loaded from %a",name,filename)
return profile
elseif trace_format then
- report_backend("profile specification '%s' loaded from '%s' but empty",name,filename)
+ report_backend("profile specification %a loaded from %a but empty",name,filename)
end
return false
end
end
end
- report_backend("profile specification '%s' not found in '%s'",name,concat(filenames, ", "))
+ report_backend("profile specification %a not found in %a",name,concat(filenames, ", "))
end
local function urls(url)
@@ -434,20 +436,20 @@ local function handleinternalprofile(s,include)
local fullname = locatefile(filename)
local channel = channels[colorspace] or nil
if fullname == "" then
- report_backend("error, couldn't locate profile '%s'",filename)
+ report_backend("error, couldn't locate profile %a",filename)
elseif not channel then
- report_backend("error, couldn't resolve channel entry for colorspace '%s'",colorspace)
+ report_backend("error, couldn't resolve channel entry for colorspace %a",colorspace)
else
profile = pdfflushstreamfileobject(fullname,pdfdictionary{ N = channel },false) -- uncompressed
internalprofiles[tag] = profile
if trace_format then
- report_backend("including '%s' color profile from '%s'",colorspace,fullname)
+ report_backend("including %a color profile from %a",colorspace,fullname)
end
end
else
internalprofiles[tag] = true
if trace_format then
- report_backend("not including '%s' color profile '%s'",colorspace,filename)
+ report_backend("not including %a color profile %a",colorspace,filename)
end
end
end
@@ -506,7 +508,7 @@ local function handledefaultprofile(s,spec) -- specification
local tag = profilename(filename)
local n = internalprofiles[tag] -- or externalprofiles[tag]
if n == true then -- not internalized
- report_backend("no default profile '%s' for colorspace '%s'",filename,colorspace)
+ report_backend("no default profile %a for colorspace %a",filename,colorspace)
elseif n then
local a = pdfarray {
pdfconstant("ICCBased"),
@@ -515,12 +517,12 @@ local function handledefaultprofile(s,spec) -- specification
-- used in page /Resources, so this must be inserted at runtime
lpdf.adddocumentcolorspace(prefixes[colorspace],pdfreference(pdfflushobject(a)))
loadeddefaults[colorspace] = true
- report_backend("setting '%s' as default '%s' color space",filename,colorspace)
+ report_backend("setting %a as default %a color space",filename,colorspace)
else
- report_backend("no default profile '%s' for colorspace '%s'",filename,colorspace)
+ report_backend("no default profile %a for colorspace %a",filename,colorspace)
end
elseif trace_format then
- report_backend("a default '%s' colorspace is already in use",colorspace)
+ report_backend("a default %a colorspace is already in use",colorspace)
end
end
@@ -547,18 +549,18 @@ local function handleoutputintent(s,spec)
elseif external and external ~= true then
d.DestOutputProfileRef = pdfreference(external)
else
- report_backend("omitting reference to profile for intent '%s'",name)
+ report_backend("omitting reference to profile for intent %a",name)
end
intents[#intents+1] = pdfreference(pdfflushobject(pdfdictionary(d)))
if trace_format then
- report_backend("setting output intent to '%s' with id '%s' (entry %s)",name,id,#intents)
+ report_backend("setting output intent to %a with id %a for entry %a",name,id,#intents)
end
else
- report_backend("invalid output intent '%s'",name)
+ report_backend("invalid output intent %a",name)
end
loadedintents[name] = true
elseif trace_format then
- report_backend("an output intent with name '%s' is already in use",name)
+ report_backend("an output intent with name %a is already in use",name)
end
end
@@ -569,7 +571,7 @@ local function handleiccprofile(message,spec,name,filename,how,options,alwaysinc
local name = list[i]
local profile = loadprofile(name,filename)
if trace_format then
- report_backend("handling %s '%s'",message,name)
+ report_backend("handling %s %a",message,name)
end
if profile then
if formatspecification.cmyk_colors then
@@ -595,22 +597,22 @@ local function handleiccprofile(message,spec,name,filename,how,options,alwaysinc
end
if external then
if trace_format then
- report_backend("handling external profiles cf. '%s'",name)
+ report_backend("handling external profiles cf. %a",name)
end
handleexternalprofile(profile,false)
else
if trace_format then
- report_backend("handling internal profiles cf. '%s'",name)
+ report_backend("handling internal profiles cf. %a",name)
end
if internal then
handleinternalprofile(profile,always or include)
else
- report_backend("no profile inclusion for '%s'",formatname)
+ report_backend("no profile inclusion for %a",formatname)
end
end
how(profile,spec)
elseif trace_format then
- report_backend("unknown profile '%s'",name)
+ report_backend("unknown profile %a",name)
end
end
end
@@ -625,29 +627,38 @@ end
lpdf.registerdocumentfinalizer(flushoutputintents,2,"output intents")
function codeinjections.setformat(s)
- local format, level, profile, intent, option, filename =
- s.format or "", s.level or "", s.profile or "", s.intent or "", s.option or "", s.file or ""
- if format == "" then
- -- we ignore this as we hook it in \everysetupbackend
- else
+ local format = s.format or ""
+ local level = tonumber(s.level)
+ local intent = s.intent or ""
+ local profile = s.profile or ""
+ local option = s.option or ""
+ local filename = s.file or ""
+ if format ~= "" then
local spec = formats[lower(format)]
if spec then
- formatspecification, formatname = spec, spec.format_name
- level = level and tonumber(level)
- report_backend("setting format to '%s'",formatname)
+ formatspecification = spec
+ formatname = spec.format_name
+ report_backend("setting format to %a",formatname)
local xmp_file = formatspecification.xmp_file or ""
if xmp_file == "" then
-- weird error
else
codeinjections.setxmpfile(xmp_file)
end
- local pdf_version, inject_metadata = spec.pdf_version * 10, spec.inject_metadata
- local majorversion, minorversion = math.div(pdf_version,10), math.mod(pdf_version,10)
+ if not level then
+ level = 3 -- good compromise, default anyway
+ end
+ local pdf_version = spec.pdf_version * 10
+ local inject_metadata = spec.inject_metadata
+ local majorversion = math.div(pdf_version,10)
+ local minorversion = math.mod(pdf_version,10)
local objectcompression = spec.object_compression and pdf_version >= 15
local compresslevel = level or tex.pdfcompresslevel -- keep default
local objectcompresslevel = (objectcompression and (level or tex.pdfobjcompresslevel)) or 0
- tex.pdfcompresslevel, tex.pdfobjcompresslevel = compresslevel, objectcompresslevel
- tex.pdfmajorversion, tex.pdfminorversion = majorversion, minorversion
+ texset("global","pdfcompresslevel",compresslevel)
+ texset("global","pdfobjcompresslevel",objectcompresslevel)
+ texset("global","pdfmajorversion",majorversion)
+ texset("global","pdfminorversion",minorversion)
if objectcompression then
report_backend("forcing pdf version %s.%s, compression level %s, object compression level %s",
majorversion,minorversion,compresslevel,objectcompresslevel)
@@ -694,16 +705,21 @@ function codeinjections.setformat(s)
for k, v in table.sortedhash(formats.default) do
local v = formatspecification[k]
if type(v) ~= "function" then
- report_backend("%s = %s",k,tostring(v or false))
+ report_backend("%a = %a",k,v or false)
end
end
end
function codeinjections.setformat(noname)
- report_backend("error, format is already set to '%s', ignoring '%s'",formatname,noname.format)
+ report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format)
end
else
- report_backend("error, format '%s' is not supported",format)
+ report_backend("error, format %a is not supported",format)
end
+ elseif level then
+ texset("global","pdfcompresslevel",level)
+ texset("global","pdfobjcompresslevel",level)
+ else
+ -- we ignore this as we hook it in \everysetupbackend
end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-grp.lua b/Master/texmf-dist/tex/context/base/lpdf-grp.lua
index aba5771fd6d..fed5e6a4665 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-grp.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-grp.lua
@@ -236,7 +236,7 @@ function img.package(image) -- see lpdf-u3d **
local height = boundingbox[4]
local xform = img.scan {
attr = resources(),
- stream = format("%s 0 0 %s 0 0 cm /%s Do",width,height,imagetag),
+ stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
bbox = { 0, 0, width/factor, height/factor },
}
img.immediatewrite(xform)
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ini.lua b/Master/texmf-dist/tex/context/base/lpdf-ini.lua
index 88999358ce1..0b1473d2f93 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ini.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ini.lua
@@ -8,10 +8,10 @@ if not modules then modules = { } end modules ['lpdf-ini'] = {
local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
-local utfvalues = string.utfvalues
-local utfchar = utf.char
+local utfchar, utfvalues = utf.char, utf.values
local sind, cosd = math.sind, math.cosd
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
+local formatters = string.formatters
local pdfreserveobject = pdf.reserveobj
local pdfimmediateobject = pdf.immediateobj
@@ -39,7 +39,7 @@ backends.pdf = backends.pdf or {
lpdf = lpdf or { }
local lpdf = lpdf
-local function tosixteen(str) -- an lpeg might be faster
+local function tosixteen(str) -- an lpeg might be faster (no table)
if not str or str == "" then
return "<feff>" -- not () as we want an indication that it's unicode
else
@@ -105,6 +105,12 @@ local function merge_t(a,b)
return setmetatable(t,getmetatable(a))
end
+local f_key_value = formatters["/%s %s"]
+local f_key_dictionary = formatters["/%s << % t >>"]
+local f_dictionary = formatters["<< % t >>"]
+local f_key_array = formatters["/%s [ % t ]"]
+local f_array = formatters["[ % t ]"]
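+-- the "% t" directive in these templates concatenates a table of entries with spaces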
+
local tostring_a, tostring_d
tostring_d = function(t,contentonly,key)
@@ -120,28 +126,28 @@ tostring_d = function(t,contentonly,key)
rn = rn + 1
local tv = type(v)
if tv == "string" then
- r[rn] = format("/%s %s",k,toeight(v))
+ r[rn] = f_key_value(k,toeight(v))
elseif tv == "unicode" then
- r[rn] = format("/%s %s",k,tosixteen(v))
+ r[rn] = f_key_value(k,tosixteen(v))
elseif tv == "table" then
local mv = getmetatable(v)
if mv and mv.__lpdftype then
- r[rn] = format("/%s %s",k,tostring(v))
+ r[rn] = f_key_value(k,tostring(v))
elseif v[1] then
- r[rn] = format("/%s %s",k,tostring_a(v))
+ r[rn] = f_key_value(k,tostring_a(v))
else
- r[rn] = format("/%s %s",k,tostring_d(v))
+ r[rn] = f_key_value(k,tostring_d(v))
end
else
- r[rn] = format("/%s %s",k,tostring(v))
+ r[rn] = f_key_value(k,tostring(v))
end
end
if contentonly then
- return concat(r, " ")
+ return concat(r," ")
elseif key then
- return format("/%s << %s >>", key, concat(r, " "))
+ return f_key_dictionary(key,r)
else
- return format("<< %s >>", concat(r, " "))
+ return f_dictionary(r)
end
end
end
@@ -180,9 +186,9 @@ tostring_a = function(t,contentonly,key)
if contentonly then
return concat(r, " ")
elseif key then
- return format("/%s [ %s ]", key, concat(r, " "))
+ return f_key_array(key,r)
else
- return format("[ %s ]", concat(r, " "))
+ return f_array(r)
end
end
end
@@ -358,10 +364,10 @@ function lpdf.reserveobject(name)
if name then
names[name] = r
if trace_objects then
- report_objects("reserving number %s under name '%s'",r,name)
+ report_objects("reserving number %a under name %a",r,name)
end
elseif trace_objects then
- report_objects("reserving number %s",r)
+ report_objects("reserving number %a",r)
end
return r
end
@@ -391,15 +397,15 @@ function lpdf.flushobject(name,data)
if named then
if not trace_objects then
elseif trace_detail then
- report_objects("flushing data to reserved object with name '%s' -> %s",name,tostring(data))
+ report_objects("flushing data to reserved object with name %a, data: %S",name,data)
else
- report_objects("flushing data to reserved object with name '%s'",name)
+ report_objects("flushing data to reserved object with name %a",name)
end
return pdfimmediateobject(named,tostring(data))
else
if not trace_objects then
elseif trace_detail then
- report_objects("flushing data to reserved object with number %s -> %s",name,tostring(data))
+ report_objects("flushing data to reserved object with number %s, data: %S",name,data)
else
report_objects("flushing data to reserved object with number %s",name)
end
@@ -407,7 +413,7 @@ function lpdf.flushobject(name,data)
end
else
if trace_objects and trace_detail then
- report_objects("flushing data -> %s",tostring(name))
+ report_objects("flushing data: %S",name)
end
return pdfimmediateobject(tostring(name))
end
@@ -430,7 +436,7 @@ end
function lpdf.flushstreamfileobject(filename,dict,compressed) -- default compressed
if trace_objects then
- report_objects("flushing stream file object '%s'",filename)
+ report_objects("flushing stream file object %a",filename)
end
local dtype = type(dict)
return pdfdeferredobject {
@@ -563,7 +569,7 @@ end
local function run(where,what)
if trace_finalizers then
- report_finalizing("start backend: category=%s, n=%s",what,#where)
+ report_finalizing("start backend, category %a, n %a",what,#where)
end
for i=1,#where do
local w = where[i]
@@ -619,12 +625,12 @@ callbacks.register("finish_pdffile", lpdf.finalizedocument)
local function trace_set(what,key)
if trace_resources then
- report_finalizing("setting key '%s' in '%s'",key,what)
+ report_finalizing("setting key %a in %a",key,what)
end
end
local function trace_flush(what)
if trace_resources then
- report_finalizing("flushing '%s'",what)
+ report_finalizing("flushing %a",what)
end
end
@@ -703,7 +709,7 @@ registerpagefinalizer(checkshades,3,"shades")
function lpdf.rotationcm(a)
local s, c = sind(a), cosd(a)
- return format("%s %s %s %s 0 0 cm",c,s,-s,c)
+ return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
end
-- ! -> universaltime
diff --git a/Master/texmf-dist/tex/context/base/lpdf-nod.lua b/Master/texmf-dist/tex/context/base/lpdf-nod.lua
index fe0c975f70e..0ce589c3208 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-nod.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-nod.lua
@@ -59,7 +59,7 @@ end
function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
local t = copy_node(pdfsetmatrix)
- t.data = format("%s %s %s %s",rs or 0,sx or 0,sy or 0,rx or 0) -- todo: tx ty
+ t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
return t
end
@@ -69,21 +69,68 @@ nodeinjections.transform = nodepool.pdfsetmatrix
function nodepool.pdfannotation(w,h,d,data,n)
local t = copy_node(pdfannot)
- if w and w ~= 0 then t.width = w end
- if h and h ~= 0 then t.height = h end
- if d and d ~= 0 then t.depth = d end
- if n then t.objnum = n end
- if data and data ~= "" then t.data = data end
+ if w and w ~= 0 then
+ t.width = w
+ end
+ if h and h ~= 0 then
+ t.height = h
+ end
+ if d and d ~= 0 then
+ t.depth = d
+ end
+ if n then
+ t.objnum = n
+ end
+ if data and data ~= "" then
+ t.data = data
+ end
return t
end
+-- (!) The next code in pdfdest.w is wrong:
+--
+-- case pdf_dest_xyz:
+-- if (matrixused()) {
+-- set_rect_dimens(pdf, p, parent_box, cur, alt_rule, pdf_dest_margin) ;
+-- } else {
+-- pdf_ann_left(p) = pos.h ;
+-- pdf_ann_top (p) = pos.v ;
+-- }
+-- break ;
+--
+-- so we need to force a matrix.
+
function nodepool.pdfdestination(w,h,d,name,view,n)
local t = copy_node(pdfdest)
- if w and w ~= 0 then t.width = w end
- if h and h ~= 0 then t.height = h end
- if d and d ~= 0 then t.depth = d end
- if n then t.objnum = n end
+ local hasdimensions = false
+ if w and w ~= 0 then
+ t.width = w
+ hasdimensions = true
+ end
+ if h and h ~= 0 then
+ t.height = h
+ hasdimensions = true
+ end
+ if d and d ~= 0 then
+ t.depth = d
+ hasdimensions = true
+ end
+ if n then
+ t.objnum = n
+ end
+ view = views[view] or view or 1 -- fit is default
t.dest_id = name
- t.dest_type = views[view] or view or 1 -- fit is default
- return t
+ t.dest_type = view
+ if hasdimensions and view == 0 then -- xyz
+ -- see (!) s -> m -> t -> r
+ local s = copy_node(pdfsave)
+ local m = copy_node(pdfsetmatrix)
+ local r = copy_node(pdfrestore)
+ m.data = format("1 0 0 1")
+ s.next = m m.next = t t.next = r
+ m.prev = s t.prev = m r.prev = t
+ return s -- a list
+ else
+ return t
+ end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ren.lua b/Master/texmf-dist/tex/context/base/lpdf-ren.lua
index e0c4b19731a..6af65f9de71 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ren.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ren.lua
@@ -9,7 +9,8 @@ if not modules then modules = { } end modules ['lpdf-ren'] = {
-- rendering
local tostring, tonumber, next = tostring, tonumber, next
-local format = string.format
+local format, rep = string.format, string.rep
+local concat = table.concat
local settings_to_array = utilities.parsers.settings_to_array
local backends, lpdf, nodes, node = backends, lpdf, nodes, node
@@ -60,51 +61,12 @@ local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off
-- hide and vide actions. This is why we need to be able to force usage of layers
-- at several moments.
--- injection
-
-local cache = { }
-
-function codeinjections.startlayer(name)
- codeinjections.useviewerlayer(name)
- return format("/OC /%s BDC",name)
-end
-
-function codeinjections.stoplayer(name)
- return "EMC"
-end
-
-function nodeinjections.startlayer(name)
- local c = cache[name]
- if not c then
- codeinjections.useviewerlayer(name)
- c = register(pdfliteral(format("/OC /%s BDC",name)))
- cache[name] = c
- end
- return copy_node(c)
-end
-
-local stop = register(pdfliteral("EMC"))
-
-function nodeinjections.stoplayer()
- return copy_node(stop)
-end
-
-local cache = { }
-
-function nodeinjections.switchlayer(name) -- not used, optimization
- local c = cache[name]
- if not c then
- codeinjections.useviewerlayer(name)
- c = register(pdfliteral(format("EMC /OC /%s BDC",name)))
- end
- return copy_node(c)
-end
-
-- management
local pdfln, pdfld = { }, { }
local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray()
local pagelayers, pagelayersreference, cache = nil, nil, { }
+local alphabetic = { }
local specifications = { }
local initialized = { }
@@ -149,6 +111,7 @@ local function useviewerlayer(name) -- move up so that we can use it as local
cache[#cache+1] = { dn, dd }
pdfld[tag] = dr
textlayers[#textlayers+1] = nr
+ alphabetic[tag] = nr
if specification.visible == v_start then
videlayers[#videlayers+1] = nr
else
@@ -185,11 +148,16 @@ local function flushtextlayers()
pdfflushobject(ci[1],ci[2])
end
if textlayers and #textlayers > 0 then -- we can group them if needed, like: layout
+ local sortedlayers = { }
+ for k, v in table.sortedhash(alphabetic) do
+ sortedlayers[#sortedlayers+1] = v -- maybe do a proper numeric sort as well
+ end
local d = pdfdictionary {
OCGs = textlayers,
D = pdfdictionary {
Name = "Document",
- Order = (viewerlayers.hasorder and textlayers) or nil,
+ -- Order = (viewerlayers.hasorder and textlayers) or nil,
+ Order = (viewerlayers.hasorder and sortedlayers) or nil,
ON = videlayers,
OFF = hidelayers,
BaseState = pdf_on,
@@ -230,6 +198,74 @@ function executers.hidelayer (arguments) return setlayer(pdf_off, arguments)
function executers.videlayer (arguments) return setlayer(pdf_on, arguments) end
function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments) end
+-- injection
+
+function codeinjections.startlayer(name) -- used in mp
+ if not name then
+ name = "unknown"
+ end
+ useviewerlayer(name)
+ return format("/OC /%s BDC",name)
+end
+
+function codeinjections.stoplayer(name) -- used in mp
+ return "EMC"
+end
+
+local cache = { }
+
+function nodeinjections.startlayer(name)
+ local c = cache[name]
+ if not c then
+ useviewerlayer(name)
+ c = register(pdfliteral(format("/OC /%s BDC",name)))
+ cache[name] = c
+ end
+ return copy_node(c)
+end
+
+local stop = register(pdfliteral("EMC"))
+
+function nodeinjections.stoplayer()
+ return copy_node(stop)
+end
+
+-- experimental stacker code (slow, can be optimized): !!!! TEST CODE !!!!
+
+local values = viewerlayers.values
+local startlayer = codeinjections.startlayer
+local stoplayer = codeinjections.stoplayer
+
+function nodeinjections.startstackedlayer(s,t,first,last)
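+ -- emits one "/OC /<layer> BDC" per entry in t[first..last], joined into a single pdf literal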
+ local r = { }
+ for i=first,last do
+ r[#r+1] = startlayer(values[t[i]])
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
+function nodeinjections.stopstackedlayer(s,t,first,last)
+ local r = { }
+ for i=last,first,-1 do
+ r[#r+1] = stoplayer()
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
+function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2)
+ local r = { }
+ for i=last1,first1,-1 do
+ r[#r+1] = stoplayer()
+ end
+ for i=first2,last2 do
+ r[#r+1] = startlayer(values[t2[i]])
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
-- transitions
local pagetransitions = {
diff --git a/Master/texmf-dist/tex/context/base/lpdf-swf.lua b/Master/texmf-dist/tex/context/base/lpdf-swf.lua
index 4bbec8dbe84..12c80036fd2 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-swf.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-swf.lua
@@ -108,10 +108,10 @@ local function insertswf(spec)
local names = configuration.Assets.Names
local prefix = false
if root ~= "" and root ~= "." then
- prefix = format("^%s/",string.escapedpattern(root,true))
+ prefix = format("^%s/",string.topattern(root))
end
if prefix and trace_swf then
- report_swf("using strip pattern '%s'",prefix)
+ report_swf("using strip pattern %a",prefix)
end
local function add(fullname,strip)
local filename = gsub(fullname,"^%./","")
@@ -124,7 +124,7 @@ local function insertswf(spec)
names[#names+1] = pdfstring(filename)
names[#names+1] = embeddedreference
if trace_swf then
- report_swf("embedding file '%s' as '%s'",fullname,usedname)
+ report_swf("embedding file %a as %a",fullname,usedname)
end
end
relativepaths = resources.relativepaths
@@ -135,7 +135,7 @@ local function insertswf(spec)
for i=1,#relativepaths do
local relativepath = relativepaths[i]
if trace_swf then
- report_swf("checking path '%s' relative to '%s'",relativepath,root)
+ report_swf("checking path %a relative to %a",relativepath,root)
end
local path = file.join(root == "" and "." or root,relativepath)
local files = dir.glob(path .. "/**")
@@ -152,7 +152,7 @@ local function insertswf(spec)
for i=1,#paths do
local path = paths[i]
if trace_swf then
- report_swf("checking path '%s'",path)
+ report_swf("checking path %a",path)
end
local files = dir.glob(path .. "/**")
for i=1,#files do
diff --git a/Master/texmf-dist/tex/context/base/lpdf-tag.lua b/Master/texmf-dist/tex/context/base/lpdf-tag.lua
index 0be9d3452be..8cdb5f6a415 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-tag.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-tag.lua
@@ -45,9 +45,11 @@ local glyph_code = nodecodes.glyph
local a_tagged = attributes.private('tagged')
local a_image = attributes.private('image')
-local has_attribute, set_attribute, traverse_nodes, traverse_id = node.has_attribute, node.set_attribute, node.traverse, node.traverse_id
-local tosequence = nodes.tosequence
-local copy_node, slide_nodelist = node.copy, node.slide
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local tosequence = nodes.tosequence
+local copy_node = node.copy
+local slide_nodelist = node.slide
local structure_stack = { }
local structure_kids = pdfarray()
@@ -226,7 +228,7 @@ local function collectranges(head,list)
for n in traverse_nodes(head) do
local id = n.id -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = has_attribute(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
elseif last ~= at then
@@ -237,9 +239,9 @@ local function collectranges(head,list)
range[4] = n -- stop
end
elseif id == hlist_code or id == vlist_code then
- local at = has_attribute(n,a_image)
+ local at = n[a_image]
if at then
- local at = has_attribute(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
else
@@ -266,7 +268,7 @@ function nodeinjections.addtags(head)
local attr, id, start, stop = range[1], range[2], range[3], range[4]
local tags = taglist[attr]
if tags then -- not ok ... only first lines
- report_tags("%s => %s : %05i %s",tosequence(start,start),tosequence(stop,stop),attr,concat(tags," "))
+ report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-u3d.lua b/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
index ac603899722..33269486c18 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
@@ -370,7 +370,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
subtype = "PRC"
elseif find(subdata,"^U3D") then
subtype = "U3D"
- elseif file.extname(foundname) == "prc" then
+ elseif file.suffix(foundname) == "prc" then
subtype = "PRC"
end
@@ -462,7 +462,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
},
ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
}
- local pwd = pdfflushstreamobject(format("q /GS gs %s 0 0 %s 0 0 cm /IM Do Q",factor*width,factor*height),pw)
+ local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
annot.AP = pdfdictionary {
N = pdfreference(pwd)
}
diff --git a/Master/texmf-dist/tex/context/base/lpdf-wid.lua b/Master/texmf-dist/tex/context/base/lpdf-wid.lua
index 13940be9c86..9ea4744f164 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-wid.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-wid.lua
@@ -185,7 +185,7 @@ local function flushembeddedfiles()
local e = pdfarray()
for tag, reference in next, filestreams do
if not reference then
- report_attachment("unreferenced file: tag '%s'",tag)
+ report_attachment("unreferenced file, tag %a",tag)
elseif referenced[tag] == "hidden" then
e[#e+1] = pdfstring(tag)
e[#e+1] = reference -- already a reference
@@ -279,13 +279,13 @@ function nodeinjections.attachfile(specification)
else
filename = specification.file
if not filename or filename == "" then
- report_attachment("missing file specification: registered '%s', using registered instead",registered)
+ report_attachment("no file specified, using registered %a instead",registered)
filename = registered
specification.file = registered
end
local foundname = resolvers.findbinfile(filename) or ""
if foundname == "" or not lfs.isfile(foundname) then
- report_attachment("invalid file specification: registered '%s', filename '%s'",registered,filename)
+ report_attachment("invalid filename %a, ignoring registered %a",filename,registered)
return nil
else
specification.foundname = foundname
@@ -317,7 +317,7 @@ function nodeinjections.attachfile(specification)
attachments[registered] = aref
end
if not aref then
- report_attachment("skipping: registered '%s'",registered)
+ report_attachment("skipping attachment, registered %a",registered)
-- already reported
elseif specification.method == v_hidden then
referenced[hash] = "hidden"
@@ -595,7 +595,7 @@ end
local function insertrenderingobject(specification) -- todo
local label = specification.label
if not mf[label] then
- report_media("todo: unknown medium '%s'",label or "?")
+ report_media("unknown medium, label %a",label)
local clip = pdfdictionary { -- does not work that well one level up
Type = pdfconstant("MediaClip"),
S = pdfconstant("MCD"),
diff --git a/Master/texmf-dist/tex/context/base/lpdf-xmp.lua b/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
index 02d37342be9..061ed075718 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
@@ -83,7 +83,7 @@ local xmp, xmpfile, xmpname = nil, nil, "lpdf-pdx.xml"
local function setxmpfile(name)
if xmp then
- report_xmp("discarding loaded file '%s'",xmpfile)
+ report_xmp("discarding loaded file %a",xmpfile)
xmp = nil
end
xmpfile = name ~= "" and name
@@ -102,7 +102,7 @@ local function valid_xmp()
xmpfile = resolvers.findfile(xmpname) or ""
end
if xmpfile ~= "" then
- report_xmp("using file '%s'",xmpfile)
+ report_xmp("using file %a",xmpfile)
end
local xmpdata = (xmpfile ~= "" and io.loaddata(xmpfile)) or ""
xmp = xml.convert(xmpdata)
@@ -126,7 +126,7 @@ function lpdf.addtoinfo(tag,pdfvalue,strvalue)
addtoinfo(tag,pdfvalue)
local value = strvalue or gsub(tostring(pdfvalue),"^%((.*)%)$","%1") -- hack
if trace_info then
- report_info("set '%s' to '%s'",tag,value)
+ report_info("set %a to %a",tag,value)
end
addxmpinfo(tag,value)
end
@@ -180,9 +180,14 @@ local function flushxmpinfo()
Type = pdfconstant("Metadata"),
}
if trace_xmp then
- report_xmp("data flushed (see log file)")
- texio.write_nl("log","")
- texio.write("log","\n% ",(gsub(blob,"[\r\n]","\n%% ")),"\n")
+ report_xmp("data flushed, see log file")
+ logs.pushtarget("logfile")
+ report_xmp("start xmp blob")
+ logs.newline()
+ logs.writer(blob)
+ logs.newline()
+ report_xmp("stop xmp blob")
+ logs.poptarget()
end
blob = format(xpacket,packetid,blob)
if not verbose and tex.pdfcompresslevel > 0 then
diff --git a/Master/texmf-dist/tex/context/base/luat-bas.mkiv b/Master/texmf-dist/tex/context/base/luat-bas.mkiv
index 683c0e92f8a..e24568b0a73 100644
--- a/Master/texmf-dist/tex/context/base/luat-bas.mkiv
+++ b/Master/texmf-dist/tex/context/base/luat-bas.mkiv
@@ -13,22 +13,24 @@
\writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
-\registerctxluafile{l-string} {1.001}
-\registerctxluafile{l-table} {1.001}
-\registerctxluafile{l-lpeg} {1.001}
-\registerctxluafile{l-boolean}{1.001}
-\registerctxluafile{l-number} {1.001}
-\registerctxluafile{l-math} {1.001}
-%registerctxluafile{l-aux} {1.001}
-\registerctxluafile{l-io} {1.001}
-\registerctxluafile{l-os} {1.001}
-\registerctxluafile{l-file} {1.001}
-\registerctxluafile{l-md5} {1.001}
-\registerctxluafile{l-dir} {1.001}
-\registerctxluafile{l-unicode}{1.001}
-%registerctxluafile{l-utils} {1.001}
-\registerctxluafile{l-url} {1.001}
-\registerctxluafile{l-set} {1.001}
+\registerctxluafile{l-lua} {1.001}
+\registerctxluafile{l-lpeg} {1.001}
+\registerctxluafile{l-function}{1.001}
+\registerctxluafile{l-string} {1.001}
+\registerctxluafile{l-table} {1.001}
+\registerctxluafile{l-boolean} {1.001}
+\registerctxluafile{l-number} {1.001}
+\registerctxluafile{l-math} {1.001}
+%registerctxluafile{l-aux} {1.001}
+\registerctxluafile{l-io} {1.001}
+\registerctxluafile{l-os} {1.001}
+\registerctxluafile{l-file} {1.001}
+\registerctxluafile{l-md5} {1.001}
+\registerctxluafile{l-dir} {1.001}
+\registerctxluafile{l-unicode} {1.001}
+%registerctxluafile{l-utils} {1.001}
+\registerctxluafile{l-url} {1.001}
+\registerctxluafile{l-set} {1.001}
% \registerctxluafile{socket.lua}{}
% \registerctxluafile{ltn12.lua} {}
diff --git a/Master/texmf-dist/tex/context/base/luat-bwc.lua b/Master/texmf-dist/tex/context/base/luat-bwc.lua
index f893c7c5b5d..993de7bf3f4 100644
--- a/Master/texmf-dist/tex/context/base/luat-bwc.lua
+++ b/Master/texmf-dist/tex/context/base/luat-bwc.lua
@@ -27,6 +27,6 @@ if not tex.wd then
__newindex = function(t,k,v) local bk = box[k] if bk then bk.depth = v end end,
} )
---~ tex.wd, tex.ht, tex.dp = wd, ht, dp
+ -- tex.wd, tex.ht, tex.dp = wd, ht, dp
end
diff --git a/Master/texmf-dist/tex/context/base/luat-cbk.lua b/Master/texmf-dist/tex/context/base/luat-cbk.lua
index 6622c64cd8c..f8c6926f075 100644
--- a/Master/texmf-dist/tex/context/base/luat-cbk.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cbk.lua
@@ -34,10 +34,13 @@ functions.</p>
local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end)
local trace_calls = false -- only used when analyzing performance and initializations
-local register_callback, find_callback, list_callbacks = callback.register, callback.find, callback.list
+local register_callback = callback.register
+local find_callback = callback.find
+local list_callbacks = callback.list
+
local frozen, stack, list = { }, { }, callbacks.list
-if not callbacks.list then -- otherwise counters get reset
+if not list then -- otherwise counters get reset
list = utilities.storage.allocate(list_callbacks())
@@ -80,11 +83,11 @@ if trace_calls then
end
local function frozen_message(what,name)
- report_callbacks("not %s frozen '%s' (%s)",what,name,frozen[name])
+ report_callbacks("not %s frozen %a to %a",what,name,frozen[name])
end
local function frozen_callback(name)
- return nil, format("callback '%s' is frozen (%s)",name,frozen[name])
+ return nil, format("callback '%s' is frozen to '%s'",name,frozen[name]) -- no formatter yet
end
local function state(name)
@@ -113,15 +116,6 @@ function callbacks.report()
end
end
-function callbacks.table()
- local NC, NR, verbatim = context.NC, context.NR, context.type
- context.starttabulate { "|l|l|p|" }
- for name, _ in sortedhash(list) do
- NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR()
- end
- context.stoptabulate()
-end
-
function callbacks.freeze(name,freeze)
freeze = type(freeze) == "string" and freeze
if find(name,"%*") then
@@ -143,7 +137,7 @@ function callbacks.register(name,func,freeze)
end
return frozen_callback(name)
elseif freeze then
- frozen[name] = (type(freeze) == "string" and freeze) or "registered"
+ frozen[name] = type(freeze) == "string" and freeze or "registered"
end
if delayed[name] and environment.initex then
return nil
@@ -160,7 +154,7 @@ function callback.register(name,func) -- original
return frozen_callback(name)
end
-function callbacks.push(name, func)
+function callbacks.push(name,func)
if not frozen[name] then
local sn = stack[name]
if not sn then
@@ -311,3 +305,16 @@ function garbagecollector.check(size,criterium)
end
end
end
+
+-- this will move
+
+commands = commands or { }
+
+function commands.showcallbacks()
+ local NC, NR, verbatim = context.NC, context.NR, context.type
+ context.starttabulate { "|l|l|p|" }
+ for name, _ in sortedhash(list) do
+ NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR()
+ end
+ context.stoptabulate()
+end
diff --git a/Master/texmf-dist/tex/context/base/luat-cnf.lua b/Master/texmf-dist/tex/context/base/luat-cnf.lua
index 609dc412bcf..3672c603ece 100644
--- a/Master/texmf-dist/tex/context/base/luat-cnf.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cnf.lua
@@ -30,22 +30,22 @@ texconfig.param_size = 25000 -- 60
texconfig.save_size = 50000 -- 4000
texconfig.stack_size = 10000 -- 300
---~ local function initialize()
---~ local t, variable = allocate(), resolvers.variable
---~ for name, default in next, variablenames do
---~ local name = variablenames[i]
---~ local value = variable(name)
---~ value = tonumber(value)
---~ if not value or value == "" or value == 0 then
---~ value = default
---~ end
---~ texconfig[name], t[name] = value, value
---~ end
---~ initialize = nil
---~ return t
---~ end
-
---~ luatex.variables = initialize()
+-- local function initialize()
+-- local t, variable = allocate(), resolvers.variable
+-- for name, default in next, variablenames do
+-- local name = variablenames[i]
+-- local value = variable(name)
+-- value = tonumber(value)
+-- if not value or value == "" or value == 0 then
+-- value = default
+-- end
+-- texconfig[name], t[name] = value, value
+-- end
+-- initialize = nil
+-- return t
+-- end
+--
+-- luatex.variables = initialize()
local stub = [[
@@ -71,9 +71,9 @@ function texconfig.init()
local builtin, globals = { }, { }
- libraries = { -- we set it her as we want libraries also 'indexed'
+ libraries = { -- we set it here as we want libraries also 'indexed'
basiclua = {
- "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package",
+ "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
},
basictex = { -- noad
"callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token",
@@ -88,6 +88,14 @@ function texconfig.init()
"fontforge", -- can be filled by luat-log
"kpse",
},
+ functions = {
+ "assert", "pcall", "xpcall", "error", "collectgarbage",
+ "dofile", "load","loadfile", "require", "module",
+ "getmetatable", "setmetatable",
+ "ipairs", "pairs", "rawequal", "rawget", "rawset", "next",
+ "tonumber", "tostring",
+ "type", "unpack", "select", "print",
+ },
builtin = builtin, -- to be filled
globals = globals, -- to be filled
}
@@ -96,46 +104,60 @@ function texconfig.init()
globals[k] = tostring(v)
end
- local function collect(t)
+ local function collect(t,fnc)
local lib = { }
for k, v in next, t do
- local keys = { }
- local gv = _G[v]
- if type(gv) == "table" then
- for k, v in next, gv do
- keys[k] = tostring(v) -- true -- by tostring we cannot call overloades functions (security)
+ if fnc then
+ lib[v] = _G[v]
+ else
+ local keys = { }
+ local gv = _G[v]
+ local tv = type(gv)
+ if tv == "table" then
+ for k, v in next, gv do
+          keys[k] = tostring(v) -- true -- by tostring we cannot call overloaded functions (security)
+ end
end
+ lib[v] = keys
+ builtin[v] = keys
end
- lib[v] = keys
- builtin[v] = keys
end
return lib
end
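As a rough illustration of the two collect modes above (addresses invented):

    -- collect({ "string" }) records serialized keys in builtin and returns
    --   { string = { format = "function: 0x...", rep = "function: 0x...", ... } }
    -- collect({ "print" }, true) keeps the function values themselves:
    --   { print = print }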
- libraries.basiclua = collect(libraries.basiclua)
- libraries.basictex = collect(libraries.basictex)
- libraries.extralua = collect(libraries.extralua)
- libraries.extratex = collect(libraries.extratex)
- libraries.obsolete = collect(libraries.obsolete)
+ libraries.basiclua = collect(libraries.basiclua)
+ libraries.basictex = collect(libraries.basictex)
+ libraries.extralua = collect(libraries.extralua)
+ libraries.extratex = collect(libraries.extratex)
+ libraries.functions = collect(libraries.functions,true)
+ libraries.obsolete = collect(libraries.obsolete)
-- shortcut and helper
local function init(start)
local b = lua.bytecode
local i = start
+ local t = os.clock()
while b[i] do
b[i]() ;
b[i] = nil ;
i = i + 1
-- collectgarbage('step')
end
- return i - start
+ return i - start, os.clock() - t
end
-- the stored tables and modules
- storage.noftables = init(0)
- storage.nofmodules = init(%s)
+ storage.noftables , storage.toftables = init(0)
+ storage.nofmodules, storage.tofmodules = init(%s)
+
+ if modules then
+ local loaded = package.loaded
+ for module, _ in next, modules do
+ loaded[module] = true
+ end
+ end
end
@@ -149,7 +171,6 @@ end)
-- done, from now on input and callbacks are internal
]]
-
local variablenames = {
"error_line", "half_error_line",
"expand_depth", "hash_extra", "nest_size",
diff --git a/Master/texmf-dist/tex/context/base/luat-cod.lua b/Master/texmf-dist/tex/context/base/luat-cod.lua
index b022f31c3e9..8b015477f4e 100644
--- a/Master/texmf-dist/tex/context/base/luat-cod.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cod.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['luat-cod'] = {
license = "see context related readme files"
}
+local type, loadfile = type, loadfile
local match, gsub, find, format = string.match, string.gsub, string.find, string.format
local texconfig, lua = texconfig, lua
@@ -19,11 +20,13 @@ texconfig.max_in_open = 127
-- registering bytecode chunks
-lua.bytecode = lua.bytecode or { } -- built in anyway
-lua.bytedata = lua.bytedata or { }
-lua.bytedone = lua.bytedone or { }
+local bytecode = lua.bytecode or { }
+local bytedata = lua.bytedata or { }
+local bytedone = lua.bytedone or { }
-local bytecode, bytedata, bytedone = lua.bytecode, lua.bytedata, lua.bytedone
+lua.bytecode = bytecode -- built in anyway
+lua.bytedata = bytedata
+lua.bytedone = bytedone
lua.firstbytecode = 501
lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier
@@ -32,18 +35,19 @@ function lua.registeredcodes()
return lua.lastbytecode - lua.firstbytecode + 1
end
+-- no file.* functions yet
+
function lua.registercode(filename,version)
local barename = gsub(filename,"%.[%a%d]+$","")
if barename == filename then filename = filename .. ".lua" end
local basename = match(barename,"^.+[/\\](.-)$") or barename
- if not bytedone[barename] then
+ if not bytedone[basename] then
local code = environment.luafilechunk(filename)
if code then
- assert(code)()
- bytedone[barename] = true
+ bytedone[basename] = true
if environment.initex then
local n = lua.lastbytecode + 1
- bytedata[n] = { barename, version }
+ bytedata[n] = { barename, version or "0.000" }
bytecode[n] = code
lua.lastbytecode = n
end
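A minimal usage sketch of the registration above; this is how \registerctxluafile (defined in luat-cod.mkiv further down) ends up calling it:

    -- sketch: loads luat-fio.lua once and, in ini mode, stores the chunk in
    -- the next bytecode register together with the given version (or "0.000")
    lua.registercode("luat-fio", "1.001")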
@@ -54,10 +58,11 @@ end
local finalizers = { }
function lua.registerfinalizer(f,comment)
+ comment = comment or "unknown"
if type(f) == "function" then
finalizers[#finalizers+1] = { action = f, comment = comment }
else
- print(format("fatal error: invalid finalizer, action: %s",finalizer.comment or "unknown"))
+ print(format("\nfatal error: invalid finalizer, action: %s\n",comment))
os.exit()
end
end
@@ -67,7 +72,7 @@ function lua.finalize(logger)
local finalizer = finalizers[i]
finalizer.action()
if logger then
- logger("finalizing lua", "action: %s",finalizer.comment)
+ logger("finalize action: %s",finalizer.comment)
end
end
end
@@ -98,13 +103,18 @@ if not environment.luafilechunk then
end
local data = loadfile(filename)
texio.write("<",data and "+ " or "- ",filename,">")
+ if data then
+ data()
+ end
return data
end
end
-if not environment.engineflags then
+if not environment.engineflags then -- raw flags
+
local engineflags = { }
+
for i=-10,#arg do
local a = arg[i]
if a then
@@ -114,7 +124,9 @@ if not environment.engineflags then
end
end
end
+
environment.engineflags = engineflags
+
end
-- We need a few premature callbacks in the format generator. We
diff --git a/Master/texmf-dist/tex/context/base/luat-cod.mkiv b/Master/texmf-dist/tex/context/base/luat-cod.mkiv
index 930532df97c..9ce6161c32c 100644
--- a/Master/texmf-dist/tex/context/base/luat-cod.mkiv
+++ b/Master/texmf-dist/tex/context/base/luat-cod.mkiv
@@ -15,12 +15,6 @@
\unprotect
-%D We have this one for a rather long time now but nowadays \ETEX\
-%D provides a command with the same name and different meaning. That
-%D one is available as \type {\normalexpanded}.
-
-\long\def\expanded#1{\long\xdef\lastexpanded{\noexpand#1}\lastexpanded}
-
%D We cannot use the following due to the fact that existing usage
%D demanded duplicating hashes.
%D
@@ -55,14 +49,14 @@
% we can drop the \zerocount as it's default
-\def\ctxdirectlua {\directlua\zerocount}
-\def\ctxlatelua {\latelua \zerocount}
-\def\ctxsprint #1{\directlua\zerocount{tex.sprint(tex.ctxcatcodes,#1)}} % saves tokens
-\def\ctxwrite #1{\directlua\zerocount{tex.write(#1)}} % saves tokens
-\def\ctxcommand #1{\directlua\zerocount{commands.#1}} % saves tokens
-\def\ctxdirectcommand#1{\directlua\zerocount{commands.#1}} % saves tokens
-\def\ctxlatecommand #1{\latelua \zerocount{commands.#1}} % saves tokens
-\def\ctxreport #1{\directlua\zerocount{logs.writer[[#1]]}}
+\let\ctxdirectlua \directlua
+\let\ctxlatelua \latelua
+\def\ctxsprint #1{\directlua{tex.sprint(tex.ctxcatcodes,#1)}} % saves tokens
+\def\ctxwrite #1{\directlua{tex.write(#1)}} % saves tokens
+\def\ctxcommand #1{\directlua{commands.#1}} % saves tokens
+\def\ctxdirectcommand#1{\directlua{commands.#1}} % saves tokens
+\def\ctxlatecommand #1{\latelua {commands.#1}} % saves tokens
+\def\ctxreport #1{\directlua{logs.writer[[#1]]}}
%D Take your choice \unknown
@@ -73,7 +67,7 @@
%D Reporting the version of \LUA\ that we use is done as follows:
-\edef\luaversion{\ctxwrite{_VERSION}} % no context luaded yet
+\edef\luaversion{\ctxwrite{_VERSION}}
\def\registerctxluafile#1#2{\ctxlua{lua.registercode("#1","#2")}}
\def\ctxloadluafile #1{\ctxlua{lua.registercode("#1")}}
diff --git a/Master/texmf-dist/tex/context/base/luat-env.lua b/Master/texmf-dist/tex/context/base/luat-env.lua
index 4f1b661c218..8753972c6fe 100644
--- a/Master/texmf-dist/tex/context/base/luat-env.lua
+++ b/Master/texmf-dist/tex/context/base/luat-env.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['luat-env'] = {
+ if not modules then modules = { } end modules ['luat-env'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,51 +6,25 @@ if not modules then modules = { } end modules ['luat-env'] = {
license = "see context related readme files"
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
+-- A former version provided functionality for non-embedded core scripts, i.e. runtime
+-- library loading. Given the amount of Lua code we use now, this no longer makes
+-- sense. Much of this evolved before bytecode arrays were available and so a lot of
-- code has disappeared already.
+local rawset, rawget, loadfile, assert = rawset, rawget, loadfile, assert
+
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_lua = logs.reporter("resolvers","lua")
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
-
--- precautions
-
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+local luautilities = utilities.lua
+local luasuffixes = luautilities.suffixes
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
- end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
-end
+environment = environment or { }
+local environment = environment
-- environment
-environment = environment or { }
-local environment = environment
-
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
-
local mt = {
__index = function(_,k)
if k == "version" then
@@ -61,6 +35,14 @@ local mt = {
else
return "unknown"
end
+ elseif k == "kind" then
+ local kind = tex.toks and tex.toks.contextkindtoks
+ if kind and kind ~= "" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
elseif k == "jobname" or k == "formatname" then
local name = tex and tex[k]
if name or name== "" then
@@ -79,162 +61,13 @@ local mt = {
setmetatable(environment,mt)
-function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
- end
- end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
-end
-
-function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
-end
-
-function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
-end
-
---~ -- to be tested:
---~
---~ function environment.reconstructcommandline(arg,noquote)
---~ arg = arg or environment.originalarguments
---~ if noquote and #arg == 1 then
---~ return unquoted(resolvers.resolve(arg[1]))
---~ elseif #arg > 0 then
---~ local result = { }
---~ for i=1,#arg do
---~ result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
---~ end
---~ return concat(result," ")
---~ else
---~ return ""
---~ end
---~ end
-
-if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
-
- environment.initializearguments(newarg)
-
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
-
- arg = { } -- prevent duplicate handling
-
-end
-
-- weird place ... depends on a not yet loaded module
function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
-function environment.luafile(filename)
+function environment.luafile(filename) -- needs checking
local resolved = resolvers.findfile(filename,'tex') or ""
if resolved ~= "" then
return resolved
@@ -246,22 +79,36 @@ function environment.luafile(filename)
return resolvers.findfile(filename,'luatexlibs') or ""
end
-environment.loadedluacode = loadfile -- can be overloaded
+-- local function checkstrip(filename)
+-- local modu = modules[file.nameonly(filename)]
+-- return modu and modu.dataonly
+-- end
+
+local stripindeed = false directives.register("system.compile.strip", function(v) stripindeed = v end)
+
+local function strippable(filename)
+ if stripindeed then
+ local modu = modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
+end
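A hedged sketch of how the directive above is meant to be used (directives.enable comes from the trac-set setters; the module name is only an example):

    -- sketch: with the directive enabled, only modules flagged as dataonly
    -- (for instance modules["char-def"].dataonly) become candidates for stripping
    directives.enable("system.compile.strip")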
function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
+ local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
if trace_locating then
- report_lua("loading file %s%s", fullname, not data and " failed" or "")
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
elseif not silent then
texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
if trace_locating then
- report_lua("unknown file %s", filename)
+ report_lua("unknown file %a",filename)
end
return nil
end
@@ -273,16 +120,19 @@ function environment.loadluafile(filename, version)
local lucname, luaname, chunk
local basename = file.removesuffix(filename)
if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
+ luaname = file.addsuffix(basename,luasuffixes.lua)
+ lucname = file.addsuffix(basename,luasuffixes.luc)
else
- lucname, luaname = nil, basename -- forced suffix
+ luaname = basename -- forced suffix
+ lucname = nil
end
-- when not overloaded by explicit suffix we look for a luc file first
local fullname = (lucname and environment.luafile(lucname)) or ""
if fullname ~= "" then
if trace_locating then
- report_lua("loading %s", fullname)
+ report_lua("loading %a",fullname)
end
+ -- maybe: package.loaded[file.nameonly(fullname)] = true
chunk = loadfile(fullname) -- this way we don't need a file exists check
end
if chunk then
@@ -299,7 +149,7 @@ function environment.loadluafile(filename, version)
return true
else
if trace_locating then
- report_lua("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
end
environment.loadluafile(filename)
end
@@ -310,12 +160,12 @@ function environment.loadluafile(filename, version)
fullname = (luaname and environment.luafile(luaname)) or ""
if fullname ~= "" then
if trace_locating then
- report_lua("loading %s", fullname)
+ report_lua("loading %a",fullname)
end
chunk = loadfile(fullname) -- this way we don't need a file exists check
if not chunk then
if trace_locating then
- report_lua("unknown file %s", filename)
+ report_lua("unknown file %a",filename)
end
else
assert(chunk)()
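In short, the lookup order implemented above, as a hedged sketch (module name invented):

    -- sketch: prefer my-module.luc when its embedded version matches 1.001,
    -- otherwise fall back to my-module.lua
    environment.loadluafile("my-module", 1.001)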
diff --git a/Master/texmf-dist/tex/context/base/luat-exe.lua b/Master/texmf-dist/tex/context/base/luat-exe.lua
index 0e9a943131f..a57a5a006fb 100644
--- a/Master/texmf-dist/tex/context/base/luat-exe.lua
+++ b/Master/texmf-dist/tex/context/base/luat-exe.lua
@@ -10,6 +10,7 @@ if not modules then modules = { } end modules ['luat-exe'] = {
local match, find, gmatch = string.match, string.find, string.gmatch
local concat = table.concat
+local select = select
local report_executers = logs.reporter("system","executers")
@@ -29,20 +30,20 @@ local spawn = osspawn
local popen = iopopen
local function register(...)
- local t = { ... }
- for k=1,#t do
- local v = t[k]
- permitted[#permitted+1] = (v == "*" and ".*") or v
+ for k=1,select("#",...) do
+ local v = select(k,...)
+ permitted[#permitted+1] = v == "*" and ".*" or v
end
end
local function prepare(...)
-- todo: make more clever first split
local t = { ... }
+    local n = #t
local one = t[1]
- if #t == 1 then
+ if n == 1 then
if type(one) == 'table' then
- return one, concat(t," ",2,#t)
+ return one, concat(t," ",2,n)
else
local name, arguments = match(one,"^(.-)%s+(.+)$")
if name and arguments then
@@ -52,7 +53,7 @@ local function prepare(...)
end
end
else
- return one, concat(t," ",2,#t)
+ return one, concat(t," ",2,n)
end
end
@@ -63,7 +64,6 @@ local function executer(action)
local v = permitted[k]
if find(name,v) then
return action(name .. " " .. arguments)
- -- print("executed: " .. name .. " " .. arguments)
else
report_executers("not permitted: %s %s",name,arguments)
end
diff --git a/Master/texmf-dist/tex/context/base/luat-fio.lua b/Master/texmf-dist/tex/context/base/luat-fio.lua
index 8e7988c4ee8..d61c6f14275 100644
--- a/Master/texmf-dist/tex/context/base/luat-fio.lua
+++ b/Master/texmf-dist/tex/context/base/luat-fio.lua
@@ -6,9 +6,6 @@ if not modules then modules = { } end modules ['luat-fio'] = {
license = "see context related readme files"
}
-local texiowrite_nl = (texio and texio.write_nl) or print
-local texiowrite = (texio and texio.write) or print
-
local format = string.format
local concat = table.concat
local sequenced = table.sequenced
@@ -43,40 +40,42 @@ if not resolvers.instance then
register('find_read_file' , function(id,name) return findtexfile(name) end, true)
register('open_read_file' , function( name) return opentexfile(name) end, true)
- register('find_data_file' , function(name) return findbinfile(name,"tex") end, true)
- register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true)
- register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true)
- register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true)
- register('find_image_file' , function(name) return findbinfile(name,"tex") end, true)
- register('find_map_file' , function(name) return findbinfile(name,"map") end, true)
- register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
- register('find_output_file' , function(name) return name end, true)
- register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true)
- register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
- register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
- register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
- register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
-
- register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
- register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
- register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
+ register('find_data_file' , function(name) return findbinfile(name,"tex") end, true)
+ register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true)
+ register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true)
+ register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true)
+ register('find_image_file' , function(name) return findbinfile(name,"tex") end, true)
+ register('find_map_file' , function(name) return findbinfile(name,"map") end, true)
+ register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
+ register('find_output_file' , function(name) return name end, true)
+ register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true)
+ register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
+ register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
+ register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
+ register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
+ register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, true)
+
+ register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
+ register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
+ register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
-- format
-- image
- register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
+ register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
-- output
- register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
- register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
- register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
+ register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
+ register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
+ register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
- register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
- register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true)
+ register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
+ register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true)
- register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true)
- register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true)
+ register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true)
+ register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true)
- -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true)
- -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true)
- -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true)
+ -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true)
+ -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true)
+ -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true)
+ -- register('read_cidmap_file' , function(file) return loadbinfile(file,"cidmap") end, true)
register('find_write_file' , function(id,name) return name end, true)
register('find_format_file' , function(name) return name end, true)
@@ -96,7 +95,7 @@ luatex.registerstopactions(function()
report_system("start used files")
logs.newline()
for i=1,#foundintrees do
- report_files("%4i: %s",i,sequenced(foundintrees[i]))
+ report_files("%4i: % T",i,foundintrees[i])
end
logs.newline()
report_system("stop used files")
@@ -113,6 +112,6 @@ statistics.register("resource resolver", function()
scandata.time,
scandata.shared,
#resolvers.instance.foundintrees,
- concat(scandata.paths," ")
+ #scandata.paths > 0 and concat(scandata.paths," ") or "<none>"
)
end)
diff --git a/Master/texmf-dist/tex/context/base/luat-fmt.lua b/Master/texmf-dist/tex/context/base/luat-fmt.lua
index cfb9a0bb776..20a4a8fcdf7 100644
--- a/Master/texmf-dist/tex/context/base/luat-fmt.lua
+++ b/Master/texmf-dist/tex/context/base/luat-fmt.lua
@@ -6,36 +6,38 @@ if not modules then modules = { } end modules ['luat-fmt'] = {
license = "see context related readme files"
}
-
local format = string.format
+local concat = table.concat
+local quoted = string.quoted
+local luasuffixes = utilities.lua.suffixes
local report_format = logs.reporter("resolvers","formats")
--- helper for mtxrun
-
-local quoted = string.quoted
-
local function primaryflags() -- not yet ok
local trackers = environment.argument("trackers")
local directives = environment.argument("directives")
- local flags = ""
+ local flags = { }
if trackers and trackers ~= "" then
- flags = flags .. "--trackers=" .. quoted(trackers)
+        flags[#flags+1] = "--trackers=" .. quoted(trackers)
end
if directives and directives ~= "" then
- flags = flags .. "--directives=" .. quoted(directives)
+        flags[#flags+1] = "--directives=" .. quoted(directives)
+ end
+ if environment.argument("jit") then
+        flags[#flags+1] = "--jiton"
end
- return flags
+ return concat(flags," ")
end
function environment.make_format(name)
+ local engine = environment.ownmain or "luatex"
-- change to format path (early as we need expanded paths)
- local olddir = lfs.currentdir()
- local path = caches.getwritablepath("formats") or "" -- maybe platform
+ local olddir = dir.current()
+ local path = caches.getwritablepath("formats",engine) or "" -- maybe platform
if path ~= "" then
lfs.chdir(path)
end
- report_format("format path: %s",lfs.currentdir())
+ report_format("using format path %a",dir.current())
-- check source file
local texsourcename = file.addsuffix(name,"mkiv")
local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
@@ -44,11 +46,11 @@ function environment.make_format(name)
fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
end
if fulltexsourcename == "" then
- report_format("no tex source file with name: %s (mkiv or tex)",name)
+ report_format("no tex source file with name %a (mkiv or tex)",name)
lfs.chdir(olddir)
return
else
- report_format("using tex source file: %s",fulltexsourcename)
+ report_format("using tex source file %a",fulltexsourcename)
end
local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
-- check specification
@@ -59,7 +61,7 @@ function environment.make_format(name)
fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
end
if fullspecificationname == "" then
- report_format("unknown stub specification: %s",specificationname)
+ report_format("unknown stub specification %a",specificationname)
lfs.chdir(olddir)
return
end
@@ -70,39 +72,38 @@ function environment.make_format(name)
if type(usedlualibs) == "string" then
usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
elseif type(usedlualibs) == "table" then
- report_format("using stub specification: %s",fullspecificationname)
+ report_format("using stub specification %a",fullspecificationname)
local texbasename = file.basename(name)
- local luastubname = file.addsuffix(texbasename,"lua")
- local lucstubname = file.addsuffix(texbasename,"luc")
+ local luastubname = file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname = file.addsuffix(texbasename,luasuffixes.luc)
-- pack libraries in stub
- report_format("creating initialization file: %s",luastubname)
+ report_format("creating initialization file %a",luastubname)
utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
-- compile stub file (does not save that much as we don't use this stub at startup any more)
- local strip = resolvers.booleanvariable("LUACSTRIP", true)
if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file: %s",lucstubname)
+ report_format("using compiled initialization file %a",lucstubname)
usedluastub = lucstubname
else
- report_format("using uncompiled initialization file: %s",luastubname)
+ report_format("using uncompiled initialization file %a",luastubname)
usedluastub = luastubname
end
else
- report_format("invalid stub specification: %s",fullspecificationname)
+ report_format("invalid stub specification %a",fullspecificationname)
lfs.chdir(olddir)
return
end
-- generate format
- local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
os.spawn(command)
-- remove related mem files
local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
- -- report_format("removing related mplib format with pattern '%s'", pattern)
+ -- report_format("removing related mplib format with pattern %a", pattern)
local mp = dir.glob(pattern)
if mp then
for i=1,#mp do
local name = mp[i]
- report_format("removing related mplib format %s", file.basename(name))
+ report_format("removing related mplib format %a", file.basename(name))
os.remove(name)
end
end
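For reference, a hedged example of the command generated above on unix for the standard English format (paths shortened, empty primaryflags):

    -- resulting command, roughly:
    --   luatex --ini  --lua="cont-en.luc" "cont-en.mkiv" \\dump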
@@ -110,16 +111,16 @@ function environment.make_format(name)
end
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
if name and name ~= "" then
+ local engine = environment.ownmain or "luatex"
local barename = file.removesuffix(name)
- local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
if fmtname == "" then
fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
end
fmtname = resolvers.cleanpath(fmtname)
if fmtname == "" then
- report_format("no format with name: %s",name)
+ report_format("no format with name %a",name)
else
local barename = file.removesuffix(name) -- expanded name
local luaname = file.addsuffix(barename,"luc")
@@ -127,10 +128,10 @@ function environment.run_format(name,data,more)
luaname = file.addsuffix(barename,"lua")
end
if not lfs.isfile(luaname) then
- report_format("using format name: %s",fmtname)
- report_format("no luc/lua with name: %s",barename)
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
else
- local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
+ local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
report_format("running command: %s",command)
os.spawn(command)
end
diff --git a/Master/texmf-dist/tex/context/base/luat-ini.lua b/Master/texmf-dist/tex/context/base/luat-ini.lua
index 204cc7bd175..587214b93a1 100644
--- a/Master/texmf-dist/tex/context/base/luat-ini.lua
+++ b/Master/texmf-dist/tex/context/base/luat-ini.lua
@@ -6,16 +6,16 @@ if not modules then modules = { } end modules ['luat-ini'] = {
license = "see context related readme files"
}
--- rather experimental down here ... will change with lua 5.2 --
+-- rather experimental down here ... adapted to lua 5.2 ... but still
+-- experimental
-local debug = require "debug"
-local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
-local next, setfenv = next, setfenv or debug.setfenv
+local debug = require("debug")
-local mark = utilities.storage.mark
+local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
+local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable
--[[ldx--
-<p>We cannot load anything yet. However what we will do us reserve a fewtables.
+<p>We cannot load anything yet. However, what we will do is reserve a few tables.
These can be used for runtime user data or third party modules and will not be
cluttered by macro package code.</p>
--ldx]]--
@@ -26,14 +26,8 @@ moduledata = moduledata or { } -- only for development team
documentdata = documentdata or { } -- for users (e.g. raw data)
parametersets = parametersets or { } -- experimental for team
-document = document or { } -- only for context itself
-
---[[ldx--
-<p>These can be used/set by the caller program; <t>mtx-context.lua</t> does it.</p>
---ldx]]--
-
-document.arguments = mark(document.arguments or { })
-document.files = mark(document.files or { })
+table.setmetatableindex(moduledata,table.autokey)
+table.setmetatableindex(thirddata, table.autokey)
--[[ldx--
<p>Please create a namespace within these tables before using them!</p>
@@ -45,145 +39,168 @@ thirddata['tricks' ] = { }
--ldx]]--
--[[ldx--
-<p>We could cook up a readonly model for global tables but it
-makes more sense to invite users to use one of the predefined
-namespaces. One can redefine the protector. After all, it's
-just a lightweight suggestive system, not a watertight
-one.</p>
+<p>We could cook up a readonly model for global tables but it makes more sense
+to invite users to use one of the predefined namespaces. One can redefine the
+protector. After all, it's just a lightweight suggestive system, not a
+watertight one.</p>
--ldx]]--
--- this will change when we move on to lua 5.2+
-
-local global = _G
-
+local global = _G
global.global = global
---~ rawset(global,"global",global)
local dummy = function() end
--- another approach is to freeze tables by using a metatable, this will be
--- implemented stepwise
-
-local protected = {
- -- global table
- global = global,
- -- user tables
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- -- reserved
- protect = dummy,
- unprotect = dummy,
- -- luatex
- tex = tex,
- -- lua
- string = string,
- table = table,
- lpeg = lpeg,
- math = math,
- io = io,
- --
- -- maybe other l-*, xml etc
-}
+--[[ldx--
+<p>Another approach is to freeze tables by using a metatable, this will be
+implemented stepwise.</p>
+--ldx]]--
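A minimal sketch of that stepwise freezing idea (plain Lua, not part of the patch):

    local function freeze(t)
        return setmetatable({ }, {
            __index    = t,
            __newindex = function() error("attempt to modify a frozen table") end,
        })
    end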
-- moduledata : no need for protection (only for developers)
-- isolateddata : full protection
-- userdata : protected
-- thirddata : protected
-userdata, thirddata = nil, nil
-
--- we could have a metatable that automaticaly creates a top level namespace
-
-if not setfenv then
- texio.write_nl("warning: we need to fix setfenv by using 'load in' or '_ENV'")
-end
-
-local function protect_full(name)
- local t = { }
- for k, v in next, protected do
- t[k] = v
- end
- return t
-end
+--[[ldx--
+<p>We could have a metatable that automatically creates a top level namespace.</p>
+--ldx]]--
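Such an automatically creating namespace could look as follows (a sketch; table.autokey, used earlier in this file for moduledata and thirddata, plays that role):

    setmetatable(userdata, { __index = function(t,k)
        local v = { }
        rawset(t,k,v)
        return v
    end })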
-local function protect_part(name)
---~ local t = global[name]
- local t = rawget(global,name)
- if not t then
- t = { }
+local luanames = lua.name -- luatex itself
+
+lua.numbers = lua.numbers or { } local numbers = lua.numbers
+lua.messages = lua.messages or { } local messages = lua.messages
+
+storage.register("lua/numbers", numbers, "lua.numbers" )
+storage.register("lua/messages", messages, "lua.messages")
+
+local setfenv = setfenv or debug.setfenv -- < 5.2
+
+if setfenv then
+
+ local protected = {
+ -- global table
+ global = global,
+ -- user tables
+ -- moduledata = moduledata,
+ userdata = userdata,
+ thirddata = thirddata,
+ documentdata = documentdata,
+ -- reserved
+ protect = dummy,
+ unprotect = dummy,
+ -- luatex
+ tex = tex,
+ -- lua
+ string = string,
+ table = table,
+ lpeg = lpeg,
+ math = math,
+ io = io,
+ file = file,
+ bit32 = bit32,
+ --
+ context = context,
+ }
+
+ local protect_full = function(name)
+ local t = { }
for k, v in next, protected do
t[k] = v
end
---~ global[name] = t
- rawset(global,name,t)
+ return t
end
- return t
-end
-function protect(name)
- if name == "isolateddata" then
- setfenv(2,protect_full(name))
- else
- setfenv(2,protect_part(name or "shareddata"))
+ local protect_part = function(name) -- adds
+ local t = rawget(global,name)
+ if not t then
+ t = { }
+ for k, v in next, protected do
+ t[k] = v
+ end
+ rawset(global,name,t)
+ end
+ return t
end
-end
-
-lua.numbers = { }
-lua.messages = { }
-function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #lua.messages + 1
- lua.messages[lnn] = message
- lua.numbers[name] = lnn
- end
- lua.name[lnn] = message
- context(lnn)
- -- initialize once
- if name ~= "isolateddata" then
- protect_full(name or "shareddata")
+ protect = function(name)
+ if name == "isolateddata" then
+ setfenv(2,protect_full(name))
+ else
+ setfenv(2,protect_part(name or "shareddata"))
+ end
end
-end
---~ function lua.checknames()
---~ lua.name[0] = "ctx"
---~ for k, v in next, lua.messages do
---~ lua.name[k] = v
---~ end
---~ end
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
+ -- initialize once
+ if name ~= "isolateddata" then
+ protect_full(name or "shareddata")
+ end
+ end
-storage.register("lua/numbers", lua.numbers, "lua.numbers")
-storage.register("lua/messages", lua.messages, "lua.messages")
+elseif libraries then -- assume >= 5.2
+
+ local shared
+
+ protect = function(name)
+ if not shared then
+ -- e.g. context is not yet known
+ local public = {
+ global = global,
+ -- moduledata = moduledata,
+ userdata = userdata,
+ thirddata = thirddata,
+ documentdata = documentdata,
+ protect = dummy,
+ unprotect = dummy,
+ context = context,
+ }
+ --
+ for k, v in next, libraries.builtin do public[k] = v end
+ for k, v in next, libraries.functions do public[k] = v end
+ for k, v in next, libraries.obsolete do public[k] = nil end
+ --
+ shared = { __index = public }
+ protect = function(name)
+ local t = global[name] or { }
+ setmetatable(t,shared) -- set each time
+ return t
+ end
+ end
+ return protect(name)
+ end
---~ local arguments, files = document.arguments, document.files -- set later
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
+ end
-function document.setargument(key,value)
- document.arguments[key] = value
-end
+else
-function document.setdefaultargument(key,default)
- local v = document.arguments[key]
- if v == nil or v == "" then
- document.arguments[key] = default
- end
-end
+ protect = dummy
-function document.getargument(key,default)
- local v = document.arguments[key]
- if type(v) == "boolean" then
- v = (v and "yes") or "no"
- document.arguments[key] = v
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
end
- context(v or default or "")
-end
-function document.setfilename(i,name)
- document.files[tonumber(i)] = name
end
-function document.getfilename(i)
- context(document.files[i] or "")
-end
diff --git a/Master/texmf-dist/tex/context/base/luat-ini.mkiv b/Master/texmf-dist/tex/context/base/luat-ini.mkiv
index 46b2d1f7ffd..a3a5903119c 100644
--- a/Master/texmf-dist/tex/context/base/luat-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/luat-ini.mkiv
@@ -27,29 +27,29 @@
%D A few more goodies:
-\unexpanded\def\startlua {\luat_start_lua } \let\stoplua \relax % tex catcodes
-\unexpanded\def\startluacode{\luat_start_lua_code} \let\stopluacode\relax % lua catcodes
+\let\stoplua \relax % tex catcodes
+\let\stopluacode\relax % lua catcodes
% It might make sense to have a \type {\directelua} so that we can avoid
% the \type {\normalexpanded} around \type {\directlua}. Something to discuss
% in the team.
-\def\luat_start_lua
+\unexpanded\def\startlua % \stoplua
{\begingroup
\obeylualines
\luat_start_lua_indeed}
\def\luat_start_lua_indeed#1\stoplua
- {\normalexpanded{\endgroup\noexpand\directlua\zerocount{#1}}}
+ {\normalexpanded{\endgroup\noexpand\directlua{#1}}} % \zerocount is default
-\def\luat_start_lua_code
+\unexpanded\def\startluacode % \stopluacode
{\begingroup
\obeylualines
\obeyluatokens
\luat_start_lua_code_indeed}
\def\luat_start_lua_code_indeed#1\stopluacode
- {\normalexpanded{\endgroup\noexpand\directlua\zerocount{#1}}}
+ {\normalexpanded{\endgroup\noexpand\directlua{#1}}} % \zerocount is default
%D Some delayed definitions:
@@ -62,42 +62,46 @@
\newtoks\everyluacode
-\edef\lualetterbackslash{\string\\}
-\edef\lualetterbar {\string\|} \edef\lualetterdash {\string\-}
-\edef\lualetterlparent {\string\(} \edef\lualetterrparent {\string\)}
-\edef\lualetterlbrace {\string\{} \edef\lualetterrbrace {\string\}}
-\edef\lualettersquote {\string\'} \edef\lualetterdquote {\string\"}
-\edef\lualettern {\string\n} \edef\lualetterr {\string\r}
-\edef\lualetterf {\string\f} \edef\lualettert {\string\t}
-\edef\lualettera {\string\a} \edef\lualetterb {\string\b}
-\edef\lualetterv {\string\v} \edef\lualetters {\string\s}
-\edef\lualetterone {\string\1} \edef\lualettertwo {\string\2}
-\edef\lualetterthree {\string\3} \edef\lualetterfour {\string\4}
-\edef\lualetterfive {\string\5} \edef\lualettersix {\string\6}
-\edef\lualetterseven {\string\7} \edef\lualettereight {\string\8}
-\edef\lualetternine {\string\9} \edef\lualetterzero {\string\0}
+\edef\lua_letter_backslash{\string\\}
+\edef\lua_letter_bar {\string\|} \edef\lua_letter_dash {\string\-}
+\edef\lua_letter_lparent {\string\(} \edef\lua_letter_rparent {\string\)}
+\edef\lua_letter_lbrace {\string\{} \edef\lua_letter_rbrace {\string\}}
+\edef\lua_letter_squote {\string\'} \edef\lua_letter_dquote {\string\"}
+\edef\lua_letter_n {\string\n} \edef\lua_letter_r {\string\r}
+\edef\lua_letter_f {\string\f} \edef\lua_letter_t {\string\t}
+\edef\lua_letter_a {\string\a} \edef\lua_letter_b {\string\b}
+\edef\lua_letter_v {\string\v} \edef\lua_letter_s {\string\s}
+\edef\lua_letter_one {\string\1} \edef\lua_letter_two {\string\2}
+\edef\lua_letter_three {\string\3} \edef\lua_letter_four {\string\4}
+\edef\lua_letter_five {\string\5} \edef\lua_letter_six {\string\6}
+\edef\lua_letter_seven {\string\7} \edef\lua_letter_eight {\string\8}
+\edef\lua_letter_nine {\string\9} \edef\lua_letter_zero {\string\0}
\appendtoks
- \let\\\lualetterbackslash
- \let\|\lualetterbar \let\-\lualetterdash
- \let\(\lualetterlparent \let\)\lualetterrparent
- \let\{\lualetterlbrace \let\}\lualetterrbrace
- \let\'\lualettersquote \let\"\lualetterdquote
- \let\n\lualettern \let\r\lualetterr
- \let\f\lualetterf \let\t\lualettert
- \let\a\lualettera \let\b\lualetterb
- \let\v\lualetterv \let\s\lualetters
- \let\1\lualetterone \let\2\lualettertwo
- \let\3\lualetterthree \let\4\lualetterfour
- \let\5\lualetterfive \let\6\lualettersix
- \let\7\lualetterseven \let\8\lualettereight
- \let\9\lualetternine \let\0\lualetterzero
+ \let\\\lua_letter_backslash
+ \let\|\lua_letter_bar \let\-\lua_letter_dash
+ \let\(\lua_letter_lparent \let\)\lua_letter_rparent
+ \let\{\lua_letter_lbrace \let\}\lua_letter_rbrace
+ \let\'\lua_letter_squote \let\"\lua_letter_dquote
+ \let\n\lua_letter_n \let\r\lua_letter_r
+ \let\f\lua_letter_f \let\t\lua_letter_t
+ \let\a\lua_letter_a \let\b\lua_letter_b
+ \let\v\lua_letter_v \let\s\lua_letter_s
+ \let\1\lua_letter_one \let\2\lua_letter_two
+ \let\3\lua_letter_three \let\4\lua_letter_four
+ \let\5\lua_letter_five \let\6\lua_letter_six
+ \let\7\lua_letter_seven \let\8\lua_letter_eight
+ \let\9\lua_letter_nine \let\0\lua_letter_zero
\to \everyluacode
-\def\obeyluatokens
+\unexpanded\def\obeyluatokens
{\setcatcodetable\luacatcodes
\the\everyluacode}
+
+\edef\luamajorversion{\ctxwrite{_MAJORVERSION}}
+\edef\luaminorversion{\ctxwrite{_MINORVERSION}}
+
%D \macros
%D {definenamedlua}
%D
@@ -108,19 +112,22 @@
%D Beware: because \type {\expanded} is a convert command, the error
%D message will show \type{<inserted text>} as part of the message.
-\def\luat_start_named_lua_code#1%
+\installcorenamespace{luacode}
+
+\unexpanded\def\luat_start_named_lua_code#1%
{\begingroup
\obeylualines
\obeyluatokens
- \csname\??lu:c:#1\endcsname}
+ \csname\??luacode#1\endcsname}
-\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet
- {\ifcsname\??lu:c:#1\endcsname \else
+\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet / we could use numbers instead (more efficient)
+ {\ifcsname\??luacode#1\endcsname \else
\scratchcounter\ctxlua{lua.registername("#1","#3")}%
- \normalexpanded{\edef\csname\??lu:c:#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
- {\endgroup\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}%
- \expandafter\def \csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
- \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}%
+ \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
+ %{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
+ {\noexpand\normalexpanded{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}}%
+ \expandafter\edef\csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
+ \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
\fi}
%D We predefine a few.
@@ -140,12 +147,25 @@
%D
%D \startusercode
%D global.context("USER 1")
+%D context.par()
%D context("USER 2")
+%D context.par()
%D if characters then
-%D context("ACCESS")
+%D context("ACCESS directly")
+%D elseif global.characters then
+%D context("ACCESS via global")
+%D else
+%D context("NO ACCESS at all")
+%D end
+%D context.par()
+%D if bogus then
+%D context("ACCESS directly")
+%D elseif global.bogus then
+%D context("ACCESS via global")
%D else
-%D context("NO ACCESS")
+%D context("NO ACCESS at all")
%D end
+%D context.par()
%D \stopusercode
%D \stopbuffer
%D
@@ -244,6 +264,6 @@
% use \setcatcodetable\luacatcodes instead of \obeyluatokens now.
\def\luat_lua_code
- {\normalexpanded{\endgroup\noexpand\directlua\zerocount\expandafter{\the\scratchtoks}}}
+ {\normalexpanded{\endgroup\noexpand\directlua\expandafter{\the\scratchtoks}}} % \zerocount is default
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/luat-iop.lua b/Master/texmf-dist/tex/context/base/luat-iop.lua
index 5512b258edb..52f14683e76 100644
--- a/Master/texmf-dist/tex/context/base/luat-iop.lua
+++ b/Master/texmf-dist/tex/context/base/luat-iop.lua
@@ -47,7 +47,7 @@ local function protect(ruleset,proc)
elseif match(ruleset,name) then
return proc(name,...)
else
- report_limiter("no access permitted: %s",name)
+ report_limiter("no access permitted for %a",name)
return nil, name .. ": no access permitted"
end
end
@@ -126,7 +126,7 @@ local function i_register(v)
local protect = i_limiter.protect
i_opener = protect(i_opener)
i_limited = true
- report_limiter("input mode: %s",v)
+ report_limiter("input mode set to %a",v)
end
end
end
@@ -138,7 +138,7 @@ local function o_register(v)
local protect = o_limiter.protect
o_opener = protect(o_opener)
o_limited = true
- report_limiter("output mode: %s",v)
+ report_limiter("output mode set to %a",v)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/luat-lib.mkiv b/Master/texmf-dist/tex/context/base/luat-lib.mkiv
index 5a53b7ea231..3f72e780e8b 100644
--- a/Master/texmf-dist/tex/context/base/luat-lib.mkiv
+++ b/Master/texmf-dist/tex/context/base/luat-lib.mkiv
@@ -15,20 +15,26 @@
\registerctxluafile{util-str}{1.001}
\registerctxluafile{util-tab}{1.001}
-\registerctxluafile{util-pck}{1.001}
\registerctxluafile{util-sto}{1.001} % could also be done in trac-deb.mkiv
+\registerctxluafile{util-pck}{1.001}
\registerctxluafile{util-seq}{1.001}
%registerctxluafile{util-mrg}{1.001} % not needed in context itself, only mtxrun
-\registerctxluafile{util-lua}{1.001}
+%registerctxluafile{util-lua}{1.001} % moved
\registerctxluafile{util-prs}{1.001}
\registerctxluafile{util-fmt}{1.001}
-\registerctxluafile{util-deb}{1.001} % could also be done in trac-deb.mkiv
\registerctxluafile{util-dim}{1.001}
-\registerctxluafile{trac-inf}{1.001}
+%registerctxluafile{trac-inf}{1.001}
\registerctxluafile{trac-set}{1.001}
\registerctxluafile{trac-log}{1.001}
+\registerctxluafile{trac-inf}{1.001}
%registerctxluafile{trac-pro}{1.001}
+\registerctxluafile{util-lua}{1.001}
+\registerctxluafile{util-deb}{1.001} % could also be done in trac-deb.mkiv
+
+\registerctxluafile{util-tpl}{1.001} % needs tracker
+
+\registerctxluafile{util-sta}{1.001}
\registerctxluafile{data-ini}{1.001}
\registerctxluafile{data-exp}{1.001}
@@ -54,6 +60,8 @@
\registerctxluafile{data-use}{1.001}
\registerctxluafile{data-aux}{1.001}
+\registerctxluafile{util-lib}{1.001}
+
\registerctxluafile{luat-cbk}{1.001}
\registerctxluafile{luat-run}{1.001}
\registerctxluafile{luat-fio}{1.001}
@@ -61,11 +69,14 @@
\registerctxluafile{luat-lua}{1.001}
\registerctxluafile{luat-sto}{1.001}
\registerctxluafile{luat-ini}{1.001}
+\registerctxluafile{util-env}{1.001}
\registerctxluafile{luat-env}{1.001}
\registerctxluafile{luat-exe}{1.001}
\registerctxluafile{luat-iop}{1.001}
\registerctxluafile{luat-bwc}{1.001}
+\registerctxluafile{trac-lmx}{1.001} % might become l-lmx or luat-lmx
\registerctxluafile{luat-mac}{1.001}
+%registerctxluafile{luat-prp}{1.001} % for the moment of not much use
\registerctxluafile{lxml-tab}{1.001}
\registerctxluafile{lxml-lpt}{1.001}
diff --git a/Master/texmf-dist/tex/context/base/luat-lua.lua b/Master/texmf-dist/tex/context/base/luat-lua.lua
index d319508f004..972004e887d 100644
--- a/Master/texmf-dist/tex/context/base/luat-lua.lua
+++ b/Master/texmf-dist/tex/context/base/luat-lua.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['luat-lua'] = {
license = "see context related readme files"
}
-local concat = table.concat
-
if lua then do
local delayed = { }
@@ -25,23 +23,23 @@ if lua then do
end
function lua.flush(...)
- context.directlua("lua.flushdelayed(%s)",concat({...},','))
+ context.directlua("lua.flushdelayed(%,t)",{...})
end
end end
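The %,t formatter presumably expands the table as comma separated values, matching the old concat, so (as a sketch):

    -- lua.flush(box.width,box.height,box.depth) then produces something like
    -- \directlua{lua.flushdelayed(4736286,569075,163840)}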
---~ See mk.pdf for an explanation of the following code:
---~
---~ function test(n)
---~ lua.delay(function(...)
---~ context("pi: %s %s %s",...)
---~ context.par()
---~ end)
---~ lua.delay(function(...)
---~ context("more pi: %s %s %s",...)
---~ context.par()
---~ end)
---~ context("\\setbox0=\\hbox{%s}",math.pi*n)
---~ local box = tex.box[0]
---~ lua.flush(box.width,box.height,box.depth)
---~ end
+-- See mk.pdf for an explanation of the following code:
+--
+-- function test(n)
+-- lua.delay(function(...)
+-- context("pi: %s %s %s",...)
+-- context.par()
+-- end)
+-- lua.delay(function(...)
+-- context("more pi: %s %s %s",...)
+-- context.par()
+-- end)
+-- context("\\setbox0=\\hbox{%s}",math.pi*n)
+-- local box = tex.box[0]
+-- lua.flush(box.width,box.height,box.depth)
+-- end
diff --git a/Master/texmf-dist/tex/context/base/luat-mac.lua b/Master/texmf-dist/tex/context/base/luat-mac.lua
index f8f87a25ae7..6a94012277b 100644
--- a/Master/texmf-dist/tex/context/base/luat-mac.lua
+++ b/Master/texmf-dist/tex/context/base/luat-mac.lua
@@ -20,6 +20,8 @@ local lpegmatch, patterns = lpeg.match, lpeg.patterns
local insert, remove = table.insert, table.remove
local rep, sub = string.rep, string.sub
local setmetatable = setmetatable
+local filesuffix = file.suffix
+local convertlmxstring = lmx and lmx.convertstring
local pushtarget, poptarget = logs.pushtarget, logs.poptarget
@@ -126,6 +128,8 @@ local function matcherror(str,pos)
report_macros("runaway definition at: %s",sub(str,pos-30,pos))
end
+local csname_endcsname = P("\\csname") * (identifier + (1 - P("\\endcsname")))^1
+
local grammar = { "converter",
texcode = pushlocal
* startcode
@@ -144,7 +148,8 @@ local grammar = { "converter",
definition = pushlocal
* definer
* escapedname
- * (declaration + furthercomment + commentline + (1-leftbrace))^0
+-- * (declaration + furthercomment + commentline + (1-leftbrace))^0
+ * (declaration + furthercomment + commentline + csname_endcsname + (1-leftbrace))^0
* V("braced")
* poplocal,
setcode = pushlocal
@@ -199,18 +204,96 @@ function macros.version(data)
return lpegmatch(checker,data)
end
+-- function macros.processmkvi(str,filename)
+-- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+-- local oldsize = #str
+-- str = lpegmatch(parser,str,1,true) or str
+-- pushtarget("log")
+-- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
+-- poptarget("log")
+-- end
+-- return str
+-- end
+--
+-- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
+
+-- the document variables hack is temporary
+
+local processors = { }
+
+function processors.mkvi(str,filename)
+ local oldsize = #str
+ str = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function processors.mkix(str,filename) -- we could intercept earlier so that caching works better
+ if not document then -- because now we hash the string as well as the
+ document = { }
+ end
+ if not document.variables then
+ document.variables = { }
+ end
+ local oldsize = #str
+ str = convertlmxstring(str,document.variables,false) or str
+ pushtarget("log")
+ report_macros("processed mkix file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function processors.mkxi(str,filename)
+ if not document then
+ document = { }
+ end
+ if not document.variables then
+ document.variables = { }
+ end
+ local oldsize = #str
+ str = convertlmxstring(str,document.variables,false) or str
+ str = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function macros.processmk(str,filename)
+ if filename then
+ local suffix = filesuffix(filename)
+ local processor = processors[suffix] or processors[lpegmatch(checker,str)]
+ if processor then
+ str = processor(str,filename)
+ end
+ end
+ return str
+end
+
function macros.processmkvi(str,filename)
- if (filename and file.suffix(filename) == "mkvi") or lpegmatch(checker,str) == "mkvi" then
- local result = lpegmatch(parser,str,1,true) or str
+ if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+ local oldsize = #str
+ str = lpegmatch(parser,str,1,true) or str
pushtarget("log")
- report_macros("processed file '%s', delta %s",filename,#str-#result)
+ report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
poptarget("log")
- return result
- else
- return str
end
+ return str
+end
+
+local sequencers = utilities.sequencers
+
+if sequencers then
+
+ sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmk")
+ sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
+
end
+-- bonus
+
if resolvers.schemes then
local function handler(protocol,name,cachename)
@@ -218,13 +301,13 @@ if resolvers.schemes then
local path = hashed.path
if path and path ~= "" then
local str = resolvers.loadtexfile(path)
- if file.suffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+ if filesuffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
-- already done automatically
io.savedata(cachename,str)
else
local result = lpegmatch(parser,str,1,true) or str
pushtarget("log")
- report_macros("processed scheme '%s', delta %s",filename,#str-#result)
+ report_macros("processed scheme %a, delta %s",filename,#str-#result)
poptarget("log")
io.savedata(cachename,result)
end
@@ -234,9 +317,6 @@ if resolvers.schemes then
resolvers.schemes.install('mkvi',handler,1) -- this will cache !
- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
- -- utilities.sequencers.disableaction(resolvers.openers.helpers.textfileactions,"resolvers.macros.processmkvi")
-
end
-- print(macros.preprocessed(
@@ -294,6 +374,22 @@ end
-- }
-- ]]))
--
+-- print(macros.preprocessed([[
+-- \unexpanded\def\start#tag#stoptag%
+-- {\initialize{#tag}%
+-- \normalexpanded
+-- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}%
+-- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}%
+-- \doifnextoptionalelse\yes\nop}
+-- ]]))
+--
+-- print(macros.preprocessed([[
+-- \normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
+-- {\def\currentinteractionmenu{#tag}%
+-- \expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname
+-- \setinteractionmenuparameter\c!menu{#content}}
+-- ]]))
+--
-- Just an experiment:
--
-- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid
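The hunk above adds a suffix-driven preprocessor dispatch: macros.processmk looks up processors.mkvi/mkix/mkxi by file suffix (or by the version checker) and applies the match before the file reaches TeX. Below is a minimal, self-contained sketch of that dispatch pattern; the filesuffix helper and the dummy converters are stand-ins for file.suffix, the lpeg parser and convertlmxstring, not the real code.

-- stand-ins: filesuffix replaces file.suffix, the converters replace the
-- real lpeg parser and convertlmxstring
local processors = { }

processors.mkvi = function(str, filename) return "% mkvi " .. str end
processors.mkix = function(str, filename) return "% mkix " .. str end

local function filesuffix(name)
    return name and name:match("%.(%w+)$") or ""
end

local function processmk(str, filename)
    local processor = filename and processors[filesuffix(filename)]
    if processor then
        str = processor(str, filename) -- delegate to the matching converter
    end
    return str
end

print(processmk("\\def\\test{ok}", "demo.mkvi")) -- prefixed by the mkvi stub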
diff --git a/Master/texmf-dist/tex/context/base/luat-run.lua b/Master/texmf-dist/tex/context/base/luat-run.lua
index ce25d1f55a3..eaede103014 100644
--- a/Master/texmf-dist/tex/context/base/luat-run.lua
+++ b/Master/texmf-dist/tex/context/base/luat-run.lua
@@ -9,10 +9,15 @@ if not modules then modules = { } end modules ['luat-run'] = {
local format = string.format
local insert = table.insert
+-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
+
local trace_lua_dump = false trackers.register("system.dump", function(v) trace_lua_dump = v end)
local trace_temp_files = false trackers.register("system.tempfiles", function(v) trace_temp_files = v end)
+local trace_job_status = true trackers.register("system.jobstatus", function(v) trace_job_status = v end)
+local trace_tex_status = false trackers.register("system.texstatus", function(v) trace_tex_status = v end)
local report_lua = logs.reporter("system","lua")
+local report_tex = logs.reporter("system","status")
local report_tempfiles = logs.reporter("resolvers","tempfiles")
luatex = luatex or { }
@@ -24,9 +29,6 @@ local stopactions = { }
function luatex.registerstartactions(...) insert(startactions, ...) end
function luatex.registerstopactions (...) insert(stopactions, ...) end
-luatex.showtexstat = luatex.showtexstat or function() end
-luatex.showjobstat = luatex.showjobstat or statistics.showjobstat
-
local function start_run()
if logs.start_run then
logs.start_run()
@@ -40,12 +42,12 @@ local function stop_run()
for i=1,#stopactions do
stopactions[i]()
end
- if luatex.showjobstat then
- statistics.show(logs.report_job_stat)
+ if trace_job_status then
+ statistics.show()
end
- if luatex.showtexstat then
- for k,v in next, status.list() do
- logs.report_tex_stat(k,v)
+ if trace_tex_status then
+ for k, v in table.sortedhash(status.list()) do
+ report_tex("%S=%S",k,v)
end
end
if logs.stop_run then
@@ -67,15 +69,14 @@ end
local function report_output_log()
end
---~ local function show_open()
---~ end
+-- local function show_open()
+-- end
---~ local function show_close()
---~ end
+-- local function show_close()
+-- end
local function pre_dump_actions()
lua.finalize(trace_lua_dump and report_lua or nil)
- statistics.reportstorage("log")
-- statistics.savefmtstatus("\jobname","\contextversion","context.tex")
end
@@ -84,8 +85,8 @@ end
callbacks.register('start_run', start_run, "actions performed at the beginning of a run")
callbacks.register('stop_run', stop_run, "actions performed at the end of a run")
---~ callbacks.register('show_open', show_open, "actions performed when opening a file")
---~ callbacks.register('show_close', show_close, "actions performed when closing a file")
+---------.register('show_open', show_open, "actions performed when opening a file")
+---------.register('show_close', show_close, "actions performed when closing a file")
callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages")
callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file")
@@ -107,7 +108,7 @@ function luatex.registertempfile(name,extrasuffix)
name = name .. ".mkiv-tmp" -- maybe just .tmp
end
if trace_temp_files and not tempfiles[name] then
- report_tempfiles("registering temporary file: %s",name)
+ report_tempfiles("registering temporary file %a",name)
end
tempfiles[name] = true
return name
@@ -116,7 +117,7 @@ end
function luatex.cleanuptempfiles()
for name, _ in next, tempfiles do
if trace_temp_files then
- report_tempfiles("removing temporary file: %s",name)
+ report_tempfiles("removing temporary file %a",name)
end
os.remove(name)
end
@@ -138,8 +139,9 @@ directives.register("system.synctex", function(v)
else
report_system("synctex functionality is disabled!")
end
+ synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false
-- currently this is bugged:
- tex.synctex = synctex and 1 or 0
+ tex.synctex = synctex
-- so for the moment we need:
context.normalsynctex()
if synctex then
@@ -150,7 +152,7 @@ directives.register("system.synctex", function(v)
end)
statistics.register("synctex tracing",function()
- if synctex or tex.synctex > 0 then
+ if synctex or tex.synctex ~= 0 then
return "synctex has been enabled (extra log file generated)"
end
end)
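The luat-run changes replace the ad hoc luatex.showjobstat/showtexstat hooks with registered trackers (system.jobstatus, system.texstatus) that stop_run consults. A rough sketch of that tracker pattern follows, with a simplified stand-in for trackers.register; the real registry is considerably richer.

-- assumption: a much simplified stand-in for trackers.register / enabling
local setters = { }

local function register(name, setter)
    setters[name] = setter
end

local function enable(name, value)
    local setter = setters[name]
    if setter then setter(value) end
end

local trace_job_status = true   register("system.jobstatus", function(v) trace_job_status = v end)
local trace_tex_status = false  register("system.texstatus", function(v) trace_tex_status = v end)

enable("system.texstatus", true)
print(trace_job_status, trace_tex_status) -- true  true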
diff --git a/Master/texmf-dist/tex/context/base/luat-soc.lua b/Master/texmf-dist/tex/context/base/luat-soc.lua
index 1095ed08779..9342a4b3375 100644
--- a/Master/texmf-dist/tex/context/base/luat-soc.lua
+++ b/Master/texmf-dist/tex/context/base/luat-soc.lua
@@ -1,11 +1,11 @@
-- This is just a loader. The package handler knows about the TEX tree.
---~ require "luatex/lua/socket.lua"
---~ require "luatex/lua/ltn12.lua"
---~ require "luatex/lua/mime.lua"
---~ require "luatex/lua/socket/http.lua"
---~ require "luatex/lua/socket/url.lua"
---~ require "luatex/lua/socket/tp.lua"
---~ require "luatex/lua/socket/ftp.lua"
+-- require "luatex/lua/socket.lua"
+-- require "luatex/lua/ltn12.lua"
+-- require "luatex/lua/mime.lua"
+-- require "luatex/lua/socket/http.lua"
+-- require "luatex/lua/socket/url.lua"
+-- require "luatex/lua/socket/tp.lua"
+-- require "luatex/lua/socket/ftp.lua"
--- "luatex/lua/socket/smtp.lua"
+-- "luatex/lua/socket/smtp.lua"
diff --git a/Master/texmf-dist/tex/context/base/luat-sto.lua b/Master/texmf-dist/tex/context/base/luat-sto.lua
index 30bb7d5bb9b..7a11b7f5e68 100644
--- a/Master/texmf-dist/tex/context/base/luat-sto.lua
+++ b/Master/texmf-dist/tex/context/base/luat-sto.lua
@@ -9,10 +9,12 @@ if not modules then modules = { } end modules ['luat-sto'] = {
-- we could nil some function in the productionrun
local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage
-local gmatch, format, write_nl = string.gmatch, string.format, texio.write_nl
+local gmatch, format = string.gmatch, string.format
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
local bytecode = lua.bytecode
+local strippedloadstring = utilities.lua.strippedloadstring
+local trace_storage = false
local report_storage = logs.reporter("system","storage")
storage = storage or { }
@@ -21,9 +23,6 @@ local storage = storage
local data = { }
storage.data = data
-local evaluators = { }
-storage.evaluators = evaluators
-
storage.min = 0 -- 500
storage.max = storage.min - 1
storage.noftables = storage.noftables or 0
@@ -32,6 +31,9 @@ storage.nofmodules = storage.nofmodules or 0
storage.mark = utilities.storage.mark
storage.allocate = utilities.storage.allocate
storage.marked = utilities.storage.marked
+storage.strip = false
+
+directives.register("system.compile.strip", function(v) storage.strip = v end)
function storage.register(...)
local t = { ... }
@@ -39,19 +41,21 @@ function storage.register(...)
if d then
storage.mark(d)
else
- report_storage("fatal error: invalid storage '%s'",t[1])
+ report_storage("fatal error: invalid storage %a",t[1])
os.exit()
end
data[#data+1] = t
return t
end
+local n = 0
local function dump()
local max = storage.max
for i=1,#data do
local d = data[i]
local message, original, target = d[1], d[2] ,d[3]
local c, code, name = 0, { }, nil
+ -- we have a nice definer for this
for str in gmatch(target,"([^%.]+)") do
if name then
name = name .. "." .. str
@@ -62,11 +66,14 @@ local function dump()
end
max = max + 1
if trace_storage then
- report_storage('saving %s in slot %s',message,max)
- c = c + 1 ; code[c] = format("report_storage('restoring %s from slot %s')",message,max)
+ c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
end
c = c + 1 ; code[c] = serialize(original,name)
- bytecode[max] = loadstring(concat(code,"\n"))
+ if trace_storage then
+ report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ end
+ -- we don't need tracing in such tables
+ bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
collectgarbage("step")
end
storage.max = max
@@ -90,58 +97,67 @@ function lua.collectgarbage(threshold)
end
end
--- we also need to count at generation time (nicer for message)
-
---~ if lua.bytecode then -- from 0 upwards
---~ local i, b = storage.min, lua.bytecode
---~ while b[i] do
---~ storage.noftables = i
---~ b[i]()
---~ b[i] = nil
---~ i = i + 1
---~ end
---~ end
+-- -- we also need to count at generation time (nicer for message)
+--
+-- if lua.bytecode then -- from 0 upwards
+-- local i, b = storage.min, lua.bytecode
+-- while b[i] do
+-- storage.noftables = i
+-- b[i]()
+-- b[i] = nil
+-- i = i + 1
+-- end
+-- end
statistics.register("stored bytecode data", function()
- local modules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
- local dumps = (storage.noftables > 0 and storage.noftables) or storage.max-storage.min + 1
- return format("%s modules, %s tables, %s chunks",modules,dumps,modules+dumps)
+ local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
+ local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1
+ local tofmodules = storage.tofmodules or 0
+ local tofdumps = storage.toftables or 0
+ if environment.initex then
+ local luautilities = utilities.lua
+ local nofstrippedbytes = luautilities.nofstrippedbytes
+ local nofstrippedchunks = luautilities.nofstrippedchunks
+ if nofstrippedbytes > 0 then
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ nofstrippedchunks,
+ nofstrippedbytes
+ )
+ elseif nofstrippedchunks > 0 then
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ nofstrippedchunks
+ )
+ else
+ return format("%s modules, %s tables, %s chunks",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps
+ )
+ end
+ else
+ return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
+ nofmodules, tofmodules,
+ nofdumps, tofdumps,
+ nofmodules + nofdumps, tofmodules + tofdumps
+ )
+ end
end)
if lua.bytedata then
storage.register("lua/bytedata",lua.bytedata,"lua.bytedata")
end
-function statistics.reportstorage(whereto)
- whereto = whereto or "term and log"
- write_nl(whereto," ","stored tables:"," ")
- for k,v in sortedhash(storage.data) do
- write_nl(whereto,format("%03i %s",k,v[1]))
- end
- write_nl(whereto," ","stored modules:"," ")
- for k,v in sortedhash(lua.bytedata) do
- write_nl(whereto,format("%03i %s %s",k,v[2],v[1]))
- end
- write_nl(whereto," ","stored attributes:"," ")
- for k,v in sortedhash(attributes.names) do
- write_nl(whereto,format("%03i %s",k,v))
- end
- write_nl(whereto," ","stored catcodetables:"," ")
- for k,v in sortedhash(catcodes.names) do
- write_nl(whereto,format("%03i %s",k,concat(v," ")))
- end
- write_nl(whereto," ","used corenamespaces:"," ")
- for k,v in sortedhash(interfaces.corenamespaces) do
- write_nl(whereto,format("%03i %s",k,v))
- end
- write_nl(whereto," ")
-end
-
-storage.shared = storage.shared or { }
-
-- Because the storage mechanism assumes tables, we define a table for storing
-- (non table) values.
+storage.shared = storage.shared or { }
+
storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
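In luat-sto the dump routine now serializes each registered table with strippedloadstring, controlled by the new system.compile.strip directive. The following is only a schematic reduction of the register/dump pairing: table.serialize, the bytecode array and the stripping are ConTeXt/LuaTeX facilities, so string.format, load() and a plain table stand in for them here.

-- schematic reduction, not the real storage module
local data, bytecode = { }, { }

local function register(message, original, target)
    data[#data+1] = { message, original, target }
end

local function dump()
    for i = 1, #data do
        local original, target = data[i][2], data[i][3]
        -- build a chunk that recreates the table under its target name
        local code = string.format("%s = { %q }", target, original[1] or "")
        bytecode[i] = load(code)
    end
end

register("demo table", { "hello" }, "demo")
dump()
bytecode[1]()      -- executing the chunk restores the table
print(demo[1])     -- hello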
diff --git a/Master/texmf-dist/tex/context/base/lxml-aux.lua b/Master/texmf-dist/tex/context/base/lxml-aux.lua
index be12659ba10..0fffe261a00 100644
--- a/Master/texmf-dist/tex/context/base/lxml-aux.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-aux.lua
@@ -26,7 +26,7 @@ local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.for
local utfbyte = utf.byte
local function report(what,pattern,c,e)
- report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
@@ -766,3 +766,46 @@ function xml.separate(x,pattern)
end
return x
end
+
+--
+
+local helpers = xml.helpers or { }
+xml.helpers = helpers
+
+local function normal(e,action)
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local str = edt[i]
+ if type(str) == "string" and str ~= "" then
+ edt[i] = action(str)
+ end
+ end
+ end
+end
+
+local function recurse(e,action)
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local str = edt[i]
+ if type(str) ~= "string" then
+ recurse(str,action,recursive)
+ elseif str ~= "" then
+ edt[i] = action(str)
+ end
+ end
+ end
+end
+
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
+end
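The new xml.helpers.recursetext walks collected elements and applies an action to their string children, optionally recursing into nested elements. Here is a toy version of the recursive walker on a hand-built tree; real xml nodes carry ns, tg, at and more, but only dt matters for the walk.

-- toy tree walker mirroring the recurse() above
local function recurse(e, action)
    local edt = e.dt
    if edt then
        for i = 1, #edt do
            local str = edt[i]
            if type(str) ~= "string" then
                recurse(str, action)      -- descend into child elements
            elseif str ~= "" then
                edt[i] = action(str)      -- rewrite the text node in place
            end
        end
    end
end

local tree = { dt = { "hello", { dt = { "world" } } } }
recurse(tree, string.upper)
print(tree.dt[1], tree.dt[2].dt[1])       -- HELLO  WORLD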
diff --git a/Master/texmf-dist/tex/context/base/lxml-css.lua b/Master/texmf-dist/tex/context/base/lxml-css.lua
index 112a5e75de0..c5a85c2bd6f 100644
--- a/Master/texmf-dist/tex/context/base/lxml-css.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-css.lua
@@ -24,22 +24,49 @@ local cmf = 1/dimenfactors.cm
local mmf = 1/dimenfactors.mm
local inf = 1/dimenfactors["in"]
+local percentage, exheight, emwidth, pixels
+
+if tex then
+
+ local exheights = fonts.hashes.exheights
+ local emwidths = fonts.hashes.emwidths
+
+ percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end
+ exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end
+ emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end
+ pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end
+
+else
+
+ local function generic(s,unit) return tonumber(s) * unit end
+
+ percentage = generic
+ exheight = generic
+ emwidth = generic
+ pixels = generic
+
+end
+
local validdimen = Cg(lpegpatterns.number,'a') * (
- Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end
- + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end
- + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end
- + Cb('a') * P("in") / function(s) return tonumber(s) * inf end
- + Cb('a') * P("px") * Carg(1) / function(s,pxf) return tonumber(s) * pxf end
- + Cb('a') * P("%") * Carg(2) / function(s,pcf) return tonumber(s) * pcf end
- + Cb('a') * P("ex") * Carg(3) / function(s,exf) return tonumber(s) * exf end
- + Cb('a') * P("em") * Carg(4) / function(s,emf) return tonumber(s) * emf end
- + Cb('a') * Carg(1) / function(s,pxf) return tonumber(s) * pxf end
+ Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end
+ + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end
+ + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end
+ + Cb('a') * P("in") / function(s) return tonumber(s) * inf end
+ + Cb('a') * P("px") * Carg(1) / pixels
+ + Cb('a') * P("%") * Carg(2) / percentage
+ + Cb('a') * P("ex") * Carg(3) / exheight
+ + Cb('a') * P("em") * Carg(4) / emwidth
+ + Cb('a') * Carg(1) / pixels
)
local pattern = (validdimen * lpegpatterns.whitespace^0)^1
-- todo: default if ""
+local function dimension(str,pixel,percent,exheight,emwidth)
+ return (lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth))
+end
+
local function padding(str,pixel,percent,exheight,emwidth)
local top, bottom, left, right = lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth)
if not bottom then
@@ -52,7 +79,8 @@ local function padding(str,pixel,percent,exheight,emwidth)
return top, bottom, left, right
end
-css.padding = padding
+css.dimension = dimension
+css.padding = padding
-- local hsize = 655360*100
-- local exheight = 65536*4
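The lxml-css rework routes the px/%/ex/em branches through named functions while still passing the conversion factors as extra lpegmatch arguments via Carg. A small self-contained illustration of that Carg technique (pt and px only, with invented factors):

-- illustration only: how Carg forwards extra lpegmatch arguments to captures
local lpeg = lpeg or require("lpeg")
local P, R, Cg, Cb, Carg = lpeg.P, lpeg.R, lpeg.Cg, lpeg.Cb, lpeg.Carg

local number = R("09")^1 / tonumber
local dimen  = Cg(number, "a") * (
      Cb("a") * P("pt")           / function(n)      return n * 65536 end
    + Cb("a") * P("px") * Carg(1) / function(n, pxf) return n * pxf   end
)

print(lpeg.match(dimen, "10pt", 1, 400))  -- 655360
print(lpeg.match(dimen, "12px", 1, 400))  -- 4800 (12 * the passed pixel factor)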
diff --git a/Master/texmf-dist/tex/context/base/lxml-css.mkiv b/Master/texmf-dist/tex/context/base/lxml-css.mkiv
index 2174874d9e2..79de85c088c 100644
--- a/Master/texmf-dist/tex/context/base/lxml-css.mkiv
+++ b/Master/texmf-dist/tex/context/base/lxml-css.mkiv
@@ -13,7 +13,7 @@
\registerctxluafile{lxml-css}{1.001}
-\def\ctxmodulecss#1{\directlua\zerocount{moduledata.css.#1}}
+\def\ctxmodulecss#1{\ctxlua{moduledata.css.#1}}
% No stable interface yet.
diff --git a/Master/texmf-dist/tex/context/base/lxml-ctx.lua b/Master/texmf-dist/tex/context/base/lxml-ctx.lua
index 3319dc63838..968dbda7174 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-ctx.lua
@@ -23,7 +23,7 @@ function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg
local after = after or "[%a%d][%a%d][%a%d]"
local pattern = "(" .. before .. ")(" .. tokens .. ")(" .. after .. ")"
local action = function(a,b,c)
- return a .. "<compound token=" .. format("%q",b) .. "/>" .. c
+ return a .. "<compound token=" .. format("%q",b) .. "/>" .. c -- formatters["%s<compound token=%q/>%s"](a,b,c)
end
xml.enhance(root,lpath,pattern,action) -- still present?
end
diff --git a/Master/texmf-dist/tex/context/base/lxml-ctx.mkiv b/Master/texmf-dist/tex/context/base/lxml-ctx.mkiv
index 530c29aa778..58807339dd7 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ctx.mkiv
+++ b/Master/texmf-dist/tex/context/base/lxml-ctx.mkiv
@@ -20,9 +20,6 @@
\unprotect
-% The \let|=letterbar is a messy hack and is needed for the tabulate. We now use
-% \detokenize.
-
\settrue \xmllshowbuffer
\setfalse\xmllshowtitle
\settrue \xmllshowwarning
diff --git a/Master/texmf-dist/tex/context/base/lxml-dir.lua b/Master/texmf-dist/tex/context/base/lxml-dir.lua
index 00375193f0f..3c68664aefe 100644
--- a/Master/texmf-dist/tex/context/base/lxml-dir.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-dir.lua
@@ -6,22 +6,23 @@ if not modules then modules = { } end modules ['lxml-dir'] = {
license = "see context related readme files"
}
-local format, gsub = string.format, string.gsub
+local gsub = string.gsub
+local formatters = string.formatters
---~ <?xml version="1.0" standalone="yes"?>
---~ <!-- demo.cdx -->
---~ <directives>
---~ <!--
---~ <directive attribute='id' value="100" setup="cdx:100"/>
---~ <directive attribute='id' value="101" setup="cdx:101"/>
---~ -->
---~ <!--
---~ <directive attribute='cdx' value="colors" element="cals:table" setup="cdx:cals:table:colors"/>
---~ <directive attribute='cdx' value="vertical" element="cals:table" setup="cdx:cals:table:vertical"/>
---~ <directive attribute='cdx' value="noframe" element="cals:table" setup="cdx:cals:table:noframe"/>
---~ -->
---~ <directive attribute='cdx' value="*" element="cals:table" setup="cdx:cals:table:*"/>
---~ </directives>
+-- <?xml version="1.0" standalone="yes"?>
+-- <!-- demo.cdx -->
+-- <directives>
+-- <!--
+-- <directive attribute='id' value="100" setup="cdx:100"/>
+-- <directive attribute='id' value="101" setup="cdx:101"/>
+-- -->
+-- <!--
+-- <directive attribute='cdx' value="colors" element="cals:table" setup="cdx:cals:table:colors"/>
+-- <directive attribute='cdx' value="vertical" element="cals:table" setup="cdx:cals:table:vertical"/>
+-- <directive attribute='cdx' value="noframe" element="cals:table" setup="cdx:cals:table:noframe"/>
+-- -->
+-- <directive attribute='cdx' value="*" element="cals:table" setup="cdx:cals:table:*"/>
+-- </directives>
local lxml, context = lxml, context
@@ -51,7 +52,7 @@ local function load_setup(filename)
local attribute, value, element = at.attribute or "", at.value or "", at.element or '*'
local setup, before, after = at.setup or "", at.before or "", at.after or ""
if attribute ~= "" and value ~= "" then
- local key = format("%s::%s::%s",element,attribute,value)
+ local key = formatters["%s::%s::%s"](element,attribute,value)
local t = data[key] or { }
if setup ~= "" then t.setup = setup end
if before ~= "" then t.before = before end
@@ -60,9 +61,9 @@ local function load_setup(filename)
valid = valid + 1
end
end
- report_lxml("%s directives found in '%s', %s valid",#collection,filename,valid)
+ report_lxml("%s directives found in %a, valid %s",#collection,filename,valid)
else
- report_lxml("no directives found in '%s'",filename)
+ report_lxml("no directives found in %a",filename)
end
end
@@ -79,14 +80,14 @@ local function handle_setup(category,root,attribute,element)
element = ns .. ':' .. tg
end
end
- local setup = data[format("%s::%s::%s",element,attribute,value)]
+ local setup = data[formatters["%s::%s::%s"](element,attribute,value)]
if setup then
setup = setup[category]
end
if setup then
context.directsetup(setup)
else
- setup = data[format("%s::%s::*",element,attribute)]
+ setup = data[formatters["%s::%s::*"](element,attribute)]
if setup then
setup = setup[category]
end
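lxml-dir now builds its lookup keys with string.formatters instead of string.format. The sketch below mimics only the call pattern with a memoizing wrapper; the real string.formatters compiles templates and supports many extra directives.

-- memoizing wrapper that mimics the formatters[...] call pattern
local formatters = setmetatable({ }, {
    __index = function(t, template)
        local f = function(...) return string.format(template, ...) end
        t[template] = f       -- cache so the same template reuses one closure
        return f
    end
})

local key = formatters["%s::%s::%s"]("cals:table", "cdx", "colors")
print(key)   -- cals:table::cdx::colors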
diff --git a/Master/texmf-dist/tex/context/base/lxml-ent.lua b/Master/texmf-dist/tex/context/base/lxml-ent.lua
index be69dec00ee..a5c5bc389a6 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ent.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-ent.lua
@@ -7,9 +7,8 @@ if not modules then modules = { } end modules ['lxml-ent'] = {
}
local type, next, tonumber = type, next, tonumber
-local utf = unicode.utf8
local byte, format = string.byte, string.format
-local utfupper, utfchar = utf.upper, utf.char
+local utfchar = utf.char
local lpegmatch = lpeg.match
--[[ldx--
@@ -36,7 +35,7 @@ local entities = xml.entities -- maybe some day properties
function xml.registerentity(key,value)
entities[key] = value
if trace_entities then
- report_xml("registering entity '%s' as: %s",key,value)
+ report_xml("registering entity %a as %a",key,value)
end
end
diff --git a/Master/texmf-dist/tex/context/base/lxml-inf.lua b/Master/texmf-dist/tex/context/base/lxml-inf.lua
index 834d152fde9..8f1157c7d4c 100644
--- a/Master/texmf-dist/tex/context/base/lxml-inf.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-inf.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['lxml-inf'] = {
-- This file will be loaded runtime by x-pending.tex.
+local concat = table.concat
+
local xmlwithelements = xml.withelements
local getid = lxml.getid
diff --git a/Master/texmf-dist/tex/context/base/lxml-ini.mkiv b/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
index 84ebc58233c..8889d906ba4 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
@@ -27,7 +27,7 @@
\unprotect % todo \!!bs \!!es where handy (slower)
-\def\ctxlxml #1{\directlua\zerocount{lxml.#1}}
+\def\ctxlxml #1{\ctxlua{lxml.#1}}
\def\xmlmain #1{\ctxlxml{main("#1")}}
\def\xmlmatch #1{\ctxlxml{match("#1")}}
@@ -186,7 +186,7 @@
\unexpanded\def\xmlprocessdata {\lxml_process\zerocount\xmlloaddata}
\unexpanded\def\xmlprocessbuffer {\lxml_process\zerocount\xmlloadbuffer}
\unexpanded\def\xmlprocessregistered{\lxml_process\zerocount\xmlloadregistered}
-\let\xmlprocess \xmlprocessfile
+ \let\xmlprocess \xmlprocessfile
\startxmlsetups xml:flush
\xmlflush{#1}
@@ -313,7 +313,7 @@
\xmlprocessingmode\executeifdefined{\??xmldefaults\directxmlparameter\c!default}\plusone
\to \everysetupxml
-\unexpanded\def\xmlinitialize
+\unexpanded\def\initializexmlprocessing % is this still needed?
{\the\everysetupxml}
\let\p_lxml_entities\empty
@@ -418,7 +418,7 @@
% \stopextendcatcodetable
%
% \ctxlua { % entities are remembered in the format
-% characters.remapentity("<",characters.activeoffset + utf.byte("<"))
-% characters.remapentity("&",characters.activeoffset + utf.byte("&"))
-% characters.remapentity(">",characters.activeoffset + utf.byte(">"))
+% commands.remapentity("<",characters.activeoffset + utf.byte("<"))
+% commands.remapentity("&",characters.activeoffset + utf.byte("&"))
+% commands.remapentity(">",characters.activeoffset + utf.byte(">"))
% }
diff --git a/Master/texmf-dist/tex/context/base/lxml-lpt.lua b/Master/texmf-dist/tex/context/base/lxml-lpt.lua
index 44dd971551a..51ab321b931 100644
--- a/Master/texmf-dist/tex/context/base/lxml-lpt.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-lpt.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['lxml-pth'] = {
+if not modules then modules = { } end modules ['lxml-lpt'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10,11 +10,12 @@ if not modules then modules = { } end modules ['lxml-pth'] = {
-- todo: B/C/[get first match]
local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
+local type, next, tonumber, tostring, setmetatable, load, select = type, next, tonumber, tostring, setmetatable, load, select
local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters -- no need (yet) as paths are cached anyway
-- beware, this is not xpath ... e.g. position is different (currently) and
-- we have reverse-sibling as reversed preceding sibling
@@ -82,7 +83,7 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- report_lpath("unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer %a",name)
fn = function() end
end
return fn
@@ -539,14 +540,23 @@ local lp_builtin = P (
-- for the moment we keep namespaces with attributes
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
+
+-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+
+lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
+-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+-- return t .. "("
+-- end
+
+-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
+local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
if expressions[t] then
@@ -599,15 +609,12 @@ local converter = Cs (
)
cleaner = Cs ( (
---~ lp_fastpos +
+ -- lp_fastpos +
lp_reserved +
lp_number +
lp_string +
1 )^1 )
-
---~ expr
-
local template_e = [[
local expr = xml.expressions
return function(list,ll,l,order)
@@ -662,6 +669,7 @@ local function errorrunner_e(str,cnv)
end
return false
end
+
local function errorrunner_f(str,arg)
report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
@@ -673,7 +681,7 @@ end
local function register_expression(expression)
local converted = lpegmatch(converter,expression)
- local runner = loadstring(format(template_e,converted))
+ local runner = load(format(template_e,converted))
runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
end
@@ -681,9 +689,9 @@ end
local function register_finalizer(protocol,name,arguments)
local runner
if arguments and arguments ~= "" then
- runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ runner = load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
else
- runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name))
+ runner = load(format(template_f_n,protocol or xml.defaultprotocol,name))
end
runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
@@ -863,7 +871,7 @@ lpath = function (pattern) -- the gain of caching is rather minimal
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- report_lpath("parsing error in '%s'",pattern)
+ report_lpath("parsing error in pattern: %s",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -1107,6 +1115,7 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
+
expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
return pattern and (collected and #collected) or 0
@@ -1114,13 +1123,30 @@ end
-- external
-expressions.oneof = function(s,...) -- slow
- local t = {...} for i=1,#t do if s == t[i] then return true end end return false
+-- expressions.oneof = function(s,...)
+-- local t = {...}
+-- for i=1,#t do
+-- if s == t[i] then
+-- return true
+-- end
+-- end
+-- return false
+-- end
+
+expressions.oneof = function(s,...)
+ for i=1,select("#",...) do
+ if s == select(i,...) then
+ return true
+ end
+ end
+ return false
end
+
expressions.error = function(str)
xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
+
expressions.undefined = function(s)
return s == nil
end
@@ -1164,7 +1190,7 @@ end
-- user interface
local function traverse(root,pattern,handle)
- report_lpath("use 'xml.selection' instead for '%s'",pattern)
+ -- report_lpath("use 'xml.selection' instead for pattern: %s",pattern)
local collected = applylpath(root,pattern)
if collected then
for c=1,#collected do
@@ -1197,15 +1223,15 @@ xml.selection = selection -- new method, simple handle
-- generic function finalizer (independant namespace)
-local function dofunction(collected,fnc)
+local function dofunction(collected,fnc,...)
if collected then
local f = functions[fnc]
if f then
for c=1,#collected do
- f(collected[c])
+ f(collected[c],...)
end
else
- report_lpath("unknown function '%s'",fnc)
+ report_lpath("unknown function %a",fnc)
end
end
end
@@ -1217,7 +1243,7 @@ finalizers.tex["function"] = dofunction
expressions.text = function(e,n)
local rdt = e.__p__.dt
- return (rdt and rdt[n]) or ""
+ return rdt and rdt[n] or ""
end
expressions.name = function(e,n) -- ns + tg
@@ -1319,34 +1345,89 @@ end
</typing>
--ldx]]--
-local wrap, yield = coroutine.wrap, coroutine.yield
+-- local wrap, yield = coroutine.wrap, coroutine.yield
+-- local dummy = function() end
+--
+-- function xml.elements(root,pattern,reverse) -- r, d, k
+-- local collected = applylpath(root,pattern)
+-- if collected then
+-- if reverse then
+-- return wrap(function() for c=#collected,1,-1 do
+-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+-- end end)
+-- else
+-- return wrap(function() for c=1,#collected do
+-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+-- end end)
+-- end
+-- end
+-- return wrap(dummy)
+-- end
+--
+-- function xml.collected(root,pattern,reverse) -- e
+-- local collected = applylpath(root,pattern)
+-- if collected then
+-- if reverse then
+-- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
+-- else
+-- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
+-- end
+-- end
+-- return wrap(dummy)
+-- end
+
+-- faster:
+
+local dummy = function() end
function xml.elements(root,pattern,reverse) -- r, d, k
local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
- else
- return wrap(function() for c=1,#collected do
- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
- end end)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c = #collected + 1
+ return function()
+ if c > 1 then
+ c = c - 1
+ local e = collected[c]
+ local r = e.__p__
+ return r, r.dt, e.ni
+ end
+ end
+ else
+ local n, c = #collected, 0
+ return function()
+ if c < n then
+ c = c + 1
+ local e = collected[c]
+ local r = e.__p__
+ return r, r.dt, e.ni
+ end
end
end
- return wrap(function() end)
end
function xml.collected(root,pattern,reverse) -- e
local collected = applylpath(root,pattern)
- if collected then
- if reverse then
- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
- else
- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c = #collected + 1
+ return function()
+ if c > 1 then
+ c = c - 1
+ return collected[c]
+ end
+ end
+ else
+ local n, c = #collected, 0
+ return function()
+ if c < n then
+ c = c + 1
+ return collected[c]
+ end
end
end
- return wrap(function() end)
end
-- handy
@@ -1354,6 +1435,32 @@ end
function xml.inspect(collection,pattern)
pattern = pattern or "."
for e in xml.collected(collection,pattern or ".") do
- report_lpath("pattern %q\n\n%s\n",pattern,xml.tostring(e))
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+
+-- texy (see xfdf):
+
+local function split(e)
+ local dt = e.dt
+ if dt then
+ for i=1,#dt do
+ local dti = dt[i]
+ if type(dti) == "string" then
+ dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti = gsub(dti,"[\n\r]+","\n\n")
+ dt[i] = dti
+ else
+ split(dti)
+ end
+ end
end
+ return e
+end
+
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
end
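The "-- faster:" block replaces the coroutine-wrapped iterators of xml.elements and xml.collected with plain closures. A condensed version of that closure iterator over an already collected list, forward and reverse:

-- condensed closure iterator in the spirit of the new xml.collected
local function collected(list, reverse)
    if not list then
        return function() end            -- dummy iterator, as in the code above
    elseif reverse then
        local c = #list + 1
        return function()
            if c > 1 then c = c - 1 return list[c] end
        end
    else
        local n, c = #list, 0
        return function()
            if c < n then c = c + 1 return list[c] end
        end
    end
end

for e in collected({ "a", "b", "c" }, true) do
    io.write(e, " ")                     -- c b a
end
print()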
diff --git a/Master/texmf-dist/tex/context/base/lxml-tab.lua b/Master/texmf-dist/tex/context/base/lxml-tab.lua
index b5c86078747..2bb5844fcb0 100644
--- a/Master/texmf-dist/tex/context/base/lxml-tab.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-tab.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
+local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
@@ -39,13 +39,13 @@ local xml = xml
--~ local xml = xml
-local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
-local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
+local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
+local formatters = string.formatters
--[[ldx--
<p>First a hack to enable namespace resolving. A namespace is characterized by
@@ -209,7 +209,7 @@ local function add_empty(spacing, namespace, tag)
if #spacing > 0 then
dt[#dt+1] = spacing
end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
+ local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
top = stack[#stack]
dt = top.dt
local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
@@ -225,7 +225,7 @@ local function add_begin(spacing, namespace, tag)
if #spacing > 0 then
dt[#dt+1] = spacing
end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
+ local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
setmetatable(top, mt)
dt = top.dt
@@ -240,9 +240,9 @@ local function add_end(spacing, namespace, tag)
local toclose = remove(stack)
top = stack[#stack]
if #stack < 1 then
- errorstr = format("nothing to close with %s %s", tag, xml.checkerror(top,toclose) or "")
+ errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
elseif toclose.tg ~= tag then -- no namespace check
- errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.checkerror(top,toclose) or "")
+ errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
end
dt = top.dt
dt[#dt+1] = toclose
@@ -279,7 +279,7 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- report_xml("invalid attribute value: %q",str)
+ report_xml("invalid attribute value %a",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -288,7 +288,7 @@ end
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification %a",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -296,9 +296,9 @@ local function attribute_specification_error(str)
end
xml.placeholders = {
- unknown_dec_entity = function(str) return (str == "" and "&error;") or format("&%s;",str) end,
- unknown_hex_entity = function(str) return format("&#x%s;",str) end,
- unknown_any_entity = function(str) return format("&#x%s;",str) end,
+ unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
}
local placeholders = xml.placeholders
@@ -308,7 +308,7 @@ local function fromhex(s)
if n then
return utfchar(n)
else
- return format("h:%s",s), true
+ return formatters["h:%s"](s), true
end
end
@@ -317,7 +317,7 @@ local function fromdec(s)
if n then
return utfchar(n)
else
- return format("d:%s",s), true
+ return formatters["d:%s"](s), true
end
end
@@ -365,15 +365,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -388,13 +380,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -407,14 +393,14 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
end
elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
if not n then
report_xml("utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
end
else
if trace_entities then
@@ -434,14 +420,14 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
end
elseif utfize then
d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
end
else
if trace_entities then
@@ -463,7 +449,7 @@ local function handle_any_entity(str)
a = resolve_predefined and predefined_simplified[str]
if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (predefined)",str,a)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
else
if type(resolve) == "function" then
@@ -474,13 +460,13 @@ local function handle_any_entity(str)
if a then
if type(a) == "function" then
if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
+ report_xml("expanding entity &%s; to function call",str)
end
a = a(str) or ""
end
a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolving entity &%s; to internal %a",str,a)
end
else
local unknown_any_entity = placeholders.unknown_any_entity
@@ -489,7 +475,7 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolving entity &%s; to external %s",str,a)
end
else
if trace_entities then
@@ -506,7 +492,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- report_xml("converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; to %a",str,a)
acache[str] = a
end
end
@@ -519,7 +505,7 @@ local function handle_any_entity(str)
-- one of the predefined
acache[str] = a
if trace_entities then
- report_xml("entity &%s; becomes %s",str,tostring(a))
+ report_xml("entity &%s; becomes %a",str,a)
end
elseif str == "" then
if trace_entities then
@@ -541,7 +527,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- report_xml("error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %a found instead of %a",chr,";")
end
local space = S(' \r\n\t')
@@ -695,8 +681,6 @@ local function _xmlconvert_(data, settings)
resolve_predefined = true
end
--
---~ inspect(settings)
- --
stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
@@ -735,7 +719,12 @@ local function _xmlconvert_(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
+ local currentresource = settings.currentresource
+ if currentresource and currentresource ~= "" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
end
end
else
@@ -780,7 +769,7 @@ function xmlconvert(data,settings)
if ok then
return result
else
- return _xmlconvert_("")
+ return _xmlconvert_("",settings)
end
end
@@ -833,15 +822,22 @@ function xml.load(filename,settings)
local data = ""
if type(filename) == "string" then
-- local data = io.loaddata(filename) - -todo: check type in io.loaddata
- local f = io.open(filename,'r')
+ local f = io.open(filename,'r') -- why not 'rb'
if f then
- data = f:read("*all")
+ data = f:read("*all") -- io.readall(f) ... only makes sense for large files
f:close()
end
elseif filename then -- filehandle
- data = filename:read("*all")
+ data = filename:read("*all") -- io.readall(f) ... only makes sense for large files
+ end
+ if settings then
+ settings.currentresource = filename
+ local result = xmlconvert(data,settings)
+ settings.currentresource = nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource = filename })
end
- return xmlconvert(data,settings)
end
--[[ldx--
@@ -907,7 +903,7 @@ function xml.checkbom(root) -- can be made faster
return
end
end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
insert(dt, 2, "\n" )
end
end
@@ -926,7 +922,7 @@ local function verbose_element(e,handlers) -- options
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,escaped(v))
+ ats[#ats+1] = formatters['%s=%q'](k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -1056,7 +1052,7 @@ end
local handlers = { }
local function newhandlers(settings)
- local t = table.copy(handlers.verbose or { }) -- merge
+ local t = table.copy(handlers[settings and settings.parent or "verbose"] or { }) -- merge
if settings then
for k,v in next, settings do
if type(v) == "table" then
@@ -1180,7 +1176,7 @@ local xmlstringhandler = newhandlers {
local function xmltostring(root) -- 25% overhead due to collecting
if not root then
return ""
- elseif type(root) == 'string' then
+ elseif type(root) == "string" then
return root
else -- if next(root) then -- next is faster than type (and >0 test)
return serialize(root,xmlstringhandler) or ""
@@ -1256,16 +1252,19 @@ function xml.parent(root)
end
function xml.body(root)
- return (root.ri and root.dt[root.ri]) or root -- not ok yet
+ return root.ri and root.dt[root.ri] or root -- not ok yet
end
function xml.name(root)
if not root then
return ""
- elseif root.ns == "" then
- return root.tg
+ end
+ local ns = root.ns
+ local tg = root.tg
+ if ns == "" then
+ return tg
else
- return root.ns .. ":" .. root.tg
+ return ns .. ":" .. tg
end
end
@@ -1295,7 +1294,7 @@ dt[k] = xml.assign(root) or xml.assign(dt,k,root)
function xml.assign(dt,k,root)
if dt and k then
- dt[k] = (type(root) == "table" and xml.body(root)) or root
+ dt[k] = type(root) == "table" and xml.body(root) or root
return dt[k]
else
return xml.body(root)
@@ -1315,9 +1314,9 @@ xml.tocdata(e,"error")
function xml.tocdata(e,wrapper) -- a few more in the aux module
local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
- whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
+ whatever = formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
+ local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
setmetatable(t,getmetatable(e))
e.dt = { t }
end
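lxml-tab now threads a currentresource setting from the loaders into the converter so that load errors can name the file or buffer that failed. A schematic reduction of that idea with stand-in convert/loadresource functions (not the real xml.convert API):

-- stand-ins for xml.convert and xml.load, showing the currentresource idea
local function convert(data, settings)
    if data == "" then
        local resource = settings and settings.currentresource
        if resource and resource ~= "" then
            return nil, string.format("load error in [%s]: empty input", resource)
        else
            return nil, "load error: empty input"
        end
    end
    return { dt = { data } }
end

local function loadresource(filename, settings)
    settings = settings or { }
    settings.currentresource = filename
    local result, err = convert("", settings)   -- force the error path for the demo
    settings.currentresource = nil
    return result, err
end

print(select(2, loadresource("demo.xml")))   -- load error in [demo.xml]: empty input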
diff --git a/Master/texmf-dist/tex/context/base/lxml-tex.lua b/Master/texmf-dist/tex/context/base/lxml-tex.lua
index 0b21e0a9c86..112f627511f 100644
--- a/Master/texmf-dist/tex/context/base/lxml-tex.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-tex.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['lxml-tst'] = {
+if not modules then modules = { } end modules ['lxml-tex'] = {
version = 1.001,
comment = "companion to lxml-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10,12 +10,10 @@ if not modules then modules = { } end modules ['lxml-tst'] = {
-- interface and not the context one. If we ever do that there will
-- be an cldf-xml helper library.
-local utf = unicode.utf8
-
-local utfchar, utfupper = utf.char, utf.upper
+local utfchar = utf.char
local concat, insert, remove = table.concat, table.insert, table.remove
local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
-local type, next, tonumber, tostring = type, next, tonumber, tostring
+local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
local lpegmatch = lpeg.match
local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
@@ -25,9 +23,12 @@ local lowerchars, upperchars, lettered = characters.lower, characters.upper, cha
lxml = lxml or { }
local lxml = lxml
-local ctxcatcodes, notcatcodes = tex.ctxcatcodes, tex.notcatcodes
+local catcodenumbers = catcodes.numbers
+local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
+local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
-local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
+local context = context
+local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
local xmlwithelements = xml.withelements
@@ -69,14 +70,14 @@ local parsedentity = xml.parsedentitylpeg
function lxml.registerentity(key,value)
texentities[key] = value
if trace_entities then
- report_xml("registering tex entity '%s' as: %s",key,value)
+ report_xml("registering tex entity %a as %a",key,value)
end
end
function lxml.resolvedentity(str)
if forceraw then
if trace_entities then
- report_xml("passing entity '%s' as &%s;",str,str)
+ report_xml("passing entity %a as &%s;",str,str)
end
context("&%s;",str)
else
@@ -85,12 +86,12 @@ function lxml.resolvedentity(str)
local te = type(e)
if te == "function" then
if trace_entities then
- report_xml("passing entity '%s' using function",str)
+ report_xml("passing entity %a using function",str)
end
e(str)
elseif e then
if trace_entities then
- report_xml("passing entity '%s' as '%s'using ctxcatcodes",str,e)
+ report_xml("passing entity %a as %a using %a",str,e,"ctxcatcodes")
end
context(e)
end
@@ -104,7 +105,7 @@ function lxml.resolvedentity(str)
end
if e then
if trace_entities then
- report_xml("passing entity '%s' as '%s' using notcatcodes",str,e)
+ report_xml("passing entity %a as %a using %a",str,e,"notcatcodes")
end
contextsprint(notcatcodes,e)
return
@@ -115,18 +116,18 @@ function lxml.resolvedentity(str)
local chr, err = lpegmatch(parsedentity,str)
if chr then
if trace_entities then
- report_xml("passing entity '%s' as '%s' using ctxcatcodes",str,chr)
+ report_xml("passing entity %a as %a using %a",str,chr,"ctxcatcodes")
end
context(chr)
elseif err then
if trace_entities then
- report_xml("passing faulty entity '%s' as '%s'",str,err)
+ report_xml("passing faulty entity %a as %a",str,err)
end
context(err)
else
- local tag = utfupper(str)
+ local tag = upperchars(str)
if trace_entities then
- report_xml("passing entity '%s' to \\xmle using tag '%s'",str,tag)
+ report_xml("passing entity %a to \\xmle using tag %a",str,tag)
end
context.xmle(str,tag) -- we need to use our own upper
end
@@ -260,16 +261,16 @@ local function getid(id, qualified)
return root
end
elseif trace_access then
- report_lxml("'%s' has no index entry '%s'",d,i)
+ report_lxml("%a has no index entry %a",d,i)
end
elseif trace_access then
- report_lxml("'%s' has no index",d)
+ report_lxml("%a has no index",d)
end
elseif trace_access then
- report_lxml("'%s' is not loaded",d)
+ report_lxml("%a is not loaded",d)
end
elseif trace_access then
- report_lxml("'%s' is not loaded",i)
+ report_lxml("%a is not loaded",i)
end
end
elseif trace_access then
@@ -319,7 +320,7 @@ local function addindex(name,check_sum,force)
root.index = index
root.maxindex = maxindex
if trace_access then
- report_lxml("%s indexed, %s nodes",tostring(name),maxindex)
+ report_lxml("indexed entries %a, found nodes %a",tostring(name),maxindex)
end
end
end
@@ -381,7 +382,10 @@ function xml.load(filename,settings)
noffiles, nofconverted = noffiles + 1, nofconverted + 1
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
+ settings = settings or { }
+ settings.currentresource = filename
local xmltable = xml.convert((ok and data) or "",settings)
+ settings.currentresource = nil
stoptiming(xml)
return xmltable
end
@@ -390,12 +394,13 @@ local function entityconverter(id,str)
return xmlentities[str] or xmlprivatetoken(str) or "" -- roundtrip handler
end
-function lxml.convert(id,data,entities,compress)
+function lxml.convert(id,data,entities,compress,currentresource)
local settings = { -- we're now roundtrip anyway
unify_predefined_entities = true,
utfize_entities = true,
resolve_predefined_entities = true,
resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml
+ currentresource = tostring(currentresource or id),
}
if compress and compress == variables.yes then
settings.strip_cm_and_dt = true
@@ -410,13 +415,13 @@ end
function lxml.load(id,filename,compress,entities)
filename = commands.preparedfile(filename) -- not commands!
if trace_loading then
- report_lxml("loading file '%s' as '%s'",filename,id)
+ report_lxml("loading file %a as %a",filename,id)
end
noffiles, nofconverted = noffiles + 1, nofconverted + 1
-- local xmltable = xml.load(filename)
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
- local xmltable = lxml.convert(id,(ok and data) or "",compress,entities)
+ local xmltable = lxml.convert(id,(ok and data) or "",compress,entities,format("id: %s, file: %s",id,filename))
stoptiming(xml)
lxml.store(id,xmltable,filename)
return xmltable, filename
@@ -440,7 +445,7 @@ function lxml.include(id,pattern,attribute,recurse)
end
end
if trace_loading then
- report_lxml("including file: %s",filename)
+ report_lxml("including file %a",filename)
end
noffiles, nofconverted = noffiles + 1, nofconverted + 1
return resolvers.loadtexfile(filename) or ""
@@ -457,14 +462,14 @@ function xml.getbuffer(name,compress,entities) -- we need to make sure that comm
end
nofconverted = nofconverted + 1
local data = buffers.getcontent(name)
- xmltostring(lxml.convert(name,data,compress,entities)) -- one buffer
+ xmltostring(lxml.convert(name,data,compress,entities,format("buffer: %s",tostring(name or "?")))) -- one buffer
end
function lxml.loadbuffer(id,name,compress,entities)
starttiming(xml)
nofconverted = nofconverted + 1
local data = buffers.collectcontent(name or id) -- name can be list
- local xmltable = lxml.convert(id,data,compress,entities)
+ local xmltable = lxml.convert(id,data,compress,entities,format("buffer: %s",tostring(name or id or "?")))
lxml.store(id,xmltable)
stoptiming(xml)
return xmltable, name or id
@@ -473,7 +478,7 @@ end
function lxml.loaddata(id,str,compress,entities)
starttiming(xml)
nofconverted = nofconverted + 1
- local xmltable = lxml.convert(id,str or "",compress,entities)
+ local xmltable = lxml.convert(id,str or "",compress,entities,format("id: %s",id))
lxml.store(id,xmltable)
stoptiming(xml)
return xmltable, id
@@ -496,7 +501,7 @@ end
local function tex_comment(e,handlers)
if trace_comments then
- report_lxml("comment: %s",e.dt[1])
+ report_lxml("comment %a",e.dt[1])
end
end
@@ -523,7 +528,7 @@ local function tex_element(e,handlers)
-- faster than context.xmlw
contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
else
- report_lxml( "fatal error: no index for '%s'",command)
+ report_lxml("fatal error: no index for %a",command)
contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
end
elseif tc == "function" then
@@ -745,37 +750,37 @@ function lxml.main(id)
xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@)
end
---~ -- lines (untested)
---~
---~ local buffer = { }
---~
---~ local xmllinescapture = (
---~ newline^2 / function() buffer[#buffer+1] = "" end +
---~ newline / function() buffer[#buffer] = buffer[#buffer] .. " " end +
---~ content / function(s) buffer[#buffer] = buffer[#buffer] .. s end
---~ )^0
---~
---~ local xmllineshandler = table.copy(xmltexhandler)
---~
---~ xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end
---~
---~ function lines(root)
---~ if not root then
---~ -- rawroot = false
---~ -- quit
---~ elseif type(root) == 'string' then
---~ -- rawroot = false
---~ lpegmatch(xmllinescapture,root)
---~ elseif next(root) then -- tr == 'table'
---~ xmlserialize(root,xmllineshandler)
---~ end
---~ end
---~
---~ function xml.lines(root) -- used at all?
---~ buffer = { "" }
---~ lines(root)
---~ return result
---~ end
+-- -- lines (untested)
+--
+-- local buffer = { }
+--
+-- local xmllinescapture = (
+-- newline^2 / function() buffer[#buffer+1] = "" end +
+-- newline / function() buffer[#buffer] = buffer[#buffer] .. " " end +
+-- content / function(s) buffer[#buffer] = buffer[#buffer] .. s end
+-- )^0
+--
+-- local xmllineshandler = table.copy(xmltexhandler)
+--
+-- xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end
+--
+-- function lines(root)
+-- if not root then
+-- -- rawroot = false
+-- -- quit
+-- elseif type(root) == 'string' then
+-- -- rawroot = false
+-- lpegmatch(xmllinescapture,root)
+-- elseif next(root) then -- tr == 'table'
+-- xmlserialize(root,xmllineshandler)
+-- end
+-- end
+--
+-- function xml.lines(root) -- used at all?
+-- buffer = { "" }
+-- lines(root)
+-- return result
+-- end
local function to_text(e)
if e.command == nil then
@@ -815,52 +820,52 @@ function lxml.installsetup(what,document,setup,where)
end
if what == 1 then
if trace_loading then
- report_lxml("prepending setup %s for %s",setup,document)
+ report_lxml("prepending setup %a for %a",setup,document)
end
insert(sd,1,setup)
elseif what == 2 then
if trace_loading then
- report_lxml("appending setup %s for %s",setup,document)
+ report_lxml("appending setup %a for %a",setup,document)
end
insert(sd,setup)
elseif what == 3 then
if trace_loading then
- report_lxml("inserting setup %s for %s before %s",setup,document,where)
+ report_lxml("inserting setup %a for %a before %a",setup,document,where)
end
insertbeforevalue(sd,setup,where)
elseif what == 4 then
if trace_loading then
- report_lxml("inserting setup %s for %s after %s",setup,document,where)
+ report_lxml("inserting setup %a for %a after %a",setup,document,where)
end
insertaftervalue(sd,setup,where)
end
end
function lxml.flushsetups(id,...)
- local done, list = { }, { ... }
- for i=1,#list do
- local document = list[i]
+ local done = { }
+ for i=1,select("#",...) do
+ local document = select(i,...)
local sd = setups[document]
if sd then
for k=1,#sd do
local v = sd[k]
if not done[v] then
if trace_loading then
- report_lxml("applying setup %02i = %s to %s",k,v,document)
+ report_lxml("applying setup %02i : %a to %a",k,v,document)
end
contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}")
done[v] = true
end
end
elseif trace_loading then
- report_lxml("no setups for %s",document)
+ report_lxml("no setups for %a",document)
end
end
end
function lxml.resetsetups(document)
if trace_loading then
- report_lxml("resetting all setups for %s",document)
+ report_lxml("resetting all setups for %a",document)
end
setups[document] = { }
end
@@ -871,7 +876,7 @@ function lxml.removesetup(document,setup)
for i=1,#s do
if s[i] == setup then
if trace_loading then
- report_lxml("removing setup %s for %s",setup,document)
+ report_lxml("removing setup %a for %a",setup,document)
end
remove(t,i)
break
@@ -922,10 +927,10 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- report_lxml("zero lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
end
elseif trace_setups then
- report_lxml("no lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
end
else
local a, b = match(setup,"^(.+:)([%*%-])$")
@@ -974,10 +979,10 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- report_lxml("zero lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
end
elseif trace_setups then
- report_lxml("no lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
end
else
local collected = xmlapplylpath(getid(id),pattern)
@@ -1001,10 +1006,10 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- report_lxml("zero lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
end
elseif trace_setups then
- report_lxml("no lpath matches for %s",pattern)
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
end
end
end
@@ -1397,18 +1402,6 @@ function lxml.raw(id,pattern) -- the content, untouched by commands
end
function lxml.context(id,pattern) -- the content, untouched by commands
- if not pattern then
- local collected = getid(id)
- ctx_text(collected.dt[1])
- else
- local collected = xmlapplylpath(getid(id),pattern) or getid(id)
- if collected and #collected > 0 then
- contextsprint(ctxcatcodes,collected[1].dt)
- end
- end
-end
-
-function lxml.context(id,pattern) -- the content, untouched by commands
if pattern then
local collected = xmlapplylpath(getid(id),pattern) or getid(id)
if collected and #collected > 0 then
@@ -1416,8 +1409,11 @@ function lxml.context(id,pattern) -- the content, untouched by commands
end
else
local collected = getid(id)
- if collected and #collected > 0 then
- ctx_text(collected.dt[1])
+ if collected then
+ local dt = collected.dt
+ if #dt > 0 then
+ ctx_text(dt[1])
+ end
end
end
end
@@ -1592,16 +1588,18 @@ statistics.register("lxml lpath profile", function()
if p and next(p) then
local s = table.sortedkeys(p)
local tested, matched, finalized = 0, 0, 0
- texio.write_nl("log","\nbegin of lxml profile\n")
- texio.write_nl("log","\n tested matched finalized pattern\n\n")
+ logs.pushtarget("logfile")
+ logs.writer("\nbegin of lxml profile\n")
+ logs.writer("\n tested matched finalized pattern\n\n")
for i=1,#s do
local pattern = s[i]
local pp = p[pattern]
local t, m, f = pp.tested, pp.matched, pp.finalized
tested, matched, finalized = tested + t, matched + m, finalized + f
- texio.write_nl("log",format("%9i %9i %9i %s",t,m,f,pattern))
+ logs.writer(format("%9i %9i %9i %s",t,m,f,pattern))
end
- texio.write_nl("log","\nend of lxml profile\n")
+ logs.writer("\nend of lxml profile\n")
+ logs.poptarget()
return format("%s patterns, %s tested, %s matched, %s finalized (see log for details)",#s,tested,matched,finalized)
else
return nil
diff --git a/Master/texmf-dist/tex/context/base/m-barcodes.mkiv b/Master/texmf-dist/tex/context/base/m-barcodes.mkiv
index 0282f706dc0..e4c43b37650 100644
--- a/Master/texmf-dist/tex/context/base/m-barcodes.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-barcodes.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D You can best use m-zint instead of this one.
+\writestatus{barcodes}{the 'zint' module is a better choice}
% \startTEXpage
% \startPSTRICKS
@@ -106,15 +106,13 @@ end
}
\stopsetups
-\def\barcode[#1]%
+\unexpanded\def\barcode[#1]%
{\bgroup
\setvariables[barcode][type=isbn,#1]%
\directsetup{barcode:\getvariable{barcode}{type}}%
\egroup}
-% \usemodule[barcodes]
-
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{m-barcodes.mkiv}
\starttext
\startTEXpage
diff --git a/Master/texmf-dist/tex/context/base/m-chart.lua b/Master/texmf-dist/tex/context/base/m-chart.lua
index 32de8a5ba57..c4da2eb6378 100644
--- a/Master/texmf-dist/tex/context/base/m-chart.lua
+++ b/Master/texmf-dist/tex/context/base/m-chart.lua
@@ -15,11 +15,11 @@ moduledata.charts = moduledata.charts or { }
local gsub, match, find, format, lower = string.gsub, string.match, string.find, string.format, string.lower
local setmetatableindex = table.setmetatableindex
+local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
-local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
-local lpegmatch = lpeg.match
+local report_chart = logs.reporter("chart")
-local points = number.points
+local points = number.points
local variables = interfaces.variables
@@ -27,7 +27,6 @@ local v_yes = variables.yes
local v_no = variables.no
local v_none = variables.none
local v_standard = variables.standard
-local v_start = variables.start
local v_overlay = variables.overlay
local v_round = variables.round
local v_test = variables.test
@@ -37,10 +36,10 @@ local defaults = {
name = "",
option = "",
backgroundcolor = "",
- width = 100*65436,
- height = 50*65436,
- dx = 30*65436,
- dy = 30*65436,
+ width = 100*65536,
+ height = 50*65536,
+ dx = 30*65536,
+ dy = 30*65536,
offset = 0,
bodyfont = "",
dot = "",
@@ -48,25 +47,25 @@ local defaults = {
vcompact = variables_no,
autofocus = "",
focus = "",
- labeloffset = 5*65436,
- commentoffset = 5*65436,
+ labeloffset = 5*65536,
+ commentoffset = 5*65536,
exitoffset = 0,
},
shape = { -- FLOS
- rulethickness = 65436,
+ rulethickness = 65536,
default = "",
framecolor = "darkblue",
backgroundcolor = "lightgray",
},
focus = { -- FLOF
- rulethickness = 65436,
+ rulethickness = 65536,
framecolor = "darkred",
backgroundcolor = "gray",
},
line = { -- FLOL
- rulethickness = 65436,
- radius = 10*65436,
+ rulethickness = 65536,
+ radius = 10*65536,
color = "darkgreen",
corner = "",
dash = "",
@@ -672,7 +671,7 @@ local function process_texts(chart,xoffset,yoffset)
end
end
-local function getchart(settings)
+local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
if not settings then
print("no settings given")
return
@@ -688,7 +687,7 @@ local function getchart(settings)
return
end
chart = expanded(chart,settings)
- local cc_settings = chart.settings.chart
+ local chartsettings = chart.settings.chart
local autofocus = chart.settings.chart.autofocus
if autofocus then
autofocus = utilities.parsers.settings_to_hash(autofocus)
@@ -697,10 +696,10 @@ local function getchart(settings)
end
end
-- check natural window
- local x = tonumber(cc_settings.x)
- local y = tonumber(cc_settings.y)
- local nx = tonumber(cc_settings.nx)
- local ny = tonumber(cc_settings.ny)
+ local x = forced_x or tonumber(chartsettings.x)
+ local y = forced_y or tonumber(chartsettings.y)
+ local nx = forced_nx or tonumber(chartsettings.nx)
+ local ny = forced_ny or tonumber(chartsettings.ny)
--
local minx, miny, maxx, maxy = 0, 0, 0, 0
local data = chart.data
@@ -715,8 +714,8 @@ local function getchart(settings)
if miny == 0 or y > maxy then maxy = y end
end
end
--- print("1>",x,y,nx,ny)
--- print("2>",minx, miny, maxx, maxy)
+ -- print("1>",x,y,nx,ny)
+ -- print("2>",minx, miny, maxx, maxy)
-- check if window should be larger (maybe autofocus + nx/ny?)
if autofocus then
-- x and y are ignored
@@ -762,7 +761,9 @@ local function getchart(settings)
end
local function makechart(chart)
- local settings = chart.settings
+ local settings = chart.settings
+ local chartsettings = settings.chart
+ --
context.begingroup()
context.forgetall()
--
@@ -770,27 +771,27 @@ local function makechart(chart)
context("if unknown context_flow : input mp-char.mpiv ; fi ;")
context("flow_begin_chart(0,%s,%s);",chart.nx,chart.ny)
--
- if settings.chart.option == v_test or settings.chart.dot == v_yes then
+ if chartsettings.option == v_test or chartsettings.dot == v_yes then
context("flow_show_con_points := true ;")
context("flow_show_mid_points := true ;")
context("flow_show_all_points := true ;")
- elseif settings.chart.dot ~= "" then -- no checking done, private option
- context("flow_show_%s_points := true ;",settings.chart.dot)
+ elseif chartsettings.dot ~= "" then -- no checking done, private option
+ context("flow_show_%s_points := true ;",chartsettings.dot)
end
--
- local backgroundcolor = settings.chart.backgroundcolor
+ local backgroundcolor = chartsettings.backgroundcolor
if backgroundcolor and backgroundcolor ~= "" then
context("flow_chart_background_color := \\MPcolor{%s} ;",backgroundcolor)
end
--
- local shapewidth = settings.chart.width
- local gridwidth = shapewidth + 2*settings.chart.dx
- local shapeheight = settings.chart.height
- local gridheight = shapeheight + 2*settings.chart.dy
- local chartoffset = settings.chart.offset
- local labeloffset = settings.chart.labeloffset
- local exitoffset = settings.chart.exitoffset
- local commentoffset = settings.chart.commentoffset
+ local shapewidth = chartsettings.width
+ local gridwidth = shapewidth + 2*chartsettings.dx
+ local shapeheight = chartsettings.height
+ local gridheight = shapeheight + 2*chartsettings.dy
+ local chartoffset = chartsettings.offset
+ local labeloffset = chartsettings.labeloffset
+ local exitoffset = chartsettings.exitoffset
+ local commentoffset = chartsettings.commentoffset
context("flow_grid_width := %s ;", points(gridwidth))
context("flow_grid_height := %s ;", points(gridheight))
context("flow_shape_width := %s ;", points(shapewidth))
@@ -802,8 +803,8 @@ local function makechart(chart)
--
local radius = settings.line.radius
local rulethickness = settings.line.rulethickness
- local dx = settings.chart.dx
- local dy = settings.chart.dy
+ local dx = chartsettings.dx
+ local dy = chartsettings.dy
if radius < rulethickness then
radius = 2.5*rulethickness
if radius > dx then
@@ -818,7 +819,7 @@ local function makechart(chart)
context("flow_connection_arrow_size := %s ;", points(radius))
context("flow_connection_dash_size := %s ;", points(radius))
--
- local offset = settings.chart.offset -- todo: pass string
+ local offset = chartsettings.offset -- todo: pass string
if offset == v_none or offset == v_overlay or offset == "" then
offset = -2.5 * radius -- or rulethickness?
elseif offset == v_standard then
@@ -836,42 +837,80 @@ local function makechart(chart)
context.endgroup()
end
+local function splitchart(chart)
+ local settings = chart.settings
+ local splitsettings = settings.split
+ local chartsettings = settings.chart
+ --
+ local name = chartsettings.name
+ --
+ local from_x = chart.from_x
+ local from_y = chart.from_y
+ local to_x = chart.to_x
+ local to_y = chart.to_y
+ --
+ local step_x = splitsettings.nx or to_x
+ local step_y = splitsettings.ny or to_y
+ local delta_x = splitsettings.dx or 0
+ local delta_y = splitsettings.dy or 0
+ --
+ report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)",
+ name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y)
+ --
+ local part_x = 0
+ local first_x = from_x
+ while true do
+ part_x = part_x + 1
+ local last_x = first_x + step_x - 1
+ local done = last_x >= to_x
+ if done then
+ last_x = to_x
+ end
+ local part_y = 0
+ local first_y = from_y
+ while true do
+ part_y = part_y + 1
+ local last_y = first_y + step_y - 1
+ local done = last_y >= to_y
+ if done then
+ last_y = to_y
+ end
+ --
+ report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y)
+ local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1
+ context.beforeFLOWsplit()
+ context.handleFLOWsplit(function()
+ makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings !
+ end)
+ context.afterFLOWsplit()
+ --
+ if done then
+ break
+ else
+ first_y = last_y + 1 - delta_y
+ end
+ end
+ if done then
+ break
+ else
+ first_x = last_x + 1 - delta_x
+ end
+ end
+end
+
function commands.flow_make_chart(settings)
local chart = getchart(settings)
if chart then
local settings = chart.settings
--- if settings.split.state == v_start then
--- local nx = chart.settings.split.nx
--- local ny = chart.settings.split.ny
--- local x = 1
--- while true do
--- local y = 1
--- while true do
--- -- FLOTbefore
--- -- doif @@FLOTmarking on -> cuthbox
--- -- @@FLOTcommand
--- chart.from_x = x
--- chart.from_y = y
--- chart.to_x = math.min(x + nx - 1,chart.nx)
--- chart.to_y = math.min(x + ny - 1,chart.ny)
--- makechart(chart)
--- -- FLOTafter
--- y = y + ny
--- if y > chart.max_y then
--- break
--- else
--- y = y - dy
--- end
--- end
--- x = x + nx
--- if x > chart.max_x then
--- break
--- else
--- x = x - dx
--- end
--- end
--- else
+ if settings then
+ local chartsettings = settings.chart
+ if chartsettings and chartsettings.split == v_yes then
+ splitchart(chart)
+ else
+ makechart(chart)
+ end
+ else
makechart(chart)
--- end
+ end
end
end
diff --git a/Master/texmf-dist/tex/context/base/m-chart.mkii b/Master/texmf-dist/tex/context/base/m-chart.mkii
index 13403966bea..8a5f480ee7f 100644
--- a/Master/texmf-dist/tex/context/base/m-chart.mkii
+++ b/Master/texmf-dist/tex/context/base/m-chart.mkii
@@ -1094,7 +1094,7 @@
\def\FLOWsplitnx{1}
\def\FLOWsplitny{1}
-\def\FLOWcharts%
+\def\FLOWcharts
{\dodoubleempty\doFLOWcharts}
%D While splitting, the following variables are available:
diff --git a/Master/texmf-dist/tex/context/base/m-chart.mkvi b/Master/texmf-dist/tex/context/base/m-chart.mkvi
index e94adeba2cb..2b1a7447ccd 100644
--- a/Master/texmf-dist/tex/context/base/m-chart.mkvi
+++ b/Master/texmf-dist/tex/context/base/m-chart.mkvi
@@ -30,19 +30,19 @@
% todo: figure out a nice way to define the lot: share current and
% support current as name (nb: we need to set parent then)
-\def\??flch{@@flch} % chart
-\def\??flln{@@flln} % line
-\def\??flsh{@@flsh} % shape
-\def\??flfc{@@flfc} % focus
-\def\??flst{@@flst} % sets
-\def\??flsp{@@flsp} % split
-
-\installsimplecommandhandler \??flch {FLOWchart} \??flch
-\installsimplecommandhandler \??flln {FLOWline} \??flln
-\installsimplecommandhandler \??flsh {FLOWshape} \??flsh
-\installsimplecommandhandler \??flfc {FLOWfocus} \??flfc
-\installsimplecommandhandler \??flst {FLOWsets} \??flst
-\installsimplecommandhandler \??flsp {FLOWsplit} \??flsp
+\installcorenamespace {flowchart} % \def\??flch{@@flch} % chart
+\installcorenamespace {flowline} % \def\??flln{@@flln} % line
+\installcorenamespace {flowshape} % \def\??flsh{@@flsh} % shape
+\installcorenamespace {flowfocus} % \def\??flfc{@@flfc} % focus
+\installcorenamespace {flowsets} % \def\??flst{@@flst} % sets
+\installcorenamespace {flowsplit} % \def\??flsp{@@flsp} % split
+
+\installsimplecommandhandler \??flowchart {FLOWchart} \??flowchart % maybe just a setuphandler
+\installsimplecommandhandler \??flowline {FLOWline} \??flowline % maybe just a setuphandler
+\installsimplecommandhandler \??flowshape {FLOWshape} \??flowshape % maybe just a setuphandler
+\installsimplecommandhandler \??flowfocus {FLOWfocus} \??flowfocus % maybe just a setuphandler
+\installsimplecommandhandler \??flowsets {FLOWsets} \??flowsets % maybe just a setuphandler
+\installsimplecommandhandler \??flowsplit {FLOWsplit} \??flowsplit % maybe just a setuphandler
\let\setupFLOWcharts\setupFLOWchart
\let\setupFLOWlines \setupFLOWline
@@ -62,6 +62,7 @@
\c!labeloffset=.5\bodyfontsize,
\c!commentoffset=.5\bodyfontsize,
\c!exitoffset=\zeropoint,
+ % \c!split=\v!no,
% \c!maxwidth=,
% \c!maxheight=,
% \c!option=,
@@ -82,7 +83,7 @@
\c!radius=.375\bodyfontsize,
\c!color=FLOWlinecolor,
\c!rulethickness=.15\bodyfontsize,
- \c!offset=\v!none]
+ \c!offset=\zeropoint]
\setupFLOWshapes
[\c!default=action,
@@ -101,13 +102,17 @@
\c!offset=\FLOWshapeparameter\c!offset]
\setupFLOWsplit
- [\c!state=\v!stop,
- \c!marking=\v!on,
+ [\c!dx=0,
+ \c!dy=0,
% \c!command=,
% \c!before=,
% \c!after=,
\c!nx=3,
- \c!ny=3]
+ \c!ny=4]
+
+\unexpanded\def\beforeFLOWsplit{\FLOWsplitparameter\c!before}
+\unexpanded\def\afterFLOWsplit {\FLOWsplitparameter\c!after}
+\unexpanded\def\handleFLOWsplit{\FLOWsplitparameter\c!command}
\definecolor [FLOWfocuscolor] [s=.2]
\definecolor [FLOWlinecolor] [s=.5]
@@ -184,7 +189,7 @@
{\dodoubleempty\module_charts_process}
\def\module_charts_process[#name][#settings]%
- {\vbox\bgroup
+ {\bgroup % \vbox removed
\insidefloattrue
\dontcomplain
\setupFLOWchart[#settings]%
@@ -212,6 +217,7 @@
labeloffset = \number\dimexpr\FLOWchartparameter\c!labeloffset,
commentoffset = \number\dimexpr\FLOWchartparameter\c!commentoffset,
exitoffset = \number\dimexpr\FLOWchartparameter\c!exitoffset,
+ split = "\FLOWchartparameter\c!split",
},
shape = {
rulethickness = \number\dimexpr\FLOWshapeparameter\c!rulethickness,
@@ -236,9 +242,10 @@
set = {
},
split = {
- state = "\FLOWsplitparameter\c!state",
nx = \number\FLOWsplitparameter\c!nx,
ny = \number\FLOWsplitparameter\c!ny,
+ dx = \number\FLOWsplitparameter\c!dx,
+ dy = \number\FLOWsplitparameter\c!dy,
command = "",
marking = "\FLOWsplitparameter\c!marking",
before = "",
@@ -314,11 +321,14 @@
align=,
set=\setups{flowcell:text:place}]
+\def\FLOWx{\getvariable{flowcell:text}{x}} % compatibility (for Willi)
+\def\FLOWy{\getvariable{flowcell:text}{y}} % compatibility (for Willi)
+
% \c!background={\@@FLOWbackground,\FLOWoverlay},
\defineoverlay
- [flowcell:figure]
- [\overlayfigure{\getvariable{flowcell:text}{figure}}]
+ [flowcell:figure]
+ [\overlayfigure{\getvariable{flowcell:text}{figure}}]
\startsetups flowcell:text:place
\begingroup
@@ -387,35 +397,73 @@
\defineframed[flowcell:mb][flowcell:base][\c!bottom=,\c!align=\v!middle]
\defineframed[flowcell:cb][flowcell:base][\c!bottom=,\c!align=\v!middle]
-% todo: each cell its own setup
+% \startsetups flowcell:text:user
+% \setupframed
+% [flowcell:base]
+% [background=flowcell]
+% \definelayer
+% [flowcell]
+% [width=\namedframedparameter{flowcell:base}{width},
+% height=\namedframedparameter{flowcell:base}{height}]
+% \setlayerframed
+% [flowcell]
+% [preset=rightbottom,offset=-2.75ex]
+% [frame=off]
+% {\tx\FLOWx.\FLOWy}
+% \stopsetups
+%
+% % or:
%
-% \startsetups flowcell
-% \definelayer
-% [flowcell]
-% [width=\FLOWshapewidth,
-% height=\FLOWshapeheight]
-% \setlayerframed
-% [flowcell]
-% [preset=rightbottom,offset=1ex]
-% [frame=off]
-% {\tx(\FLOWx,\FLOWy)}
+% \setupframed
+% [flowcell:base]
+% [background={flowcell-1,flowcell-2}]
+%
+% \defineoverlay
+% [flowcell-1]
+% [\directsetup{flowcell-1}]
+%
+% \definelayer
+% [flowcell-2]
+% [width=\overlaywidth,
+% height=\overlayheight]
+%
+% \startsetups flowcell-1
+% \setlayerframed
+% [flowcell-2]
+% [preset=rightbottom,offset=-2.75ex]
+% [frame=off]
+% {\tx\FLOWx.\FLOWy}
% \stopsetups
-%D \starttyping
-%D \setupFLOWsplit
-%D [nx=5,ny=10,
-%D dx=0,dy=0,
-%D before=,
-%D after=\page]
-%D
-%D \FLOWcharts[mybigflow]
-%D \stoptyping
-%D
-%D \starttyping
-%D \splitfloat
-%D {\placefigure{What a big flowchart this is!}}
-%D {\FLOWcharts[mybigflow]}
-%D \stoptyping
+% %D \starttyping
+% %D \setupFLOWsplit
+% %D [nx=5,ny=10,
+% %D dx=0,dy=0,
+% %D before=,
+% %D after=\page]
+% %D
+% %D \FLOWcharts[mybigflow]
+% %D \stoptyping
+% %D
+% %D \starttyping
+% %D \splitfloat
+% %D {\placefigure{What a big flowchart this is!}}
+% %D {\FLOWcharts[mybigflow]}
+% %D \stoptyping
+
+% \setupFLOWsplit
+% [nx=5,
+% ny=8,
+% dx=1,
+% dy=1,
+% command=\framed,
+% before=\page,
+% after=\page]
+%
+% \FLOWchart[demo] \page
+% \FLOWchart[demo][split=yes] \page
+% \FLOWchart[demo][x=1,y=1,nx=5,ny=8] \page
+% \FLOWchart[demo][x=1,y=9,nx=5,ny=10] \page
\protect
diff --git a/Master/texmf-dist/tex/context/base/m-database.lua b/Master/texmf-dist/tex/context/base/m-database.lua
index b9ec3aa3691..47854daa023 100644
--- a/Master/texmf-dist/tex/context/base/m-database.lua
+++ b/Master/texmf-dist/tex/context/base/m-database.lua
@@ -10,6 +10,7 @@ local sub, gmatch, format = string.sub, string.gmatch, string.format
local concat = table.concat
local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
+local stripstring = string.strip
-- One also needs to enable context.trace; here we only plug in some code (maybe
-- some day this tracker will also toggle the main context tracer).
@@ -20,12 +21,19 @@ local report_database = logs.reporter("database")
buffers.database = buffers.database or { }
+local l_tab = lpegpatterns.tab
+local l_space = lpegpatterns.space
+local l_comma = lpegpatterns.comma
+local l_empty = lpegS("\t\n\r ")^0 * lpegP(-1)
+
+local v_yes = interfaces.variables.yes
+
local separators = { -- not interfaced
- tab = lpegpatterns.tab,
- tabs = lpegpatterns.tab^1,
- comma = lpegpatterns.comma,
- space = lpegpatterns.space,
- spaces = lpegpatterns.space^1,
+ tab = l_tab,
+ tabs = l_tab^1,
+ comma = l_comma,
+ space = l_space,
+ spaces = l_space^1,
}
function buffers.database.process(settings)
@@ -46,6 +54,7 @@ function buffers.database.process(settings)
local first, last = settings.first or "", settings.last or ""
local left, right = settings.left or "", settings.right or ""
local setups = settings.setups or ""
+ local strip = settings.strip == v_yes or false
local command = settings.command
separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar
local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar
@@ -54,7 +63,7 @@ function buffers.database.process(settings)
local quotedata = nil
for chr in gmatch(quotechar,".") do
local quotechar = lpegP(chr)
- local quoteword = quotechar * lpeg.C((1 - quotechar)^0) * quotechar
+ local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0
if quotedata then
quotedata = quotedata + quoteword
else
@@ -63,12 +72,12 @@ function buffers.database.process(settings)
end
whatever = quotedata + whatever
end
- local checker = commentchar ~= "" and lpeg.S(commentchar)
+ local checker = commentchar ~= "" and lpegS(commentchar)
local splitter = lpegCt(whatever * (separator * whatever)^0)
local found = false
for i=1,#data do
local line = data[i]
- if line ~= "" and (not checker or not lpegmatch(checker,line)) then
+ if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then
local list = lpegmatch(splitter,line)
if not found then
if setups ~= "" then
@@ -82,13 +91,14 @@ function buffers.database.process(settings)
local result, r = { }, 0
r = r + 1 ; result[r] = first
for j=1,#list do
+ local str = strip and stripstring(list[j]) or list[j]
r = r + 1 ; result[r] = left
if command == "" then
- r = r + 1 ; result[r] = list[j]
+ r = r + 1 ; result[r] = str
else
r = r + 1 ; result[r] = command
r = r + 1 ; result[r] = "{"
- r = r + 1 ; result[r] = list[j]
+ r = r + 1 ; result[r] = str
r = r + 1 ; result[r] = "}"
end
r = r + 1 ; result[r] = right
@@ -98,12 +108,13 @@ function buffers.database.process(settings)
else
context(first)
for j=1,#list do
+ local str = strip and stripstring(list[j]) or list[j]
context(left)
if command == "" then
- context(list[j])
+ context(str)
else
context(command)
- context(false,list[j])
+ context(false,str)
end
context(right)
end
diff --git a/Master/texmf-dist/tex/context/base/m-database.mkiv b/Master/texmf-dist/tex/context/base/m-database.mkiv
index cef0aa81554..0285d3bcd1b 100644
--- a/Master/texmf-dist/tex/context/base/m-database.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-database.mkiv
@@ -17,11 +17,6 @@
\unprotect
-\def\c!first {first}
-\def\c!last {last}
-\def\c!quotechar {quotechar}
-\def\c!commentchar {commentchar}
-
\definenamespace
[db]
[type=module,
@@ -36,6 +31,7 @@
[\c!separator={,},
\c!quotechar=,
\c!commentchar=,
+ \c!strip=\v!no,
\c!before=,
\c!after=,
\c!first=,
@@ -48,7 +44,7 @@
\setupdatabase
[\c!separator={,}]
-\def\doprocessdatabase#1#2#3%
+\def\module_database_process#1#2#3%
{\edef\currentdatabasetype{#1}%
\edef\currentdatabase {#2}%
\edef\currentdatabasename{#3}%
@@ -60,6 +56,7 @@
name = "\currentdatabase",
type = "\currentdatabasetype",
database = "\currentdatabasename",
+ strip = "\databaseparameter\c!strip",
separator = \!!bs\databaseparameter\c!separator \!!es,
quotechar = \!!bs\databaseparameter\c!quotechar \!!es,
commentchar = \!!bs\databaseparameter\c!commentchar\!!es,
@@ -73,32 +70,34 @@
command = \!!bs\databaseparameter\c!command \!!es,
}}}
-\unexpanded\def\processdatabasebuffer{\dodoubleempty\doprocessdatabasebuffer}
-\unexpanded\def\processdatabasefile {\dodoubleempty\doprocessdatabasefile }
+\unexpanded\def\processdatabasebuffer{\dodoubleempty\module_database_process_buffer}
+\unexpanded\def\processdatabasefile {\dodoubleempty\module_database_process_file}
-\def\doprocessdatabasebuffer[#1][#2]{\doprocessdatabase{buffer}{#1}{#2}}
-\def\doprocessdatabasefile [#1][#2]{\doprocessdatabase{file} {#1}{#2}}
+\def\module_database_process_buffer[#1][#2]{\module_database_process{buffer}{#1}{#2}}
+\def\module_database_process_file [#1][#2]{\module_database_process{file} {#1}{#2}}
% for old times sake:
-\def\defineseparatedlist {\dodoubleempty\dodefineseparatedlist }
-\def\processseparatedfile{\dodoubleempty\doprocessseparatedfile}
+\unexpanded\def\defineseparatedlist {\dodoubleempty\module_database_separated_list_define}
+\unexpanded\def\processseparatedfile{\dodoubleempty\module_database_separated_list_process}
-\def\dodefineseparatedlist[#1][#2]%
+\def\module_database_separated_list_define[#1][#2]%
{\definedatabase[#1][#2]%
\setuvalue{\e!start#1}{\grabbufferdatadirect{#1}{\e!start#1}{\e!stop#1}}%
\setuvalue{\e!stop#1}{\processdatabasebuffer[#1][#1]}}
-\def\processseparatedfile[#1][#2]%
+\def\module_database_separated_list_process[#1][#2]%
{\processdatabasefile[#1][#2]}
-\def\startseparatedlist[#1]% to be interfaced
- {\def\stopseparatedlist{\processdatabasebuffer[#1][#1]}%
+\unexpanded\def\startseparatedlist[#1]% to be interfaced
+ {\unexpanded\def\stopseparatedlist{\processdatabasebuffer[#1][#1]}%
\grabbufferdatadirect{#1}{startseparatedlist}{stopseparatedlist}}
+\let\setupseparatedlist\setupdatabase
+
\protect
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{m-database.mkiv}
\starttext
diff --git a/Master/texmf-dist/tex/context/base/m-graph.mkiv b/Master/texmf-dist/tex/context/base/m-graph.mkiv
index db72927fbb4..25933d9ebae 100644
--- a/Master/texmf-dist/tex/context/base/m-graph.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-graph.mkiv
@@ -15,106 +15,99 @@
% are limited by what mp can do. We support @ as replacement for
% the percent symbol. We also add a specifier when none is given.
+\unprotect
+
\startluacode
- local function strip(s)
- return "\\times10^{"..(s:gsub("%+*0*","")).."}"
+ local format, gsub, find, match = string.format, string.gsub, string.find, string.match
+
+ local simplify = true
+
+ local function strip(n,e)
+ -- get rid of e(0)
+ -- get rid of e(+*)
+ e = gsub(e,"^+","")
+ -- remove leading zeros
+ e = gsub(e,"^([+-]*)0+(%d)","%1%2")
+ if not simplify then
+ -- take it as it is
+ elseif n == "1" then
+ return format("10^{%s}",e)
+ end
+ return format("%s\\times10^{%s}",n,e)
end
+
function metapost.format_n(fmt,str)
- fmt = fmt:gsub("@","%%")
- local initial, format, final = fmt:match("^(.-)(%%.-[%a])(.-)$")
- if format then
- str = fmt:format(str)
- str = str:gsub("e(.-)$",strip)
- str = ("%s\\mathematics{%s}%s"):format(initial,str,final)
- elseif not fmt:find("%%") then
- str = ("%"..fmt):format(str)
- str = str:gsub("e(.-)$",strip)
- str = ("\\mathematics{%s}"):format(str)
+ fmt = gsub(fmt,"@","%%")
+ local initial, hasformat, final = match(fmt,"^(.-)(%%.-[%a])(.-)$")
+ if hasformat then
+ str = format(fmt,str)
+ str = gsub(str,"(.-)e(.-)$",strip)
+ str = format("%s\\mathematics{%s}%s",initial,str,final)
+ elseif not find(fmt,"%%") then
+ str = format("%"..fmt,str)
+ str = gsub(str,"(.-)e(.-)$",strip)
+ str = format("\\mathematics{%s}",str)
end
context(str)
end
\stopluacode
-\unexpanded\long\def\MPgraphformat#1#2{\ctxlua{metapost.format_n("#1","#2")}}
+\unexpanded\def\MPgraphformat#1#2{\ctxlua{metapost.format_n("#1","#2")}}
% We could also delegate parsing using lower level plugins.
-\startMPinclusions
- % input string ;
- % input marith ;
- input graph.mp ;
-
- vardef roundd(expr x, d) =
- if abs d > 4 :
- if d > 0 :
- x
- else :
- 0
- fi
- elseif d > 0 :
- save i ; i = floor x ;
- i + round(Ten_to[d]*(x-i))/Ten_to[d]
- else :
- round(x/Ten_to[-d])*Ten_to[-d]
- fi
- enddef ;
-
- Ten_to0 = 1 ;
- Ten_to1 = 10 ;
- Ten_to2 = 100 ;
- Ten_to3 = 1000 ;
- Ten_to4 = 10000 ;
-
- def sFe_base =
- enddef ;
-
- picture Fe_plus ; Fe_plus := btex + etex ;
-
- vardef format (expr f,x) = dofmt_.Feform_(f,x) enddef ;
- vardef Mformat (expr f,x) = dofmt_.Meform (f,x) enddef ;
- vardef formatstr (expr f,x) = dofmt_.Feform_(f,x) enddef ;
- vardef Mformatstr(expr f,x) = dofmt_.Meform(f,x) enddef ;
-
- vardef escaped_format(expr s) =
- "" for n=1 upto length(s) : &
- if ASCII substring (n,n+1) of s = 37 :
- "@"
- else :
- substring (n,n+1) of s
- fi
- endfor
- enddef ;
-
- vardef dofmt_@\#(expr f, x) =
- textext("\MPgraphformat{" & escaped_format(f) & "}{" & (if string x : x else: decimal x fi) & "}")
- enddef ;
-
- % vardef format(expr f, x) =
- % textext("\MPgraphformatN{"&escaped_format(f)&"}{"&(if string x : x else: decimal x fi)&"}")
- % enddef;
- % vardef Mformat(expr f, x) =
- % format(f,x)
- % enddef;
-
-\stopMPinclusions
+\defineMPinstance
+ [graph]
+ [\s!format=metafun,
+ \s!extensions=\v!yes,
+ \s!initializations=\v!yes,
+ \c!method=\s!double]
+
+\startMPdefinitions{graph}
+ if unknown context_grap: input "mp-grap.mpiv" ; fi ;
+\stopMPdefinitions
+
+% For backwards compatibility (for the moment), also load the graph macros in
+% the standard MP instance (scaled integer):
+
+\startMPdefinitions
+ if unknown context_grap: input "mp-grap.mpiv" ; fi ;
+\stopMPdefinitions
+
+\protect
\continueifinputfile{m-graph.mkiv}
\starttext
-\startMPpage
-draw begingraph(3in,2in);
- gdraw "t:/metapost/grphdata/agepop91.d";
-endgraph;
+\startMPpage[instance=graph]
+ label(format("@g","1e-8"), (0, 0)) ;
+ label(format("@g","1e+8"), (2cm, 0)) ;
+ label(format("@g","1e-10"), (0, -0.5cm)) ;
+ label(format("@g","1e+10"), (2cm,-0.5cm)) ;
+ label(format("@g","1e-12"), (0, -1.0cm)) ;
+ label(format("@g","1e+12"), (2cm,-1.0cm)) ;
+ label(format("@g","1e-0"), (0, -1.5cm)) ;
+ label(format("@g","1e+0"), (2cm,-1.5cm)) ;
+ label(format("@g","1"), (0, -2.0cm)) ;
+ label(format("@g","1"), (2cm,-2.0cm)) ;
+ label(format("@g","1e-102"),(0, -2.5cm)) ;
+ label(format("@g","1e+102"),(2cm,-2.5cm)) ;
\stopMPpage
-% \startMPpage
+% \startMPpage[instance=graph]
+% draw begingraph(3in,2in);
+% gdraw "t:/metapost/grphdata/agepop91.d";
+% endgraph;
+% \stopMPpage
+
+% \startMPpage[instance=graph]
% draw begingraph(3in,2in);
% gdraw "agepop91.d" plot btex $\bullet$ etex;
% endgraph;
% \stopMPpage
-% \startMPpage
+% \startMPpage[instance=graph]
% draw begingraph(3in,2in);
% glabel.lft(btex \vbox{\hbox{Population} \hbox{in millions}} etex, OUT);
% glabel.bot(btex Age in years etex, OUT);
@@ -122,7 +115,7 @@ endgraph;
% endgraph;
% \stopMPpage
-% \startMPpage
+% \startMPpage[instance=graph]
% draw begingraph(3in,2in);
% glabel.lft(btex \vbox{\hbox{Population} \hbox{in millions}} etex, OUT);
% glabel.bot(btex Age in years etex, OUT);
@@ -131,7 +124,7 @@ endgraph;
% endgraph;
% \stopMPpage
-% \startMPpage
+% \startMPpage[instance=graph]
% draw begingraph(2.3in,2in);
% setcoords(log,log);
% glabel.lft(btex Seconds etex,OUT);
@@ -144,7 +137,7 @@ endgraph;
% endgraph;
% \stopMPpage
-% \startMPpage
+% \startMPpage[instance=graph]
% draw begingraph(6.5cm,4.5cm);
% setrange(80,0, 90,whatever);
% glabel.bot(btex Year etex, OUT);
diff --git a/Master/texmf-dist/tex/context/base/m-ipsum.mkiv b/Master/texmf-dist/tex/context/base/m-ipsum.mkiv
new file mode 100644
index 00000000000..1c5901d8696
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-ipsum.mkiv
@@ -0,0 +1,198 @@
+%D \module
+%D [ file=m-ipsum,
+%D version=2012.07.19,
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=Ipsum,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D After some discussion on the mailing list I made this example of
+%D an implementation. Of course there can be alternatives as it's a
+%D nice exercise in module writing.
+
+\startluacode
+
+local patterns = lpeg.patterns
+
+local variables = interfaces.variables
+local v_random = variables.random
+local v_paragraph = variables.paragraph
+
+local lowercase = characters.lower
+
+local ipsum = { }
+moduledata.ipsum = ipsum
+
+local data = { }
+
+local function getfiledata(settings)
+ local filename = settings.filename or ""
+ local filedata = data[filename]
+ if not filedata then
+ local text = resolvers.loadtexfile(filename) or ""
+ local paragraphs = lpeg.match(patterns.paragraphs,text) or { }
+ local sentences = lpeg.match(patterns.sentences, text) or { }
+ local words = lpeg.match(patterns.words, text) or { }
+ for i=1,#words do
+ words[i] = lowercase(words[i])
+ end
+ filedata = {
+ -- [variables.paragraphs] = paragraphs,
+ [variables.paragraph] = paragraphs,
+ [variables.lines] = sentences,
+ [variables.line] = sentences,
+ [variables.words] = words,
+ [variables.word] = words,
+ }
+ -- inspect(filedata)
+ data[filename] = filedata
+ end
+ local d = filedata[settings.alternative or v_paragraph] or filedata[v_paragraph] or { }
+ local nd = #d
+ local n = settings.n
+ if n ~= v_random then
+ n = tonumber(n) or 0
+ if n == 0 then
+ n = nd
+ end
+ end
+ return d, n, nd
+end
+
+function moduledata.ipsum.typeset(settings)
+ local d, n, nd = getfiledata(settings)
+ if nd > 0 then
+ context(settings.before)
+ if n == v_random then
+ context(settings.left)
+ context(d[math.random(1,nd)])
+ context(settings.right)
+ else
+ for i=1,n do
+ context(settings.left)
+ context(d[i])
+ context(settings.right)
+ if i < n then
+ context(settings.inbetween)
+ end
+ end
+ end
+ context(settings.after)
+ end
+end
+
+function moduledata.ipsum.direct(settings)
+ local d, n, nd = getfiledata(settings)
+ if nd == 0 then
+ -- nothing
+ elseif n == v_random then
+ context(d[math.random(1,nd)])
+ else
+ for i=1,n do
+ context(d[i])
+ if i < n then
+ context(settings.separator)
+ end
+ end
+ end
+end
+
+\stopluacode
+
+\unprotect
+
+\installnamespace {ipsum}
+
+\installcommandhandler \????ipsum {ipsum} \????ipsum
+
+\setupipsum
+ [\c!file=lorem,
+ \c!alternative=\v!paragraph,
+ %\c!language=,
+ %\c!style=,
+ %\c!color=,
+ \c!n=0,
+ \c!left=,
+ \c!right=,
+ \c!before=,
+ \c!after=,
+ \c!separator=,
+ \c!inbetween=]
+
+\installactionhandler{ipsum} % grouped
+
+\startsetups[handler:action:ipsum]
+ \useipsumstyleandcolor\c!style\c!color
+ \uselanguageparameter\ipsumparameter
+ \ctxlua{moduledata.ipsum.typeset {
+ alternative = "\ipsumparameter\c!alternative",
+ filename = "\ipsumparameter\c!file",
+ n = "\ipsumparameter\c!n",
+ left = "\luaescapestring{\ipsumparameter\c!left}",
+ right = "\luaescapestring{\ipsumparameter\c!right}",
+ before = "\luaescapestring{\ipsumparameter\c!before}",
+ after = "\luaescapestring{\ipsumparameter\c!after}",
+ inbetween = "\luaescapestring{\ipsumparameter\c!inbetween}",
+ }}
+\stopsetups
+
+\def\directipsum#1% only one argument, expanded
+ {\ctxlua{moduledata.ipsum.direct {
+ alternative = "\namedipsumparameter{#1}\c!alternative",
+ filename = "\namedipsumparameter{#1}\c!file",
+ n = "\namedipsumparameter{#1}\c!n",
+ separator = "\luaescapestring{\namedipsumparameter{#1}\c!separator}",
+ }}
+}
+
+\protect
+
+\continueifinputfile{m-ipsum.mkiv}
+
+\setupbodyfont[dejavu,11pt]
+
+\starttext
+
+ \ipsum[alternative=paragraph,before=\blank,after=\blank,language=la]
+
+ \ipsum[alternative=lines,n=2,right=\par,before=\blank,after=\blank,language=la]
+
+ \ipsum[alternative=lines,n=random,before=\blank,after=\blank,language=la]
+
+ \ipsum[alternative=lines,before=\startitemize,after=\stopitemize,left=\startitem,right=\stopitem,language=la]
+
+ \ipsum[alternative=words,left=(,right=),inbetween=\space,language=la]
+
+ \page
+
+ \defineipsum
+ [ward]
+ [file=ward,
+ before=\blank,
+ after=\blank]
+
+ \defineipsum
+ [ward:itemize]
+ [ward]
+ [alternative=lines,
+ before={\startitemize[packed]},
+ after=\stopitemize,
+ left=\startitem,
+ right=\stopitem]
+
+ \defineipsum
+ [ward:title]
+ [ward]
+ [alternative=lines,
+ n=random]
+
+ \subject{\directipsum{ward:title}}
+
+ \ipsum[ward]
+ \ipsum[ward:itemize]
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/m-json.mkiv b/Master/texmf-dist/tex/context/base/m-json.mkiv
new file mode 100644
index 00000000000..329aa0f316c
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-json.mkiv
@@ -0,0 +1,30 @@
+%D \module
+%D [ file=m-json,
+%D version=2012.08.03,
+%D title=\CONTEXT\ Modules,
+%D subtitle=Json,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This module is a side effect of looking into json. Currently there are
+%D only a few helpers:
+%D
+%D \starttyping
+%D moduledata.json.tolua (str)
+%D moduledata.json.tostring(val)
+%D \stoptyping
+%D
+%D Nothing spectacular but maybe handy to have around.
+
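For instance, a minimal sketch of how the two helpers might be called from a \startluacode block; the behaviour is assumed from the names (tolua: JSON string to Lua table, tostring: Lua value back to JSON) and the sample data and field names are made up:

    local sample = [[{ "title" : "test", "count" : 2 }]]
    local parsed = moduledata.json.tolua(sample)      -- assumed: JSON string -> Lua table
    if parsed then
        context(parsed.title)                         -- typeset one field
    end
    inspect(moduledata.json.tostring(parsed))         -- assumed: Lua value -> JSON string, shown on the console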
+\startmodule [json]
+
+% check for: utilities.json
+
+\registerctxluafile{util-jsn}{}
+
+\stopmodule
diff --git a/Master/texmf-dist/tex/context/base/m-morse.mkvi b/Master/texmf-dist/tex/context/base/m-morse.mkvi
index aba83bb2d8a..a2c20dff745 100644
--- a/Master/texmf-dist/tex/context/base/m-morse.mkvi
+++ b/Master/texmf-dist/tex/context/base/m-morse.mkvi
@@ -100,16 +100,22 @@ local codes = {
morse.codes = codes
-setmetatable(codes, { __index = function(t,k)
+local fallbackself = false
+
+local function codefallback(t,k)
if k then
local u = ucchars[k]
local v = rawget(t,u) or rawget(t,shchars[u]) or false
t[k] = v
return v
+ elseif fallbackself then
+ return k
else
return false
end
-end })
+end
+
+table.setmetatableindex(codes,codefallback)
local MorseBetweenWords = context.MorseBetweenWords
local MorseBetweenCharacters = context.MorseBetweenCharacters
@@ -228,16 +234,19 @@ end
\def\MorseSpace {\hskip7\dimexpr\MorseWidth\relax}
\def\MorseUnknown #text{[\detokenize{#text}]}
-\def\MorseCode #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es,true)}}
-\def\MorseString #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es)}}
-\def\MorseFile #text{\ctxlua{moduledata.morse.filetomorse("#text")}}
-\def\MorseTable {\ctxlua{moduledata.morse.showtable()}}
+\unexpanded\def\MorseCode #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es,true)}}
+\unexpanded\def\MorseString #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es)}}
+\unexpanded\def\MorseFile #text{\ctxlua{moduledata.morse.filetomorse("#text")}}
+\unexpanded\def\MorseTable {\ctxlua{moduledata.morse.showtable()}}
+
+\let\Morse \MorseString
-\let\Morse \MorseString
+%def\MorseShort {·}
+%def\MorseLong {—}
\protect
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{m-morse.mkvi}
\starttext
diff --git a/Master/texmf-dist/tex/context/base/m-nodechart.mkvi b/Master/texmf-dist/tex/context/base/m-nodechart.mkvi
new file mode 100644
index 00000000000..359d598ce62
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-nodechart.mkvi
@@ -0,0 +1,257 @@
+\usemodule[chart]
+
+\startluacode
+
+local format = string.format
+local points = number.nopts
+local ptfactor = number.dimenfactors.pt
+
+local nodecodes = nodes.nodecodes
+local kerncodes = nodes.kerncodes
+local penaltycodes = nodes.penaltycodes
+local gluecodes = nodes.gluecodes
+local whatsitcodes = nodes.whatsitcodes
+
+local formatters = { }
+
+function formatters.glyph(n,comment)
+ -- subtype font char lang left right uchyph components xoffset yoffset width height depth
+ return format("\\doFLOWglyphnode{%s}{%s}{%s}{%s}{U+%05X}",comment,n.subtype,n.font,n.char,n.char)
+end
+
+function formatters.disc(n,comment)
+ -- pre post replace
+ return format("\\doFLOWdiscnode{%s}{%s}",comment,n.subtype)
+end
+
+function formatters.kern(n,comment)
+ -- subtype kern
+ -- return format("\\doFLOWkernnode{%s}{%s}{%s}",comment,kerncodes[n.subtype],points(n.kern))
+ return format("\\doFLOWkernnode{%s}{%s}{%.4f}",comment,kerncodes[n.subtype],n.kern*ptfactor)
+end
+
+function formatters.penalty(n,comment)
+ -- subtype penalty
+ return format("\\doFLOWpenaltynode{%s}{%s}{%s}",comment,"penalty",n.penalty)
+end
+
+function formatters.glue(n,comment)
+ -- subtype width leader spec (stretch shrink ...
+ local s = n.spec
+ -- return format("\\doFLOWgluenode{%s}{%s}{%s}{%s}{%s}",comment,gluecodes[n.subtype],points(s.width),points(s.stretch),points(s.shrink))
+ return format("\\doFLOWgluenode{%s}{%s}{%.4f}{%.4f}{%.4f}",comment,gluecodes[n.subtype],s.width*ptfactor,s.stretch*ptfactor,s.shrink*ptfactor)
+end
+
+function formatters.whatsit(n,comment)
+ -- subtype (dir and localpar whatsits carry a dir field)
+ local subtype = n.subtype
+ local whatsit = whatsitcodes[subtype]
+ if whatsit == "dir" or whatsit == "localpar" then
+ return format("\\doFLOWdirnode{%s}{%s}{%s}",comment,whatsit,n.dir)
+ else
+ return nodecodes[n.id]
+ end
+end
+
+local shapes = { -- I will make a dedicated set of shapes for this.
+ glyph = "procedure",
+ disc = "procedure",
+ kern = "action",
+ penalty = "action",
+ glue = "action",
+}
+
+local function flow_nodes_to_chart(head,comment,x,y,how)
+ local current = head
+ while current do
+ local nodecode = nodecodes[current.id]
+ local formatter = formatters[nodecode]
+ local shape = shapes[nodecode]
+ y = y + 1
+ local next = current.next
+ commands.flow_start_cell { shape = { framecolor = "nodechart:" .. nodecode } }
+ commands.flow_set_name(tostring(current))
+ commands.flow_set_location(x,y)
+ if shape then
+ commands.flow_set_shape(shape)
+ end
+ if formatter then
+ commands.flow_set_text("node",formatter(current,comment))
+ else
+ commands.flow_set_text("node",nodecode)
+ end
+ if next then
+ commands.flow_set_connection("bt","",tostring(next))
+ end
+ if nodecode == "glyph" then
+ local components = current.components
+ if components then
+ commands.flow_set_connection("rl","",tostring(components))
+ commands.flow_stop_cell()
+ n = flow_nodes_to_chart(components,"component",x+2,y-1)
+ else
+ commands.flow_stop_cell()
+ end
+ elseif nodecode == "disc" then
+ local pre = current.pre
+ local pos = current.post
+ local rep = current.replace
+ if pre and not rep and not pos then
+ if pre then
+ commands.flow_set_connection("rl","",tostring(pre))
+ end
+ commands.flow_stop_cell()
+ if pre then
+ n = flow_nodes_to_chart(pre,"prebreak",x+1,y-1)
+ end
+ else
+ if pre then
+ commands.flow_set_connection("+rl","",tostring(pre))
+ end
+ if rep then
+ commands.flow_set_connection("rl","",tostring(rep))
+ end
+ if pos then
+ commands.flow_set_connection("-rl","",tostring(pos))
+ end
+ commands.flow_stop_cell()
+ if pre then
+ n = flow_nodes_to_chart(pre,"prebreak",x+1,y-2)
+ end
+ if rep then
+ n = flow_nodes_to_chart(rep,"replacement",x+1,y-1)
+ end
+ if pos then
+ n = flow_nodes_to_chart(pos,"postbreak",x+1,y)
+ end
+ end
+ elseif nodecode == "hlist" then
+ local list = current.list
+ if list then
+ commands.flow_set_connection("rl","",tostring(list))
+ commands.flow_stop_cell()
+ n = flow_nodes_to_chart(list,"list",x+2,y-1)
+ else
+ commands.flow_stop_cell()
+ end
+ else
+ commands.flow_stop_cell()
+ end
+ current = next
+ end
+ return n
+end
+
+function commands.flow_nodes_to_chart(name,head,max)
+ commands.flow_start_chart(name)
+ flow_nodes_to_chart(head,"",1,0)
+ commands.flow_stop_chart()
+end
+
+\stopluacode
+
+\unprotect
+
+\def\enspaceminus{\hskip.5em minus .25em\relax}
+
+\starttexdefinition unexpanded doFLOWglyphnode #comment #subtype #font #char #unicode
+ \dontleavehmode\hbox{\bf\setstrut\strut \doifsomething{#comment}{#comment\enspaceminus:\enspaceminus}glyph #subtype}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut font\enspaceminus#font:\enspace#unicode:\enspaceminus\setfontofid{#font}\char#char}
+\stoptexdefinition
+
+\starttexdefinition unexpanded doFLOWdiscnode #comment #subtype
+ \dontleavehmode\hbox{\bf\setstrut\strut disc}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut}
+\stoptexdefinition
+
+\starttexdefinition unexpanded doFLOWkernnode #comment #subtype #kern
+ \dontleavehmode\hbox{\bf\setstrut\strut#subtype}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut#kern}
+\stoptexdefinition
+
+\starttexdefinition unexpanded doFLOWpenaltynode #comment #subtype #penalty
+ \dontleavehmode\hbox{\bf\setstrut\strut#subtype}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut#penalty}
+\stoptexdefinition
+
+\starttexdefinition unexpanded doFLOWgluenode #comment #subtype #width #shrink #stretch
+ \dontleavehmode\hbox{\bf\setstrut\strut#subtype}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut#width\enspaceminus-\enspaceminus#shrink\enspaceminus+\enspaceminus#stretch}
+\stoptexdefinition
+
+\starttexdefinition unexpanded doFLOWdirnode #comment #subtype #direction
+ \dontleavehmode\hbox{\bf\setstrut\strut#subtype}
+ \vss
+ \dontleavehmode\hbox{\tx\setstrut\strut#direction}
+\stoptexdefinition
+
+\defineframed
+ [flowcell:node]
+ [flowcell:base]
+ [\c!top=\vss,
+ \c!bottom=\vss,
+ \c!align=\v!middle,
+ \c!foregroundstyle=\tt]
+
+% this is a temporary interface ... we will have instances and optional settings
+
+\unexpanded\def\boxtoFLOWchart#name#max#box%
+ {\ctxcommand{flow_nodes_to_chart("#name",tex.box[\number#box].list,\number#max)}}
+
+\unexpanded\def\nextboxtoFLOWchart#name#max%
+ {\dowithnextbox{\boxtoFLOWchart{#name}{#max}\nextbox}}
+
+\unexpanded\def\hboxtoFLOWchart#name#max%
+ {\nextboxtoFLOWchart{#name}{#max}\hbox}
+
+\unexpanded\def\vboxtoFLOWchart#name#max%
+ {\nextboxtoFLOWchart{#name}{#max}\vbox}
+
+\protect
+
+\continueifinputfile{m-nodechart.mkvi}
+
+\definecolor[nodechart:glyph][darkred]
+
+\setupbodyfont[dejavu,10pt]
+
+\starttext
+
+\startTEXpage[offset=10pt]
+
+ \hboxtoFLOWchart{dummy}{3}{an affil\discretionary{-}{-}{!}iation}
+
+ \FLOWchart[dummy][width=14em,height=3em,dx=1em,dy=.75em,hcompact=yes]
+
+\stopTEXpage
+
+\startTEXpage[offset=10pt]
+
+ \hboxtoFLOWchart{dummy}{3}{an affiliation}
+
+ \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
+
+\stopTEXpage
+
+\startTEXpage[offset=10pt]
+
+ \hboxtoFLOWchart{dummy}{3}{\nl effe fijn fietsen}
+
+ \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
+
+\stopTEXpage
+
+\startTEXpage[offset=10pt]
+
+ \hboxtoFLOWchart{dummy}{3}{\righttoleft t\kern 1pt est}
+
+ \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
+
+\stopTEXpage
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/supp-fun.mkiv b/Master/texmf-dist/tex/context/base/m-oldfun.mkiv
index 33bee0306d1..1c5a1d29d41 100644
--- a/Master/texmf-dist/tex/context/base/supp-fun.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-oldfun.mkiv
@@ -1,5 +1,5 @@
%D \module
-%D [ file=supp-fun,
+%D [ file=m-oldfun, % was: supp-fun
%D version=1995.10.10,
%D title=\CONTEXT\ Support Macros,
%D subtitle=Fun Stuff,
@@ -119,7 +119,7 @@
\let\globaldropcaps\global % will be an option, but on by default
-\def\localdropcaps{\let\globaldropcaps\relax}
+\unexpanded\def\localdropcaps{\let\globaldropcaps\relax}
\chardef\DroppedStatus = 0 % 0=done 1=starting 2=doing 3=error
\chardef\DropMode = 0 % 1 == marginhang
@@ -128,7 +128,7 @@
\let\keeplinestogether\gobbleoneargument
\fi
-\def\DroppedCaps#1#2#3#4#5#6#7% does not yet handle accented chars
+\unexpanded\def\DroppedCaps#1#2#3#4#5#6#7% does not yet handle accented chars
{\defconvertedargument\asciia{#7}%
\defconvertedcommand \asciib{\DroppedString}%
\doifinstringelse\asciia\asciib
@@ -222,10 +222,10 @@
\newcount\lastprevgraf
\newcount\droppedlines
-\def\CheckDroppedCaps
+\unexpanded\def\CheckDroppedCaps
{\global\lastprevgraf\prevgraf}
-\def\AutoDroppedCaps % will be proper core stuff since it
+\unexpanded\def\AutoDroppedCaps % will be proper core stuff since it
{\globaldropcaps\chardef\DroppedStatus\plusone
\global\lastprevgraf\zerocount
\global\droppedlines\zerocount
@@ -234,10 +234,10 @@
\let\AutoDroppedNext\relax
\ifx\AutoDroppedCapsCommand\undefined
- \def\AutoDroppedCapsCommand{\NiceDroppedCaps{}{SerifBold}{.125em}{3}}
+ \unexpanded\def\AutoDroppedCapsCommand{\NiceDroppedCaps{}{SerifBold}{.125em}{3}}
\fi
-\def\doAutoDroppedCaps
+\unexpanded\def\doAutoDroppedCaps
{\ifcase\DroppedStatus % done
\let\next\relax
\or % starting
@@ -287,7 +287,7 @@
%D the second command scales the font to a nice 2.5 times the
%D line height, a value that gives a pleasant grayness.
-\def\DoLineDroppedCaps#1#2#3#4#5% compensation command font offset lines
+\unexpanded\def\DoLineDroppedCaps#1#2#3#4#5% compensation command font offset lines
{\scratchcounter#5%
\advance\scratchcounter \minusone
\scratchdimen\scratchcounter\baselineskip
@@ -296,10 +296,10 @@
\DroppedCaps{#2}{#3}\TheNormalizedFontSize{#4}
{\scratchcounter\baselineskip}{#5}}
-\def\LineDroppedCaps% command font offset lines
+\unexpanded\def\LineDroppedCaps% command font offset lines
{\DoLineDroppedCaps{\strutht}}
-\def\NiceDroppedCaps% command font offset lines
+\unexpanded\def\NiceDroppedCaps% command font offset lines
{\DoLineDroppedCaps{.5\baselineskip}}
%D \macros
@@ -337,7 +337,7 @@
%D with that. A workaround is rather trivial but obscures the
%D principles used.
-\def\TreatFirstLine#1#2#3#4% before, after, first, next
+\unexpanded\def\TreatFirstLine#1#2#3#4% before, after, first, next
{\leavevmode
\bgroup
\forgetall
@@ -428,7 +428,7 @@
%D \getbuffer complete we also offer a very simple one
%D character alternative, that is not that hard to understand:
-\def\TreatFirstCharacter#1#2% command, character
+\unexpanded\def\TreatFirstCharacter#1#2% command, character
{{#1{#2}}}
%D A previous paragraph started with:
@@ -440,7 +440,7 @@
%D
%D The next hack deals with vertical stacking.
-\def\StackCharacters#1#2#3#4% sequence vsize vskip command
+\unexpanded\def\StackCharacters#1#2#3#4% sequence vsize vskip command
{\vbox #2
{\forgetall
\baselineskip\zeropoint
@@ -531,10 +531,10 @@
%D
%D These are implemented using an auxiliary macro:
-\def\NormalizeFontHeight{\NormalizeFontSize\ht}
-\def\NormalizeFontWidth {\NormalizeFontSize\wd}
+\unexpanded\def\NormalizeFontHeight{\NormalizeFontSize\ht}
+\unexpanded\def\NormalizeFontWidth {\NormalizeFontSize\wd}
-\def\NormalizeFontSize#1#2#3#4#5%
+\unexpanded\def\NormalizeFontSize#1#2#3#4#5%
{\bgroup
\dimen0=#4% #4 can be \ht0 or so
\setbox0\hbox{\definedfont[#5 at 5pt]#3}% 10pt
@@ -557,7 +557,7 @@
%D Extra:
-\def\WidthSpanningText#1#2#3% text width font
+\unexpanded\def\WidthSpanningText#1#2#3% text width font
{\hbox{\NormalizeFontWidth\temp{#1}{#2}{#3}\temp\the\everydefinedfont#1}}
%D Consider for instance:
@@ -642,7 +642,7 @@
% #1 width #2 height #3 font #4 size #5 step #6 interlinie #7 text
-\long\def\FittingText#1#2#3#4#5#6#7%
+\unexpanded\def\FittingText#1#2#3#4#5#6#7%
{\bgroup
\forgetall
\dontcomplain
@@ -681,11 +681,9 @@
\unvbox\scratchbox
\egroup}
-%D New:
-
% \font width gap font spec text
-\def\NormalizeFontWidthSpread#1#2#3#4#5#6%
+\unexpanded\def\NormalizeFontWidthSpread#1#2#3#4#5#6%
{\global\setfalse\NFSpread
\scratchdimen#3%
\scratchdimen-.5\scratchdimen
@@ -704,10 +702,10 @@
\definefont[\strippedcsname#1][#4 #5]%
\fi}
-\def\SpreadGapText#1#2%
+\unexpanded\def\SpreadGapText#1#2%
{{\def\+{\kern#1}#2}}
-\def\GapText#1#2#3#4#5% width distance font spec title
+\unexpanded\def\GapText#1#2#3#4#5% width distance font spec title
{\bgroup
\NormalizeFontWidthSpread\DummyFont{#1}{#2}{#3}{#4}{#5}%
\DummyFont\setupspacing\SpreadGapText{#2}{#5}\endgraf
diff --git a/Master/texmf-dist/tex/context/base/supp-num.mkiv b/Master/texmf-dist/tex/context/base/m-oldnum.mkiv
index be0df026da4..efc0af4721f 100644
--- a/Master/texmf-dist/tex/context/base/supp-num.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-oldnum.mkiv
@@ -1,5 +1,5 @@
%D \module
-%D [ file=supp-num,
+%D [ file=m-oldnum, % was: supp-num
%D version=1998.05.15,
%D title=\CONTEXT\ Support Macros,
%D subtitle=Numbers,
diff --git a/Master/texmf-dist/tex/context/base/m-pstricks.lua b/Master/texmf-dist/tex/context/base/m-pstricks.lua
index 7f795feac6b..b151e313ac8 100644
--- a/Master/texmf-dist/tex/context/base/m-pstricks.lua
+++ b/Master/texmf-dist/tex/context/base/m-pstricks.lua
@@ -39,12 +39,12 @@ local template = [[
\stoptext
]]
-local modules = { }
+local loaded = { }
local graphics = 0
function moduledata.pstricks.usemodule(names)
for name in gmatch(names,"([^%s,]+)") do
- modules[#modules+1] = format([[\readfile{%s}{}{}]],name)
+ loaded[#loaded+1] = format([[\readfile{%s}{}{}]],name)
end
end
@@ -55,10 +55,10 @@ function moduledata.pstricks.process(n)
local tmpfile = name .. ".tmp"
local epsfile = name .. ".ps"
local pdffile = name .. ".pdf"
- local modules = concat(modules,"\n")
+ local loaded = concat(loaded,"\n")
os.remove(epsfile)
os.remove(pdffile)
- io.savedata(tmpfile,format(template,modules,data))
+ io.savedata(tmpfile,format(template,loaded,data))
os.execute(format("mtxrun --script texexec %s --once --dvips",tmpfile))
if lfs.isfile(epsfile) then
os.execute(format("ps2pdf %s %s",epsfile,pdffile))
diff --git a/Master/texmf-dist/tex/context/base/m-spreadsheet.lua b/Master/texmf-dist/tex/context/base/m-spreadsheet.lua
new file mode 100644
index 00000000000..9d5106e357e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-spreadsheet.lua
@@ -0,0 +1,332 @@
+if not modules then modules = { } end modules ['m-spreadsheet'] = {
+ version = 1.001,
+ comment = "companion to m-spreadsheet.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local byte, format, gsub, find = string.byte, string.format, string.gsub, string.find
+local R, P, S, C, V, Cs, Cc, Ct, Cg, Cf, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Carg
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local setmetatable, loadstring, next, tostring, tonumber,rawget = setmetatable, loadstring, next, tostring, tonumber, rawget
+local formatters = string.formatters
+
+local context = context
+
+local splitthousands = utilities.parsers.splitthousands
+local variables = interfaces.variables
+
+local v_yes = variables.yes
+
+moduledata = moduledata or { }
+
+local spreadsheets = { }
+moduledata.spreadsheets = spreadsheets
+
+local data = {
+ -- nothing yet
+}
+
+local settings = {
+ period = ".",
+ comma = ",",
+}
+
+spreadsheets.data = data
+spreadsheets.settings = settings
+
+local defaultname = "default"
+local stack = { }
+local current = defaultname
+
+local d_mt ; d_mt = {
+ __index = function(t,k)
+ local v = { }
+ setmetatable(v,d_mt)
+ t[k] = v
+ return v
+ end,
+}
+
+local s_mt ; s_mt = {
+ __index = function(t,k)
+ local v = settings[k]
+ t[k] = v
+ return v
+ end,
+}
+
+function spreadsheets.setup(t)
+ for k, v in next, t do
+ settings[k] = v
+ end
+end
+
+local function emptydata(name,settings)
+ local data = { }
+ local specifications = { }
+ local settings = settings or { }
+ setmetatable(data,d_mt)
+ setmetatable(specifications,d_mt)
+ setmetatable(settings,s_mt)
+ return {
+ name = name,
+ data = data,
+ maxcol = 0,
+ maxrow = 0,
+ settings = settings,
+ temp = { }, -- for local usage
+ specifications = specifications,
+ }
+end
+
+function spreadsheets.reset(name)
+ if not name or name == "" then name = defaultname end
+ data[name] = emptydata(name,data[name] and data[name].settings)
+end
+
+function spreadsheets.start(name,s)
+ if not name or name == "" then
+ name = defaultname
+ end
+ if not s then
+ s = { }
+ end
+ table.insert(stack,current)
+ current = name
+ if data[current] then
+ setmetatable(s,s_mt)
+ data[current].settings = s
+ else
+ data[current] = emptydata(name,s)
+ end
+end
+
+function spreadsheets.stop()
+ current = table.remove(stack)
+end
+
+spreadsheets.reset()
+
+local offset = byte("A") - 1
+
+local function assign(s,n)
+ return formatters["moduledata.spreadsheets.data['%s'].data[%s]"](n,byte(s)-offset)
+end
+
+function datacell(a,b,...)
+ local n = 0
+ if b then
+ local t = { a, b, ... }
+ for i=1,#t do
+ n = n * (i-1) * 26 + byte(t[i]) - offset
+ end
+ else
+ n = byte(a) - offset
+ end
+ return formatters["dat[%s]"](n)
+end
+
+local function checktemplate(s)
+ if find(s,"%%") then
+ -- normal template
+ return s
+ elseif find(s,"@") then
+ -- tex specific template
+ return gsub(s,"@","%%")
+ else
+ -- tex specific quick template
+ return "%" .. s
+ end
+end
+
+local quoted = Cs(patterns.unquoted)
+local spaces = patterns.whitespace^0
+local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1))
+
+-- A nasty aspect of lpeg: Cf ( spaces * Cc("") * { "start" ... this will create a table that will
+-- be reused, so we accumulate!
+
+local pattern = Cf ( spaces * Ct("") * { "start",
+ start = V("value") + V("set") + V("format") + V("string") + V("code"),
+ value = Cg(P([[=]]) * spaces * Cc("kind") * Cc("value")) * V("code"),
+ set = Cg(P([[!]]) * spaces * Cc("kind") * Cc("set")) * V("code"),
+ format = Cg(P([[@]]) * spaces * Cc("kind") * Cc("format")) * spaces * Cg(Cc("template") * Cs(quoted/checktemplate)) * V("code"),
+ string = Cg(#S([["']]) * Cc("kind") * Cc("string")) * Cg(Cc("content") * quoted),
+ code = spaces * Cg(Cc("code") * Cs((cell + P(1))^0)),
+}, rawset)
+
+local functions = { }
+spreadsheets.functions = functions
+
+function functions._s_(row,col,c,f,t)
+ local r = 0
+ if f and t then -- f..t
+ -- ok
+ elseif f then -- 1..f
+ f, t = 1, f
+ else
+ f, t = 1, row - 1
+ end
+ for i=f,t do
+ local ci = c[i]
+ if type(ci) == "number" then
+ r = r + c[i]
+ end
+ end
+ return r
+end
+
+functions.fmt = string.tformat
+
+local f_code = formatters [ [[
+ local _m_ = moduledata.spreadsheets
+ local dat = _m_.data['%s'].data
+ local tmp = _m_.temp
+ local fnc = _m_.functions
+ local row = %s
+ local col = %s
+ function fnc.sum(...) return fnc._s_(row,col,...) end
+ local sum = fnc.sum
+ local fmt = fnc.fmt
+ return %s
+]] ]
+
+-- to be considered: a weak cache
+
+local function propername(name)
+ if name ~= "" then
+ return name
+ elseif current ~= "" then
+ return current
+ else
+ return defaultname
+ end
+end
+
+-- if name == "" then name = current if name == "" then name = defaultname end end
+
+local function execute(name,r,c,str)
+ if str ~= "" then
+ local d = data[name]
+ if c > d.maxcol then
+ d.maxcol = c
+ end
+ if r > d.maxrow then
+ d.maxrow = r
+ end
+ local specification = lpegmatch(pattern,str,1,name)
+ d.specifications[c][r] = specification
+ local kind = specification.kind
+ if kind == "string" then
+ return specification.content or ""
+ else
+ local code = specification.code
+ if code and code ~= "" then
+ code = f_code(name,r,c,code or "")
+ local result = loadstring(code) -- utilities.lua.strippedloadstring(code,true) -- when tracing
+ result = result and result()
+ if type(result) == "function" then
+ result = result()
+ end
+ if type(result) == "number" then
+ d.data[c][r] = result
+ end
+ if not result then
+ -- nothing
+ elseif kind == "set" then
+ -- no return
+ elseif kind == "format" then
+ return formatters[specification.template](result)
+ else
+ return result
+ end
+ end
+ end
+ end
+end
+
+function spreadsheets.set(name,r,c,str)
+ name = propername(name)
+ execute(name,r,c,str)
+end
+
+function spreadsheets.get(name,r,c,str)
+ name = propername(name)
+ local dname = data[name]
+ if not dname then
+ -- nothing
+ elseif not str or str == "" then
+ context(dname.data[c][r] or 0)
+ else
+ local result = execute(name,r,c,str)
+ if result then
+-- if type(result) == "number" then
+-- dname.data[c][r] = result
+-- result = tostring(result)
+-- end
+ local settings = dname.settings
+ local split = settings.split
+ local period = settings.period
+ local comma = settings.comma
+ if split == v_yes then
+ result = splitthousands(result)
+ end
+ if period == "" then period = nil end
+ if comma == "" then comma = nil end
+ result = gsub(result,".",{ ["."] = period, [","] = comma })
+ context(result)
+ end
+ end
+end
+
+function spreadsheets.doifelsecell(name,r,c)
+ name = propername(name)
+ local d = data[name]
+ local d = d and d.data
+ local r = d and rawget(d,r)
+ local c = r and rawget(r,c)
+ commands.doifelse(c)
+end
+
+local function simplify(name)
+ name = propername(name)
+ local data = data[name]
+ if data then
+ data = data.data
+ local temp = { }
+ for k, v in next, data do
+ local t = { }
+ temp[k] = t
+ for kk, vv in next, v do
+ if type(vv) == "function" then
+ t[kk] = "<function>"
+ else
+ t[kk] = vv
+ end
+ end
+ end
+ return temp
+ end
+end
+
+local function serialize(name)
+ local s = simplify(name)
+ if s then
+ return table.serialize(s,name)
+ else
+ return formatters["<unknown spreadsheet %a>"](name)
+ end
+end
+
+spreadsheets.simplify = simplify
+spreadsheets.serialize = serialize
+
+function spreadsheets.inspect(name)
+ inspect(serialize(name))
+end
+
+function spreadsheets.tocontext(name)
+ context.tocontext(simplify(name))
+end
diff --git a/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv b/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
index 8392140960e..a0596899055 100644
--- a/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
@@ -13,192 +13,139 @@
%D This is an experimental follow up on discussion on the mailing list.
-\startluacode
-local byte, format = string.byte, string.format
-local R, P, C, Cs, Cc, Carg, lpegmatch = lpeg.R, lpeg.P, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.match
-
-local spreadsheets = { }
-moduledata.spreadsheets = spreadsheets
-
-local data = { }
-spreadsheets.data = data
-
-local stack, current = { }, "default"
-
-local mt ; mt = {
- __index = function(t,k)
- local v = { }
- setmetatable(v,mt)
- t[k] = v
- return v
- end,
-}
-
-function spreadsheets.reset(name)
- if not name or name == "" then name = "default" end
- local d = { }
- setmetatable(d,mt)
- data[name] = d
-end
-
-function spreadsheets.start(name)
- if not name or name == "" then name = "default" end
- table.insert(stack,current)
- current = name
- if not data[current] then
- local d = { }
- setmetatable(d,mt)
- data[current] = d
- end
-end
-
-function spreadsheets.stop()
- current = table.remove(stack)
-end
-
-spreadsheets.reset()
-
-local offset = byte("A") - 1
-
-local function assign(s,n)
- return format("moduledata.spreadsheets.data['%s'][%s]",n,byte(s)-offset)
-end
-
--------- datacell(name,a,b,...)
-function datacell(a,b,...)
- local n = 0
- if b then
- local t = { a, b, ... }
- for i=1,#t do
- n = n * (i-1) * 26 + byte(t[i]) - offset
- end
- else
- n = byte(a) - offset
- end
- -- return format("dat['%s'][%s]",name,n)
- return format("dat[%s]",n)
-end
-
------ cell = (Carg(1) * C(R("AZ"))^1) / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1))
-local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1))
-local pattern = Cs(Cc("return ") * (cell + P(1))^0)
-
-local functions = { }
-spreadsheets.functions = functions
-
-function functions.sum(c,f,t)
- if f and t then
- local r = 0
- for i=f,t do
- r = r + c[i]
- end
- return r
- else
- return 0
- end
-end
-
-function functions.fmt(pattern,n)
- return format("%"..pattern,n)
-end
-
-local template = [[
- local spr = moduledata.spreadsheets.functions
- local dat = moduledata.spreadsheets.data['%s']
- local sum = spr.sum
- local fmt = spr.fmt
- %s
-]]
-
-local function execute(name,r,c,str)
- if name == "" then name = current if name == "" then name = "default" end end
- str = lpegmatch(pattern,str,1,name)
- str = format(template,name,str)
- -- print(str)
- local result = loadstring(str)
- result = result and result() or 0
- data[name][c][r] = result
- return result
-end
-
-function spreadsheets.set(name,r,c,str)
- if name == "" then name = current if name == "" then name = "default" end end
- execute(name,r,c,str)
-end
-
-function spreadsheets.get(name,r,c,str)
- if name == "" then name = current if name == "" then name = "default" end end
- if not str or str == "" then
- context(data[name][c][r] or 0)
- else
- local result = execute(name,r,c,str)
- if result then
- if type(result) == "number" then
- data[name][c][r] = result
- end
- context(result)
- end
- end
-end
-
-function spreadsheets.doifelsecell(name,r,c)
- if name == "" then name = current if name == "" then name = "default" end end
- local d = data[name]
- commands.testcase(d and d[c][r])
-end
-
-function spreadsheets.show(name)
- if name == "" then name = current if name == "" then name = "default" end end
- table.print(data[name],name)
-end
-\stopluacode
+\registerctxluafile{m-spreadsheet}{1.001}
\unprotect
% todo: get(...) set(..) ctx(...)
-\unexpanded\def\resetspreadsheet {\dosingleempty\doresetspreadsheet}
-\unexpanded\def\doresetspreadsheet [#1]{\ctxlua{moduledata.spreadsheets.reset("#1")}}
-\unexpanded\def\startspreadsheet {\dosingleempty\dostartspreadsheet}
-\unexpanded\def\dostartspreadsheet [#1]{\ctxlua{moduledata.spreadsheets.start("#1")}}
-\unexpanded\def\stopspreadsheet {\ctxlua{moduledata.spreadsheets.stop()}}
-\unexpanded\def\showspreadsheet {\dosingleempty\doshowspreadsheet}
-\unexpanded\def\doshowspreadsheet [#1]{\ctxlua{moduledata.spreadsheets.show("#1")}}
-\unexpanded\def\getspreadsheet {\dosingleempty\dogetspreadsheet}
-\unexpanded\def\dosetspreadsheet [#1]#2#3#4{\ctxlua{moduledata.spreadsheets.set ("#1",\number#2,\number#3,"#4")}}
-\unexpanded\def\setspreadsheet {\dosingleempty\dosetspreadsheet}
-\unexpanded\def\dogetspreadsheet [#1]#2#3#4{\ctxlua{moduledata.spreadsheets.get ("#1",\number#2,\number#3,"#4")}}
-\unexpanded\def\doifelsespreadsheetcell {\dosingleempty\dodoifelsespreadsheetcell}
-\unexpanded\def\dodoifelsespreadsheetcell[#1]#2#3{\ctxlua{moduledata.spreadsheets.doifelsecell("#1","#2","#3")}}
-
-\def\TABLEsetspreadsheet#1{\ctxlua{moduledata.spreadsheets.set("",\number\tblrow+1,\number\tblcol,\!!bs#1\!!es)}}
-\def\TABLEgetspreadsheet#1{\ctxlua{moduledata.spreadsheets.get("",\number\tblrow+1,\number\tblcol,\!!bs#1\!!es)}}
+\installcorenamespace{spreadsheet}
+
+\installcommandhandler \??spreadsheet {spreadsheet} \??spreadsheet
+
+\appendtoks
+ \ctxlua{moduledata.spreadsheets.setup{ % global !
+ period = \!!bs\spreadsheetparameter\c!period\!!es,
+ comma = \!!bs\spreadsheetparameter\c!comma\!!es,
+ split = \!!bs\spreadsheetparameter\c!split\!!es,
+ }}%
+\to \everysetupspreadsheet
+
+\setupspreadsheet
+ [%\c!comma=,
+ %\c!period=,
+ \c!split=\v!no]
+
+\unexpanded\def\resetspreadsheet
+ {\dosingleempty\module_spreadsheet_reset}
+
+\unexpanded\def\module_spreadsheet_reset[#1]%
+ {\ctxlua{moduledata.spreadsheets.reset("#1")}}
+
+\unexpanded\def\startspreadsheet
+ {\dosingleempty\module_spreadsheet_start}
+
+\unexpanded\def\module_spreadsheet_start[#1]%
+ {\pushmacro\currentspreadsheet
+ \edef\currentspreadsheet{#1}%
+ \checkspreadsheetparent
+ \edef\m_spreadsheet_period{\spreadsheetparameter\c!period}%
+ \edef\m_spreadsheet_comma {\spreadsheetparameter\c!comma}%
+ \ctxlua{moduledata.spreadsheets.start("#1", {
+ period = \!!bs\detokenize\expandafter{\m_spreadsheet_period}\!!es,
+ comma = \!!bs\detokenize\expandafter{\m_spreadsheet_comma}\!!es,
+ split = \!!bs\spreadsheetparameter\c!split\!!es,
+ })}}
+
+\unexpanded\def\stopspreadsheet
+ {\ctxlua{moduledata.spreadsheets.stop()}%
+ \popmacro\currentspreadsheet}
+
+\unexpanded\def\showspreadsheet
+ {\dosingleempty\module_spreadsheet_show}
+
+\unexpanded\def\module_spreadsheet_show[#1]%
+ {\ctxlua{moduledata.spreadsheets.tocontext("#1")}}
+
+\unexpanded\def\inspectspreadsheet
+ {\dosingleempty\module_spreadsheet_inspect}
+
+\unexpanded\def\module_spreadsheet_inspect[#1]%
+ {\ctxlua{moduledata.spreadsheets.inspect("#1")}}
+
+\unexpanded\def\setspreadsheet
+ {\dosingleempty\module_spreadsheet_set}
+
+\unexpanded\def\module_spreadsheet_set[#1]#2#3#4%
+ {\ctxlua{moduledata.spreadsheets.set("#1",\number#2,\number#3,"#4")}}
+
+\unexpanded\def\getspreadsheet
+ {\dosingleempty\module_spreadsheet_get}
+
+\unexpanded\def\module_spreadsheet_get[#1]#2#3#4%
+ {\ctxlua{moduledata.spreadsheets.get("#1",\number#2,\number#3,"#4")}}
+
+\unexpanded\def\doifelsespreadsheetcell
+ {\dosingleempty\module_spreadsheet_doifelse_cell}
+
+\unexpanded\def\module_spreadsheet_doifelse_cell[#1]#2#3%
+ {\ctxlua{moduledata.spreadsheets.doifelsecell("#1",\number#2,\number#3)}}
+
+\ifdefined\tblrow
+
+ \def\TABLEsetspreadsheet#1{\ctxlua{moduledata.spreadsheets.set("",\number\tblrow+1,\number\tblcol,\!!bs#1\!!es)}}
+ \def\TABLEgetspreadsheet#1{\ctxlua{moduledata.spreadsheets.get("",\number\tblrow+1,\number\tblcol,\!!bs#1\!!es)}}
+
+\else
+
+ \def\TABLEsetspreadsheet#1{\ctxlua{moduledata.spreadsheets.set("",\number\c_tabl_ntb_row+1,\number\c_tabl_ntb_col,\!!bs#1\!!es)}}
+ \def\TABLEgetspreadsheet#1{\ctxlua{moduledata.spreadsheets.get("",\number\c_tabl_ntb_row+1,\number\c_tabl_ntb_col,\!!bs#1\!!es)}}
+
+\fi
\appendtoks
- \resetspreadsheet
+ \module_spreadsheet_reset[\currentspreadsheet]%
\let\setspr\TABLEsetspreadsheet
\let\getspr\TABLEgetspreadsheet
\to \everyTABLEpass
\unexpanded\def\startspreadsheettable % quick and dirty
- {\dosingleempty\dostartspreadsheettable}
+ {\dodoubleempty\module_spreadsheet_start_table}
-\unexpanded\def\dostartspreadsheettable[#1]%
+\unexpanded\def\module_spreadsheet_start_table[#1][#2]%
{\bgroup
- \startspreadsheet[#1]%%
- \def\startrow{\bTR}%
- \def\stoprow {\eTR}%
- \def\startcell##1\stopcell{\bTD\getspr{##1}\eTD}%
- \bTABLE[\c!align=flushright]}
+ \let\startrow \module_spreadsheet_row_start
+ \let\stoprow \module_spreadsheet_row_stop
+ \let\startcell\module_spreadsheet_cell_start
+ \let\stopcell \module_spreadsheet_cell_stop
+ \doifassignmentelse{#1}
+ {\module_spreadsheet_start
+ \bTABLE[\c!align=\v!flushright,#1]}
+ {\module_spreadsheet_start[#1]%
+ \bTABLE[\c!align=\v!flushright,#2]}}
\unexpanded\def\stopspreadsheettable
{\eTABLE
\stopspreadsheet
\egroup}
+\unexpanded\def\module_spreadsheet_row_start{\bTR}
+\unexpanded\def\module_spreadsheet_row_stop {\eTR}
+
+\unexpanded\def\module_spreadsheet_cell_start
+ {\doifnextoptionalelse\module_spreadsheet_cell_start_yes\module_spreadsheet_cell_start_nop}
+
+\unexpanded\def\module_spreadsheet_cell_start_yes[#1]#2\stopcell
+ {\bTD[#1]\getspr{#2}\eTD}
+
+\unexpanded\def\module_spreadsheet_cell_start_nop#1\stopcell
+ {\bTD\getspr{#1}\eTD}
+
+\let\module_spreadsheet_cell_stop\relax
+
\protect
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{m-spreadsheet.mkiv}
\starttext
@@ -220,10 +167,12 @@ end
\bTD[nx=2] \bf \getspr{string.format("\letterpercent0.3f",(A[3] + B[3]) /100)} \eTD
\eTR
\bTR
- \bTD[nx=2] \bf \getspr{fmt("0.3f",(sum(A,1,2)) / 10)} \eTD
+ \bTD[nx=2] \bf \getspr{fmt("@0.3f",(sum(A,1,2)) / 10)} \eTD
\eTR
\eTABLE
+\setupspreadsheet[mysheet]
+
\startspreadsheet[mysheet]
\bTABLE[align=middle]
@@ -231,7 +180,7 @@ end
\bTD \getspr{100} \eTD \bTD test \setspr{30} \eTD
\eTR
\bTR
- \bTD \getspr{20} \eTD \bTD \getspr{4+3} \eTD
+ \bTD \getspr{20} \eTD \bTD \getspr{4+3.5} \eTD
\eTR
\bTR
\bTD \getspr{A[1] + A[2]} \eTD
@@ -244,17 +193,19 @@ end
\stopspreadsheet
-\startspreadsheettable
+\blank
+
+\setupspreadsheet[test][period={{\bf\middlered .}},comma={{\bf\middlegreen ,}},split=yes]
+
+\startspreadsheettable[test]
\startrow
- \startcell 3 \stopcell
- \startcell 9 \stopcell
+ \startcell 123456.78 \stopcell
+ \startcell 1234567.89 \stopcell
\startcell A[1] + B[1] \stopcell
\stoprow
\stopspreadsheettable
-bla bla \getspreadsheet[mysheet]{2}{2}{}
-
-bla bla \getspreadsheet[mysheet]{4}{1}{}
+\blank
% \showspreadsheet
% \showspreadsheet[mysheet]
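The period, comma and split keys set up for the test sheet above end up in spreadsheets.get, where the rendered number is first split into thousands and then every period and comma is mapped through a gsub replacement table; that is how the colored separators in the demo work. A rough standalone sketch of that post-processing (the real splitthousands is utilities.parsers.splitthousands and is only mimicked here):

    local function splitthousands(str)             -- simplified stand-in
        local intpart, rest = str:match("^(%d+)(.*)$")
        if not intpart then return str end
        intpart = intpart:reverse():gsub("(%d%d%d)","%1,"):reverse():gsub("^,","")
        return intpart .. rest
    end

    local function render(result,period,comma)
        result = splitthousands(result)
        -- every "." and "," is replaced via a table, so markup like {\bf .}
        -- can be injected for either symbol
        return (result:gsub("[.,]",{ ["."] = period, [","] = comma }))
    end

    print(render("1234567.89","<P>","<C>"))        -- 1<C>234<C>567<P>89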
diff --git a/Master/texmf-dist/tex/context/base/m-timing.mkiv b/Master/texmf-dist/tex/context/base/m-timing.mkiv
index e5413ddb110..5502768f697 100644
--- a/Master/texmf-dist/tex/context/base/m-timing.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-timing.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\ifx\ShowNamedUsage\undefined \else \endinput \fi
+\ifdefined\ShowNamedUsage \endinput \fi
%D Written at the end of 2007, this module is dedicated to Taco. Reaching this
%D point in \LUATEX\ was a non trivial effort. By visualizing a bit what happens
@@ -37,7 +37,7 @@
local progress = moduledata.progress
function progress.show(filename,parameters,nodes,other)
- for n, name in pairs(parameters or progress.parameters(filename)) do
+ for n, name in pairs(parameters or progress.parameters()) do
context.ShowNamedUsage(filename or progress.defaultfilename,name,other or "")
end
for n, name in pairs(nodes or progress.nodes(filename)) do
@@ -54,13 +54,14 @@ end
\ctxlua{luatex.registerstopactions(function() moduledata.progress.save() end)}
\stopnotmode
-\def\ShowNamedUsage#1#2#3%
- {\setbox\scratchbox\vbox\bgroup\startMPcode
+\unexpanded\def\ShowNamedUsage#1#2#3%
+ {\setbox\scratchbox\vbox\bgroup
+ \startMPcode
begingroup ; save p, q, b, h, w ;
path p, q, b ; numeric h, w ;
p := \cldcontext{moduledata.progress.path("#1","#2")} ;
% p := p shifted -llcorner p ;
- if bbwidth(p) > 1 :
+ if bbwidth(p) > 0 :
h := 100 ; w := 2 * h ;
w := \the\textwidth-3pt ; % correct for pen
p := p xstretched w ;
@@ -79,7 +80,8 @@ end
fi ;
fi ;
endgroup ;
- \stopMPcode\egroup
+ \stopMPcode
+ \egroup
\scratchdimen\wd\scratchbox
\ifdim\scratchdimen>\zeropoint
\startlinecorrection
@@ -92,9 +94,9 @@ end
\stoplinecorrection
\fi}
-\def\LoadUsage #1{\ctxlua{moduledata.progress.convert("#1")}}
-\def\ShowUsage #1{\ctxlua{moduledata.progress.show("#1",nil,nil,"elapsed_time")}}
-\def\ShowMemoryUsage#1{\ctxlua{moduledata.progress.show("#1",nil,{}, "elapsed_time")}}
-\def\ShowNodeUsage #1{\ctxlua{moduledata.progress.show("#1",{},nil, "elapsed_time")}}
+\unexpanded\def\LoadUsage #1{\ctxlua{moduledata.progress.convert("#1")}}
+\unexpanded\def\ShowUsage #1{\ctxlua{moduledata.progress.show("#1",nil,nil,"elapsed_time")}}
+\unexpanded\def\ShowMemoryUsage#1{\ctxlua{moduledata.progress.show("#1",nil,{}, "elapsed_time")}}
+\unexpanded\def\ShowNodeUsage #1{\ctxlua{moduledata.progress.show("#1",{},nil, "elapsed_time")}}
\endinput
diff --git a/Master/texmf-dist/tex/context/base/m-translate.mkiv b/Master/texmf-dist/tex/context/base/m-translate.mkiv
index ae4f3899ddb..363f115cb45 100644
--- a/Master/texmf-dist/tex/context/base/m-translate.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-translate.mkiv
@@ -65,31 +65,40 @@
\unprotect
-\def\translateinput{\dodoubleargument\dotranslateinput}
+\unexpanded\def\translateinput
+ {\dodoubleargument\module_translate_input}
-\def\dotranslateinput[#1][#2]{\ctxlua{moduledata.translators.register(\!!bs#1\!!es,\!!bs#2\!!es)}}
+\def\module_translate_input[#1][#2]%
+ {\ctxlua{moduledata.translators.register(\!!bs#1\!!es,\!!bs#2\!!es)}}
-\def\resetinputtranslation {\ctxlua{moduledata.translators.reset()}}
-\def\enableinputtranslation {\ctxlua{moduledata.translators.enable()}}
-\def\disableinputtranslation{\ctxlua{moduledata.translators.disable()}}
+\unexpanded\def\resetinputtranslation
+ {\ctxlua{moduledata.translators.reset()}}
-\def\readtranslatedfile#1%
+\unexpanded\def\enableinputtranslation
+ {\ctxlua{moduledata.translators.enable()}}
+
+\unexpanded\def\disableinputtranslation
+ {\ctxlua{moduledata.translators.disable()}}
+
+\unexpanded\def\readtranslatedfile#1%
{\enableinputtranslation
\readfile{#1}\donothing\donothing
\disableinputtranslation}
\protect
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{m-translate.mkiv}
+
\starttext
\translateinput[Moica][Mojca]
- \translateinput[Idris][Idris (aka ادريس)]
+ % \translateinput[Idris][Idris (aka ادريس)]
+ \translateinput[Idris][Idris (aka <something arabic here>)]
\enableinputtranslation
- Well, it's not that hard to satisfy Idris and Moica.
+    Well, it's not that hard to satisfy Idris' and Moica's \TEX\ needs.
\readtranslatedfile{tufte}
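The Lua side of this module is not part of this diff; conceptually, \translateinput registers old/new pairs and, while translation is enabled, each input line is run through them before it is processed further. A rough standalone sketch of that idea (a hypothetical stand-in, not the real moduledata.translators code):

    local translators = { registered = { }, enabled = false }

    function translators.register(old,new)
        translators.registered[old] = new
    end

    function translators.enable () translators.enabled = true  end
    function translators.disable() translators.enabled = false end

    function translators.translate(line)
        if translators.enabled then
            for old, new in pairs(translators.registered) do
                line = line:gsub(old,new)
            end
        end
        return line
    end

    translators.register("Moica","Mojca")
    translators.enable()
    print(translators.translate("Well, Moica asked for this."))  -- Well, Mojca asked for this.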
diff --git a/Master/texmf-dist/tex/context/base/m-visual.mkiv b/Master/texmf-dist/tex/context/base/m-visual.mkiv
index 5d259f6abc9..504c0d0c5f2 100644
--- a/Master/texmf-dist/tex/context/base/m-visual.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-visual.mkiv
@@ -13,6 +13,9 @@
\unprotect
+%D Much will probably be replaced by \LUA\ based solutions, which is
+%D rather trivial and fun to do.
+
%D This module collects a few more visual debugger features. I
%D needed them for manuals and styles. The macros are documented
%D in a my way document.
@@ -28,11 +31,11 @@
\begingroup
\directcolored[fakerulecolor]%
\iffakebaseline
- \vrule\!!height1.25ex\!!depth-.05ex\!!width#1%
+ \vrule\s!height1.25ex\s!depth-.05ex\s!width#1%
\kern-#1%
- \vrule\!!height-.05ex\!!depth .25ex\!!width#1%
+ \vrule\s!height-.05ex\s!depth .25ex\s!width#1%
\else
- \vrule\!!height1.25ex\!!depth .25ex\!!width#1%
+ \vrule\s!height1.25ex\s!depth .25ex\s!width#1%
\fi
\endgroup
\allowbreak}
@@ -116,9 +119,9 @@
\ifx\dofakedroppedcaps\relax
{\fakeparindentcolor
\vrule
- \!!height \strutheight % not longer .5ex
- \!!depth \strutdepth % not longer 0pt
- \!!width \parindent}%
+ \s!height \strutheight % not longer .5ex
+ \s!depth \strutdepth % not longer 0pt
+ \s!width \parindent}%
\else
\dofakedroppedcaps \let\dofakedroppedcaps\relax
\fi
@@ -137,9 +140,9 @@
\advance\scratchdimen -\lineheight
\advance\scratchdimen \dp\strutbox
\vrule
- \!!width#1\wd\scratchbox
- \!!height\ht\scratchbox
- \!!depth\scratchdimen}%
+ \s!width#1\wd\scratchbox
+ \s!height\ht\scratchbox
+ \s!depth\scratchdimen}%
\ht\scratchbox\ht\strutbox
\dp\scratchbox\dp\strutbox
\hangindent\wd\scratchbox
@@ -214,7 +217,7 @@
{\hss\lower.5\ht\scratchbox\box\scratchbox\hss}%
\hbox to \zeropoint
{\hss
- \black\vrule\!!width6\linewidth\!!height3\linewidth\!!depth3\linewidth
+ \black\vrule\s!width6\linewidth\s!height3\linewidth\s!depth3\linewidth
\hss}}}
\unexpanded\def\bodyfontgrid
@@ -243,10 +246,564 @@
\placefigure{\fakewords{8}{15}}{\fakeimage{5cm}{3cm}{10cm}{5cm}}
\dorecurse{2}{\fakewords{100}{200}\endgraf}}}}}
+%D Moved code:
+
+%D \module
+%D [ file=trac-vis, % was core-vis,
+%D version=1996.06.01,
+%D title=\CONTEXT\ Tracking Macros,
+%D subtitle=Visualization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This module adds some more visualization cues to the ones
+%D supplied in the support module.
+%D
+%D %\everypar dual character, \the\everypar and \everypar=
+%D %\hrule cannot be grabbed in advance, switches mode
+%D %\vrule cannot be grabbed in advance, switches mode
+%D %
+%D %\indent only explicit ones
+%D %\noindent only explicit ones
+%D %\par only explicit ones
+%D
+%D %\leftskip only if explicit one
+%D %\rightskip only if explicit one
+
+\writestatus{loading}{ConTeXt Tracking Macros / Visualization}
+
+\unprotect
+
+%D \macros
+%D {indent, noindent, par}
+%D
+%D \TeX\ acts upon paragraphs. In most documents paragraphs
+%D are separated by empty lines, which internally are handled as
+%D \type{\par}. Paragraphs can be indented or not, depending on
+%D the setting of \type{\parindent}, the first token of a
+%D paragraph and/or user suppressed or forced indentation.
+%D
+%D Because the actual typesetting is based on both explicit
+%D user and implicit system actions, visualization is only
+%D possible for the user supplied \type{\indent},
+%D \type{\noindent}, and \type{\par}. Other
+%D 'clever' tricks will quite certainly lead to more failures
+%D than successes, so we only support these three explicit
+%D primitives and one macro:
+
+\unexpanded\def\showparagraphcue#1#2#3#4#5%
+ {\bgroup
+ \scratchdimen#1\relax
+ \dontinterfere
+ \dontcomplain
+ %boxrulewidth5\testrulewidth
+ #3#4\relax
+ \setbox\scratchbox\normalhbox to \scratchdimen
+ {#2{\ruledhbox to \scratchdimen
+ {\vrule #5 20\testrulewidth \s!width \zeropoint
+ \normalhss}}}%
+ \smashbox\scratchbox
+ \normalpenalty\plustenthousand
+ \box\scratchbox
+ \egroup}
+
+\unexpanded\def\ruledhanging
+ {\ifdim\hangindent>\zeropoint
+ \ifnum\hangafter<\zerocount
+ \normalhbox
+ {%boxrulewidth5\testrulewidth
+ \setbox\scratchbox\ruledhbox to \hangindent
+ {\scratchdimen\strutht
+ \advance\scratchdimen \strutdp
+ \vrule
+ \s!width \zeropoint
+ \s!height \zeropoint
+ \s!depth -\hangafter\scratchdimen}%
+ \normalhskip-\hangindent
+ \smashbox\scratchbox
+ \raise\strutht\box\scratchbox}%
+ \fi
+ \fi}
+
+\unexpanded\def\ruledparagraphcues
+ {\bgroup
+ \dontcomplain
+ \normalhbox to \zeropoint
+ {\ifdim\leftskip>\zeropoint\relax
+ \showparagraphcue\leftskip\llap\relax\relax\!!depth
+ \normalhskip-\leftskip
+ \fi
+ \ruledhanging
+ \normalhskip\hsize
+ \ifdim\rightskip>\zeropoint\relax
+ \normalhskip-\rightskip
+ \showparagraphcue\rightskip\relax\relax\relax\!!depth
+ \fi}%
+ \egroup}
+
+\unexpanded\def\ruledpar
+ {\relax
+ \ifhmode
+ \showparagraphcue{40\testrulewidth}\relax\rightrulefalse\relax\s!height
+ \fi
+ \normalpar}
+
+\unexpanded\def\rulednoindent
+ {\relax
+ \normalnoindent
+ \ruledparagraphcues
+ \showparagraphcue{40\testrulewidth}\llap\leftrulefalse\relax\s!height}
+
+\unexpanded\def\ruledindent
+ {\relax
+ \normalnoindent
+ \ruledparagraphcues
+ \ifdim\parindent>\zeropoint
+ \showparagraphcue\parindent\relax\relax\relax\s!height
+ \else
+ \showparagraphcue{40\testrulewidth}\llap\relax\relax\s!height
+ \fi
+ \normalhskip\parindent}
+
+\unexpanded\def\dontshowimplicits
+ {\let\noindent \normalnoindent
+ \let\indent \normalindent
+ \let\par \normalpar}
+
+\unexpanded\def\showimplicits
+ {\testrulewidth \defaulttestrulewidth
+ \let\noindent \rulednoindent
+ \let\indent \ruledindent
+ \let\par \ruledpar}
+
+%D The next few||line examples show the four cues. Keep in
+%D mind that we only see them when we explicitly open or close
+%D a paragraph.
+%D
+%D \bgroup
+%D \def\voorbeeld#1%
+%D {#1Visualizing some \TeX\ primitives and Plain \TeX\
+%D macros can be very instructive, at least it is to me.
+%D Here we see {\tt\string#1} and {\tt\string\ruledpar} in
+%D action, while {\tt\string\parindent} equals
+%D {\tt\the\parindent}.\ruledpar}
+%D
+%D \showimplicits
+%D
+%D \voorbeeld \indent
+%D \voorbeeld \noindent
+%D
+%D \parindent=60pt
+%D
+%D \voorbeeld \indent
+%D \voorbeeld \noindent
+%D
+%D \startnarrower
+%D \voorbeeld \indent
+%D \voorbeeld \noindent
+%D \stopnarrower
+%D \egroup
+%D
+%D These examples also demonstrate the visualization of
+%D \type {\leftskip} and \type {\rightskip}. The macro
+%D \type {\nofruledbaselines} determines the number of lines
+%D shown.
+
+\newcounter\ruledbaselines
+
+\def\nofruledbaselines{3}
+
+\unexpanded\def\ruledbaseline
+ {\vrule \s!width \zeropoint
+ \bgroup
+ \dontinterfere
+ \doglobal\increment\ruledbaselines
+ \scratchdimen\nofruledbaselines\baselineskip
+ \setbox\scratchbox\normalvbox to 2\scratchdimen
+ {\leaders
+ \normalhbox
+ {\strut
+ \vrule
+ \s!height \testrulewidth
+ \s!depth \testrulewidth
+ \s!width 120\points}
+ \normalvfill}%
+ \smashbox\scratchbox
+ \advance\scratchdimen \strutheightfactor\baselineskip
+ \setbox\scratchbox\normalhbox
+ {\normalhskip -48\points
+ \normalhbox to 24\points
+ {\normalhss
+ {\ttxx\ruledbaselines}%
+ \normalhskip6\points}%
+ \raise\scratchdimen\box\scratchbox}%
+ \smashbox\scratchbox
+ \box\scratchbox
+ \egroup}
+
+\unexpanded\def\showbaselines
+ {\testrulewidth\defaulttestrulewidth
+ \EveryPar{\ruledbaseline}}
+
+%D \macros
+%D {showpagebuilder}
+%D
+%D The next tracing option is probably only of use to me and a
+%D few \CONTEXT\ hackers.
+
+\unexpanded\def\showpagebuilder
+ {\EveryPar{\doshowpagebuilder}}
+
+\unexpanded\def\doshowpagebuilder
+ {\strut\llap
+ {\startcolor[blue]\vl
+ \high{\infofont v:\the\vsize }\vl
+ \high{\infofont g:\the\pagegoal }\vl
+ \high{\infofont t:\the\pagetotal}\vl
+ \stopcolor}}
+
+%D \macros
+%D {colormarkbox,rastermarkbox}
+%D
+%D This macro is used in the pagebody routine. No other use
+%D is advocated here.
+%D
+%D \starttyping
+%D \colormarkbox0
+%D \stoptyping
+
+\def\colormarkoffset{\cutmarkoffset}
+\def\colormarklength{\cutmarklength}
+
+\def\dodocolorrangeA#1%
+ {\fastcolored[#1]{\hrule\s!width3em\s!height\scratchdimen\s!depth\zeropoint}}
+
+\def\docolorrangeA#1 #2 %
+ {\vbox
+ {\hsize3em % \scratchdimen
+ \ifcase#1\or
+ \dodocolorrangeA{c=#2}\or
+ \dodocolorrangeA{m=#2}\or
+ \dodocolorrangeA{y=#2}\or
+ \dodocolorrangeA{m=#2,y=#2}\or
+ \dodocolorrangeA{c=#2,y=#2}\or
+ \dodocolorrangeA{c=#2,m=#2}\fi
+ \ifdim\scratchdimen>1ex
+ \vskip-\scratchdimen
+ \vbox to \scratchdimen
+ {\vss\hbox to 3em{\hss#2\hss}\vss}%
+ \fi}}
+
+\def\colorrangeA#1%
+ {\vbox
+ {\startcolor[\s!white]%
+ \scratchdimen\dimexpr(-\colormarklength*4+\tractempheight+\tractempdepth)/21\relax
+ \offinterlineskip
+ \docolorrangeA #1 1.00 \docolorrangeA #1 0.95
+ \docolorrangeA #1 0.75
+ \docolorrangeA #1 0.50
+ \docolorrangeA #1 0.25 \docolorrangeA #1 0.05
+ \docolorrangeA #1 0.00
+ \stopcolor}}
+
+\def\docolorrangeB #1 #2 #3 #4 #5 %
+ {\fastcolored
+ [\c!c=#2,\c!m=#3,\c!y=#4,\c!k=#5]
+ {\vrule\s!width\scratchdimen\s!height\colormarklength\s!depth\zeropoint}%
+ \ifdim\scratchdimen>2em
+ \hskip-\scratchdimen
+ \vbox to \colormarklength
+ {\vss\hbox to \scratchdimen{\hss#1\hss}\vss}%
+ \fi}
+
+\def\colorrangeB
+ {\hbox
+ {\startcolor[\s!white]%
+ \scratchdimen\dimexpr(-\colormarklength*\plustwo+\tractempwidth)/11\relax
+ \docolorrangeB .5~C .5 0 0 0
+ \docolorrangeB .5~M 0 .5 0 0
+ \docolorrangeB .5~Y 0 0 .5 0
+ \docolorrangeB .5~K 0 0 0 .5
+ \docolorrangeB C 1 0 0 0
+ \docolorrangeB G 1 0 1 0
+ \docolorrangeB Y 0 0 1 0
+ \docolorrangeB R 0 1 1 0
+ \docolorrangeB M 0 1 0 0
+ \docolorrangeB B 1 1 0 0
+ \docolorrangeB K 0 0 0 1
+ \stopcolor}}
+
+\def\docolorrangeC#1 %
+ {\fastcolored
+ [\c!s=#1]%
+ {\vrule\s!width\scratchdimen\s!height\colormarklength\s!depth\zeropoint}%
+ \ifdim\scratchdimen>2em
+ \hskip-\scratchdimen
+ \vbox to \colormarklength
+ {\vss\hbox to \scratchdimen{\hss#1\hss}\vss}%
+ \fi}
+
+\def\colorrangeC
+ {\hbox
+ {\startcolor[\s!white]%
+ \scratchdimen\dimexpr(-\colormarklength*2+\tractempwidth)/14\relax
+ \docolorrangeC 1 \docolorrangeC .95
+ \docolorrangeC .9 \docolorrangeC .85
+ \docolorrangeC .8 \docolorrangeC .75
+ \docolorrangeC .7
+ \docolorrangeC .6
+ \docolorrangeC .5
+ \docolorrangeC .4
+ \docolorrangeC .3
+ \docolorrangeC .2
+ \docolorrangeC .1
+ \docolorrangeC 0
+ \stopcolor}}
+
+\def\docolormarkbox#1#2%
+ {\tractempheight\ht#2%
+ \tractempdepth \dp#2%
+ \tractempwidth \wd#2%
+ \setbox#2\hbox
+ {\scratchdimen\dimexpr\colormarklength/2\relax
+ \forgetall
+ \ssxx
+ \setbox\scratchbox\vbox
+ {\offinterlineskip
+ \vskip\dimexpr-\colormarkoffset\scratchdimen-2\scratchdimen\relax
+ \ifcase#1\relax
+ \vskip\dimexpr\colormarklength+\scratchdimen+\tractempheight\relax
+ \else
+ \hbox to \tractempwidth{\hss\hbox{\colorrangeB}\hss}%
+ \vskip\colormarkoffset\scratchdimen
+ \vbox to \tractempheight
+ {\vss
+ \hbox to \tractempwidth
+ {\llap{\colorrangeA1\hskip\colormarkoffset\scratchdimen}\hfill
+ \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA4}}%
+ \vss
+ \hbox to \tractempwidth
+ {\llap{\colorrangeA2\hskip\colormarkoffset\scratchdimen}\hfill
+ \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA5}}%
+ \vss
+ \hbox to \tractempwidth
+ {\llap{\colorrangeA3\hskip\colormarkoffset\scratchdimen}\hfill
+ \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA6}}%
+ \vss}%
+ \fi
+ \vskip\colormarkoffset\scratchdimen
+ \hbox to \tractempwidth
+ {\hss\lower\tractempdepth\hbox{\colorrangeC}\hss}}%
+ \ht\scratchbox\tractempheight
+ \dp\scratchbox\tractempdepth
+ \wd\scratchbox\zeropoint
+ \box\scratchbox
+ \box#2}%
+ \wd#2\tractempwidth
+ \ht#2\tractempheight
+ \dp#2\tractempdepth}
+
+\unexpanded\def\colormarkbox {\docolormarkbox\plusone } % #1
+\unexpanded\def\rastermarkbox{\docolormarkbox\zerocount} % #1
+
+%D \macros
+%D {showwhatsits, dontshowwhatsits}
+%D
+%D \TEX\ has three so-called whatsits: \type {\mark}, \type
+%D {\write} and \type {\special}. The first one keeps track of
+%D the current state at page boundaries; the last two are used
+%D to communicate to the outside world. Due to the fact that
+%D especially \type {\write} is often used in conjunction with
+%D \type {\edef}, we can only safely support that one in \ETEX.
+%D
+%D \bgroup \showwhatsits \setupcolors[state=start]
+%D
+%D Whatsits show up \color[blue]{in color} and are
+%D characterized by their first character.\footnote [some note]
+%D {So we may encounter \type {w}, \type {m} and \type{s}.}
+%D They are \writestatus{dummy}{demo}\color[yellow]{stacked}.
+%D
+%D \egroup
+
+\newif\ifimmediatewrite
+
+\let\supernormalmark \normalmark % mark may already have been superseded
+\let\supernormalmarks \normalmarks % mark may already have been superseded
+
+\unexpanded\def\showwhatsits
+ {\protected\def\normalmark {\visualwhatsit100+m\supernormalmark }%
+ \protected\def\normalmarks{\visualwhatsit100+m\supernormalmarks}%
+ \protected\def\special {\visualwhatsit0100s\normalspecial }%
+ \protected\def\write {\visualwhatsit001-w\normalwrite }%
+ \let\immediate\immediatewhatsit
+ \appendtoks\dontshowwhatsits\to\everystoptext}
+
+\unexpanded\def\immediatewhatsit
+ {\bgroup\futurelet\next\doimmediatewhatsit}
+
+\unexpanded\def\doimmediatewhatsit
+ {\ifx\next\write
+ \egroup\immediatewritetrue
+ \else
+ \egroup\expandafter\normalimmediate
+ \fi}
+
+\unexpanded\def\dontshowwhatsits
+ {\let\immediate \normalimmediate
+ \let\normalmark\supernormalmark
+ \let\special \normalspecial
+ \let\write \normalwrite}
+
+\unexpanded\def\visualwhatsit#1#2#3#4#5%
+ {\bgroup
+ \pushwhatsit
+ \dontinterfere
+ \dontcomplain
+ \dontshowcomposition
+ \dontshowwhatsits
+ \ttx
+ \ifvmode\donetrue\else\donefalse\fi
+ \setbox\scratchbox\hbox
+ {\ifdone
+ \colored[r=#1,g=#2,b=#3]{#5}% temp hack
+ \else
+ \colored[s=0]{#5}% temp hack
+ \fi}%
+ \setbox\scratchbox\hbox
+ {\ifdone
+ \colored[r=#1,g=#2,b=#3]{\vrule\s!width\wd\scratchbox}% temp hack
+ \else
+ \colored[s=0]{\vrule\s!width\wd\scratchbox}% temp hack
+ \fi
+ \hskip-\wd\scratchbox\box\scratchbox}%
+ \scratchdimen1ex
+ \setbox\scratchbox\hbox
+ {\ifdone\hskip\else\raise#4\fi\scratchdimen\box\scratchbox}%
+ \smashbox\scratchbox
+ \ifdone\nointerlineskip\fi
+ \box\scratchbox
+ \ifvmode\nointerlineskip\fi
+ \popwhatsit
+ \egroup
+ \ifimmediatewrite
+ \immediatewritefalse
+ \expandafter\normalimmediate
+ \fi}
+
+\unexpanded\def\pushwhatsit
+ {\ifzeropt\lastskip
+ \ifcase\lastpenalty
+ \ifzeropt\lastkern
+ \ifhmode
+ \let\popwhatsit\relax
+ \else
+ \edef\popwhatsit{\prevdepth\the\prevdepth}%
+ \fi
+ \else
+ \ifhmode
+ \edef\popwhatsit{\kern\the\lastkern}\unkern
+ \else
+ \edef\popwhatsit{\kern\the\lastkern\prevdepth\the\prevdepth}%
+ \kern-\lastkern
+ \fi
+ \fi
+ \else
+ \ifhmode
+ \edef\popwhatsit{\the\lastpenalty}%
+ \unpenalty
+ \else
+ \edef\popwhatsit{\penalty\the\lastpenalty\prevdepth\the\prevdepth}%
+ %\nobreak
+ \fi
+ \fi
+ \else
+ \ifhmode
+ \edef\popwhatsit{\hskip\the\lastskip}\unskip
+ \else
+ \edef\popwhatsit{\vskip\the\lastskip\prevdepth\the\prevdepth}%
+ \vskip-\lastskip
+ \fi
+ \fi}
+
+%D The next macro can be used to keep track of classes of
+%D boxes (handy for development cq.\ tracing).
+
+\def\dodotagbox#1#2#3% can be reimplemented
+ {\def\next##1##2##3##4%
+ {\vbox to \ht#2{##3\hbox to \wd#2{##1#3##2}##4}}%
+ \processaction
+ [#1]
+ [ l=>\next\relax\hfill\vfill\vfill,
+ r=>\next\hfill\relax\vfill\vfill,
+ t=>\next\hfill\hfill\relax\vfill,
+ b=>\next\hfill\hfill\vfill\relax,
+ lt=>\next\relax\hfill\relax\vfill,
+ lb=>\next\relax\hfill\vfill\relax,
+ rt=>\next\hfill\relax\relax\vfill,
+ rb=>\next\hfill\relax\vfill\relax,
+ tl=>\next\relax\hfill\relax\vfill,
+ bl=>\next\relax\hfill\vfill\relax,
+ tr=>\next\hfill\relax\relax\vfill,
+ br=>\next\hfill\relax\vfill\relax,
+ \s!default=>\next\hfill\hfill\vfill\vfill,
+ \s!unknown=>\next\hfill\hfill\vfill\vfill]}
+
+\def\dotagbox[#1]#2%
+ {\bgroup
+ \dowithnextbox
+ {\setbox\scratchbox\flushnextbox
+ \setbox\nextbox\ifhbox\nextbox\hbox\else\vbox\fi
+ \bgroup
+ \startoverlay
+ {\copy\scratchbox}
+ {\dodotagbox{#1}\scratchbox{\framed
+ [\c!background=\v!screen,\c!backgroundscreen=1]{#2}}}
+ \stopoverlay
+ \egroup
+ \nextboxwd\the\wd\scratchbox
+ \nextboxht\the\ht\scratchbox
+ \nextboxdp\the\dp\scratchbox
+ \flushnextbox
+ \egroup}}
+
+\unexpanded\def\tagbox
+ {\dosingleempty\dotagbox}
+
+%D \macros
+%D {coloredhbox,coloredvbox,coloredvtop,
+%D coloredstrut}
+%D
+%D The following visualizations are used in some of the manuals:
+
+\definecolor[boxcolor:ht][r=.5,g=.75,b=.5]
+\definecolor[boxcolor:dp][r=.5,g=.5,b=.75]
+\definecolor[boxcolor:wd][r=.75,g=.5,b=.5]
+\definecolor[strutcolor] [r=.5,g=.25,b=.25]
+
+\unexpanded\def\coloredbox#1%
+ {\dowithnextbox{#1{\hbox
+ {\blackrule[\c!width=\nextboxwd,\c!height=\nextboxht,\c!depth=\zeropoint,\c!color=boxcolor:ht]%
+ \hskip-\nextboxwd
+ \blackrule[\c!width=\nextboxwd,\c!height=\zeropoint,\c!depth=\nextboxdp,\c!color=boxcolor:dp]%
+ \hskip-\nextboxwd
+ \box\nextbox}}}#1}
+
+\unexpanded\def\coloredhbox{\coloredbox\hbox}
+\unexpanded\def\coloredvbox{\coloredbox\vbox}
+\unexpanded\def\coloredvtop{\coloredbox\vtop}
+
+\unexpanded\def\coloredstrut
+ {\color[strutcolor]{\def\strutwidth{2\points}\setstrut\strut}}
+
+\protect
+
\continueifinputfile{m-visual.mkiv}
\starttext
\simplethesis
\stoptext
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-act.lua b/Master/texmf-dist/tex/context/base/math-act.lua
index 278d323c636..8aeb0a97f2d 100644
--- a/Master/texmf-dist/tex/context/base/math-act.lua
+++ b/Master/texmf-dist/tex/context/base/math-act.lua
@@ -6,10 +6,19 @@ if not modules then modules = { } end modules ['math-act'] = {
license = "see context related readme files"
}
+-- Here we tweak some font properties (if needed).
+
+local type, next = type, next
+local fastcopy = table.fastcopy
+
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
local report_math = logs.reporter("mathematics","initializing")
+local context = context
+local commands = commands
local mathematics = mathematics
+local texdimen = tex.dimen
+local abs = math.abs
local sequencers = utilities.sequencers
local appendgroup = sequencers.appendgroup
@@ -50,9 +59,6 @@ local how = {
function mathematics.scaleparameters(target,original)
if not target.properties.math_is_scaled then
- -- print("\n",target.properties.fontname)
- -- print(original.mathparameters.DisplayOperatorMinHeight)
- -- print(target.mathparameters.DisplayOperatorMinHeight)
local mathparameters = target.mathparameters
if mathparameters and next(mathparameters) then
local parameters = target.parameters
@@ -70,13 +76,9 @@ function mathematics.scaleparameters(target,original)
else
value = value * factor
end
- -- if name == "DisplayOperatorMinHeight" then
- -- report_math("f: %s, p: %s, h: %s, b: %s, a: %s",target.properties.fontname,name,h or "scaled",mathparameters[name],value)
- -- end
- mathparameters[name] = value
+ mathparameters[name] = value
end
end
- -- print(mathparameters.DisplayOperatorMinHeight)
target.properties.math_is_scaled = true
end
end
@@ -117,12 +119,12 @@ function mathematics.overloadparameters(target,original)
local parameters = mathematics and mathematics.parameters
if parameters then
if trace_defining then
- report_math("overloading math parameters in '%s' @ %s",target.properties.fullname,target.parameters.size)
+ report_math("overloading math parameters in %a @ %p",target.properties.fullname,target.parameters.size)
end
for name, value in next, parameters do
local tvalue = type(value)
if tvalue == "string" then
- report_math("comment for math parameter '%s': %s",name,value)
+ report_math("comment for math parameter %a: %s",name,value)
else
local oldvalue = mathparameters[name]
local newvalue = oldvalue
@@ -135,10 +137,10 @@ function mathematics.overloadparameters(target,original)
newvalue = nil
end
if trace_defining and oldvalue ~= newvalue then
- report_math("overloading math parameter '%s': %s => %s",name,tostring(oldvalue),tostring(newvalue))
+ report_math("overloading math parameter %a: %S => %S",name,oldvalue,newvalue)
end
else
- report_math("invalid math parameter '%s'",name)
+ report_math("invalid math parameter %a",name)
end
mathparameters[name] = newvalue
end
@@ -162,7 +164,7 @@ local function applytweaks(when,target,original)
tweaks = tweaks[when]
if tweaks then
if trace_defining then
- report_math("tweaking math of '%s' @ %s (%s)",target.properties.fullname,target.parameters.size,when)
+ report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when)
end
for i=1,#tweaks do
local tweak= tweaks[i]
@@ -194,6 +196,89 @@ end
sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont")
sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont")
+function mathematics.overloaddimensions(target,original,set)
+ local goodies = target.goodies
+ if goodies then
+ for i=1,#goodies do
+ local goodie = goodies[i]
+ local mathematics = goodie.mathematics
+ local dimensions = mathematics and mathematics.dimensions
+ if dimensions then
+ if trace_defining then
+ report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size)
+ end
+ local characters = target.characters
+ local parameters = target.parameters
+ local factor = parameters.factor
+ local hfactor = parameters.hfactor
+ local vfactor = parameters.vfactor
+ local addprivate = fonts.helpers.addprivate
+ local function overload(dimensions)
+ for unicode, data in next, dimensions do
+ local character = characters[unicode]
+ if character then
+ --
+ local width = data.width
+ local height = data.height
+ local depth = data.depth
+ if trace_defining and (width or height or depth) then
+ report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth)
+ end
+ if width then character.width = width * hfactor end
+ if height then character.height = height * vfactor end
+ if depth then character.depth = depth * vfactor end
+ --
+ local xoffset = data.xoffset
+ local yoffset = data.yoffset
+ if xoffset then
+ xoffset = { "right", xoffset * hfactor }
+ end
+ if yoffset then
+ yoffset = { "down", -yoffset * vfactor }
+ end
+ if xoffset or yoffset then
+ local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) }
+ if xoffset and yoffset then
+ character.commands = { xoffset, yoffset, slot }
+ elseif xoffset then
+ character.commands = { xoffset, slot }
+ else
+ character.commands = { yoffset, slot }
+ end
+ character.index = nil
+ end
+ elseif trace_defining then
+ report_math("no overloading dimensions of %C, not in font",unicode)
+ end
+ end
+ end
+ if set == nil then
+ set = { "default" }
+ end
+ if set == "all" or set == true then
+ for name, set in next, dimensions do
+ overload(set)
+ end
+ else
+ if type(set) == "string" then
+ set = utilities.parsers.settings_to_array(set)
+ end
+ if type(set) == "table" then
+ for i=1,#set do
+ local d = dimensions[set[i]]
+ if d then
+ overload(d)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions")
+
+-- a couple of predefined tweaks:
local tweaks = { }
@@ -202,3 +287,111 @@ mathematics.tweaks = tweaks
function tweaks.fixbadprime(target,original)
target.characters[0xFE325] = target.characters[0x2032]
end
+
+-- helpers
+
+local setmetatableindex = table.setmetatableindex
+local family_font = node.family_font
+
+local fontcharacters = fonts.hashes.characters
+local extensibles = utilities.storage.allocate()
+fonts.hashes.extensibles = extensibles
+
+local chardata = characters.data
+local extensibles = mathematics.extensibles
+
+-- we use numbers at the tex end (otherwise we could stick to chars)
+
+local e_left = extensibles.left
+local e_right = extensibles.right
+local e_horizontal = extensibles.horizontal
+local e_vertical = extensibles.vertical
+local e_mixed = extensibles.mixed
+local e_unknown = extensibles.unknown
+
+local unknown = { e_unknown, false, false }
+
+local function extensiblecode(font,unicode)
+ local characters = fontcharacters[font]
+ local character = characters[unicode]
+ if not character then
+ return unknown
+ end
+ local code = unicode
+ local next = character.next
+ while next do
+ code = next
+ character = characters[next]
+ next = character.next
+ end
+ local char = chardata[unicode]
+ local mathextensible = char and char.mathextensible
+ if character.horiz_variants then
+ if character.vert_variants then
+ return { e_mixed, code, character }
+ else
+ local e = mathextensible and extensibles[mathextensible]
+ return e and { e, code, character } or unknown
+ end
+ elseif character.vert_variants then
+ local e = mathextensible and extensibles[mathextensible]
+ return e and { e, code, character } or unknown
+ else
+ return unknown
+ end
+end
+
+setmetatableindex(extensibles,function(extensibles,font)
+ local codes = { }
+ setmetatableindex(codes, function(codes,unicode)
+ local status = extensiblecode(font,unicode)
+ codes[unicode] = status
+ return status
+ end)
+ extensibles[font] = codes
+ return codes
+end)
+
+function mathematics.extensiblecode(family,unicode)
+ return extensibles[family_font(family or 0)][unicode][1]
+end
+
+function commands.extensiblecode(family,unicode)
+ context(extensibles[family_font(family or 0)][unicode][1])
+end
+
+-- left : [head] ...
+-- right : ... [head]
+-- horizontal : [head] ... [head]
+--
+-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
+
+function commands.horizontalcode(family,unicode)
+ local font = family_font(family or 0)
+ local data = extensibles[font][unicode]
+ local kind = data[1]
+ if kind == e_left then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local left = charlist[1]
+ texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
+ texdimen.scratchrightoffset = 0
+ elseif kind == e_right then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local right = charlist[#charlist]
+ texdimen.scratchleftoffset = 0
+ texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ elseif kind == e_horizontal then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local left = charlist[1]
+ local right = charlist[#charlist]
+ texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
+ texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ else
+ texdimen.scratchleftoffset = 0
+ texdimen.scratchrightoffset = 0
+ end
+ context(kind)
+end
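The extensibles hash introduced above is a lazily filled two-level cache: indexing an unknown font builds a per-font codes table, and indexing an unknown character in that table runs extensiblecode once and stores the result. A minimal standalone sketch of that memoization pattern, with a stand-in compute function instead of the real variant lookup and a local substitute for table.setmetatableindex:

    local function setmetatableindex(t,f)       -- local substitute for table.setmetatableindex
        return setmetatable(t,{ __index = f })
    end

    local ncalls = 0

    local function extensiblecode(font,unicode) -- stand-in for the real variant lookup
        ncalls = ncalls + 1
        return { "unknown", font, unicode }
    end

    local cache = setmetatableindex({ }, function(cache,font)
        local codes = setmetatableindex({ }, function(codes,unicode)
            local status = extensiblecode(font,unicode)
            codes[unicode] = status             -- remember per character
            return status
        end)
        cache[font] = codes                     -- remember per font
        return codes
    end)

    print(cache[1][0x2211][1], ncalls)          -- unknown  1
    print(cache[1][0x2211][1], ncalls)          -- unknown  1  (cached, no recomputation)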
diff --git a/Master/texmf-dist/tex/context/base/math-ali.mkiv b/Master/texmf-dist/tex/context/base/math-ali.mkiv
index 94800b3ba8b..a172dc8ae5f 100644
--- a/Master/texmf-dist/tex/context/base/math-ali.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-ali.mkiv
@@ -15,17 +15,6 @@
\unprotect
-% helpers .. will move
-
-\def\doxxdoubleempty#1#2%
- {\ifx#2[\expandafter\dodoxxdoubleempty\else\expandafter\noxxdoubleempty\fi#1#2}
-
-\def\dodoxxdoubleempty#1[#2]#3%
- {\ifx#3[\else\expandafter\nonoxxdoubleempty\fi#1[#2]#3}
-
-\def\noxxdoubleempty #1{#1[][]}
-\def\nonoxxdoubleempty#1[#2]{#1[#2][]}
-
%D The code here has been moved from other files. Beware: the \MKII\ and
%D \MKIV\ code is not gathered in files with the same name. In the
%D meantime this code has been adapted to \MKIV\ but more is possible.
@@ -38,19 +27,25 @@
% n>1 #### needed, strange # interaction in recurse
+\newtoks\c_math_align_a
+\newtoks\c_math_align_b
+\newtoks\c_math_align_c
+
\def\math_build_eqalign
{\scratchtoks\emptytoks
- \dorecurse{\mathalignmentparameter\c!m}
- {\ifnum\recurselevel>\plusone
- %\appendtoks
- % \tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint
- %\to\scratchtoks
- \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint}%
- \fi
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksa}}%
- \dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
- {\normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksb}}}}%
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksc}}}
+ \dorecurse{\mathalignmentparameter\c!m}\math_build_eqalign_step
+ \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_c}}}
+
+\def\math_build_eqalign_step
+ {\ifnum\recurselevel>\plusone
+ %\appendtoks
+ % \tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint
+ %\to\scratchtoks
+ \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint}%
+ \fi
+ \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
+ \dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
+ {\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_b}}}}
\def\math_math_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks\displaystyle{{}#1{}}$}
\def\math_text_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks#1$}
@@ -73,14 +68,14 @@
% use zeroskipplusfill
\def\math_prepare_r_eqalign_no
- {\!!toksa{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
- \!!toksb{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
\ifnum\mathraggedstatus=\plusone
- \!!toksc{\hfil&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
\else\ifnum\mathraggedstatus=\plusthree
- \!!toksc{\hfil\tabskip\zeropoint\!!plus 1\!!fill&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
\else
- \!!toksc{\hfil\tabskip\centering&\llap{\span\math_text_in_eqalign{##}}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil\tabskip\centering&\llap{\span\math_text_in_eqalign{##}}\tabskip\zeropoint}%
\fi\fi
\global\mathnumberstatus\zerocount
\math_build_eqalign
@@ -88,31 +83,31 @@
\tabskip\centering}
\def\math_prepare_l_eqalign_no
- {\!!toksa{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
- \!!toksb{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
% problem: number is handled after rest and so ends up in the margin
\ifnum\mathraggedstatus=\plusone
- \!!toksc{\hfil&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
\else\ifnum\mathraggedstatus=\plusthree
- \!!toksc{\hfil\tabskip\zeropoint\!!plus 1\!!fill&\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
\else
- \!!toksc{\hfil\tabskip\centering&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil\tabskip\centering&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
\fi\fi
\global\mathnumberstatus\zerocount
\math_build_eqalign
\the\mathdisplayaligntweaks
\tabskip\centering}
-\def\math_both_eqalign_no#1#2%
+\def\math_both_eqalign_no_normal#1#2%
{\ifmmode
- \the\mathdisplayaligntweaks % \let\doplaceformulanumber\relax % strange hack
+ \the\mathdisplayaligntweaks % \let\strc_formulas_place_number\relax % strange hack
\vcenter\bgroup
\let\math_finish_eqalign_no\egroup
\else
\let\math_finish_eqalign_no\relax
\fi
#1%
- \halign \ifcase\eqalignmode \or to \displaywidth \fi \@EA {\the\scratchtoks\crcr#2\crcr}%
+ \halign \ifcase\eqalignmode \or to \displaywidth \fi \expandafter {\the\scratchtoks\crcr#2\crcr}%
\math_finish_eqalign_no}
\def\math_both_eqalign_no_aligned#1%
@@ -128,7 +123,7 @@
\fi
\fi
#1%
- \halign \ifcase\eqalignmode \or to \displaywidth \fi \@EA \bgroup\the\scratchtoks\crcr}
+ \halign \ifcase\eqalignmode \or to \displaywidth \fi \expandafter \bgroup\the\scratchtoks\crcr}
\def\math_rlap#1%
{\setbox\scratchbox\hbox{#1}%
@@ -158,7 +153,7 @@
\newtoks \everymathalignment
\def\math_alignment_NR_indeed[#1][#2]%
- {\donestedformulanumber{#1}{#2}% to be tagged (better an attribute)
+ {\strc_formulas_place_number_nested{#1}{#2}% to be tagged (better an attribute)
\crcr
\dostoptagged % finish row
\noalign{\glet\math_alignment_NC\math_alignment_NC_first}} % noalign used for change state, conditional does not work here
@@ -221,11 +216,31 @@
\setupmathalignment
[\c!n=2,
\c!m=1,
- \c!distance=1em]
+ \c!distance=\emwidth]
\definemathalignment[align] % default case (this is what amstex users expect)
\definemathalignment[\v!mathalignment] % prefered case (this is cleaner, less clashing)
+% special case.. in case one mistypes ..
+
+\ifdefined \startalignment
+
+ \let\align_math_normal_start\startalign
+ \let\align_math_normal_stop \stopalign
+
+ \let\align_text_normal_start\startalignment
+ \let\align_text_normal_stop \stopalignment
+
+ \unexpanded\def\startalign{\ifmmode\expandafter\align_math_normal_start\else\expandafter\align_text_normal_start\fi}
+ \unexpanded\def\stopalign {\ifmmode\expandafter\align_math_normal_stop \else\expandafter\align_text_normal_stop \fi}
+
+ \let\startalignment\startalign
+ \let\stopalignment \stopalign
+
+\fi
+
+%
+
\def\numberedeqalign
{\doifelse{\formulaparameter\c!location}\v!left
\math_handle_eqalign_no_l_aligned
@@ -265,7 +280,7 @@
\letvalue{\??mathalignmentvariant\v!middle}\plusthree
\def\math_align_NR_generic[#1][#2]%
- {\donestedformulanumber{#1}{#2}\crcr}
+ {\strc_formulas_place_number_nested{#1}{#2}\crcr}
%D \starttyping
%D \placeformula[eqn0]\startformula \startalign[n=1] a\NR \stopalign \stopformula See \in[eqn0]
@@ -408,8 +423,9 @@
\installcommandhandler \??mathcases {mathcases} \??mathcases
\setupmathcases
- [\c!distance=1em,
- \c!numberdistance=2.5em,
+ [\c!distance=\emwidth,
+ \c!strut=\v!yes, % new
+ \c!numberdistance=2.5\emwidth,
\c!left={\left\{\mskip\thinmuskip},
\c!right={\right.}]
@@ -423,30 +439,72 @@
\edef\currentmathcases{#1}%
\dosingleempty\math_cases_start_indeed}
+\def\math_cases_NC_zero
+ {\math_cases_NC}
+
+\def\math_cases_MC_zero
+ {\math_cases_NC
+ \ifmmode\else
+ \startimath
+ \let\math_cases_end_math\stopimath
+ \fi}
+
+\let\math_cases_end_math\relax
+
+\def\math_cases_NR_zero
+ {\unskip
+ \math_cases_end_math
+ \aligntab
+ \global\let\math_cases_NC\math_cases_NC_first
+ \dodirectdoubleempty\math_cases_NR}
+
+\def\math_cases_NC_first
+ {\global\let\math_cases_NC\math_cases_NC_second}
+
+\def\math_cases_NC_second
+ {\math_cases_end_math\aligntab}
+
+\let\math_cases_NR\math_align_NR_generic
+
\unexpanded\def\math_cases_start_indeed[#1]%
{\iffirstargument
\setupcurrentmathcases[#1]%
\fi
+ \edef\p_strut{\mathcasesparameter\c!strut}%
+ \ifx\p_strut\v!yes
+ \let\math_cases_strut\strut
+ \else
+ \let\math_cases_strut\relax
+ \fi
\mathcasesparameter\c!left
\vcenter\bgroup
\pushmacro\math_cases_NC
\let\endmath\relax
- \def\NC{\math_cases_NC}%
- \def\MC{\math_cases_NC\ifmmode\else$\def\endmath{$}\fi}%
- \global\let\math_cases_NC\math_cases_NC_indeed
- \def\NR{\unskip\endmath&\global\let\math_cases_NC\math_cases_NC_indeed\doxxdoubleempty\math_cases_NR}%
+ \let\NC\math_cases_NC_zero
+ \let\MC\math_cases_MC_zero
+ \let\NR\math_cases_NR_zero
+ \global\let\math_cases_NC\math_cases_NC_first
\normalbaselines
\mathsurround\zeropoint
\everycr\emptytoks
\tabskip\zeropoint
\global\c_math_eqalign_column\plusone
\halign\bgroup
- $\mathcasesparameter\c!style##$\hfil
- &\hskip\mathcasesparameter\c!distance\relax
- \popmacro\math_cases_NC##\hfil
- &\hskip\mathcasesparameter\c!numberdistance\relax
- \let\formuladistance\!!zeropoint
- \span\math_text_in_eqalign{##}%
+ \startimath
+ \mathcasesparameter\c!style
+ \alignmark\alignmark
+ \stopimath
+ \hfil
+ \aligntab
+ \hskip\mathcasesparameter\c!distance\relax
+ \popmacro\math_cases_NC
+ \math_cases_strut % looks better
+ \alignmark\alignmark
+ \hfil
+ \aligntab
+ \hskip\mathcasesparameter\c!numberdistance\relax
+ \let\formuladistance\!!zeropoint
+ \span\math_text_in_eqalign{\alignmark\alignmark}%
\crcr} % todo: number
\def\math_cases_stop
@@ -457,11 +515,6 @@
\mathcasesparameter\c!right
\endgroup}
-\def\math_cases_NC_indeed
- {\gdef\math_cases_NC{\endmath&}}
-
-\let\math_cases_NR\math_align_NR_generic
-
\definemathcases[cases]
\definemathcases[\v!mathcases]
@@ -515,7 +568,7 @@
\installcommandhandler \??mathmatrix {mathmatrix} \??mathmatrix
\setupmathmatrix
- [\c!distance=1em,
+ [\c!distance=\emwidth,
\c!left=,
\c!right=,
\c!align=\v!middle]
@@ -570,20 +623,20 @@
\definemathmatrix[\v!mathmatrix]
\def\math_matrix_prepare
- {\!!toksa{\strut\math_first_in_eqalign\math_left_of_equalign\span
+ {\c_math_align_a{\strut\math_first_in_eqalign\math_left_of_equalign\span
\math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
- \!!toksb{&\hskip\mathmatrixparameter\c!distance
+ \c_math_align_b{&\hskip\mathmatrixparameter\c!distance
\math_next_in_eqalign\math_left_of_equalign\span
\math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
- \!!toksc{&&\hskip\mathmatrixparameter\c!distance
+ \c_math_align_c{&&\hskip\mathmatrixparameter\c!distance
\math_left_of_equalign\span
\math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
\scratchtoks\emptytoks
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksa}}%
+ \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
\dorecurse{\numexpr\scratchcounter-\plusone\relax}
- {\normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksb}}}%
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\!!toksc}}%
- \halign \@EA \bgroup\the\scratchtoks \crcr}
+ {\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_b}}}%
+ \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_c}}%
+ \halign \expandafter \bgroup\the\scratchtoks \crcr}
\unexpanded\def\dodomatrixNC
{\gdef\domatrixNC{\endmath&}}
@@ -887,10 +940,10 @@
\expandafter\mathbin
\else\ifdim\wd\scratchbox>\zeropoint
\endgroup
- \expandafter\expandafter\expandafter\mathrel
+ \doubleexpandafter\mathrel
\else
\endgroup
- \expandafter\expandafter\expandafter\firstofoneargument
+ \doubleexpandafter\firstofoneargument
\fi\fi}
\unexpanded\def\overset#1#2%
@@ -947,7 +1000,7 @@
\let\normalleqno\leqno
\let\normaleqno \eqno
% added
- \doplaceformulanumber
+ \strc_formulas_place_number
\setbox\scratchbox\math_hbox to \displaywidth\bgroup
\mathinnerstrut
$%
diff --git a/Master/texmf-dist/tex/context/base/math-arr.mkiv b/Master/texmf-dist/tex/context/base/math-arr.mkiv
index f18d6be2b27..6824c362e67 100644
--- a/Master/texmf-dist/tex/context/base/math-arr.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-arr.mkiv
@@ -11,6 +11,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D We keep this file around as a reference of how things were done in the
+%D past. You can still load this module, but it has been replaced by more
+%D modern code.
+
\writestatus{loading}{ConTeXt Math Macros / Arrows}
\unprotect
@@ -30,156 +34,73 @@
% \mathord\rightarrow
% $}
%
-% \def\leftarrowfill % brrr no longer in luated
+% \def\leftarrowfill % brrr no longer in luatex
% {$%
% \mathsurround\zeropoint
% \mathord\leftarrow
% \mkern-7mu%
% \cleaders\hbox{$\mkern-2mu\smash-\mkern-2mu$}\hfill
% \mkern-7mu
-% \smash-$
+% \smash-%
% $}
-\def\exmthfont#1{\mr} % \symbolicsizedfont#1\plusone{MathExtension}}
+%D Extensible arrows are arrows that change their length according to the width of
+%D the text to be placed above and below the arrow.
+%D
+%D Since we need to define a lot of arrows, we first define some helper macros. The
+%D basic idea is to measure the width of the box to be placed above and below the
+%D arrow, and make the \quote {body} of the arrow as long as the bigger of the two
+%D widths.
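+%D
+%D For instance (an informal illustration, the labels are arbitrary):
+%D
+%D \starttyping
+%D \math{\xrightarrow{a}}
+%D \math{\xrightarrow{a+b+c+d}} % the arrow body stretches to the width of the label
+%D \stoptyping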
-\def\domthfrac#1#2#3#4#5#6#7%
- {\begingroup
- \mathsurround\zeropoint
- \setbox0\hbox{$#1 #6$}%
- \setbox2\hbox{$#1 #7$}%
- \dimen0\wd0
- \ifdim\wd2>\dimen0 \dimen0\wd2 \fi
- \setbox4\hbox to \dimen0{\leaders\hbox{#4}\hss#5}%
- \mathord{\vcenter{{\offinterlineskip
- \hbox to \dimen0{\hss\box0\hss}%
- \kern \ht4%
- \hbox to \dimen0{\hss\copy4\hss}%
- \kern \ht4%
- \hbox to \dimen0{\hss\box2\hss}}}}%
- \endgroup}
+\installcorenamespace{matharrowsettings}
-\def\domthsqrt#1#2#3#4#5%
- {\begingroup
- \mathsurround\zeropoint
- \setbox0\hbox{$#1 #5$}%
- \dimen0=1.05\ht0 \advance\dimen0 1pt \ht0 \dimen0
- \dimen0=1.05\dp0 \advance\dimen0 1pt \dp0 \dimen0
- \dimen0\wd0
- \setbox4\hbox to \dimen0{\exmthfont#2\leaders\hbox{#3}\hfill#4}%
- \delimitershortfall=0pt
- \nulldelimiterspace=0pt
- \setbox2\hbox{$\left\delimiter"0270370 \vrule height\ht0 depth \dp0 width0pt
- \right.$}%
- \mathord{\vcenter{\hbox{\copy2
- \rlap{\raise\dimexpr\ht2-\ht4\relax\copy4}\copy0}}}%
- \endgroup}
+\def\m_math_arrows_factor{1}
+\def\m_math_arrows_extra {0}
-\def\mthfrac#1#2#3#4#5{\mathchoice
- {\domthfrac\displaystyle \textface {#1}{#2}{#3}{#4}{#5}}%
- {\domthfrac\textstyle \textface {#1}{#2}{#3}{#4}{#5}}%
- {\domthfrac\scriptstyle \scriptface {#1}{#2}{#3}{#4}{#5}}%
- {\domthfrac\scriptscriptstyle\scriptscriptface{#1}{#2}{#3}{#4}{#5}}}
-
-\def\mthsqrt#1#2#3{\mathchoice
- {\domthsqrt\displaystyle \textface {#1}{#2}{#3}}%
- {\domthsqrt\textstyle \textface {#1}{#2}{#3}}%
- {\domthsqrt\scriptstyle \textface {#1}{#2}{#3}}%
- {\domthsqrt\scriptscriptstyle\textface {#1}{#2}{#3}}}
-
-% temp here
-
-%D We next define extensible arrows. Extensible arrows are arrows that
-%D change their length according to the width of the text to be placed
-%D above and below the arrow. Since we need to define a lot of arrows,
-%D we first define some helper macros. The basic idea is to measure
-%D the width of the box to be placed above and below the arrow, and
-%D make the \quotation{body} of the arrow as long as the bigger of the
-%D two widths.
-
-\def\mtharrfactor{1}
-\def\mtharrextra {0}
-
-% \def\domthxarr#1#2#3#4#5% hm, looks like we do a double mathrel
-% {\begingroup
-% \def\mtharrfactor{1}%
-% \def\mtharrextra {0}%
-% \processaction[#1] % will be sped up
-% [ \v!none=>\def\mtharrfactor{0},
-% \v!small=>\def\mtharrextra{10},
-% \v!medium=>\def\mtharrextra{15},
-% \v!big=>\def\mtharrextra{20},
-% \v!normal=>,
-% \v!default=>,
-% \v!unknown=>\doifnumberelse{#1}{\def\mtharrextra{#1}}\donothing]%
-% \mathsurround\zeropoint
-% \muskip0=\thirdoffourarguments #2mu
-% \muskip2=\fourthoffourarguments #2mu
-% \muskip4=\firstoffourarguments #2mu
-% \muskip6=\secondoffourarguments #2mu
-% \muskip0=\mtharrfactor\muskip0 \advance\muskip0 \mtharrextra mu
-% \muskip2=\mtharrfactor\muskip2 \advance\muskip2 \mtharrextra mu
-% \setbox0\hbox{$\scriptstyle
-% \mkern\muskip4\relax
-% \mkern\muskip0\relax
-% #5\relax
-% \mkern\muskip2\relax
-% \mkern\muskip6\relax
-% $}%
-% \setbox4\hbox{#3\displaystyle}%
-% \dimen0\wd0
-% \ifdim\wd4>\dimen0 \dimen0\wd4 \fi
-% \setbox2\hbox{$\scriptstyle
-% \mkern\muskip4\relax
-% \mkern\muskip0\relax
-% #4\relax
-% \mkern\muskip2\relax
-% \mkern\muskip6\relax
-% $}%
-% \ifdim\wd2>\dimen0 \dimen0\wd2 \fi
-% \setbox4\hbox to \dimen0{#3\displaystyle}%
-% \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}\limits\normalsuperscript{\box0}\normalsubscript{\box2}}%
-% \endgroup}
-
-\def\domthxarr#1#2#3#4#5% hm, looks like we do a double mathrel
+\setvalue{\??matharrowsettings\v!none }{\def\m_math_arrows_factor{0}}
+\setvalue{\??matharrowsettings\v!small }{\def\m_math_arrows_extra{10}}
+\setvalue{\??matharrowsettings\v!medium }{\def\m_math_arrows_extra{15}}
+\setvalue{\??matharrowsettings\v!big }{\def\m_math_arrows_extra{20}}
+\setvalue{\??matharrowsettings\v!normal }{}
+\setvalue{\??matharrowsettings }{}
+\setvalue{\??matharrowsettings\s!unknown}{\doifnumberelse\p_math_spacing{\let\m_math_arrows_extra\p_math_spacing}\donothing}
+
+\def\math_arrows_construct#1#2#3#4#5% hm, looks like we do a double mathrel (a bit cleaned up .. needs checking)
{\begingroup
- \def\mtharrfactor{1}%
- \def\mtharrextra {0}%
- \processaction[#1] % will be sped up
- [ \v!none=>\def\mtharrfactor{0},
- \v!small=>\def\mtharrextra{10},
- \v!medium=>\def\mtharrextra{15},
- \v!big=>\def\mtharrextra{20},
- \v!normal=>,
- \v!default=>,
- \v!unknown=>\doifnumberelse{#1}{\def\mtharrextra{#1}}\donothing]%
+ \def\m_math_arrows_factor{1}%
+ \def\m_math_arrows_extra {0}%
+ \edef\p_math_spacing{#1}%
+ \csname\??matharrowsettings
+ \ifcsname\??matharrowsettings\p_math_spacing\endcsname\p_math_spacing\else\s!unknown\fi
+ \endcsname
\mathsurround\zeropoint
- \muskip0=\thirdoffourarguments #2mu
- \muskip2=\fourthoffourarguments #2mu
- \muskip0=\mtharrfactor\muskip0
- \muskip2=\mtharrfactor\muskip2
- \advance\muskip0 \mtharrextra mu
- \advance\muskip2 \mtharrextra mu
- \advance\muskip0 \firstoffourarguments #2mu
- \advance\muskip2 \secondoffourarguments #2mu
+ \muskip0=\muexpr\m_math_arrows_factor\muexpr\thirdoffourarguments #2\onemuskip\relax+\m_math_arrows_extra\onemuskip+\firstoffourarguments #2\onemuskip\relax
+ \muskip2=\muexpr\m_math_arrows_factor\muexpr\fourthoffourarguments#2\onemuskip\relax+\m_math_arrows_extra\onemuskip+\secondoffourarguments#2\onemuskip\relax
\setbox0\hbox{$\scriptstyle
\mkern\muskip0\relax
#5\relax
\mkern\muskip2\relax
$}%
- \setbox4\hbox{#3\displaystyle}%
- \dimen0\wd0
- \ifdim\wd4>\dimen0 \dimen0\wd4 \fi
\setbox2\hbox{$\scriptstyle
\mkern\muskip0\relax
#4\relax
\mkern\muskip2\relax
$}%
- \ifdim\wd2>\dimen0 \dimen0\wd2 \fi
- \setbox4\hbox to \dimen0{#3\displaystyle}%
+ \setbox4\hbox{#3\displaystyle}%
+ \dimen0\wd0\relax
+ \ifdim\wd2>\dimen0
+ \dimen0\wd2\relax
+ \fi
+ \ifdim\wd4>\dimen0
+ \dimen0\wd4\relax
+ \fi
+ \ifdim\wd4=\dimen0\else
+ \setbox4\hbox to \dimen0{#3\displaystyle}%
+ \fi
\mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}\limits\normalsuperscript{\box0}\normalsubscript{\box2}}%
\endgroup}
-\let\domthxarrsingle\domthxarr
+\let\math_arrows_construct_single\math_arrows_construct
%D There are some arrows which are created by stacking two arrows. The next
%D macro helps in defining such \quotation{double arrows}.
@@ -194,29 +115,27 @@
% \chemical{SO_2}
% \stopchemicalformula
-\def\domthxarrdouble#1#2#3#4#5#6#7% opt l r sp rs top bot
+\def\math_arrows_construct_double#1#2#3#4#5#6#7% opt l r sp rs top bot
{\mathrel
- {\scratchdimen.32ex\relax % was .22, todo: make configurable
- \setbox0\hbox{$\domthxarr{#1}{#2}{#4}{\phantom{#6}}{#7}$}%
- \setbox2\hbox{$\domthxarr{#1}{#3}{#5}{#6}{\phantom{#7}}$}%
+ {\scratchdimen.32\exheight\relax % was .22, todo: make configurable
+ \setbox0\hbox{$\math_arrows_construct{#1}{#2}{#4}{\phantom{#6}}{#7}$}%
+ \setbox2\hbox{$\math_arrows_construct{#1}{#3}{#5}{#6}{\phantom{#7}}$}%
\raise\scratchdimen\box0
\kern-\wd2
\lower\scratchdimen\box2}}
%D \macros{definematharrow}
%D
-%D Macro for defining new arrows. We can define two types of
-%D arrows|<|single arrows and double arrows. Single arrows are defined
-%D as
+%D Macro for defining new arrows. We can define two types of arrows|<|single arrows
+%D and double arrows. Single arrows are defined as
%D
%D \starttyping
%D \definematharrow [xrightarrow] [0359] [\rightarrowfill]
%D \stoptyping
%D
-%D The first argument is the name of the arrow (\tex{xrightarrow} in
-%D this case.) The second argument consists of a set of 4 numbers and
-%D specify the spacing correction in math units~\type{mu}. These
-%D numbers define:
+%D The first argument is the name of the arrow (\tex {xrightarrow} in this case). The
+%D second argument consists of a set of 4 numbers and specifies the spacing correction
+%D in math units~\type {mu}. These numbers define:
%D
%D \startlines
%D 1st number: arrow||tip correction
@@ -225,9 +144,9 @@
%D 4th number: space (multiplied by \tex{matharrfactor} and advanced by \tex{matharrextra})
%D \stoplines
%D
-%D The third argument is the name of the extensible fill. The third
-%D argument is optional when the arrow is redefined later (this is
-%D useful for font specific tweaking of the skips.) For example,
+%D The third argument is the name of the extensible fill. The third argument is optional
+%D when the arrow is redefined later (this is useful for font-specific tweaking of the
+%D skips). For example,
%D
%D \startbuffer
%D \math{\xrightarrow{above}}
@@ -245,78 +164,80 @@
%D [\rightharpoonupfill,\leftharpoondownfill]
%D \stoptyping
%D
-%D The second and the third set of arguments consist of comma
-%D separated values. The first element of the second argument
-%D (\type{3095}) corresponds to the spacing correction of top arrow
-%D fill (\tex{rightarrowupfill}). Similarly, \type{0359} corresponds
-%D to bottom arrow fill \tex{leftharpoondownfill}). Stacking them on
-%D top of each other we get $\xrightleftharpoons[big]{above}{below}$.
-%D The following math arrows are defined
+%D The second and the third set of arguments consist of comma separated values. The
+%D first element of the second argument (\type {3095}) corresponds to the spacing
+%D correction of the top arrow fill (\tex {rightharpoonupfill}). Similarly, \type {0359}
+%D corresponds to the bottom arrow fill (\tex {leftharpoondownfill}). Stacking them on
+%D top of each other we get $\xrightleftharpoons [big] {above} {below}$. The
+%D following math arrows are defined
%D
%D \placetable[none]{}{\starttable[|l|m|]
-%D \NC \tex{xrightarrow } \NC \xrightarrow [big] \NC \NR
-%D \NC \tex{xleftarrow } \NC \xleftarrow [big] \NC \NR
-%D \NC \tex{xequal } \NC \xequal [big] \NC \NR
-%D \NC \tex{xRightarrow } \NC \xRightarrow [big] \NC \NR
-%D \NC \tex{xLeftarrow } \NC \xLeftarrow [big] \NC \NR
-%D \NC \tex{xLeftrightarrow } \NC \xLeftrightarrow [big] \NC \NR
-%D \NC \tex{xleftrightarrow } \NC \xleftrightarrow [big] \NC \NR
-%D \NC \tex{xmapsto } \NC \xmapsto [big] \NC \NR
-%D \NC \tex{xtwoheadrightarrow } \NC \xtwoheadrightarrow [big] \NC \NR
-%D \NC \tex{xtwoheadleftarrow } \NC \xtwoheadleftarrow [big] \NC \NR
-%D \NC \tex{xrightharpoondown } \NC \xrightharpoondown [big] \NC \NR
-%D \NC \tex{xrightharpoonup } \NC \xrightharpoonup [big] \NC \NR
-%D \NC \tex{xleftharpoondown } \NC \xleftharpoondown [big] \NC \NR
-%D \NC \tex{xleftharpoonup } \NC \xleftharpoonup [big] \NC \NR
-%D \NC \tex{xhookleftarrow } \NC \xhookleftarrow [big] \NC \NR
-%D \NC \tex{xhookrightarrow } \NC \xhookrightarrow [big] \NC \NR
-%D \NC \tex{xleftrightharpoons } \NC \xleftrightharpoons [big] \NC \NR
-%D \NC \tex{xrightleftharpoons } \NC \xrightleftharpoons [big] \NC \NR
+%D \NC \tex{xrightarrow} \NC \xrightarrow [big] \NC \NR
+%D \NC \tex{xleftarrow} \NC \xleftarrow [big] \NC \NR
+%D \NC \tex{xequal} \NC \xequal [big] \NC \NR
+%D \NC \tex{xRightarrow} \NC \xRightarrow [big] \NC \NR
+%D \NC \tex{xLeftarrow} \NC \xLeftarrow [big] \NC \NR
+%D \NC \tex{xLeftrightarrow} \NC \xLeftrightarrow [big] \NC \NR
+%D \NC \tex{xleftrightarrow} \NC \xleftrightarrow [big] \NC \NR
+%D \NC \tex{xmapsto} \NC \xmapsto [big] \NC \NR
+%D \NC \tex{xtwoheadrightarrow} \NC \xtwoheadrightarrow [big] \NC \NR
+%D \NC \tex{xtwoheadleftarrow} \NC \xtwoheadleftarrow [big] \NC \NR
+%D \NC \tex{xrightharpoondown} \NC \xrightharpoondown [big] \NC \NR
+%D \NC \tex{xrightharpoonup} \NC \xrightharpoonup [big] \NC \NR
+%D \NC \tex{xleftharpoondown} \NC \xleftharpoondown [big] \NC \NR
+%D \NC \tex{xleftharpoonup} \NC \xleftharpoonup [big] \NC \NR
+%D \NC \tex{xhookleftarrow} \NC \xhookleftarrow [big] \NC \NR
+%D \NC \tex{xhookrightarrow} \NC \xhookrightarrow [big] \NC \NR
+%D \NC \tex{xleftrightharpoons} \NC \xleftrightharpoons [big] \NC \NR
+%D \NC \tex{xrightleftharpoons} \NC \xrightleftharpoons [big] \NC \NR
%D \stoptable}
+%D If needed this can be optimized (i.e. we can preexpand using \type
+%D {\docheckedpair}).
+
\unexpanded\def\definematharrow
- {\doquadrupleargument\dodefinematharrow}
+ {\doquadrupleargument\math_arrows_define}
-\def\dodefinematharrow[#1][#2][#3][#4]% name type[none|both] template command
+\def\math_arrows_define[#1][#2][#3][#4]% name type[none|both] template command
{\iffourthargument
- \executeifdefined{dodefine#2arrow}\gobblethreearguments{#1}{#3}{#4}%
+ \executeifdefined{math_arrows_define_#2}\gobblethreearguments{#1}{#3}{#4}%
\else\ifthirdargument
- \dodefinebotharrow{#1}{#2}{#3}%
+ \math_arrows_define_both{#1}{#2}{#3}%
\else\ifsecondargument
- \redefinebotharrow{#1}{#2}{#3}%
+ \math_arrows_define_both_again{#1}{#2}{#3}%
\fi\fi\fi}
-\def\redefinebotharrow#1#2#3% real dirty, this overload!
- {\doifdefined{#1}
- {\pushmacro\dohandlemtharrow
- \def\dohandlemtharrow[##1][##2]{\setuvalue{#1}{\dohandlemtharrow[#2][##2]}}%
- % == \def\dohandlemtharrow[##1][##2]{\dodefinebotharrow{#1}{#2}{##2}}%
- \getvalue{#1}%
- \popmacro\dohandlemtharrow}}
+\def\math_arrows_define_both_again#1#2#3% real dirty, this overload!
+ {\ifcsname#1\endcsname
+ \pushmacro\math_arrows_do
+ \def\math_arrows_do[##1][##2]{\setuvalue{#1}{\math_arrows_do[#2][##2]}}%
+ \getvalue{#1}%
+ \popmacro\math_arrows_do
+ \fi}
-\def\dodefinebotharrow#1#2#3%
- {\setuvalue{#1}{\dohandlemtharrow[#2][#3]}}
+\def\math_arrows_define_both#1#2#3%
+ {\setuvalue{#1}{\math_arrows_do[#2][#3]}}
-\def\dohandlemtharrow
- {\dotripleempty\doxmtharrow}
+\unexpanded\def\math_arrows_do
+ {\doquadrupleempty\math_arrows_handle}
-\def\doxmtharrow[#1][#2][#3]% #3 == optional arg
- {\def\dodoxmtharrow{\dododoxmtharrow[#1,\empty,\empty][#2,\empty,\empty][#3]}% {##1}{##2}
- \dodoublegroupempty\dodoxmtharrow}
+\def\math_arrows_handle[#1][#2][#3][#4]% #3 == optional arg .. \empty can be just 'empty' [#4] gobbles spaces
+ {\def\math_arrows_handle_indeed{\math_arrows_handle_finalize[#1,\empty,\empty][#2,\empty,\empty][#3]}% {##1}{##2}
+ \dodoublegroupempty\math_arrows_handle_indeed}
-\def\dododoxmtharrow[#1,#2,#3][#4,#5,#6][#7]#8#9% [3] is the optional arg
+\def\math_arrows_handle_finalize[#1,#2,#3][#4,#5,#6][#7]#8#9% [#7] is the optional arg
{\edef\!!stringa{#2}%
\ifx\!!stringa\empty
\ifsecondargument
- \mathrel{\domthxarrsingle{#7}{#1}{#4}{#8}{#9}}%
+ \mathrel{\math_arrows_construct_single{#7}{#1}{#4}{#8}{#9}}%
\else
- \mathrel{\domthxarrsingle{#7}{#1}{#4}{}{#8}}%
+ \mathrel{\math_arrows_construct_single{#7}{#1}{#4}{}{#8}}%
\fi
\else
\ifsecondargument
- \mathrel{\domthxarrdouble{#7}{#1}{#2}{#4}{#5}{#8}{#9}}%
+ \mathrel{\math_arrows_construct_double{#7}{#1}{#2}{#4}{#5}{#8}{#9}}%
\else
- \mathrel{\domthxarrdouble{#7}{#1}{#2}{#4}{#5}{}{#8}}%
+ \mathrel{\math_arrows_construct_double{#7}{#1}{#2}{#4}{#5}{}{#8}}%
\fi
\fi}
@@ -324,40 +245,38 @@
%D \macros{mtharrowfill,defaultmtharrowfill}
%D
-%D To extend the arrows we need to define a \quotation{math arrow
-%D fill}. This command takes 8 arguments: the first four correspond
-%D the second argument of \tex{definematharrow} explained above. The
-%D other three specify the tail, body and head of the arrow. The last
-%D argument specifies the math-mode in which the arrow is drawn.
-%D \tex{defaultmtharrowfill} has values tweaked to match Latin Modern
-%D fonts. For fonts that are significantly different (e.g. cows) a
+%D To extend the arrows we need to define a \quotation {math arrow fill}. This
+%D command takes 8 arguments: the first four correspond to the second argument of
+%D \tex {definematharrow} explained above. The other three specify the tail,
+%D body and head of the arrow. The last argument specifies the math-mode in which
+%D the arrow is drawn. \tex {defaultmtharrowfill} has values tweaked to match
+%D Latin Modern fonts. For fonts that are significantly different (e.g. cows) a
%D different set of values needs to be determined.
\def\mtharrowfill#1#2#3#4#5#6#7#8%
- {$\mathsurround 0pt
- \thickmuskip0mu\medmuskip\thickmuskip\thinmuskip\thickmuskip
+ {$\mathsurround\zeropoint
+ \thickmuskip\zeromuskip\medmuskip\thickmuskip\thinmuskip\thickmuskip
\relax#8#5%
- \mkern-#1mu
- \cleaders\hbox{$#8\mkern -#2mu#6\mkern -#3mu$}\hfill
- \mkern-#4mu#7$}
+ \mkern-#1\onemuskip
+ \cleaders\hbox{$#8\mkern-#2\onemuskip#6\mkern-#3\onemuskip$}\hfill
+ \mkern-#4\onemuskip#7$}
\def\defaultmtharrowfill{\mtharrowfill 7227}
-%D We now define some arrow fills that will be used for defining the
-%D arrows. Plain \TEX\ already defines \tex{leftarrowfill} and
-%D \tex{rightarrowfill}. The \tex{defaultmtharrowfill} command defines an
-%D arrowfill that takes an argument (so that it can also be used
-%D with over and under arrows). However the Plain \TEX\ definitions of
-%D \tex{leftarrowfill} and \tex{rightarrowfill} do not take this extra
-%D argument. To be backward compatible with Plain \TEX, we define two
-%D arrowfills: \tex{specrightarrowfill} which takes an extra argument, and
-%D \tex{rightarrowfill} which does not.
+%D We now define some arrow fills that will be used for defining the arrows. Plain
+%D \TEX\ already defines \tex {leftarrowfill} and \tex {rightarrowfill}. The \tex
+%D {defaultmtharrowfill} command defines an arrowfill that takes an argument (so
+%D that it can also be used with over and under arrows). However the Plain \TEX\
+%D definitions of \tex {leftarrowfill} and \tex {rightarrowfill} do not take this
+%D extra argument. To be backward compatible with Plain \TEX, we define two
+%D arrowfills: \tex {specrightarrowfill} which takes an extra argument, and \tex
+%D {rightarrowfill} which does not.
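+%D
+%D Roughly, assuming a wide enough box (illustrative only):
+%D
+%D \starttyping
+%D \hbox to 4cm{\rightarrowfill}                 % plain-compatible, no style argument
+%D \hbox to 4cm{\specrightarrowfill\scriptstyle} % the extra argument sets the math style
+%D \stoptyping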
\unexpanded\def\specrightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
\unexpanded\def\specleftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
-\unexpanded\def\rightarrowfill {\specrightarrowfill \textstyle}
-\unexpanded\def\leftarrowfill {\specleftarrowfill \textstyle}
+\unexpanded\def\rightarrowfill {\specrightarrowfill \textstyle}
+\unexpanded\def\leftarrowfill {\specleftarrowfill \textstyle}
\unexpanded\def\equalfill {\defaultmtharrowfill \Relbar \Relbar \Relbar}
\unexpanded\def\Rightarrowfill {\defaultmtharrowfill \Relbar \Relbar \Rightarrow}
@@ -382,10 +301,27 @@
\unexpanded\def\doublebond{{\xequal}}
\unexpanded\def\triplebond{{\xtriplerel}}
-%D Now we define most commonly used arrows. These include arrows
-%D defined in \filename{amsmath.sty}, \filename{extarrows.sty},
-%D \filename{extpfel.sty} and \filename{mathtools.sty} packages for
-%D \LATEX\ (plus a few more).
+%D A bit of arrow juggling:
+%D
+%D \startbuffer
+%D \hbox to \hsize{\rightoverleftarrowfill}
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+
+\unexpanded\def\rightoverleftarrowfill
+ {\specrightoverleftarrowfill}
+
+\unexpanded\def\specrightoverleftarrowfill
+ {\defaultmtharrowfill
+ \ctxdoublearrowfillleftend
+ \ctxdoublearrowfillmiddlepart
+ \ctxdoublearrowfillrightend
+ \textstyle}
+
+%D Now we define the most commonly used arrows. These include arrows defined in \filename
+%D {amsmath.sty}, \filename {extarrows.sty}, \filename {extpfel.sty} and \filename
+%D {mathtools.sty} packages for \LATEX\ (plus a few more).
\definematharrow [xrightarrow] [0359] [\specrightarrowfill]
\definematharrow [xleftarrow] [3095] [\specleftarrowfill]
@@ -427,27 +363,26 @@
%D \macros{definemathoverarrow,defineunderarrow}
%D
-%D These macros for define math-overarrows are adapted from
-%D \filename{amsmath.sty}
+%D These macros for defining math overarrows are adapted from \filename {amsmath.sty}.
\unexpanded\def\definemathoverarrow
- {\dotripleargument\dodefinemathoverarrow}
+ {\dotripleargument\math_arrows_define_over}
-\def\dodefinemathoverarrow[#1][#2][#3]%
+\def\math_arrows_define_over[#1][#2][#3]%
{\ifthirdargument
- \setuvalue{#1}{\dohandlemathoverarrow[#2][#3]}%
+ \setuvalue{#1}{\math_arrows_over_handle[#2][#3]}%
\else
- \setuvalue{#1}{\dohandlemathoverarrow[\zeropoint][#2]}%
+ \setuvalue{#1}{\math_arrows_over_handle[\zeropoint][#2]}%
\fi}
-\def\dohandlemathoverarrow[#1][#2]%
- {\mathpalette{\dodohandlemathoverarrow{#1}{#2}}}
+\def\math_arrows_over_handle[#1][#2]%
+ {\mathpalette{\math_arrows_over_handle_indeed{#1}{#2}}}
-%D Note: \filename{math-pln.tex} has \type{\kern-\onepoint} and
-%D \filename{amsmath.sty} does not. We keep the kern amount
-%D configurable. This is useful for harpoons.
+%D Note: \filename {math-pln.tex} has \type {\kern-\onepoint} and \filename
+%D {amsmath.sty} does not. We keep the kern amount configurable. This is useful
+%D for harpoons.
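+%D
+%D A probable usage pattern (the names and the kern value here are only illustrative):
+%D
+%D \starttyping
+%D \definemathoverarrow [overrightarrow]        [\specrightarrowfill] % kern defaults to \zeropoint
+%D \definemathoverarrow [overrightarrow] [.2ex] [\specrightarrowfill] % with an explicit kern
+%D \stoptyping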
-\def\dodohandlemathoverarrow#1#2#3#4%
+\def\math_arrows_over_handle_indeed#1#2#3#4%
{\vbox{\ialign{##\crcr
#2#3\crcr
\noalign{\kern#1\nointerlineskip}%
@@ -456,21 +391,21 @@
%D Now the under arrows
\unexpanded\def\definemathunderarrow
- {\dotripleargument\dodefinemathunderarrow}
+ {\dotripleargument\math_arrows_define_under}
%D For underarrows the default kern is 0.3ex
-\def\dodefinemathunderarrow[#1][#2][#3]%
+\def\math_arrows_define_under[#1][#2][#3]%
{\ifthirdargument
- \setuvalue{#1}{\dohandlemathunderarrow[#2][#3]}%
+ \setuvalue{#1}{\math_arrows_under_handle[#2][#3]}%
\else
- \setuvalue{#1}{\dohandlemathunderarrow[0.3ex][#2]}%
+ \setuvalue{#1}{\math_arrows_under_handle[0.3ex][#2]}%
\fi}
-\def\dohandlemathunderarrow[#1][#2]%
- {\mathpalette{\dodohandlemathunderarrow{#1}{#2}}}
+\def\math_arrows_under_handle[#1][#2]%
+ {\mathpalette{\math_arrows_under_handle_indeed{#1}{#2}}}
-\def\dodohandlemathunderarrow#1#2#3#4%
+\def\math_arrows_under_handle_indeed#1#2#3#4%
{\vtop{\ialign{##\crcr
$\mathsurround\zeropoint\hfil#3#4\hfil$\crcr
\noalign{\nointerlineskip\kern#1}%
diff --git a/Master/texmf-dist/tex/context/base/math-def.mkiv b/Master/texmf-dist/tex/context/base/math-def.mkiv
index 1c602187f6b..cc2a8fae6c5 100644
--- a/Master/texmf-dist/tex/context/base/math-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-def.mkiv
@@ -108,7 +108,7 @@
\installcorenamespace{mathbig}
-\def\choosemathbig#1#2% so we accent \big{||} as well
+\unexpanded\def\choosemathbig#1#2% so we accent \big{||} as well
{{\hbox{$%
\ifcase\bigmathdelimitermethod
\doleftbigmath#2\relax
@@ -119,7 +119,7 @@
\dorightbigmath#2\relax
\else
\doleftbigmath#2\relax
- \vbox\!!to\getvalue{\??mathbig\number#1}\bodyfontsize{}%
+ \vbox to\getvalue{\??mathbig\number#1}\bodyfontsize{}%
\dorightbigmath#2\relax
\fi
\nulldelimiterspace\zeropoint\relax
@@ -167,7 +167,8 @@
\unexpanded\def\implies {\mathrel{\;\Longrightarrow\;}}
\unexpanded\def\impliedby{\mathrel{\;\Longleftarrow\;}}
\unexpanded\def\And {\mathrel{\;\internalAnd\;}}
-\unexpanded\def\iff {\;\Longleftrightarrow\;}
+%unexpanded\def\iff {\;\Longleftrightarrow\;}
+\setuvalue {iff}{\;\Longleftrightarrow\;} % nicer for if checker
% todo: virtual in math-vfu
@@ -222,8 +223,9 @@
\definemathcommand [mathstrut] {\vphantom{(}}
\definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
-\unexpanded\def\{{\mathortext\lbrace\letterleftbrace }
-\unexpanded\def\}{\mathortext\rbrace\letterrightbrace}
+\unexpanded\def\{{\mathortext\lbrace\letterleftbrace } % or maybe a chardef
+\unexpanded\def\}{\mathortext\rbrace\letterrightbrace} % or maybe a chardef
+\unexpanded\def\|{\mathortext\vert \letterbar } % or maybe a chardef
%D The following colon related definitions are provided by Aditya
%D Mahajan who derived them from \type {mathtools.sty} and \type
@@ -326,6 +328,11 @@
\unexpanded\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF }
\unexpanded\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD }
+% let's keep this
+
+\def\Umathbotaccent{\Umathaccent \s!bottom }
+\def\Umathaccents {\Umathaccent \s!both }
+
\let\normaloverbrace \overbrace
\let\normalunderbrace \underbrace
\let\normaloverparent \overparent
@@ -360,6 +367,11 @@
\unexpanded\def\surd{\normalsurd{}}
+% Some special characters:
+
+\unexpanded\def\nabla{∇} % this one adapts
+
+%
% todo mathclass=punctuation ord
% \Umathcode"02C="6 \defaultmathfamily "02C
diff --git a/Master/texmf-dist/tex/context/base/math-del.mkiv b/Master/texmf-dist/tex/context/base/math-del.mkiv
index 569b4cd3def..64657281810 100644
--- a/Master/texmf-dist/tex/context/base/math-del.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-del.mkiv
@@ -15,6 +15,8 @@
\unprotect
+%D Old code that will be redone:
+%D
%D \macros
%D {checkdelimiters, fakeleftdelimiter, fakerightdelimiter}
%D
diff --git a/Master/texmf-dist/tex/context/base/math-dim.lua b/Master/texmf-dist/tex/context/base/math-dim.lua
index a5d7c39639b..f4fc7905e3e 100644
--- a/Master/texmf-dist/tex/context/base/math-dim.lua
+++ b/Master/texmf-dist/tex/context/base/math-dim.lua
@@ -128,9 +128,9 @@ function mathematics.dimensions(dimens) -- beware, dimens get spoiled
return table.fastcopy(dimens), { }
elseif dimens.AxisHeight or dimens.axis_height then
local t = { }
- local math_x_height = dimens.x_height or 10*65526
- local math_quad = dimens.quad or 10*65526
- local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65526
+ local math_x_height = dimens.x_height or 10*65536
+ local math_quad = dimens.quad or 10*65536
+ local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65536
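+ -- 65536 scaled points make one point, so these fallbacks amount to 10pt, 10pt and 0.4pt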
dimens["0"] = 0
dimens["60"] = 60
dimens["0.25*default_rule_thickness"] = default_rule_thickness / 4
diff --git a/Master/texmf-dist/tex/context/base/math-ext.lua b/Master/texmf-dist/tex/context/base/math-ext.lua
index da00c7a9ea8..b00d6cde273 100644
--- a/Master/texmf-dist/tex/context/base/math-ext.lua
+++ b/Master/texmf-dist/tex/context/base/math-ext.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['math-ext'] = {
local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end)
+local basename = file.basename
+
local mathematics = mathematics
local characters = characters
@@ -20,12 +22,15 @@ characters.math = characters.math or { }
local mathdata = characters.math
local chardata = characters.data
-function extras.add(unicode,t)
+function extras.add(unicode,t) -- todo: if already stored ...
local min, max = mathematics.extrabase, mathematics.privatebase - 1
+ -- if mathdata[unicode] or chardata[unicode] then
+ -- report_math("extra %U overloads existing character",unicode)
+ -- end
if unicode >= min and unicode <= max then
mathdata[unicode], chardata[unicode] = t, t
else
- report_math("extra U+%05X should be in range U+%05X - U+%05X",unicode,min,max)
+ report_math("extra %U should be in range %U - %U",unicode,min,max)
end
end
@@ -38,33 +43,60 @@ function extras.copy(target,original)
local extrachar = characters[unicode]
local nextinsize = extradesc.nextinsize
if nextinsize then
- for i=1,#nextinsize do
- local nextslot = nextinsize[i]
- local nextbase = characters[nextslot]
- if nextbase then
- local nextnext = nextbase and nextbase.next
- if nextnext then
- local nextchar = characters[nextnext]
- if nextchar then
- if trace_virtual then
- report_math("extra U+%05X in %s at %s maps on U+%05X (class: %s, name: %s)",unicode,
- file.basename(properties.fullname),parameters.size,nextslot,extradesc.mathclass or "?",extradesc.mathname or "?")
- end
- characters[unicode] = nextchar
- break
- end
+ local first = 1
+ local charused = unicode
+ if not extrachar then
+ for i=1,#nextinsize do
+ local slot = nextinsize[i]
+ extrachar = characters[slot]
+ if extrachar then
+ characters[unicode] = extrachar
+ first = i + 1
+ charused = slot
+ break
end
end
end
- if not characters[unicode] then -- can be set in previous loop
- for i=1,#nextinsize do
+ if not extrachar then
+ if trace_virtual then
+ report_math("extra %U in %a at %p with class %a and name %a is not mapped",
+ unicode,basename(properties.fullname),parameters.size,
+ extradesc.mathclass,extradesc.mathname)
+ end
+ elseif not extrachar.next then
+ local nextused = false
+ for i=first,#nextinsize do
local nextslot = nextinsize[i]
local nextbase = characters[nextslot]
if nextbase then
- characters[unicode] = nextbase -- still ok?
- break
+ local nextnext = nextbase and nextbase.next
+ if nextnext then
+ local nextchar = characters[nextnext]
+ if nextchar then
+ extrachar.next = nextchar
+ nextused = nextslot
+ break
+ end
+ end
+ end
+ end
+ if trace_virtual then
+ if nextused then
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with next %U",
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname,nextused)
+ else
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next",
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname)
end
end
+ else
+ if trace_virtual then
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", -- own next
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname)
+ end
end
end
end
@@ -72,40 +104,40 @@ end
utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy")
--- 0xFE302 -- 0xFE320 for accents
-
-extras.add(0xFE302, {
- category="mn",
- description="WIDE MATHEMATICAL HAT",
- direction="nsm",
- linebreak="cm",
- mathclass="accent",
- mathname="widehat",
- mathstretch="h",
- unicodeslot=0xFE302,
- nextinsize={ 0x00302, 0x0005E },
-} )
-
-extras.add(0xFE303, {
- category="mn",
- cjkwd="a",
- description="WIDE MATHEMATICAL TILDE",
- direction="nsm",
- linebreak="cm",
- mathclass="accent",
- mathname="widetilde",
- mathstretch="h",
- unicodeslot=0xFE303,
- nextinsize={ 0x00303, 0x0007E },
-} )
+-- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre)
+--
+-- extras.add(0xFE302, {
+-- category="mn",
+-- description="WIDE MATHEMATICAL HAT",
+-- direction="nsm",
+-- linebreak="cm",
+-- mathclass="topaccent",
+-- mathname="widehat",
+-- mathstretch="h",
+-- unicodeslot=0xFE302,
+-- nextinsize={ 0x00302, 0x0005E },
+-- } )
+--
+-- extras.add(0xFE303, {
+-- category="mn",
+-- cjkwd="a",
+-- description="WIDE MATHEMATICAL TILDE",
+-- direction="nsm",
+-- linebreak="cm",
+-- mathclass="topaccent",
+-- mathname="widetilde",
+-- mathstretch="h",
+-- unicodeslot=0xFE303,
+-- nextinsize={ 0x00303, 0x0007E },
+-- } )
-- 0xFE321 -- 0xFE340 for missing characters
extras.add(0xFE321, {
category="sm",
description="MATHEMATICAL SHORT BAR",
--- direction="on",
--- linebreak="nu",
+ -- direction="on",
+ -- linebreak="nu",
mathclass="relation",
mathname="mapstochar",
unicodeslot=0xFE321,
diff --git a/Master/texmf-dist/tex/context/base/math-fbk.lua b/Master/texmf-dist/tex/context/base/math-fbk.lua
new file mode 100644
index 00000000000..eebc4e4e7d6
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-fbk.lua
@@ -0,0 +1,312 @@
+if not modules then modules = { } end modules ['math-fbk'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
+
+local report_fallbacks = logs.reporter("math","fallbacks")
+
+local fallbacks = { }
+mathematics.fallbacks = fallbacks
+
+local virtualcharacters = { }
+
+local identifiers = fonts.hashes.identifiers
+local lastmathids = fonts.hashes.lastmathids
+
+-- we need a trick (todo): if we define scriptscript, script and text in
+-- that order we could use their id's .. i.e. we could always add a font
+-- table with those id's .. in fact, we could also add a whole lot more
+-- as it doesn't hurt
+--
+-- todo: use index 'true' when luatex provides that feature (on the agenda)
+
+function fallbacks.apply(target,original)
+ local mathparameters = target.mathparameters -- why not hasmath
+ if mathparameters then
+ local characters = target.characters
+ local parameters = target.parameters
+ local mathsize = parameters.mathsize
+ local size = parameters.size
+ local usedfonts = target.fonts
+ if not usedfonts then
+ usedfonts = { }
+ target.fonts = usedfonts
+ end
+ -- This is not okay yet ... we have no proper way to refer to 'self'
+ -- (otherwise I will make my own id allocator).
+local self = #usedfonts == 0 and font.nextid() or nil -- will be true
+ local textid, scriptid, scriptscriptid
+ local textindex, scriptindex, scriptscriptindex
+ local textdata, scriptdata, scriptscriptdata
+ if mathsize == 3 then
+ -- scriptscriptsize
+ -- textid = nil -- self
+ -- scriptid = nil -- no smaller
+ -- scriptscriptid = nil -- no smaller
+textid = self
+scriptid = self
+scriptscriptid = self
+ elseif mathsize == 2 then
+ -- scriptsize
+ -- textid = nil -- self
+textid = self
+ scriptid = lastmathids[3]
+ scriptscriptid = lastmathids[3]
+ else
+ -- textsize
+ -- textid = nil -- self
+textid = self
+ scriptid = lastmathids[2]
+ scriptscriptid = lastmathids[3]
+ end
+ if textid then
+ textindex = #usedfonts + 1
+ usedfonts[textindex] = { id = textid }
+ textdata = identifiers[textid]
+ else
+ textdata = target
+ end
+ if scriptid then
+ scriptindex = #usedfonts + 1
+ usedfonts[scriptindex] = { id = scriptid }
+ scriptdata = identifiers[scriptid]
+ else
+ scriptindex = textindex
+ scriptdata = textdata
+ end
+ if scriptscriptid then
+ scriptscriptindex = #usedfonts + 1
+ usedfonts[scriptscriptindex] = { id = scriptscriptid }
+ scriptscriptdata = identifiers[scriptscriptid]
+ else
+ scriptscriptindex = scriptindex
+ scriptscriptdata = scriptdata
+ end
+-- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s",
+-- tostring(textid),tostring(scriptid),tostring(scriptscriptid))
+ local data = {
+ textdata = textdata,
+ scriptdata = scriptdata,
+ scriptscriptdata = scriptscriptdata,
+ textindex = textindex,
+ scriptindex = scriptindex,
+ scriptscriptindex = scriptscriptindex,
+ characters = characters,
+ unicode = k,
+ target = target,
+ original = original,
+ size = size,
+ mathsize = mathsize,
+ }
+-- inspect(usedfonts)
+ for k, v in next, virtualcharacters do
+ if not characters[k] then
+ local tv = type(v)
+ if tv == "table" then
+ characters[k] = v
+ elseif tv == "number" then
+ characters[k] = characters[v]
+ elseif tv == "function" then
+ characters[k] = v(data)
+ end
+ if trace_fallbacks then
+ if characters[k] then
+ report_fallbacks("extending font %a with %U",target.properties.fullname,k)
+ end
+ end
+ end
+ end
+ end
+end
+
+utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply")
+
+function fallbacks.install(unicode,value)
+ virtualcharacters[unicode] = value
+end
+
+-- a few examples:
+
+local function reference(index,char)
+ if index then
+ return { "slot", index, char }
+ else
+ return { "char", char }
+ end
+end
+
+local function raised(data,down)
+ local replacement = data.replacement
+ local character = data.scriptdata.characters[replacement]
+ if character then
+ return {
+ width = character.width,
+ height = character.height,
+ depth = character.depth,
+ commands = {
+ { "down", down and data.size/4 or -data.size/2 }, -- maybe exheight
+ reference(data.scriptindex,replacement)
+ }
+ }
+ end
+end
+
+-- virtualcharacters[0x207A] = 0x2212
+-- virtualcharacters[0x207B] = 0x002B
+-- virtualcharacters[0x208A] = 0x2212
+-- virtualcharacters[0x208B] = 0x002B
+
+virtualcharacters[0x207A] = function(data)
+ data.replacement = 0x2212
+ return raised(data)
+end
+
+virtualcharacters[0x207B] = function(data)
+ data.replacement = 0x002B
+ return raised(data)
+end
+
+virtualcharacters[0x208A] = function(data)
+ data.replacement = 0x2212
+ return raised(data,true)
+end
+
+virtualcharacters[0x208B] = function(data)
+ data.replacement = 0x002B
+ return raised(data,true)
+end
+
+-- local function repeated(data,char,n,fraction)
+-- local character = data.characters[char]
+-- if character then
+-- local width = character.width
+-- local delta = width - character.italic -- width * fraction
+-- local c = { "char", char }
+-- local r = { "right", right }
+-- local commands = { }
+-- for i=1,n-1 do
+-- width = width + delta
+-- commands[#commands+1] = c
+-- commands[#commands+1] = -delta
+-- end
+-- commands[#commands+1] = c
+-- return {
+-- width = width,
+-- height = character.height,
+-- depth = character.depth,
+-- commands = commands,
+-- }
+-- end
+-- end
+
+-- virtualcharacters[0x222C] = function(data)
+-- return repeated(data,0x222B,2,1/8)
+-- end
+
+-- virtualcharacters[0x222D] = function(data)
+-- return repeated(data,0x222B,3,1/8)
+-- end
+
+local addextra = mathematics.extras.add
+
+addextra(0xFE350, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW LEFT END",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillleftend",
+ unicodeslot=0xFE350,
+} )
+
+addextra(0xFE351, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW MIDDLE PART",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillmiddlepart",
+ unicodeslot=0xFE351,
+} )
+
+addextra(0xFE352, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW RIGHT END",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillrightend",
+ unicodeslot=0xFE352,
+} )
+
+local push = { "push" }
+local pop = { "pop" }
+local leftarrow = { "char", 0x2190 }
+local relbar = { "char", 0x2212 }
+local rightarrow = { "char", 0x2192 }
+
+virtualcharacters[0xFE350] = function(data)
+ -- return combined(data,0x2190,0x2212) -- leftarrow relbar
+ local charone = data.characters[0x2190]
+ local chartwo = data.characters[0x2212]
+ if charone and chartwo then
+ local size = data.size/2
+ return {
+ width = chartwo.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ leftarrow,
+ pop,
+ { "down", -size/2 },
+ relbar,
+ }
+ }
+ end
+end
+
+virtualcharacters[0xFE351] = function(data)
+ -- return combined(data,0x2212,0x2212) -- relbar, relbar (isn't that just equal)
+ local char = data.characters[0x2212]
+ if char then
+ local size = data.size/2
+ return {
+ width = char.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ relbar,
+ pop,
+ { "down", -size/2 },
+ relbar,
+ }
+ }
+ end
+end
+
+virtualcharacters[0xFE352] = function(data)
+ -- return combined(data,0x2192,0x2212) -- rightarrow relbar
+ local charone = data.characters[0x2192]
+ local chartwo = data.characters[0x2212]
+ if charone and chartwo then
+ local size = data.size/2
+ return {
+ width = chartwo.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ relbar,
+ pop,
+ { "right", chartwo.width - charone.width },
+ { "down", -size/2 },
+ rightarrow,
+ }
+ }
+ end
+end
+
diff --git a/Master/texmf-dist/tex/context/base/math-fen.mkiv b/Master/texmf-dist/tex/context/base/math-fen.mkiv
new file mode 100644
index 00000000000..9080ffedf53
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-fen.mkiv
@@ -0,0 +1,108 @@
+%D \module
+%D [ file=math-fen,
+%D version=2012.02.18,
+%D title=\CONTEXT\ Math Macros,
+%D subtitle=Fences,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Math Macros / Fences}
+
+\unprotect
+
+% todo: mathstyle
+
+% \definemathfence [fancybracket] [bracket] [command=yes,color=blue]
+%
+% test $|x|$ test \par
+% test $||x||$ test (okay) \par
+% test $a\left|\frac{1}{b}\right|c$ test \par
+% test $a\left||\frac{1}{b}\right||c$ test (not okay) \par
+%
+% \setupmathfences [color=red]
+%
+% test $a\fenced[bar]{\frac{1}{b}}c$ test \par
+% test $a\fenced[doublebar]{\frac{1}{b}}c$ test \par
+% test $a\fenced[bracket]{\frac{1}{b}}c$ test \par
+% test $a\fancybracket{\frac{1}{b}}c$ test \par
+
+\installcorenamespace{mathfences}
+\installcorenamespace{mathfencesleft}
+\installcorenamespace{mathfencesmiddle}
+\installcorenamespace{mathfencesright}
+
+\installcommandhandler \??mathfences {mathfence} \??mathfences
+
+\let\setupmathfences\setupmathfence
+
+\setupmathfences
+ [\c!left="002E,
+ \c!right="002E,
+ \c!middle="002E,
+ \c!mathstyle=,
+ \c!color=,
+ \c!command=]
+
+\appendtoks
+ \edef\p_command{\mathfenceparameter\c!command}%
+ \ifx\p_command\v!yes
+ \setuevalue\currentmathfence{\math_fenced_fenced[\currentmathfence]}%
+ \fi
+\to \everydefinemathfence
+
+\def\math_fenced_left {\normalleft \utfchar{\mathfenceparameter\c!left }} % no Uchar here
+\def\math_fenced_middle{\normalmiddle\utfchar{\mathfenceparameter\c!middle}} % no Uchar here
+\def\math_fenced_right {\normalright \utfchar{\mathfenceparameter\c!right }} % no Uchar here
+
+\let\fence \relax
+\let\fenced\relax
+
+\unexpanded\def\math_fenced_fenced[#1]%
+ {\begingroup
+ \edef\currentmathfence{#1}%
+ \startusemathstyleparameter\mathfenceparameter
+ \let\fence\math_fenced_middle
+ \edef\p_math_fenced_color{\mathfenceparameter\c!color}%
+ \ifx\p_math_fenced_color\empty
+ \expandafter\math_fenced_normal
+ \else
+ \expandafter\math_fenced_colored
+ \fi}
+
+\def\math_fenced_normal#1%
+ {\math_fenced_left
+ #1%
+ \math_fenced_right
+ \stopusemathstyleparameter
+ \endgroup}
+
+\def\math_fenced_colored#1%
+ {\pushcolor[\p_math_fenced_color]%
+ \math_fenced_left
+ \popcolor
+ #1%
+ \pushcolor[\p_math_fenced_color]%
+ \math_fenced_right
+ \popcolor
+ \stopusemathstyleparameter
+ \endgroup}
+
+\appendtoks
+ \let\fenced\math_fenced_fenced
+\to \everymathematics
+
+\definemathfence [parenthesis] [\c!left="0028,\c!right="0029]
+\definemathfence [bracket] [\c!left="005B,\c!right="005D]
+\definemathfence [braces] [\c!left="007B,\c!right="007D]
+\definemathfence [bar] [\c!left="007C,\c!right="007C]
+\definemathfence [doublebar] [\c!left="2016,\c!right="2016]
+\definemathfence [angle] [\c!left="003C,\c!right="003E]
+
+%definemathfence [fancybracket] [bracket] [command=yes,color=red]
+
+\protect
diff --git a/Master/texmf-dist/tex/context/base/math-for.mkiv b/Master/texmf-dist/tex/context/base/math-for.mkiv
index aee6405720c..0c8bd05aeac 100644
--- a/Master/texmf-dist/tex/context/base/math-for.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-for.mkiv
@@ -30,19 +30,4 @@
\let\setupformulas \setupformula
\let\setupsubformulas\setupsubformula
-% D \macros
-% D {setuptextformulas}
-% D
-% D This command sets up in||line math. Most features deals
-% D with grid snapping and are experimental.
-
-% \newtoks \everysetuptextformulas
-%
-% \unexpanded\def\setuptextformulas
-% {\dosingleempty\dosetuptextformulas}
-%
-% \def\dosetuptextformulas[#1]%
-% {\getparameters[\??mt][#1]%
-% \the\everysetuptextformulas}
-
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-frc.mkiv b/Master/texmf-dist/tex/context/base/math-frc.mkiv
index 59bd588c0b5..ea3a737aa81 100644
--- a/Master/texmf-dist/tex/context/base/math-frc.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-frc.mkiv
@@ -1,9 +1,9 @@
%D \module
%D [ file=math-frc,
-%D version=2007.07.19,
+%D version=2013.04.06, % 2007.07.19,
%D title=\CONTEXT\ Math Macros,
%D subtitle=Fractions,
-%D author={Hans Hagen \& Taco Hoekwater},
+%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
@@ -15,17 +15,18 @@
\unprotect
-% quite old ... still needed?
+%D todo: struts ... depends on demand
+
+%D This module is reimplemented in \MKIV\ style.
%D \macros
%D {frac, xfrac, xxfrac}
%D
-%D This is another one Tobias asked for. It replaces the
-%D primitive \type {\over}. We also take the opportunity to
-%D handle math style restoring, which makes sure units and
-%D chemicals come out ok.
-%D The \type {\frac} macro kind of replaces the awkward \type
-%D {\over} primitive. Say that we have the following formulas:
+%D This is another one Tobias asked for. It replaces the primitive \type
+%D {\over}. We also take the opportunity to handle math style restoring,
+%D which makes sure units and chemicals come out ok. The \type {\frac}
+%D macro kind of replaces the awkward \type {\over} primitive. Say that
+%D we have the following formulas:
%D
%D \startbuffer[sample]
%D test $\frac {1}{2}$ test $$1 + \frac {1}{2} = 1.5$$
@@ -65,68 +66,85 @@
%D
%D \typebuffer[code] \getbuffer[code,sample]
%D
-%D So we stick to the next definitions (watch the local
-%D overloading of \type {\xfrac}).
-
-\def\math_fractions_forced#1#2#3{\relax\mathematics{\Ustack{{#1{#2}}\normalover{#1{#3}}}}}
-\def\math_fractions_auto #1#2{\relax\mathematics{\Ustack{{#1}\normalover{#2}}}}
+%D So we stick to the next definitions (watch the local overloading of
+%D \type {\xfrac}).
+%D
+%D In the meantime, in \LUATEX, we have better control over styles so the
+%D following macros are different from the \MKII\ ones.
+% obsolete, is now c!mathstyle
+%
+% 0=auto, 1=displaystyle, 2=textstyle, 3=scriptstyle, 4=scriptscriptstyle, 5=mathstyle
+%
% $\mathfracmode0 \frac{1}{2}$
% $\mathfracmode1 \frac{1}{2}$
% $\mathfracmode2 \frac{1}{2}$
% $\mathfracmode3 \frac{1}{2}$
% $\mathfracmode4 \frac{1}{2}$
% $\mathfracmode5 \frac{1}{2}$
-
-% 0=auto, 1=displaystyle, 2=textstyle, 3=scriptstyle, 4=scriptscriptstyle, 5=mathstyle
+%
+% we keep the constant for a while
\setnewconstant\mathfracmode\zerocount
-\unexpanded\def\frac % overloaded later on
- {\ifcase\mathfracmode
- \expandafter\math_fractions_auto
- \or
- \expandafter\math_fractions_forced\expandafter\displaystyle
- \or
- \expandafter\math_fractions_forced\expandafter\textstyle
- \or
- \expandafter\math_fractions_forced\expandafter\scriptstyle
- \or
- \expandafter\math_fractions_forced\expandafter\scriptscriptstyle
- \else
- \expandafter\math_fractions_forced\expandafter\mathstyle
- \fi}
-
-\unexpanded\def\xfrac#1#2%
+\installcorenamespace{mathfractions}
+\installcorenamespace{mathfractionstyle}
+\installcorenamespace{mathfractionalign}
+\installcorenamespace{mathfractionalternative}
+
+\installcommandhandler \??mathfractions {mathfraction} \??mathfractions
+
+\let\setupmathfractions\setupmathfraction
+
+\setupmathfractions
+ [\c!mathstyle=\v!auto,
+ \c!align=\v!normal,
+ \c!alternative=\v!inner,
+ \c!command=\normalover] % beware a <arg 1><command><arg 2> variant
+
+\appendtoks
+ \setuevalue{\currentmathfraction}{\math_frac{\currentmathfraction}}%
+\to \everydefinemathfraction
+
+\letvalue{\??mathfractionalign\v!normal}\mathstylehbox
+\letvalue{\??mathfractionalign\v!lohi }\mathstylevcenteredhbox
+
+\def\math_frac_align
+ {\expandnamespaceparameter\??mathfractionalign\mathfractionparameter\c!align\v!normal}
+
+\unexpanded\def\math_frac#1%
{\begingroup
- \let\xfrac\xxfrac
- \math_fractions_forced\scriptstyle{#1}{#2}%
+ \edef\currentmathfraction{#1}%
+ \expandnamespaceparameter\??mathfractionstyle\mathfractionparameter\c!mathstyle\s!math}
+
+\setvalue{\??mathfractionalternative\v!inner}#1#2#3%
+ {\math_frac_align{\Ustack{{#1{#2}}\mathfractionparameter\c!command{#1{#3}}}}%
\endgroup}
-\unexpanded\def\xxfrac#1#2%
- {\begingroup
- \math_fractions_forced\scriptscriptstyle{#1}{#2}%
+\setvalue{\??mathfractionalternative\v!outer}#1#2#3%
+ {\math_frac_align{\Ustack{{#1{{#2}\mathfractionparameter\c!command{#3}}}}}%
\endgroup}
-%D The \type {xx} variant looks still ugly, so maybe it's
-%D best to say:
+\def\math_frac_alternative
+ {\expandnamespaceparameter\??mathfractionalternative\mathfractionparameter\c!alternative\v!inner}
-\unexpanded\def\xxfrac#1#2%
- {\begingroup
- \math_fractions_forced\scriptscriptstyle{#1}{\raise.25ex\hbox{$\scriptscriptstyle#2$}}%
- \endgroup}
+\setvalue{\??mathfractionstyle\v!auto }{\math_frac_alternative\firstofoneargument} % was: 0
+\setvalue{\??mathfractionstyle\s!display }{\math_frac_alternative\displaystyle} % was: 1
+\setvalue{\??mathfractionstyle\s!text }{\math_frac_alternative\textstyle} % was: 2
+\setvalue{\??mathfractionstyle\s!script }{\math_frac_alternative\scriptstyle} % was: 3
+\setvalue{\??mathfractionstyle\s!scriptscript}{\math_frac_alternative\scriptscriptstyle} % was: 4
+\setvalue{\??mathfractionstyle\s!math }{\math_frac_alternative\mathstyle} % was: else
-%D Something low level for scientific calculator notation:
+\definemathfraction[frac][\c!mathstyle=\v!auto]
-\unexpanded\def\scinot#1#2%
- {#1\times10^{#2}}
+\unexpanded\def\xfrac {\begingroup\let\xfrac\xxfrac\math_frac_alternative\scriptstyle}
+\unexpanded\def\xxfrac{\begingroup \math_frac_alternative\scriptscriptstyle}
-% I have no clue what \mthfrac and \mthsqrt are supposed to do but
-% I guess that it can be done with tweaking luatex's math parameters.
-% Otherwise I'll write something from scratch.
+%D The \type {xx} variant still looks ugly, so maybe it's best to say:
-\unexpanded\def\mthfrac#1#2#3{[mthfrac: #1 #2 #3]}
-\unexpanded\def\mthsqrt#1#2#3{[mthsqrt: #1 #2 #3]}
+\unexpanded\def\xxfrac#1#2%
+ {\begingroup
+ \math_frac_alternative\scriptscriptstyle{#1}{\raise.25\exheight\hbox{$\scriptscriptstyle#2$}}}
%D \macros
%D {dfrac, tfrac, frac, dbinom, tbinom, binom}
@@ -139,21 +157,26 @@
%D \typebuffer
%D \getbuffer
-% extra {} after displaystyle etc are needed
+% \unexpanded\def\dfrac #1#2{{\displaystyle {{#1}\normalover {#2}}}}
+% \unexpanded\def\tfrac #1#2{{\textstyle {{#1}\normalover {#2}}}}
+
+\definemathfraction[dfrac][\c!alternative=\v!outer,\c!mathstyle=\s!display]
+\definemathfraction[tfrac][\c!alternative=\v!outer,\c!mathstyle=\s!text]
+\definemathfraction[sfrac][\c!alternative=\v!outer,\c!mathstyle=\s!script]
-%unexpanded\def\frac #1#2{{ {{#1}\normalover {#2}}}}
-%unexpanded\def\xfrac #1#2{{\scriptstyle {{#1}\normalover {#2}}}}
-%unexpanded\def\xxfrac#1#2{{\scriptscriptstyle{{#1}\normalover {#2}}}}
-\unexpanded\def\dfrac #1#2{{\displaystyle {{#1}\normalover {#2}}}}
-\unexpanded\def\tfrac #1#2{{\textstyle {{#1}\normalover {#2}}}}
+\def\normaloverbinum{\normalabovewithdelims()\zeropoint}
-%unexpanded\def\binom #1#2{{ {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
-\unexpanded\def\dbinom#1#2{{\displaystyle {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
-\unexpanded\def\tbinom#1#2{{\textstyle {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
+% \definemathfraction[ddfrac][\c!mathstyle=\s!display]
+% \definemathfraction[ttfrac][\c!mathstyle=\s!text]
+% \definemathfraction[ssfrac][\c!mathstyle=\s!script]
-\unexpanded\def\binom #1#2{{\Ustack{{#1}\normalabovewithdelims()\zeropoint{#2}}}}
+% \unexpanded\def\binom #1#2{{\Ustack {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
+% \unexpanded\def\dbinom#1#2{{\displaystyle{{#1}\normalabovewithdelims()\zeropoint{#2}}}}
+% \unexpanded\def\tbinom#1#2{{\textstyle {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
-% \let\frac\math_fractions_auto
+\definemathfraction[binom] [\c!alternative=\v!outer,\c!command=\normaloverbinum,\c!mathstyle=\s!auto]
+\definemathfraction[dbinom][\c!alternative=\v!outer,\c!command=\normaloverbinum,\c!mathstyle=\s!display]
+\definemathfraction[tbinom][\c!alternative=\v!outer,\c!command=\normaloverbinum,\c!mathstyle=\s!text]
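+
+% Since fractions now run through the command handler, new instances can presumably be
+% defined and tuned independently, along these lines (hypothetical name):
+%
+% \definemathfraction[myfrac][\c!alternative=\v!outer,\c!mathstyle=\s!script]
+% \setupmathfraction [myfrac][\c!mathstyle=\s!scriptscript]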
%D \macros
%D {cfrac}
@@ -226,42 +249,51 @@
\unexpanded\def\splitdfrac#1#2%
{{\displaystyle{{ #1\quad\hfill}\normalabove\zeropoint{ \hfill\quad\mathstrut#2}}}}
-%D For thee moment here, but it might move:
-
-%D \macros
-%D {qedsymbol}
-%D
-%D [HH] The general Quod Erat Domonstrandum symbol is defined
-%D in such a way that we can configure it. Because this symbol
-%D is also used in text mode, we make it a normal text symbol
-%D with special behavior.
-
-\unexpanded\def\qedsymbol#1%
- {\ifhmode
- \unskip\nobreakspace\hfill#1\par
- \else\ifmmode
- #1\relax % leading \eqno removed
- \else
- \dontleavehmode\emptyhbox\hfill#1\par
- \fi\fi}
-
-\definesymbol [qed] [\qedsymbol{\mathematics{\square}}]
-
-%D \macros
-%D {QED}
-%D
-%D [HH] For compatbility reasons we also provide the \type
-%D {\QED} command. In case this command is overloaded, we still
-%D have the symbol available. \symbol[qed]
-
-\unexpanded\def\QED{\symbol[qed]}
-
-%D \macros
-%D {boxed}
-%D
-%D [HH] Another macro that users might expect (slightly adapted):
+\protect \endinput
-\unexpanded\def\boxed % maybe obsolete
- {\ifmmode\expandafter\mframed\else\expandafter\framed\fi}
+% I have no clue what \mthfrac and \mthsqrt are supposed to do but
+% I guess that it can be done with tweaking luatex's math parameters.
+% Otherwise I'll write something from scratch.
-\protect \endinput
+% \def\math_stylebuilders_frac#1#2#3#4#5#6#7%
+% {\begingroup
+% \mathsurround\zeropoint
+% \setbox0\hbox{$#1 #6$}%
+% \setbox2\hbox{$#1 #7$}%
+% \dimen0\wd\ifdim\wd2>\wd0 2\else 0\fi
+% \setbox4\hbox to \dimen0{\leaders\hbox{#4}\hss#5}%
+% \mathord{\vcenter{{\offinterlineskip
+% \hbox to \dimen0{\hss\box0\hss}%
+% \kern\ht4%
+% \hbox to \dimen0{\hss\copy4\hss}%
+% \kern\ht4%
+% \hbox to \dimen0{\hss\box2\hss}}}}%
+% \endgroup}
+%
+% \def\math_stylebuilders_sqrt#1#2#3#4#5%
+% {\begingroup
+% \mathsurround\zeropoint
+% \setbox0\hbox{$#1 #5$}%
+% \ht0\dimexpr1.05\ht0+\onepoint\relax
+% \dp0\dimexpr1.05\dp0+\onepoint\relax
+% \setbox4\hbox to \wd0{\mr#2\leaders\hbox{#3}\hfill#4}%
+% \delimitershortfall\zeropoint
+% \nulldelimiterspace\zeropoint
+% \setbox2\hbox{$\left\delimiter"0270370 \vrule \s!height\ht0 \s!depth \dp0 \s!width\zeropoint\right.$}% is this the right code point?
+% \mathord{\vcenter{\hbox{\copy2\rlap{\raise\dimexpr\ht2-\ht4\relax\copy4}\copy0}}}%
+% \endgroup}
+%
+% \def\mthfrac#1#2#3#4#5{\mathchoice
+% {\math_stylebuilders_frac\displaystyle \textface {#1}{#2}{#3}{#4}{#5}}%
+% {\math_stylebuilders_frac\textstyle \textface {#1}{#2}{#3}{#4}{#5}}%
+% {\math_stylebuilders_frac\scriptstyle \scriptface {#1}{#2}{#3}{#4}{#5}}%
+% {\math_stylebuilders_frac\scriptscriptstyle\scriptscriptface{#1}{#2}{#3}{#4}{#5}}}
+%
+% \def\mthsqrt#1#2#3{\mathchoice
+% {\math_stylebuilders_sqrt\displaystyle \textface{#1}{#2}{#3}}%
+% {\math_stylebuilders_sqrt\textstyle \textface{#1}{#2}{#3}}%
+% {\math_stylebuilders_sqrt\scriptstyle \textface{#1}{#2}{#3}}%
+% {\math_stylebuilders_sqrt\scriptscriptstyle\textface{#1}{#2}{#3}}}
+
+% \unexpanded\def\mthfrac#1#2#3{[mthfrac: #1 #2 #3]}
+% \unexpanded\def\mthsqrt#1#2#3{[mthsqrt: #1 #2 #3]}
diff --git a/Master/texmf-dist/tex/context/base/math-ini.lua b/Master/texmf-dist/tex/context/base/math-ini.lua
index 54452282bb0..530e685685e 100644
--- a/Master/texmf-dist/tex/context/base/math-ini.lua
+++ b/Master/texmf-dist/tex/context/base/math-ini.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['math-ext'] = {
+if not modules then modules = { } end modules ['math-ini'] = {
version = 1.001,
comment = "companion to math-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -11,7 +11,8 @@ if not modules then modules = { } end modules ['math-ext'] = {
--
-- isn't characters.data loaded already ... shortcut it here
-local format, utfchar, utfbyte = string.format, utf.char, utf.byte
+local formatters = string.formatters
+local utfchar, utfbyte = utf.char, utf.byte
local setmathcode, setdelcode = tex.setmathcode, tex.setdelcode
local texattribute = tex.attribute
local floor = math.floor
@@ -33,34 +34,66 @@ local mathematics = mathematics
mathematics.extrabase = 0xFE000 -- here we push some virtuals
mathematics.privatebase = 0xFF000 -- here we push the ex
+local chardata = characters.data
+
local families = allocate {
mr = 0,
mb = 1,
}
local classes = allocate {
- ord = 0, -- mathordcomm mathord
- op = 1, -- mathopcomm mathop
- bin = 2, -- mathbincomm mathbin
- rel = 3, -- mathrelcomm mathrel
- open = 4, -- mathopencomm mathopen
- close = 5, -- mathclosecomm mathclose
- punct = 6, -- mathpunctcomm mathpunct
- alpha = 7, -- mathalphacomm firstofoneargument
- accent = 8, -- class 0
- radical = 9,
- xaccent = 10, -- class 3
- topaccent = 11, -- class 0
- botaccent = 12, -- class 0
- under = 13,
- over = 14,
- delimiter = 15,
- inner = 0, -- mathinnercomm mathinner
- nothing = 0, -- mathnothingcomm firstofoneargument
- choice = 0, -- mathchoicecomm @@mathchoicecomm
- box = 0, -- mathboxcomm @@mathboxcomm
- limop = 1, -- mathlimopcomm @@mathlimopcomm
- nolop = 1, -- mathnolopcomm @@mathnolopcomm
+ ord = 0, -- mathordcomm mathord
+ op = 1, -- mathopcomm mathop
+ bin = 2, -- mathbincomm mathbin
+ rel = 3, -- mathrelcomm mathrel
+ open = 4, -- mathopencomm mathopen
+ middle = 4,
+ close = 5, -- mathclosecomm mathclose
+ punct = 6, -- mathpunctcomm mathpunct
+ alpha = 7, -- mathalphacomm firstofoneargument
+ accent = 8, -- class 0
+ radical = 9,
+ xaccent = 10, -- class 3
+ topaccent = 11, -- class 0
+ botaccent = 12, -- class 0
+ under = 13,
+ over = 14,
+ delimiter = 15,
+ inner = 0, -- mathinnercomm mathinner
+ nothing = 0, -- mathnothingcomm firstofoneargument
+ choice = 0, -- mathchoicecomm @@mathchoicecomm
+ box = 0, -- mathboxcomm @@mathboxcomm
+ limop = 1, -- mathlimopcomm @@mathlimopcomm
+ nolop = 1, -- mathnolopcomm @@mathnolopcomm
+ --
+ ordinary = 0, -- ord
+ alphabetic = 7, -- alpha
+ unknown = 0, -- nothing
+ default = 0, -- nothing
+ punctuation = 6, -- punct
+ normal = 0, -- nothing
+ opening = 4, -- open
+ closing = 5, -- close
+ binary = 2, -- bin
+ relation = 3, -- rel
+ fence = 0, -- unknown
+ diacritic = 8, -- accent
+ large = 1, -- op
+ variable = 7, -- alphabetic
+ number = 7, -- alphabetic
+}
+
+local open_class = 4
+local middle_class = 4
+local close_class = 5
+
+local accents = allocate {
+ accent = true, -- some can be both
+ topaccent = true, [11] = true,
+ botaccent = true, [12] = true,
+ under = true, [13] = true,
+ over = true, [14] = true,
+ unknown = false,
}
local codes = allocate {
@@ -74,165 +107,118 @@ local codes = allocate {
variable = 7, [7] = "variable",
}
-mathematics.classes = classes
-mathematics.codes = codes
-mathematics.families = families
-
-classes.alphabetic = classes.alpha
-classes.unknown = classes.nothing
-classes.default = classes.nothing
-classes.punctuation = classes.punct
-classes.normal = classes.nothing
-classes.opening = classes.open
-classes.closing = classes.close
-classes.binary = classes.bin
-classes.relation = classes.rel
-classes.fence = classes.unknown
-classes.diacritic = classes.accent
-classes.large = classes.op
-classes.variable = classes.alphabetic
-classes.number = classes.alphabetic
+local extensibles = allocate {
+ unknown = 0,
+ l = 1, left = 1,
+ r = 2, right = 2,
+ h = 3, horizontal = 3,-- lr or rl
+ u = 5, up = 4,
+ d = 5, down = 5,
+ v = 6, vertical = 6,-- ud or du
+ m = 7, mixed = 7,
+}
+
+table.setmetatableindex(extensibles,function(t,k) t[k] = 0 return 0 end)
+
+mathematics.extensibles = extensibles
+mathematics.classes = classes
+mathematics.codes = codes
+-----------.accents = codes
+mathematics.families = families
-- there will be proper functions soon (and we will move this code in-line)
-- no need for " in class and family (saves space)
-local function delcode(target,family,slot)
- return format('\\Udelcode%s="%X "%X ',target,family,slot)
-end
local function mathchar(class,family,slot)
- return format('\\Umathchar "%X "%X "%X ',class,family,slot)
+ return formatters['\\Umathchar "%X "%X "%X '](class,family,slot)
end
+
local function mathaccent(class,family,slot)
- return format('\\Umathaccent "%X "%X "%X ',0,family,slot) -- no class
+ return formatters['\\Umathaccent "%X "%X "%X '](0,family,slot) -- no class
end
+
local function delimiter(class,family,slot)
- return format('\\Udelimiter "%X "%X "%X ',class,family,slot)
+ return formatters['\\Udelimiter "%X "%X "%X '](class,family,slot)
end
+
local function radical(family,slot)
- return format('\\Uradical "%X "%X ',family,slot)
+ return formatters['\\Uradical "%X "%X '](family,slot)
end
+
local function mathchardef(name,class,family,slot)
- return format('\\Umathchardef\\%s "%X "%X "%X ',name,class,family,slot)
+ return formatters['\\Umathchardef\\%s "%X "%X "%X '](name,class,family,slot)
end
+
local function mathcode(target,class,family,slot)
- return format('\\Umathcode%s="%X "%X "%X ',target,class,family,slot)
+ return formatters['\\Umathcode%s="%X "%X "%X '](target,class,family,slot)
end
+
local function mathtopaccent(class,family,slot)
- return format('\\Umathaccent "%X "%X "%X ',0,family,slot) -- no class
+ return formatters['\\Umathaccent "%X "%X "%X '](0,family,slot) -- no class
end
+
local function mathbotaccent(class,family,slot)
- return format('\\Umathaccent bottom "%X "%X "%X ',0,family,slot) -- no class
+ return formatters['\\Umathaccent bottom "%X "%X "%X '](0,family,slot) -- no class
end
+
local function mathtopdelimiter(class,family,slot)
- return format('\\Udelimiterover "%X "%X ',family,slot) -- no class
+ return formatters['\\Udelimiterover "%X "%X '](family,slot) -- no class
end
+
local function mathbotdelimiter(class,family,slot)
- return format('\\Udelimiterunder "%X "%X ',family,slot) -- no class
+ return formatters['\\Udelimiterunder "%X "%X '](family,slot) -- no class
end
local escapes = characters.filters.utf.private.escapes
-local setmathcharacter, setmathsynonym, setmathsymbol -- once updated we will inline them
-
--- beware ... we only set the math character once ... which is why we
--- have the 'done' checking below
-
-if setmathcode then
+-- not that many so no need to reuse tables
- setmathcharacter = function(class,family,slot,unicode)
- if class <= 7 then
- setmathcode(slot,{class,family,unicode or slot})
- end
- end
-
- setmathsynonym = function(class,family,slot,unicode,setcode)
- if setcode and class <= 7 then
- setmathcode(slot,{class,family,unicode})
- end
- if class == classes.open or class == classes.close then
- setdelcode(slot,{family,unicode,0,0})
- end
- end
-
- setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
- if class == classes.accent then
- contextsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
- elseif class == classes.topaccent then
- contextsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
- elseif class == classes.botaccent then
- contextsprint(format([[\unexpanded\gdef\%s{\Umathbotaccent 0 "%X "%X }]],name,family,slot))
- elseif class == classes.over then
- contextsprint(format([[\unexpanded\gdef\%s{\Udelimiterover "%X "%X }]],name,family,slot))
- elseif class == classes.under then
- contextsprint(format([[\unexpanded\gdef\%s{\Udelimiterunder "%X "%X }]],name,family,slot))
- elseif class == classes.open or class == classes.close then
- setdelcode(slot,{family,slot,0,0})
- contextsprint(format([[\unexpanded\gdef\%s{\Udelimiter "%X "%X "%X }]],name,class,family,slot))
- elseif class == classes.delimiter then
- setdelcode(slot,{family,slot,0,0})
- contextsprint(format([[\unexpanded\gdef\%s{\Udelimiter 0 "%X "%X }]],name,family,slot))
- elseif class == classes.radical then
- contextsprint(format([[\unexpanded\gdef\%s{\Uradical "%X "%X }]],name,family,slot))
- else
- -- beware, open/close and other specials should not end up here
--- contextsprint(format([[\unexpanded\gdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot))
- contextsprint(format([[\Umathchardef\%s "%X "%X "%X ]],name,class,family,slot))
- end
+local setmathcharacter = function(class,family,slot,unicode,mset,dset)
+ if mset and codes[class] then -- regular codes < 7
+ setmathcode("global",slot,{class,family,unicode})
+ mset = false
end
-
-
-else
-
- setmathcharacter = function(class,family,slot,unicode)
- if class <= 7 then
- contextsprint(mathcode(slot,class,family,unicode or slot))
- end
- end
-
- setmathsynonym = function(class,family,slot,unicode,setcode)
- if setcode and class <= 7 then
- contextsprint(mathcode(slot,class,family,unicode))
- end
- if class == classes.open or class == classes.close then
- contextsprint(delcode(slot,family,unicode))
- end
+ if dset and class == open_class or class == close_class or class == middle_class then
+ setdelcode("global",slot,{family,unicode,0,0})
+ dset = false
end
+ return mset, dset
+end
- setmathsymbol = function(name,class,family,slot)
- if class == classes.accent then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathaccent(class,family,slot)))
- elseif class == classes.topaccent then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathtopaccent(class,family,slot)))
- elseif class == classes.botaccent then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathbotaccent(class,family,slot)))
- elseif class == classes.over then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathtopdelimiter(class,family,slot)))
- elseif class == classes.under then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathbotdelimiter(class,family,slot)))
- elseif class == classes.open or class == classes.close then
- contextsprint(delcode(slot,family,slot))
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,delimiter(class,family,slot)))
- elseif class == classes.delimiter then
- contextsprint(delcode(slot,family,slot))
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,delimiter(0,family,slot)))
- elseif class == classes.radical then
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,radical(family,slot)))
- else
- -- beware, open/close and other specials should not end up here
- contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathchar(class,family,slot)))
- end
+local setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
+ if class == classes.accent then
+ contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot))
+ elseif class == classes.topaccent then
+ contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot))
+ elseif class == classes.botaccent then
+ contextsprint(formatters[ [[\ugdef\%s{\Umathbotaccent 0 "%X "%X }]] ](name,family,slot))
+ elseif class == classes.over then
+ contextsprint(formatters[ [[\ugdef\%s{\Udelimiterover "%X "%X }]] ](name,family,slot))
+ elseif class == classes.under then
+ contextsprint(formatters[ [[\ugdef\%s{\Udelimiterunder "%X "%X }]] ](name,family,slot))
+ elseif class == open_class or class == close_class or class == middle_class then
+ setdelcode(slot,{family,slot,0,0})
+ contextsprint(formatters[ [[\ugdef\%s{\Udelimiter "%X "%X "%X }]] ](name,class,family,slot))
+ elseif class == classes.delimiter then
+ setdelcode(slot,{family,slot,0,0})
+ contextsprint(formatters[ [[\ugdef\%s{\Udelimiter 0 "%X "%X }]] ](name,family,slot))
+ elseif class == classes.radical then
+ contextsprint(formatters[ [[\ugdef\%s{\Uradical "%X "%X }]] ](name,family,slot))
+ else
+ -- beware, open/close and other specials should not end up here
+ -- contextsprint(formatters[ [[\ugdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot))
+ contextsprint(formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ](name,class,family,slot))
end
-
end
local function report(class,family,unicode,name)
local nametype = type(name)
if nametype == "string" then
- report_math("%s:%s %s U+%05X (%s) => %s",classname,class,family,unicode,utfchar(unicode),name)
+ report_math("class name %a, class %a, family %a, char %C, name %a",classname,class,family,unicode,name)
elseif nametype == "number" then
- report_math("%s:%s %s U+%05X (%s) => U+%05X",classname,class,family,unicode,utfchar(unicode),name)
+ report_math("class name %a, class %a, family %a, char %C, number %U",classname,class,family,unicode,name)
else
- report_math("%s:%s %s U+%05X (%s)", classname,class,family,unicode,utfchar(unicode))
+ report_math("class name %a, class %a, family %a, char %C", classname,class,family,unicode)
end
end
@@ -244,7 +230,7 @@ function mathematics.define(family)
local data = characters.data
for unicode, character in next, data do
local symbol = character.mathsymbol
- local setcode = true
+ local mset, dset = true, true
if symbol then
local other = data[symbol]
local class = other.mathclass
@@ -253,8 +239,7 @@ function mathematics.define(family)
if trace_defining then
report(class,family,unicode,symbol)
end
- setmathsynonym(class,family,unicode,symbol,setcode)
- setcode = false
+ mset, dset = setmathcharacter(class,family,unicode,symbol,mset,dset)
end
local spec = other.mathspec
if spec then
@@ -262,8 +247,7 @@ function mathematics.define(family)
local class = m.class
if class then
class = classes[class] or class -- no real checks needed
- setmathsynonym(class,family,unicode,symbol,setcode)
- setcode = false
+ mset, dset = setmathcharacter(class,family,unicode,symbol,mset,dset)
end
end
end
@@ -288,16 +272,11 @@ function mathematics.define(family)
setmathsymbol(name,class,family,unicode)
else
name = class == classes.variable or class == classes.number and character.adobename
- if name then
- if trace_defining then
- report(class,family,unicode,name)
- end
+ if name and trace_defining then
+ report(class,family,unicode,name)
end
end
- if setcode then
- setmathcharacter(class,family,unicode,unicode)
- setcode = false
- end
+ mset, dset = setmathcharacter(class,family,unicode,m.unicode or unicode,mset,dset) -- see solidus
end
end
end
@@ -308,7 +287,7 @@ function mathematics.define(family)
if trace_defining then
report(class,family,unicode,name)
end
- setmathcharacter(class,family,unicode)
+ mset, dset = setmathcharacter(class,family,unicode,mset,dset)
else
name = name or character.contextname
if name then
@@ -321,7 +300,7 @@ function mathematics.define(family)
report(class,family,unicode,character.adobename)
end
end
- setmathcharacter(class,family,unicode,unicode)
+ mset, dset = setmathcharacter(class,family,unicode,unicode,mset,dset)
end
end
end
@@ -329,24 +308,102 @@ end
-- needed for mathml analysis
+-- we could cache
+
local function utfmathclass(chr, default)
- local cd = characters.data[utfbyte(chr)]
- return (cd and cd.mathclass) or default or "unknown"
+ local cd = chardata[utfbyte(chr)]
+ return cd and cd.mathclass or default or "unknown"
+end
+
+local function utfmathaccent(chr,default,asked)
+ local cd = chardata[utfbyte(chr)]
+ if not cd then
+ return default or false
+ end
+ if asked then
+ local mc = cd.mathclass
+ if mc and mc == asked then
+ return true
+ end
+ local ms = cd.mathspec
+ if ms then
+ for i=1,#ms do
+ local msi = ms[i]
+ local mc = msi.class
+ if mc and mc == asked then
+ return true
+ end
+ end
+ end
+ else
+ local mc = cd.mathclass
+ if mc then
+ return accents[mc] or default or false
+ end
+ local ms = cd.mathspec
+ if ms then
+ for i=1,#ms do
+ local msi = ms[i]
+ local mc = msi.class
+ if mc then
+ return accents[mc] or default or false
+ end
+ end
+ end
+ end
+ return default or false
end
local function utfmathstretch(chr, default) -- "h", "v", "b", ""
- local cd = characters.data[utfbyte(chr)]
- return (cd and cd.mathstretch) or default or ""
+ local cd = chardata[utfbyte(chr)]
+ return cd and cd.mathstretch or default or ""
end
-local function utfmathcommand(chr, default)
- local cd = characters.data[utfbyte(chr)]
- local cmd = cd and cd.mathname
- return cmd or default or ""
+local function utfmathcommand(chr,default,asked)
+-- local cd = chardata[utfbyte(chr)]
+-- local cmd = cd and cd.mathname
+-- return cmd or default or ""
+ local cd = chardata[utfbyte(chr)]
+ if not cd then
+ return default or ""
+ end
+ if asked then
+ local mn = cd.mathname
+ local mc = cd.mathclass
+ if mn and mc and mc == asked then
+ return mn
+ end
+ local ms = cd.mathspec
+ if ms then
+ for i=1,#ms do
+ local msi = ms[i]
+ local mn = msi.name
+ if mn and msi.class == asked then
+ return mn
+ end
+ end
+ end
+ else
+ local mn = cd.mathname
+ if mn then
+ return mn
+ end
+ local ms = cd.mathspec
+ if ms then
+ for i=1,#ms do
+ local msi = ms[i]
+ local mn = msi.name
+ if mn then
+ return mn
+ end
+ end
+ end
+ end
+ return default or ""
end
local function utfmathfiller(chr, default)
- local cd = characters.data[utfbyte(chr)]
+ local cd = chardata[utfbyte(chr)]
local cmd = cd and (cd.mathfiller or cd.mathname)
return cmd or default or ""
end
@@ -358,11 +415,14 @@ mathematics.utfmathfiller = utfmathfiller
-- interfaced
-function commands.utfmathclass (chr) context(utfmathclass (chr)) end
-function commands.utfmathstretch(chr) context(utfmathstretch(chr)) end
-function commands.utfmathcommand(chr) context(utfmathcommand(chr)) end
-function commands.utfmathfiller (chr) context(utfmathfiller (chr)) end
+function commands.utfmathclass (...) context(utfmathclass (...)) end
+function commands.utfmathstretch(...) context(utfmathstretch(...)) end
+function commands.utfmathcommand(...) context(utfmathcommand(...)) end
+function commands.utfmathfiller (...) context(utfmathfiller (...)) end
+function commands.doifelseutfmathaccent(chr,asked)
+ commands.doifelse(utfmathaccent(chr,nil,asked))
+end
-- helpers
diff --git a/Master/texmf-dist/tex/context/base/math-ini.mkiv b/Master/texmf-dist/tex/context/base/math-ini.mkiv
index 710a65f5b36..89116084791 100644
--- a/Master/texmf-dist/tex/context/base/math-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-ini.mkiv
@@ -13,28 +13,21 @@
\writestatus{loading}{ConTeXt Math Macros / Initializations}
-%D This module provides namespaces for math fonts, thereby
-%D permitting mixed usage of math fonts. Although not strictly
-%D needed, we also provide a family name mapping mechanism as
-%D used in the (original) AMS math definition files, but here
-%D these names can recursively be remapped and if needed,
-%D dynamically be changed. We've tried to minimize the number
-%D of definition commands and use plain \TEX\ definitions as
-%D fallback. We've tried to follow a couple of conventions
-%D from plain and AMS math in order to achieve backward
-%D compatinility. We also kept an eye on future usage of these
-%D modules in the perspective of MathML and unicode fonts.
+%D This module provides namespaces for math fonts, thereby permitting mixed usage of
+%D math fonts. Although not strictly needed, we also provide a family name mapping
+%D mechanism as used in the (original) AMS math definition files, but here these
+%D names can recursively be remapped and if needed, dynamically be changed. We've
+%D tried to minimize the number of definition commands and use plain \TEX\
+%D definitions as fallback. We've tried to follow a couple of conventions from plain
+%D and AMS math in order to achieve backward compatibility. We also kept an eye on
+%D future usage of these modules in the perspective of MathML and unicode fonts.
+
+%D There is a subtle issue with grouping: the \type {\begingroup} method will not
+%D restore a changed mathstyle so best avoid that one. However, there are cases where
+%D we really need to use such grouping.
\unprotect
-\ifdefined\v!autopunctuation \else \def\v!autopunctuation{autopunctuation} \fi
-\ifdefined\v!integral \else \def\v!integral {integral} \fi
-
-\def\s!lcgreek {lcgreek}
-\def\s!ucgreek {ucgreek}
-\def\s!italics {italics}
-\def\s!integral{integral}
-
%D We move these definitions into the format:
% test [[\char948 \cldcontext{utf.char(948)}]]
@@ -45,21 +38,25 @@
\registerctxluafile{math-act}{1.001}
\registerctxluafile{math-ext}{1.001}
\registerctxluafile{math-vfu}{1.001}
+\registerctxluafile{math-ttv}{1.001}
\registerctxluafile{math-map}{1.001}
\registerctxluafile{math-ren}{1.001}
\registerctxluafile{math-noa}{1.001}
\registerctxluafile{math-tag}{1.001}
+\registerctxluafile{math-fbk}{1.001}
\definesystemattribute[mathalphabet] [public]
\definesystemattribute[mathsize] [public]
\definesystemattribute[mathpunctuation][public]
-\definesystemattribute[mathgreek] [public]
+\definesystemattribute[mathgreek] [public] % will become generic
\definesystemattribute[mathalternate] [public]
\definesystemattribute[mathrendering] [public]
\definesystemattribute[mathcategory] [public]
\definesystemattribute[mathmode] [public]
\definesystemattribute[mathitalics] [public]
+\definesystemattribute[displaymath] [public]
+
\appendtoks
\attribute\mathmodeattribute\plusone
\to \everydisplay
@@ -68,6 +65,10 @@
\attribute\mathmodeattribute\plusone
\to \everybeforedisplayformula
+\appendtoksonce
+ \attribute\displaymathattribute\plusone
+\to \everybeforedisplayformula
+
\setnewconstant\defaultmathfamily \zerocount % 255
%D Some measures (maybe spac-mth):
@@ -82,18 +83,6 @@
%D Configuration for integrals. (If needed we can speed this up and make it
%D installable; no processaction is needed then).
-% \newtoks\everysetupmathematics
-%
-% \unexpanded\def\setupmathematics
-% {\dosingleargument\dosetupmathematics}
-%
-% \def\dosetupmathematics[#1]%
-% {\getparameters[\??mo][#1]%
-% \the\everysetupmathematics}
-%
-% \def\mathematicsparameter#1%
-% {\ifcsname\??mo#1\endcsname\csname\??mo#1\endcsname\fi}
-
\installcorenamespace{mathematics}
\installswitchcommandhandler \??mathematics {mathematics} \??mathematics
@@ -123,25 +112,20 @@
% todo: only in mmode
-\unexpanded\def\mathgreekupright{\attribute\c_math_greek_attribute22 }
-\unexpanded\def\mathgreekitalic {\attribute\c_math_greek_attribute33 }
-\unexpanded\def\mathgreekdefault{\attribute\c_math_greek_attribute\attributeunsetvalue}
-
-\let\mathgreeknormal\mathgreekupright
-\let\mathgreeknone \mathgreekdefault
+% these commands are semi-public but should not be used directly (lua names will change)
-\unexpanded\def\setmathattribute#1#2{\ifmmode\ctxcommand{setmathattribute("#1","#2")}\fi}
-\unexpanded\def\setmathalphabet #1{\ifmmode\ctxcommand{setmathalphabet("#1")}\fi}
-\unexpanded\def\setmathstyle #1{\ifmmode\ctxcommand{setmathstyle("#1")}\fi}
-\unexpanded\def\setmathalternate #1{\ifmmode\ctxcommand{setmathalternate(\number\defaultmathfamily,"#1")}\fi}
+\unexpanded\def\math_set_attribute #1#2{\ifmmode\ctxcommand{setmathattribute("#1","#2")}\fi}
+\unexpanded\def\math_set_alphabet #1{\ifmmode\ctxcommand{setmathalphabet("#1")}\fi}
+\unexpanded\def\math_set_font_style #1{\ifmmode\ctxcommand{setmathstyle("#1")}\fi}
+\unexpanded\def\math_set_font_alternate#1{\ifmmode\ctxcommand{setmathalternate(\number\defaultmathfamily,"#1")}\fi}
\installcorenamespace{mathstylealternate} % might become a setuphandler
-\unexpanded\def\setmathstylealterternate#1%
+\unexpanded\def\math_set_font_style_alterternate#1%
{\ifcsname\??mathstylealternate\fontclass:#1\endcsname
- \expandafter\setmathalternate\csname\??mathstylealternate\fontclass:#1\endcsname
+ \expandafter\math_set_font_alternate\csname\??mathstylealternate\fontclass:#1\endcsname
\else\ifcsname\??mathstylealternate#1\endcsname
- \expandafter\setmathalternate\csname\??mathstylealternate#1\endcsname
+ \expandafter\math_set_font_alternate\csname\??mathstylealternate#1\endcsname
\fi\fi}
\unexpanded\def\setupmathrendering % the name might change
@@ -154,28 +138,33 @@
\getparameters[\??mathstylealternate][#1]%
\fi}
-\unexpanded\def\mathaltcal{\setmathalternate{cal}\cal} % ss01 in xits
+\unexpanded\def\mathaltcal{\math_set_font_alternate{cal}\cal} % ss01 in xits
-\let\mathalternate\setmathalternate % obsolete
+\let\setmathattribute \math_set_attribute
+\let\setmathalphabet \math_set_alphabet
+\let\setmathfontstyle \math_set_font_style
+\let\setmathfontalternate \math_set_font_alternate
+\let\setmathfontstylealterternate\math_set_font_style_alterternate
-\unexpanded\def\mr {\setmathattribute\s!regular\s!tf\setmathstylealterternate\s!tf}
+\let\mathalternate \math_set_font_alternate % obsolete
-\unexpanded\def\mathdefault {\setmathattribute\s!regular\s!it\setmathstylealterternate\s!it}
-\unexpanded\def\mathscript {\setmathalphabet \s!script \setmathstylealterternate\s!script}
-\unexpanded\def\mathfraktur {\setmathalphabet \s!fraktur \setmathstylealterternate\s!fraktur}
-\unexpanded\def\mathblackboard{\setmathalphabet \s!blackboard \setmathstylealterternate\s!blackboard}
+\unexpanded\def\mathupright {\math_set_attribute\s!regular\s!tf\math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathdefault {\math_set_attribute\s!regular\s!it\math_set_font_style_alterternate\s!it}
+\unexpanded\def\mathscript {\math_set_alphabet \s!script \math_set_font_style_alterternate\s!script}
+\unexpanded\def\mathfraktur {\math_set_alphabet \s!fraktur \math_set_font_style_alterternate\s!fraktur}
+\unexpanded\def\mathblackboard{\math_set_alphabet \s!blackboard \math_set_font_style_alterternate\s!blackboard}
-\unexpanded\def\mathrm {\setmathattribute\s!rm\s!tf \setmathstylealterternate\s!tf}
-\unexpanded\def\mathss {\setmathattribute\s!ss\s!tf \setmathstylealterternate\s!tf}
-\unexpanded\def\mathtt {\setmathattribute\s!tt\s!tf \setmathstylealterternate\s!tf}
+\unexpanded\def\mathrm {\math_set_attribute\s!rm\s!tf \math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathss {\math_set_attribute\s!ss\s!tf \math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathtt {\math_set_attribute\s!tt\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathtf {\setmathstyle\s!tf \setmathstylealterternate\s!tf}
-\unexpanded\def\mathsl {\setmathstyle\s!it \setmathstylealterternate\s!it} % no sl
-\unexpanded\def\mathit {\setmathstyle\s!it \setmathstylealterternate\s!it}
+\unexpanded\def\mathtf {\math_set_font_style\s!tf \math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathsl {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it} % no sl
+\unexpanded\def\mathit {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it}
-\unexpanded\def\mathbf {\setmathstyle\s!bf \setmathstylealterternate\s!bf}
-\unexpanded\def\mathbs {\setmathstyle\s!bi \setmathstylealterternate\s!bi} % no sl
-\unexpanded\def\mathbi {\setmathstyle\s!bi \setmathstylealterternate\s!bi}
+\unexpanded\def\mathbf {\math_set_font_style\s!bf \math_set_font_style_alterternate\s!bf}
+\unexpanded\def\mathbs {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi} % no sl
+\unexpanded\def\mathbi {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi}
\let\tfmath\mathtf % maybe a grouped command
\let\slmath\mathsl
@@ -198,8 +187,6 @@
\unexpanded\def\mathfrak#1{{\mathfraktur #1}} % for AMS compatibility
\unexpanded\def\mathbb #1{{\mathblackboard#1}} % for AMS compatibility
-\let\normalmr\mr
-
\let\normaltf\tf \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi}
\let\normalbf\bf \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi}
\let\normalit\it \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi}
@@ -210,15 +197,17 @@
\let\normalrm\rm \unexpanded\def\rm{\ifmmode\mathrm\else\normalrm\fi}
\let\normalss\ss \unexpanded\def\ss{\ifmmode\mathss\else\normalss\fi}
\let\normaltt\tt \unexpanded\def\tt{\ifmmode\mathtt\else\normaltt\fi}
- \unexpanded\def\mr{\ifmmode \normalmr\fi}
+
+\ifdefined\mr \else \let\mr\relax \fi
+\ifdefined\mb \else \let\mb\relax \fi
\prependtoks
\mathdefault
\to \everymathematics
-%D We could set the rendering attribute at the \LUA\ end but as there
-%D can be many small math snippets we keep track of the state at the
-%D \TEX\ end (mapping is export safe).
+%D We could set the rendering attribute at the \LUA\ end but as there can be many
+%D small math snippets we keep track of the state at the \TEX\ end (mapping is
+%D export safe).
%D
%D \starttyping
%D \startformula
@@ -265,13 +254,19 @@
\def\utfmathcommand#1{\ctxcommand{utfmathcommand(\!!bs#1\!!es)}}
\def\utfmathfiller #1{\ctxcommand{utfmathfiller (\!!bs#1\!!es)}}
+\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
+\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
+
+\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
+\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
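A usage sketch of these queries; the returned values come from char-def.lua, so the comments below are only indicative, not guaranteed:

\starttext
    \utfmathclass{+}                \par % typically: bin
    \utfmathcommand{±}              \par % typically: pm
    \utfmathcommandfiltered{±}{bin} \par % only reports a name registered with the asked class
\stoptext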
+
%D Not used that much:
\installcorenamespace{mathcodecommand}
-\unexpanded\def\mathlimop #1{\mathop{#1}} %no \limits
-\unexpanded\def\mathbox #1{\dontleavehmode\hbox\Ustartmath\mathsurround\zeropoint#1\Ustopmath}
-\unexpanded\def\mathnolop #1{\mathop{#1}\nolimits}
+\unexpanded\def\mathlimop#1{\mathop{#1}} %no \limits
+\unexpanded\def\mathbox #1{\dontleavehmode\hbox\Ustartmath\mathsurround\zeropoint#1\Ustopmath}
+\unexpanded\def\mathnolop#1{\mathop{#1}\nolimits}
\let\mathnothing\firstofoneunexpanded
\let\mathalpha \firstofoneunexpanded
@@ -383,11 +378,11 @@
\let\math_tags_mo\firstofoneunexpanded
\let\math_tags_mi\firstofoneunexpanded
-\unexpanded\def\math_tags_mn#1{\begingroup\mr#1\endgroup}
-\unexpanded\def\math_tags_ms#1{\begingroup\mr#1\endgroup}
+\unexpanded\def\math_tags_mn#1{\begingroup\mathupright#1\endgroup}
+\unexpanded\def\math_tags_ms#1{\begingroup\mathupright#1\endgroup}
-\unexpanded\def\mfunction #1{{\mr\math_tags_function{#1}}}
-\unexpanded\def\mfunctionlabeltext#1{{\mr\math_tags_functionlabeltext{#1}}}
+\unexpanded\def\mfunction #1{{\mathupright\math_tags_function{#1}}}
+\unexpanded\def\mfunctionlabeltext#1{{\mathupright\math_tags_functionlabeltext{#1}}}
% Once this is stable we can store the number at the tex end which is
% faster. Functions getnumbers >= 1000.
@@ -395,10 +390,10 @@
\expanded\def\math_tags_mathfunction_indeed #1{\ctxcommand{taggedmathfunction("#1",false,\ifconditional\c_apply_function true\else false\fi)}}
\expanded\def\math_tags_mathfunctionlabeltext_indeed#1{\ctxcommand{taggedmathfunction("#1",true ,\ifconditional\c_apply_function true\else false\fi)}}
-\expanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
-\expanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
-\expanded\def\math_tags_mn_indeed#1{\begingroup\mr\attribute\mathcategoryattribute\plusthree#1\endgroup}
-\expanded\def\math_tags_ms_indeed#1{\begingroup\mr\attribute\mathcategoryattribute\plusfour #1\endgroup}
+\expanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
+\expanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
+\expanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
+\expanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup}
\newconditional\c_apply_function
@@ -432,8 +427,8 @@
% \def\mlimitsfunction #1{\mathlimopcomm{{\mr#1}}
% \def\mnolimitsfunction#1{\mathnolopcomm{{\mr#1}}
-%D Taco posted this solution as response to a mail by Olivier, so
-%D let's integrate it here.
+%D Taco posted this solution in response to a mail by Olivier, so let's integrate
+%D it here.
\def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
@@ -455,8 +450,7 @@
{\hbox{\csname\currentmscaledstyle\endcsname\tfx #1}}
{\hbox{\csname\currentmscaledstyle\endcsname\tfxx#1}}}
-%D We can force the way functions are typeset by manipulating the text
-%D option:
+%D We can force the way functions are typeset by manipulating the text option:
%D
%D \starttyping
%D \definetypeface[iwona][ss][sans][iwona][default][encoding=texnansi]
@@ -509,7 +503,7 @@
\newtoks\activatedmathcharacters
-\def\activatemathcharacter#1%
+\unexpanded\def\activatemathcharacter#1%
{\appendtoks
\global\mathcode#1=\activemathcharcode
\to \activatedmathcharacters}
@@ -540,12 +534,11 @@
% Here follows some plain legacy: primes.
%
-% The \let\prime\math_prime_indeed might become an obsolete as we have
-% \doubleprime and \tripleprime and collapsing can nicely handle the
-% script then.
+% The \let\prime\math_prime_indeed might become obsolete as we have \doubleprime
+% and \tripleprime and collapsing can nicely handle the script then.
%
% Collapsing to 0x2033 and 0x2034 happens elsewhere.
-
+%
% \switchtobodyfont[modern]
% \switchtobodyfont[cambria]
% \switchtobodyfont[xits]
@@ -805,31 +798,40 @@
\newconstant\c_math_greek_attribute
-\setvalue{\??mathgreek\v!normal :\v!normal}{22}
-\setvalue{\??mathgreek\v!normal :\v!italic}{23}
-\setvalue{\??mathgreek\v!normal :\v!none }{21}
-
-\setvalue{\??mathgreek\v!italic :\v!normal}{32}
-\setvalue{\??mathgreek\v!italic :\v!italic}{33}
-\setvalue{\??mathgreek\v!italic :\v!none }{31}
-
-\setvalue{\??mathgreek\v!none :\v!normal}{12}
-\setvalue{\??mathgreek\v!none :\v!italic}{13}
-\letvalue{\??mathgreek\v!none :\v!none }\attributeunsetvalue
+\setvalue{\??mathgreek\v!none }{1}
+\setvalue{\??mathgreek\v!normal}{2}
+\setvalue{\??mathgreek\v!italic}{3}
\appendtoks
- \edef\p_lcgreek_ucgreek{\mathematicsparameter\s!lcgreek:\mathematicsparameter\s!ucgreek}%
- \c_math_greek_attribute\csname\??mathgreek
- \ifcsname\??mathgreek\p_lcgreek_ucgreek\endcsname\p_lcgreek_ucgreek\else\v!none\fi
- \endcsname\relax
+ \edef\p_sygreek{\mathematicsparameter\s!sygreek}%
+ \edef\p_lcgreek{\mathematicsparameter\s!lcgreek}%
+ \edef\p_ucgreek{\mathematicsparameter\s!ucgreek}%
+ \c_math_greek_attribute"% hex digits
+ \csname\??mathgreek\ifcsname\??mathgreek\p_sygreek\endcsname\p_sygreek\else\v!none\fi\endcsname
+ \csname\??mathgreek\ifcsname\??mathgreek\p_lcgreek\endcsname\p_lcgreek\else\v!none\fi\endcsname
+ \csname\??mathgreek\ifcsname\??mathgreek\p_ucgreek\endcsname\p_ucgreek\else\v!none\fi\endcsname
+ \relax
+ \ifcase\c_math_greek_attribute
+ \c_math_greek_attribute\attributeunsetvalue
+ \fi
\to \everyswitchmathematics
+% only used locally
+
+\unexpanded\def\mathgreekupright{\attribute\mathgreekattribute"222\relax}
+\unexpanded\def\mathgreekitalic {\attribute\mathgreekattribute"333\relax}
+\unexpanded\def\mathgreekdefault{\attribute\mathgreekattribute"000\relax}
+
+\let\mathgreeknormal\mathgreekupright
+\let\mathgreeknone \mathgreekdefault
+
\appendtoks
\attribute\mathgreekattribute\c_math_greek_attribute
\to \everymathematics
\setupmathematics
- [\s!lcgreek=\v!italic,
+ [\s!sygreek=\v!normal,
+ \s!lcgreek=\v!italic,
\s!ucgreek=\v!normal] % was: none
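As a worked example of the hex packing above (digit values: none=1, normal=2, italic=3, in the order sygreek, lcgreek, ucgreek):

% the default setup shown above packs to
%   sygreek=normal (2), lcgreek=italic (3), ucgreek=normal (2)  ->  "232 = 562
\setupmathematics[lcgreek=normal] % would pack to "222 = 546 instead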
%D Math italics (experiment)
@@ -857,15 +859,16 @@
\c_math_italics_attribute\csname\??mathitalics
\ifcsname\??mathitalics\p_italics\endcsname\p_italics\else\v!none\fi
\endcsname\relax
- \math_italics_initialize
+ % \math_italics_initialize
\to \everyswitchmathematics % only in mathematics
\appendtoks
+ \math_italics_initialize
\attribute\mathitalicsattribute\c_math_italics_attribute
\to \everymathematics
\setupmathematics
- [\s!italics=]
+ [\s!italics=3] % for the moment only this one makes sense .. still experimental
% looks nicer but can generate bogus csnames
%
@@ -890,27 +893,124 @@
%D
%D \blank{\getbuffer}\blank
-\newconditional\automathpunctuation
+% \newconditional\automathpunctuation
+%
+% \unexpanded\def\enablemathpunctuation {\settrue \automathpunctuation}
+% \unexpanded\def\disablemathpunctuation{\setfalse\automathpunctuation}
+%
+% \appendtoks
+% \doifelse{\mathematicsparameter\v!autopunctuation}\v!yes\settrue\setfalse\automathpunctuation
+% \to \everyswitchmathematics
+%
+% \setupmathematics
+% [\v!autopunctuation=\v!no]
+%
+% \def\math_punctuation_next{\ifx\nexttoken\blankspace\char\zerocount\fi}
+%
+% \unexpanded\def\math_punctuation_comma {\textcomma \futurelet\nexttoken\math_punctuation_next}
+% \unexpanded\def\math_punctuation_period{\textperiod\futurelet\nexttoken\math_punctuation_next}
+%
+% \setnewconstant\c_math_comma "002C
+% \setnewconstant\c_math_period "002E
+% \setnewconstant\c_math_special"8000
+%
+% \bgroup
+%
+% \catcode\c_math_comma \activecatcode
+% \catcode\c_math_period\activecatcode
+%
+% \unexpanded\gdef\math_punctuation_initialize_indeed
+% {\mathcode\c_math_comma \c_math_special
+% \mathcode\c_math_period\c_math_special
+% \let,\math_punctuation_comma
+% \let.\math_punctuation_period
+% \attribute\mathpunctuationattribute\plustwo}
+%
+% \unexpanded\gdef\math_punctuation_initialize_yes
+% {\attribute\mathpunctuationattribute\plustwo}
+%
+% \unexpanded\gdef\math_punctuation_initialize_nop
+% {\attribute\mathpunctuationattribute\plusone}
+%
+% \egroup
+%
+% \appendtoks
+% \ifconditional\automathpunctuation
+% \math_punctuation_initialize_indeed
+% \math_punctuation_initialize_yes
+% \let\enablemathpunctuation \math_punctuation_initialize_yes
+% \let\disablemathpunctuation\math_punctuation_initialize_nop
+% \fi
+% \to \everymathematics
+
+% Later I will look again into a \LUATEX\ based solution. It only makes sense
+% to delegate to \LUA\ when we have more variants and need analysis (experimental
+% trickery removed for a while).
-\unexpanded\def\enablemathpunctuation {\settrue \automathpunctuation}
-\unexpanded\def\disablemathpunctuation{\setfalse\automathpunctuation}
+\def\math_punctuation_comma_next {\ifx\nexttoken\blankspace \mathpunct{\textcomma }\else\mathord{\textcomma }\fi}
+\def\math_punctuation_period_next{\ifx\nexttoken\blankspace \mathpunct{\textperiod}\else\mathord{\textperiod}\fi}
-\appendtoks
- \doifelse{\mathematicsparameter\v!autopunctuation}\v!yes\settrue\setfalse\automathpunctuation
-\to \everyswitchmathematics
+\unexpanded\def\math_punctuation_nop_comma {\mathpunct{\textcomma}}
+\unexpanded\def\math_punctuation_all_comma {\futurelet\nexttoken\math_punctuation_comma_next}
+ \let\math_punctuation_yes_comma \math_punctuation_all_comma
+
+\unexpanded\def\math_punctuation_nop_period{\mathord{\textperiod}}
+\unexpanded\def\math_punctuation_all_period{\futurelet\nexttoken\math_punctuation_period_next}
+ \let\math_punctuation_yes_period\math_punctuation_nop_period
+
+\setnewconstant\c_math_comma "002C
+\setnewconstant\c_math_period "002E
+\setnewconstant\c_math_special"8000
+
+\installcorenamespace {mathautopunctuation}
+
+\bgroup
+
+ \catcode\c_math_comma \activecatcode
+ \catcode\c_math_period\activecatcode
+
+ \setgvalue{\??mathautopunctuation\v!no}%
+ {\let,\math_punctuation_nop_comma
+ \let.\math_punctuation_nop_period}
+
+ \setgvalue{\??mathautopunctuation\v!yes}%
+ {\let,\math_punctuation_yes_comma
+ \let.\math_punctuation_yes_period}
+
+ \setgvalue{\??mathautopunctuation\v!all}%
+ {\let,\math_punctuation_all_comma
+ \let.\math_punctuation_all_period}
+
+\egroup
+
+% \appendtoks
+% \global\mathcode\c_math_comma \c_math_special
+% \global\mathcode\c_math_period\c_math_special
+% \to \everyjob
\appendtoks
- \ifconditional\automathpunctuation\attribute\mathpunctuationattribute\plusone\fi
+ \mathcode\c_math_comma \c_math_special
+ \mathcode\c_math_period\c_math_special
+ \csname\??mathautopunctuation\mathematicsparameter\v!autopunctuation\endcsname
\to \everymathematics
+\appendtoks
+ \ifcsname\??mathautopunctuation\mathematicsparameter\v!autopunctuation\endcsname \else
+ \letmathematicsparameter\v!autopunctuation\v!no
+ \fi
+\to \everysetupmathematics
+
+\def\enablemathpunctuation {\csname\??mathautopunctuation\v!no \endcsname}
+\def\disablemathpunctuation{\csname\??mathautopunctuation\v!yes\endcsname}
+
\setupmathematics
- [\v!autopunctuation=\v!no]
+ [\v!autopunctuation=\v!no] % no | yes | all
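A usage sketch: with yes, a comma that is not followed by a space is demoted to an ordinary atom (handy for decimal numbers), while a comma followed by a space keeps its usual punctuation spacing; all treats the period the same way:

\setupmathematics[autopunctuation=yes]
\starttext
    $ f(x, y) $ % comma followed by a space: punctuation spacing, as usual
    $ 3,14 $    % no space after the comma: ordinary atom, no extra space in the number
\stoptext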
%D \macros
%D {mathstyle}
%D
-%D If one want to be sure that something is typeset in the
-%D appropriate style, \type {\mathstyle} can be used:
+%D If one wants to be sure that something is typeset in the appropriate style, \type
+%D {\mathstyle} can be used:
%D
%D \starttyping
%D \mathstyle{something}
@@ -923,10 +1023,9 @@
% {\scriptstyle #1}%
% {\scriptscriptstyle#1}}
%
-% We now have a primitive operation for this. As the
-% macro overloads a new primitive introduced in \LUATEX,
-% we need to use \type {\normalmathstyle} when we consult
-% the current math style.
+% We now have a primitive operation for this. As the macro overloads a new
+% primitive introduced in \LUATEX, we need to use \type {\normalmathstyle} when we
+% consult the current math style.
%
% \let \mathstyle \Ustack % spoils cramped
%
@@ -981,7 +1080,58 @@
\scriptscriptfont \or
\scriptscriptfont \else
\textfont
- \fi\zerocount}
+ \fi\zerocount} % hm, can be another value as well
+
+\def\mathsmallstylefont#1% #1 is number (\normalmathstyle)
+ {\ifcase#1\relax
+ \scriptfont \or
+ \scriptfont \or
+ \scriptfont \or
+ \scriptfont \or
+ \scriptscriptfont \or
+ \scriptscriptfont \or
+ \scriptscriptfont \or
+ \scriptscriptfont \else
+ \scriptfont
+ \fi\zerocount} % hm, can be another value as well
+
+\def\mathstyleface#1% #1 is number (\normalmathstyle)
+ {\ifcase#1
+ \textface \or
+ \textface \or
+ \textface \or
+ \textface \or
+ \scriptface \or
+ \scriptface \or
+ \scriptscriptface \or
+ \scriptscriptface \else
+ \textface
+ \fi}
+
+\def\mathsmallstyleface#1% #1 is number (\normalmathstyle)
+ {\ifcase#1
+ \scriptface \or
+ \scriptface \or
+ \scriptface \or
+ \scriptface \or
+ \scriptscriptface \or
+ \scriptscriptface \or
+ \scriptscriptface \or
+ \scriptscriptface \else
+ \scriptface
+ \fi}
+
+\def\mathstylecommand#1#2#3%
+ {\ifcase\normalmathstyle
+ \expandafter#1\or
+ \expandafter#1\or
+ \expandafter#1\or
+ \expandafter#1\or
+ \expandafter#2\or
+ \expandafter#2\or
+ \expandafter#3\or
+ \expandafter#3\else
+ \expandafter#1\fi}
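A sketch of the three-way selector: the first argument is used in display and text style, the second in script style, the third in scriptscript style. The helper macros \TextVariant, \ScriptVariant and \ScriptScriptVariant are hypothetical, introduced only to make the branch visible:

\starttext
    \def\TextVariant        {T}
    \def\ScriptVariant      {S}
    \def\ScriptScriptVariant{s}
    $ \mathstylecommand\TextVariant\ScriptVariant\ScriptScriptVariant
      ^{\mathstylecommand\TextVariant\ScriptVariant\ScriptScriptVariant
      ^{\mathstylecommand\TextVariant\ScriptVariant\ScriptScriptVariant}} $
    % typesets T, S and s (text, script and scriptscript style respectively)
\stoptext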
%D A plain inheritance:
@@ -1002,42 +1152,451 @@
\unexpanded\def\mathstylehbox#1%
{\normalexpanded{\hbox\bgroup
- $\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1$\egroup}
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
\unexpanded\def\mathstylevbox#1%
{\normalexpanded{\vbox\bgroup
- $\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1$\egroup}
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
\unexpanded\def\mathstylevcenter#1%
{\normalexpanded{\vcenter\bgroup
- $\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1$\egroup}
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
\unexpanded\def\mathstylevcenteredhbox#1%
{\normalexpanded{\vcenter\bgroup\hbox\bgroup
- $\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1$\egroup\egroup}
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup\egroup}
\unexpanded\def\mathstylevcenteredvbox#1%
{\normalexpanded{\vcenter\bgroup\vbox\bgroup
- $\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1$\egroup\egroup}
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup\egroup}
+
+\unexpanded\def\setmathsmalltextbox#1#2#%
+ {\normalizebodyfontsize\m_math_text_choice_face{\mathsmallstyleface\normalmathstyle}%
+ \setbox#1#2\bgroup
+ \font_basics_switchtobodyfont\m_math_text_choice_face
+ \let\next}
+
+\unexpanded\def\setmathtextbox#1#2#%
+ {\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+ \setbox#1#2\bgroup
+ \font_basics_switchtobodyfont\m_math_text_choice_face
+ \let\next}
+
+%D Here is the new mechanism ... it might replace some of the above but we will do
+%D that stepwise. Keep in mind that cramped only affects superscripts and even then,
+%D only when in a smaller size than normal.
+%D
+%D \def\TestMe#1%
+%D {\NC \ttbf #1
+%D \NC \ruledhbox{$\setupmathstyle[#1]x + x_j^2 + x_i^{e^2} + \frac{1}{x}$}
+%D \NC \ruledhbox{$\setupmathstyle[#1,small]x + x_j^2 + x_i^{e^2} + \frac{1}{x}$}
+%D \NC \NR}
+%D
+%D \starttabulate[|l|r|l|]
+%D \HL
+%D \NC \NC \NC \ttbf ...,small \NC \NR
+%D \HL
+%D \TestMe{text} \TestMe{text,cramped}
+%D \TestMe{script} \TestMe{script,cramped}
+%D \TestMe{scriptscript} \TestMe{scriptscript,cramped}
+%D \TestMe{display} \TestMe{display,cramped}
+%D \HL
+%D \stoptabulate
+
+\def\triggerdisplaystyle
+ {\ifcase\normalmathstyle\relax
+ \displaystyle \or
+ \crampeddisplaystyle \or
+ \displaystyle \or
+ \crampeddisplaystyle \or
+ \displaystyle \or
+ \crampeddisplaystyle \or
+ \displaystyle \or
+ \crampeddisplaystyle \or
+ \fi}
-%D Something similar can be used in the (re|)|definition
-%D of \type {\text}. This version is a variation on the one
-%D in the math module (see \type{m-math} and|/|or \type
-%D {m-newmat}).
+\def\triggertextstyle
+ {\ifcase\normalmathstyle\relax
+ \textstyle \or
+ \crampedtextstyle \or
+ \textstyle \or
+ \crampedtextstyle \or
+ \textstyle \or
+ \crampedtextstyle \or
+ \textstyle \or
+ \crampedtextstyle \else
+ \fi}
+
+\def\triggerscriptstyle
+ {\ifcase\normalmathstyle\relax
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \fi}
+
+\def\triggerscriptscriptstyle
+ {\ifcase\normalmathstyle\relax
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \fi}
+
+\def\triggeruncrampedstyle
+ {\ifcase\normalmathstyle\relax
+ \or \displaystyle \or
+ \or \textstyle \or
+ \or \scriptstyle \or
+ \or \scriptscriptstyle \fi}
+
+\def\triggercrampedstyle
+ {\ifcase\normalmathstyle\relax
+ \crampeddisplaystyle \or \or
+ \crampedtextstyle \or \or
+ \crampedscriptstyle \or \or
+ \crampedscriptscriptstyle \fi}
+
+\def\triggersmallstyle
+ {\ifcase\normalmathstyle\relax
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \fi}
+
+\def\triggeruncrampedsmallstyle
+ {\ifcase\normalmathstyle\relax
+ \scriptstyle \or
+ \scriptstyle \or
+ \scriptstyle \or
+ \scriptstyle \or
+ \scriptscriptstyle \or
+ \scriptscriptstyle \or
+ \scriptscriptstyle \or
+ \scriptscriptstyle \or
+ \fi}
+
+\def\triggercrampedsmallstyle
+ {\ifcase\normalmathstyle\relax
+ \crampedscriptstyle \or
+ \crampedscriptstyle \or
+ \crampedscriptstyle \or
+ \crampedscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \crampedscriptscriptstyle \or
+ \fi}
+
+\def\triggerbigstyle
+ {\ifcase\normalmathstyle\relax
+ \displaystyle \or
+ \crampeddisplaystyle \or
+ \textstyle \or
+ \crampedtextstyle \or
+ \textstyle \or
+ \crampedtextstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \or
+ \fi}
+
+\def\triggeruncrampedbigstyle
+ {\ifcase\normalmathstyle\relax
+ \displaystyle \or
+ \displaystyle \or
+ \textstyle \or
+ \textstyle \or
+ \textstyle \or
+ \textstyle \or
+ \scriptstyle \or
+ \scriptstyle \or
+ \fi}
+
+\def\triggercrampedbigstyle
+ {\ifcase\normalmathstyle\relax
+ \crampeddisplaystyle \or
+ \crampeddisplaystyle \or
+ \crampedtextstyle \or
+ \crampedtextstyle \or
+ \crampedtextstyle \or
+ \crampedtextstyle \or
+ \crampedscriptstyle \or
+ \crampedscriptstyle \or
+ \fi}
+
+\installcorenamespace{mathstylecommand}
+\installcorenamespace{mathstylecache}
+
+\newconstant\c_math_styles_state_style
+\newconstant\c_math_styles_state_cramped
+\newconstant\c_math_styles_state_size
+
+\def\math_style_add_to_cache_choice {%
+ \ifcase\c_math_styles_state_size
+ \ifcase\c_math_styles_state_style
+ \ifcase\c_math_styles_state_cramped
+ \relax \or
+ \noexpand\triggeruncrampedstyle \or
+ \noexpand\triggercrampedstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerdisplaystyle \or
+ \displaystyle \or
+ \crampeddisplaystyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggertextstyle \or
+ \textstyle \or
+ \crampedtextstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \fi
+ \fi
+ \or % small
+ \ifcase\c_math_styles_state_style
+ \ifcase\c_math_styles_state_cramped
+ \noexpand\triggersmallstyle \or
+ \noexpand\triggeruncrampedsmallstyle \or
+ \noexpand\triggercrampedsmallstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptscriptstyle \or
+ \scriptscriptstyle \or
+ \crampedscriptscriptstyle \fi
+ \fi
+ \or % large
+ \ifcase\c_math_styles_state_style
+ \ifcase\c_math_styles_state_cramped
+ \noexpand\triggerbigstyle \or
+ \noexpand\triggeruncrampedbigstyle \or
+ \noexpand\triggercrampedbigstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerdisplaystyle \or
+ \displaystyle \or
+ \crampeddisplaystyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggertextstyle \or
+ \textstyle \or
+ \crampedtextstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggertextstyle \or
+ \textstyle \or
+ \crampedtextstyle \fi
+ \or\ifcase\c_math_styles_state_cramped
+ \noexpand\triggerscriptstyle \or
+ \scriptstyle \or
+ \crampedscriptstyle \fi
+ \fi
+ \fi
+}
+
+\unexpanded\def\math_style_set#1%
+ {\edef\m_math_style_asked{#1}%
+ \ifx\m_math_style_asked\empty \else
+ \math_style_set_indeed
+ \fi}
+
+\let\setmathstyle\math_style_set
+
+\def\installmathstyle#1#2%
+ {\ifcsname\??mathstylecommand#1\endcsname \else
+ \setvalue{\??mathstylecommand#1}{#2}%
+ \fi}
+
+\def\math_style_collect#1%
+ {\csname\??mathstylecommand#1\endcsname}
+
+\setvalue{\??mathstylecommand\s!display }{\c_math_styles_state_style \plusone}
+\setvalue{\??mathstylecommand\s!text }{\c_math_styles_state_style \plustwo}
+\setvalue{\??mathstylecommand\s!script }{\c_math_styles_state_style \plusthree}
+\setvalue{\??mathstylecommand\s!scriptscript}{\c_math_styles_state_style \plusfour}
+
+\setvalue{\??mathstylecommand\s!uncramped }{\c_math_styles_state_cramped\plusone}
+\setvalue{\??mathstylecommand\s!cramped }{\c_math_styles_state_cramped\plustwo}
+\setvalue{\??mathstylecommand\v!normal }{\c_math_styles_state_cramped\plusone}
+\setvalue{\??mathstylecommand\v!packed }{\c_math_styles_state_cramped\plustwo}
+
+\setvalue{\??mathstylecommand\v!small }{\c_math_styles_state_size \plusone}
+\setvalue{\??mathstylecommand\v!big }{\c_math_styles_state_size \plustwo}
+
+\unexpanded\def\setupmathstyle[#1]%
+ {\edef\m_math_style_asked{#1}%
+ \ifx\m_math_style_asked\empty \else
+ \math_style_set_indeed
+ \fi}
+
+\unexpanded\def\usemathstyleparameter#1% faster local variant
+ {\edef\m_math_style_asked{#1\c!mathstyle}%
+ \ifx\m_math_style_asked\empty \else
+ \math_style_set_indeed
+ \fi}
+
+%D \startbuffer
+%D \definemathstyle[mystyle][scriptscript]
+%D
+%D $text\startmathstyle[mystyle]scriptscript\stopmathstyle text$
+%D \stopbuffer
+%D
+%D \typebuffer \blank \start \getbuffer \stop \blank
+
+\installcorenamespace {mathstyle}
+
+\unexpanded\def\definemathstyle
+ {\dodoubleargument\math_style_define}
+
+\def\math_style_define[#1][#2]%
+ {\c_math_styles_state_style \zerocount
+ \c_math_styles_state_cramped\zerocount
+ \c_math_styles_state_size \zerocount
+ \rawprocesscommacommand[#2]\math_style_collect
+ \expandafter\let\csname\??mathstyle#1\normalexpanded{\endcsname\math_style_add_to_cache_choice}}
+
+% \def\math_style_set_indeed
+% {\ifcsname\??mathstyle\m_math_style_asked\endcsname
+% \csname\??mathstyle\m_math_style_asked\endcsname
+% \else
+% \math_style_set_indeed_cached
+% \fi}
+%
+% \def\math_style_set_indeed_cached
+% {\ifcsname\??mathstylecache\m_math_style_asked\endcsname
+% % already in cache
+% \else
+% \math_style_add_to_cache
+% \fi
+% \csname\??mathstylecache\m_math_style_asked\endcsname}
+%
+% \def\math_style_add_to_cache
+% {\c_math_styles_state_style \zerocount
+% \c_math_styles_state_cramped\zerocount
+% \c_math_styles_state_size \zerocount
+% \rawprocesscommacommand[\m_math_style_asked]\math_style_collect
+% \global\expandafter\let\csname\??mathstylecache\m_math_style_asked\normalexpanded{\endcsname\math_style_add_to_cache_choice}}
+%
+% ugly but more efficient (as called often)
+
+\def\math_style_set_indeed
+ {\csname\??mathstyle
+ \ifcsname\??mathstyle\m_math_style_asked\endcsname
+ \m_math_style_asked
+ \else
+ \??mathstyle
+ \fi
+ \endcsname}
+
+\setvalue{\??mathstyle\??mathstyle}%
+ {\csname\??mathstylecache
+ \ifcsname\??mathstylecache\m_math_style_asked\endcsname
+ \m_math_style_asked
+ \else
+ \??mathstylecache
+ \fi
+ \endcsname}
+
+\setvalue{\??mathstylecache\??mathstylecache}%
+ {\c_math_styles_state_style \zerocount
+ \c_math_styles_state_cramped\zerocount
+ \c_math_styles_state_size \zerocount
+ \rawprocesscommacommand[\m_math_style_asked]\math_style_collect
+ \global\expandafter\let\csname\??mathstylecache\m_math_style_asked\normalexpanded{\endcsname\math_style_add_to_cache_choice}%
+ \csname\??mathstylecache\m_math_style_asked\endcsname}
+
+%D \startbuffer
+%D $x\begingroup\setupmathstyle[script]x\endgroup x$
+%D $x{\setupmathstyle[script]x}x$
+%D $x\startmathstyle[script]x\stopmathstyle x$
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\unexpanded\def\startmathstyle[#1]%
+ {\edef\m_math_style_asked{#1}%
+ \ifx\m_math_style_asked\empty
+ \let\stopmathstyle\relax
+ \else
+ \bgroup
+ \math_style_set_indeed
+ \let\stopmathstyle\egroup
+ \fi}
+
+\let\stopmathstyle\relax
+
+\unexpanded\def\startusemathstyleparameter#1%
+ {\edef\m_math_style_asked{#1\c!mathstyle}%
+ \ifx\m_math_style_asked\empty
+ \let\stopusemathstyleparameter\relax
+ \else
+ \bgroup
+ \math_style_set_indeed
+ \let\stopusemathstyleparameter\egroup
+ \fi}
+
+\let\stopusemathstyleparameter\relax
+
+%D Something similar can be used in the (re|)|definition of \type {\text}. This
+%D version is a variation on the one in the math module (see \type{m-math} and|/|or
+%D \type {m-newmat}).
\unexpanded\def\mathtext
{\mathortext\math_text_choice\hbox}
-\def\math_text_choice#1%
- {\mathchoice
- {\math_text_choice_indeed\displaystyle\textface {#1}}%
- {\math_text_choice_indeed\textstyle \textface {#1}}%
- {\math_text_choice_indeed\textstyle \scriptface {#1}}%
- {\math_text_choice_indeed\textstyle \scriptscriptface{#1}}}
+% \def\math_text_choice#1%
+% {\mathchoice
+% {\math_text_choice_indeed\displaystyle\textface {#1}}%
+% {\math_text_choice_indeed\textstyle \textface {#1}}%
+% {\math_text_choice_indeed\textstyle \scriptface {#1}}%
+% {\math_text_choice_indeed\textstyle \scriptscriptface{#1}}}
+%
+% \def\math_text_choice_indeed#1#2#3% no \everymath !
+% {\hbox{\everymath{#1}\switchtobodyfont[#2]#3}} % 15 sec
+
+% \let\m_math_text_choice_style\relax
+%
+% \def\math_text_choice#1%
+% {\edef\m_math_text_choice_style{\normalmathstyle}%
+% \hbox\bgroup
+% % \everymath{\triggermathstyle\m_math_text_choice_style}%
+% \normalizebodyfontsize\m_math_text_choice_style{\mathstylefont\m_math_text_choice_style}%
+% \font_basics_switchtobodyfont\m_math_text_choice_style
+% #1%
+% \egroup}
+
+\let\m_math_text_choice_face \relax
+
+% \def\math_text_choice#1% if needed we can get rid of the normalize (predo in font code)
+% {\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+% \hbox{\font_basics_switchtobodyfont\m_math_text_choice_face#1}}
-\def\math_text_choice_indeed#1#2#3% no \everymath !
- %{\hbox{\everymath{#1}\switchtobodyfont [#2]#3}} % 15 sec
- {\hbox{\everymath{#1}\setcurrentfontbody{#2}#3}} % 3 sec (no math)
+\def\math_text_choice% if needed we can get rid of the normalize (predo in font code)
+ {\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+ \hbox\bgroup\font_basics_switchtobodyfont\m_math_text_choice_face\let\next}
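+  % note (added remark): \let\next gobbles the opening brace of the following
+  % argument, so the user's closing brace ends the \hbox opened with \bgroup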
%D Safeguard against redefinitions:
@@ -1045,8 +1604,7 @@
\let\_\normalunderscore % is textunderscore or fakeunderscore
\to \everymathematics
-%D Because we may overload \type {\text} in other (structuring)
-%D macros, we say:
+%D Because we may overload \type {\text} in other (structuring) macros, we say:
\appendtoks \let\text\mathtext \to \everymathematics
@@ -1115,8 +1673,33 @@
\unexpanded\def\mathoptext#1{\mathop{\text{#1}}}
-% for a while:
+% new:
+
+% \startsetups math:morespacing
+% \Umathordordspacing\textstyle 1mu plus .5mu minus .25mu\relax
+% \stopsetups
+%
+% \setupmathematics
+% [setups=math:morespacing]
+
+\appendtoks
+ \edef\p_setups{\mathematicsparameter\c!setups}%
+ \ifx\p_setups\empty\else
+ \directsetup\p_setups
+ \fi
+\to \everyswitchmathematics
+
+% new:
+
+\unexpanded\def\smallmathsymbol#1%
+ {\mathematics{\mathbin{\normalexpanded{\raise.15\exheight\hbox{$\triggermathstyle{\the\numexpr\normalmathstyle+2\relax}#1$}}}}}
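+
+% A usage sketch (not part of the original file): the argument is raised a bit
+% and typeset with the math style number increased by two, i.e. one size
+% smaller, so something like $x \smallmathsymbol{\star} y$ gives a reduced
+% binary symbol.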
+
+% this should be a primitive:
+
+% \def\mathextensiblecode#1#2%
+% {\cldcontext{mathematics.extensiblecode(\number#1,\number#2)}}
-\def\Umathbotaccent{\Umathaccent bottom }
+\def\mathextensiblecode#1#2{\ctxcommand{extensiblecode(\number#1,\number#2)}}
+\def\mathhorizontalcode#1#2{\ctxcommand{horizontalcode(\number#1,\number#2)}}
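+
+% Both expand to a status code for a (family,character) pair; elsewhere (in
+% math-stc) such a code is tested along the lines of
+% \ifcase\mathextensiblecode\fam\scratchunicode\relax ... \else ... \fi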
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-int.mkiv b/Master/texmf-dist/tex/context/base/math-int.mkiv
index 84c51cb2477..6b480961b6a 100644
--- a/Master/texmf-dist/tex/context/base/math-int.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-int.mkiv
@@ -19,12 +19,10 @@
%D $\int _a^b f(x) dx $ and also
%D $\iint _a^b f(x,y) dxdy$,
%D $\iiint _a^b f(x,y) dxdy$,
-%D $\iiiint _a^b f(x) dx $.
%D \startformula
%D \int _a^b f(x) dx \quad
%D \iint _a^b f(x) dx \quad
%D \iiint _a^b f(x) dx \quad
-%D \iiiint _a^b f(x) dx \quad
%D \stopformula
%D \stopbuffer
%D
@@ -38,7 +36,7 @@
\installcorenamespace{mathintegral}
-\newconstant\mathintlimitmode % 0 nolimits 1 displaylimits 2 limits
+\newconstant\mathintlimitmode
\def\intlimits % also used elsewhere
{\ifcase\mathintlimitmode
@@ -47,84 +45,97 @@
\displaylimits
\or
\limits
+ \or
+ % auto
+ \ifcase\normalmathstyle\displaylimits\or\displaylimits\else\limits\fi
+ % \ifnum\attribute\mathmodeattribute=\plusone % we need a proper flag
+ % \displaylimits
+ % \else
+ % \limits
+ % \fi
+ \else
+ % none
\fi}
\letvalue{\??mathintegral nolimits}\zerocount
\letvalue{\??mathintegral displaylimits}\plusone
\letvalue{\??mathintegral limits}\plustwo
+\letvalue{\??mathintegral autolimits}\plusthree
+\letvalue{\??mathintegral none}\plusfour
\appendtoks
\mathintlimitmode\executeifdefined{\??mathintegral\mathematicsparameter\s!integral}\zerocount
\to \everyswitchmathematics
\setupmathematics
- [\v!integral=nolimits]
+% [\v!integral=nolimits]
+ [\v!integral=autolimits]
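+
+% The other keys defined above remain available; a usage sketch (not in the
+% original file) that restores the old behaviour:
+%
+% \setupmathematics[integral=nolimits]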
%D The following code is used for fallbacks and might become obsolete once
%D we have enough \OPENTYPE\ math fonts.
-\def\math_repeated_integal_i
- {\int}
-
-\def\math_repeated_integal_ii
- {\math_repeated_integal_i
- \math_repeated_integral_kern
- \math_repeated_integal_i
- \math_repeat_integral_finish
- \intlimits}
-
-\def\math_repeated_integal_iii
- {\math_repeated_integal_i
- \math_repeated_integral_kern
- \math_repeated_integal_ii}
-
-\def\math_repeated_integal_iiii
- {\math_repeated_integal_i
- \math_repeated_integral_kern
- \math_repeated_integal_iii}
-
-\unexpanded\def\math_repeat_integral#1%
- {\let\math_repeat_integral_finish\donothing
- \iffontchar\textfont\zerocount#1\relax
- \expandafter\math_repeat_integral_real
- \else
- \expandafter\math_repeat_integral_fake
- \fi}
-
-\def\math_repeat_integral_fake#1#2%
- {\let\math_repeat_integral_fake_symbol#2%
- \futurelet\next\math_repeat_integral_fake_indeed}
-
-\def\math_repeat_integral_real#1#2%
- {#1}
-
-\definemathcommand [iint] {\math_repeat_integral{"0222C}\normalint \math_repeated_integal_ii } % double
-\definemathcommand [iiint] {\math_repeat_integral{"0222D}\normaliint \math_repeated_integal_iii } % tripple
-\definemathcommand [iiiint] {\math_repeat_integral{"FFFFF}\normaliiint\math_repeated_integal_iiii} % quadruple
-
-\def\math_repeated_integral_kern
- {\mkern-6mu\mathchoice{\mkern-3mu}{}{}{}}
-
-\def\math_repeat_integral_fake_indeed
- {\ifx\next\limits
- \math_repeated_integral_correction
- \else\ifx\next\displaylimits
- \math_repeated_integral_correction
- \else\ifx\next\nolimits
- % nothing
- \else\ifcase\mathintlimitmode
- % nothing
- \else
- \math_repeated_integral_correction
- \fi\fi\fi\fi
- \math_repeat_integral_fake_symbol}
-
-\def\math_repeated_integral_correction
- {\mkern-7mu\mathchoice{\mkern-2mu}{}{}{}%
- \mathop\bgroup\mkern7mu\mathchoice{\mkern2mu}{}{}{}\let\math_repeat_integral_finish\egroup}
-
-%D If the \type{\limits} option is used after \type {\iint}, use \type
-%D {\mathop} and fudge the left hand space a bit to make the subscript
-%D visually centered.
+% \def\math_repeated_integal_i
+% {\int}
+
+% \def\math_repeated_integal_ii
+% {\math_repeated_integal_i
+% \math_repeated_integral_kern
+% \math_repeated_integal_i
+% \math_repeat_integral_finish
+% \intlimits}
+
+% \def\math_repeated_integal_iii
+% {\math_repeated_integal_i
+% \math_repeated_integral_kern
+% \math_repeated_integal_ii}
+%
+% \def\math_repeated_integal_iiii
+% {\math_repeated_integal_i
+% \math_repeated_integral_kern
+% \math_repeated_integal_iii}
+%
+% \unexpanded\def\math_repeat_integral#1%
+% {\let\math_repeat_integral_finish\donothing
+% \iffontchar\textfont\zerocount#1\relax
+% \expandafter\math_repeat_integral_real
+% \else
+% \expandafter\math_repeat_integral_fake
+% \fi}
+%
+% \def\math_repeat_integral_fake#1#2%
+% {\let\math_repeat_integral_fake_symbol#2%
+% \futurelet\next\math_repeat_integral_fake_indeed}
+%
+% \def\math_repeat_integral_real#1#2%
+% {#1}
+%
+% \definemathcommand [iint] {\math_repeat_integral{"0222C}\normalint \math_repeated_integal_ii } % double
+% \definemathcommand [iiint] {\math_repeat_integral{"0222D}\normaliint \math_repeated_integal_iii } % tripple
+% \definemathcommand [iiiint] {\math_repeat_integral{"FFFFF}\normaliiint\math_repeated_integal_iiii} % quadruple
+%
+% \def\math_repeated_integral_kern
+% {\mkern-6mu\mathchoice{\mkern-3mu}{}{}{}}
+%
+% \def\math_repeat_integral_fake_indeed
+% {\ifx\next\limits
+% \math_repeated_integral_correction
+% \else\ifx\next\displaylimits
+% \math_repeated_integral_correction
+% \else\ifx\next\nolimits
+% % nothing
+% \else\ifcase\mathintlimitmode
+% % nothing
+% \else
+% \math_repeated_integral_correction
+% \fi\fi\fi\fi
+% \math_repeat_integral_fake_symbol}
+%
+% \def\math_repeated_integral_correction
+% {\mkern-7mu\mathchoice{\mkern-2mu}{}{}{}%
+% \mathop\bgroup\mkern7mu\mathchoice{\mkern2mu}{}{}{}\let\math_repeat_integral_finish\egroup}
+%
+% %D If the \type{\limits} option is used after \type {\iint}, use \type
+% %D {\mathop} and fudge the left hand space a bit to make the subscript
+% %D visually centered.
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-map.lua b/Master/texmf-dist/tex/context/base/math-map.lua
index cd16736110b..9a8c8a69c2a 100644
--- a/Master/texmf-dist/tex/context/base/math-map.lua
+++ b/Master/texmf-dist/tex/context/base/math-map.lua
@@ -31,18 +31,19 @@ if not modules then modules = { } end modules ['math-map'] = {
local type, next = type, next
local floor, div = math.floor, math.div
local merged = table.merged
+local extract = bit32.extract
local allocate = utilities.storage.allocate
local texattribute = tex.attribute
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
+local setmetatableindex = table.setmetatableindex
-local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
+local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
+local report_remapping = logs.reporter("mathematics","remapping")
-local report_remapping = logs.reporter("mathematics","remapping")
-
-mathematics = mathematics or { }
-local mathematics = mathematics
+mathematics = mathematics or { }
+local mathematics = mathematics
-- Unfortunately some alphabets have gaps (thereby troubling all applications that
-- need to deal with math). Somewhat strange considering all those weird symbols that
@@ -104,8 +105,8 @@ registerotffeature {
-- fallbacks; symbols is currently mostly greek
local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
-local function toupper(n) local t = { } for i=0,26 do t[0x00041+i] = n+i end return t end
-local function tolower(n) local t = { } for i=0,26 do t[0x00061+i] = n+i end return t end
+local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
+local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
local regular_tf = {
digits = todigit(0x00030),
@@ -456,7 +457,7 @@ remap("it","bi")
mathematics.boldmap = boldmap
-local mathremap = { }
+local mathremap = allocate { }
for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
for style, data in next, styles do
@@ -469,6 +470,8 @@ for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for
end
end
+mathematics.mapremap = mathremap
+
-- beware, these are shared tables (no problem since they're not
-- in unicode)
@@ -513,7 +516,7 @@ end
local mathalphabet = attributes.private("mathalphabet")
function mathematics.getboth(alphabet,style)
- local data = alphabets[alphabet or "regular"] or regular
+ local data = alphabet and alphabets[alphabet] or regular
data = data[style or "tf"] or data.tf
return data and data.attribute
end
@@ -526,8 +529,8 @@ function mathematics.getstyle(style)
end
function mathematics.syncboth(alphabet,style)
- local data = alphabets[alphabet or "regular"] or regular
- data = data[style or "tf"] or data.tf
+ local data = alphabet and alphabets[alphabet] or regular
+ data = style and data[style] or data.tf
texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
end
@@ -539,73 +542,123 @@ function mathematics.syncstyle(style)
end
function mathematics.syncname(alphabet)
---~ local r = mathremap[mathalphabet]
+ -- local r = mathremap[mathalphabet]
local r = mathremap[texattribute[mathalphabet]]
local style = r and r.style or "tf"
local data = alphabets[alphabet][style]
texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
end
-local issymbol = regular.tf.symbols
-local islcgreek = regular.tf.lcgreek
-local isucgreek = regular.tf.ucgreek
+local islcgreek = regular_tf.lcgreek
+local isucgreek = regular_tf.ucgreek
+local issygreek = regular_tf.symbols
+local isgreek = merged(islcgreek,isucgreek,issygreek)
-local remapping = {
+local greekremapping = {
[1] = { what = "unchanged" }, -- upright
[2] = { what = "upright", it = "tf", bi = "bf" }, -- upright
[3] = { what = "italic", tf = "it", bf = "bi" }, -- italic
}
+local usedremap = { }
+
+local function resolver(map)
+ return function (t,k)
+ local v =
+ map.digits [k] or
+ map.lcletters[k] or map.ucletters[k] or
+ map.lcgreek [k] or map.ucgreek [k] or
+ map.symbols [k] or k
+ t[k] = v
+ return v
+ end
+end
+
+for k, v in next, mathremap do
+ local t = { }
+ setmetatableindex(t,resolver(v))
+ usedremap[k] = t
+end
+
+local function remapgreek(mathalphabet,how,detail,char)
+ local r = mathremap[mathalphabet] -- what if 0
+ local alphabet = r and r.alphabet or "regular"
+ local style = r and r.style or "tf"
+ local remapping = greekremapping[how]
+ if trace_greek then
+ report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","before",detail,char,alphabet,style,remapping.what)
+ end
+ local newstyle = remapping[style]
+ if newstyle then
+ local data = alphabets[alphabet][newstyle] -- always something
+ mathalphabet = data and data.attribute or mathalphabet
+ style = newstyle
+ end
+ if trace_greek then
+ report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","after",detail,char,alphabet,style,remapping.what)
+ end
+ return mathalphabet, style
+end
+
function mathematics.remapalphabets(char,mathalphabet,mathgreek)
+ if not mathalphabet then
+ return
+ end
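+    -- the mathgreek attribute packs three remap codes in nibbles: uppercase in
+    -- bits 0-3, lowercase in bits 4-7 and symbols in bits 8-11 (see the extract
+    -- calls below); a value larger than one triggers a remap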
if mathgreek and mathgreek > 0 then
- local lc, uc = floor(mathgreek/10), mathgreek % 10 -- 2 == upright 3 == italic
- if lc > 1 or uc > 1 then
- local islc, isuc = islcgreek[char] and lc, isucgreek[char] and uc
- if islc or isuc then
- local r = mathremap[mathalphabet] -- what if 0
- local alphabet = r and r.alphabet or "regular"
- local style = r and r.style or "tf"
- if trace_greek then
- report_remapping("before: char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
- end
- local s = remapping[islc or isuc][style]
- if s then
- local data = alphabets[alphabet][s]
- mathalphabet, style = data and data.attribute or mathalphabet, s
- end
- if trace_greek then
- report_remapping("after : char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
- end
+ if not isgreek[char] then
+ -- nothing needed
+ elseif islcgreek[char] then
+ local lc = extract(mathgreek,4,4)
+ if lc > 1 then
+ mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char)
+ end
+ elseif isucgreek[char] then
+ local uc = extract(mathgreek,0,4)
+ if uc > 1 then
+ mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char)
+ end
+ elseif issygreek[char] then
+ local sy = extract(mathgreek,8,4)
+ if sy > 1 then
+ mathalphabet = remapgreek(mathalphabet,sy,"symbol",char)
end
end
end
- -- table test can go away
- if mathalphabet and mathalphabet > 0 then
- local newchar
- local offset = mathremap[mathalphabet]
- if not offset then
- -- nothing to remap
- elseif char >= 0x030 and char <= 0x039 then
- local o = offset.digits
- newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x030 + o))
- elseif char >= 0x041 and char <= 0x05A then
- local o = offset.ucletters
- newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x041 + o))
- elseif char >= 0x061 and char <= 0x07A then
- local o = offset.lcletters
- newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x061 + o))
- elseif islcgreek[char] then
- newchar = offset.lcgreek[char]
- elseif isucgreek[char] then
- newchar = offset.ucgreek[char]
- elseif issymbol[char] then
- newchar = offset.symbols[char]
+ if mathalphabet > 0 then
+ local remap = usedremap[mathalphabet] -- redundant check
+ if remap then
+ local newchar = remap[char]
+ return newchar ~= char and newchar
end
- return newchar ~= char and newchar
end
- return nil
+ -- return nil
end
+-- begin of experiment
+
+local fallback = {
+ tf = "bf",
+ it = "bi",
+ bf = "tf",
+ bi = "it",
+}
+
+function mathematics.fallbackstyleattr(attribute)
+ local r = mathremap[attribute]
+ local alphabet = r.alphabet or "regular"
+ local style = r.style or "tf"
+ local fback = fallback[style]
+ if fback then
+ local data = alphabets[alphabet][fback]
+ if data then
+ local attr = data.attribute
+ return attribute ~= attr and attr
+ end
+ end
+end
+
+-- end of experiment
+
local function checkedcopy(characters,child,parent)
for k, v in next, child do
if not characters[v] then
diff --git a/Master/texmf-dist/tex/context/base/math-mis.mkiv b/Master/texmf-dist/tex/context/base/math-mis.mkiv
new file mode 100644
index 00000000000..6346da9a02d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-mis.mkiv
@@ -0,0 +1,60 @@
+%D \module
+%D [ file=math-mis,
+%D version=2013.04.06, % 2007.07.19,
+%D title=\CONTEXT\ Math Macros,
+%D subtitle=Miscellaneous,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Math Macros / Miscellaneous}
+
+\unprotect
+
+%D This file contains left-overs moved from other files. It's mostly old stuff
+%D that we keep around for compatibility reasons.
+
+%D \macros
+%D {qedsymbol}
+%D
+%D [HH] The general Quod Erat Demonstrandum symbol is defined in such a way that
+%D we can configure it. Because this symbol is also used in text mode, we make it
+%D a normal text symbol with special behavior.
+
+\unexpanded\def\qedsymbol#1%
+ {\ifhmode
+ \unskip\nobreakspace\hfill#1\par
+ \else\ifmmode
+ #1\relax % leading \eqno removed
+ \else
+ \dontleavehmode\emptyhbox\hfill#1\par
+ \fi\fi}
+
+\definesymbol [qed] [\qedsymbol{\mathematics{\square}}]
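+
+%D Being a regular symbol it can be redefined; a hypothetical variant (not part
+%D of this file) could be:
+%D
+%D \starttyping
+%D \definesymbol [qed] [\qedsymbol{\mathematics{\blacksquare}}]
+%D \stoptyping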
+
+%D \macros
+%D {QED}
+%D
+%D [HH] For compatibility reasons we also provide the \type {\QED} command. In case
+%D this command is overloaded, we still have the symbol available. \symbol[qed]
+
+\unexpanded\def\QED{\symbol[qed]}
+
+%D \macros
+%D {boxed}
+%D
+%D [HH] Another macro that users might expect (slightly adapted):
+
+\unexpanded\def\boxed % maybe obsolete
+ {\ifmmode\expandafter\mframed\else\expandafter\framed\fi}
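+
+%D So \type {\boxed{...}} frames its content with \type {\mframed} in math mode
+%D and with \type {\framed} in text mode, e.g.\ (a sketch) \type {$\boxed{a+b}$}.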
+
+%D Something low level for scientific calculator notation:
+
+\unexpanded\def\scinot#1#2%
+ {#1\times10^{#2}}
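+
+%D For example (a usage sketch): in math mode \type {\scinot{2.56}{10}} expands
+%D to \type {2.56\times10^{10}}.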
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-noa.lua b/Master/texmf-dist/tex/context/base/math-noa.lua
index 8caf21cc2cf..2371110c14e 100644
--- a/Master/texmf-dist/tex/context/base/math-noa.lua
+++ b/Master/texmf-dist/tex/context/base/math-noa.lua
@@ -18,11 +18,8 @@ if not modules then modules = { } end modules ['math-noa'] = {
-- 20D6 -> 2190
-- 20D7 -> 2192
-local utf = unicode.utf8
-
local utfchar, utfbyte = utf.char, utf.byte
-local format, rep = string.format, string.rep
-local concat = table.concat
+local formatters = string.formatters
local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics
@@ -37,6 +34,7 @@ local trace_normalizing = false trackers.register("math.normalizing", functio
local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end)
local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
+local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end)
local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end)
local trace_families = false trackers.register("math.families", function(v) trace_families = v end)
@@ -48,30 +46,40 @@ local report_normalizing = logs.reporter("mathematics","normalizing")
local report_collapsing = logs.reporter("mathematics","collapsing")
local report_goodies = logs.reporter("mathematics","goodies")
local report_variants = logs.reporter("mathematics","variants")
+local report_alternates = logs.reporter("mathematics","alternates")
local report_italics = logs.reporter("mathematics","italics")
local report_families = logs.reporter("mathematics","families")
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
+local a_mathrendering = attributes.private("mathrendering")
+local a_exportstatus = attributes.private("exportstatus")
+
local mlist_to_hlist = node.mlist_to_hlist
local font_of_family = node.family_font
local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
local free_node = node.free
local new_node = node.new -- todo: pool: math_noad math_sub
local new_kern = nodes.pool.kern
+local new_rule = nodes.pool.rule
+local concat_nodes = nodes.concat
+
+local topoints = number.points
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local fontcharacters = fonthashes.characters
local fontproperties = fonthashes.properties
local fontitalics = fonthashes.italics
-local fontquads = fonthashes.quads
+local fontemwidths = fonthashes.emwidths
+local fontexheights = fonthashes.exheights
local variables = interfaces.variables
local texattribute = tex.attribute
local unsetvalue = attributes.unsetvalue
+local chardata = characters.data
+
noads = noads or { } -- todo: only here
local noads = noads
@@ -89,6 +97,9 @@ local noadcodes = nodes.noadcodes
local noad_ord = noadcodes.ord
local noad_rel = noadcodes.rel
local noad_punct = noadcodes.punct
+local noad_opdisplaylimits= noadcodes.opdisplaylimits
+local noad_oplimits = noadcodes.oplimits
+local noad_opnolimits = noadcodes.opnolimits
local math_noad = nodecodes.noad -- attr nucleus sub sup
local math_accent = nodecodes.accent -- attr nucleus sub sup accent
@@ -103,6 +114,9 @@ local math_style = nodecodes.style -- attr style
local math_choice = nodecodes.choice -- attr display text script scriptscript
local math_fence = nodecodes.fence -- attr subtype
+local hlist_code = nodecodes.hlist
+local glyph_code = nodecodes.glyph
+
local left_fence_code = 1
local function process(start,what,n,parent)
@@ -110,17 +124,15 @@ local function process(start,what,n,parent)
while start do
local id = start.id
if trace_processing then
- local margin = rep(" ",n or 0)
- local detail = tostring(start)
if id == math_noad then
- report_processing("%s%s (class: %s)",margin,detail,noadcodes[start.subtype] or "?")
+ report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
elseif id == math_char then
local char = start.char
local fam = start.fam
local font = font_of_family(fam)
- report_processing("%s%s (family: %s, font: %s, char: %s, shape: %s)",margin,detail,fam,font,char,utfchar(char))
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
else
- report_processing("%s%s",margin,detail)
+ report_processing("%w%S",n*2,start)
end
end
local proc = what[id]
@@ -135,8 +147,6 @@ local function process(start,what,n,parent)
end
elseif id == math_char or id == math_textchar or id == math_delim then
break
- elseif id == math_style then
- -- has a next
elseif id == math_noad then
local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
noad = start.sup if noad then process(noad,what,n,start) end -- list
@@ -168,6 +178,8 @@ local function process(start,what,n,parent)
noad = start.sub if noad then process(noad,what,n,start) end -- list
noad = start.accent if noad then process(noad,what,n,start) end -- list
noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
+ elseif id == math_style then
+ -- has a next
else
-- glue, penalty, etc
end
@@ -177,9 +189,9 @@ end
local function processnoads(head,actions,banner)
if trace_processing then
- report_processing("start '%s'",banner)
+ report_processing("start %a",banner)
process(head,actions)
- report_processing("stop '%s'",banner)
+ report_processing("stop %a",banner)
else
process(head,actions)
end
@@ -187,19 +199,101 @@ end
noads.process = processnoads
+-- experiment (when not present fall back to fam 0) -- needs documentation
+
+-- 0-2 regular
+-- 3-5 bold
+-- 6-8 pseudobold
+
+local families = { }
+local a_mathfamily = attributes.private("mathfamily")
+local boldmap = mathematics.boldmap
+
+local familymap = { [0] =
+ "regular",
+ "regular",
+ "regular",
+ "bold",
+ "bold",
+ "bold",
+ "pseudobold",
+ "pseudobold",
+ "pseudobold",
+}
+
+families[math_char] = function(pointer)
+ if pointer.fam == 0 then
+ local a = pointer[a_mathfamily]
+ if a and a > 0 then
+ pointer[a_mathfamily] = 0
+ if a > 5 then
+ local char = pointer.char
+ local bold = boldmap[char]
+ local newa = a - 3
+ if bold then
+ pointer[a_exportstatus] = char
+ pointer.char = bold
+ if trace_families then
+ report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
+ end
+ else
+ if trace_families then
+ report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+ end
+ end
+ pointer.fam = newa
+ else
+ if trace_families then
+ local char = pointer.char
+ report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
+ end
+ pointer.fam = a
+ end
+ else
+ -- pointer.fam = 0
+ end
+ end
+end
+
+families[math_delim] = function(pointer)
+ if pointer.small_fam == 0 then
+ local a = pointer[a_mathfamily]
+ if a and a > 0 then
+ pointer[a_mathfamily] = 0
+ if a > 5 then
+ -- no bold delimiters in unicode
+ a = a - 3
+ end
+ pointer.small_fam = a
+ pointer.large_fam = a
+ else
+ pointer.small_fam = 0
+ pointer.large_fam = 0
+ end
+ end
+end
+
+families[math_textchar] = families[math_char]
+
+function handlers.families(head,style,penalties)
+ processnoads(head,families,"families")
+ return true
+end
+
-- character remapping
-local mathalphabet = attributes.private("mathalphabet")
-local mathgreek = attributes.private("mathgreek")
+local a_mathalphabet = attributes.private("mathalphabet")
+local a_mathgreek = attributes.private("mathgreek")
processors.relocate = { }
local function report_remap(tag,id,old,new,extra)
- report_remapping("remapping %s in font %s from U+%05X (%s) to U+%05X (%s)%s",tag,id,old,utfchar(old),new,utfchar(new),extra or "")
+ report_remapping("remapping %s in font %s from %C to %C%s",tag,id,old,new,extra)
end
-local remapalphabets = mathematics.remapalphabets
-local setnodecolor = nodes.tracers.colors.set
+local remapalphabets = mathematics.remapalphabets
+local fallbackstyleattr = mathematics.fallbackstyleattr
+local setnodecolor = nodes.tracers.colors.set
--~ This does not work out well, as there are no fallbacks. Ok, we could
--~ define a poor mans simplify mechanism.
@@ -219,22 +313,62 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- set_attribute(pointer,exportstatus,char) -- testcase: exponentiale
+ pointer[a_exportstatus] = char -- testcase: exponentiale
pointer.char = newchar
return true
end
end
end
+-- processors.relocate[math_char] = function(pointer)
+-- local g = pointer[a_mathgreek] or 0
+-- local a = pointer[a_mathalphabet] or 0
+-- if a > 0 or g > 0 then
+-- if a > 0 then
+-- pointer[a_mathgreek] = 0
+-- end
+-- if g > 0 then
+-- pointer[a_mathalphabet] = 0
+-- end
+-- local char = pointer.char
+-- local newchar = remapalphabets(char,a,g)
+-- if newchar then
+-- local fam = pointer.fam
+-- local id = font_of_family(fam)
+-- local characters = fontcharacters[id]
+-- if characters and characters[newchar] then
+-- if trace_remapping then
+-- report_remap("char",id,char,newchar)
+-- end
+-- if trace_analyzing then
+-- setnodecolor(pointer,"font:isol")
+-- end
+-- pointer.char = newchar
+-- return true
+-- else
+-- if trace_remapping then
+-- report_remap("char",id,char,newchar," fails")
+-- end
+-- end
+-- end
+-- end
+-- if trace_analyzing then
+-- setnodecolor(pointer,"font:medi")
+-- end
+-- if check_coverage then
+-- return checked(pointer)
+-- end
+-- end
+
processors.relocate[math_char] = function(pointer)
- local g = has_attribute(pointer,mathgreek) or 0
- local a = has_attribute(pointer,mathalphabet) or 0
+ local g = pointer[a_mathgreek] or 0
+ local a = pointer[a_mathalphabet] or 0
if a > 0 or g > 0 then
if a > 0 then
- set_attribute(pointer,mathgreek,0)
+ pointer[a_mathgreek] = 0
end
if g > 0 then
- set_attribute(pointer,mathalphabet,0)
+ pointer[a_mathalphabet] = 0
end
local char = pointer.char
local newchar = remapalphabets(char,a,g)
@@ -242,7 +376,7 @@ processors.relocate[math_char] = function(pointer)
local fam = pointer.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
- if characters and characters[newchar] then
+ if characters[newchar] then
if trace_remapping then
report_remap("char",id,char,newchar)
end
@@ -252,8 +386,27 @@ processors.relocate[math_char] = function(pointer)
pointer.char = newchar
return true
else
- if trace_remapping then
- report_remap("char",id,char,newchar," fails")
+ local fallback = fallbackstyleattr(a)
+ if fallback then
+ local newchar = remapalphabets(char,fallback,g)
+ if newchar then
+ if characters[newchar] then
+ if trace_remapping then
+ report_remap("char",id,char,newchar," (fallback remapping used)")
+ end
+ if trace_analyzing then
+ setnodecolor(pointer,"font:isol")
+ end
+ pointer.char = newchar
+ return true
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback character)")
+ end
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback remap character)")
+ end
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback style)")
end
end
end
@@ -285,15 +438,12 @@ end
-- rendering (beware, not exported)
-local a_mathrendering = attributes.private("mathrendering")
-local a_exportstatus = attributes.private("exportstatus")
-
processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = has_attribute(pointer,a_mathrendering)
+ local attr = pointer[a_mathrendering]
if attr and attr > 0 then
local char = pointer.char
local renderset = rendersets[attr]
@@ -305,7 +455,7 @@ processors.render[math_char] = function(pointer)
local characters = fontcharacters[id]
if characters and characters[newchar] then
pointer.char = newchar
- set_attribute(pointer,a_exportstatus,char)
+ pointer[a_exportstatus] = char
end
end
end
@@ -333,9 +483,9 @@ local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
if pointer.subtype == left_fence_code then
- local a = has_attribute(pointer,mathsize)
+ local a = pointer[mathsize]
if a and a > 0 then
- set_attribute(pointer,mathsize,0)
+ pointer[mathsize] = 0
local d = pointer.delim
local df = d.small_fam
local id = font_of_family(df)
@@ -354,72 +504,133 @@ end
-- respacing
-local mathpunctuation = attributes.private("mathpunctuation")
-
-local respace = { } processors.respace = respace
-
-local chardata = characters.data
+-- local mathpunctuation = attributes.private("mathpunctuation")
+--
+-- local respace = { } processors.respace = respace
-- only [nd,ll,ul][po][nd,ll,ul]
-respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent
- pointer = parent
- if pointer and pointer.subtype == noad_ord then
- local a = has_attribute(pointer,mathpunctuation)
- if a and a > 0 then
- set_attribute(pointer,mathpunctuation,0)
- local current_nucleus = pointer.nucleus
- if current_nucleus.id == math_char then
- local current_char = current_nucleus.char
- local fc = chardata[current_char]
- fc = fc and fc.category
- if fc == "nd" or fc == "ll" or fc == "lu" then
- local next_noad = pointer.next
- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local nc = chardata[next_char]
- nc = nc and nc.category
- if nc == "po" then
- local last_noad = next_noad.next
- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then
- local last_nucleus = last_noad.nucleus
- if last_nucleus.id == math_char then
- local last_char = last_nucleus.char
- local lc = chardata[last_char]
- lc = lc and lc.category
- if lc == "nd" or lc == "ll" or lc == "lu" then
- local ord = new_node(math_noad) -- todo: pool
- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr
- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil
- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count
- --~ next_noad.attr = nil
- ord.next = last_noad
- pointer.next = ord
- free_node(next_noad)
- end
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-function handlers.respace(head,style,penalties)
- processnoads(head,respace,"respace")
- return true
-end
+-- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent
+-- pointer = parent
+-- if pointer and pointer.subtype == noad_ord then
+-- local a = pointer[mathpunctuation]
+-- if a and a > 0 then
+-- pointer[mathpunctuation] = 0
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_char = current_nucleus.char
+-- local fc = chardata[current_char]
+-- fc = fc and fc.category
+-- if fc == "nd" or fc == "ll" or fc == "lu" then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char then
+-- local next_char = next_nucleus.char
+-- local nc = chardata[next_char]
+-- nc = nc and nc.category
+-- if nc == "po" then
+-- local last_noad = next_noad.next
+-- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then
+-- local last_nucleus = last_noad.nucleus
+-- if last_nucleus.id == math_char then
+-- local last_char = last_nucleus.char
+-- local lc = chardata[last_char]
+-- lc = lc and lc.category
+-- if lc == "nd" or lc == "ll" or lc == "lu" then
+-- local ord = new_node(math_noad) -- todo: pool
+-- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr
+-- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil
+-- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count
+-- --~ next_noad.attr = nil
+-- ord.next = last_noad
+-- pointer.next = ord
+-- free_node(next_noad)
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+
+-- local comma = 0x002C
+-- local period = 0x002E
+--
+-- respace[math_char] = function(pointer,what,n,parent)
+-- pointer = parent
+-- if pointer and pointer.subtype == noad_punct then
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_char = current_nucleus.char
+-- local a = pointer[mathpunctuation]
+-- if not a or a == 0 then
+-- if current_char == comma then
+-- -- default tex: 2,5 or 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- default tex: 2.5 or 2. 5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- elseif a == 1 then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char and next_nucleus.char == 0 then
+-- nodes.remove(pointer,next_noad,true)
+-- end
+-- if current_char == comma then
+-- -- default tex: 2,5 or 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- default tex: 2.5 or 2. 5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- end
+-- elseif a == 2 then
+-- if current_char == comma or current_char == period then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char and next_nucleus.char == 0 then
+-- if current_char == comma then
+-- -- adaptive: 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- adaptive: 2. 5 --> 2. 5
+-- end
+-- nodes.remove(pointer,next_noad,true)
+-- else
+-- if current_char == comma then
+-- -- adaptive: 2,5 --> 2,5
+-- pointer.subtype = noad_ord
+-- elseif current_char == period then
+-- -- adaptive: 2.5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- function handlers.respace(head,style,penalties)
+-- processnoads(head,respace,"respace")
+-- return true
+-- end
-- The following code is dedicated to Luigi Scarso who pointed me
-- to the fact that \not= is not producing valid pdf-a code.
-- The code does not solve this for virtual characters but it does
-- a decent job on collapsing so that fonts that have the right
--- glyph will have a decent unicode point.
+-- glyph will have a decent unicode point. In the meantime this code
+-- has been moved elsewhere.
local collapse = { } processors.collapse = collapse
@@ -428,10 +639,22 @@ local mathpairs = characters.mathpairs
mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime)
mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime)
+mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
+mathpairs[0x222C] = { [0x222B] = 0x222D }
+
+mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars
+
+local validpair = {
+ [noad_rel] = true,
+ [noad_ord] = true,
+ [noad_opdisplaylimits] = true,
+ [noad_oplimits] = true,
+ [noad_opnolimits] = true,
+}
+
local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off
if parent then
- local subtype = parent.subtype
- if subtype == noad_rel or subtype == noad_ord then -- ord is new
+ if validpair[parent.subtype] then
local current_nucleus = parent.nucleus
if not parent.sub and not parent.sup and current_nucleus.id == math_char then
local current_char = current_nucleus.char
@@ -439,8 +662,7 @@ local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on
if mathpair then
local next_noad = parent.next
if next_noad and next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
- if next_subtype == noad_rel or next_subtype == noad_ord then -- ord is new
+ if validpair[next_noad.subtype] then
local next_nucleus = next_noad.nucleus
if next_nucleus.id == math_char then
local next_char = next_nucleus.char
@@ -451,7 +673,7 @@ local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on
local characters = fontcharacters[id]
if characters and characters[newchar] then
if trace_collapsing then
- report_collapsing("U+%05X + U+%05X => U+%05X",current_char,next_char,newchar)
+ report_collapsing("%U + %U => %U",current_char,next_char,newchar)
end
current_nucleus.char = newchar
local next_next_noad = next_noad.next
@@ -516,7 +738,7 @@ local function replace(pointer,what,n,parent)
nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
- report_normalizing("superscript: U+05X (%s) => U+05X (%s)",char,utfchar(char),s,utfchar(s))
+ report_normalizing("superscript %C becomes %C",char,s)
end
else
local s = subscripts[char]
@@ -532,7 +754,7 @@ local function replace(pointer,what,n,parent)
nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
- report_normalizing("subscript: U+05X (%s) => U+05X (%s)",char,utfchar(char),s,utfchar(s))
+ report_normalizing("subscript %C becomes %C",char,s)
end
else
break
@@ -583,15 +805,17 @@ statistics.register("math script replacements", function()
local n, t = 0, { }
for k, v in table.sortedpairs(replaced) do
n = n + v
- t[#t+1] = format("U+%05X:%s",k,utfchar(k))
+ t[#t+1] = formatters["%C"](k)
end
- return format("%s (n=%s)",concat(t," "),n)
+ return formatters["% t (n=%s)"](t,n)
end
end)
-- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
-- math alternates: (in lucidanova lgf: $ABC \mathalternate{italic} ABC$)
+-- todo: set alternate for specific symbols
+
local function initializemathalternates(tfmdata)
local goodies = tfmdata.goodies
if goodies then
@@ -603,7 +827,7 @@ local function initializemathalternates(tfmdata)
local alternates = mathgoodies and mathgoodies.alternates
if alternates then
if trace_goodies then
- report_goodies("loading alternates for font '%s'",tfmdata.properties.name)
+ report_goodies("loading alternates for font %a",tfmdata.properties.name)
end
local lastattribute, attributes = 0, { }
for k, v in next, alternates do
@@ -645,15 +869,19 @@ function mathematics.setalternate(fam,tag)
end
alternate[math_char] = function(pointer)
- local a = has_attribute(pointer,a_mathalternate)
+ local a = pointer[a_mathalternate]
if a and a > 0 then
- set_attribute(pointer,a_mathalternate,0)
+ pointer[a_mathalternate] = 0
local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
if alt then
+ if trace_alternates then
+ report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
+ tostring(what.feature),tostring(what.value),pointer.char,alt)
+ end
pointer.char = alt
end
end
@@ -665,154 +893,186 @@ function handlers.check(head,style,penalties)
return true
end
--- experiment (when not present fall back to fam 0) -- needs documentation
+-- italics: we assume that only characters matter
+--
+-- = we check for correction first because accessing nodes is slower
+-- = the actual glyph is not that important (we can control it with numbers)
--- 0-2 regular
--- 3-5 bold
--- 6-8 pseudobold
+local a_mathitalics = attributes.private("mathitalics")
-local families = { }
-local a_mathfamily = attributes.private("mathfamily")
-local boldmap = mathematics.boldmap
+local italics = { }
+local default_factor = 1/20
-local tracemap = { [0] =
- "regular",
- "regular",
- "regular",
- "bold",
- "bold",
- "bold",
- "pseudobold",
- "pseudobold",
- "pseudobold",
-}
+local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex)
-families[math_char] = function(pointer)
- if pointer.fam == 0 then
- local a = has_attribute(pointer,a_mathfamily)
- if a and a > 0 then
- set_attribute(pointer,a_mathfamily,0)
- if a > 5 then
- local char = pointer.char
- local bold = boldmap[char]
- local newa = a - 3
- if bold then
- set_attribute(pointer,exportstatus,char)
- pointer.char = bold
- if trace_families then
- report_families("replacing U+%05X by bold U+%05X, family %s (%s) becomes %s (%s)",
- char,bold,a,tracemap[a],newa,tracemap[newa])
- end
- else
- if trace_families then
- report_families("no bold replacement for U+%05X, family %s (%s) becomes %s (%s)",
- char,a,tracemap[a],newa,tracemap[newa])
- end
+ local correction, fromvisual
+
+ if method == 1 then
+ -- only font data triggered by fontitalics
+ local italics = fontitalics[font]
+ if italics then
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
end
- pointer.fam = newa
- else
- if trace_families then
- report_families("family of U+%05X becomes %s (%s)",
- pointer.char,a,tracemap[a])
+ end
+ end
+ elseif method == 2 then
+ -- only font data triggered by fontdata
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
+ end
+ end
+ elseif method == 3 then
+ -- only quad based by selective
+ local visual = chardata[char].visual
+ if not visual then
+ -- skip
+ elseif visual == "it" or visual == "bi" then
+ correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
+ if correction and correction ~= 0 then
+ return correction, true
+ end
+ end
+ elseif method == 4 then
+ -- combination of 1 and 3
+ local italics = fontitalics[font]
+ if italics then
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
end
- pointer.fam = a
end
- else
- -- pointer.fam = 0
end
- end
-end
-
-families[math_delim] = function(pointer)
- if pointer.small_fam == 0 then
- local a = has_attribute(pointer,a_mathfamily)
- if a and a > 0 then
- set_attribute(pointer,a_mathfamily,0)
- if a > 5 then
- -- no bold delimiters in unicode
- a = a - 3
+ if not correction then
+ local visual = chardata[char].visual
+ if not visual then
+ -- skip
+ elseif visual == "it" or visual == "bi" then
+ correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
+ if correction and correction ~= 0 then
+ return correction, true
+ end
end
- pointer.small_fam = a
- pointer.large_fam = a
- else
- pointer.small_fam = 0
- pointer.large_fam = 0
end
end
-end
-families[math_textchar] = families[math_char]
-
-function handlers.families(head,style,penalties)
- processnoads(head,families,"families")
- return true
end
--- italics: we assume that only characters matter
---
--- = we check for correction first because accessing nodes is slower
--- = the actual glyph is not that important (we can control it with numbers)
-
-local a_mathitalics = attributes.private("mathitalics")
+local function insert_kern(current,kern)
+ local sub = new_node(math_sub) -- todo: pool
+ local noad = new_node(math_noad) -- todo: pool
+ sub.head = kern
+ kern.next = noad
+ noad.nucleus = current
+ return sub
+end
-local italics = { }
-local default_factor = 1/20
+local setcolor = nodes.tracers.colors.set
+local italic_kern = new_kern
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+
+trackers.register("math.italics", function(v)
+ if v then
+ italic_kern = function(k,font)
+ local ex = 1.5 * fontexheights[font]
+ if k > 0 then
+ return setcolor(new_rule(k,ex,ex),c_positive_d)
+ else
+ return concat_nodes {
+ old_kern(k),
+ setcolor(new_rule(-k,ex,ex),c_negative_d),
+ old_kern(k),
+ }
+ end
+ end
+ else
+ italic_kern = new_kern
+ end
+end)
italics[math_char] = function(pointer,what,n,parent)
- local method = has_attribute(pointer,a_mathitalics)
+ local method = pointer[a_mathitalics]
if method and method > 0 then
local char = pointer.char
local font = font_of_family(pointer.fam) -- todo: table
- local correction
- if method == 1 then
- -- only font data triggered by fontitalics
- local italics = fontitalics[font]
- if italics then
- local character = fontcharacters[font][char]
- correction = character and character.italic_correction -- or character.italic (this one is for tex)
+ local correction, visual = getcorrection(method,font,char)
+ if correction then
+ local pid = parent.id
+ local sub, sup
+ if pid == math_noad then
+ sup = parent.sup
+ sub = parent.sub
end
- elseif method == 2 then
- -- only font data triggered by fontdata
- local character = fontcharacters[font][char]
- correction = character and character.italic_correction -- or character.italic (this one is for tex)
- elseif method == 3 then
- -- only quad based by selective
- local visual = chardata[char].visual
- if not visual then
- -- skip
- elseif visual == "it" or visual == "bi" then
- correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontquads[font]
- end
- elseif method == 4 then
- -- combination of 1 and 3
- local italics = fontitalics[font]
- if italics then
- local character = fontcharacters[font][char]
- correction = character and character.italic_correction -- or character.italic (this one is for tex)
- end
- if not correction then
- local visual = chardata[char].visual
- if not visual then
- -- skip
- elseif visual == "it" or visual == "bi" then
- correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontquads[font]
+ if sup or sub then
+ local subtype = parent.subtype
+ if subtype == noad_oplimits then
+ if sup then
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
+ end
+ end
+ if sub then
+ local correction = - correction
+ parent.sub = insert_kern(sub,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
+ end
+ end
+ else
+ if sup then
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
+ end
+ end
end
- end
- end
- if correction and correction ~= 0 then
- local next_noad = parent.next
- if next_noad and next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
- if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- if not chardata[next_char].italic then -- or category
- if trace_italics then
- report_italics("method %s: adding %s italic correction between %s (0x%05X) and %s (0x%05X)",
- method,number.points(correction),utfchar(char),char,utfchar(next_char),next_char)
+ else
+ local next_noad = parent.next
+ if not next_noad then
+ if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
+ if trace_italics then
+                            report_italics("method %a, adding %p italic correction between %C and end math",method,correction,char)
+ end
+ insert_node_after(parent,parent,italic_kern(correction,font))
+ end
+ elseif next_noad.id == math_noad then
+ local next_subtype = next_noad.subtype
+ if next_subtype == noad_punct or next_subtype == noad_ord then
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
+ local next_data = chardata[next_char]
+ local visual = next_data.visual
+ if visual == "it" or visual == "bi" then
+ -- if trace_italics then
+ -- report_italics("method %a, skipping %p italic correction between italic %C and italic %C",method,correction,char,next_char)
+ -- end
+ else
+ local category = next_data.category
+ if category == "nd" or category == "ll" or category == "lu" then
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction between italic %C and non italic %C",method,correction,char,next_char)
+ end
+ insert_node_after(parent,parent,italic_kern(correction,font))
+ -- elseif next_data.height > (fontexheights[font]/2) then
+ -- if trace_italics then
+ -- report_italics("method %a, adding %p italic correction between %C and ascending %C",method,correction,char,next_char)
+ -- end
+ -- insert_node_after(parent,parent,italic_kern(correction,font))
+ -- elseif trace_italics then
+ -- -- report_italics("method %a, skipping %p italic correction between %C and %C",method,correction,char,next_char)
+ end
end
- insert_node_after(parent,parent,new_kern(correction))
end
end
end
@@ -892,13 +1152,13 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
if variant then
pointer.char = variant
- set_attribute(pointer,exportstatus,char) -- we don't export the variant as it's visual markup
+ pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
if trace_variants then
- report_variants("variant (U+%05X,U+%05X) replaced by U+%05X",char,selector,variant)
+ report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
else
if trace_variants then
- report_variants("no variant (U+%05X,U+%05X)",char,selector)
+ report_variants("no variant (%U,%U)",char,selector)
end
end
next.prev = pointer
@@ -920,16 +1180,16 @@ function builders.kernel.mlist_to_hlist(head,style,penalties)
return mlist_to_hlist(head,style,penalties), true
end
---~ function builders.kernel.mlist_to_hlist(head,style,penalties)
---~ print("!!!!!!! BEFORE",penalties)
---~ for n in node.traverse(head) do print(n) end
---~ print("!!!!!!!")
---~ head = mlist_to_hlist(head,style,penalties)
---~ print("!!!!!!! AFTER")
---~ for n in node.traverse(head) do print(n) end
---~ print("!!!!!!!")
---~ return head, true
---~ end
+-- function builders.kernel.mlist_to_hlist(head,style,penalties)
+-- print("!!!!!!! BEFORE",penalties)
+-- for n in node.traverse(head) do print(n) end
+-- print("!!!!!!!")
+-- head = mlist_to_hlist(head,style,penalties)
+-- print("!!!!!!! AFTER")
+-- for n in node.traverse(head) do print(n) end
+-- print("!!!!!!!")
+-- return head, true
+-- end
tasks.new {
name = "math",
diff --git a/Master/texmf-dist/tex/context/base/math-stc.mkvi b/Master/texmf-dist/tex/context/base/math-stc.mkvi
new file mode 100644
index 00000000000..2dc2b2c22c1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-stc.mkvi
@@ -0,0 +1,780 @@
+%D \module
+%D [ file=math-stc,
+%D version=2012.12.29,
+%D title=\CONTEXT\ Math Macros,
+%D subtitle=Stackers,
+%D comment=This replaces math-arr and friends,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Math Macros / Stackers}
+
+\unprotect
+
+%D At some point the \MKII\ arrow mechanism has been converted to \MKIV, but we kept
+%D most of the logic. We now have a more generic variant dealing with extensibles.
+%D There are a few demands that we need to meet:
+%D
+%D \startitemize
+%D \startitem The width of the extensible needs to adapt itself automatically. \stopitem
+%D \startitem We need to be able to control horizontal and vertical offsets. \stopitem
+%D \startitem We'd best have a math as well as a text variant (which is handy for chemistry). \stopitem
+%D \startitem For historic reasons we need to deal with optional arguments in a special (reverse) way. \stopitem
+%D \startitem We need alternatives for extensibles on top, in the middle and at the bottom. \stopitem
+%D \stopitemize
+%D
+%D After I had experimented a bit with virtual characters for two headed arrows I
+%D discussed the issue with the Gyre folks and we came to the conclusion that it
+%D made sense to have real extensibles instead of constructing them out of snippets.
+%D After all, \OPENTYPE\ math provides for it. So, in December 2013 beta versions of
+%D Latin Modern and Gyre fonts became available that had these! Because we still want
+%D to support the traditional Latin Modern Virtual math font, those were extended
+%D with a couple of virtual extensibles as well.
+%D
+%D {\em For the moment we still have some mess here: we can deal with known dimensions, but
+%D fillers (like \type {\rightarrowfil}) don't work with \OPENTYPE\ extensibles yet
+%D because there is no way to let them stretch like leaders. At some point \LUATEX\
+%D might provide an auto||fit||to||encapsulated||box and if not I will cook up a \LUA\
+%D based variant.}
+%D
+%D We could mess with something like \type {$mid\limits^{top}_{bottom}$} but we like
+%D a bit more control. At some point we need to add some hacks to get exports
+%D working well.
+%D
+%D In the end we have a more flexible mechanism which also handles text variants.
+
+% possible improvements:
+%
+% - we could skip the left/right offsets when offset=normal, this saves some access time
+% at the lua end and some checking: use \mathhorizontalcode or \mathextensiblecode
+% but in practice arrows etc are not used that often
+
+\installcorenamespace {mathextensiblefallbacks}
+
+\def\math_stackers_fallback
+ {\hbox to \scratchwidth{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname}}
+ %{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname }
+
+\def\math_stackers_regular
+ {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}}
+
+\def\math_stackers_stretch % we don't have that one yet
+ {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
+
+\setvalue{\??mathextensiblefallbacks}{\hbox{\vrule\!!width\scratchwidth\!!height.1\exheight\!!depth\zeropoint}}
+
+\def\math_stackers_with_fallback#codepoint%
+ {\begingroup
+ \scratchunicode#codepoint\relax
+ \ifcase\mathextensiblecode\fam\scratchunicode\relax
+ \math_stackers_fallback
+ \else
+ \math_stackers_stretch
+ \fi
+ \endgroup}
+
+%D We don't really need this because we can assume that fonts have the right
+%D extensibles. If needed I will make a general virtual extender for \OPENTYPE\
+%D fonts.
+%D
+%D Because we have quite some control over positioning, we have somewhat extensive
+%D tracing built in.
+
+\let\math_stackers_top \relax
+\let\math_stackers_middle\relax
+\let\math_stackers_bottom\relax
+\let\math_stackers_skip \hskip
+
+\installtextracker
+ {math.stackers.texts}
+ {\let\math_stackers_top \filledhboxb
+ \let\math_stackers_middle\filledhboxr
+ \let\math_stackers_bottom\filledhboxg
+ \let\math_stackers_skip \math_stackers_skip_indeed}
+ {\let\math_stackers_top \relax
+ \let\math_stackers_middle\relax
+ \let\math_stackers_bottom\relax
+ \let\math_stackers_skip \hskip}
+
+\def\math_stackers_skip_indeed#amount%
+ {\filledhboxk{\unsetteststrut\strut\hskip#amount}} % \dontshowstruts
+
+%D We define a full featured command handler.
+
+\installcorenamespace {mathstackers}
+
+\installcommandhandler \??mathstackers {mathstackers} \??mathstackers
+
+\setupmathstackers
+ [%c!alternative=\v!text, % text | mathematics
+ \c!left=,
+ \c!right=,
+ \c!voffset=.25\exheight,
+ \c!hoffset=.5\emwidth,
+ \c!minheight=\exheight,
+ \c!mindepth=\zeropoint,
+ \c!minwidth=\emwidth,
+ \c!order=\v!normal,
+ \c!strut=,
+ %\c!color=, % todo: when I need it
+ \c!topcommand=,
+ \c!middlecommand=,
+ \c!bottomcommand=,
+ \c!offset=\v!normal, % normal | min | max
+ \c!location=\v!top] % none | normal | small | medium | big
+
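+%D A new category can be defined and tuned with this command handler, for
+%D instance (a sketch; \type {mystackers} and \type {myrightarrow} are just
+%D made||up names):
+%D
+%D \starttyping
+%D \definemathstackers
+%D   [mystackers]
+%D   [mathematics]
+%D   [hoffset=\emwidth,
+%D    location=middle]
+%D
+%D \definemathextensible [mystackers] [myrightarrow] ["2192]
+%D \stoptyping
+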
+%D We assume that the middle character (which can be an extensible) sits on
+%D top of the baseline by default.
+
+\installcorenamespace {mathstackerslocation}
+
+\letvalue{\??mathstackerslocation\v!top }\plusone % on top of baseline
+\letvalue{\??mathstackerslocation\v!high }\plustwo % 25 % down
+\letvalue{\??mathstackerslocation\v!middle }\plusthree % centered
+\letvalue{\??mathstackerslocation\v!low }\plusfour % 75 % down
+\letvalue{\??mathstackerslocation\v!bottom }\plusfive % below baseline
+\letvalue{\??mathstackerslocation }\zerocount
+
+%D First we implement the helper that deals with an extensible in the middle and
+%D top and|/|or bottom texts:
+
+\let\m_math_stackers_text_top \empty
+\let\m_math_stackers_text_bottom\empty
+\let\m_math_stackers_text_middle\empty
+
+\def\math_stackers_flushtext#command#text%
+ {\ifdim\scratchleftoffset >\zeropoint\math_stackers_skip\scratchleftoffset \fi
+ \ifx\p_strut\v!no \else
+ \strut
+ \fi
+ \mathstackersparameter#command#text%
+ \ifdim\scratchrightoffset>\zeropoint\math_stackers_skip\scratchrightoffset\fi}
+
+\def\math_stackers_toptext {\math_stackers_flushtext\c!topcommand \m_math_stackers_text_top }
+\def\math_stackers_bottomtext{\math_stackers_flushtext\c!bottomcommand\m_math_stackers_text_bottom}
+\def\math_stackers_middletext{\math_stackers_flushtext\c!middlecommand\m_math_stackers_text_middle}
+
+\def\math_stackers_content
+ {\ifcase\scratchcounter
+ \math_stackers_fallback
+ \or % left
+ \math_stackers_regular
+ \or % right
+ \math_stackers_regular
+ \or % horizontal
+ \math_stackers_regular
+ \else
+ \math_stackers_fallback
+ \fi}
+
+\def\math_stackers_check_unicode#codepoint%
+ {\scratchunicode#codepoint\relax
+ \scratchhoffset\mathstackersparameter\c!hoffset\relax
+ \scratchvoffset\mathstackersparameter\c!voffset\relax
+ \scratchcounter\mathhorizontalcode\fam\scratchunicode\relax % also sets \leftscratchoffset and \rightscratchoffset
+ \ifx\p_offset\v!max
+ % heads/tails + hoffset
+ \else\ifx\p_offset\v!min
+ % heads/tails - hoffset
+ \advance\scratchleftoffset-\scratchhoffset
+ \advance\scratchrightoffset-\scratchhoffset
+ \else % \v!normal
+ % hoffset
+ \scratchleftoffset\zeropoint
+ \scratchrightoffset\zeropoint
+ \fi\fi
+ \ifdim\scratchleftoffset<\zeropoint
+ \scratchleftoffset\zeropoint
+ \fi
+ \ifdim\scratchrightoffset<\zeropoint
+ \scratchrightoffset\zeropoint
+ \fi}
+
+\def\math_stackers_normalize_three
+ {\scratchheight\ht\scratchboxthree
+ \scratchdepth \dp\scratchboxthree
+ \scratchtopoffset \scratchheight
+ \scratchbottomoffset\scratchdepth
+ \scratchdimen\mathstackersparameter\c!minheight\relax
+ \ifdim\scratchheight<\scratchdimen
+ \scratchheight\scratchdimen
+ \ht\scratchboxthree\scratchheight
+ \fi
+ \scratchdimen\mathstackersparameter\c!mindepth\relax
+ \ifdim\scratchdepth<\scratchdimen
+ \scratchdepth\scratchdimen
+ \dp\scratchboxthree\scratchdepth
+ \fi
+ \advance\scratchtopoffset -\scratchheight
+ \advance\scratchbottomoffset-\scratchdepth
+ \ifdim\scratchtopoffset<\zeropoint
+ \scratchtopoffset\zeropoint
+ \fi
+ \ifdim\scratchbottomoffset<\zeropoint
+ \scratchbottomoffset\zeropoint
+ \fi}
+
+\unexpanded\def\math_stackers_triplet#method#category#codepoint#toptext#bottomtext%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \mathstackersparameter\c!left\relax
+ \ifmmode\mathrel\else\dontleavehmode\fi
+ {\edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location{\mathstackersparameter\c!location}%
+ \edef\p_order {\mathstackersparameter\c!order}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \ifx\p_order\v!reverse
+ \edef\m_math_stackers_text_top {#bottomtext}%
+ \edef\m_math_stackers_text_bottom{#toptext}%
+ \else
+ \edef\m_math_stackers_text_top {#toptext}%
+ \edef\m_math_stackers_text_bottom{#bottomtext}%
+ \fi
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ \ifcase#method\relax
+ \math_stackers_check_unicode{#codepoint}%
+ \else
+ \edef\m_math_stackers_text_middle{#codepoint}%
+ \fi
+ \ifx\m_math_stackers_text_top\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_toptext}%
+ \fi
+ \ifx\m_math_stackers_text_bottom\empty
+ \setbox\scratchboxtwo\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxtwo\hbox{\math_stackers_bottomtext}%
+ \fi
+ %
+ \ifcase#method\relax
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxtwo
+ \scratchboxone
+ \else
+ \scratchboxtwo
+ \fi
+ \relax
+ \else
+ \ifx\m_math_stackers_text_middle\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxtwo
+ \scratchboxone
+ \else\ifdim\wd\scratchboxtwo>\wd\scratchboxthree
+ \scratchboxtwo
+ \else
+ \scratchboxthree
+ \fi\fi
+ \relax
+ \fi
+ %
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \ifcase#method\relax
+ \setbox\scratchboxthree\math_stackers_content
+ \fi
+ %
+ \ifdim\wd\scratchboxone<\scratchwidth
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\box\scratchboxone\hss}%
+ \fi
+ \ifdim\wd\scratchboxtwo<\scratchwidth
+ \setbox\scratchboxtwo\hbox to \scratchwidth{\hss\box\scratchboxtwo\hss}%
+ \fi
+ \ifdim\wd\scratchboxthree<\scratchwidth
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
+ \fi
+ %
+ \ifcsname\??mathstackerslocation\p_location\endcsname
+ \ifcase\csname\??mathstackerslocation\p_location\endcsname\relax
+ \scratchdistance\zeropoint
+ \or
+ % top
+ \scratchdistance\zeropoint
+ \or
+ % high
+ \scratchdistance.25\htdp\scratchboxthree
+ \or
+ % centered
+ \scratchdistance.5\htdp\scratchboxthree
+ \or
+ % low
+ \scratchdistance.75\htdp\scratchboxthree
+ \or
+ % bottom
+ \scratchdistance\htdp\scratchboxthree
+ \else
+ \scratchdistance\zeropoint
+ \fi
+ \else
+ \scratchdistance\p_location\htdp\scratchboxthree
+ \fi
+ %
+ \ifzeropt\scratchdistance\else
+ \setbox\scratchboxthree\hbox{\lower\scratchdistance\box\scratchboxthree}%
+ \fi
+ %
+ \math_stackers_normalize_three
+ %
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ %
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \scratchoffset\dimexpr\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_top\bgroup
+ \raise\dimexpr\dp\scratchboxone+\scratchheight+\scratchoffset+\scratchtopoffset\relax
+ \box\scratchboxone
+ \egroup
+ \fi
+ %
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \scratchoffset\dimexpr\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
+ \box\scratchboxtwo
+ \egroup
+ \fi}%
+ \mathstackersparameter\c!right\relax
+ \endgroup}
+
+\unexpanded\def\definemathextensible
+ {\dotripleempty\math_stackers_define_normal}
+
+\def\math_stackers_define_normal[#1][#2][#3]% category name unicode
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_auto_normal{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_auto_normal\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\math_stackers_auto_normal#category#codepoint%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \scratchcounter#codepoint\relax
+ \dosingleempty\math_stackers_auto_normal_first}
+
+\unexpanded\def\math_stackers_auto_normal_first[#category]% [#2]% #2 gobble spaces
+ {\iffirstargument\edef\currentmathstackers{#category}\fi
+ \permitspacesbetweengroups
+ \dodoublegroupempty\math_stackers_auto_normal_second}
+
+\def\math_stackers_auto_normal_second#toptext#bottomtext%
+ {\math_stackers_triplet\zerocount\currentmathstackers\scratchcounter{#toptext}{#bottomtext}%
+ \endgroup}
+
+%D A few direct accessors:
+
+\unexpanded\def\mathextensible{\begingroup\dosingleempty\math_stackers_handle_math}
+\unexpanded\def\textextensible{\begingroup\dosingleempty\math_stackers_handle_text}
+
+\unexpanded\def\math_stackers_handle_math[#category]%
+ {\math_stackers_handle_extensible{\iffirstargument#category\else\v!mathematics\fi}} % will be defined later on
+
+\unexpanded\def\math_stackers_handle_text[#category]%
+ {\math_stackers_handle_extensible{\iffirstargument#category\else\v!text \fi}} % will be defined later on
+
+\def\math_stackers_handle_extensible#category#codepoint#toptext#bottomtext%
+ {\math_stackers_triplet\zerocount{#category}{#codepoint}{#toptext}{#bottomtext}%
+ \endgroup}
+
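+%D For example (a usage sketch; the code point is passed as a number):
+%D
+%D \starttyping
+%D $ a \mathextensible {"2192} {above} {below} b $
+%D x \textextensible {"003D} {above} {below} y
+%D \stoptyping
+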
+%D The next one deals with under and over extensibles (arrows mostly):
+
+\unexpanded\def\math_stackers_double#where#category#codepoint#text%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \mathstackersparameter\c!left\relax
+ \ifmmode\mathrel\else\dontleavehmode\fi
+ {\edef\currentmathstackers{#category}%
+ \edef\m_math_stackers_text_middle {#text}%
+ %
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location{\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ %
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ %
+ \math_stackers_check_unicode{#codepoint}%
+ %
+ \ifx\currentmathtext\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ \scratchwidth\wd\scratchboxthree
+ %
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \setbox\scratchboxtwo \math_stackers_content
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
+ %
+ \math_stackers_normalize_three
+ %
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ %
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \kern-\scratchwidth
+ \ifcase#where\relax
+ \math_stackers_top\bgroup
+ \raise\dimexpr\scratchheight+\scratchtopoffset\relax
+ \box\scratchboxtwo
+ \egroup
+ \else
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
+ \box\scratchboxtwo
+ \egroup
+ \fi
+ \fi}%
+ \mathstackersparameter\c!right\relax
+ \endgroup}
+
+\unexpanded\def\definemathoverextensible {\dotripleempty\math_extensiblies_define_over }
+\unexpanded\def\definemathunderextensible{\dotripleempty\math_extensiblies_define_under}
+
+\def\math_extensiblies_define_over[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_double\zerocount{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_double\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\def\math_extensiblies_define_under[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_double\plusone{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_double\plusone\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over }
+\unexpanded\def\mathunder{\begingroup\dosingleempty\math_stackers_handle_under}
+
+\def\math_stackers_handle_over[#category]%
+ {\math_stackers_handle_double\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
+
+\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
+ {\math_stackers_handle_double\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_handle_double#location#category#codepoint#text%
+ {\math_stackers_double#location{#category}{#codepoint}{#text}%
+ \endgroup}
+
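+%D For example (a usage sketch; \type {\overrightarrow} is one of the instances
+%D defined further down):
+%D
+%D \starttyping
+%D $ \mathover  {"2192} {x+y} $   % arrow on top of the text
+%D $ \mathunder {"2190} {x+y} $   % arrow below the text
+%D $ \overrightarrow {AB} $
+%D \stoptyping
+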
+%D Here is a bonus macro that takes three texts. It can be used to get consistent
+%D mixed usage.
+
+\unexpanded\def\mathtriplet
+ {\begingroup
+ \dosingleempty\math_stackers_handle_triplet}
+
+\def\math_stackers_handle_triplet[#category]#middletext#toptext#bottomtext%
+ {\math_stackers_triplet\plusone{\iffirstargument#category\else\currentmathstackers\fi}{#middletext}{#toptext}{#bottomtext}%
+ \endgroup}
+
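+%D For example (a usage sketch):
+%D
+%D \starttyping
+%D $ a \mathtriplet {=} {above} {below} b $
+%D \stoptyping
+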
+\unexpanded\def\definemathtriplet
+ {\dotripleempty\math_stackers_define_triplet}
+
+\def\math_stackers_define_triplet[#1][#2][#3]% category name default
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_auto_triplet_yes{#1}{#3}}%
+ \else\ifsecondargument
+ \setuevalue{#2}{\math_stackers_auto_triplet_nop{#1}}%
+ \else
+ \setuevalue{#1}{\math_stackers_auto_triplet_nop\noexpand\currentmathstackers}%
+ \fi\fi}
+
+\unexpanded\def\math_stackers_auto_triplet_yes#category#middletext%
+ {\begingroup
+ \edef\currentmathstackers {#category}%
+ \def \m_math_stackers_text_middle{#middletext}%
+ \dosingleempty\math_stackers_auto_triplet_yes_first}
+
+\unexpanded\def\math_stackers_auto_triplet_yes_first[#category]% [#2]% #2 gobble spaces
+ {\iffirstargument\edef\currentmathstackers{#category}\fi
+ \permitspacesbetweengroups
+ \dodoublegroupempty\math_stackers_auto_triplet_yes_second}
+
+\def\math_stackers_auto_triplet_yes_second#toptext#bottomtext%
+ {\math_stackers_triplet\plusone\currentmathstackers\m_math_stackers_text_middle{#toptext}{#bottomtext}%
+ \endgroup}
+
+\unexpanded\def\math_stackers_auto_triplet_nop#category%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \dosingleempty\math_stackers_auto_triplet_nop_first}
+
+\unexpanded\def\math_stackers_auto_triplet_nop_first[#category]% [#2]% #2 gobble spaces
+ {\iffirstargument\edef\currentmathstackers{#category}\fi
+ \permitspacesbetweengroups
+ \dotriplegroupempty\math_stackers_auto_triplet_nop_second}
+
+\def\math_stackers_auto_triplet_nop_second#middletext#toptext#bottomtext%
+ {\math_stackers_triplet\plusone\currentmathstackers{#middletext}{#toptext}{#bottomtext}%
+ \endgroup}
+
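+%D A named triplet can be predefined too, for instance (a sketch; the name
+%D \type {mytriplet} is made up):
+%D
+%D \starttyping
+%D \definemathtriplet [mathematics] [mytriplet]
+%D
+%D $ a \mytriplet {=} {above} {below} b $
+%D \stoptyping
+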
+%D Definitions:
+
+\definemathstackers
+ [\v!mathematics]
+ [\c!topcommand=\mathematics,
+ \c!middlecommand=\mathematics,
+ \c!bottomcommand=\mathematics]
+
+\definemathstackers
+ [\s!math]
+ [\v!mathematics]
+
+\definemathstackers
+ [\v!text]
+ [\v!mathematics]
+ [\c!topcommand=,
+ \c!middlecommand=\mathematics,
+ \c!bottomcommand=]
+
+\definemathstackers
+ [\v!reverse]
+ [\v!mathematics]
+ [\c!order=\v!reverse]
+
+\definemathstackers
+ [\v!top]
+ [\v!mathematics]
+ [\c!location=\v!top,
+ \c!strut=\v!no,
+ \c!middlecommand=\mathematics,
+ \c!hoffset=\zeropoint]
+
+\definemathstackers
+ [\v!bottom]
+ [\v!mathematics]
+ [\c!location=\v!top,
+ \c!strut=\v!no,
+ \c!middlecommand=\mathematics,
+ \c!hoffset=\zeropoint]
+
+% These are compatibility definitions, math only.
+
+\definemathstackers [\v!none] [\v!mathematics] [\c!hoffset=\zeropoint]
+\definemathstackers [\v!normal] [\v!mathematics] [\c!hoffset=0.5\emwidth] % the default
+\definemathstackers [\v!small] [\v!mathematics] [\c!hoffset=1\emwidth]
+\definemathstackers [\v!medium] [\v!mathematics] [\c!hoffset=1.5\emwidth]
+\definemathstackers [\v!big] [\v!mathematics] [\c!hoffset=2\emwidth]
+
+\definemathextensible [\v!reverse] [xrel] ["002D]
+\definemathextensible [\v!reverse] [xequal] ["003D]
+\definemathextensible [\v!reverse] [xleftarrow] ["2190]
+\definemathextensible [\v!reverse] [xrightarrow] ["2192]
+\definemathextensible [\v!reverse] [xtwoheadleftarrow] ["219E]
+\definemathextensible [\v!reverse] [xtwoheadrightarrow] ["21A0]
+\definemathextensible [\v!reverse] [xmapsto] ["21A6]
+\definemathextensible [\v!reverse] [xhookleftarrow] ["21A9]
+\definemathextensible [\v!reverse] [xhookrightarrow] ["21AA]
+\definemathextensible [\v!reverse] [xleftharpoondown] ["21BD]
+\definemathextensible [\v!reverse] [xleftharpoonup] ["21BC]
+\definemathextensible [\v!reverse] [xrightharpoondown] ["21C1]
+\definemathextensible [\v!reverse] [xrightharpoonup] ["21C0]
+\definemathextensible [\v!reverse] [xrightoverleftarrow] ["21C4]
+\definemathextensible [\v!reverse] [xleftrightharpoons] ["21CB]
+\definemathextensible [\v!reverse] [xrightleftharpoons] ["21CC]
+\definemathextensible [\v!reverse] [xtriplerel] ["2261]
+\definemathextensible [\v!reverse] [xleftrightarrow] ["27F7]
+\definemathextensible [\v!reverse] [xLeftarrow] ["27F8]
+\definemathextensible [\v!reverse] [xRightarrow] ["27F9]
+\definemathextensible [\v!reverse] [xLeftrightarrow] ["27FA]
+
+\definemathextensible [\v!mathematics] [mrel] ["002D]
+\definemathextensible [\v!mathematics] [mequal] ["003D]
+\definemathextensible [\v!mathematics] [mleftarrow] ["2190]
+\definemathextensible [\v!mathematics] [mrightarrow] ["2192]
+\definemathextensible [\v!mathematics] [mtwoheadleftarrow] ["219E]
+\definemathextensible [\v!mathematics] [mtwoheadrightarrow] ["21A0]
+\definemathextensible [\v!mathematics] [mmapsto] ["21A6]
+\definemathextensible [\v!mathematics] [mhookleftarrow] ["21A9]
+\definemathextensible [\v!mathematics] [mhookrightarrow] ["21AA]
+\definemathextensible [\v!mathematics] [mleftharpoondown] ["21BD]
+\definemathextensible [\v!mathematics] [mleftharpoonup] ["21BC]
+\definemathextensible [\v!mathematics] [mrightharpoondown] ["21C1]
+\definemathextensible [\v!mathematics] [mrightharpoonup] ["21C0]
+\definemathextensible [\v!mathematics] [mrightoverleftarrow] ["21C4]
+\definemathextensible [\v!mathematics] [mleftrightharpoons] ["21CB]
+\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
+\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
+\definemathextensible [\v!mathematics] [mleftrightarrow] ["27F7]
+\definemathextensible [\v!mathematics] [mLeftarrow] ["27F8]
+\definemathextensible [\v!mathematics] [mRightarrow] ["27F9]
+\definemathextensible [\v!mathematics] [mLeftrightarrow] ["27FA]
+
+\definemathextensible [\v!text] [trel] ["002D]
+\definemathextensible [\v!text] [tequal] ["003D]
+\definemathextensible [\v!text] [tleftarrow] ["2190]
+\definemathextensible [\v!text] [trightarrow] ["2192]
+\definemathextensible [\v!text] [ttwoheadleftarrow] ["219E]
+\definemathextensible [\v!text] [ttwoheadrightarrow] ["21A0]
+\definemathextensible [\v!text] [tmapsto] ["21A6]
+\definemathextensible [\v!text] [thookleftarrow] ["21A9]
+\definemathextensible [\v!text] [thookrightarrow] ["21AA]
+\definemathextensible [\v!text] [tleftharpoondown] ["21BD]
+\definemathextensible [\v!text] [tleftharpoonup] ["21BC]
+\definemathextensible [\v!text] [trightharpoondown] ["21C1]
+\definemathextensible [\v!text] [trightharpoonup] ["21C0]
+\definemathextensible [\v!text] [trightoverleftarrow] ["21C4]
+\definemathextensible [\v!text] [tleftrightharpoons] ["21CB]
+\definemathextensible [\v!text] [trightleftharpoons] ["21CC]
+\definemathextensible [\v!text] [ttriplerel] ["2261]
+\definemathextensible [\v!text] [tleftrightarrow] ["27F7]
+\definemathextensible [\v!text] [tLeftarrow] ["27F8]
+\definemathextensible [\v!text] [tRightarrow] ["27F9]
+\definemathextensible [\v!text] [tLeftrightarrow] ["27FA]
+
+\definemathoverextensible [\v!top] [overleftarrow] ["2190]
+\definemathoverextensible [\v!top] [overrightarrow] ["2192]
+\definemathoverextensible [\v!top] [overleftharpoondown] ["21BD]
+\definemathoverextensible [\v!top] [overleftharpoonup] ["21BC]
+\definemathoverextensible [\v!top] [overrightharpoondown] ["21C1]
+\definemathoverextensible [\v!top] [overrightharpoonup] ["21C0]
+\definemathoverextensible [\v!top] [overleftrightarrow] ["27F7]
+\definemathoverextensible [\v!top] [overtwoheadleftarrow] ["27F8]
+\definemathoverextensible [\v!top] [overtwoheadrightarrow] ["27F9]
+
+\definemathunderextensible [\v!bottom] [underleftarrow] ["2190]
+\definemathunderextensible [\v!bottom] [underrightarrow] ["2192]
+\definemathunderextensible [\v!bottom] [underleftharpoondown] ["21BD]
+\definemathunderextensible [\v!bottom] [underleftharpoonup] ["21BC]
+\definemathunderextensible [\v!bottom] [underrightharpoondown] ["21C1]
+\definemathunderextensible [\v!bottom] [underrightharpoonup] ["21C0]
+\definemathunderextensible [\v!bottom] [underleftrightarrow] ["27F7]
+\definemathunderextensible [\v!bottom] [undertwoheadleftarrow] ["27F8]
+\definemathunderextensible [\v!bottom] [undertwoheadrightarrow] ["27F9]
+
+%D Some bonus ones (for the moment here):
+
+\definemathstackers
+ [\v!chemistry]
+ [\c!offset=\v!max,
+ \c!left=\enspace,
+ \c!right=\enspace,
+ \c!hoffset=.5\emwidth]
+
+\definemathextensible [\v!chemistry] [cleftarrow] ["2190]
+\definemathextensible [\v!chemistry] [crightarrow] ["2192]
+\definemathextensible [\v!chemistry] [crightoverleftarrow] ["21C4]
+
+% for the moment:
+
+\def\math_stackers_hacked_fill#1#2#3%
+ {\mathematics
+ {\begingroup
+ \mathsurround\zeropoint
+ \thickmuskip \zeromuskip
+ \medmuskip \zeromuskip
+ \thinmuskip \zeromuskip
+ #1%
+ \mkern-7\onemuskip
+ \cleaders\mathstylehbox{\mkern-2\onemuskip#2\mkern-2\onemuskip}\hfill
+ \mkern-7\onemuskip
+ #3%
+ \endgroup}}
+
+% These will be defined in char-def as well:
+
+\unexpanded\def\rightarrowfill {\math_stackers_hacked_fill \relbar \relbar \rightarrow}
+\unexpanded\def\leftarrowfill {\math_stackers_hacked_fill \leftarrow \relbar \relbar }
+\unexpanded\def\rightoverleftarrowfill{\math_stackers_hacked_fill \ctxdoublearrowfillleftend\ctxdoublearrowfillmiddlepart\ctxdoublearrowfillrightend}
+\unexpanded\def\equalfill {\math_stackers_hacked_fill \Relbar \Relbar \Relbar}
+\unexpanded\def\Rightarrowfill {\math_stackers_hacked_fill \Relbar \Relbar \Rightarrow}
+\unexpanded\def\Leftarrowfill {\math_stackers_hacked_fill \Leftarrow \Relbar \Relbar}
+\unexpanded\def\Leftrightarrowfill {\math_stackers_hacked_fill \Leftarrow \Relbar \Rightarrow}
+\unexpanded\def\leftrightarrowfill {\math_stackers_hacked_fill \leftarrow \relbar \rightarrow}
+\unexpanded\def\mapstofill {\math_stackers_hacked_fill{\mapstochar\relbar} \relbar \rightarrow}
+\unexpanded\def\twoheadrightarrowfill {\math_stackers_hacked_fill \relbar \relbar \twoheadrightarrow}
+\unexpanded\def\twoheadleftarrowfill {\math_stackers_hacked_fill \twoheadleftarrow \relbar \relbar}
+\unexpanded\def\rightharpoondownfill {\math_stackers_hacked_fill \relbar \relbar \rightharpoondown}
+\unexpanded\def\rightharpoonupfill {\math_stackers_hacked_fill \relbar \relbar \rightharpoonup}
+\unexpanded\def\leftharpoondownfill {\math_stackers_hacked_fill \leftharpoondown \relbar \relbar}
+\unexpanded\def\leftharpoonupfill {\math_stackers_hacked_fill \leftharpoonup \relbar \relbar}
+\unexpanded\def\hookleftfill {\math_stackers_hacked_fill \leftarrow \relbar {\relbar\joinrel\rhook}}
+\unexpanded\def\hookrightfill {\math_stackers_hacked_fill{\lhook\joinrel\relbar} \relbar \rightarrow}
+\unexpanded\def\relfill {\math_stackers_hacked_fill \relbar \relbar \relbar}
+\unexpanded\def\triplerelfill {\math_stackers_hacked_fill \equiv \equiv \equiv}
+
+\unexpanded\def\singlebond {{\xrel}} % or \def\singlebond{{\xrel[2]}}
+\unexpanded\def\doublebond {{\xequal}}
+\unexpanded\def\triplebond {{\xtriplerel}}
+
+\unexpanded\def\defineextensiblefiller
+ {\dodoubleargument\math_stackers_define_filler}
+
+\def\math_stackers_define_filler[#1][#2]%
+ {\setuevalue{#1}{\leaders\number#2\hfill}}
+
+%D For the moment:
+
+\def\math_stackers_define_filler[#1][#2]%
+ {\expandafter\let\csname\??mathextensiblefallbacks\number#2\expandafter\endcsname\csname#1\endcsname
+ \expandafter\let\csname #1\expandafter\endcsname\csname#1\endcsname}
+
+\defineextensiblefiller [relfill] ["002D]
+\defineextensiblefiller [equalfill] ["003D]
+\defineextensiblefiller [leftarrowfill] ["2190]
+\defineextensiblefiller [rightarrowfill] ["2192]
+\defineextensiblefiller [twoheadleftarrowfill] ["219E]
+\defineextensiblefiller [twoheadrightarrowfill] ["21A0]
+\defineextensiblefiller [mapstofill] ["21A6]
+\defineextensiblefiller [hookleftarrowfill] ["21A9]
+\defineextensiblefiller [hookrightarrowfill] ["21AA]
+\defineextensiblefiller [leftharpoondownfill] ["21BD]
+\defineextensiblefiller [leftharpoonupfill] ["21BC]
+\defineextensiblefiller [rightharpoondownfill] ["21C1]
+\defineextensiblefiller [rightharpoonupfill] ["21C0]
+\defineextensiblefiller [rightoverleftarrowfill] ["21C4]
+\defineextensiblefiller [leftrightharpoonsfill] ["21CB]
+\defineextensiblefiller [rightleftharpoonsfill] ["21CC]
+\defineextensiblefiller [triplerelfill] ["2261]
+\defineextensiblefiller [leftrightarrowfill] ["27F7]
+\defineextensiblefiller [Leftarrowfill] ["27F8]
+\defineextensiblefiller [Rightarrowfill] ["27F9]
+\defineextensiblefiller [Leftrightarrowfill] ["27FA]
+
+%D Extra:
+
+\unexpanded\edef\singlebond{\mathematics{\mathsurround\zeropoint\char\number"002D}}
+\unexpanded\edef\doublebond{\mathematics{\mathsurround\zeropoint\char\number"003D}}
+\unexpanded\edef\triplebond{\mathematics{\mathsurround\zeropoint\char\number"2261}}
+
+% \mathchardef\singlebond"002D
+% \mathchardef\doublebond"003D
+% \mathchardef\triplebond"2261
+
+\protect \endinput
+
+% \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}
+% \limits\normalsuperscript{\box0}\normalsubscript{\box2}}%
+
+% $\Uoverdelimiter \defaultmathfamily "2194 {xxxx}$
+% $\Udelimiterover \defaultmathfamily "2194 {xxxx}$
+% $\Uunderdelimiter\defaultmathfamily "2194 {xxxx}$
+% $\Udelimiterunder\defaultmathfamily "2194 {xxxx}$
+% $\Udelimiterover \defaultmathfamily "219A {\Udelimiterunder \defaultmathfamily "219B {xxxx}}$
+
+% $a \mathrel{\mathop{\filledhboxr{mid}}}\limits^{\filledhboxg{\strut top}}_{\filledhboxb{\strut bottom}} b$
diff --git a/Master/texmf-dist/tex/context/base/math-tag.lua b/Master/texmf-dist/tex/context/base/math-tag.lua
index 0ac5b08975e..ab5902dd416 100644
--- a/Master/texmf-dist/tex/context/base/math-tag.lua
+++ b/Master/texmf-dist/tex/context/base/math-tag.lua
@@ -6,13 +6,13 @@ if not modules then modules = { } end modules ['math-tag'] = {
license = "see context related readme files"
}
+-- use lpeg matchers
+
local find, match = string.find, string.match
local insert, remove = table.insert, table.remove
local attributes, nodes = attributes, nodes
-local get_attribute = nodes.getattribute
-local set_attribute = nodes.setattribute
local set_attributes = nodes.setattributes
local traverse_nodes = node.traverse
@@ -64,19 +64,19 @@ local function processsubsup(start)
local nucleus, sup, sub = start.nucleus, start.sup, start.sub
if sub then
if sup then
- set_attribute(start,a_tagged,start_tagged("msubsup"))
+ start[a_tagged] = start_tagged("msubsup")
process(nucleus)
process(sub)
process(sup)
stop_tagged()
else
- set_attribute(start,a_tagged,start_tagged("msub"))
+ start[a_tagged] = start_tagged("msub")
process(nucleus)
process(sub)
stop_tagged()
end
elseif sup then
- set_attribute(start,a_tagged,start_tagged("msup"))
+ start[a_tagged] = start_tagged("msup")
process(nucleus)
process(sup)
stop_tagged()
@@ -97,7 +97,7 @@ process = function(start) -- we cannot use the processor as we have no finalizer
if id == math_char_code then
local char = start.char
-- check for code
- local a = get_attribute(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
a = { detail = a }
end
@@ -119,23 +119,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
else
tag = "mo"
end
--- print(start,a,tag)
- set_attribute(start,a_tagged,start_tagged(tag,a))
+ start[a_tagged] = start_tagged(tag,a)
stop_tagged()
break -- okay?
elseif id == math_textchar_code then
-- check for code
- local a = get_attribute(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
- set_attribute(start,a_tagged,start_tagged("ms"),{ detail = a })
+ start[a_tagged] = start_tagged("ms",{ detail = a })
else
- set_attribute(start,a_tagged,start_tagged("ms"))
+ start[a_tagged] = start_tagged("ms")
end
stop_tagged()
break
elseif id == math_delim_code then
-- check for code
- set_attribute(start,a_tagged,start_tagged("mo"))
+ start[a_tagged] = start_tagged("mo")
stop_tagged()
break
elseif id == math_style_code then
@@ -144,13 +143,13 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
elseif id == math_box_code or id == hlist_code or id == vlist_code then
-- keep an eye on math_box_code and see what ends up in there
- local attr = get_attribute(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
if last and find(last[#last],"formulacaption[:%-]") then
-- leave alone, will nicely move to the outer level
else
local text = start_tagged("mtext")
- set_attribute(start,a_tagged,text)
+ start[a_tagged] = text
local list = start.list
if not list then
-- empty list
@@ -168,7 +167,7 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local cache = { } -- we can have nested unboxed mess so best local to runner
for n in traverse_nodes(list) do
local id = n.id
- local aa = get_attribute(n,a_tagged)
+ local aa = n[a_tagged]
if aa then
local ac = cache[aa]
if not ac then
@@ -186,9 +185,9 @@ process = function(start) -- we cannot use the processor as we have no finalizer
end
cache[aa] = ac
end
- set_attribute(n,a_tagged,ac)
+ n[a_tagged] = ac
else
- set_attribute(n,a_tagged,text)
+ n[a_tagged] = text
end
if id == hlist_code or id == vlist_code then
runner(n.list)
@@ -202,23 +201,23 @@ process = function(start) -- we cannot use the processor as we have no finalizer
elseif id == math_sub_code then
local list = start.list
if list then
- local attr = get_attribute(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
local action = last and match(last[#last],"maction:(.-)%-")
if action and action ~= "" then
if actionstack[#actionstack] == action then
- set_attribute(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
else
insert(actionstack,action)
- set_attribute(start,a_tagged,start_tagged("mrow",{ detail = action }))
+ start[a_tagged] = start_tagged("mrow",{ detail = action })
process(list)
stop_tagged()
remove(actionstack)
end
else
- set_attribute(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
end
@@ -226,16 +225,16 @@ process = function(start) -- we cannot use the processor as we have no finalizer
elseif id == math_fraction_code then
local num, denom, left, right = start.num, start.denom, start.left, start.right
if left then
- set_attribute(left,a_tagged,start_tagged("mo"))
+ left[a_tagged] = start_tagged("mo")
process(left)
stop_tagged()
end
- set_attribute(start,a_tagged,start_tagged("mfrac"))
+ start[a_tagged] = start_tagged("mfrac")
process(num)
process(denom)
stop_tagged()
if right then
- set_attribute(right,a_tagged,start_tagged("mo"))
+ right[a_tagged] = start_tagged("mo")
process(right)
stop_tagged()
end
@@ -258,22 +257,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local subtype = start.subtype
if subtype == 1 then
-- left
- set_attribute(start,a_tagged,start_tagged("mfenced"))
+ start[a_tagged] = start_tagged("mfenced")
if delim then
- set_attribute(start,a_tagged,start_tagged("mleft"))
+ start[a_tagged] = start_tagged("mleft")
process(delim)
stop_tagged()
end
elseif subtype == 2 then
-- middle
if delim then
- set_attribute(start,a_tagged,start_tagged("mmiddle"))
+ start[a_tagged] = start_tagged("mmiddle")
process(delim)
stop_tagged()
end
elseif subtype == 3 then
if delim then
- set_attribute(start,a_tagged,start_tagged("mright"))
+ start[a_tagged] = start_tagged("mright")
process(delim)
stop_tagged()
end
@@ -289,12 +288,12 @@ process = function(start) -- we cannot use the processor as we have no finalizer
stop_tagged()
end
if degree then -- not good enough, can be empty mlist
- set_attribute(start,a_tagged,start_tagged("mroot"))
+ start[a_tagged] = start_tagged("mroot")
processsubsup(start)
process(degree)
stop_tagged()
else
- set_attribute(start,a_tagged,start_tagged("msqrt"))
+ start[a_tagged] = start_tagged("msqrt")
processsubsup(start)
stop_tagged()
end
@@ -302,19 +301,19 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local accent, bot_accent = start.accent, start.bot_accent
if bot_accent then
if accent then
- set_attribute(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
process(accent)
stop_tagged()
else
- set_attribute(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munder",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
stop_tagged()
end
elseif accent then
- set_attribute(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("mover",{ detail = "accent" })
processsubsup(start)
process(accent)
stop_tagged()
@@ -322,10 +321,10 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
end
elseif id == glue_code then
- set_attribute(start,a_tagged,start_tagged("mspace"))
+ start[a_tagged] = start_tagged("mspace")
stop_tagged()
else
- set_attribute(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] } ))
+ start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
stop_tagged()
end
start = start.next
@@ -335,9 +334,9 @@ end
function noads.handlers.tags(head,style,penalties)
local v_math = start_tagged("math")
local v_mrow = start_tagged("mrow")
- local v_mode = get_attribute(head,a_mathmode)
- set_attribute(head,a_tagged,v_math)
- set_attribute(head,a_tagged,v_mrow)
+ local v_mode = head[a_mathmode]
+ head[a_tagged] = v_math
+ head[a_tagged] = v_mrow
tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
process(head)
stop_tagged()
diff --git a/Master/texmf-dist/tex/context/base/math-ttv.lua b/Master/texmf-dist/tex/context/base/math-ttv.lua
new file mode 100644
index 00000000000..1f644e78834
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-ttv.lua
@@ -0,0 +1,801 @@
+if not modules then modules = { } end modules ['math-ttv'] = {
+ version = 1.001,
+ comment = "traditional tex vectors, companion to math-vfu.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+local vfmath = fonts.handlers.vf.math
+local setletters = vfmath.setletters
+local setdigits = vfmath.setdigits
+
+local mathencodings = fonts.encodings.math
+
+-- varphi is part of the alphabet, contrary to the other var*s
+
+mathencodings["large-to-small"] = {
+ [0x00028] = 0x00, -- (
+ [0x00029] = 0x01, -- )
+ [0x0005B] = 0x02, -- [
+ [0x0005D] = 0x03, -- ]
+ [0x0230A] = 0x04, -- lfloor
+ [0x0230B] = 0x05, -- rfloor
+ [0x02308] = 0x06, -- lceil
+ [0x02309] = 0x07, -- rceil
+ [0x0007B] = 0x08, -- {
+ [0x0007D] = 0x09, -- }
+ [0x027E8] = 0x0A, -- <
+ [0x027E9] = 0x0B, -- >
+ [0x0007C] = 0x0C, -- |
+ -- [0x0] = 0x0D, -- lVert rVert Vert
+ -- [0x0002F] = 0x0E, -- /
+ [0x0005C] = 0x0F, -- \
+ -- [0x0] = 0x3A, -- lgroup
+ -- [0x0] = 0x3B, -- rgroup
+ -- [0x0] = 0x3C, -- arrowvert
+ -- [0x0] = 0x3D, -- Arrowvert
+ [0x02195] = 0x3F, -- updownarrow
+ -- [0x0] = 0x40, -- lmoustache
+ -- [0x0] = 0x41, -- rmoustache
+ [0x0221A] = 0x70, -- sqrt
+ [0x021D5] = 0x77, -- Updownarrow
+ [0x02191] = 0x78, -- uparrow
+ [0x02193] = 0x79, -- downarrow
+ [0x021D1] = 0x7E, -- Uparrow
+ [0x021D3] = 0x7F, -- Downarrow
+ [0x0220F] = 0x59, -- prod
+ [0x02210] = 0x61, -- coprod
+ [0x02211] = 0x58, -- sum
+ [0x0222B] = 0x5A, -- intop
+ [0x0222E] = 0x49, -- ointop
+ -- [0xFE302] = 0x62, -- widehat
+ -- [0xFE303] = 0x65, -- widetilde
+ [0x00302] = 0x62, -- widehat
+ [0x00303] = 0x65, -- widetilde
+ [0x022C0] = 0x5E, -- bigwedge
+ [0x022C1] = 0x5F, -- bigvee
+ [0x022C2] = 0x5C, -- bigcap
+ [0x022C3] = 0x5B, -- bigcup
+ [0x02044] = 0x0E, -- /
+}
+
+-- Beware: these are (in cm/lm) below the baseline due to limitations in the
+-- tfm format, but the engine (combined with the mathclass) takes care of it.
+-- If we need them in textmode, we should make them virtual and move them up,
+-- but we're in no hurry with that.
+
+mathencodings["tex-ex"] = {
+ [0x0220F] = 0x51, -- prod
+ [0x02210] = 0x60, -- coprod
+ [0x02211] = 0x50, -- sum
+ [0x0222B] = 0x52, -- intop
+ [0x0222E] = 0x48, -- ointop
+ [0x022C0] = 0x56, -- bigwedge
+ [0x022C1] = 0x57, -- bigvee
+ [0x022C2] = 0x54, -- bigcap
+ [0x022C3] = 0x53, -- bigcup
+ [0x02A00] = 0x4A, -- bigodot -- fixed BJ
+ [0x02A01] = 0x4C, -- bigoplus
+ [0x02A02] = 0x4E, -- bigotimes
+ -- [0x02A03] = , -- bigudot --
+ [0x02A04] = 0x55, -- biguplus
+ [0x02A06] = 0x46, -- bigsqcup
+}
+
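+-- A small usage sketch (illustration only, nothing in this module depends on
+-- it): each vector maps a Unicode code point onto a slot in the traditional
+-- font, so a lookup like
+--
+--   local slot = mathencodings["tex-ex"][0x02211] -- 0x50, the big "sum"
+--
+-- gives the position that the virtual font builder in math-vfu.lua can use.
+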
+-- only math stuff is needed, since we always use an lm or gyre
+-- font as main font
+
+mathencodings["tex-mr"] = {
+ [0x00393] = 0x00, -- Gamma
+ [0x00394] = 0x01, -- Delta
+ [0x00398] = 0x02, -- Theta
+ [0x0039B] = 0x03, -- Lambda
+ [0x0039E] = 0x04, -- Xi
+ [0x003A0] = 0x05, -- Pi
+ [0x003A3] = 0x06, -- Sigma
+ [0x003A5] = 0x07, -- Upsilon
+ [0x003A6] = 0x08, -- Phi
+ [0x003A8] = 0x09, -- Psi
+ [0x003A9] = 0x0A, -- Omega
+-- [0x00060] = 0x12, -- [math]grave
+-- [0x000B4] = 0x13, -- [math]acute
+-- [0x002C7] = 0x14, -- [math]check
+-- [0x002D8] = 0x15, -- [math]breve
+-- [0x000AF] = 0x16, -- [math]bar
+-- [0x00021] = 0x21, -- !
+-- [0x00028] = 0x28, -- (
+-- [0x00029] = 0x29, -- )
+-- [0x0002B] = 0x2B, -- +
+-- [0x0002F] = 0x2F, -- /
+-- [0x0003A] = 0x3A, -- :
+-- [0x02236] = 0x3A, -- colon
+-- [0x0003B] = 0x3B, -- ;
+-- [0x0003C] = 0x3C, -- <
+-- [0x0003D] = 0x3D, -- =
+-- [0x0003E] = 0x3E, -- >
+-- [0x0003F] = 0x3F, -- ?
+ [0x00391] = 0x41, -- Alpha
+ [0x00392] = 0x42, -- Beta
+ [0x02145] = 0x44,
+ [0x00395] = 0x45, -- Epsilon
+ [0x00397] = 0x48, -- Eta
+ [0x00399] = 0x49, -- Iota
+ [0x0039A] = 0x4B, -- Kappa
+ [0x0039C] = 0x4D, -- Mu
+ [0x0039D] = 0x4E, -- Nu
+ [0x0039F] = 0x4F, -- Omicron
+ [0x003A1] = 0x52, -- Rho
+ [0x003A4] = 0x54, -- Tau
+ [0x003A7] = 0x58, -- Chi
+ [0x00396] = 0x5A, -- Zeta
+-- [0x0005B] = 0x5B, -- [
+-- [0x0005D] = 0x5D, -- ]
+-- [0x0005E] = 0x5E, -- [math]hat -- the text one
+ [0x00302] = 0x5E, -- [math]hat -- the real math one
+-- [0x002D9] = 0x5F, -- [math]dot
+ [0x02146] = 0x64,
+ [0x02147] = 0x65,
+-- [0x002DC] = 0x7E, -- [math]tilde -- the text one
+ [0x00303] = 0x7E, -- [math]tilde -- the real one
+-- [0x000A8] = 0x7F, -- [math]ddot
+}
+
+mathencodings["tex-mr-missing"] = {
+ [0x02236] = 0x3A, -- colon
+}
+
+mathencodings["tex-mi"] = {
+ [0x1D6E4] = 0x00, -- Gamma
+ [0x1D6E5] = 0x01, -- Delta
+ [0x1D6E9] = 0x02, -- Theta
+ [0x1D6F3] = 0x02, -- varTheta (not present in TeX)
+ [0x1D6EC] = 0x03, -- Lambda
+ [0x1D6EF] = 0x04, -- Xi
+ [0x1D6F1] = 0x05, -- Pi
+ [0x1D6F4] = 0x06, -- Sigma
+ [0x1D6F6] = 0x07, -- Upsilon
+ [0x1D6F7] = 0x08, -- Phi
+ [0x1D6F9] = 0x09, -- Psi
+ [0x1D6FA] = 0x0A, -- Omega
+ [0x1D6FC] = 0x0B, -- alpha
+ [0x1D6FD] = 0x0C, -- beta
+ [0x1D6FE] = 0x0D, -- gamma
+ [0x1D6FF] = 0x0E, -- delta
+ [0x1D716] = 0x0F, -- epsilon TODO: 1D716
+ [0x1D701] = 0x10, -- zeta
+ [0x1D702] = 0x11, -- eta
+ [0x1D703] = 0x12, -- theta TODO: 1D703
+ [0x1D704] = 0x13, -- iota
+ [0x1D705] = 0x14, -- kappa
+ [0x1D718] = 0x14, -- varkappa, not in tex fonts
+ [0x1D706] = 0x15, -- lambda
+ [0x1D707] = 0x16, -- mu
+ [0x1D708] = 0x17, -- nu
+ [0x1D709] = 0x18, -- xi
+ [0x1D70B] = 0x19, -- pi
+ [0x1D70C] = 0x1A, -- rho
+ [0x1D70E] = 0x1B, -- sigma
+ [0x1D70F] = 0x1C, -- tau
+ [0x1D710] = 0x1D, -- upsilon
+ [0x1D719] = 0x1E, -- phi
+ [0x1D712] = 0x1F, -- chi
+ [0x1D713] = 0x20, -- psi
+ [0x1D714] = 0x21, -- omega
+ [0x1D700] = 0x22, -- varepsilon (the other way around)
+ [0x1D717] = 0x23, -- vartheta
+ [0x1D71B] = 0x24, -- varpi
+ [0x1D71A] = 0x25, -- varrho
+ [0x1D70D] = 0x26, -- varsigma
+ [0x1D711] = 0x27, -- varphi (the other way around)
+ [0x021BC] = 0x28, -- leftharpoonup
+ [0x021BD] = 0x29, -- leftharpoondown
+ [0x021C0] = 0x2A, -- rightharpoonup
+ [0x021C1] = 0x2B, -- rightharpoondown
+ [0xFE322] = 0x2C, -- lhook (hook for combining arrows)
+ [0xFE323] = 0x2D, -- rhook (hook for combining arrows)
+ [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ
+ [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ
+ [0x022B3] = 0x2E, -- triangleright : cf lmmath this a cramped triangles / BJ / see *
+ [0x022B2] = 0x2F, -- triangleleft : cf lmmath this a cramped triangles / BJ / see *
+-- [0x00041] = 0x30, -- 0
+-- [0x00041] = 0x31, -- 1
+-- [0x00041] = 0x32, -- 2
+-- [0x00041] = 0x33, -- 3
+-- [0x00041] = 0x34, -- 4
+-- [0x00041] = 0x35, -- 5
+-- [0x00041] = 0x36, -- 6
+-- [0x00041] = 0x37, -- 7
+-- [0x00041] = 0x38, -- 8
+-- [0x00041] = 0x39, -- 9
+--~ [0x0002E] = 0x3A, -- .
+ [0x0002C] = 0x3B, -- ,
+ [0x0003C] = 0x3C, -- <
+-- [0x0002F] = 0x3D, -- /, slash, solidus
+ [0x02044] = 0x3D, -- / AM: Not sure
+ [0x0003E] = 0x3E, -- >
+ [0x022C6] = 0x3F, -- star
+ [0x02202] = 0x40, -- partial
+--
+ [0x0266D] = 0x5B, -- flat
+ [0x0266E] = 0x5C, -- natural
+ [0x0266F] = 0x5D, -- sharp
+ [0x02323] = 0x5E, -- smile
+ [0x02322] = 0x5F, -- frown
+ [0x02113] = 0x60, -- ell
+--
+ [0x1D6A4] = 0x7B, -- imath (TODO: also 0131)
+ [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237)
+ [0x02118] = 0x7D, -- wp
+ [0x020D7] = 0x7E, -- vec (TODO: not sure)
+-- 0x7F, -- (no idea what that could be)
+}
+
+mathencodings["tex-it"] = {
+-- [0x1D434] = 0x41, -- A
+ [0x1D6E2] = 0x41, -- Alpha
+-- [0x1D435] = 0x42, -- B
+ [0x1D6E3] = 0x42, -- Beta
+-- [0x1D436] = 0x43, -- C
+-- [0x1D437] = 0x44, -- D
+-- [0x1D438] = 0x45, -- E
+ [0x1D6E6] = 0x45, -- Epsilon
+-- [0x1D439] = 0x46, -- F
+-- [0x1D43A] = 0x47, -- G
+-- [0x1D43B] = 0x48, -- H
+ [0x1D6E8] = 0x48, -- Eta
+-- [0x1D43C] = 0x49, -- I
+ [0x1D6EA] = 0x49, -- Iota
+-- [0x1D43D] = 0x4A, -- J
+-- [0x1D43E] = 0x4B, -- K
+ [0x1D6EB] = 0x4B, -- Kappa
+-- [0x1D43F] = 0x4C, -- L
+-- [0x1D440] = 0x4D, -- M
+ [0x1D6ED] = 0x4D, -- Mu
+-- [0x1D441] = 0x4E, -- N
+ [0x1D6EE] = 0x4E, -- Nu
+-- [0x1D442] = 0x4F, -- O
+ [0x1D6F0] = 0x4F, -- Omicron
+-- [0x1D443] = 0x50, -- P
+ [0x1D6F2] = 0x50, -- Rho
+-- [0x1D444] = 0x51, -- Q
+-- [0x1D445] = 0x52, -- R
+-- [0x1D446] = 0x53, -- S
+-- [0x1D447] = 0x54, -- T
+ [0x1D6F5] = 0x54, -- Tau
+-- [0x1D448] = 0x55, -- U
+-- [0x1D449] = 0x56, -- V
+-- [0x1D44A] = 0x57, -- W
+-- [0x1D44B] = 0x58, -- X
+ [0x1D6F8] = 0x58, -- Chi
+-- [0x1D44C] = 0x59, -- Y
+-- [0x1D44D] = 0x5A, -- Z
+--
+-- [0x1D44E] = 0x61, -- a
+-- [0x1D44F] = 0x62, -- b
+-- [0x1D450] = 0x63, -- c
+-- [0x1D451] = 0x64, -- d
+-- [0x1D452] = 0x65, -- e
+-- [0x1D453] = 0x66, -- f
+-- [0x1D454] = 0x67, -- g
+-- [0x1D455] = 0x68, -- h
+ [0x0210E] = 0x68, -- Planck constant (h)
+-- [0x1D456] = 0x69, -- i
+-- [0x1D457] = 0x6A, -- j
+-- [0x1D458] = 0x6B, -- k
+-- [0x1D459] = 0x6C, -- l
+-- [0x1D45A] = 0x6D, -- m
+-- [0x1D45B] = 0x6E, -- n
+-- [0x1D45C] = 0x6F, -- o
+ [0x1D70A] = 0x6F, -- omicron
+-- [0x1D45D] = 0x70, -- p
+-- [0x1D45E] = 0x71, -- q
+-- [0x1D45F] = 0x72, -- r
+-- [0x1D460] = 0x73, -- s
+-- [0x1D461] = 0x74, -- t
+-- [0x1D462] = 0x75, -- u
+-- [0x1D463] = 0x76, -- v
+-- [0x1D464] = 0x77, -- w
+-- [0x1D465] = 0x78, -- x
+-- [0x1D466] = 0x79, -- y
+-- [0x1D467] = 0x7A, -- z
+}
+
+mathencodings["tex-ss"] = { }
+mathencodings["tex-tt"] = { }
+mathencodings["tex-bf"] = { }
+mathencodings["tex-bi"] = { }
+mathencodings["tex-fraktur"] = { }
+mathencodings["tex-fraktur-bold"] = { }
+
+mathencodings["tex-sy"] = {
+ [0x0002D] = 0x00, -- -
+ [0x02212] = 0x00, -- -
+-- [0x02201] = 0x00, -- complement
+-- [0x02206] = 0x00, -- increment
+-- [0x02204] = 0x00, -- not exists
+-- [0x000B7] = 0x01, -- cdot
+ [0x022C5] = 0x01, -- cdot
+ [0x000D7] = 0x02, -- times
+ [0x0002A] = 0x03, -- *
+ [0x02217] = 0x03, -- *
+ [0x000F7] = 0x04, -- div
+ [0x022C4] = 0x05, -- diamond
+ [0x000B1] = 0x06, -- pm
+ [0x02213] = 0x07, -- mp
+ [0x02295] = 0x08, -- oplus
+ [0x02296] = 0x09, -- ominus
+ [0x02297] = 0x0A, -- otimes
+ [0x02298] = 0x0B, -- oslash
+ [0x02299] = 0x0C, -- odot
+ [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo
+ [0x02218] = 0x0E, -- circ
+ [0x02219] = 0x0F, -- bullet
+ [0x02022] = 0x0F, -- bullet
+ [0x0224D] = 0x10, -- asymp
+ [0x02261] = 0x11, -- equiv
+ [0x02286] = 0x12, -- subseteq
+ [0x02287] = 0x13, -- supseteq
+ [0x02264] = 0x14, -- leq
+ [0x02265] = 0x15, -- geq
+ [0x02AAF] = 0x16, -- preceq
+-- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF
+ [0x02AB0] = 0x17, -- succeq
+-- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0
+ [0x0223C] = 0x18, -- sim
+ [0x02248] = 0x19, -- approx
+ [0x02282] = 0x1A, -- subset
+ [0x02283] = 0x1B, -- supset
+ [0x0226A] = 0x1C, -- ll
+ [0x0226B] = 0x1D, -- gg
+ [0x0227A] = 0x1E, -- prec
+ [0x0227B] = 0x1F, -- succ
+ [0x02190] = 0x20, -- leftarrow
+ [0x02192] = 0x21, -- rightarrow
+--~ [0xFE190] = 0x20, -- leftarrow
+--~ [0xFE192] = 0x21, -- rightarrow
+ [0x02191] = 0x22, -- uparrow
+ [0x02193] = 0x23, -- downarrow
+ [0x02194] = 0x24, -- leftrightarrow
+ [0x02197] = 0x25, -- nearrow
+ [0x02198] = 0x26, -- searrow
+ [0x02243] = 0x27, -- simeq
+ [0x021D0] = 0x28, -- Leftarrow
+ [0x021D2] = 0x29, -- Rightarrow
+ [0x021D1] = 0x2A, -- Uparrow
+ [0x021D3] = 0x2B, -- Downarrow
+ [0x021D4] = 0x2C, -- Leftrightarrow
+ [0x02196] = 0x2D, -- nwarrow
+ [0x02199] = 0x2E, -- swarrow
+ [0x0221D] = 0x2F, -- propto
+ [0x02032] = 0x30, -- prime
+ [0x0221E] = 0x31, -- infty
+ [0x02208] = 0x32, -- in
+ [0x0220B] = 0x33, -- ni
+ [0x025B3] = 0x34, -- triangle, bigtriangleup
+ [0x025BD] = 0x35, -- bigtriangledown
+ [0x00338] = 0x36, -- not
+-- 0x37, -- (beginning of arrow)
+ [0x02200] = 0x38, -- forall
+ [0x02203] = 0x39, -- exists
+ [0x000AC] = 0x3A, -- neg, lnot
+ [0x02205] = 0x3B, -- empty set
+ [0x0211C] = 0x3C, -- Re
+ [0x02111] = 0x3D, -- Im
+ [0x022A4] = 0x3E, -- top
+ [0x022A5] = 0x3F, -- bot, perp
+ [0x02135] = 0x40, -- aleph
+ [0x1D49C] = 0x41, -- script A
+ [0x0212C] = 0x42, -- script B
+ [0x1D49E] = 0x43, -- script C
+ [0x1D49F] = 0x44, -- script D
+ [0x02130] = 0x45, -- script E
+ [0x02131] = 0x46, -- script F
+ [0x1D4A2] = 0x47, -- script G
+ [0x0210B] = 0x48, -- script H
+ [0x02110] = 0x49, -- script I
+ [0x1D4A5] = 0x4A, -- script J
+ [0x1D4A6] = 0x4B, -- script K
+ [0x02112] = 0x4C, -- script L
+ [0x02133] = 0x4D, -- script M
+ [0x1D4A9] = 0x4E, -- script N
+ [0x1D4AA] = 0x4F, -- script O
+ [0x1D4AB] = 0x50, -- script P
+ [0x1D4AC] = 0x51, -- script Q
+ [0x0211B] = 0x52, -- script R
+ [0x1D4AE] = 0x53, -- script S
+ [0x1D4AF] = 0x54, -- script T
+ [0x1D4B0] = 0x55, -- script U
+ [0x1D4B1] = 0x56, -- script V
+ [0x1D4B2] = 0x57, -- script W
+ [0x1D4B3] = 0x58, -- script X
+ [0x1D4B4] = 0x59, -- script Y
+ [0x1D4B5] = 0x5A, -- script Z
+ [0x0222A] = 0x5B, -- cup
+ [0x02229] = 0x5C, -- cap
+ [0x0228E] = 0x5D, -- uplus
+ [0x02227] = 0x5E, -- wedge, land
+ [0x02228] = 0x5F, -- vee, lor
+ [0x022A2] = 0x60, -- vdash
+ [0x022A3] = 0x61, -- dashv
+ [0x0230A] = 0x62, -- lfloor
+ [0x0230B] = 0x63, -- rfloor
+ [0x02308] = 0x64, -- lceil
+ [0x02309] = 0x65, -- rceil
+ [0x0007B] = 0x66, -- {, lbrace
+ [0x0007D] = 0x67, -- }, rbrace
+ [0x027E8] = 0x68, -- <, langle
+ [0x027E9] = 0x69, -- >, rangle
+ [0x0007C] = 0x6A, -- |, mid, lvert, rvert
+ [0x02225] = 0x6B, -- parallel
+ -- [0x0 ] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert
+ [0x02195] = 0x6C, -- updownarrow
+ [0x021D5] = 0x6D, -- Updownarrow
+ [0x0005C] = 0x6E, -- \, backslash, setminus
+ [0x02216] = 0x6E, -- setminus
+ [0x02240] = 0x6F, -- wr
+ [0x0221A] = 0x70, -- sqrt. AM: Check surd??
+ [0x02A3F] = 0x71, -- amalg
+ [0x1D6FB] = 0x72, -- nabla
+-- [0x0222B] = 0x73, -- smallint (TODO: what about intop?)
+ [0x02294] = 0x74, -- sqcup
+ [0x02293] = 0x75, -- sqcap
+ [0x02291] = 0x76, -- sqsubseteq
+ [0x02292] = 0x77, -- sqsupseteq
+ [0x000A7] = 0x78, -- S
+ [0x02020] = 0x79, -- dagger, dag
+ [0x02021] = 0x7A, -- ddagger, ddag
+ [0x000B6] = 0x7B, -- P
+ [0x02663] = 0x7C, -- clubsuit
+ [0x02662] = 0x7D, -- diamondsuit
+ [0x02661] = 0x7E, -- heartsuit
+ [0x02660] = 0x7F, -- spadesuit
+ [0xFE321] = 0x37, -- mapstochar
+
+ [0xFE325] = 0x30, -- prime 0x02032
+}
+
+-- The names in masm10.enc can be trusted best and are shown in the first
+-- column, while in the second column we show the tex/ams names. As usual
+-- it costs hours to figure out such a table.
+
+mathencodings["tex-ma"] = {
+ [0x022A1] = 0x00, -- squaredot \boxdot
+ [0x0229E] = 0x01, -- squareplus \boxplus
+ [0x022A0] = 0x02, -- squaremultiply \boxtimes
+ [0x025A1] = 0x03, -- square \square \Box
+ [0x025A0] = 0x04, -- squaresolid \blacksquare
+ [0x025AA] = 0x05, -- squaresmallsolid \centerdot
+ [0x022C4] = 0x06, -- diamond \Diamond \lozenge
+ [0x02666] = 0x07, -- diamondsolid \blacklozenge
+ [0x021BB] = 0x08, -- clockwise \circlearrowright
+ [0x021BA] = 0x09, -- anticlockwise \circlearrowleft
+ [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons
+ [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons
+ [0x0229F] = 0x0C, -- squareminus \boxminus
+ [0x022A9] = 0x0D, -- forces \Vdash
+ [0x022AA] = 0x0E, -- forcesbar \Vvdash
+ [0x022A8] = 0x0F, -- satisfies \vDash
+ [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow
+ [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow
+ [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows
+ [0x021C9] = 0x13, -- dblarrowright \rightrightarrows
+ [0x021C8] = 0x14, -- dblarrowup \upuparrows
+ [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows
+ [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction
+ [0x021C2] = 0x17, -- harpoondownright \downharpoonright
+ [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft
+ [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft
+ [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail
+ [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail
+ [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows
+-- [0x021C5] = 0x00, -- \updownarrows (missing in lm)
+ [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows
+ [0x021B0] = 0x1E, -- shiftleft \Lsh
+ [0x021B1] = 0x1F, -- shiftright \Rsh
+ [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow
+ [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow
+ [0x021AB] = 0x22, -- curlyleft \looparrowleft
+ [0x021AC] = 0x23, -- curlyright \looparrowright
+ [0x02257] = 0x24, -- circleequal \circeq
+ [0x0227F] = 0x25, -- followsorequal \succsim
+ [0x02273] = 0x26, -- greaterorsimilar \gtrsim
+ [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox
+ [0x022B8] = 0x28, -- multimap \multimap
+ [0x02234] = 0x29, -- therefore \therefore
+ [0x02235] = 0x2A, -- because \because
+ [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot
+ [0x0225C] = 0x2C, -- defines \triangleq
+ [0x0227E] = 0x2D, -- precedesorequal \precsim
+ [0x02272] = 0x2E, -- lessorsimilar \lesssim
+ [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox
+ [0x02A95] = 0x30, -- equalorless \eqslantless
+ [0x02A96] = 0x31, -- equalorgreater \eqslantgtr
+ [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec
+ [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc
+ [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq
+ [0x02266] = 0x35, -- lessdblequal \leqq
+ [0x02A7D] = 0x36, -- lessorequalslant \leqslant
+ [0x02276] = 0x37, -- lessorgreater \lessgtr
+ [0x02035] = 0x38, -- primereverse \backprime
+ -- [0x0] = 0x39, -- axisshort \dabar
+ [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq
+ [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq
+ [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq
+ [0x02267] = 0x3D, -- greaterdblequal \geqq
+ [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant
+ [0x02277] = 0x3F, -- greaterorless \gtrless
+ [0x0228F] = 0x40, -- squareimage \sqsubset
+ [0x02290] = 0x41, -- squareoriginal \sqsupset
+ -- wrong: see **
+ -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
+ -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
+ -- cf lm
+ [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq
+ [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq
+ --
+ [0x02605] = 0x46, -- star \bigstar
+ [0x0226C] = 0x47, -- between \between
+ [0x025BC] = 0x48, -- triangledownsld \blacktriangledown
+ [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright
+ [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft
+ -- [0x0] = 0x4B, -- arrowaxisright
+ -- [0x0] = 0x4C, -- arrowaxisleft
+ [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle
+ [0x025B2] = 0x4E, -- trianglesolid \blacktriangle
+ [0x025BD] = 0x4F, -- triangleinv \triangledown
+ [0x02256] = 0x50, -- ringinequal \eqcirc
+ [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr
+ [0x022DB] = 0x52, -- greaterlessequal \gtreqless
+ [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr
+ [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless
+ [0x000A5] = 0x55, -- Yen \yen
+ [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow
+ [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow
+ [0x02713] = 0x58, -- check \checkmark
+ [0x022BB] = 0x59, -- orunderscore \veebar
+ [0x022BC] = 0x5A, -- nand \barwedge
+ [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge
+ [0x02220] = 0x5C, -- angle \angle
+ [0x02221] = 0x5D, -- measuredangle \measuredangle
+ [0x02222] = 0x5E, -- sphericalangle \sphericalangle
+ -- [0x0] = 0x5F, -- proportional \varpropto
+ -- [0x0] = 0x60, -- smile \smallsmile
+ -- [0x0] = 0x61, -- frown \smallfrown
+ [0x022D0] = 0x62, -- subsetdbl \Subset
+ [0x022D1] = 0x63, -- supersetdbl \Supset
+ [0x022D3] = 0x64, -- uniondbl \doublecup \Cup
+ [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap
+ [0x022CF] = 0x66, -- uprise \curlywedge
+ [0x022CE] = 0x67, -- downfall \curlyvee
+ [0x022CB] = 0x68, -- multiopenleft \leftthreetimes
+ [0x022CC] = 0x69, -- multiopenright \rightthreetimes
+ [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq
+ [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq
+ [0x0224F] = 0x6C, -- difference \bumpeq
+ [0x0224E] = 0x6D, -- geomequivalent \Bumpeq
+ [0x022D8] = 0x6E, -- muchless \lll \llless
+ [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr
+ [0x0231C] = 0x70, -- rightanglenw \ulcorner
+ [0x0231D] = 0x71, -- rightanglene \urcorner
+ [0x024C7] = 0x72, -- circleR \circledR
+ [0x024C8] = 0x73, -- circleS \circledS
+ [0x022D4] = 0x74, -- fork \pitchfork
+ [0x02214] = 0x75, -- dotplus \dotplus
+ [0x0223D] = 0x76, -- revsimilar \backsim
+ [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq.
+ [0x0231E] = 0x78, -- rightanglesw \llcorner
+ [0x0231F] = 0x79, -- rightanglese \lrcorner
+ [0x02720] = 0x7A, -- maltesecross \maltese
+ [0x02201] = 0x7B, -- complement \complement
+ [0x022BA] = 0x7C, -- intercal \intercal
+ [0x0229A] = 0x7D, -- circlering \circledcirc
+ [0x0229B] = 0x7E, -- circleasterisk \circledast
+ [0x0229D] = 0x7F, -- circleminus \circleddash
+}
+
+mathencodings["tex-mb"] = {
+ -- [0x0] = 0x00, -- lessornotequal \lvertneqq
+ -- [0x0] = 0x01, -- greaterornotequal \gvertneqq
+ [0x02270] = 0x02, -- notlessequal \nleq
+ [0x02271] = 0x03, -- notgreaterequal \ngeq
+ [0x0226E] = 0x04, -- notless \nless
+ [0x0226F] = 0x05, -- notgreater \ngtr
+ [0x02280] = 0x06, -- notprecedes \nprec
+ [0x02281] = 0x07, -- notfollows \nsucc
+ [0x02268] = 0x08, -- lessornotdbleql \lneqq
+ [0x02269] = 0x09, -- greaterornotdbleql \gneqq
+ -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant
+ -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant
+ [0x02A87] = 0x0C, -- lessnotequal \lneq
+ [0x02A88] = 0x0D, -- greaternotequal \gneq
+ -- [0x0] = 0x0E, -- notprecedesoreql \npreceq
+ -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq
+ [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim
+ [0x022E9] = 0x11, -- followornoteqvlnt \succnsim
+ [0x022E6] = 0x12, -- lessornotsimilar \lnsim
+ [0x022E7] = 0x13, -- greaterornotsimilar \gnsim
+ -- [0x0] = 0x14, -- notlessdblequal \nleqq
+ -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq
+ [0x02AB5] = 0x16, -- precedenotslnteql \precneqq
+ [0x02AB6] = 0x17, -- follownotslnteql \succneqq
+ [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox
+ [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox
+ [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox
+ [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox
+ [0x02241] = 0x1C, -- notsimilar \nsim
+ [0x02247] = 0x1D, -- notapproxequal \ncong
+ -- [0x0] = 0x1E, -- upslope \diagup
+ -- [0x0] = 0x1F, -- downslope \diagdown
+ -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq
+ -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq
+ -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq
+ -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq
+ [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq
+ [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq
+ -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq
+ -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq
+ [0x0228A] = 0x28, -- subsetnoteql \subsetneq
+ [0x0228B] = 0x29, -- supersetnoteql \supsetneq
+ [0x02288] = 0x2A, -- notsubseteql \nsubseteq
+ [0x02289] = 0x2B, -- notsuperseteql \nsupseteq
+ [0x02226] = 0x2C, -- notparallel \nparallel
+ [0x02224] = 0x2D, -- notbar \nmid \ndivides
+ -- [0x0] = 0x2E, -- notshortbar \nshortmid
+ -- [0x0] = 0x2F, -- notshortparallel \nshortparallel
+ [0x022AC] = 0x30, -- notturnstile \nvdash
+ [0x022AE] = 0x31, -- notforces \nVdash
+ [0x022AD] = 0x32, -- notsatisfies \nvDash
+ [0x022AF] = 0x33, -- notforcesextra \nVDash
+ [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq
+ [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq
+ [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft
+ [0x022EB] = 0x37, -- nottriangleright \ntriangleright
+ [0x0219A] = 0x38, -- notarrowleft \nleftarrow
+ [0x0219B] = 0x39, -- notarrowright \nrightarrow
+ [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow
+ [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow
+ [0x021CE] = 0x3C, -- notdblarrowboth \nLeftrightarrow
+ [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow
+ [0x022C7] = 0x3E, -- dividemultiply \divideontimes
+ [0x02300] = 0x3F, -- diametersign \varnothing
+ [0x02204] = 0x40, -- notexistential \nexists
+ [0x1D538] = 0x41, -- A (blackboard A)
+ [0x1D539] = 0x42, -- B
+ [0x02102] = 0x43, -- C
+ [0x1D53B] = 0x44, -- D
+ [0x1D53C] = 0x45, -- E
+ [0x1D53D] = 0x46, -- F
+ [0x1D53E] = 0x47, -- G
+ [0x0210D] = 0x48, -- H
+ [0x1D540] = 0x49, -- I
+ [0x1D541] = 0x4A, -- J
+ [0x1D542] = 0x4B, -- K
+ [0x1D543] = 0x4C, -- L
+ [0x1D544] = 0x4D, -- M
+ [0x02115] = 0x4E, -- N
+ [0x1D546] = 0x4F, -- O
+ [0x02119] = 0x50, -- P
+ [0x0211A] = 0x51, -- Q
+ [0x0211D] = 0x52, -- R
+ [0x1D54A] = 0x53, -- S
+ [0x1D54B] = 0x54, -- T
+ [0x1D54C] = 0x55, -- U
+ [0x1D54D] = 0x56, -- V
+ [0x1D54E] = 0x57, -- W
+ [0x1D54F] = 0x58, -- X
+ [0x1D550] = 0x59, -- Y
+ [0x02124] = 0x5A, -- Z (blackboard Z)
+ [0x02132] = 0x60, -- finv \Finv
+ [0x02141] = 0x61, -- fmir \Game
+ -- [0x0] = 0x62, tildewide
+ -- [0x0] = 0x63, tildewider
+ -- [0x0] = 0x64, Finv
+ -- [0x0] = 0x65, Gmir
+ [0x02127] = 0x66, -- Omegainv \mho
+ [0x000F0] = 0x67, -- eth \eth
+ [0x02242] = 0x68, -- equalorsimilar \eqsim
+ [0x02136] = 0x69, -- beth \beth
+ [0x02137] = 0x6A, -- gimel \gimel
+ [0x02138] = 0x6B, -- daleth \daleth
+ [0x022D6] = 0x6C, -- lessdot \lessdot
+ [0x022D7] = 0x6D, -- greaterdot \gtrdot
+ [0x022C9] = 0x6E, -- multicloseleft \ltimes
+ [0x022CA] = 0x6F, -- multicloseright \rtimes
+ -- [0x0] = 0x70, -- barshort \shortmid
+ -- [0x0] = 0x71, -- parallelshort \shortparallel
+ -- [0x02216] = 0x72, -- integerdivide \smallsetminus (2216 already part of tex-sy)
+ -- [0x0] = 0x73, -- similar \thicksim
+ -- [0x0] = 0x74, -- approxequal \thickapprox
+ [0x0224A] = 0x75, -- approxorequal \approxeq
+ [0x02AB8] = 0x76, -- followsorequal \succapprox
+ [0x02AB7] = 0x77, -- precedesorequal \precapprox
+ [0x021B6] = 0x78, -- archleftdown \curvearrowleft
+ [0x021B7] = 0x79, -- archrightdown \curvearrowright
+ [0x003DC] = 0x7A, -- Digamma \digamma
+ [0x003F0] = 0x7B, -- kappa \varkappa
+ [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k)
+ [0x0210F] = 0x7D, -- planckover2pi \hslash % 0x7D
+ [0x00127] = 0x7E, -- planckover2pi1 \hbar % 0x7E
+ [0x003F6] = 0x7F, -- epsiloninv \backepsilon
+}
+
+mathencodings["tex-mc"] = {
+ -- this file has no tfm so it gets mapped in the private space
+ [0xFE324] = "mapsfromchar",
+}
+
+mathencodings["tex-fraktur"] = {
+-- [0x1D504] = 0x41, -- A (fraktur A)
+-- [0x1D505] = 0x42, -- B
+ [0x0212D] = 0x43, -- C
+-- [0x1D507] = 0x44, -- D
+-- [0x1D508] = 0x45, -- E
+-- [0x1D509] = 0x46, -- F
+-- [0x1D50A] = 0x47, -- G
+ [0x0210C] = 0x48, -- H
+ [0x02111] = 0x49, -- I
+-- [0x1D50D] = 0x4A, -- J
+-- [0x1D50E] = 0x4B, -- K
+-- [0x1D50F] = 0x4C, -- L
+-- [0x1D510] = 0x4D, -- M
+-- [0x1D511] = 0x4E, -- N
+-- [0x1D512] = 0x4F, -- O
+-- [0x1D513] = 0x50, -- P
+-- [0x1D514] = 0x51, -- Q
+ [0x0211C] = 0x52, -- R
+-- [0x1D516] = 0x53, -- S
+-- [0x1D517] = 0x54, -- T
+-- [0x1D518] = 0x55, -- U
+-- [0x1D519] = 0x56, -- V
+-- [0x1D51A] = 0x57, -- W
+-- [0x1D51B] = 0x58, -- X
+-- [0x1D51C] = 0x59, -- Y
+ [0x02128] = 0x5A, -- Z (fraktur Z)
+-- [0x1D51E] = 0x61, -- a (fraktur a)
+-- [0x1D51F] = 0x62, -- b
+-- [0x1D520] = 0x63, -- c
+-- [0x1D521] = 0x64, -- d
+-- [0x1D522] = 0x65, -- e
+-- [0x1D523] = 0x66, -- f
+-- [0x1D524] = 0x67, -- g
+-- [0x1D525] = 0x68, -- h
+-- [0x1D526] = 0x69, -- i
+-- [0x1D527] = 0x6A, -- j
+-- [0x1D528] = 0x6B, -- k
+-- [0x1D529] = 0x6C, -- l
+-- [0x1D52A] = 0x6D, -- m
+-- [0x1D52B] = 0x6E, -- n
+-- [0x1D52C] = 0x6F, -- o
+-- [0x1D52D] = 0x70, -- p
+-- [0x1D52E] = 0x71, -- q
+-- [0x1D52F] = 0x72, -- r
+-- [0x1D530] = 0x73, -- s
+-- [0x1D531] = 0x74, -- t
+-- [0x1D532] = 0x75, -- u
+-- [0x1D533] = 0x76, -- v
+-- [0x1D534] = 0x77, -- w
+-- [0x1D535] = 0x78, -- x
+-- [0x1D536] = 0x79, -- y
+-- [0x1D537] = 0x7A, -- z
+}
+
+-- now that all other vectors are defined ...
+
+setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E)
+setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA)
+setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A)
+setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)
+setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482)
+setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E)
+setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586)
+
+setdigits (mathencodings, "tex-ss", 0x1D7E2)
+setdigits (mathencodings, "tex-tt", 0x1D7F6)
+setdigits (mathencodings, "tex-bf", 0x1D7CE)
+
+-- setdigits (mathencodings, "tex-bi", 0x1D7CE)
+
+-- todo: add ss, tt, bf etc vectors
+-- todo: we can make ss tt etc an option
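The setletters and setdigits calls above rely on two small helpers defined in math-vfu.lua (shown further down in this patch). As a rough, self-contained sketch of what they presumably do, given the call signatures used here: walk the 26 letters or 10 digits starting at the given Unicode base and map them onto the classical TFM slots 0x41-0x5A, 0x61-0x7A and 0x30-0x39. The vector name and bases below are simply the bold alphabet taken from the calls above.

    -- minimal sketch, not the actual ConTeXt code
    local mathencodings = { ["tex-bf"] = { } }             -- stub vector

    local function setletters(encodings, name, uppercase, lowercase)
        local enc = encodings[name]
        for i = 0, 25 do
            enc[uppercase + i] = 0x41 + i                   -- A .. Z
            enc[lowercase + i] = 0x61 + i                   -- a .. z
        end
    end

    local function setdigits(encodings, name, digits)
        local enc = encodings[name]
        for i = 0, 9 do
            enc[digits + i] = 0x30 + i                      -- 0 .. 9
        end
    end

    setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)   -- bold math alphabet
    setdigits (mathencodings, "tex-bf", 0x1D7CE)            -- bold digits
    print(string.format("%X", mathencodings["tex-bf"][0x1D400])) -- 41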
diff --git a/Master/texmf-dist/tex/context/base/math-vfu.lua b/Master/texmf-dist/tex/context/base/math-vfu.lua
index 21b10189471..b169ec5e556 100644
--- a/Master/texmf-dist/tex/context/base/math-vfu.lua
+++ b/Master/texmf-dist/tex/context/base/math-vfu.lua
@@ -10,6 +10,14 @@ if not modules then modules = { } end modules ['math-vfu'] = {
-- better and better. If you have problems with math fonts or miss
-- characters report it to the ConTeXt mailing list. Also thanks to
-- Boguslaw for finding a couple of errors.
+--
+-- This mechanism will stay around. Even when we've switched to the
+-- real fonts, one can still say:
+--
+-- \enablemode[lmmath,pxmath,txmath]
+--
+-- to get the virtual counterparts. There are still areas where the
+-- virtuals are better.
-- 20D6 -> 2190
-- 20D7 -> 2192
@@ -17,6 +25,7 @@ if not modules then modules = { } end modules ['math-vfu'] = {
local type, next = type, next
local max = math.max
local format = string.format
+local utfchar = utf.char
local fonts, nodes, mathematics = fonts, nodes, mathematics
@@ -37,36 +46,36 @@ fonts.handlers.vf.math = vfmath
local shared = { }
---~ local push, pop, back = { "push" }, { "pop" }, { "slot", 1, 0x2215 }
-
---~ local function negate(main,characters,id,size,unicode,basecode)
---~ if not characters[unicode] then
---~ local basechar = characters[basecode]
---~ if basechar then
---~ local ht, wd = basechar.height, basechar.width
---~ characters[unicode] = {
---~ width = wd,
---~ height = ht,
---~ depth = basechar.depth,
---~ italic = basechar.italic,
---~ kerns = basechar.kerns,
---~ commands = {
---~ { "slot", 1, basecode },
---~ push,
---~ { "down", ht/5},
---~ { "right", - wd/2},
---~ back,
---~ push,
---~ }
---~ }
---~ end
---~ end
---~ end
-
---~ \Umathchardef\braceld="0 "1 "FF07A
---~ \Umathchardef\bracerd="0 "1 "FF07B
---~ \Umathchardef\bracelu="0 "1 "FF07C
---~ \Umathchardef\braceru="0 "1 "FF07D
+-- local push, pop, back = { "push" }, { "pop" }, { "slot", 1, 0x2215 }
+--
+-- local function negate(main,characters,id,size,unicode,basecode)
+-- if not characters[unicode] then
+-- local basechar = characters[basecode]
+-- if basechar then
+-- local ht, wd = basechar.height, basechar.width
+-- characters[unicode] = {
+-- width = wd,
+-- height = ht,
+-- depth = basechar.depth,
+-- italic = basechar.italic,
+-- kerns = basechar.kerns,
+-- commands = {
+-- { "slot", 1, basecode },
+-- push,
+-- { "down", ht/5},
+-- { "right", - wd/2},
+-- back,
+-- push,
+-- }
+-- }
+-- end
+-- end
+-- end
+--
+-- \Umathchardef\braceld="0 "1 "FF07A
+-- \Umathchardef\bracerd="0 "1 "FF07B
+-- \Umathchardef\bracelu="0 "1 "FF07C
+-- \Umathchardef\braceru="0 "1 "FF07D
local function brace(main,characters,id,size,unicode,first,rule,left,right,rule,last)
if not characters[unicode] then
@@ -83,21 +92,37 @@ local function brace(main,characters,id,size,unicode,first,rule,left,right,rule,
end
end
-local function arrow(main,characters,id,size,unicode,arrow,minus,isleft)
+local function extension(main,characters,id,size,unicode,first,middle,last)
local chr = characters[unicode]
if not chr then
- -- skip
- elseif isleft then
- chr.horiz_variants = {
- { extender = 0, glyph = arrow },
- { extender = 1, glyph = minus },
- }
- else
- chr.horiz_variants = {
- { extender = 1, glyph = minus },
- { extender = 0, glyph = arrow },
- }
+ return -- skip
+ end
+ local fw = characters[first]
+ if not fw then
+ return
end
+ local mw = characters[middle]
+ if not mw then
+ return
+ end
+ local lw = characters[last]
+ if not lw then
+ return
+ end
+ fw = fw.width
+ mw = mw.width
+ lw = lw.width
+ if fw == 0 then
+ fw = 1
+ end
+ if lw == 0 then
+ lw = 1
+ end
+ chr.horiz_variants = {
+ { extender = 0, glyph = first, ["end"] = fw/2, start = 0, advance = fw },
+ { extender = 1, glyph = middle, ["end"] = mw/2, start = mw/2, advance = mw },
+ { extender = 0, glyph = last, ["end"] = 0, start = lw/2, advance = lw },
+ }
end
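To see what the new extension helper produces, here is a rough sketch with fabricated widths (not real font metrics, and without the zero-width guards of the real helper): it attaches a three-part horizontal extensible recipe to an existing character, where the middle snippet carries extender = 1 and therefore repeats, while the first and last snippets keep the arrowheads.

    -- sketch only; 0xFF501/0xFF503 stand for the clipped rule and right
    -- arrowhead snippets that addmissing sets up later in this file
    local characters = {
        [0x02192] = { width = 655360 },   -- \rightarrow base glyph
        [0xFF501] = { width = 218453 },   -- clipped minus, the repeatable rule
        [0xFF503] = { width = 436906 },   -- clipped right arrowhead
    }

    local function extension(characters, unicode, first, middle, last)
        local chr = characters[unicode]
        if not chr then return end
        local fw = characters[first].width
        local mw = characters[middle].width
        local lw = characters[last].width
        chr.horiz_variants = {
            { extender = 0, glyph = first,  ["end"] = fw/2, start = 0,    advance = fw },
            { extender = 1, glyph = middle, ["end"] = mw/2, start = mw/2, advance = mw },
            { extender = 0, glyph = last,   ["end"] = 0,    start = lw/2, advance = lw },
        }
    end

    extension(characters, 0x02192, 0xFF501, 0xFF501, 0xFF503) -- extensible \rightarrow
    print(#characters[0x02192].horiz_variants)                -- 3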
local function parent(main,characters,id,size,unicode,first,rule,last)
@@ -115,10 +140,10 @@ end
local push, pop, step = { "push" }, { "pop" }, 0.2 -- 0.1 is nicer but gives larger files
local function make(main,characters,id,size,n,m)
- local old = 0xFF000+n
+ local old = 0xFF000 + n
local c = characters[old]
if c then
- local upslot, dnslot, uprule, dnrule = 0xFF100+n, 0xFF200+n, 0xFF300+m, 0xFF400+m
+ local upslot, dnslot, uprule, dnrule = 0xFF100 + n, 0xFF200 + n, 0xFF300 + m, 0xFF400 + m
local xu = main.parameters.x_height + 0.3*size
local xd = 0.3*size
local w, h, d = c.width, c.height, c.depth
@@ -141,14 +166,24 @@ local function make(main,characters,id,size,n,m)
end
end
-local function minus(main,characters,id,size,unicode) -- push/pop needed?
- local minus = characters[0x002D]
+local function clipped(main,characters,id,size,unicode,original) -- push/pop needed?
+ local minus = characters[original]
if minus then
local mu = size/18
- local width = minus.width - 5*mu
+ local step = 3*mu
+ local width = minus.width
+ if width > step then
+ width = width - step
+ step = step / 2
+ else
+ width = width / 2
+ step = width
+ end
characters[unicode] = {
- width = width, height = minus.height, depth = minus.depth,
- commands = { push, { "right", -3*mu }, { "slot", id, 0x002D }, pop }
+ width = width,
+ height = minus.height,
+ depth = minus.depth,
+ commands = { push, { "right", -step }, { "slot", id, original }, pop }
}
end
end
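The width arithmetic in clipped is easiest to follow with concrete numbers. A small sketch of just that arithmetic, assuming a 10pt design size (655360 scaled points); the glyph widths are made up:

    local size = 655360
    local mu   = size/18                  -- one math unit at this size

    local function clippedwidth(width)
        local step = 3*mu
        if width > step then
            return width - step, step/2   -- wide glyph: trim 3mu, shift left by 1.5mu
        else
            return width/2, width/2       -- narrow glyph: halve it, shift by the half
        end
    end

    print(clippedwidth(436906))           -- a minus-like width: trimmed by 3mu
    print(clippedwidth(80000))            -- a very narrow snippet: halved instead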
@@ -170,7 +205,7 @@ local function raise(main,characters,id,size,unicode,private,n) -- this is a rea
local raised = characters[private]
if raised then
if not done[unicode] then
- report_virtual("temporary too large U+%05X due to issues in luatex backend",unicode)
+ report_virtual("temporary too large %U due to issues in luatex backend",unicode)
done[unicode] = true
end
local up = 0.85 * main.parameters.x_height
@@ -281,20 +316,20 @@ local function vertbar(main,characters,id,size,parent,scale,unicode)
end
end
-local function jointwo(main,characters,id,size,unicode,u1,d12,u2)
+local function jointwo(main,characters,id,size,unicode,u1,d12,u2,what)
local c1, c2 = characters[u1], characters[u2]
if c1 and c2 then
local w1, w2 = c1.width, c2.width
local mu = size/18
characters[unicode] = {
- width = w1 + w2 - d12*mu,
+ width = w1 + w2 - d12 * mu,
height = max(c1.height or 0, c2.height or 0),
- depth = max(c1.depth or 0, c2.depth or 0),
+ depth = max(c1.depth or 0, c2.depth or 0),
commands = {
{ "slot", id, u1 },
{ "right", -d12*mu } ,
{ "slot", id, u2 },
- }
+ },
}
end
end
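jointwo simply abuts two existing glyphs with a negative kern of d12 math units between them. A sketch of the resulting metrics for something like \neq (0x2260), which the code below builds from the combining slash 0x00338 and the equals sign; all metrics here are fabricated:

    -- sketch with made-up metrics; the real call for 0x2260 uses d12 = 0
    local size = 655360
    local mu   = size/18
    local c1   = { width = 0,      height = 458752, depth = 131072 } -- 0x00338 slash
    local c2   = { width = 436906, height = 393216, depth = 0      } -- 0x0003D equal

    local d12 = 0
    local neq = {
        width    = c1.width + c2.width - d12*mu,
        height   = math.max(c1.height, c2.height),
        depth    = math.max(c1.depth,  c2.depth),
        commands = {
            { "slot", 1, 0x00338 },
            { "right", -d12*mu },
            { "slot", 1, 0x0003D },
        },
    }
    print(neq.width, neq.height, neq.depth)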
@@ -340,6 +375,37 @@ local function stack(main,characters,id,size,unicode,u1,d12,u2)
end
end
+local function repeated(main,characters,id,size,unicode,u,n,private,fraction) -- math-fbk.lua
+ local c = characters[u]
+ if c then
+ local width = c.width
+ local italic = fraction*width -- c.italic or 0 -- larger ones have funny italics
+ local tc = { "slot", id, u }
+ local tr = { "right", -italic } -- see hack elsewhere
+ local commands = { }
+ for i=1,n-1 do
+ commands[#commands+1] = tc
+ commands[#commands+1] = tr
+ end
+ commands[#commands+1] = tc
+-- inspect(c)
+-- inspect(commands)
+ local next = c.next
+ if next then
+ repeated(main,characters,id,size,private,next,n,private+1,fraction)
+ next = private
+ end
+ characters[unicode] = {
+ width = width + (n-1)*(width-italic),
+ height = c.height,
+ depth = c.depth,
+ italic = italic,
+ commands = commands,
+ next = next,
+ }
+ end
+end
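The new repeated helper is used further down to synthesize the double and triple integrals from n copies of 0x222B, pulling each copy left by a fraction of its width so the slanted signs overlap. A sketch of the resulting advance width, with a made-up single-integral width:

    -- sketch of the width formula only
    local width    = 300000          -- advance of a single 0x222B glyph (fabricated)
    local fraction = 1/3             -- same fraction as the calls in addmissing below
    local italic   = fraction*width

    local function repeatedwidth(n)
        return width + (n-1)*(width - italic)
    end

    print(repeatedwidth(2))          -- double integral: 500000
    print(repeatedwidth(3))          -- triple integral: 700000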
+
function vfmath.addmissing(main,id,size)
local characters = main.characters
local shared = main.shared
@@ -348,19 +414,20 @@ function vfmath.addmissing(main,id,size)
for i=0x7A,0x7D do
make(main,characters,id,size,i,1)
end
+
brace (main,characters,id,size,0x23DE,0xFF17A,0xFF301,0xFF17D,0xFF17C,0xFF301,0xFF17B)
brace (main,characters,id,size,0x23DF,0xFF27C,0xFF401,0xFF27B,0xFF27A,0xFF401,0xFF27D)
+
parent (main,characters,id,size,0x23DC,0xFF17A,0xFF301,0xFF17B)
parent (main,characters,id,size,0x23DD,0xFF27C,0xFF401,0xFF27D)
+
-- negate (main,characters,id,size,0x2260,0x003D)
dots (main,characters,id,size,0x2026) -- ldots
dots (main,characters,id,size,0x22EE) -- vdots
dots (main,characters,id,size,0x22EF) -- cdots
dots (main,characters,id,size,0x22F1) -- ddots
dots (main,characters,id,size,0x22F0) -- udots
- minus (main,characters,id,size,0xFF501)
- arrow (main,characters,id,size,0x2190,0x2190,0xFF501,true) -- left
- arrow (main,characters,id,size,0x2192,0x2192,0xFF501,false) -- right
+
vertbar (main,characters,id,size,0x0007C,0.10,0xFF601) -- big : 0.85 bodyfontsize
vertbar (main,characters,id,size,0xFF601,0.30,0xFF602) -- Big : 1.15 bodyfontsize
vertbar (main,characters,id,size,0xFF602,0.30,0xFF603) -- bigg : 1.45 bodyfontsize
@@ -369,25 +436,74 @@ function vfmath.addmissing(main,id,size)
vertbar (main,characters,id,size,0xFF605,0.30,0xFF606)
vertbar (main,characters,id,size,0xFF606,0.30,0xFF607)
vertbar (main,characters,id,size,0xFF607,0.30,0xFF608)
+
+ clipped (main,characters,id,size,0xFF501,0x0002D) -- minus
+ clipped (main,characters,id,size,0xFF502,0x02190) -- lefthead
+ clipped (main,characters,id,size,0xFF503,0x02192) -- righthead
+ clipped (main,characters,id,size,0xFF504,0xFE321) -- mapsto
+ clipped (main,characters,id,size,0xFF505,0xFE322) -- lhook
+ clipped (main,characters,id,size,0xFF506,0xFE323) -- rhook
+ clipped (main,characters,id,size,0xFF507,0xFE324) -- mapsfrom
+ clipped (main,characters,id,size,0xFF508,0x021D0) -- double lefthead
+ clipped (main,characters,id,size,0xFF509,0x021D2) -- double righthead
+ clipped (main,characters,id,size,0xFF50A,0x0003D) -- equal
+ clipped (main,characters,id,size,0xFF50B,0x0219E) -- lefttwohead
+ clipped (main,characters,id,size,0xFF50C,0x021A0) -- righttwohead
+ clipped (main,characters,id,size,0xFF50D,0xFF350) -- lr arrow combi snippet
+ clipped (main,characters,id,size,0xFF50E,0xFF351) -- lr arrow combi snippet
+ clipped (main,characters,id,size,0xFF50F,0xFF352) -- lr arrow combi snippet
+ clipped (main,characters,id,size,0xFF510,0x02261) -- equiv
+
+ extension(main,characters,id,size,0x2190,0xFF502,0xFF501,0xFF501) -- \leftarrow
+ extension(main,characters,id,size,0x2192,0xFF501,0xFF501,0xFF503) -- \rightarrow
+
+ extension(main,characters,id,size,0x002D,0xFF501,0xFF501,0xFF501) -- \rel
+ extension(main,characters,id,size,0x003D,0xFF50A,0xFF50A,0xFF50A) -- \equal
+ extension(main,characters,id,size,0x2261,0xFF510,0xFF510,0xFF510) -- \equiv
+
jointwo (main,characters,id,size,0x21A6,0xFE321,0,0x02192) -- \mapstochar\rightarrow
jointwo (main,characters,id,size,0x21A9,0x02190,joinrelfactor,0xFE323) -- \leftarrow\joinrel\rhook
jointwo (main,characters,id,size,0x21AA,0xFE322,joinrelfactor,0x02192) -- \lhook\joinrel\rightarrow
- stack (main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
- jointwo (main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
- jointwo (main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
- jointwo (main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
- jointwo (main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
- jointwo (main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
jointwo (main,characters,id,size,0x27F5,0x02190,joinrelfactor,0x0002D) -- \leftarrow\joinrel\relbar
- jointwo (main,characters,id,size,0x27F6,0x0002D,joinrelfactor,0x02192) -- \relbar\joinrel\rightarrow
+ jointwo (main,characters,id,size,0x27F6,0x0002D,joinrelfactor,0x02192,2) -- \relbar\joinrel\rightarrow
jointwo (main,characters,id,size,0x27F7,0x02190,joinrelfactor,0x02192) -- \leftarrow\joinrel\rightarrow
jointwo (main,characters,id,size,0x27F8,0x021D0,joinrelfactor,0x0003D) -- \Leftarrow\joinrel\Relbar
jointwo (main,characters,id,size,0x27F9,0x0003D,joinrelfactor,0x021D2) -- \Relbar\joinrel\Rightarrow
jointwo (main,characters,id,size,0x27FA,0x021D0,joinrelfactor,0x021D2) -- \Leftarrow\joinrel\Rightarrow
jointhree(main,characters,id,size,0x27FB,0x02190,joinrelfactor,0x0002D,0,0xFE324) -- \leftarrow\joinrel\relbar\mapsfromchar
jointhree(main,characters,id,size,0x27FC,0xFE321,0,0x0002D,joinrelfactor,0x02192) -- \mapstochar\relbar\joinrel\rightarrow
+
+ extension(main,characters,id,size,0x21A6,0xFF504,0xFF501,0xFF503) -- \mapstochar\rightarrow
+ extension(main,characters,id,size,0x21A9,0xFF502,0xFF501,0xFF506) -- \leftarrow\joinrel\rhook
+ extension(main,characters,id,size,0x21AA,0xFF505,0xFF501,0xFF503) -- \lhook\joinrel\rightarrow
+ extension(main,characters,id,size,0x27F5,0xFF502,0xFF501,0xFF501) -- \leftarrow\joinrel\relbar
+ extension(main,characters,id,size,0x27F6,0xFF501,0xFF501,0xFF503) -- \relbar\joinrel\rightarrow
+ extension(main,characters,id,size,0x27F7,0xFF502,0xFF501,0xFF503) -- \leftarrow\joinrel\rightarrow
+ extension(main,characters,id,size,0x27F8,0xFF508,0xFF50A,0xFF50A) -- \Leftarrow\joinrel\Relbar
+ extension(main,characters,id,size,0x27F9,0xFF50A,0xFF50A,0xFF509) -- \Relbar\joinrel\Rightarrow
+ extension(main,characters,id,size,0x27FA,0xFF508,0xFF50A,0xFF509) -- \Leftarrow\joinrel\Rightarrow
+ extension(main,characters,id,size,0x27FB,0xFF502,0xFF501,0xFF507) -- \leftarrow\joinrel\relbar\mapsfromchar
+ extension(main,characters,id,size,0x27FC,0xFF504,0xFF501,0xFF503) -- \mapstochar\relbar\joinrel\rightarrow
+
+ extension(main,characters,id,size,0x219E,0xFF50B,0xFF501,0xFF501) -- \twoheadleftarrow\joinrel\relbar
+ extension(main,characters,id,size,0x21A0,0xFF501,0xFF501,0xFF50C) -- \relbar\joinrel\twoheadrightarrow
+ extension(main,characters,id,size,0x21C4,0xFF50D,0xFF50E,0xFF50F) -- leftoverright
+
+ -- 21CB leftrightharpoon
+ -- 21CC rightleftharpoon
+
+ stack (main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
+ jointwo (main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
+ jointwo (main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
+ jointwo (main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
+ jointwo (main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
+ jointwo (main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
+ jointwo (main,characters,id,size,0x2209,0x00338,0,0x02208) -- \not\in
jointwo (main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
+ repeated(main,characters,id,size,0x222C,0x222B,2,0xFF800,1/3)
+ repeated(main,characters,id,size,0x222D,0x222B,3,0xFF810,1/3)
+
-- raise (main,characters,id,size,0x02032,0xFE325,1) -- prime
-- raise (main,characters,id,size,0x02033,0xFE325,2) -- double prime
-- raise (main,characters,id,size,0x02034,0xFE325,3) -- triple prime
@@ -408,7 +524,7 @@ local reverse = { } -- index -> unicode
setmetatableindex(reverse, function(t,name)
if trace_virtual then
- report_virtual("initializing math vector '%s'",name)
+ report_virtual("initializing math vector %a",name)
end
local m, r = mathencodings[name], { }
for u, i in next, m do
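The reverse table here is filled lazily through an __index metamethod: the first time a vector name is looked up, the unicode-to-index encoding is inverted and cached. A self-contained sketch of the same idiom with plain setmetatable (the real code goes through ConTeXt's table.setmetatableindex and reads the actual mathencodings vectors):

    local mathencodings = {
        ["demo"] = { [0x02260] = 0x36, [0x02135] = 0x40 },  -- stub unicode -> index
    }

    local reverse = setmetatable({ }, {
        __index = function(t, name)
            local m, r = mathencodings[name], { }
            if m then
                for u, i in next, m do
                    r[i] = u                    -- invert: index -> unicode
                end
            end
            t[name] = r                         -- cache, so the work is done once
            return r
        end,
    })

    print(string.format("%X", reverse["demo"][0x40]))       -- 2135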
@@ -429,7 +545,7 @@ function vfmath.define(specification,set,goodies)
local ssname = ss.name
if add_optional and ss.optional then
if trace_virtual then
- report_virtual("loading font %s subfont %s with name %s at %s is skipped",name,s,ssname,size)
+ report_virtual("loading font %a subfont %s with name %a at %p is skipped",name,s,ssname,size)
end
else
if ss.features then
@@ -443,14 +559,14 @@ function vfmath.define(specification,set,goodies)
if alreadyloaded then
f, id = alreadyloaded.f, alreadyloaded.id
if trace_virtual then
- report_virtual("loading font %s subfont %s with name %s is reused",name,s,ssname)
+ report_virtual("loading font %a subfont %s with name %a is reused",name,s,ssname)
end
else
f, id = fonts.constructors.readanddefine(ssname,size)
names[ssname] = { f = f, id = id }
end
if not f or id == 0 then
- report_virtual("loading font %s subfont %s with name %s at %s is skipped, not found",name,s,ssname,size)
+ report_virtual("loading font %a subfont %s with name %a at %p is skipped, not found",name,s,ssname,size)
else
n = n + 1
okset[n] = ss
@@ -460,7 +576,7 @@ function vfmath.define(specification,set,goodies)
shared[n] = { }
end
if trace_virtual then
- report_virtual("loading font %s subfont %s with name %s at %s as id %s using encoding %s",name,s,ssname,size,id,ss.vector or "none")
+ report_virtual("loading font %a subfont %s with name %a at %p as id %s using encoding %p",name,s,ssname,size,id,ss.vector)
end
if not ss.checked then
ss.checked = true
@@ -475,10 +591,10 @@ function vfmath.define(specification,set,goodies)
u = u and u[index]
if u then
if trace_virtual then
- report_virtual("resolving name %s to %s",index,u)
+ report_virtual("resolving name %a to %s",index,u) -- maybe more detail for u
end
else
- report_virtual("unable to resolve name %s",index)
+ report_virtual("unable to resolve name %a",index)
end
vector[unicode] = u
end
@@ -489,7 +605,7 @@ function vfmath.define(specification,set,goodies)
end
end
-- beware, loaded[1] is already passed to tex (we need to make a simple copy then .. todo)
- local parent = loaded[1] -- a text font
+ local parent = loaded[1] or { } -- a text font
local characters = { }
local parameters = { }
local mathparameters = { }
@@ -520,7 +636,7 @@ function vfmath.define(specification,set,goodies)
characters[unicode] = character
end
else
- report_virtual("font %s has no characters",name)
+ report_virtual("font %a has no characters",name)
end
--
if parent.parameters then
@@ -528,7 +644,7 @@ function vfmath.define(specification,set,goodies)
parameters[key] = value
end
else
- report_virtual("font %s has no parameters",name)
+ report_virtual("font %a has no parameters",name)
end
--
local description = { name = "<unset>" }
@@ -570,7 +686,7 @@ function vfmath.define(specification,set,goodies)
else
local newparameters = fs.parameters
if not newparameters then
- report_virtual("font %s, no parameters set",name)
+ report_virtual("no parameters set in font %a",name)
elseif ss.extension then
mathparameters.math_x_height = newparameters.x_height or 0 -- math_x_height : height of x
mathparameters.default_rule_thickness = newparameters[ 8] or 0 -- default_rule_thickness : thickness of \over bars
@@ -579,7 +695,7 @@ function vfmath.define(specification,set,goodies)
mathparameters.big_op_spacing3 = newparameters[11] or 0 -- big_op_spacing3 : minimum baselineskip above displayed op
mathparameters.big_op_spacing4 = newparameters[12] or 0 -- big_op_spacing4 : minimum baselineskip below displayed op
mathparameters.big_op_spacing5 = newparameters[13] or 0 -- big_op_spacing5 : padding above and below displayed limits
- -- report_virtual("loading and virtualizing font %s at size %s, setting ex parameters",name,size)
+ -- report_virtual("loading and virtualizing font %a at size %p, setting ex parameters",name,size)
elseif ss.parameters then
mathparameters.x_height = newparameters.x_height or mathparameters.x_height
mathparameters.x_height = mathparameters.x_height or fp.x_height or 0 -- x_height : height of x
@@ -598,7 +714,7 @@ function vfmath.define(specification,set,goodies)
mathparameters.delim1 = newparameters[20] or 0 -- delim1 : size of \atopwithdelims delimiters in display styles
mathparameters.delim2 = newparameters[21] or 0 -- delim2 : size of \atopwithdelims delimiters in non-displays
mathparameters.axis_height = newparameters[22] or 0 -- axis_height : height of fraction lines above the baseline
- -- report_virtual("loading and virtualizing font %s at size %s, setting sy parameters",name,size)
+ -- report_virtual("loading and virtualizing font %a at size %p, setting sy parameters",name,size)
end
local vectorname = ss.vector
if vectorname then
@@ -620,9 +736,9 @@ function vfmath.define(specification,set,goodies)
local ru = rv[unicode]
if not ru then
if trace_virtual then
- report_virtual( "unicode point U+%05X has no index %04X in vector %s for font %s",unicode,index,vectorname,fontname)
+ report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname)
elseif not already_reported then
- report_virtual( "the mapping is incomplete for '%s' at %s",name,number.topoints(size))
+ report_virtual("the mapping is incomplete for %a at %p",name,size)
already_reported = true
end
rv[unicode] = true
@@ -636,9 +752,11 @@ function vfmath.define(specification,set,goodies)
local kerns = fci.kerns
local width = fci.width
local italic = fci.italic
- if italic and isextension then
- -- int_a^b
- width = width + italic
+ if italic and italic > 0 then
+ -- int_a^b
+ if isextension then
+ width = width + italic -- for obscure reasons the integral has a width + italic correction
+ end
end
if kerns then
local krn = { }
@@ -675,7 +793,6 @@ function vfmath.define(specification,set,goodies)
commands = ref,
}
end
---~ report_virtual("%05X %s %s",unicode,fci.height or "NO HEIGHT",fci.depth or "NO DEPTH")
end
end
if isextension then
@@ -691,7 +808,7 @@ function vfmath.define(specification,set,goodies)
end
local italic = fci.italic
local t = {
- width = fci.width + italic, -- watch this !
+ width = fci.width,
height = fci.height,
depth = fci.depth,
italic = italic,
@@ -749,9 +866,9 @@ function vfmath.define(specification,set,goodies)
local kerns = fci.kerns
if kerns then
local krn = { }
---~ for k=1,#kerns do
---~ krn[offset + k] = kerns[k]
---~ end
+ -- for k=1,#kerns do
+ -- krn[offset + k] = kerns[k]
+ -- end
for k, v in next, kerns do -- is kerns sparse?
krn[offset + k] = v
end
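The loop above remaps a glyph's kern pairs from raw font slots to the target unicodes by adding the vector offset; iterating with next rather than ipairs keeps sparse kern tables intact. A tiny standalone sketch of that remapping:

    -- sketch: remap sparse kern pairs by a fixed offset
    local kerns  = { [2] = -20000, [7] = 15000 }   -- fabricated slot -> kern pairs
    local offset = 0xFF000

    local krn = { }
    for k, v in next, kerns do
        krn[offset + k] = v
    end

    for k, v in next, krn do
        print(string.format("%X %d", k, v))        -- FF002 -20000 / FF007 15000
    end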
@@ -779,7 +896,7 @@ function vfmath.define(specification,set,goodies)
end
end
else
- report_virtual("error in loading %s: problematic vector %s",name,vectorname)
+ report_virtual("error in loading %a, problematic vector %a",name,vectorname)
end
end
mathematics.extras.copy(main) --not needed here (yet)
@@ -791,6 +908,7 @@ function vfmath.define(specification,set,goodies)
size = size,
}
--
+ --
main.mathparameters = mathparameters -- still traditional ones
vfmath.addmissing(main,#fontlist,size)
mathematics.addfallbacks(main)
@@ -799,10 +917,9 @@ function vfmath.define(specification,set,goodies)
fonts.constructors.assignmathparameters(main,main)
--
main.MathConstants = main.mathparameters -- we directly pass it to TeX (bypasses the scaler) so this is needed
--- inspect(main.MathConstants)
--
if trace_virtual or trace_timings then
- report_virtual("loading and virtualizing font %s at size %s took %0.3f seconds",name,size,os.clock()-start)
+ report_virtual("loading and virtualizing font %a at size %p took %0.3f seconds",name,size,os.clock()-start)
end
--
return main
@@ -814,299 +931,7 @@ function mathematics.makefont(name,set,goodies)
end
end
--- varphi is part of the alphabet, contrary to the other var*s'
-
-mathencodings["large-to-small"] = {
- [0x00028] = 0x00, -- (
- [0x00029] = 0x01, -- )
- [0x0005B] = 0x02, -- [
- [0x0005D] = 0x03, -- ]
- [0x0230A] = 0x04, -- lfloor
- [0x0230B] = 0x05, -- rfloor
- [0x02308] = 0x06, -- lceil
- [0x02309] = 0x07, -- rceil
- [0x0007B] = 0x08, -- {
- [0x0007D] = 0x09, -- }
- [0x027E8] = 0x0A, -- <
- [0x027E9] = 0x0B, -- >
- [0x0007C] = 0x0C, -- |
---~ [0x0] = 0x0D, -- lVert rVert Vert
--- [0x0002F] = 0x0E, -- /
- [0x0005C] = 0x0F, -- \
---~ [0x0] = 0x3A, -- lgroup
---~ [0x0] = 0x3B, -- rgroup
---~ [0x0] = 0x3C, -- arrowvert
---~ [0x0] = 0x3D, -- Arrowvert
- [0x02195] = 0x3F, -- updownarrow
---~ [0x0] = 0x40, -- lmoustache
---~ [0x0] = 0x41, -- rmoustache
- [0x0221A] = 0x70, -- sqrt
- [0x021D5] = 0x77, -- Updownarrow
- [0x02191] = 0x78, -- uparrow
- [0x02193] = 0x79, -- downarrow
- [0x021D1] = 0x7E, -- Uparrow
- [0x021D3] = 0x7F, -- Downarrow
- [0x0220F] = 0x59, -- prod
- [0x02210] = 0x61, -- coprod
- [0x02211] = 0x58, -- sum
- [0x0222B] = 0x5A, -- intop
- [0x0222E] = 0x49, -- ointop
- [0xFE302] = 0x62, -- widehat
- [0xFE303] = 0x65, -- widetilde
- [0x022C0] = 0x5E, -- bigwedge
- [0x022C1] = 0x5F, -- bigvee
- [0x022C2] = 0x5C, -- bigcap
- [0x022C3] = 0x5B, -- bigcup
- [0x02044] = 0x0E, -- /
-}
-
--- Beware: these are (in cm/lm) below the baseline due to limitations
-- in the tfm format but the engine (combined with the mathclass) takes
--- care of it. If we need them in textmode, we should make them virtual
--- and move them up but we're in no hurry with that.
-
-mathencodings["tex-ex"] = {
- [0x0220F] = 0x51, -- prod
- [0x02210] = 0x60, -- coprod
- [0x02211] = 0x50, -- sum
- [0x0222B] = 0x52, -- intop
- [0x0222E] = 0x48, -- ointop
- [0x022C0] = 0x56, -- bigwedge
- [0x022C1] = 0x57, -- bigvee
- [0x022C2] = 0x54, -- bigcap
- [0x022C3] = 0x53, -- bigcup
- [0x02A00] = 0x4A, -- bigodot -- fixed BJ
- [0x02A01] = 0x4C, -- bigoplus
- [0x02A02] = 0x4E, -- bigotimes
- -- [0x02A03] = , -- bigudot --
- [0x02A04] = 0x55, -- biguplus
- [0x02A06] = 0x46, -- bigsqcup
-}
-
--- only math stuff is needed, since we always use an lm or gyre
--- font as main font
-
-mathencodings["tex-mr"] = {
- [0x00393] = 0x00, -- Gamma
- [0x00394] = 0x01, -- Delta
- [0x00398] = 0x02, -- Theta
- [0x0039B] = 0x03, -- Lambda
- [0x0039E] = 0x04, -- Xi
- [0x003A0] = 0x05, -- Pi
- [0x003A3] = 0x06, -- Sigma
- [0x003A5] = 0x07, -- Upsilon
- [0x003A6] = 0x08, -- Phi
- [0x003A8] = 0x09, -- Psi
- [0x003A9] = 0x0A, -- Omega
--- [0x00060] = 0x12, -- [math]grave
--- [0x000B4] = 0x13, -- [math]acute
--- [0x002C7] = 0x14, -- [math]check
--- [0x002D8] = 0x15, -- [math]breve
--- [0x000AF] = 0x16, -- [math]bar
--- [0x00021] = 0x21, -- !
--- [0x00028] = 0x28, -- (
--- [0x00029] = 0x29, -- )
--- [0x0002B] = 0x2B, -- +
--- [0x0002F] = 0x2F, -- /
--- [0x0003A] = 0x3A, -- :
--- [0x02236] = 0x3A, -- colon
--- [0x0003B] = 0x3B, -- ;
--- [0x0003C] = 0x3C, -- <
--- [0x0003D] = 0x3D, -- =
--- [0x0003E] = 0x3E, -- >
--- [0x0003F] = 0x3F, -- ?
- [0x00391] = 0x41, -- Alpha
- [0x00392] = 0x42, -- Beta
- [0x02145] = 0x44,
- [0x00395] = 0x45, -- Epsilon
- [0x00397] = 0x48, -- Eta
- [0x00399] = 0x49, -- Iota
- [0x0039A] = 0x4B, -- Kappa
- [0x0039C] = 0x4D, -- Mu
- [0x0039D] = 0x4E, -- Nu
- [0x0039F] = 0x4F, -- Omicron
- [0x003A1] = 0x52, -- Rho
- [0x003A4] = 0x54, -- Tau
- [0x003A7] = 0x58, -- Chi
- [0x00396] = 0x5A, -- Zeta
--- [0x0005B] = 0x5B, -- [
--- [0x0005D] = 0x5D, -- ]
--- [0x0005E] = 0x5E, -- [math]hat -- the text one
- [0x00302] = 0x5E, -- [math]hat -- the real math one
--- [0x002D9] = 0x5F, -- [math]dot
- [0x02146] = 0x64,
- [0x02147] = 0x65,
--- [0x002DC] = 0x7E, -- [math]tilde -- the text one
- [0x00303] = 0x7E, -- [math]tilde -- the real one
--- [0x000A8] = 0x7F, -- [math]ddot
-}
-
-mathencodings["tex-mr-missing"] = {
- [0x02236] = 0x3A, -- colon
-}
-
-mathencodings["tex-mi"] = {
- [0x1D6E4] = 0x00, -- Gamma
- [0x1D6E5] = 0x01, -- Delta
- [0x1D6E9] = 0x02, -- Theta
- [0x1D6F3] = 0x02, -- varTheta (not present in TeX)
- [0x1D6EC] = 0x03, -- Lambda
- [0x1D6EF] = 0x04, -- Xi
- [0x1D6F1] = 0x05, -- Pi
- [0x1D6F4] = 0x06, -- Sigma
- [0x1D6F6] = 0x07, -- Upsilon
- [0x1D6F7] = 0x08, -- Phi
- [0x1D6F9] = 0x09, -- Psi
- [0x1D6FA] = 0x0A, -- Omega
- [0x1D6FC] = 0x0B, -- alpha
- [0x1D6FD] = 0x0C, -- beta
- [0x1D6FE] = 0x0D, -- gamma
- [0x1D6FF] = 0x0E, -- delta
- [0x1D716] = 0x0F, -- epsilon TODO: 1D716
- [0x1D701] = 0x10, -- zeta
- [0x1D702] = 0x11, -- eta
- [0x1D703] = 0x12, -- theta TODO: 1D703
- [0x1D704] = 0x13, -- iota
- [0x1D705] = 0x14, -- kappa
- [0x1D718] = 0x14, -- varkappa, not in tex fonts
- [0x1D706] = 0x15, -- lambda
- [0x1D707] = 0x16, -- mu
- [0x1D708] = 0x17, -- nu
- [0x1D709] = 0x18, -- xi
- [0x1D70B] = 0x19, -- pi
- [0x1D70C] = 0x1A, -- rho
- [0x1D70E] = 0x1B, -- sigma
- [0x1D70F] = 0x1C, -- tau
- [0x1D710] = 0x1D, -- upsilon
- [0x1D719] = 0x1E, -- phi
- [0x1D712] = 0x1F, -- chi
- [0x1D713] = 0x20, -- psi
- [0x1D714] = 0x21, -- omega
- [0x1D700] = 0x22, -- varepsilon (the other way around)
- [0x1D717] = 0x23, -- vartheta
- [0x1D71B] = 0x24, -- varpi
- [0x1D71A] = 0x25, -- varrho
- [0x1D70D] = 0x26, -- varsigma
- [0x1D711] = 0x27, -- varphi (the other way around)
- [0x021BC] = 0x28, -- leftharpoonup
- [0x021BD] = 0x29, -- leftharpoondown
- [0x021C0] = 0x2A, -- rightharpoonup
- [0x021C1] = 0x2B, -- rightharpoondown
- [0xFE322] = 0x2C, -- lhook (hook for combining arrows)
- [0xFE323] = 0x2D, -- rhook (hook for combining arrows)
- [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ
- [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ
- [0x022B3] = 0x2E, -- triangleright : cf lmmath this a cramped triangles / BJ / see *
- [0x022B2] = 0x2F, -- triangleleft : cf lmmath this a cramped triangles / BJ / see *
--- [0x00041] = 0x30, -- 0
--- [0x00041] = 0x31, -- 1
--- [0x00041] = 0x32, -- 2
--- [0x00041] = 0x33, -- 3
--- [0x00041] = 0x34, -- 4
--- [0x00041] = 0x35, -- 5
--- [0x00041] = 0x36, -- 6
--- [0x00041] = 0x37, -- 7
--- [0x00041] = 0x38, -- 8
--- [0x00041] = 0x39, -- 9
---~ [0x0002E] = 0x3A, -- .
- [0x0002C] = 0x3B, -- ,
- [0x0003C] = 0x3C, -- <
--- [0x0002F] = 0x3D, -- /, slash, solidus
- [0x02044] = 0x3D, -- / AM: Not sure
- [0x0003E] = 0x3E, -- >
- [0x022C6] = 0x3F, -- star
- [0x02202] = 0x40, -- partial
---
- [0x0266D] = 0x5B, -- flat
- [0x0266E] = 0x5C, -- natural
- [0x0266F] = 0x5D, -- sharp
- [0x02323] = 0x5E, -- smile
- [0x02322] = 0x5F, -- frown
- [0x02113] = 0x60, -- ell
---
- [0x1D6A4] = 0x7B, -- imath (TODO: also 0131)
- [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237)
- [0x02118] = 0x7D, -- wp
- [0x020D7] = 0x7E, -- vec (TODO: not sure)
--- 0x7F, -- (no idea what that could be)
-}
-
-mathencodings["tex-it"] = {
--- [0x1D434] = 0x41, -- A
- [0x1D6E2] = 0x41, -- Alpha
--- [0x1D435] = 0x42, -- B
- [0x1D6E3] = 0x42, -- Beta
--- [0x1D436] = 0x43, -- C
--- [0x1D437] = 0x44, -- D
--- [0x1D438] = 0x45, -- E
- [0x1D6E6] = 0x45, -- Epsilon
--- [0x1D439] = 0x46, -- F
--- [0x1D43A] = 0x47, -- G
--- [0x1D43B] = 0x48, -- H
- [0x1D6E8] = 0x48, -- Eta
--- [0x1D43C] = 0x49, -- I
- [0x1D6EA] = 0x49, -- Iota
--- [0x1D43D] = 0x4A, -- J
--- [0x1D43E] = 0x4B, -- K
- [0x1D6EB] = 0x4B, -- Kappa
--- [0x1D43F] = 0x4C, -- L
--- [0x1D440] = 0x4D, -- M
- [0x1D6ED] = 0x4D, -- Mu
--- [0x1D441] = 0x4E, -- N
- [0x1D6EE] = 0x4E, -- Nu
--- [0x1D442] = 0x4F, -- O
- [0x1D6F0] = 0x4F, -- Omicron
--- [0x1D443] = 0x50, -- P
- [0x1D6F2] = 0x50, -- Rho
--- [0x1D444] = 0x51, -- Q
--- [0x1D445] = 0x52, -- R
--- [0x1D446] = 0x53, -- S
--- [0x1D447] = 0x54, -- T
- [0x1D6F5] = 0x54, -- Tau
--- [0x1D448] = 0x55, -- U
--- [0x1D449] = 0x56, -- V
--- [0x1D44A] = 0x57, -- W
--- [0x1D44B] = 0x58, -- X
- [0x1D6F8] = 0x58, -- Chi
--- [0x1D44C] = 0x59, -- Y
--- [0x1D44D] = 0x5A, -- Z
---
--- [0x1D44E] = 0x61, -- a
--- [0x1D44F] = 0x62, -- b
--- [0x1D450] = 0x63, -- c
--- [0x1D451] = 0x64, -- d
--- [0x1D452] = 0x65, -- e
--- [0x1D453] = 0x66, -- f
--- [0x1D454] = 0x67, -- g
--- [0x1D455] = 0x68, -- h
- [0x0210E] = 0x68, -- Planck constant (h)
--- [0x1D456] = 0x69, -- i
--- [0x1D457] = 0x6A, -- j
--- [0x1D458] = 0x6B, -- k
--- [0x1D459] = 0x6C, -- l
--- [0x1D45A] = 0x6D, -- m
--- [0x1D45B] = 0x6E, -- n
--- [0x1D45C] = 0x6F, -- o
- [0x1D70A] = 0x6F, -- omicron
--- [0x1D45D] = 0x70, -- p
--- [0x1D45E] = 0x71, -- q
--- [0x1D45F] = 0x72, -- r
--- [0x1D460] = 0x73, -- s
--- [0x1D461] = 0x74, -- t
--- [0x1D462] = 0x75, -- u
--- [0x1D463] = 0x76, -- v
--- [0x1D464] = 0x77, -- w
--- [0x1D465] = 0x78, -- x
--- [0x1D466] = 0x79, -- y
--- [0x1D467] = 0x7A, -- z
-}
-
-mathencodings["tex-ss"] = { }
-mathencodings["tex-tt"] = { }
-mathencodings["tex-bf"] = { }
-mathencodings["tex-bi"] = { }
-mathencodings["tex-fraktur"] = { }
-mathencodings["tex-fraktur-bold"] = { }
+-- helpers
function vfmath.setletters(font_encoding, name, uppercase, lowercase)
local enc = font_encoding[name]
@@ -1122,494 +947,3 @@ function vfmath.setdigits(font_encoding, name, digits)
enc[digits+i] = i + 0x30
end
end
-
-mathencodings["tex-sy"] = {
- [0x0002D] = 0x00, -- -
- [0x02212] = 0x00, -- -
--- [0x02201] = 0x00, -- complement
--- [0x02206] = 0x00, -- increment
--- [0x02204] = 0x00, -- not exists
--- [0x000B7] = 0x01, -- cdot
- [0x022C5] = 0x01, -- cdot
- [0x000D7] = 0x02, -- times
- [0x0002A] = 0x03, -- *
- [0x02217] = 0x03, -- *
- [0x000F7] = 0x04, -- div
- [0x022C4] = 0x05, -- diamond
- [0x000B1] = 0x06, -- pm
- [0x02213] = 0x07, -- mp
- [0x02295] = 0x08, -- oplus
- [0x02296] = 0x09, -- ominus
- [0x02297] = 0x0A, -- otimes
- [0x02298] = 0x0B, -- oslash
- [0x02299] = 0x0C, -- odot
- [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo
- [0x02218] = 0x0E, -- circ
- [0x02219] = 0x0F, -- bullet
- [0x02022] = 0x0F, -- bullet
- [0x0224D] = 0x10, -- asymp
- [0x02261] = 0x11, -- equiv
- [0x02286] = 0x12, -- subseteq
- [0x02287] = 0x13, -- supseteq
- [0x02264] = 0x14, -- leq
- [0x02265] = 0x15, -- geq
- [0x02AAF] = 0x16, -- preceq
--- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF
- [0x02AB0] = 0x17, -- succeq
--- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0
- [0x0223C] = 0x18, -- sim
- [0x02248] = 0x19, -- approx
- [0x02282] = 0x1A, -- subset
- [0x02283] = 0x1B, -- supset
- [0x0226A] = 0x1C, -- ll
- [0x0226B] = 0x1D, -- gg
- [0x0227A] = 0x1E, -- prec
- [0x0227B] = 0x1F, -- succ
- [0x02190] = 0x20, -- leftarrow
- [0x02192] = 0x21, -- rightarrow
---~ [0xFE190] = 0x20, -- leftarrow
---~ [0xFE192] = 0x21, -- rightarrow
- [0x02191] = 0x22, -- uparrow
- [0x02193] = 0x23, -- downarrow
- [0x02194] = 0x24, -- leftrightarrow
- [0x02197] = 0x25, -- nearrow
- [0x02198] = 0x26, -- searrow
- [0x02243] = 0x27, -- simeq
- [0x021D0] = 0x28, -- Leftarrow
- [0x021D2] = 0x29, -- Rightarrow
- [0x021D1] = 0x2A, -- Uparrow
- [0x021D3] = 0x2B, -- Downarrow
- [0x021D4] = 0x2C, -- Leftrightarrow
- [0x02196] = 0x2D, -- nwarrow
- [0x02199] = 0x2E, -- swarrow
- [0x0221D] = 0x2F, -- propto
- [0x02032] = 0x30, -- prime
- [0x0221E] = 0x31, -- infty
- [0x02208] = 0x32, -- in
- [0x0220B] = 0x33, -- ni
- [0x025B3] = 0x34, -- triangle, bigtriangleup
- [0x025BD] = 0x35, -- bigtriangledown
- [0x00338] = 0x36, -- not
--- 0x37, -- (beginning of arrow)
- [0x02200] = 0x38, -- forall
- [0x02203] = 0x39, -- exists
- [0x000AC] = 0x3A, -- neg, lnot
- [0x02205] = 0x3B, -- empty set
- [0x0211C] = 0x3C, -- Re
- [0x02111] = 0x3D, -- Im
- [0x022A4] = 0x3E, -- top
- [0x022A5] = 0x3F, -- bot, perp
- [0x02135] = 0x40, -- aleph
- [0x1D49C] = 0x41, -- script A
- [0x0212C] = 0x42, -- script B
- [0x1D49E] = 0x43, -- script C
- [0x1D49F] = 0x44, -- script D
- [0x02130] = 0x45, -- script E
- [0x02131] = 0x46, -- script F
- [0x1D4A2] = 0x47, -- script G
- [0x0210B] = 0x48, -- script H
- [0x02110] = 0x49, -- script I
- [0x1D4A5] = 0x4A, -- script J
- [0x1D4A6] = 0x4B, -- script K
- [0x02112] = 0x4C, -- script L
- [0x02133] = 0x4D, -- script M
- [0x1D4A9] = 0x4E, -- script N
- [0x1D4AA] = 0x4F, -- script O
- [0x1D4AB] = 0x50, -- script P
- [0x1D4AC] = 0x51, -- script Q
- [0x0211B] = 0x52, -- script R
- [0x1D4AE] = 0x53, -- script S
- [0x1D4AF] = 0x54, -- script T
- [0x1D4B0] = 0x55, -- script U
- [0x1D4B1] = 0x56, -- script V
- [0x1D4B2] = 0x57, -- script W
- [0x1D4B3] = 0x58, -- script X
- [0x1D4B4] = 0x59, -- script Y
- [0x1D4B5] = 0x5A, -- script Z
- [0x0222A] = 0x5B, -- cup
- [0x02229] = 0x5C, -- cap
- [0x0228E] = 0x5D, -- uplus
- [0x02227] = 0x5E, -- wedge, land
- [0x02228] = 0x5F, -- vee, lor
- [0x022A2] = 0x60, -- vdash
- [0x022A3] = 0x61, -- dashv
- [0x0230A] = 0x62, -- lfloor
- [0x0230B] = 0x63, -- rfloor
- [0x02308] = 0x64, -- lceil
- [0x02309] = 0x65, -- rceil
- [0x0007B] = 0x66, -- {, lbrace
- [0x0007D] = 0x67, -- }, rbrace
- [0x027E8] = 0x68, -- <, langle
- [0x027E9] = 0x69, -- >, rangle
- [0x0007C] = 0x6A, -- |, mid, lvert, rvert
- [0x02225] = 0x6B, -- parallel
- -- [0x02016] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert
- [0x02195] = 0x6C, -- updownarrow
- [0x021D5] = 0x6D, -- Updownarrow
- [0x0005C] = 0x6E, -- \, backslash, setminus
- [0x02216] = 0x6E, -- setminus
- [0x02240] = 0x6F, -- wr
- [0x0221A] = 0x70, -- sqrt. AM: Check surd??
- [0x02A3F] = 0x71, -- amalg
- [0x1D6FB] = 0x72, -- nabla
--- [0x0222B] = 0x73, -- smallint (TODO: what about intop?)
- [0x02294] = 0x74, -- sqcup
- [0x02293] = 0x75, -- sqcap
- [0x02291] = 0x76, -- sqsubseteq
- [0x02292] = 0x77, -- sqsupseteq
- [0x000A7] = 0x78, -- S
- [0x02020] = 0x79, -- dagger, dag
- [0x02021] = 0x7A, -- ddagger, ddag
- [0x000B6] = 0x7B, -- P
- [0x02663] = 0x7C, -- clubsuit
- [0x02662] = 0x7D, -- diamondsuit
- [0x02661] = 0x7E, -- heartsuit
- [0x02660] = 0x7F, -- spadesuit
- [0xFE321] = 0x37, -- mapstochar
-
- [0xFE325] = 0x30, -- prime 0x02032
-}
-
--- The names in masm10.enc can be trusted best and are shown in the first
--- column, while in the second column we show the tex/ams names. As usual
--- it costs hours to figure out such a table.
-
-mathencodings["tex-ma"] = {
- [0x022A1] = 0x00, -- squaredot \boxdot
- [0x0229E] = 0x01, -- squareplus \boxplus
- [0x022A0] = 0x02, -- squaremultiply \boxtimes
- [0x025A1] = 0x03, -- square \square \Box
- [0x025A0] = 0x04, -- squaresolid \blacksquare
- [0x025AA] = 0x05, -- squaresmallsolid \centerdot
- [0x022C4] = 0x06, -- diamond \Diamond \lozenge
- [0x02666] = 0x07, -- diamondsolid \blacklozenge
- [0x021BB] = 0x08, -- clockwise \circlearrowright
- [0x021BA] = 0x09, -- anticlockwise \circlearrowleft
- [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons
- [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons
- [0x0229F] = 0x0C, -- squareminus \boxminus
- [0x022A9] = 0x0D, -- forces \Vdash
- [0x022AA] = 0x0E, -- forcesbar \Vvdash
- [0x022A8] = 0x0F, -- satisfies \vDash
- [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow
- [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow
- [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows
- [0x021C9] = 0x13, -- dblarrowright \rightrightarrows
- [0x021C8] = 0x14, -- dblarrowup \upuparrows
- [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows
- [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction
- [0x021C2] = 0x17, -- harpoondownright \downharpoonright
- [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft
- [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft
- [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail
- [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail
- [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows
--- [0x021C5] = 0x00, -- \updownarrows (missing in lm)
- [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows
- [0x021B0] = 0x1E, -- shiftleft \Lsh
- [0x021B1] = 0x1F, -- shiftright \Rsh
- [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow
- [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow
- [0x021AB] = 0x22, -- curlyleft \looparrowleft
- [0x021AC] = 0x23, -- curlyright \looparrowright
- [0x02257] = 0x24, -- circleequal \circeq
- [0x0227F] = 0x25, -- followsorequal \succsim
- [0x02273] = 0x26, -- greaterorsimilar \gtrsim
- [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox
- [0x022B8] = 0x28, -- multimap \multimap
- [0x02234] = 0x29, -- therefore \therefore
- [0x02235] = 0x2A, -- because \because
- [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot
- [0x0225C] = 0x2C, -- defines \triangleq
- [0x0227E] = 0x2D, -- precedesorequal \precsim
- [0x02272] = 0x2E, -- lessorsimilar \lesssim
- [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox
- [0x02A95] = 0x30, -- equalorless \eqslantless
- [0x02A96] = 0x31, -- equalorgreater \eqslantgtr
- [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec
- [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc
- [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq
- [0x02266] = 0x35, -- lessdblequal \leqq
- [0x02A7D] = 0x36, -- lessorequalslant \leqslant
- [0x02276] = 0x37, -- lessorgreater \lessgtr
- [0x02035] = 0x38, -- primereverse \backprime
- -- [0x0] = 0x39, -- axisshort \dabar
- [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq
- [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq
- [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq
- [0x02267] = 0x3D, -- greaterdblequal \geqq
- [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant
- [0x02277] = 0x3F, -- greaterorless \gtrless
- [0x0228F] = 0x40, -- squareimage \sqsubset
- [0x02290] = 0x41, -- squareoriginal \sqsupset
- -- wrong: see **
- -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
- -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
- -- cf lm
- [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq
- [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq
- --
- [0x02605] = 0x46, -- star \bigstar
- [0x0226C] = 0x47, -- between \between
- [0x025BC] = 0x48, -- triangledownsld \blacktriangledown
- [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright
- [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft
- -- [0x0] = 0x4B, -- arrowaxisright
- -- [0x0] = 0x4C, -- arrowaxisleft
- [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle
- [0x025B2] = 0x4E, -- trianglesolid \blacktriangle
- [0x025BC] = 0x4F, -- triangleinv \triangledown
- [0x02256] = 0x50, -- ringinequal \eqcirc
- [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr
- [0x022DB] = 0x52, -- greaterlessequal \gtreqless
- [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr
- [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless
- [0x000A5] = 0x55, -- Yen \yen
- [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow
- [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow
- [0x02713] = 0x58, -- check \checkmark
- [0x022BB] = 0x59, -- orunderscore \veebar
- [0x022BC] = 0x5A, -- nand \barwedge
- [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge
- [0x02220] = 0x5C, -- angle \angle
- [0x02221] = 0x5D, -- measuredangle \measuredangle
- [0x02222] = 0x5E, -- sphericalangle \sphericalangle
- -- [0x0] = 0x5F, -- proportional \varpropto
- -- [0x0] = 0x60, -- smile \smallsmile
- -- [0x0] = 0x61, -- frown \smallfrown
- [0x022D0] = 0x62, -- subsetdbl \Subset
- [0x022D1] = 0x63, -- supersetdbl \Supset
- [0x022D3] = 0x64, -- uniondbl \doublecup \Cup
- [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap
- [0x022CF] = 0x66, -- uprise \curlywedge
- [0x022CE] = 0x67, -- downfall \curlyvee
- [0x022CB] = 0x68, -- multiopenleft \leftthreetimes
- [0x022CC] = 0x69, -- multiopenright \rightthreetimes
- [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq
- [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq
- [0x0224F] = 0x6C, -- difference \bumpeq
- [0x0224E] = 0x6D, -- geomequivalent \Bumpeq
- [0x022D8] = 0x6E, -- muchless \lll \llless
- [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr
- [0x0231C] = 0x70, -- rightanglenw \ulcorner
- [0x0231D] = 0x71, -- rightanglene \urcorner
- [0x024C7] = 0x72, -- circleR \circledR
- [0x024C8] = 0x73, -- circleS \circledS
- [0x022D4] = 0x74, -- fork \pitchfork
- [0x02214] = 0x75, -- dotplus \dotplus
- [0x0223D] = 0x76, -- revsimilar \backsim
- [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq.
- [0x0231E] = 0x78, -- rightanglesw \llcorner
- [0x0231F] = 0x79, -- rightanglese \lrcorner
- [0x02720] = 0x7A, -- maltesecross \maltese
- [0x02201] = 0x7B, -- complement \complement
- [0x022BA] = 0x7C, -- intercal \intercal
- [0x0229A] = 0x7D, -- circlering \circledcirc
- [0x0229B] = 0x7E, -- circleasterisk \circledast
- [0x0229D] = 0x7F, -- circleminus \circleddash
-}
-
-mathencodings["tex-mb"] = {
- -- [0x0] = 0x00, -- lessornotequal \lvertneqq
- -- [0x0] = 0x01, -- greaterornotequal \gvertneqq
- [0x02270] = 0x02, -- notlessequal \nleq
- [0x02271] = 0x03, -- notgreaterequal \ngeq
- [0x0226E] = 0x04, -- notless \nless
- [0x0226F] = 0x05, -- notgreater \ngtr
- [0x02280] = 0x06, -- notprecedes \nprec
- [0x02281] = 0x07, -- notfollows \nsucc
- [0x02268] = 0x08, -- lessornotdbleql \lneqq
- [0x02269] = 0x09, -- greaterornotdbleql \gneqq
- -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant
- -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant
- [0x02A87] = 0x0C, -- lessnotequal \lneq
- [0x02A88] = 0x0D, -- greaternotequal \gneq
- -- [0x0] = 0x0E, -- notprecedesoreql \npreceq
- -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq
- [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim
- [0x022E9] = 0x11, -- followornoteqvlnt \succnsim
- [0x022E6] = 0x12, -- lessornotsimilar \lnsim
- [0x022E7] = 0x13, -- greaterornotsimilar \gnsim
- -- [0x0] = 0x14, -- notlessdblequal \nleqq
- -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq
- [0x02AB5] = 0x16, -- precedenotslnteql \precneqq
- [0x02AB6] = 0x17, -- follownotslnteql \succneqq
- [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox
- [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox
- [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox
- [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox
- [0x02241] = 0x1C, -- notsimilar \nsim
- [0x02247] = 0x1D, -- notapproxequal \ncong
- -- [0x0] = 0x1E, -- upslope \diagup
- -- [0x0] = 0x1F, -- downslope \diagdown
- -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq
- -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq
- -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq
- -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq
- [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq
- [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq
- -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq
- -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq
- [0x0228A] = 0x28, -- subsetnoteql \subsetneq
- [0x0228B] = 0x29, -- supersetnoteql \supsetneq
- [0x02288] = 0x2A, -- notsubseteql \nsubseteq
- [0x02289] = 0x2B, -- notsuperseteql \nsupseteq
- [0x02226] = 0x2C, -- notparallel \nparallel
- [0x02224] = 0x2D, -- notbar \nmid \ndivides
- -- [0x0] = 0x2E, -- notshortbar \nshortmid
- -- [0x0] = 0x2F, -- notshortparallel \nshortparallel
- [0x022AC] = 0x30, -- notturnstile \nvdash
- [0x022AE] = 0x31, -- notforces \nVdash
- [0x022AD] = 0x32, -- notsatisfies \nvDash
- [0x022AF] = 0x33, -- notforcesextra \nVDash
- [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq
- [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq
- [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft
- [0x022EB] = 0x37, -- nottriangleright \ntriangleright
- [0x0219A] = 0x38, -- notarrowleft \nleftarrow
- [0x0219B] = 0x39, -- notarrowright \nrightarrow
- [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow
- [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow
- [0x021CE] = 0x3C, -- notdblarrowboth \nLeftrightarrow
- [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow
- [0x022C7] = 0x3E, -- dividemultiply \divideontimes
- [0x02300] = 0x3F, -- diametersign \varnothing
- [0x02204] = 0x40, -- notexistential \nexists
- [0x1D538] = 0x41, -- A (blackboard A)
- [0x1D539] = 0x42, -- B
- [0x02102] = 0x43, -- C
- [0x1D53B] = 0x44, -- D
- [0x1D53C] = 0x45, -- E
- [0x1D53D] = 0x46, -- F
- [0x1D53E] = 0x47, -- G
- [0x0210D] = 0x48, -- H
- [0x1D540] = 0x49, -- I
- [0x1D541] = 0x4A, -- J
- [0x1D542] = 0x4B, -- K
- [0x1D543] = 0x4C, -- L
- [0x1D544] = 0x4D, -- M
- [0x02115] = 0x4E, -- N
- [0x1D546] = 0x4F, -- O
- [0x02119] = 0x50, -- P
- [0x0211A] = 0x51, -- Q
- [0x0211D] = 0x52, -- R
- [0x1D54A] = 0x53, -- S
- [0x1D54B] = 0x54, -- T
- [0x1D54C] = 0x55, -- U
- [0x1D54D] = 0x56, -- V
- [0x1D54E] = 0x57, -- W
- [0x1D54F] = 0x58, -- X
- [0x1D550] = 0x59, -- Y
- [0x02124] = 0x5A, -- Z (blackboard Z)
- [0x02132] = 0x60, -- finv \Finv
- [0x02141] = 0x61, -- fmir \Game
- -- [0x0] = 0x62, tildewide
- -- [0x0] = 0x63, tildewider
- -- [0x0] = 0x64, Finv
- -- [0x0] = 0x65, Gmir
- [0x02127] = 0x66, -- Omegainv \mho
- [0x000F0] = 0x67, -- eth \eth
- [0x02242] = 0x68, -- equalorsimilar \eqsim
- [0x02136] = 0x69, -- beth \beth
- [0x02137] = 0x6A, -- gimel \gimel
- [0x02138] = 0x6B, -- daleth \daleth
- [0x022D6] = 0x6C, -- lessdot \lessdot
- [0x022D7] = 0x6D, -- greaterdot \gtrdot
- [0x022C9] = 0x6E, -- multicloseleft \ltimes
- [0x022CA] = 0x6F, -- multicloseright \rtimes
- -- [0x0] = 0x70, -- barshort \shortmid
- -- [0x0] = 0x71, -- parallelshort \shortparallel
- -- [0x02216] = 0x72, -- integerdivide \smallsetminus (2216 already part of tex-sy
- -- [0x0] = 0x73, -- similar \thicksim
- -- [0x0] = 0x74, -- approxequal \thickapprox
- [0x0224A] = 0x75, -- approxorequal \approxeq
- [0x02AB8] = 0x76, -- followsorequal \succapprox
- [0x02AB7] = 0x77, -- precedesorequal \precapprox
- [0x021B6] = 0x78, -- archleftdown \curvearrowleft
- [0x021B7] = 0x79, -- archrightdown \curvearrowright
- [0x003DC] = 0x7A, -- Digamma \digamma
- [0x003F0] = 0x7B, -- kappa \varkappa
- [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k)
- [0x0210F] = 0x7D, -- planckover2pi \hslash
- [0x00127] = 0x7E, -- planckover2pi1 \hbar
- [0x003F6] = 0x7F, -- epsiloninv \backepsilon
-}
-
-mathencodings["tex-mc"] = {
- -- this file has no tfm so it gets mapped in the private space
- [0xFE324] = "mapsfromchar",
-}
-
-mathencodings["tex-fraktur"] = {
--- [0x1D504] = 0x41, -- A (fraktur A)
--- [0x1D505] = 0x42, -- B
- [0x0212D] = 0x43, -- C
--- [0x1D507] = 0x44, -- D
--- [0x1D508] = 0x45, -- E
--- [0x1D509] = 0x46, -- F
--- [0x1D50A] = 0x47, -- G
- [0x0210C] = 0x48, -- H
- [0x02111] = 0x49, -- I
--- [0x1D50D] = 0x4A, -- J
--- [0x1D50E] = 0x4B, -- K
--- [0x1D50F] = 0x4C, -- L
--- [0x1D510] = 0x4D, -- M
--- [0x1D511] = 0x4E, -- N
--- [0x1D512] = 0x4F, -- O
--- [0x1D513] = 0x50, -- P
--- [0x1D514] = 0x51, -- Q
- [0x0211C] = 0x52, -- R
--- [0x1D516] = 0x53, -- S
--- [0x1D517] = 0x54, -- T
--- [0x1D518] = 0x55, -- U
--- [0x1D519] = 0x56, -- V
--- [0x1D51A] = 0x57, -- W
--- [0x1D51B] = 0x58, -- X
--- [0x1D51C] = 0x59, -- Y
- [0x02128] = 0x5A, -- Z (fraktur Z)
--- [0x1D51E] = 0x61, -- a (fraktur a)
--- [0x1D51F] = 0x62, -- b
--- [0x1D520] = 0x63, -- c
--- [0x1D521] = 0x64, -- d
--- [0x1D522] = 0x65, -- e
--- [0x1D523] = 0x66, -- f
--- [0x1D524] = 0x67, -- g
--- [0x1D525] = 0x68, -- h
--- [0x1D526] = 0x69, -- i
--- [0x1D527] = 0x6A, -- j
--- [0x1D528] = 0x6B, -- k
--- [0x1D529] = 0x6C, -- l
--- [0x1D52A] = 0x6D, -- m
--- [0x1D52B] = 0x6E, -- n
--- [0x1D52C] = 0x6F, -- o
--- [0x1D52D] = 0x70, -- p
--- [0x1D52E] = 0x71, -- q
--- [0x1D52F] = 0x72, -- r
--- [0x1D530] = 0x73, -- s
--- [0x1D531] = 0x74, -- t
--- [0x1D532] = 0x75, -- u
--- [0x1D533] = 0x76, -- v
--- [0x1D534] = 0x77, -- w
--- [0x1D535] = 0x78, -- x
--- [0x1D536] = 0x79, -- y
--- [0x1D537] = 0x7A, -- z
-}
-
--- now that all other vectors are defined ...
-
-vfmath.setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E)
-vfmath.setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA)
-vfmath.setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A)
-vfmath.setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)
-vfmath.setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482)
-vfmath.setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E)
-vfmath.setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586)
-
-vfmath.setdigits (mathencodings, "tex-ss", 0x1D7E2)
-vfmath.setdigits (mathencodings, "tex-tt", 0x1D7F6)
-vfmath.setdigits (mathencodings, "tex-bf", 0x1D7CE)
-
--- vfmath.setdigits (mathencodings, "tex-bi", 0x1D7CE)
-
--- todo: add ss, tt, bf etc vectors
--- todo: we can make ss tt etc an option
diff --git a/Master/texmf-dist/tex/context/base/meta-fig.mkiv b/Master/texmf-dist/tex/context/base/meta-fig.mkiv
index bee88ffefa7..7fbc33be9e0 100644
--- a/Master/texmf-dist/tex/context/base/meta-fig.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-fig.mkiv
@@ -44,7 +44,8 @@
\definefittingpage
[MPpage]
[\c!align=,
- \c!command=\meta_process_graphic]
+ \c!command=\meta_process_graphic_instance{\fittingpageparameter\c!instance},
+ \c!instance=]
%D \macros
%D {MPfigure}
diff --git a/Master/texmf-dist/tex/context/base/meta-fun.lua b/Master/texmf-dist/tex/context/base/meta-fun.lua
index 7594d0c78d7..78ee25bafe4 100644
--- a/Master/texmf-dist/tex/context/base/meta-fun.lua
+++ b/Master/texmf-dist/tex/context/base/meta-fun.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['meta-fun'] = {
-- very experimental, actually a joke ... see metafun manual for usage
-local format, loadstring, type = string.format, loadstring, type
+local format, load, type = string.format, load, type
local metapost = metapost
@@ -39,7 +39,7 @@ function metafun.interpolate(f,b,e,s,c)
local done = false
context("(")
for i=b,e,(e-b)/s do
- local d = loadstring(format("return function(x) return %s end",f))
+ local d = load(format("return function(x) return %s end",f))
if d then
d = d()
if done then
diff --git a/Master/texmf-dist/tex/context/base/meta-grd.mkiv b/Master/texmf-dist/tex/context/base/meta-grd.mkiv
new file mode 100644
index 00000000000..da410ba689d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/meta-grd.mkiv
@@ -0,0 +1,116 @@
+%D \module
+%D [ file=meta-grd,
+%D version=2012.06.28,
+%D title=\METAPOST\ Graphics,
+%D subtitle=grids,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{MetaPost Graphics / Grids}
+
+%D This used to be a \TEX\ method, and a rather old one too. We keep it around but
+%D in a more modern way.
+%D
+%D \startbuffer
+%D \basegrid
+%D [nx=8,ny=5,
+%D dx=.5,dy=.25,
+%D unit=cm,scale=2,factor=1,
+%D offset=1ex,xstep=2,ystep=1,
+%D align=middle,style=\tt\tx]
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D \placefigure
+%D {An example of a grid.}
+%D {\getbuffer}
+
+\startuseMPgraphic{grid}{nx,ny,dx,dy,factor,scale,xstep,ystep,xoffset,yoffset,align}
+ begingroup ;
+
+ save nx, ny, dx, dy, wd, ht, xstep, ystep, xoffset, yoffset, align, xalign, yalign ;
+
+ numeric nx ; nx := \MPvar{nx} ;
+ numeric ny ; ny := \MPvar{ny} ;
+ numeric dx ; dx := \MPvar{factor} * \MPvar{scale} * \MPvar{dx} ;
+ numeric dy ; dy := \MPvar{factor} * \MPvar{scale} * \MPvar{dy} ;
+ numeric wd ; wd := nx * dx ;
+ numeric ht ; ht := ny * dy ;
+ numeric xstep ; xstep := \MPvar{xstep} ;
+ numeric ystep ; ystep := \MPvar{ystep} ;
+ numeric xoffset ; xoffset := \MPvar{xoffset} ;
+ numeric yoffset ; yoffset := \MPvar{yoffset} ;
+ numeric align ; align := \MPvar{align} ;
+ numeric xalign ; xalign := 0 ;
+ numeric yalign ; yalign := 0 ;
+
+ if align = 1 :
+ xalign := dx/2 ;
+ yalign := dy/2 ;
+ fi ;
+
+ for i=0 step dx until wd :
+ draw (i,0) -- (i,ht) ;
+ endfor ;
+ for i=0 step dy until ht :
+ draw (0,i) -- (wd,i) ;
+ endfor ;
+
+ if xstep > 0 :
+ for i=1 step xstep until nx :
+ draw thetextext.bot(decimal i,(i*dx-xalign,-xoffset)) ;
+ endfor ;
+ fi ;
+ if ystep > 0 :
+ for i=1 step ystep until ny :
+ draw thetextext.lft(decimal i,(-yoffset,i*dy-yalign)) ;
+ endfor ;
+ fi ;
+
+ endgroup ;
+\stopuseMPgraphic
+
+\unprotect
+
+\unexpanded\def\basegrid
+ {\dosingleempty\typo_grid_base}
+
+\def\typo_grid_base[#1]%
+ {\hbox\bgroup
+ \getdummyparameters
+ [\c!nx=10,\c!ny=10,\c!dx=.5,\c!dy=.5,\c!xstep=0,\c!ystep=0,
+ \c!unit=\s!cm,\c!scale=1,\c!factor=1,
+ \c!offset=.25ex,\c!xoffset=\directdummyparameter\c!offset,\c!yoffset=\directdummyparameter\c!offset,
+ \c!align=,
+ #1]%
+ \usedummystyleandcolor\c!style\c!color
+ \edef\p_align{\directdummyparameter\c!align}%
+ \ifx\p_align\v!middle
+ \let\p_align\!!plusone
+ \else
+ \let\p_align\!!zerocount
+ \fi
+ \useMPgraphic
+ {grid}%
+ {nx=\directdummyparameter\c!nx,%
+ ny=\directdummyparameter\c!ny,%
+ dx=\directdummyparameter\c!dx\directdummyparameter\c!unit,%
+ dy=\directdummyparameter\c!dy\directdummyparameter\c!unit,%
+ factor=\directdummyparameter\c!factor,%
+ scale=\directdummyparameter\c!scale,%
+ xstep=\directdummyparameter\c!xstep,%
+ ystep=\directdummyparameter\c!ystep,%
+ xoffset=\directdummyparameter\c!xoffset,%
+ yoffset=\directdummyparameter\c!yoffset,%
+ align=\p_align}%
+ \egroup}
+
+\let\grid\basegrid
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/meta-imp-dum.mkiv b/Master/texmf-dist/tex/context/base/meta-imp-dum.mkiv
index a622d94f4d3..1daff57ac8c 100644
--- a/Master/texmf-dist/tex/context/base/meta-imp-dum.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-imp-dum.mkiv
@@ -59,7 +59,7 @@
% clip currentpicture to p ;
% \stopuseMPgraphic
-\startuseMPgraphic{placeholder}{width,height,reduction,color}
+\startuseMPgraphic{figure:placeholder}{width,height,reduction,color}
numeric w, h, d, r ; path p ;
if cmykcolor \MPvar{color} :
cmykcolor c, b ; b := (0,0,0,0)
@@ -82,66 +82,80 @@
clip currentpicture to p ;
\stopuseMPgraphic
+\defineoverlay
+ [figure:placeholder:graphic]
+ [\useMPgraphic
+ {figure:placeholder}%
+ {width=\figurewidth,%
+ height=\figureheight,%
+ reduction=\externalfigureparameter\c!reduction,%
+ color=placeholder:\the\c_grph_replacement_n}]
+
\definepalet
[placeholder]
[1=red,2=green,3=blue,4=cyan,5=magenta,6=yellow]
-% \newcounter \figurereplacementcycle
-
-\let\figurereplacementcycle\relax
+\newcount\c_grph_replacement_n
\setupexternalfigures
[\c!reduction=0,
\c!text=\v!yes]
-\let\normalexternalfigurereplacement\externalfigurereplacement
+\let\grph_include_replacement_saved\grph_include_replacement
-\unexpanded\def\externalfigurereplacement#1#2#3%
- {\getpaletsize[placeholder]%
- \ifx\figurereplacementcycle\relax
- \getrandomnumber \figurereplacementcycle \!!plusone \paletsize
- \globallet \figurereplacementcycle \figurereplacementcycle
+\unexpanded\def\grph_include_replacement#1#2#3%
+ {\begingroup
+ \getpaletsize[placeholder]%
+ \ifnum\c_grph_replacement_n=\zerocount
+ \getrandomnumber \m_grph_replacement_n\plusone\paletsize
+ \global\c_grph_replacement_n \m_grph_replacement_n\relax
\else
- \doglobal\increment\figurereplacementcycle
+ \global\advance\c_grph_replacement_n\plusone
\fi
- \ifnum\figurereplacementcycle>\paletsize
- \globallet\figurereplacementcycle\!!plusone
+ \ifnum\c_grph_replacement_n>\paletsize
+ \global\c_grph_replacement_n\plusone
\fi
- \defineoverlay
- [\s!dummy]
- [\useMPgraphic
- {placeholder}%
- {width=\figurewidth,
- height=\figureheight,
- reduction=\@@efreduction,
- color=placeholder:\figurereplacementcycle}]%
- \expanded{\localframed
- [\??ef]
+ \setupcurrentexternalfigure
[\c!width=\figurewidth,
\c!height=\figureheight,
\c!frame=\v!off,
\c!strut=\v!no,
- \c!background=\s!dummy,
- \c!foregroundcolor=\s!white]}%
- {\doif\@@eftext\v!yes
- {\infofont \setupinterlinespace \dohyphens % \nohyphens
- \edef\tempa{#1}\ifx\tempa\s!dummy\let\tempa\empty\fi
- \edef\tempb{#2}\ifx\tempb\s!dummy\let\tempb\empty\fi
- \edef\tempc{#3}\ifx\tempc\s!dummy\let\tempc\empty\fi
- \ifx\tempa\empty\else
- name: \expanded{\verbatimstring{#1}}\strut\endgraf
- \fi
- \ifx\tempb\empty\else
- \ifx\tempa\empty\ifx\tempc\empty\else file: \fi\else file: \fi
- \expanded{\verbatimstring{#2}}\strut\endgraf
- \fi
- \ifx\tempc\empty\else
- state: \expanded{\verbatimstring{#3}}\strut\endgraf
- \fi}}}
+ \c!align={\v!middle,\v!lohi},
+ \c!background=figure:placeholder:graphic,
+ \c!foregroundcolor=\s!white]%
+ \doifelse{\externalfigureparameter\c!text}\v!yes
+ {\edef\m_graphics_text_a{#1}\edef\m_graphics_text_a{\ifx\m_graphics_text_a\s!dummy\else\detokenize\expandafter{\m_graphics_text_a}\fi}%
+ \edef\m_graphics_text_b{#2}\edef\m_graphics_text_b{\ifx\m_graphics_text_b\s!dummy\else\detokenize\expandafter{\m_graphics_text_b}\fi}%
+ \edef\m_graphics_text_c{#3}\edef\m_graphics_text_c{\ifx\m_graphics_text_c\s!dummy\else\detokenize\expandafter{\m_graphics_text_c}\fi}%
+ \infofont\setupinterlinespace\dohyphens
+ \inheritedexternalfigureframed{\directsetup{figure:placeholder:text}}}%
+ {\inheritedexternalfigureframed{}}%
+ \endgroup}
\unexpanded\def\dummyfigure
{\externalfigure[placeholder]}
+\startsetups figure:placeholder:text
+ \ifx\m_graphics_text_a\empty\else
+ \strut name: \m_graphics_text_a\par
+ \fi
+ \ifx\m_graphics_text_b\empty\else
+ \strut
+ \ifx\m_graphics_text_a\empty
+ \ifx\m_graphics_text_c\empty
+ \else
+ file:\space
+ \fi
+ \else
+ file:\space
+ \fi
+ \m_graphics_text_b\par
+ \fi
+ \ifx\m_graphics_text_c\empty\else
+ \strut state: \m_graphics_text_c\par
+ \fi
+\stopsetups
+
%D \starttyping
%D \externalfigure[mediashow.swf][comment={Alas, we have no nice preview},background=figure:comment]
%D \externalfigure[mediashow.swf][comment={Alas, we have no nice preview},background=figure:dummy]
@@ -151,7 +165,7 @@
\defineframed
[figurecomment]
[\c!background=\v!color,
- \c!backgroundcolor=\v!gray,
+ \c!backgroundcolor=\s!gray,
\c!frame=\v!off,
\c!foregroundstyle=\ttbf,
\c!align={\v!middle,\v!lohi},
@@ -160,8 +174,8 @@
\defineframed
[figuredummy]
- [\c!background=figure:placeholder,
- \c!foregroundcolor=white,
+ [\c!background=figure:dummy:only,
+ \c!foregroundcolor=\s!white,
\c!backgroundcolor=\v!gray,
\c!frame=\v!off,
\c!foregroundstyle=\ttbf,
@@ -170,18 +184,30 @@
\c!width=\figurewidth]
\defineoverlay
- [figure:placeholder]
- [\externalfigurereplacement{}{}{}]
+ [figure:dummy:only]
+ [\grph_include_replacement\empty\empty\empty]
\defineoverlay
[figure:comment]
- [\figurecomment{\@@efcomment}]
+ [\figurecomment{\externalfigureparameter\c!comment}]
\defineoverlay
[figure:dummy]
- [\figuredummy{\@@efcomment}]
+ [\figuredummy{\externalfigureparameter\c!comment}]
\setupexternalfigures
- [comment=]
+ [\c!comment=]
+
+\protect
+
+\continueifinputfile{meta-imp-dum.mkiv}
+
+\starttext
+
+ \externalfigure[whatever-missing]
+
+ \blank
+
+ \externalfigure[whatever-missing][width=2cm]
-\protect \endinput
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv b/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
index 08373c0c4e9..3a9ad5927f8 100644
--- a/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
@@ -28,6 +28,8 @@
\unprotect
+\definesystemvariable {sh} % ShapedText .. todo: commandhandler
+
\startMPextensions
if unknown context_text: input "mp-text.mpiv" ; fi;
\stopMPextensions
@@ -36,8 +38,8 @@
% \def\newchar#1{\chardef#1=0 }
-\ifdefined\MPtoks \else \newtoks\MPtoks \fi
-\ifdefined\MPbox \else \newbox \MPbox \fi
+\ifdefined\MPtoks \else \newtoks\MPtoks \fi
+\ifdefined\MPbox \else \newbox \MPbox \fi
\ifdefined\parwidth \else \newdimen\parwidth \fi
\ifdefined\parheight \else \newdimen\parheight \fi
@@ -49,7 +51,7 @@
\newif \ifparseries
\ifdefined\parfirst \else \chardef \parfirst=0 \fi
-\def\startshapetext[#1]%
+\unexpanded\def\startshapetext[#1]%
{\global\newcounter\currentshapetext
\global\setbox\shapetextbox\vbox\bgroup
\expanded{\switchtobodyfont[\@@shbodyfont]}%
@@ -76,14 +78,14 @@
\ifparseries\def\par{\endgraf\adaptparshape}\fi
\EveryPar{\begstrut}}
-\def\stopshapetext
+\unexpanded\def\stopshapetext
{\endstrut
%\removebottomthings
\egroup
\global\newcounter\currentshapetext
\getshapecharacteristics}
-\def\adaptparshape%
+\unexpanded\def\adaptparshape
{\def\docommand##1%
{\ifcase\!!counta
\expandafter\appendtoks\space##1 \to\!!toksa
@@ -100,7 +102,7 @@
\expanded{\parshape\totalparlines\the\partoks}%
}%\fi}
-\def\getshapecharacteristics%
+\unexpanded\def\getshapecharacteristics
{\doglobal\increment\currentshapetext
\doifdefinedelse{parlines:\currentshapetext}
{\global\parlines \getvalue{parlines:\currentshapetext}%
@@ -116,7 +118,7 @@
\global\parwidth \hsize
\global\parheight \vsize}}
-\def\setshapecharacteristics%
+\unexpanded\def\setshapecharacteristics
{\doglobal\increment\currentshapetext
\setxvalue{parlines:\currentshapetext }{\the\parlines}%
\setxvalue{parfirst:\currentshapetext }{\the\parfirst}%
@@ -125,7 +127,7 @@
\setxvalue{parwidth:\currentshapetext }{\the\parwidth}%
\setxvalue{parheight:\currentshapetext }{\the\parheight}}
-\def\getshapetext% option: unvbox
+\unexpanded\def\getshapetext % option: unvbox
{\vbox\bgroup
\forgetall
\setbox\scratchbox\vbox to \parheight
@@ -142,17 +144,17 @@
\getshapecharacteristics
\egroup}
-\def\setupshapetexts%
+\unexpanded\def\setupshapetexts
{\dodoubleempty\getparameters[\??sh]}
-\setupshapetexts%
+\setupshapetexts
[\c!bodyfont=]
%%%%%%% rotfont nog definieren
\doifundefined{RotFont}{\definefont[RotFont][RegularBold]}
-\def\processfollowingtoken#1% strut toegevoegd
+\unexpanded\def\processfollowingtoken#1% strut toegevoegd
{\appendtoks#1\to\MPtoks
\setbox\MPbox=\hbox{\RotFont\setstrut\strut\the\MPtoks}%
\startMPdrawing
@@ -168,7 +170,7 @@
% we default to nothing
\stopuseMPgraphic
-\def\followtokens#1%
+\unexpanded\def\followtokens#1%
{\vbox\bgroup
\forgetall
\dontcomplain
@@ -280,7 +282,7 @@
fill z1..z2..z3 & z3..z4..z1 & cycle withcolor \MPvar{color} ;
\stopuniqueMPgraphic
-\def\EnglishRule%
+\unexpanded\def\EnglishRule
{\startlinecorrection[EnglishRule]
\setlocalhsize \noindent \reuseMPgraphic{EnglishRule}
\stoplinecorrection}
@@ -294,7 +296,7 @@
%D \TightText{\ss\bf 123}{0cm}{3cm}{red}
%D \stoplinecorrection
-\def\TightText#1#2#3#4%
+\unexpanded\def\TightText#1#2#3#4%
{\hbox % \ruledhbox
{\startMPcode
picture p ; p := image (graphictext "#1" withfillcolor red) ;
diff --git a/Master/texmf-dist/tex/context/base/meta-ini.lua b/Master/texmf-dist/tex/context/base/meta-ini.lua
index bac1429ae1d..713ba3d5d01 100644
--- a/Master/texmf-dist/tex/context/base/meta-ini.lua
+++ b/Master/texmf-dist/tex/context/base/meta-ini.lua
@@ -7,7 +7,11 @@ if not modules then modules = { } end modules ['meta-ini'] = {
}
local tonumber = tonumber
-local format, gmatch, match, gsub = string.format, string.gmatch, string.match, string.gsub
+local format = string.format
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc
+
+local context = context
metapost = metapost or { }
@@ -19,14 +23,14 @@ local status_metapost = logs.messenger("metapost")
local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
local function action(name,foundname)
- status_metapost("loaded: library '%s'",name)
+ status_metapost("library %a is loaded",name)
context.startreadingfile()
context.input(foundname)
context.stopreadingfile()
end
local function failure(name)
- report_metapost("unknown: library '%s'",name)
+ report_metapost("library %a is unknown or invalid",name)
end
function commands.useMPlibrary(name)
@@ -43,46 +47,94 @@ end
local colorhash = attributes.list[attributes.private('color')]
-local validdimen = lpeg.patterns.validdimen * lpeg.P(-1)
-
-local lpegmatch = lpeg.match
local textype = tex.type
local MPcolor = context.MPcolor
+-- local validdimen = lpegpatterns.validdimen * P(-1)
+--
+-- function commands.prepareMPvariable(v) -- slow but ok
+-- if v == "" then
+-- MPcolor("black")
+-- else
+-- local typ, var = match(v,"(.):(.*)")
+-- if not typ then
+-- -- parse
+-- if colorhash[v] then
+-- MPcolor(v)
+-- elseif tonumber(v) then
+-- context(v)
+-- elseif lpegmatch(validdimen,v) then
+-- return context("\\the\\dimexpr %s",v)
+-- else
+-- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space
+-- local t = textype(s)
+-- if t == "dimen" then
+-- return context("\\the\\dimexpr %s",v)
+-- elseif t == "count" then
+-- return context("\\the\\numexpr %s",v)
+-- end
+-- end
+-- context("\\number %s",v) -- 0.4 ...
+-- end
+-- elseif typ == "d" then -- to be documented
+-- -- dimension
+-- context("\\the\\dimexpr %s",var)
+-- elseif typ == "n" then -- to be documented
+-- -- number
+-- context("\\the\\numexpr %s",var)
+-- elseif typ == "s" then -- to be documented
+-- -- string
+-- context(var)
+-- elseif typ == "c" then -- to be documented
+-- -- color
+-- MPcolor(var)
+-- else
+-- context(var)
+-- end
+-- end
+-- end
+
+-- we can actually get the dimen/count values here
+
+local dimenorname =
+ lpegpatterns.validdimen / function(s)
+ context("\\the\\dimexpr %s",s)
+ end
+ + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s)
+ local t = textype(s)
+ if t == "dimen" then
+ context("\\the\\dimexpr %s\\%s",f,s)
+ elseif t == "count" then
+ context("\\the\\numexpr \\%s * %s\\relax",s,f) -- <n>\scratchcounter is not permitted
+ end
+ end
+
+local splitter = lpeg.splitat(":",true)
+
function commands.prepareMPvariable(v) -- slow but ok
if v == "" then
MPcolor("black")
else
- local typ, var = match(v,"(.):(.*)")
- if not typ then
+ local typ, var = lpegmatch(splitter,v)
+ if not var then
-- parse
if colorhash[v] then
MPcolor(v)
elseif tonumber(v) then
context(v)
- elseif lpegmatch(validdimen,v) then
- return context("\\the\\dimexpr %s",v)
- else
- for s in gmatch(v,"\\(.-)") do
- local t = textype(s)
- if t == "dimen" then
- return context("\\the\\dimexpr %s",v)
- elseif t == "count" then
- return context("\\the\\numexpr %s",v)
- end
- end
- return context("\\number %s",v) -- 0.4 ...
+ elseif not lpegmatch(dimenorname,v) then
+ context("\\number %s",v) -- 0.4 ...
end
- elseif typ == "d" then
+ elseif typ == "d" then -- to be documented
-- dimension
context("\\the\\dimexpr %s",var)
- elseif typ == "n" then
+ elseif typ == "n" then -- to be documented
-- number
context("\\the\\numexpr %s",var)
- elseif typ == "s" then
+ elseif typ == "s" then -- to be documented
-- string
context(var)
- elseif typ == "c" then
+ elseif typ == "c" then -- to be documented
-- color
MPcolor(var)
else
@@ -91,12 +143,23 @@ function commands.prepareMPvariable(v) -- slow but ok
end
end
-function metapost.formatnumber(f,n) -- just lua format
- f = gsub(f,"@(%d)","%%.%1")
- f = gsub(f,"@","%%")
- f = format(f,tonumber(n) or 0)
- f = gsub(f,"e([%+%-%d]+)",function(s)
- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
- end)
- context.mathematics(f)
+-- function metapost.formatnumber(f,n) -- just lua format
+-- f = gsub(f,"@(%d)","%%.%1")
+-- f = gsub(f,"@","%%")
+-- f = format(f,tonumber(n) or 0)
+-- f = gsub(f,"e([%+%-%d]+)",function(s)
+-- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
+-- end)
+-- context.mathematics(f)
+-- end
+
+-- formatters["\\times10^{%N}"](s) -- strips leading zeros too
+
+local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0)
+local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1)
+
+-- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1)
+
+function metapost.formatnumber(fmt,n) -- just lua format
+ context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n)))
end
diff --git a/Master/texmf-dist/tex/context/base/meta-ini.mkiv b/Master/texmf-dist/tex/context/base/meta-ini.mkiv
index a62b81ccf3c..00b1da032ab 100644
--- a/Master/texmf-dist/tex/context/base/meta-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-ini.mkiv
@@ -4,23 +4,64 @@
%D title=\METAPOST\ Graphics,
%D subtitle=Initialization,
%D author=Hans Hagen,
-%D date=\currentdate,
+%D date=\ currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% initializations:
+%
+% - pass settings from tex to mp (delayed expansion)
+% - used by context core (and modules)
+% - cummulative definitions
+% - flushed each graphic
+% - can be disabled per instance
+% - managed at the tex end
+%
+% extensions:
+%
+% - add mp functionality (immediate expansion)
+% - cummulative
+% - all instances or subset of instances
+% - can be disabled per instance
+% - managed at the lua/mp end
+% - could be managed at the tex end but no real reason and also messy
+%
+% definitions:
+%
+% - add mp functionality (delayed expansion)
+% - cummulative
+% - per instance
+% - managed at the tex end
+%
+% inclusions:
+%
+% - add mp functionality (delayed expansion)
+% - cummulative only when [+]
+% - per instance
+% - managed at the tex end
+%
+% order of execution:
+%
+% definitions
+% extensions
+% inclusions
+% beginfig
+% initializations
+% graphic
+% endfig
+
+% The instance will be implemented stepwise ... I should redo some code in order to
+% make the macros look better than they do now.
+
\writestatus{loading}{MetaPost Graphics / Initializations}
\registerctxluafile{meta-ini}{1.001}
\unprotect
-\newtoks \t_meta_extensions % mp, once
-\newtoks \t_meta_initializations % tex, each
-\newtoks \t_meta_userinclusions % mp, user
-
\newtoks \everyMPgraphic % mp % public or not ?
\appendtoks
@@ -33,11 +74,16 @@
%installcorenamespace{graphicvariable} % todo
\installcorenamespace{mpinstance}
-\installcorenamespace{mpinstancetokens}
+\installcorenamespace{mpinclusions}
+\installcorenamespace{mpdefinitions}
\installcorenamespace{mpgraphic}
\installcorenamespace{mpstaticgraphic}
\installcorenamespace{mpclip}
+\newtoks \t_meta_initializations % tex, each
+\def \t_meta_inclusions {\csname\??mpinclusions \currentMPinstance\endcsname} % token register
+\def \t_meta_definitions {\csname\??mpdefinitions\currentMPinstance\endcsname} % token register
+
% The next command is, of course, dedicated to Mojca, who
% needs it for gnuplot. Anyway, the whole multiple engine
% mechanism is to keep her gnuplot from interfering.
@@ -46,20 +92,25 @@
{\dosinglegroupempty\meta_start_definitions}
\def\meta_start_definitions#1#2\stopMPdefinitions
- {\edef\currentMPinstance{#1}%
+ {\let\m_meta_saved_instance\currentMPinstance
+ \edef\currentMPinstance{#1}%
\ifx\currentMPinstance\empty
\let\currentMPinstance\defaultMPinstance
\fi
- \global\t_meta_instance\expandafter{\the\t_meta_instance#2}}
+ \global\t_meta_definitions\expandafter{\the\t_meta_definitions#2}%
+ \let\currentMPinstance\m_meta_saved_instance}
\let\stopMPdefinitions\relax
-\unexpanded\def\startMPextensions#1\stopMPextensions
- {\global\t_meta_extensions\expandafter{\the\t_meta_extensions#1}}
+\unexpanded\def\startMPextensions
+ {\dosinglegroupempty\meta_start_extensions}
+
+\def\meta_start_extensions#1#2\stopMPextensions % we could use buffers instead
+ {\ctxlua{metapost.setextensions("#1",\!!bs#2\!!es)}}
\let\stopMPextensions\relax
-\unexpanded\def\startMPinitializations#1\stopMPinitializations
+\unexpanded\def\startMPinitializations#1\stopMPinitializations % for all instances, when enables
{\global\t_meta_initializations\expandafter{\the\t_meta_initializations#1}}
\let\stopMPinitializations\relax
@@ -67,24 +118,42 @@
\unexpanded\def\startMPinclusions
{\dosingleempty\meta_start_inclusions}
-\def\meta_start_inclusions[#1]#2\stopMPinclusions
+\unexpanded\def\meta_start_inclusions[#1]%
{\edef\m_meta_option{#1}%
+ \dosinglegroupempty\meta_start_inclusions_indeed}
+
+\def\meta_start_inclusions_indeed#1#2\stopMPinclusions
+ {\let\m_meta_saved_instance\currentMPinstance
+ \edef\currentMPinstance{#1}%
+ \ifx\currentMPinstance\empty
+ \let\currentMPinstance\defaultMPinstance
+ \fi
\ifx\m_meta_option\!!plustoken \else
- \global\t_meta_userinclusions\emptytoks
+ \global\t_meta_inclusions\emptytoks
\fi
- \global\t_meta_userinclusions\expandafter{\the\t_meta_userinclusions#2}}
+ \global\t_meta_inclusions\expandafter{\the\t_meta_inclusions#2}%
+ \let\currentMPinstance\m_meta_saved_instance}
\let\stopMPinclusions\relax
\unexpanded\def\MPinclusions
{\dosingleempty\meta_inclusions}
-\def\meta_inclusions[#1]#2%
+\def\meta_inclusions[#1]%
{\edef\m_meta_option{#1}%
+ \dosinglegroupempty\meta_inclusions_indeed}
+
+\def\meta_inclusions_indeed#1#2%
+ {\let\m_meta_saved_instance\currentMPinstance
+ \edef\currentMPinstance{#1}%
+ \ifx\currentMPinstance\empty
+ \let\currentMPinstance\defaultMPinstance
+ \fi
\ifx\m_meta_option\!!plustoken \else
- \global\t_meta_userinclusions\emptytoks
+ \global\t_meta_inclusions\emptytoks
\fi
- \global\t_meta_userinclusions\expandafter{\the\t_meta_userinclusions#2}}
+ \global\t_meta_inclusions\expandafter{\the\t_meta_inclusions#2}%
+ \let\currentMPinstance\m_meta_saved_instance}
\def\meta_preset_definitions
{\edef\overlaywidth {\overlaywidth \space}%
@@ -93,58 +162,57 @@
\edef\currentwidth {\the\hsize \space}%
\edef\currentheight {\the\vsize \space}}
-\def\t_meta_instance{\csname\??mpinstancetokens\currentMPinstance\endcsname} % token register
-
\installcommandhandler \??mpinstance {MPinstance} \??mpinstance
\setupMPinstance
[\s!format=metafun,
\s!extensions=\v!no,
\s!initializations=\v!no,
+ \c!method=\s!default,
\c!textstyle=,
\c!textcolor=]
-% \unexpanded\def\defineMPinstance
-% {\dodoubleargument\meta_define_instance}
-
-% \def\meta_define_instance[#1][#2]%
-% {\ifcsname\??mpinstancetokens#1\endcsname\else\expandafter\newtoks\csname\??mpinstancetokens#1\endcsname\fi
-% \t_meta_instance\emptytoks % in case we redefine
-% \getparameters[\??mpinstance#1][\s!format=mpost,\s!extensions=\v!no,\s!initializations=\v!no,#2]}
-
\appendtoks
- \ifcsname\??mpinstancetokens\currentMPinstance\endcsname \else
- \expandafter\newtoks\csname\??mpinstancetokens\currentMPinstance\endcsname
+ \ifcsname\??mpdefinitions\currentMPinstance\endcsname \else
+ \expandafter\newtoks\csname\??mpdefinitions\currentMPinstance\endcsname
+ \fi
+ \ifcsname\??mpinclusions\currentMPinstance\endcsname \else
+ \expandafter\newtoks\csname\??mpinclusions\currentMPinstance\endcsname
\fi
- \t_meta_instance\emptytoks % in case we redefine
+ \t_meta_definitions\emptytoks % in case we redefine
+ \t_meta_inclusions \emptytoks % in case we redefine
\to \everydefineMPinstance
\unexpanded\def\resetMPinstance[#1]%
{\writestatus\m!metapost{reset will be implemented when needed}}
\def\meta_analyze_graphicname[#1]%
- {\meta_analyze_graphicname_indeed[#1::::]}
+ %{\normalexpanded{\meta_analyze_graphicname_indeed[#1::::]}}
+ {\normalexpanded{\meta_analyze_graphicname_indeed[#1}::::]}
-\def\meta_analyze_graphicname_indeed[#1::#2::#3]% instance ::
+\unexpanded\def\meta_analyze_graphicname_indeed[#1::#2::#3]% instance ::
{\edef\currentMPgraphicname{#2}%
\ifx\currentMPgraphicname\empty
\edef\currentMPgraphicname{#1}%
\let\currentMPinstance\defaultMPinstance
- \else
+ \else\ifcsname\??mpdefinitions#1\endcsname
\edef\currentMPinstance{#1}%
- \fi
+ \else
+ \let\currentMPinstance\defaultMPinstance
+ \fi\fi
\edef\currentMPformat{\MPinstanceparameter\s!format}}
\def\currentMPinstance{\defaultMPinstance}
\def\currentMPformat {\currentMPinstance}
-\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[extrafun][\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[metapost][\s!format=mpost]
-\defineMPinstance[nofun] [\s!format=mpost]
+\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[extrafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[doublefun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!double]
+\defineMPinstance[decimalfun][\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!decimal]
+\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[metapost] [\s!format=mpost]
+\defineMPinstance[nofun] [\s!format=mpost]
-\newconditional\c_meta_include_extensions
\newconditional\c_meta_include_initializations
\def\meta_begin_graphic_group#1%
@@ -157,10 +225,16 @@
\def\MPaskedfigure{false}
\def\meta_flush_current_initializations
- {\ifconditional\c_meta_include_initializations\the\t_meta_initializations;\fi\theMPrandomseed;}
+ {\ifconditional\c_meta_include_initializations
+ \the\t_meta_initializations
+ \fi
+ \theMPrandomseed;}
-\def\meta_flush_current_preamble
- {\ifconditional\c_meta_include_extensions\the\t_meta_extensions;\the\t_meta_userinclusions;\fi\the\t_meta_instance;}
+\def\meta_flush_current_inclusions
+ {\the\t_meta_inclusions}
+
+\def\meta_flush_current_definitions
+ {\the\t_meta_definitions}
\def\meta_start_current_graphic
{\begingroup
@@ -168,14 +242,6 @@
\the\everyMPgraphic
\meta_preset_definitions
\setMPrandomseed % this has to change
- % we need to preexpand the token lists
- \edef\p_extensions{\MPinstanceparameter\s!extensions}%
- \ifx\p_extensions\v!yes
- \settrue \c_meta_include_extensions
- \global\letMPinstanceparameter\s!extensions\v!no % needs checking
- \else
- \setfalse\c_meta_include_extensions
- \fi
\edef\p_initializations{\MPinstanceparameter\s!initializations}%
\ifx\p_initializations\v!yes
\settrue \c_meta_include_initializations
@@ -189,25 +255,62 @@
\useMPinstancestyleandcolor\c!textstyle\c!textcolor}
\def\meta_stop_current_graphic
- {\global\t_meta_instance\emptytoks
+ {\global\t_meta_definitions\emptytoks
+ \global\t_meta_inclusions\emptytoks
\endgroup}
+\def\meta_process_graphic_start
+ {\setbox\b_meta_graphic\hbox\bgroup}
+
+\def\meta_process_graphic_stop
+ {\egroup
+ \meta_place_graphic}
+
+\unexpanded\def\meta_process_graphic_instance#1%
+ {\edef\currentMPinstance{#1}%
+ \ifx\currentMPinstance\empty
+ \let\currentMPinstance\defaultMPinstance
+ \fi
+ \edef\currentMPformat{\MPinstanceparameter\s!format}%
+ \meta_process_graphic}
+
\unexpanded\def\meta_process_graphic#1% todo: extensions and inclusions outside beginfig
{\meta_start_current_graphic
\forgetall
- \setbox\b_meta_graphic\hbox\bgroup % ; added 20100901 (as in mkii)
- \normalexpanded{\noexpand\ctxlua{metapost.graphic(
- "\currentMPinstance",
- "\currentMPformat",
- \!!bs#1;\!!es,
- \!!bs\meta_flush_current_initializations;\!!es,
- \!!bs\meta_flush_current_preamble;\!!es,
- \MPaskedfigure
- )}}%
- \egroup
- \placeMPgraphic
+ \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+ \meta_process_graphic_start
+ \normalexpanded{\noexpand\ctxlua{metapost.graphic {
+ instance = "\currentMPinstance",
+ format = "\currentMPformat",
+ data = \!!bs#1;\!!es,
+ initializations = \!!bs\meta_flush_current_initializations\!!es,
+% useextensions = "\MPinstanceparameter\s!extensions",
+\ifx\p_extensions\v!yes
+ extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
+\fi
+ inclusions = \!!bs\meta_flush_current_inclusions\!!es,
+ definitions = \!!bs\meta_flush_current_definitions\!!es,
+ figure = "\MPaskedfigure",
+ method = "\MPinstanceparameter\c!method",
+ }}}%
+ \meta_process_graphic_stop
\meta_stop_current_graphic}
+\let\meta_process_graphic_figure_start\relax
+\let\meta_process_graphic_figure_stop \relax
+
+\unexpanded\def\processMPfigurefile#1% special case: obeys beginfig .. endfig and makes pages
+ {\begingroup
+ \let\normal_meta_process_graphic_start\meta_process_graphic_start
+ \let\normal_meta_process_graphic_stop \meta_process_graphic_stop
+ \let\meta_process_graphic_start\relax
+ \let\meta_process_graphic_stop \relax
+ \def\meta_process_graphic_figure_start{\startTEXpage\normal_meta_process_graphic_start}%
+ \def\meta_process_graphic_figure_stop {\normal_meta_process_graphic_stop\stopTEXpage}
+ \def\MPaskedfigure{all}%
+ \meta_process_graphic{input "#1" ;}%
+ \endgroup}
+
\newif\ifsetMPrandomseed \setMPrandomseedtrue % false by default
\def\setMPrandomseed
@@ -286,13 +389,15 @@
\edef\height{#3\space}\let\overlayheight\height
\ifcsname\??mpclip#1\endcsname
\meta_start_current_graphic
- \xdef\MPclippath{\normalexpanded{\noexpand\ctxlua{metapost.theclippath(
- "\currentMPinstance",
- "\currentMPformat",
- \!!bs\getvalue{\??mpclip#1}\!!es,
- \!!bs\meta_flush_current_initializations\!!es,
- \!!bs\meta_flush_current_preamble\!!es
- )}}}%
+ \xdef\MPclippath{\normalexpanded{\noexpand\ctxlua{metapost.theclippath {
+ instance = "\currentMPinstance",
+ format = "\currentMPformat",
+ data = \!!bs\getvalue{\??mpclip#1}\!!es,
+ initializations = \!!bs\meta_flush_current_initializations\!!es,
+ useextensions = "\MPinstanceparameter\s!extensions",
+ inclusions = \!!bs\meta_flush_current_inclusions\!!es,
+ method = "\MPinstanceparameter\c!method",
+ }}}}%
\meta_stop_current_graphic
\ifx\MPclippath\empty
\xdef\MPclippath{#4}%
@@ -344,15 +449,10 @@
%D \setupMPvariables[meta:button][size=20pt]
%D \stoptyping
-% \startlines
-% \def\xxx{\lineheight} \doprepareMPvariable{xxx} \xxx
-% \def\xxx{2pt} \doprepareMPvariable{xxx} \xxx
-% \def\xxx{2} \doprepareMPvariable{xxx} \xxx
-% \def\xxx{\scratchcounter} \doprepareMPvariable{xxx} \xxx
-% \def\xxx{red} \doprepareMPvariable{xxx} \xxx
-% \def\xxx{0.4} \doprepareMPvariable{xxx} \xxx
-% \stoplines
-
+% \lineheight 2pt 2 \scratchcounter red 0.4 .5\bodyfontsize
+%
+% see cont-loc for test code
+%
% currently the inheritance of backgrounds does not work and
% we might drop it anyway (too messy)
@@ -362,34 +462,36 @@
\let \m_meta_current_variable \empty
\let \m_meta_current_variable_template\empty
+\installcorenamespace{graphicvariable}
+
\def \meta_prepare_variable_default {\MPcolor{black}} % just to be sure we use a color but ...
-\edef\meta_unknown_variable_template {\??gv:\s!unknown}
+\edef\meta_unknown_variable_template {\??graphicvariable:\s!unknown}
-\letvalue{\??gv:\s!unknown}\empty
+\letvalue{\??graphicvariable:\s!unknown}\empty
\unexpanded\def\setupMPvariables
{\dodoubleempty\meta_setup_variables}
\def\meta_setup_variables[#1][#2]%
{\ifsecondargument
- \getrawparameters[\??gv#1:][#2]%
+ \getrawparameters[\??graphicvariable#1:][#2]%
\else
- \getrawparameters[\??gv:][#1]%
+ \getrawparameters[\??graphicvariable:][#1]%
\fi}
\unexpanded\def\presetMPvariable
{\dodoubleargument\meta_preset_variable}
\def\meta_preset_variable[#1][#2=#3]%
- {\ifcsname\??gv#1:#2\endcsname \else
- \setvalue{\??gv#1:#2}{#3}%
+ {\ifcsname\??graphicvariable#1:#2\endcsname \else
+ \setvalue{\??graphicvariable#1:#2}{#3}%
\fi}
\def\MPrawvar#1#2% no checking
- {\csname\??gv#1:#2\endcsname}
+ {\csname\??graphicvariable#1:#2\endcsname}
\def\MPvariable#1% todo: could be a framed chain
- {\csname\??gv\currentmpvariableclass:#1\endcsname}
+ {\csname\??graphicvariable\currentmpvariableclass:#1\endcsname}
\unexpanded\def\useMPvariables
{\dodoubleargument\meta_use_variables}
@@ -403,7 +505,7 @@
\unexpanded\def\meta_prepare_variable#1%
{\edef\m_meta_current_variable_template
- {\??gv\currentmpvariableclass:#1}%
+ {\??graphicvariable\currentmpvariableclass:#1}%
\edef\m_meta_current_variable
{\csname\ifcsname\m_meta_current_variable_template\endcsname
\m_meta_current_variable_template\else\meta_unknown_variable_template
@@ -414,6 +516,28 @@
\expandafter\meta_prepare_variable_yes
\fi}
+\unexpanded\def\meta_prepare_instance_variables
+ {\expandafter\processcommalist\expandafter[\m_meta_instance_variables]\meta_prepare_instance_variable}
+
+\unexpanded\def\meta_prepare_instance_variable#1%
+ {\edef\m_meta_current_variable_template
+ {\??graphicvariable\currentmpvariableclass:#1}%
+ \edef\m_meta_current_variable
+ {\csname
+ \ifcsname\m_meta_current_variable_template\endcsname
+ \m_meta_current_variable_template
+ \else\ifcsname\??graphicvariable\currentMPgraphicname:#1\endcsname
+ \??graphicvariable\currentMPgraphicname:#1%
+ \else
+ \meta_unknown_variable_template
+ \fi\fi
+ \endcsname}%
+ \ifx\m_meta_current_variable\empty
+ \expandafter\meta_prepare_variable_nop
+ \else
+ \expandafter\meta_prepare_variable_yes
+ \fi}
+
\def\meta_prepare_variable_nop
{\expandafter \let\csname\m_meta_current_variable_template\endcsname\meta_prepare_variable_default}
@@ -496,7 +620,7 @@
\unexpanded\def\meta_obey_box_origin
{\setbox\b_meta_graphic\hbox\bgroup
- \hskip\MPllx\raise\MPlly\box\b_meta_graphic
+ \kern\MPllx\raise\MPlly\box\b_meta_graphic
\egroup}
\unexpanded\def\obeyMPboxdepth {\let\meta_relocate_box\meta_obey_box_depth }
@@ -506,7 +630,7 @@
\let\meta_relocate_box\relax
-\unexpanded\def\placeMPgraphic % the converter also displaces so in fact we revert
+\unexpanded\def\meta_place_graphic % the converter also displaces so in fact we revert
{\meta_relocate_box
\box\b_meta_graphic}
@@ -530,7 +654,13 @@
\unexpanded\def\startuniqueMPgraphic
{\dodoublegroupempty\meta_start_unique_graphic}
-\def\meta_start_unique_graphic#1#2#3\stopuniqueMPgraphic
+% \def\meta_start_unique_graphic#1#2#3\stopuniqueMPgraphic
+% {\setgvalue{\??mpgraphic#1}{\meta_handle_unique_graphic{#1}{#2}{#3}}}
+
+\def\meta_start_unique_graphic#1%
+ {\normalexpanded{\meta_start_unique_graphic_indeed{#1}}}
+
+\unexpanded\def\meta_start_unique_graphic_indeed#1#2#3\stopuniqueMPgraphic
{\setgvalue{\??mpgraphic#1}{\meta_handle_unique_graphic{#1}{#2}{#3}}}
\let\stopuniqueMPgraphic\relax
@@ -540,14 +670,18 @@
\def\meta_unique_graphic#1#2%
{\meta_begin_graphic_group{#1}%
- \setupMPvariables[\currentMPgraphicname][#2]%
- \getvalue{\??mpgraphic\currentMPgraphicname}\empty
+% \setupMPvariables[\currentMPgraphicname][#2]%
+ \setupMPvariables[#1][#2]%
+ \getvalue{\??mpgraphic#1}\empty
\meta_end_graphic_group}
\def\meta_handle_use_graphic#1#2#3%
{\begingroup
\edef\currentmpvariableclass{#1}%
- \meta_prepare_variables{#2}%
+ \edef\m_meta_instance_variables{#2}%
+ \ifx\m_meta_instance_variables\empty \else
+ \meta_prepare_instance_variables
+ \fi
\meta_enable_include % redundant
\meta_process_graphic{#3}%
\endgroup}
@@ -555,7 +689,22 @@
\unexpanded\def\startuseMPgraphic
{\dodoublegroupempty\meta_start_use_graphic}
-\def\meta_start_use_graphic#1#2#3\stopuseMPgraphic
+% \def\meta_start_use_graphic#1#2#3\stopuseMPgraphic
+% {\setgvalue{\??mpgraphic#1}{\meta_handle_use_graphic{#1}{#2}{#3}}}
+%
+% better, expansion of #1:
+%
+% \def\meta_start_use_graphic#1#2#3\stopuseMPgraphic
+% %{\setgvalue{\??mpgraphic#1}{\meta_handle_use_graphic{#1}{#2}{#3}}}
+% %{\setxvalue{\??mpgraphic#1}{\noexpand\meta_handle_use_graphic{#1}{\normalunexpanded{#2}}{\normalunexpanded{#3}}}}
+% {\global\expandafter\gdef\csname\??mpgraphic#1\expandafter\endcsname\expandafter{\expandafter\meta_handle_use_graphic\expandafter{\normalexpanded{#1}}{#2}{#3}}}
+%
+% cleaner:
+
+\def\meta_start_use_graphic#1%
+ {\normalexpanded{\meta_start_use_graphic_indeed{#1}}}
+
+\unexpanded\def\meta_start_use_graphic_indeed#1#2#3\stopuseMPgraphic
{\setgvalue{\??mpgraphic#1}{\meta_handle_use_graphic{#1}{#2}{#3}}}
\let\stopuseMPgraphic\relax
@@ -563,7 +712,13 @@
\unexpanded\def\startusableMPgraphic % redundant but handy
{\dodoublegroupempty\meta_start_usable_graphic}
-\def\meta_start_usable_graphic#1#2#3\stopusableMPgraphic
+% \def\meta_start_usable_graphic#1#2#3\stopusableMPgraphic
+% {\setgvalue{\??mpgraphic#1}{\meta_handle_use_graphic{#1}{#2}{#3}}}
+
+\def\meta_start_usable_graphic#1%
+ {\normalexpanded{\meta_start_usable_graphic_indeed{#1}}}
+
+\unexpanded\def\meta_start_usable_graphic_indeed#1#2#3\stopusableMPgraphic
{\setgvalue{\??mpgraphic#1}{\meta_handle_use_graphic{#1}{#2}{#3}}}
\let\stopusableMPgraphic\relax
@@ -571,7 +726,10 @@
\def\meta_handle_reusable_graphic#1#2#3%
{\begingroup
\edef\currentmpvariableclass{#1}%
- \meta_prepare_variables{#2}%
+ \edef\m_meta_instance_variables{#2}%
+ \ifx\m_meta_instance_variables\empty \else
+ \meta_prepare_instance_variables
+ \fi
\meta_enable_include % redundant
\global\advance\c_meta_object_counter\plusone
\setobject{MP}{\number\c_meta_object_counter}\hbox{\meta_process_graphic{#3}}% was vbox, graphic must end up as hbox
@@ -582,7 +740,13 @@
\unexpanded\def\startreusableMPgraphic
{\dodoublegroupempty\meta_start_reusable_graphic}
-\def\meta_start_reusable_graphic#1#2#3\stopreusableMPgraphic
+% \def\meta_start_reusable_graphic#1#2#3\stopreusableMPgraphic
+% {\setgvalue{\??mpgraphic#1}{\meta_handle_reusable_graphic{#1}{#2}{#3}}}
+
+\def\meta_start_reusable_graphic#1%
+ {\normalexpanded{\meta_start_reusable_graphic_indeed{#1}}}
+
+\unexpanded\def\meta_start_reusable_graphic_indeed#1#2#3\stopreusableMPgraphic
{\setgvalue{\??mpgraphic#1}{\meta_handle_reusable_graphic{#1}{#2}{#3}}}
\let\stopreusableMPgraphic\relax
@@ -592,8 +756,9 @@
\def\meta_use_graphic#1#2%
{\meta_begin_graphic_group{#1}%
- \doifsomething{#2}{\setupMPvariables[\currentMPgraphicname][#2]}%
- \getvalue{\??mpgraphic\currentMPgraphicname}\empty
+% \doifsomething{#2}{\setupMPvariables[\currentMPgraphicname][#2]}%
+ \doifsomething{#2}{\setupMPvariables[#1][#2]}%
+ \getvalue{\??mpgraphic#1}\empty
\meta_end_graphic_group}
\let\reuseMPgraphic \useMPgraphic % we can save a setup here if needed
@@ -616,9 +781,16 @@
\unexpanded\def\startuniqueMPpagegraphic
{\dodoublegroupempty\meta_start_unique_page_graphic}
-\def\meta_start_unique_page_graphic#1#2#3\stopuniqueMPpagegraphic % inefficient, double storage
- {\setgvalue{\??mpgraphic o:#1}{\meta_handle_unique_graphic{o:#1}{#2}{#3}}% % but these also keep the state
- \setgvalue{\??mpgraphic e:#1}{\meta_handle_unique_graphic{e:#1}{#2}{#3}}} % and meaning will be redefined
+% \def\meta_start_unique_page_graphic#1#2#3\stopuniqueMPpagegraphic % inefficient, double storage
+% {\setgvalue{\??mpgraphic o:#1}{\meta_handle_unique_graphic{o:#1}{#2}{#3}}% % but these also keep the state
+% \setgvalue{\??mpgraphic e:#1}{\meta_handle_unique_graphic{e:#1}{#2}{#3}}} % and meaning will be redefined
+
+\def\meta_start_unique_page_graphic#1%
+ {\normalexpanded{\meta_start_unique_page_graphic_indeed{#1}}}
+
+\unexpanded\def\meta_start_unique_page_graphic_indeed#1#2#3\stopuniqueMPpagegraphic % inefficient, double storage
+ {\setgvalue{\??mpgraphic o:#1}{\meta_handle_unique_graphic{o:#1}{#2}{#3}}% % but these also keep the state
+ \setgvalue{\??mpgraphic e:#1}{\meta_handle_unique_graphic{e:#1}{#2}{#3}}} % and meaning will be redefined
\let\stopuniqueMPpagegraphic\relax
@@ -628,8 +800,9 @@
\def\meta_unique_page_graphic#1#2%
{\meta_begin_graphic_group{#1}%
\let\overlaystamp\overlaypagestamp
- \setupMPvariables[\m_meta_page_prefix:\currentMPgraphicname][#2]% prefix is new here
- \getvalue{\??mpgraphic\m_meta_page_prefix:\currentMPgraphicname}{}%
+% \setupMPvariables[\m_meta_page_prefix:\currentMPgraphicname][#2]% prefix is new here
+ \setupMPvariables[\m_meta_page_prefix:#1][#2]% prefix is new here
+ \getvalue{\??mpgraphic\m_meta_page_prefix:#1}{}%
\meta_end_graphic_group}
%D One way of defining a stamp is:
@@ -651,7 +824,7 @@
{\processcommalist[#1]\meta_extend_overlay_stamp}
\def\meta_extend_overlay_stamp#1%
- {\meta_prepare_variable{#1}%
+ {\meta_prepare_instance_variable{#1}%
\edef\overlaystamp{\overlaystamp:\MPvariable{#1}}}
%D \macros
@@ -722,9 +895,9 @@
\unexpanded\def\startMPenvironment
{\begingroup
\catcode\endoflineasciicode \ignorecatcode
- \dosingleempty\dostartMPenvironment}
+ \dosingleempty\meta_start_environment}
-\def\dostartMPenvironment[#1]#2\stopMPenvironment
+\def\meta_start_environment[#1]#2\stopMPenvironment
{\endgroup
\edef\m_meta_option{#1}
\ifx\m_meta_option\s!reset
@@ -770,34 +943,44 @@
\fi}
\def\meta_start_code_instance#1#2\stopMPcode
- {\meta_begin_graphic_group{#1::\s!dummy}% name does not matter
+ {\begingroup
+ \edef\currentMPinstance{#1}%
+ \let\currentMPgraphicname\empty
+ \edef\currentMPformat{\MPinstanceparameter\s!format}%
\meta_enable_include
\meta_process_graphic{#2}%
- \meta_end_graphic_group}
+ \endgroup}
\def\meta_start_code_standard#1#2\stopMPcode
- {\let\currentMPinstance\defaultMPinstance
- \meta_process_graphic{#2}}
+ {\begingroup
+ \let\currentMPinstance\defaultMPinstance
+ \let\currentMPgraphicname\empty
+ \edef\currentMPformat{\MPinstanceparameter\s!format}%
+ \meta_process_graphic{#2}%
+ \endgroup}
\let\stopMPcode\relax
\unexpanded\def\MPcode
- {\dosinglegroupempty\meta_code}
+ {\dodoublegroupempty\meta_code}
\def\meta_code
- {\iffirstargument
+ {\ifsecondargument
\expandafter\meta_code_instance
\else
\expandafter\meta_code_standard
\fi}
\def\meta_code_instance#1#2%
- {\meta_begin_graphic_group{#1::\s!dummy}% name does not matter
+ {\meta_begin_graphic_group{#1}%
+ \meta_enable_include
\meta_process_graphic{#2}%
\meta_end_graphic_group}
-\def\meta_code_standard#1% #2
- {\meta_process_graphic}
+\def\meta_code_standard#1#2%
+ {\let\currentMPinstance\defaultMPinstance
+ \meta_enable_include
+ \meta_process_graphic{#1}}
% a bit nasty (also needed for compatibility:
@@ -903,10 +1086,6 @@
OverlayLineWidth:=\overlaylinewidth;
%
\m_meta_colo_initializations
-% OverlayLineColor:=\MPcolor{\overlaylinecolor};
-% OverlayColor:=\MPcolor{\overlaycolor};
-% vardef OverlayLineColor=\MPcolor{\overlaylinecolor} enddef;
-% vardef OverlayColor=\MPcolor{\overlaycolor} enddef;
%
BaseLineSkip:=\the\baselineskip;
LineHeight:=\the\baselineskip;
@@ -1172,11 +1351,9 @@
%D
%D Here is a generic setup command:
-\newtoks\everysetupMPgraphics
+\installcorenamespace{MPgraphics}
-\unexpanded\def\setupMPgraphics[#1]%
- {\getparameters[\??mp][#1]%
- \the\everysetupMPgraphics}
+\installsetuponlycommandhandler \??MPgraphics {MPgraphics}
%D Here we hook in the outer color. When \type {color} is set to \type
%D {global} we get the outer color automatically. If you change this
@@ -1184,7 +1361,7 @@
%D behave in unexpected ways.
\appendtoks
- \doifelse\@@mpcolor\v!global{\MPcolormethod\plusone}{\MPcolormethod\zerocount}%
+ \doifelse{\directMPgraphicsparameter\c!color}\v!global{\MPcolormethod\plusone}{\MPcolormethod\zerocount}%
\to \everysetupMPgraphics
\setupMPgraphics
diff --git a/Master/texmf-dist/tex/context/base/meta-pag.mkiv b/Master/texmf-dist/tex/context/base/meta-pag.mkiv
index 6cea270e3c9..7124902e744 100644
--- a/Master/texmf-dist/tex/context/base/meta-pag.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-pag.mkiv
@@ -33,6 +33,14 @@
\stopMPinitializations
\startMPinitializations
+ CurrentColumn:=\number\mofcolumns;
+ NOfColumns:=\number\nofcolumns;
+ % todo: ColumnDistance
+\stopMPinitializations
+
+% maybe always set as frozen anyway
+
+\startMPinitializations
def LoadPageState =
OnRightPage:=\MPonrightpage;
OnOddPage:=\MPonoddpage;
@@ -75,8 +83,8 @@
InnerEdgeWidth:=\the\inneredgewidth;
OuterEdgeDistance:=\the\outeredgedistance;
OuterEdgeWidth:=\the\outeredgewidth;
- PageOffset:=\the\pageoffset;
- PageDepth:=\the\pagedepth;
+ PageOffset:=\the\pagebackgroundoffset;
+ PageDepth:=\the\pagebackgrounddepth;
LayoutColumns:=\the\layoutcolumns;
LayoutColumnDistance:=\the\layoutcolumndistance;
LayoutColumnWidth:=\the\layoutcolumnwidth;
diff --git a/Master/texmf-dist/tex/context/base/meta-pdf.lua b/Master/texmf-dist/tex/context/base/meta-pdf.lua
index 13d39e445ea..0287b82658e 100644
--- a/Master/texmf-dist/tex/context/base/meta-pdf.lua
+++ b/Master/texmf-dist/tex/context/base/meta-pdf.lua
@@ -9,8 +9,12 @@ if not modules then modules = { } end modules ['meta-pdf'] = {
-- Finally we used an optimized version. The test code can be found in
-- meta-pdh.lua but since we no longer want to overload functione we use
-- more locals now. This module keeps changing as it is also a testbed.
+--
+-- We can make it even more efficient if needed, but as we don't use this
+-- code often in \MKIV\ it makes no sense.
-local concat, format, gsub, find, byte, gmatch, match = table.concat, string.format, string.gsub, string.find, string.byte, string.gmatch, string.match
+local concat, unpack = table.concat, table.unpack
+local format, gsub, find, byte, gmatch, match = string.format, string.gsub, string.find, string.byte, string.gmatch, string.match
local lpegmatch = lpeg.match
local round = math.round
@@ -50,8 +54,8 @@ end
resetall()
--- -- this does not work as expected (displacement of text)
--- -- beware, needs another comment hack
+-- -- this does not work as expected (displacement of text) beware, needs another
+-- -- comment hack
--
-- local function pdfcode(str)
-- context(pdfliteral(str))
@@ -76,7 +80,7 @@ end
local function flushconcat()
if m_stack_concat then
- mpscode(concat(m_stack_concat," ") .. " cm")
+ mpscode("%f %f %f %f %f %f cm",unpack(m_stack_concat)) -- no %s due to 1e-035 issues
m_stack_concat = nil
end
end
@@ -192,6 +196,7 @@ end
function mps.setdash(...) -- can be made faster, operate on t = { ... }
local n = select("#",...)
mpscode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d")
+ -- mpscode("[" .. concat({select(1,n-1)}," ") .. "] " .. select(n,...) .. " d")
end
function mps.resetdash()
@@ -544,7 +549,7 @@ function mptopdf.convertmpstopdf(name)
resetall()
statistics.stoptiming(mptopdf)
else
- report_mptopdf("file '%s' not found",name)
+ report_mptopdf("file %a not found",name)
end
end
diff --git a/Master/texmf-dist/tex/context/base/meta-pdh.mkiv b/Master/texmf-dist/tex/context/base/meta-pdh.mkiv
index 6d85a8dc151..b65fe6ac697 100644
--- a/Master/texmf-dist/tex/context/base/meta-pdh.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-pdh.mkiv
@@ -565,7 +565,7 @@
% {\ifcase\pdfoutput\or % will be hooked into the special driver
% \doiffileelse{#7}
% {\doifundefinedelse{mps:x:#7}
-% {\immediate\pdfximage\!!width\onebasepoint\!!height\onebasepoint{#7}%
+% {\immediate\pdfximage\s!width\onebasepoint\s!height\onebasepoint{#7}%
% \setxvalue{mps:x:#7}{\pdfrefximage\the\pdflastximage}}%
% {\message{[reusing figure #7]}}%
% \pdfliteral{q #1 #2 #3 #4 #5 #6 cm}%
diff --git a/Master/texmf-dist/tex/context/base/meta-tex.lua b/Master/texmf-dist/tex/context/base/meta-tex.lua
index 872e8154c4f..c29498ad109 100644
--- a/Master/texmf-dist/tex/context/base/meta-tex.lua
+++ b/Master/texmf-dist/tex/context/base/meta-tex.lua
@@ -31,7 +31,7 @@ if not modules then modules = { } end modules ['meta-tex'] = {
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0)
+local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
function metapost.escaped(str)
context(lpegmatch(pattern,str))
diff --git a/Master/texmf-dist/tex/context/base/mlib-ctx.lua b/Master/texmf-dist/tex/context/base/mlib-ctx.lua
index 493a4524853..04e0efcb433 100644
--- a/Master/texmf-dist/tex/context/base/mlib-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-ctx.lua
@@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
-- todo
local format, concat = string.format, table.concat
+local settings_to_hash = utilities.parsers.settings_to_hash
local report_metapost = logs.reporter("metapost")
@@ -19,19 +20,85 @@ local mplib = mplib
metapost = metapost or {}
local metapost = metapost
-metapost.defaultformat = "metafun"
+local v_no = interfaces.variables.no
-function metapost.graphic(instance,mpsformat,str,initializations,preamble,askedfig)
- local mpx = metapost.format(instance,mpsformat or metapost.defaultformat)
- metapost.graphic_base_pass(mpx,str,initializations,preamble,askedfig)
+metapost.defaultformat = "metafun"
+metapost.defaultinstance = "metafun"
+metapost.defaultmethod = "default"
+
+local function setmpsformat(specification)
+ local instance = specification.instance
+ local format = specification.format
+ local method = specification.method
+ if not instance or instance == "" then
+ instance = metapost.defaultinstance
+ specification.instance = instance
+ end
+ if not format or format == "" then
+ format = metapost.defaultformat
+ specification.format = format
+ end
+ if not method or method == "" then
+ method = metapost.defaultmethod
+ specification.method = method
+ end
+ specification.mpx = metapost.format(instance,format,method)
end
-function metapost.getclippath(instance,mpsformat,data,initializations,preamble)
- local mpx = metapost.format(instance,mpsformat or metapost.defaultformat)
- if mpx and data then
+local extensiondata = metapost.extensiondata or storage.allocate { }
+metapost.extensiondata = extensiondata
+
+storage.register("metapost/extensiondata",extensiondata,"metapost.extensiondata")
+
+function metapost.setextensions(instances,data)
+ if data and data ~= "" then
+ extensiondata[#extensiondata+1] = {
+ usedinall = not instances or instances == "",
+ instances = settings_to_hash(instances or ""),
+ extensions = data,
+ }
+ end
+end
+
+function metapost.getextensions(instance,state)
+ if state and state == v_no then
+ return ""
+ else
+ local t = { }
+ for i=1,#extensiondata do
+ local e = extensiondata[i]
+ local status = e.instances[instance]
+ if (status ~= true) and (e.usedinall or status) then
+ t[#t+1] = e.extensions
+ e.instances[instance] = true
+ end
+ end
+ return concat(t," ")
+ end
+end
+
+function commands.getmpextensions(instance,state)
+ context(metapost.getextensions(instance,state))
+end
+
+function metapost.graphic(specification)
+ setmpsformat(specification)
+ metapost.graphic_base_pass(specification)
+end
+
+function metapost.getclippath(specification) -- why not a special instance for this
+ setmpsformat(specification)
+ local mpx = specification.mpx
+ local data = specification.data or ""
+ if mpx and data ~= "" then
starttiming(metapost)
starttiming(metapost.exectime)
- local result = mpx:execute(format("%s;beginfig(1);%s;%s;endfig;",preamble or "",initializations or "",data))
+ local result = mpx:execute ( format ( "%s;%s;beginfig(1);%s;%s;endfig;",
+ specification.extensions or "",
+ specification.inclusions or "",
+ specification.initializations or "",
+ data
+ ) )
stoptiming(metapost.exectime)
if result.status > 0 then
report_metapost("%s: %s", result.status, result.error or result.term or result.log)
@@ -75,11 +142,15 @@ end
statistics.register("metapost processing time", function()
local n = metapost.n
if n and n > 0 then
- local e, t = metapost.makempy.nofconverted, statistics.elapsedtime
- local str = format("%s seconds, loading: %s seconds, execution: %s seconds, n: %s",
- t(metapost), t(mplib), t(metapost.exectime), n)
- if e > 0 then
- return format("%s, external: %s seconds (%s calls)", str, t(metapost.makempy), e)
+ local nofconverted = metapost.makempy.nofconverted
+ local elapsedtime = statistics.elapsedtime
+ local elapsed = statistics.elapsed
+ local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
+ elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
+ elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
+ if nofconverted > 0 then
+ return format("%s, external: %s (%s calls)",
+ str, elapsedtime(metapost.makempy), nofconverted)
else
return str
end
@@ -97,9 +168,11 @@ local environments = { }
function metapost.tex.set(str)
environments[#environments+1] = str
end
+
function metapost.tex.reset()
environments = { }
end
+
function metapost.tex.get()
return concat(environments,"\n")
end
diff --git a/Master/texmf-dist/tex/context/base/mlib-pdf.lua b/Master/texmf-dist/tex/context/base/mlib-pdf.lua
index 7a23ba94772..96330995105 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pdf.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-pdf.lua
@@ -6,31 +6,49 @@ if not modules then modules = { } end modules ['mlib-pdf'] = {
license = "see context related readme files",
}
+-- maybe %s is better than %f
+
local format, concat, gsub = string.format, table.concat, string.gsub
local abs, sqrt, round = math.abs, math.sqrt, math.round
local setmetatable = setmetatable
local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match
-
-local allocate = utilities.storage.allocate
+local formatters = string.formatters
local report_metapost = logs.reporter("metapost")
local mplib, context = mplib, context
-local copy_node = node.copy
-local write_node = node.write
+local allocate = utilities.storage.allocate
+
+local copy_node = node.copy
+local write_node = node.write
+
+metapost = metapost or { }
+local metapost = metapost
+
+metapost.flushers = metapost.flushers or { }
+local pdfflusher = { }
+metapost.flushers.pdf = pdfflusher
+
+metapost.multipass = false
+metapost.n = 0
+metapost.optimize = true -- false
+
+local experiment = true -- uses context(node) that already does delayed nodes
-metapost = metapost or { }
-local metapost = metapost
+local savedliterals = nil -- needs checking
+local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
-metapost.multipass = false
-metapost.n = 0
-metapost.optimize = true -- false
+local pdfliteral = function(s)
+ local literal = copy_node(mpsliteral)
+ literal.data = s
+ return literal
+end
---~ Because in MKiV we always have two passes, we save the objects. When an extra
---~ mp run is done (due to for instance texts identifier in the parse pass), we
---~ get a new result table and the stored objects are forgotten. Otherwise they
---~ are reused.
+-- Because in MKiV we always have two passes, we save the objects. When an extra
+-- mp run is done (due to for instance texts identifier in the parse pass), we
+-- get a new result table and the stored objects are forgotten. Otherwise they
+-- are reused.
local function getobjects(result,figure,f)
if metapost.optimize then
@@ -64,30 +82,13 @@ function metapost.convert(result, trialrun, flusher, multipass, askedfig)
return true -- done
end
-metapost.flushers = { }
-metapost.flushers.pdf = { }
-
--- \def\MPLIBtoPDF#1{\ctxlua{metapost.flushliteral(#1)}}
-
-local savedliterals = nil -- needs checking
-
-local mpsliteral = nodes.pool.register(node.new("whatsit",8)) -- pdfliteral
-
-local pdfliteral = function(s)
- local literal = copy_node(mpsliteral)
- literal.data = s
- return literal
-end
-
-local experiment = true -- uses context(node) that already does delayed nodes
-
function metapost.flushliteral(d)
if savedliterals then
local literal = copy_node(mpsliteral)
literal.data = savedliterals[d]
write_node(literal)
else
- report_metapost("problem flushing literal %s",d)
+ report_metapost("problem flushing literal %a",d)
end
end
@@ -95,9 +96,9 @@ function metapost.flushreset() -- will become obsolete and internal
savedliterals = nil
end
-function metapost.flushers.pdf.comment(message)
+function pdfflusher.comment(message)
if message then
- message = format("%% mps graphic %s: %s", metapost.n, message)
+ message = formatters["%% mps graphic %s: %s"](metapost.n,message)
if experiment then
context(pdfliteral(message))
else
@@ -113,20 +114,20 @@ function metapost.flushers.pdf.comment(message)
end
end
-function metapost.flushers.pdf.startfigure(n,llx,lly,urx,ury,message)
+function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
savedliterals = nil
metapost.n = metapost.n + 1
context.startMPLIBtoPDF(llx,lly,urx,ury)
- if message then metapost.flushers.pdf.comment(message) end
+ if message then pdfflusher.comment(message) end
end
-function metapost.flushers.pdf.stopfigure(message)
- if message then metapost.flushers.pdf.comment(message) end
+function pdfflusher.stopfigure(message)
+ if message then pdfflusher.comment(message) end
context.stopMPLIBtoPDF()
context.MPLIBflushreset() -- maybe just at the beginning
end
-function metapost.flushers.pdf.flushfigure(pdfliterals) -- table
+function pdfflusher.flushfigure(pdfliterals) -- table
if #pdfliterals > 0 then
pdfliterals = concat(pdfliterals,"\n")
if experiment then
@@ -144,7 +145,7 @@ function metapost.flushers.pdf.flushfigure(pdfliterals) -- table
end
end
-function metapost.flushers.pdf.textfigure(font,size,text,width,height,depth) -- we could save the factor
+function pdfflusher.textfigure(font,size,text,width,height,depth) -- we could save the factor
text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost (i have to check if this is true for mplib)
context.MPtextext(font,size,text,0,-number.dimenfactors.bp*depth)
end
@@ -189,11 +190,11 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = format("%f %f m",pth.x_coord,pth.y_coord)
+ t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
elseif curved(ith,pth) then
- t[nt] = format("%f %f %f %f %f %f c",ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
else
- t[nt] = format("%f %f l",pth.x_coord,pth.y_coord)
+ t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
end
ith = pth
end
@@ -201,15 +202,15 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
local one = path[1]
if curved(pth,one) then
- t[nt] = format("%f %f %f %f %f %f c",pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
else
- t[nt] = format("%f %f l",one.x_coord,one.y_coord)
+ t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
end
elseif #path == 1 then
-- special case .. draw point
local one = path[1]
nt = nt + 1
- t[nt] = format("%f %f l",one.x_coord,one.y_coord)
+ t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
end
return t
end
@@ -223,18 +224,18 @@ local function flushconcatpath(path, t, open)
nt = 0
end
nt = nt + 1
- t[nt] = format("%f %f %f %f %f %f cm", sx, rx, ry, sy, tx ,ty)
+ t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
for i=1,#path do
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = format("%f %f m",mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
elseif curved(ith,pth) then
local a, b = mpconcat(ith.right_x,ith.right_y)
local c, d = mpconcat(pth.left_x,pth.left_y)
- t[nt] = format("%f %f %f %f %f %f c",a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
else
- t[nt] = format("%f %f l",mpconcat(pth.x_coord, pth.y_coord))
+ t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
end
ith = pth
end
@@ -244,15 +245,15 @@ local function flushconcatpath(path, t, open)
if curved(pth,one) then
local a, b = mpconcat(pth.right_x,pth.right_y)
local c, d = mpconcat(one.left_x,one.left_y)
- t[nt] = format("%f %f %f %f %f %f c",a,b,c,d,mpconcat(one.x_coord, one.y_coord))
+ t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
else
- t[nt] = format("%f %f l",mpconcat(one.x_coord,one.y_coord))
+ t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
end
elseif #path == 1 then
-- special case .. draw point
nt = nt + 1
local one = path[1]
- t[nt] = format("%f %f l",mpconcat(one.x_coord,one.y_coord))
+ t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
end
return t
end
@@ -273,7 +274,7 @@ function metapost.flush(result,flusher,askedfig)
if result then
local figures = result.fig
if figures then
- flusher = flusher or metapost.flushers.pdf
+ flusher = flusher or pdfflusher
local resetplugins = metapost.resetplugins or ignore -- before figure
local processplugins = metapost.processplugins or ignore -- each object
local synchronizeplugins = metapost.synchronizeplugins or ignore
@@ -290,7 +291,7 @@ function metapost.flush(result,flusher,askedfig)
local t = { }
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack
+ local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
metapost.llx = llx
metapost.lly = lly
metapost.urx = urx
@@ -319,7 +320,7 @@ function metapost.flush(result,flusher,askedfig)
elseif objecttype == "text" then
t[#t+1] = "q"
local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = format("%f %f %f %f %f %f cm",ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: format("%f %f m %f %f %f %f 0 0 cm",unpack(ot))
+ t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
flushfigure(t) -- flush accumulated literals
t = { }
textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
@@ -344,21 +345,21 @@ function metapost.flush(result,flusher,askedfig)
local ml = object.miterlimit
if ml and ml ~= miterlimit then
miterlimit = ml
- t[#t+1] = format("%f M",ml)
+ t[#t+1] = formatters["%f M"](ml)
end
local lj = object.linejoin
if lj and lj ~= linejoin then
linejoin = lj
- t[#t+1] = format("%i j",lj)
+ t[#t+1] = formatters["%i j"](lj)
end
local lc = object.linecap
if lc and lc ~= linecap then
linecap = lc
- t[#t+1] = format("%i J",lc)
+ t[#t+1] = formatters["%i J"](lc)
end
local dl = object.dash
if dl then
- local d = format("[%s] %i d",concat(dl.dashes or {}," "),dl.offset)
+ local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
if d ~= dashed then
dashed = d
t[#t+1] = dashed
@@ -374,7 +375,7 @@ function metapost.flush(result,flusher,askedfig)
if pen then
if pen.type == 'elliptical' then
transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = format("%f w",penwidth) -- todo: only if changed
+ t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
if objecttype == 'fill' then
objecttype = 'both'
end
@@ -451,24 +452,17 @@ function metapost.parse(result,askedfig)
local figures = result.fig
if figures then
local analyzeplugins = metapost.analyzeplugins -- each object
- for f=1, #figures do
+ for f=1,#figures do
local figure = figures[f]
local fignum = figure:charcode() or 0
if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack
- metapost.llx = llx
- metapost.lly = lly
- metapost.urx = urx
- metapost.ury = ury
+ metapost.llx = bbox[1]
+ metapost.lly = bbox[2]
+ metapost.urx = bbox[3]
+ metapost.ury = bbox[4]
local objects = getobjects(result,figure,f)
if objects then
- -- for o=1,#objects do
- -- local object = objects[o]
- -- local prescript = object.prescript
- -- if prescript then
- -- analyzeplugins(object)
- -- end
for o=1,#objects do
analyzeplugins(objects[o])
end
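
The recurring change in mlib-pdf.lua above is the move from ad-hoc string.format calls to precompiled string.formatters templates from ConTeXt's util-str module. A minimal sketch of the difference, not part of the patch and assuming a ConTeXt Lua environment where string.formatters is available:

local format     = string.format
local formatters = string.formatters            -- ConTeXt's util-str.lua

local f_moveto = formatters["%f %f m"]          -- template compiled once, reused per path point

local a = format("%f %f m", 10.5, 20.25)        -- parses the template on every call
local b = f_moveto(10.5, 20.25)                 -- only formats; both should yield the same literal,
                                                -- e.g. "10.500000 20.250000 m"
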
diff --git a/Master/texmf-dist/tex/context/base/mlib-pdf.mkiv b/Master/texmf-dist/tex/context/base/mlib-pdf.mkiv
index b9871eeadae..0913b3699ae 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pdf.mkiv
+++ b/Master/texmf-dist/tex/context/base/mlib-pdf.mkiv
@@ -21,15 +21,15 @@
%D Some code is shared between MPLIB and MPS. The following variables
%D are also available for introspection and other purposes.
-\newdimen\MPwidth
-\newdimen\MPheight
+\ifdefined\MPwidth \else \newdimen\MPwidth \fi
+\ifdefined\MPheight \else \newdimen\MPheight \fi
-\newdimen\MPllx
-\newdimen\MPlly
-\newdimen\MPurx
-\newdimen\MPury
+\ifdefined\MPllx \else \newdimen\MPllx \fi
+\ifdefined\MPlly \else \newdimen\MPlly \fi
+\ifdefined\MPurx \else \newdimen\MPurx \fi
+\ifdefined\MPury \else \newdimen\MPury \fi
-\newbox \MPbox
+\ifdefined\MPbox \else \newbox \MPbox \fi
\def\setMPboundingbox#1#2#3#4% at some point we might pass them as base or scaled points
{\global\MPllx #1\onebasepoint
@@ -49,7 +49,7 @@
\def\repositionMPboxindeed
{\setbox\MPbox\hbox\bgroup
- \hskip-\MPllx
+ \kern-\MPllx
\raise-\MPlly
\box\MPbox
\egroup}
@@ -91,7 +91,8 @@
\def\MPLIBtoPDF#1{\ctxlua{metapost.flushliteral(#1)}}
\def\startMPLIBtoPDF#1#2#3#4%
- {\dostarttagged\t!mpgraphic\empty
+ {\meta_process_graphic_figure_start
+ \dostarttagged\t!mpgraphic\empty
\naturalhbox attr \imageattribute 1 \bgroup
\dousecolorparameter\s!black\forcecolorhack
\setMPboundingbox{#1}{#2}{#3}{#4}%
@@ -104,7 +105,8 @@
\finalizeMPbox
\box\MPbox
\egroup
- \dostoptagged}
+ \dostoptagged
+ \meta_process_graphic_figure_stop}
\def\MPLIBflushreset % This can (will) move to the Lua end.
{\ctxlua{metapost.flushreset()}}
@@ -138,14 +140,21 @@
\let\stopMPLIBtoPDF \directstopMPLIBtoPDF
\meta_start_current_graphic
\forgetall
- \normalexpanded{\noexpand\ctxlua{metapost.graphic(
- "\currentMPinstance",
- "\currentMPformat",
- \!!bs#2;\!!es,
- \!!bs\meta_flush_current_initializations;\!!es,
- \!!bs\meta_flush_current_preamble;\!!es,
- "all"
- )}}%
+ \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+ \normalexpanded{\noexpand\ctxlua{metapost.graphic {
+ instance = "\currentMPinstance",
+ format = "\currentMPformat",
+ data = \!!bs#2;\!!es,
+ initializations = \!!bs\meta_flush_current_initializations\!!es,
+% useextensions = "\MPinstanceparameter\s!extensions",
+\ifx\p_extensions\v!yes
+ extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
+\fi
+ inclusions = \!!bs\meta_flush_current_inclusions\!!es,
+ definitions = \!!bs\meta_flush_current_definitions\!!es,
+ figure = "all",
+ method = "\MPinstanceparameter\c!method",
+ }}}%
\meta_stop_current_graphic
\meta_end_graphic_group}
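
With the macro rewritten above, the TeX end now hands metapost.graphic one specification table instead of a fixed argument list. A sketch of the call shape; the field names come from the patch (here and in mlib-pps.lua below), the values are invented:

metapost.graphic {
    instance        = "metafun",
    format          = "metafun",
    data            = "draw fullcircle scaled 3cm ;",
    initializations = "",
    extensions      = "",                -- only filled when the instance sets extensions=yes
    inclusions      = "",
    definitions     = "",
    figure          = "all",
    method          = "scaled",          -- presumably ends up as mplib's math_mode (see mlib-run.lua below)
}
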
diff --git a/Master/texmf-dist/tex/context/base/mlib-pps.lua b/Master/texmf-dist/tex/context/base/mlib-pps.lua
index 7821b3dbf24..93bddc2dd4c 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pps.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-pps.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['mlib-pps'] = {
license = "see context related readme files",
}
--- todo: report max textexts
+-- todo: make a hashed textext variant where we only process the text once (normally
+-- we cannot assume that no macros are involved which influence the next textext)
local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
local tonumber, type = tonumber, type
@@ -14,6 +15,7 @@ local round = math.round
local insert, concat = table.insert, table.concat
local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
local lpegmatch = lpeg.match
+local formatters = string.formatters
local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
@@ -80,14 +82,21 @@ function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattri
innertransparency = outertransparency -- not yet used
end
-local function checked_color_pair(color)
+local f_gray = formatters["%.3f g %.3f G"]
+local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
+local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_shade = formatters["MpSh%s"]
+
+local function checked_color_pair(color,...)
if not color then
return innercolor, outercolor
- elseif outercolormode == 3 then
- innercolor = color
+ end
+ if outercolormode == 3 then
+ innercolor = color(...)
return innercolor, innercolor
else
- return color, outercolor
+ return color(...), outercolor
end
end
@@ -142,7 +151,7 @@ local commasplitter = lpeg.tsplitat(",")
local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b)
-- must be the same but we don't check
- local name = format("MpSh%s",nofshades)
+ local name = f_shade(nofshades)
local ca = lpegmatch(commasplitter,v_a)
local cb = lpegmatch(commasplitter,v_b)
if #ca == 0 or #cb == 0 then
@@ -156,7 +165,7 @@ local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b)
end
local function checkandconvert(ca,cb)
- local name = format("MpSh%s",nofshades)
+ local name = f_shade(nofshades)
if not ca or not cb or type(ca) == "string" then
return { 0 }, { 1 }, "DeviceGray", name
else
@@ -206,10 +215,14 @@ local current_format, current_graphic, current_initializations
metapost.multipass = false
-local textexts = { }
+local textexts = { } -- all boxes, optionally with a different color
+local texslots = { } -- references to textexts in order or usage
+local texorder = { } -- references to textexts by mp index
+local textrial = 0
+local texfinal = 0
local scratchbox = 0
-local function freeboxes() -- todo: mp direct list ipv box
+local function freeboxes()
for n, box in next, textexts do
local tn = textexts[n]
if tn then
@@ -217,11 +230,15 @@ local function freeboxes() -- todo: mp direct list ipv box
-- texbox[scratchbox] = tn
-- texbox[scratchbox] = nil -- this frees too
if trace_textexts then
- report_textexts("freeing %s",n)
+ report_textexts("freeing box %s",n)
end
end
end
textexts = { }
+ texslots = { }
+ texorder = { }
+ textrial = 0
+ texfinal = 0
end
metapost.resettextexts = freeboxes
@@ -237,7 +254,7 @@ end
function metapost.gettext(box,slot)
texbox[box] = copy_list(textexts[slot])
if trace_textexts then
- report_textexts("putting %s in box %s",slot,box)
+ report_textexts("putting text %s in box %s",slot,box)
end
-- textexts[slot] = nil -- no, pictures can be placed several times
end
@@ -257,32 +274,32 @@ function models.all(cr)
elseif metapost.reducetogray then
if n == 1 then
local s = cr[1]
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
elseif n == 3 then
local r, g, b = cr[1], cr[2], cr[3]
if r == g and g == b then
- return checked_color_pair(format("%.3f g %.3f G",r,r))
+ return checked_color_pair(f_gray,r,r)
else
- return checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
end
else
local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
if c == m and m == y and y == 0 then
k = 1 - k
- return checked_color_pair(format("%.3f g %.3f G",k,k))
+ return checked_color_pair(f_gray,k,k)
else
- return checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
end
end
elseif n == 1 then
local s = cr[1]
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
elseif n == 3 then
local r, g, b = cr[1], cr[2], cr[3]
- return checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
else
local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- return checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
end
end
@@ -293,27 +310,27 @@ function models.rgb(cr)
elseif metapost.reducetogray then
if n == 1 then
local s = cr[1]
- checked_color_pair(format("%.3f g %.3f G",s,s))
+ checked_color_pair(f_gray,s,s)
elseif n == 3 then
local r, g, b = cr[1], cr[2], cr[3]
if r == g and g == b then
- return checked_color_pair(format("%.3f g %.3f G",r,r))
+ return checked_color_pair(f_gray,r,r)
else
- return checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
end
else
local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
if c == m and m == y and y == 0 then
k = 1 - k
- return checked_color_pair(format("%.3f g %.3f G",k,k))
+ return checked_color_pair(f_gray,k,k)
else
local r, g, b = cmyktorgb(c,m,y,k)
- return checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
end
end
elseif n == 1 then
local s = cr[1]
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
else
local r, g, b
if n == 3 then
@@ -321,7 +338,7 @@ function models.rgb(cr)
else
r, g, b = cr[1], cr[2], cr[3]
end
- return checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
end
end
@@ -332,27 +349,27 @@ function models.cmyk(cr)
elseif metapost.reducetogray then
if n == 1 then
local s = cr[1]
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
elseif n == 3 then
local r, g, b = cr[1], cr[2], cr[3]
if r == g and g == b then
- return checked_color_pair(format("%.3f g %.3f G",r,r))
+ return checked_color_pair(f_gray,r,r)
else
local c, m, y, k = rgbtocmyk(r,g,b)
- return checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
end
else
local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
if c == m and m == y and y == 0 then
- k = 1 - k
- return checked_color_pair(format("%.3f g %.3f G",k,k))
+ k = k - 1
+ return checked_color_pair(f_gray,k,k)
else
- return checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
end
end
elseif n == 1 then
local s = cr[1]
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
else
local c, m, y, k
if n == 3 then
@@ -360,7 +377,7 @@ function models.cmyk(cr)
else
c, m, y, k = cr[1], cr[2], cr[3], cr[4]
end
- return checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
end
end
@@ -375,7 +392,7 @@ function models.gray(cr)
else
s = cr[1]
end
- return checked_color_pair(format("%.3f g %.3f G",s,s))
+ return checked_color_pair(f_gray,s,s)
end
setmetatableindex(models, function(t,k)
@@ -461,27 +478,32 @@ local function sxsy(wd,ht,dp) -- helper for text
return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
end
-local no_trial_run = "mfun_trial_run := false ;"
-local do_trial_run = "if unknown mfun_trial_run : boolean mfun_trial_run fi ; mfun_trial_run := true ;"
-local text_data_template = "mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"
-local do_begin_fig = "; beginfig(1) ; "
-local do_end_fig = "; endfig ;"
-local do_safeguard = ";"
+local no_first_run = "mfun_first_run := false ;"
+local do_first_run = "mfun_first_run := true ;"
+local no_trial_run = "mfun_trial_run := false ;"
+local do_trial_run = "mfun_trial_run := true ;"
+local do_begin_fig = "; beginfig(1) ; "
+local do_end_fig = "; endfig ;"
+local do_safeguard = ";"
+
+local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
function metapost.textextsdata()
local t, nt, n = { }, 0, 0
- for n, box in next, textexts do
+ for n=1,#texorder do
+ local box = textexts[texorder[n]]
if box then
local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
if trace_textexts then
- report_textexts("passed data %s: (%0.4f,%0.4f,%0.4f)",n,wd,ht,dp)
+ report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
end
nt = nt + 1
- t[nt] = format(text_data_template,n,wd,n,ht,n,dp)
+ t[nt] = f_text_data(n,wd,n,ht,n,dp)
else
break
end
end
+-- inspect(t)
return t
end
@@ -512,33 +534,69 @@ local function checkaskedfig(askedfig) -- return askedfig, wrappit
end
end
-function metapost.graphic_base_pass(mpsformat,str,initializations,preamble,askedfig)
+function metapost.graphic_base_pass(specification)
+ local mpx = specification.mpx -- mandate
+ local data = specification.data or ""
+ local definitions = specification.definitions or ""
+-- local extensions = metapost.getextensions(specification.instance,specification.useextensions)
+ local extensions = specification.extensions or ""
+ local inclusions = specification.inclusions or ""
+ local initializations = specification.initializations or ""
+ local askedfig = specification.figure -- no default else no wrapper
+ --
nofruns = nofruns + 1
local askedfig, wrappit = checkaskedfig(askedfig)
- local done_1, done_2, forced_1, forced_2
- str, done_1, forced_1 = checktexts(str)
- if not preamble or preamble == "" then
- preamble, done_2, forced_2 = "", false, false
+ local done_1, done_2, done_3, forced_1, forced_2, forced_3
+ data, done_1, forced_1 = checktexts(data)
+ -- we had preamble = extensions + inclusions
+ if extensions == "" then
+ extensions, done_2, forced_2 = "", false, false
else
- preamble, done_2, forced_2 = checktexts(preamble)
+ extensions, done_2, forced_2 = checktexts(extensions)
+ end
+ if inclusions == "" then
+ inclusions, done_3, forced_3 = "", false, false
+ else
+ inclusions, done_3, forced_3 = checktexts(inclusions)
end
metapost.intermediate.needed = false
metapost.multipass = false -- no needed here
- current_format, current_graphic, current_initializations = mpsformat, str, initializations or ""
- if metapost.method == 1 or (metapost.method == 2 and (done_1 or done_2)) then
+ current_format = mpx
+ current_graphic = data
+ current_initializations = initializations
+ local method = metapost.method
+ if trace_runs then
+ if method == 1 then
+ report_metapost("forcing two runs due to library configuration")
+ elseif method ~= 2 then
+ report_metapost("ignoring run due to library configuration")
+ elseif not (done_1 or done_2 or done_3) then
+ report_metapost("forcing one run only due to analysis")
+ elseif done_1 then
+ report_metapost("forcing at max two runs due to main code")
+ elseif done_2 then
+ report_metapost("forcing at max two runs due to extensions")
+ else
+ report_metapost("forcing at max two runs due to inclusions")
+ end
+ end
+ if method == 1 or (method == 2 and (done_1 or done_2 or done_3)) then
if trace_runs then
- report_metapost("first run of job %s (asked: %s)",nofruns,tostring(askedfig))
+ report_metapost("first run of job %s, asked figure %a",nofruns,askedfig)
end
-- first true means: trialrun, second true means: avoid extra run if no multipass
- local flushed = metapost.process(mpsformat, {
- preamble,
+ local flushed = metapost.process(mpx, {
+ definitions,
+ extensions,
+ inclusions,
wrappit and do_begin_fig or "",
+ do_first_run,
do_trial_run,
current_initializations,
do_safeguard,
current_graphic,
wrappit and do_end_fig or "",
- }, true, nil, not (forced_1 or forced_2), false, askedfig)
+ }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig)
if metapost.intermediate.needed then
for _, action in next, metapost.intermediate.actions do
action()
@@ -551,23 +609,24 @@ function metapost.graphic_base_pass(mpsformat,str,initializations,preamble,asked
end
else
if trace_runs then
- report_metapost("running job %s (asked: %s)",nofruns,tostring(askedfig))
+ report_metapost("running job %s, asked figure %a",nofruns,askedfig)
end
- metapost.process(mpsformat, {
+ metapost.process(mpx, {
preamble,
wrappit and do_begin_fig or "",
+ do_first_run,
no_trial_run,
current_initializations,
do_safeguard,
current_graphic,
wrappit and do_end_fig or "",
- }, false, nil, false, false, askedfig )
+ }, false, nil, false, false, askedfig)
end
end
function metapost.graphic_extra_pass(askedfig)
if trace_runs then
- report_metapost("second run of job %s (asked: %s)",nofruns,tostring(askedfig))
+ report_metapost("second run of job %s, asked figure %a",nofruns,askedfig)
end
local askedfig, wrappit = checkaskedfig(askedfig)
metapost.process(current_format, {
@@ -605,7 +664,7 @@ function makempy.processgraphics(graphics)
local data = io.loaddata(mpyfile)
for figure in gmatch(data,"beginfig(.-)endfig") do
r = r + 1
- result[r] = format("begingraphictextfig%sendgraphictextfig ;\n", figure)
+ result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure)
end
io.savedata(mpyfile,concat(result,""))
end
@@ -642,7 +701,7 @@ local scriptsplitter = Ct ( Ct (
C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0
)^0 )
-local function splitscript(script)
+local function splitprescript(script)
local hash = lpegmatch(scriptsplitter,script)
for i=#hash,1,-1 do
local h = hash[i]
@@ -654,6 +713,20 @@ local function splitscript(script)
return hash
end
+-- -- not used:
+--
+-- local function splitpostscript(script)
+-- local hash = lpegmatch(scriptsplitter,script)
+-- for i=1,#hash do
+-- local h = hash[i]
+-- hash[h[1]] = h[2]
+-- end
+-- if trace_scripts then
+-- report_scripts(table.serialize(hash,"postscript"))
+-- end
+-- return hash
+-- end
+
function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
for i=1,#what do
local wi = what[i]
@@ -681,7 +754,7 @@ end
function metapost.analyzeplugins(object) -- each object (first pass)
local prescript = object.prescript -- specifications
if prescript and #prescript > 0 then
- return analyzer(object,splitscript(prescript))
+ return analyzer(object,splitprescript(prescript))
end
end
@@ -690,7 +763,7 @@ function metapost.processplugins(object) -- each object (second pass)
if prescript and #prescript > 0 then
local before = { }
local after = { }
- processor(object,splitscript(prescript),before,after)
+ processor(object,splitprescript(prescript),before,after)
return #before > 0 and before, #after > 0 and after
else
local c = object.color
@@ -707,13 +780,17 @@ local basepoints = number.dimenfactors["bp"]
local function cm(object)
local op = object.path
- local first, second, fourth = op[1], op[2], op[4]
- local tx, ty = first.x_coord , first.y_coord
- local sx, sy = second.x_coord - tx, fourth.y_coord - ty
- local rx, ry = second.y_coord - ty, fourth.x_coord - tx
- if sx == 0 then sx = 0.00001 end
- if sy == 0 then sy = 0.00001 end
- return sx,rx,ry,sy,tx,ty
+ if op then
+ local first, second, fourth = op[1], op[2], op[4]
+ local tx, ty = first.x_coord , first.y_coord
+ local sx, sy = second.x_coord - tx, fourth.y_coord - ty
+ local rx, ry = second.y_coord - ty, fourth.x_coord - tx
+ if sx == 0 then sx = 0.00001 end
+ if sy == 0 then sy = 0.00001 end
+ return sx, rx, ry, sy, tx, ty
+ else
+ return 1, 0, 0, 1, 0, 0 -- weird case
+ end
end
-- color
@@ -722,50 +799,77 @@ local function cl_reset(t)
t[#t+1] = metapost.colorinitializer() -- only color
end
--- text
-
-local tx_done = { }
+local tx_hash = { }
+local tx_last = 0
local function tx_reset()
- tx_done = { }
+ tx_hash = { }
+ tx_last = 0
end
+local fmt = formatters["%s %s %s % t"]
+
local function tx_analyze(object,prescript) -- todo: hash content and reuse them
local tx_stage = prescript.tx_stage
- if tx_stage then
+ if tx_stage == "trial" then
+ textrial = textrial + 1
local tx_number = tonumber(prescript.tx_number)
- if not tx_done[tx_number] then
- tx_done[tx_number] = true
- if trace_textexts then
- report_textexts("setting %s %s (first pass)",tx_stage,tx_number)
- end
- local s = object.postscript or ""
- local c = object.color -- only simple ones, no transparency
- local a = prescript.tr_alternative
- local t = prescript.tr_transparency
+ local s = object.postscript or ""
+ local c = object.color -- only simple ones, no transparency
+ local a = prescript.tr_alternative
+ local t = prescript.tr_transparency
+ local h = fmt(tx_number,a or "?",t or "?",c)
+ local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels)
+ if not n then
+ tx_last = tx_last + 1
if not c then
-- no color
elseif #c == 1 then
if a and t then
- s = format("\\directcolored[s=%f,a=%f,t=%f]%s",c[1],a,t,s)
+ s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
else
- s = format("\\directcolored[s=%f]%s",c[1],s)
+ s = formatters["\\directcolored[s=%f]%s"](c[1],s)
end
elseif #c == 3 then
if a and t then
- s = format("\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s",c[1],c[2],c[3],a,t,s)
+ s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
else
- s = format("\\directcolored[r=%f,g=%f,b=%f]%s",c[1],c[2],c[3],s)
+ s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
end
elseif #c == 4 then
if a and t then
- s = format("\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s",c[1],c[2],c[3],c[4],a,t,s)
+ s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
else
- s = format("\\directcolored[c=%f,m=%f,y=%f,k=%f]%s",c[1],c[2],c[3],c[4],s)
+ s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
end
end
- context.MPLIBsettext(tx_number,s) -- combine colored in here, saves call
+ context.MPLIBsettext(tx_last,s)
metapost.multipass = true
+ tx_hash[h] = tx_last
+ texslots[textrial] = tx_last
+ texorder[tx_number] = tx_last
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h)
+ end
+ else
+ texslots[textrial] = n
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h)
+ end
+ end
+ elseif tx_stage == "extra" then
+ textrial = textrial + 1
+ local tx_number = tonumber(prescript.tx_number)
+ if not texorder[tx_number] then
+ local s = object.postscript or ""
+ tx_last = tx_last + 1
+ context.MPLIBsettext(tx_last,s)
+ metapost.multipass = true
+ texslots[textrial] = tx_last
+ texorder[tx_number] = tx_last
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last)
+ end
end
end
end
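
The trial stage above now caches texts: the hash key combines the text number, color alternative, transparency and color, so a repeated, unchanged label reuses its earlier slot instead of being typeset again. A small illustrative sketch using the fmt template from the patch; the values are invented:

local fmt = string.formatters["%s %s %s % t"]

local h = fmt(3, "?", "?", { 1, 0, 0 })   -- tx_number, tr_alternative, tr_transparency, color
-- h is a plain string key built from those four values; tx_hash[h] either returns
-- the slot that was allocated earlier, or a new tx_last is allocated and the text
-- goes through context.MPLIBsettext exactly once
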
@@ -775,23 +879,31 @@ local function tx_process(object,prescript,before,after)
if tx_number then
tx_number = tonumber(tx_number)
local tx_stage = prescript.tx_stage
- if tx_stage == "final" then -- redundant test
+ if tx_stage == "final" then
+ texfinal = texfinal + 1
+ local n = texslots[texfinal]
if trace_textexts then
- report_textexts("processing %s (second pass)",tx_number)
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n)
end
- -- before[#before+1] = format("q %f %f %f %f %f %f cm",cm(object))
- local sx,rx,ry,sy,tx,ty = cm(object)
- before[#before+1] = function()
- -- flush always happens, we can have a special flush function injected before
- local box = textexts[tx_number]
- if box then
- -- context.MPLIBgettextscaled(tx_number,sxsy(box.width,box.height,box.depth))
- context.MPLIBgettextscaledcm(tx_number,sx,rx,ry,sy,tx,ty,sxsy(box.width,box.height,box.depth))
- else
+ local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
+ local box = textexts[n]
+ if box then
+ before[#before+1] = function()
+ -- flush always happens, we can have a special flush function injected before
+ context.MPLIBgettextscaledcm(n,
+ format("%f",sx), -- bah ... %s no longer checks
+ format("%f",rx), -- bah ... %s no longer checks
+ format("%f",ry), -- bah ... %s no longer checks
+ format("%f",sy), -- bah ... %s no longer checks
+ format("%f",tx), -- bah ... %s no longer checks
+ format("%f",ty), -- bah ... %s no longer checks
+ sxsy(box.width,box.height,box.depth))
+ end
+ else
+ before[#before+1] = function()
report_textexts("unknown %s",tx_number)
end
end
- -- before[#before+1] = "Q"
if not trace_textexts then
object.path = false -- else: keep it
end
@@ -815,7 +927,7 @@ end
local function gt_analyze(object,prescript)
local gt_stage = prescript.gt_stage
if gt_stage == "trial" then
- graphics[#graphics+1] = format("\\MPLIBgraphictext{%s}",object.postscript or "")
+ graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
metapost.intermediate.needed = true
metapost.multipass = true
end
@@ -891,9 +1003,9 @@ local function sh_process(object,prescript,before,after)
else
-- fatal error
end
- before[#before+1], after[#after+1] = "q /Pattern cs", format("W n /%s sh Q",name)
+ before[#before+1], after[#after+1] = "q /Pattern cs", formatters["W n /%s sh Q"](name)
-- false, not nil, else mt triggered
- object.colored = false
+ object.colored = false -- hm, not object.color ?
object.type = false
object.grouped = true
end
@@ -904,7 +1016,7 @@ end
local function bm_process(object,prescript,before,after)
local bm_xresolution = prescript.bm_xresolution
if bm_xresolution then
- before[#before+1] = format("q %f %f %f %f %f %f cm",cm(object))
+ before[#before+1] = f_cm(cm(object))
before[#before+1] = function()
figures.bitmapimage {
xresolution = tonumber(bm_xresolution),
@@ -944,7 +1056,7 @@ end
local function fg_process(object,prescript,before,after)
local fg_name = prescript.fg_name
if fg_name then
- before[#before+1] = format("q %f %f %f %f %f %f cm",cm(object)) -- beware: does not use the cm stack
+ before[#before+1] = f_cm(cm(object)) -- beware: does not use the cm stack
before[#before+1] = function()
context.MPLIBfigure(fg_name,prescript.fg_mask or "")
end
@@ -972,7 +1084,7 @@ local function tr_process(object,prescript,before,after)
if tr_alternative then
tr_alternative = tonumber(tr_alternative)
local tr_transparency = tonumber(prescript.tr_transparency)
- before[#before+1] = format("/Tr%s gs",registertransparency(nil,tr_alternative,tr_transparency,true))
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,tr_alternative,tr_transparency,true))
after[#after+1] = "/Tr0 gs" -- outertransparency
end
local cs = object.color
@@ -1002,7 +1114,7 @@ local function tr_process(object,prescript,before,after)
local t = t_list[sp_name] -- string or attribute
local v = t and attributes.transparencies.value(t)
if v then
- before[#before+1] = format("/Tr%s gs",registertransparency(nil,v[1],v[2],true))
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
after[#after+1] = "/Tr0 gs" -- outertransparency
end
end
@@ -1014,16 +1126,16 @@ local function tr_process(object,prescript,before,after)
local f = cs[1]
if colorspace == 2 then
local s = f*v[2]
- c_b, c_a = checked_color_pair(format("%.3f g %.3f G",s,s))
+ c_b, c_a = checked_color_pair(f_gray,s,s)
elseif colorspace == 3 then
local r, g, b = f*v[3], f*v[4], f*v[5]
- c_b, c_a = checked_color_pair(format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b))
+ c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
elseif colorspace == 4 or colorspace == 1 then
local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
- c_b, c_a = checked_color_pair(format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k))
+ c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
else
local s = f*v[2]
- c_b, c_a = checked_color_pair(format("%.3f g %.3f G",s,s))
+ c_b, c_a = checked_color_pair(f_gray,s,s)
end
end
--
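
Related to the formatter change, checked_color_pair now receives the compiled formatter plus its arguments rather than a preformatted string, so the literal is only built when it is actually used. A condensed, self-contained sketch, not the full implementation from the patch:

local innercolor, outercolor = "0 g 0 G", "0 g 0 G"   -- stand-ins for the module locals
local f_gray = string.formatters["%.3f g %.3f G"]

local function checked_color_pair(color,...)
    if not color then
        return innercolor, outercolor
    end
    -- formatting is deferred to this point, with the arguments passed along
    return color(...), outercolor
end

local before, after = checked_color_pair(f_gray, 0.5, 0.5)
-- before == "0.500 g 0.500 G"
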
diff --git a/Master/texmf-dist/tex/context/base/mlib-pps.mkiv b/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
index cb6120066af..f21d84e0dbd 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
+++ b/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
@@ -60,17 +60,19 @@
\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
{\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
\setbox\MPbox\hbox\bgroup
- \dotransformnextbox{#2}{#3}{#4}{#5}{#6}{#7}% does push pop
+ \dotransformnextbox{#2}{#3}{#4}{#5}{#6}{#7}% does push pop ... will be changed to proper lua call (avoid small numbers)
\vbox to \zeropoint\bgroup
\vss
\hbox to \zeropoint \bgroup
- \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}%
- \forcecolorhack % needed ? already in the scale macro
- % % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why.
- %
- % \dostartscaling{#8}{#9}%
- % \raise\dp\MPtextbox\box\MPtextbox
- % \dostopscaling
+% \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}%
+% \scale[\c!sx=#8,\c!sy=#9,\c!depth=\v!no]{\box\MPtextbox}%
+ \fastsxsy{#8}{#9}{\raise\dp\MPtextbox\box\MPtextbox}%
+ % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why.
+ % but I'll retry it some day soon.
+ % \dostartscaling{#8}{#9}%
+ % \raise\dp\MPtextbox\box\MPtextbox
+ % \dostopscaling
+ \forcecolorhack % needed ? already in the scale macro
\hss
\egroup
\egroup
@@ -124,7 +126,7 @@
\def\doMPLIBstopgroup#1#2#3#4#5#6% some day this might happen elsewhere
{\egroup
- \setbox\scratchbox\hbox{\hskip\onebasepoint\box\scratchbox}% weird correction
+ \setbox\scratchbox\hbox{\kern\onebasepoint\box\scratchbox}% weird correction
\wd\scratchbox \dimexpr#5\onebasepoint-#3\onebasepoint+2\onebasepoint\relax
\ht\scratchbox #6\onebasepoint
\dp\scratchbox-#4\onebasepoint
@@ -132,8 +134,8 @@
attr {/Group << /S /Transparency /I \ifnum#1=1 true \else false \fi /K \ifnum#1=1 true \else false \fi >>}
resources {\pdfbackendcurrentresources}
\scratchbox
- \setbox\scratchbox\hbox\bgroup\hskip-\onebasepoint\pdfrefxform\pdflastxform\egroup
- \setbox\scratchbox\hbox\bgroup\hskip-\onebasepoint\pdfrefxform\pdflastxform\egroup
+ \setbox\scratchbox\hbox\bgroup\kern-\onebasepoint\pdfrefxform\pdflastxform\egroup
+ \setbox\scratchbox\hbox\bgroup\kern-\onebasepoint\pdfrefxform\pdflastxform\egroup
\wd\scratchbox\zeropoint
\ht\scratchbox\zeropoint
\dp\scratchbox\zeropoint
diff --git a/Master/texmf-dist/tex/context/base/mlib-run.lua b/Master/texmf-dist/tex/context/base/mlib-run.lua
index 59dc88b909d..0d7b5aa6dd1 100644
--- a/Master/texmf-dist/tex/context/base/mlib-run.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-run.lua
@@ -29,58 +29,91 @@ approach is way faster than an external <l n='metapost'/> and processing time
nears zero.</p>
--ldx]]--
-local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
+local type, tostring, tonumber = type, tostring, tonumber
+local format, gsub, match, find = string.format, string.gsub, string.match, string.find
+local concat = table.concat
+local emptystring = string.is_empty
+local lpegmatch, P = lpeg.match, lpeg.P
-local report_metapost = logs.reporter("metapost")
+local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
+local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end)
+local report_metapost = logs.reporter("metapost")
local texerrormessage = logs.texerrormessage
-local format, gsub, match, find = string.format, string.gsub, string.match, string.find
-local emptystring = string.is_empty
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
-local mplib = mplib
+local mplib = mplib
+metapost = metapost or { }
+local metapost = metapost
-metapost = metapost or { }
-local metapost = metapost
+local mplibone = tonumber(mplib.version()) <= 1.50
metapost.showlog = false
metapost.lastlog = ""
+metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing
metapost.texerrors = false
metapost.exectime = metapost.exectime or { } -- hack
-local mplibone = tonumber(mplib.version()) <= 1.50
-
-directives.register("mplib.texerrors", function(v) metapost.texerrors = v end)
+directives.register("mplib.texerrors", function(v) metapost.texerrors = v end)
+trackers.register ("metapost.showlog", function(v) metapost.showlog = v end)
function metapost.resetlastlog()
metapost.lastlog = ""
end
--- local function realfinder(name, mode, ftype)
--- if mode == "w" then
--- return name
--- elseif file.is_qualified_path(name) then
--- return name
--- else
--- return resolvers.findfile(name,ftype)
+----- mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" })
+local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/"))
+
+-- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+-- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype)
+-- if not mpbasepath(foundname) then
+-- -- we could use the via file but we don't have a complete io interface yet
+-- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "")
+-- if found then
+-- local tempname = luatex.registertempfile(foundname,true)
+-- io.savedata(tempname,data)
+-- foundname = tempname
+-- end
-- end
+-- return foundname
-- end
-local function i_finder(name, mode, ftype) -- fake message for mpost.map and metafun.mpvi
- name = file.is_qualified_path(name) and name or resolvers.findfile(name,ftype)
- if not (find(name,"/metapost/context/base/") or find(name,"/metapost/context/") or find(name,"/metapost/base/")) then
+-- mplib has no real io interface so we have a different mechanism than
+-- tex (as soon as we have more control, we will use the normal code)
+
+local finders = { }
+mplib.finders = finders
+
+-- for some reason mp sometimes calls this function twice which is inefficient
+-- but we cannot catch this
+
+local function preprocessed(name)
+ if not mpbasepath(name) then
+ -- we could use the via file but we don't have a complete io interface yet
local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
if found then
local temp = luatex.registertempfile(name,true)
io.savedata(temp,data)
- name = temp
+ return temp
end
end
return name
end
+mplib.preprocessed = preprocessed -- helper
+
+finders.file = function(specification,name,mode,ftype)
+ return preprocessed(resolvers.findfile(name,ftype))
+end
+
+local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+ local specification = url.hashed(name)
+ local finder = finders[specification.scheme] or finders.file
+ return finder(specification,name,mode,ftype)
+end
+
local function o_finder(name, mode, ftype)
return name
end
@@ -128,8 +161,11 @@ function metapost.reporterror(result)
if t and t ~= "" then
(metapost.texerrors and texerrormessage or report_metapost)("terminal: %s",t)
end
+ if e == "" or e == "no-error" then
+ e = nil
+ end
if e then
- (metapost.texerrors and texerrormessage or report_metapost)("error: %s",(e=="" and "?") or e)
+ (metapost.texerrors and texerrormessage or report_metapost)("error: %s",e)
end
if not t and not e and l then
metapost.lastlog = metapost.lastlog .. "\n" .. l
@@ -145,99 +181,103 @@ end
if mplibone then
- local preamble = [[
- boolean mplib ; mplib := true ;
- string mp_parent_version ; mp_parent_version := "%s" ;
- input "%s" ; dump ;
- ]]
-
- metapost.parameters = {
- hash_size = 100000,
- main_memory = 4000000,
- max_in_open = 50,
- param_size = 100000,
- }
-
- function metapost.make(name, target, version)
- starttiming(mplib)
- target = file.replacesuffix(target or name, "mem") -- redundant
- local mpx = mplib.new ( table.merged (
- metapost.parameters,
- {
- ini_version = true,
- find_file = finder,
- job_name = file.removesuffix(target),
- }
- ) )
- if mpx then
- starttiming(metapost.exectime)
- local result = mpx:execute(format(preamble,version or "unknown",name))
- stoptiming(metapost.exectime)
- mpx:finish()
- end
- stoptiming(mplib)
- end
-
- function metapost.load(name)
- starttiming(mplib)
- local mpx = mplib.new ( table.merged (
- metapost.parameters,
- {
- ini_version = false,
- mem_name = file.replacesuffix(name,"mem"),
- find_file = finder,
- -- job_name = "mplib",
- }
- ) )
- local result
- if not mpx then
- result = { status = 99, error = "out of memory"}
- end
- stoptiming(mplib)
- return mpx, result
- end
-
- function metapost.checkformat(mpsinput)
- local mpsversion = environment.version or "unset version"
- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
- local mpsbase = file.removesuffix(file.basename(mpsinput))
- if mpsbase ~= mpsformat then
- mpsformat = mpsformat .. "-" .. mpsbase
- end
- mpsformat = file.addsuffix(mpsformat, "mem")
- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats") or ""
- if mpsformatfullname ~= "" then
- report_metapost("loading '%s' from '%s'", mpsinput, mpsformatfullname)
- local mpx, result = metapost.load(mpsformatfullname)
- if mpx then
- local result = mpx:execute("show mp_parent_version ;")
- if not result.log then
- metapost.reporterror(result)
- else
- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
- version = gsub(version,"[\'\"]","")
- if version ~= mpsversion then
- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
- else
- return mpx
- end
- end
- else
- report_metapost("error in loading '%s' from '%s'", mpsinput, mpsformatfullname)
- metapost.reporterror(result)
- end
- end
- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
- report_metapost("making '%s' into '%s'", mpsinput, mpsformatfullname)
- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
- if lfs.isfile(mpsformatfullname) then
- report_metapost("loading '%s' from '%s'", mpsinput, mpsformatfullname)
- return metapost.load(mpsformatfullname)
- else
- report_metapost("problems with '%s' from '%s'", mpsinput, mpsformatfullname)
- end
- end
+ report_metapost("fatal error: mplib is too old")
+
+ os.exit()
+
+ -- local preamble = [[
+ -- boolean mplib ; mplib := true ;
+ -- string mp_parent_version ; mp_parent_version := "%s" ;
+ -- input "%s" ; dump ;
+ -- ]]
+ --
+ -- metapost.parameters = {
+ -- hash_size = 100000,
+ -- main_memory = 4000000,
+ -- max_in_open = 50,
+ -- param_size = 100000,
+ -- }
+ --
+ -- function metapost.make(name, target, version)
+ -- starttiming(mplib)
+ -- target = file.replacesuffix(target or name, "mem") -- redundant
+ -- local mpx = mplib.new ( table.merged (
+ -- metapost.parameters,
+ -- {
+ -- ini_version = true,
+ -- find_file = finder,
+ -- job_name = file.removesuffix(target),
+ -- }
+ -- ) )
+ -- if mpx then
+ -- starttiming(metapost.exectime)
+ -- local result = mpx:execute(format(preamble,version or "unknown",name))
+ -- stoptiming(metapost.exectime)
+ -- mpx:finish()
+ -- end
+ -- stoptiming(mplib)
+ -- end
+ --
+ -- function metapost.load(name)
+ -- starttiming(mplib)
+ -- local mpx = mplib.new ( table.merged (
+ -- metapost.parameters,
+ -- {
+ -- ini_version = false,
+ -- mem_name = file.replacesuffix(name,"mem"),
+ -- find_file = finder,
+ -- -- job_name = "mplib",
+ -- }
+ -- ) )
+ -- local result
+ -- if not mpx then
+ -- result = { status = 99, error = "out of memory"}
+ -- end
+ -- stoptiming(mplib)
+ -- return mpx, result
+ -- end
+ --
+ -- function metapost.checkformat(mpsinput)
+ -- local mpsversion = environment.version or "unset version"
+ -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
+ -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
+ -- local mpsbase = file.removesuffix(file.basename(mpsinput))
+ -- if mpsbase ~= mpsformat then
+ -- mpsformat = mpsformat .. "-" .. mpsbase
+ -- end
+ -- mpsformat = file.addsuffix(mpsformat, "mem")
+ -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or ""
+ -- if mpsformatfullname ~= "" then
+ -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
+ -- local mpx, result = metapost.load(mpsformatfullname)
+ -- if mpx then
+ -- local result = mpx:execute("show mp_parent_version ;")
+ -- if not result.log then
+ -- metapost.reporterror(result)
+ -- else
+ -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
+ -- version = gsub(version,"[\'\"]","")
+ -- if version ~= mpsversion then
+ -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
+ -- else
+ -- return mpx
+ -- end
+ -- end
+ -- else
+ -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname)
+ -- metapost.reporterror(result)
+ -- end
+ -- end
+ -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
+ -- report_metapost("making %a into %a", mpsinput, mpsformatfullname)
+ -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
+ -- if lfs.isfile(mpsformatfullname) then
+ -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
+ -- return metapost.load(mpsformatfullname)
+ -- else
+ -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname)
+ -- end
+ -- end
else
@@ -247,12 +287,22 @@ else
input "%s" ;
]]
- function metapost.load(name)
+ local methods = {
+ double = "double",
+ scaled = "scaled",
+ default = "scaled",
+ decimal = false, -- for the moment
+ }
+
+ function metapost.load(name,method)
starttiming(mplib)
+ method = method and methods[method] or "scaled"
local mpx = mplib.new {
ini_version = true,
- find_file = finder,
+ find_file = finder,
+ math_mode = method,
}
+ report_metapost("initializing number mode %a",method)
local result
if not mpx then
result = { status = 99, error = "out of memory"}
@@ -264,7 +314,7 @@ else
return mpx, result
end
- function metapost.checkformat(mpsinput)
+ function metapost.checkformat(mpsinput,method)
local mpsversion = environment.version or "unset version"
local mpsinput = mpsinput or "metafun"
local foundfile = ""
@@ -281,14 +331,14 @@ else
foundfile = finder(file.replacesuffix(mpsinput,"mp")) or ""
end
if foundfile == "" then
- report_metapost("loading '%s' fails, format not found",mpsinput)
+ report_metapost("loading %a fails, format not found",mpsinput)
else
- report_metapost("loading '%s': %s",mpsinput,foundfile)
- local mpx, result = metapost.load(foundfile)
+ report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
+ local mpx, result = metapost.load(foundfile,method)
if mpx then
return mpx
else
- report_metapost("error in loading '%s'",mpsinput)
+ report_metapost("error in loading %a",mpsinput)
metapost.reporterror(result)
end
end
@@ -306,17 +356,24 @@ end
local mpxformats = { }
-function metapost.format(instance,name)
+function metapost.format(instance,name,method)
+ if not instance or instance == "" then
+ instance = "metafun" -- brrr
+ end
name = name or instance
local mpx = mpxformats[instance]
if not mpx then
- report_metapost("initializing instance '%s' using format '%s'",instance,name)
- mpx = metapost.checkformat(name)
+ report_metapost("initializing instance %a using format %a",instance,name)
+ mpx = metapost.checkformat(name,method)
mpxformats[instance] = mpx
end
return mpx
end
+function metapost.instance(instance)
+ return mpxformats[instance]
+end
+
function metapost.reset(mpx)
if not mpx then
-- nothing
@@ -350,19 +407,52 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
if trace_graphics then
if not mp_inp[mpx] then
mp_tag = mp_tag + 1
- mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", tex.jobname,mp_tag),"w")
- mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",tex.jobname,mp_tag),"w")
+ local jobname = tex.jobname
+ mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w")
+ mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w")
end
local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
mp_inp[mpx]:write(banner)
mp_log[mpx]:write(banner)
end
if type(data) == "table" then
+ -- this hack is needed because the library currently barks on \n\n
+ -- eventually we can test for "" in the next loop
+ local n = 0
+ local nofsnippets = #data
+ for i=1,nofsnippets do
+ local d = data[i]
+ if d ~= "" then
+ n = n + 1
+ data[n] = d
+ end
+ end
+ for i=nofsnippets,n+1,-1 do
+ data[i] = nil
+ end
+ -- and this one because mp cannot handle snippets due to grouping issues
+ if metapost.collapse then
+ if #data > 1 then
+ data = concat(data,"\n")
+ else
+ data = data[1]
+ end
+ end
+ -- end of hacks
+ end
+ if type(data) == "table" then
+ if trace_tracingall then
+ mpx:execute("tracingall;")
+ end
+ -- table.insert(data,2,"")
for i=1,#data do
local d = data[i]
+ -- d = string.gsub(d,"\r","")
if d then
if trace_graphics then
+ mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i))
mp_inp[mpx]:write(d)
+ mp_inp[mpx]:write(format("\n%% end snippet %s\n",i))
end
starttiming(metapost.exectime)
result = mpx:execute(d)
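
The cleanup added above first squeezes out empty snippets and then, with metapost.collapse enabled, joins the rest into one chunk so mplib does not see begingroup/endgroup mismatches across separate execute calls. A standalone sketch of that normalisation with made-up snippets:

local data = { "beginfig(1) ;", "", "draw origin ;", "", "endfig ;" }

local n = 0
for i=1,#data do                       -- keep non-empty snippets, in order
    local d = data[i]
    if d ~= "" then
        n = n + 1
        data[n] = d
    end
end
for i=#data,n+1,-1 do                  -- drop the tail that was compacted away
    data[i] = nil
end

data = table.concat(data,"\n")         -- collapse into a single chunk
-- data == "beginfig(1) ;\ndraw origin ;\nendfig ;"
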
@@ -375,7 +465,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
if not metapost.reporterror(result) then
if metapost.showlog then
- local str = (result.term ~= "" and result.term) or "no terminal output"
+ local str = result.term ~= "" and result.term or "no terminal output"
if not emptystring(str) then
metapost.lastlog = metapost.lastlog .. "\n" .. str
report_metapost("log: %s",str)
@@ -390,11 +480,14 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
end
else
+ if trace_tracingall then
+ data = "tracingall;" .. data
+ end
if trace_graphics then
- mp_inp:write(data)
+ mp_inp[mpx]:write(data)
end
starttiming(metapost.exectime)
- result = mpx[mpx]:execute(data)
+ result = mpx:execute(data)
stoptiming(metapost.exectime)
if trace_graphics and result then
local str = result.log or result.error
@@ -412,7 +505,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
metapost.lastlog = metapost.lastlog .. "\n" .. result.term
report_metapost("info: %s",result.term or "no-term")
end
- if result.fig then
+ if result.fig then
converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
end
end
@@ -440,13 +533,13 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
outputformat = "mps"
end
if not data then
- report_metapost("unknown file '%s'",filename or "?")
+ report_metapost("unknown file %a",filename)
else
local mpx = metapost.checkformat(formatname)
if not mpx then
- report_metapost("unknown format '%s'",formatname or "?")
+ report_metapost("unknown format %a",formatname)
else
- report_metapost("processing '%s'",(mpdata and (filename or "data")) or fullname)
+ report_metapost("processing %a",(mpdata and (filename or "data")) or fullname)
local result = mpx:execute(data)
if not result then
report_metapost("error: no result object returned")
@@ -483,7 +576,7 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
output = figures[v]:svg() -- (3) for prologues
end
local outname = format("%s-%s.%s",basename,v,outputformat)
- report_metapost("saving %s bytes in '%s'",#output,outname)
+ report_metapost("saving %s bytes in %a",#output,outname)
io.savedata(outname,output)
end
return #sorted
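
The mlib-run.lua changes also let an instance choose its number system when the mplib instance is created. A minimal sketch of what metapost.load now does with the method argument; unknown values fall back to "scaled" and "decimal" is disabled for the moment:

local methods = {
    double  = "double",
    scaled  = "scaled",
    default = "scaled",
    decimal = false,                 -- for the moment
}

local method = methods["double"] or "scaled"

local mpx = mplib.new {
    ini_version = true,
    math_mode   = method,            -- new: selects scaled vs double arithmetic in mplib
}
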
diff --git a/Master/texmf-dist/tex/context/base/mtx-context-copy.tex b/Master/texmf-dist/tex/context/base/mtx-context-copy.tex
new file mode 100644
index 00000000000..b798b3f8d16
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/mtx-context-copy.tex
@@ -0,0 +1,151 @@
+% engine=luatex
+
+%D \module
+%D [ file=mtx-context-copy,
+%D version=2008.11.10, % about that time i started playing with this
+%D title=\CONTEXT\ Extra Trickry,
+%D subtitle=Copying Files,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is a \TEXEXEC\ feature that has been moved to \MKIV.
+
+% begin help
+%
+% usage: context --extra=copy [options] list-of-files
+%
+% --interaction : add hyperlinks
+%
+% end help
+
+\input mtx-context-common.tex
+
+\doif {\getdocumentargument{interaction}} {yes} {
+ \setupinteraction
+ [state=start]
+ \setupexternalfigures
+ [interaction=yes]
+}
+
+\setupexternalfigures
+ [directory=]
+
+\definepapersize
+ [fit]
+ [width=\figurewidth,
+ height=\figureheight]
+
+\setuplayout
+ [page]
+
+% this will be an option to \copypages
+
+\unexpanded\def\CopyPages[#1]%
+ {\dorecurse
+ {\noffigurepages}
+ {\startTEXpage
+ \externalfigure[#1][page=\recurselevel]%
+ \stopTEXpage}}
+
+\starttext
+
+\startluacode
+
+ if #document.files == 0 then
+ context("no files given")
+ else
+ for _, filename in ipairs(document.files) do
+ if not string.find(filename,"^mtx%-context%-") then
+ logs.report("copy",filename)
+ context.getfiguredimensions
+ { filename }
+ context.setuppapersize
+ { "fit" }
+ context.CopyPages
+ { filename }
+ end
+ end
+ end
+
+\stopluacode
+
+\stoptext
+
+
+% def copyoutput
+% copyortrim(false,'copy')
+% end
+
+% def trimoutput
+% copyortrim(true,'trim')
+% end
+
+% def copyortrim(trim=false,what='unknown')
+% if job = TEX.new(logger) then
+% prepare(job)
+% job.cleanuptemprunfiles
+% files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
+% if files.length > 0 then
+% if f = File.open(job.tempfilename('tex'),'w') then
+% scale = @commandline.checkedoption('scale')
+% begin
+% scale = (scale.to_f * 1000.0).to_i if scale.to_i < 10
+% rescue
+% scale = 1000
+% end
+% scale = scale.to_i
+% paperoffset = @commandline.checkedoption('paperoffset', '0cm')
+% f << "\\starttext\n"
+% files.each do |filename|
+% result = @commandline.checkedoption('result','texexec')
+% begin
+% if (filename !~ /^texexec/io) && (filename !~ /^#{result}/) then
+% report("copying file: #{filename}")
+% f << "\\getfiguredimensions\n"
+% f << " [#{filename}]\n"
+% f << " [scale=#{scale},\n"
+% f << " page=1,\n"
+% f << " size=trimbox\n" if trim
+% f << "]\n"
+% f << "\\definepapersize\n"
+% f << " [copy]\n"
+% f << " [width=\\figurewidth,\n"
+% f << " height=\\figureheight]\n"
+% f << "\\setuppapersize\n"
+% f << " [copy][copy]\n"
+% f << "\\setuplayout\n"
+% f << " [page]\n"
+% f << "\\setupexternalfigures\n"
+% f << " [directory=]\n"
+% f << "\\copypages\n"
+% f << " [#{filename}]\n"
+% f << " [scale=#{scale},\n"
+% f << " marking=on,\n" if @commandline.option('markings')
+% f << " size=trimbox,\n" if trim
+% f << " offset=#{paperoffset}]\n"
+% end
+% rescue
+% report("wrong specification")
+% end
+% end
+% f << "\\stoptext\n"
+% f.close
+% job.setvariable('interface','english')
+% job.setvariable('simplerun',true)
+% # job.setvariable('nooptionfile',true)
+% job.setvariable('files',[job.tempfilename])
+% job.processtex
+% else
+% report("no files to #{what}")
+% end
+% else
+% report("no files to #{what}")
+% end
+% job.cleanuptemprunfiles
+% end
+% end
diff --git a/Master/texmf-dist/tex/context/base/mtx-context-select.tex b/Master/texmf-dist/tex/context/base/mtx-context-select.tex
index 7d6535119e7..2f63f595513 100644
--- a/Master/texmf-dist/tex/context/base/mtx-context-select.tex
+++ b/Master/texmf-dist/tex/context/base/mtx-context-select.tex
@@ -4,7 +4,7 @@
%D [ file=mtx-context-select,
%D version=2008.11.10, % about that time i started playing with this
%D title=\CONTEXT\ Extra Trickery,
-%D subtitle=Listing Files,
+%D subtitle=Selecting Files,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -94,6 +94,7 @@
end
for _, filename in ipairs(document.files) do
if not string.find(filename,"^mtx%-context%-") then
+ logs.report("select",filename)
context.filterpages (
{ filename },
{ selection },
diff --git a/Master/texmf-dist/tex/context/base/mtx-context-timing.tex b/Master/texmf-dist/tex/context/base/mtx-context-timing.tex
index 1bc5b47762c..c545c768f97 100644
--- a/Master/texmf-dist/tex/context/base/mtx-context-timing.tex
+++ b/Master/texmf-dist/tex/context/base/mtx-context-timing.tex
@@ -28,16 +28,20 @@
style=\tt]
\setupfootertexts
- [\getdocumentfilename{1}-luatex-progress.lut -- \pagenumber]
+ [\getdocumentfilename{1}-luatex-progress.lut \emdash\ \pagenumber]
\setupcolors
[state=start]
\starttext
- \doifsomething {\getdocumentfilename{1}} {
+
+ \doifsomethingelse {\getdocumentfilename{1}} {
+ \writestatus{system}{processing timings of \getdocumentfilename{1}}
\LoadUsage{\getdocumentfilename{1}-luatex-progress}
\ShowUsage{\getdocumentfilename{1}-luatex-progress}
+ } {
+ \writestatus{system}{no filename given for timings}
}
\stoptext
diff --git a/Master/texmf-dist/tex/context/base/mult-aux.lua b/Master/texmf-dist/tex/context/base/mult-aux.lua
index e0fd87a024c..3c4cbcc0fce 100644
--- a/Master/texmf-dist/tex/context/base/mult-aux.lua
+++ b/Master/texmf-dist/tex/context/base/mult-aux.lua
@@ -25,32 +25,32 @@ local data = { }
function namespaces.define(namespace,settings)
if trace_namespaces then
- report_namespaces("installing namespace '%s' with settings '%s'",namespace,settings)
+ report_namespaces("installing namespace %a with settings %a",namespace,settings)
end
if data[namespace] then
- report_namespaces("namespace '%s' is already taken",namespace)
+ report_namespaces("namespace %a is already taken",namespace)
end
if #namespace < 2 then
- report_namespaces("namespace '%s' should have more than 1 character",namespace)
+ report_namespaces("namespace %a should have more than 1 character",namespace)
end
local ns = { }
data[namespace] = ns
utilities.parsers.settings_to_hash(settings,ns)
local name = ns.name
if not name or name == "" then
- report_namespaces("provide a (command) name in namespace '%s'",namespace)
+ report_namespaces("provide a (command) name in namespace %a",namespace)
end
local self = "\\" .. prefix .. namespace
context.unprotect()
-- context.installnamespace(namespace)
- context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace)
+ context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace) -- or context.setvalue
if trace_namespaces then
- report_namespaces("using namespace '%s' for '%s'",namespace,name)
+ report_namespaces("using namespace %a for %a",namespace,name)
end
local parent = ns.parent or ""
if parent ~= "" then
if trace_namespaces then
- report_namespaces("namespace '%s' for '%s' uses parent '%s'",namespace,name,parent)
+ report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
end
if not find(parent,"\\") then
parent = "\\" .. prefix .. parent
@@ -59,60 +59,60 @@ function namespaces.define(namespace,settings)
end
context.installparameterhandler(self,name)
if trace_namespaces then
- report_namespaces("installing parameter handler for '%s'",name)
+ report_namespaces("installing parameter handler for %a",name)
end
context.installparameterhashhandler(self,name)
if trace_namespaces then
- report_namespaces("installing parameterhash handler for '%s'",name)
+ report_namespaces("installing parameterhash handler for %a",name)
end
local style = ns.style
if style == v_yes then
context.installstyleandcolorhandler(self,name)
if trace_namespaces then
- report_namespaces("installing attribute handler for '%s'",name)
+ report_namespaces("installing attribute handler for %a",name)
end
end
local command = ns.command
if command == v_yes then
context.installdefinehandler(self,name,parent)
if trace_namespaces then
- report_namespaces("installing definition command for '%s' (single)",name)
+ report_namespaces("installing definition command for %a (single)",name)
end
elseif command == v_list then
context.installdefinehandler(self,name,parent)
if trace_namespaces then
- report_namespaces("installing definition command for '%s' (multiple)",name)
+ report_namespaces("installing definition command for %a (multiple)",name)
end
end
local setup = ns.setup
if setup == v_yes then
context.installsetuphandler(self,name)
if trace_namespaces then
- report_namespaces("installing setup command for '%s' (single)",name)
+ report_namespaces("installing setup command for %a (%s)",name,"single")
end
elseif setup == v_list then
context.installsetuphandler(self,name)
if trace_namespaces then
- report_namespaces("installing setup command for '%s' (multiple)",name)
+ report_namespaces("installing setup command for %a (%s)",name,"multiple")
end
end
local set = ns.set
if set == v_yes then
context.installparametersethandler(self,name)
if trace_namespaces then
- report_namespaces("installing set/let/reset command for '%s' (single)",name)
+ report_namespaces("installing set/let/reset command for %a (%s)",name,"single")
end
elseif set == v_list then
context.installparametersethandler(self,name)
if trace_namespaces then
- report_namespaces("installing set/let/reset command for '%s' (multiple)",name)
+ report_namespaces("installing set/let/reset command for %a (%s)",name,"multiple")
end
end
local frame = ns.frame
if frame == v_yes then
context.installinheritedframed(name)
if trace_namespaces then
- report_namespaces("installing framed command for '%s'",name)
+ report_namespaces("installing framed command for %a",name)
end
end
context.protect()
diff --git a/Master/texmf-dist/tex/context/base/mult-aux.mkiv b/Master/texmf-dist/tex/context/base/mult-aux.mkiv
index b4c6ad03960..43d34e08726 100644
--- a/Master/texmf-dist/tex/context/base/mult-aux.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-aux.mkiv
@@ -2,7 +2,7 @@
%D [ file=mult-aux,
%D version=2010.08.2,
%D title=\CONTEXT\ Multilingual Macros,
-%D subtitle=helpers,
+%D subtitle=Helpers,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -14,6 +14,9 @@
% todo: setupxxx and setupxxxs (so a plural for the root setup and
% we can consider blocking the root)
+% todo (e.g for columnsets and registers): \definexxx[parent][1]
+%
+
%D A generalization of \MKIV-like inheritance. Just something to play
%D with (interface might change). The code here evolved in an email
%D exchange between me and Wolfgang Schuster.
@@ -41,7 +44,7 @@
%D % \whateverparameter \c!test
%D % \whateverparameterhash \c!test
%D % \namedwhateverparameter \mycurrentwhatever \c!test
-%D % \dosetwhateverstyleandcolor \c!style \c!color
+%D % \usewhateverstyleandcolor \c!style \c!color
%D % \everydefinewhatever (sets \currentwhatever)
%D % \everypresetwhatever (can be used to reset parameters as we can redefine)
%D % \everysetupwhatever (sets \currentwhatever)
@@ -93,12 +96,8 @@
\expandafter\mult_interfaces_get_parameters_indeed
\fi#2}
-% \def\mult_interfaces_get_parameters#1% we can assume that the test already happened
-% {\def\m_mult_interfaces_namespace{#1}%
-% \mult_interfaces_get_parameters_indeed}
-
\def\mult_interfaces_get_parameters_indeed#1]% namespace already set
- {\mult_interfaces_get_parameters_item#1,],\@relax@}
+ {\mult_interfaces_get_parameters_item#1,],\_e_o_p_}
\def\mult_interfaces_get_parameters_item#1,#2% #2 takes space before ,
{\if,#1,% dirty trick for testing #1=empty
@@ -106,14 +105,14 @@
\else\if]#1%
\doubleexpandafter\gobbleoneargument
\else
- \mult_interfaces_get_parameters_assign#1==\empty\@relax@
+ \mult_interfaces_get_parameters_assign#1==\empty\_e_o_p_
\doubleexpandafter\mult_interfaces_get_parameters_item
\fi\fi#2}
\def\mult_interfaces_get_parameters_error#1#2#3%
{\showassignerror{#2}{\the\inputlineno\space(#1)}}
-\def\mult_interfaces_get_parameters_assign#1=#2=#3#4\@relax@
+\def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
{\ifx\empty#1\empty
\expandafter\mult_interfaces_get_parameters_error
\else\ifx#3\empty
@@ -125,21 +124,35 @@
\newif\ifassignment
-\def\mult_check_for_assignment#1=#2#3\_end_
- {\expandafter\if\detokenize{#2}@\assignmentfalse\else\assignmenttrue\fi}
+\def\mult_check_for_assignment_indeed#1=#2#3\_end_
+ {\if#2@\assignmentfalse\else\assignmenttrue\fi}
-% usage: \mult_check_for_assignment##1=@@_end_
+\def\mult_check_for_assignment#1%
+ {\expandafter\mult_check_for_assignment_indeed\detokenize{#1}=@@\_end_}
% End of experimental code.
+% the commented detokenized variant that backtracks ... needs testing usage first
+%
+% \let\whatever\relax
+%
+% \definetest[oeps][bagger=\whatever]
+%
+% \def\currenttest{oeps} \edef\hans{\detokenizedtestparameter{bagger}}\meaning\hans\par
+% \def\currenttest{oeps} \edef\hans{\detokenizedtestparameter{reggab}}\meaning\hans\par
+
+\def\mult_interfaces_detokenize{\expandafter\expandafter\expandafter\detokenize\expandafter\expandafter\expandafter}
+
\unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8#9% inlining \csname*\endcsname is more efficient (#3 and #6 only)
- {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
+ {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
%\def#3##1{\csname#4{#1#2}{##1}\endcsname}%
\def#3##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}%
\def#4##1##2{\ifcsname##1:##2\endcsname##1:##2\else\expandafter#5\csname##1:\s!parent\endcsname{##2}\fi}%
\def#5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
\def#6##1##2{\csname\ifcsname#1##1:##2\endcsname#1##1:##2\else\expandafter#5\csname#1##1:\s!parent\endcsname{##2}\fi\endcsname}%
\def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2:##1\endcsname}}% always root, no backtrack
+ % \def#7##1{\mult_interfaces_detokenize{\csname#4{#1#2}{##1}\endcsname}}% compact version
+ % \def#7##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
\def#8##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\s!empty\fi\endcsname}%
\def#9##1{\csname#1#2:##1\endcsname}}
@@ -194,7 +207,9 @@
% In \MKIV\ we can probably use the english variant for all other
% languages too.
-\unexpanded\def\mult_interfaces_install_parameter_set_handler#1#2#3#4#5#6% we can speed this up for english
+% todo: inline the \do*value
+
+\unexpanded\def\mult_interfaces_install_parameter_set_handler#1#2#3#4#5#6%
{\ifx#2\relax\let#2\empty\fi
\unexpanded\def#3{\dosetvalue {#1#2:}}% ##1 {##2} (braces are mandatory)
\unexpanded\def#4{\dosetevalue{#1#2:}}% ##1 {##2} (braces are mandatory)
@@ -251,30 +266,42 @@
\let\definehandlerparent\empty
-\unexpanded\def\mult_interfaces_install_define_handler#1#2#3#4#5#6#7#8#9%
- {\ifx#4\relax\let#4\empty\fi
+\def\mult_check_for_parent#1#2#3#4%
+ {\ifcsname#1#4:\s!parent\endcsname \else \ifx#4\empty \else
+ \writestatus\m!system{error: invalid parent #4 for #3, #4 defined too (best check it)}%
+ \expandafter\edef\csname#1#4:\s!parent\endcsname{#2}%
+ \fi \fi}
+
+\unexpanded\def\mult_interfaces_install_define_handler#1#2#3#4#5#6#7#8#9% why is \expanded still needed in clones
+ {\ifx#4\relax\let#4\empty\fi % see \defineregister
\unexpanded\def#2{\dotripleempty#5}%
\newtoks#6%
\newtoks#7%
- \def#5[##1][##2][##3]% [child][parent][settings] | [child][settings] | [child][parent] | [child]
+ \unexpanded\def#5[##1][##2][##3]% [child][parent][settings] | [child][settings] | [child][parent] | [child]
{\let#9#4%
\edef#4{##1}%
- \the#6% predefine
\ifthirdargument
+ \the#6% predefine
\edef#8{##2}%
- \mult_interfaces_get_parameters{#1#4:}[\s!parent=#1##2,##3]%
+ \mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
+ \mult_interfaces_get_parameters{#1#4:}[##3]%
\else\ifsecondargument
- \mult_check_for_assignment##2=@@\_end_
+ \the#6% predefine
+ \expandafter\mult_check_for_assignment_indeed\detokenize{##2}=@@\_end_
\ifassignment
\let#8\empty
- \mult_interfaces_get_parameters{#1#4:}[\s!parent=#3,##2]%
+ \expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
+ \mult_interfaces_get_parameters{#1#4:}[##2]%
\else
\edef#8{##2}%
- \mult_interfaces_get_parameters{#1#4:}[\s!parent=#1##2]%
+ \mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
\fi
\else
+ \the#6% predefine
\let#8\empty
- \mult_interfaces_get_parameters{#1#4:}[\s!parent=#3]%
+ \expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
\fi\fi
\the#7%
\let#4#9}}
@@ -292,20 +319,20 @@
\expandafter\noexpand\csname current#2parent\endcsname
\expandafter\noexpand\csname saved_defined_#2\endcsname}}
-\unexpanded\def\mult_interfaces_install_setup_handler#1#2#3#4#5#6#7#8%
+\unexpanded\def\mult_interfaces_install_setup_handler#1#2#3#4#5#6#7#8#9%
{\ifx#3\relax\let#3\empty\fi
\unexpanded\def#2{\dodoubleempty#4}%
\unexpanded\def#6{\mult_interfaces_get_parameters{#1#3:}}% no every ! don't change it
\newtoks#5%
\newtoks#8%
- \def#4[##1][##2]% maybe helper
+ \unexpanded\def#4[##1][##2]% maybe helper
{\let#7#3%
\ifsecondargument
- \def\mult_interfaces_with_comma_list_element####1% we will have a simple one as well
+ \def#9####1% we will have a simple one as well
{\edef#3{####1}%
\mult_interfaces_get_parameters{#1#3:}[##2]%
\the#5}%
- \processcommalist[##1]\mult_interfaces_with_comma_list_element
+ \processcommalist[##1]#9%
\else
\let#3\empty
\mult_interfaces_get_parameters{#1:}[##1]%
@@ -324,7 +351,8 @@
\expandafter\noexpand\csname everysetup#2\endcsname
\expandafter\noexpand\csname setupcurrent#2\endcsname
\expandafter\noexpand\csname saved_setup_current#2\endcsname
- \expandafter\noexpand\csname everysetup#2root\endcsname}}
+ \expandafter\noexpand\csname everysetup#2root\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
\let\doingrootsetupnamed\plusone % \setuplayout[name][key=value]
\let\doingrootsetuproot \plustwo % \setuplayout [key=value]
@@ -342,7 +370,7 @@
\newtoks#8%
\newtoks#9%
\ifx#6\relax\let#6\empty\fi
- \def#4[##1][##2]% maybe helper
+ \unexpanded\def#4[##1][##2]% maybe helper
{\ifsecondargument % no commalist here
% \setuplayout[whatever][key=value]
\let#7#3%
@@ -354,7 +382,8 @@
\ifx#3#6\the#8\fi % only switchsetups if previous == current
\let#3#7%
\else\iffirstargument
- \mult_check_for_assignment##1=@@\_end_ % \docheckassignment{##1}%
+ % \mult_check_for_assignment{##1}%
+ \expandafter\mult_check_for_assignment_indeed\detokenize{##1}=@@\_end_
\ifassignment
% \setuplayout[key=value]
\let#7#3%
@@ -403,7 +432,7 @@
\expandafter\noexpand\csname everyswitch#2\endcsname
\expandafter\noexpand\csname everysetup#2root\endcsname}}
-\unexpanded\def\mult_interfaces_install_auto_setup_handler#1#2#3#4#5#6#7#8%
+\unexpanded\def\mult_interfaces_install_auto_setup_handler#1#2#3#4#5#6#7#8#9%
{\ifx#3\relax\let#3\empty\fi
\unexpanded\def#2{\dotripleempty#4}%
\unexpanded\def#6{\mult_interfaces_get_parameters{#1#3:}}%
@@ -411,18 +440,19 @@
\def#4[##1][##2][##3]%
{\let#8#3%
\ifthirdargument
- \def\mult_interfaces_with_comma_list_element####1%
+ \def#9####1%
{\edef#3{####1}%
- \mult_interfaces_get_parameters{#1#3:}[\s!parent=#1##2,##3]% always sets parent
+ \expandafter\def\csname#1#3:\s!parent\endcsname{#1##2}%
+ \mult_interfaces_get_parameters{#1#3:}[##3]% always sets parent
\the#5}%
- \processcommalist[##1]\mult_interfaces_with_comma_list_element
+ \processcommalist[##1]#9%
\else\ifsecondargument
- \def\mult_interfaces_with_comma_list_element####1%
+ \def#9####1%
{\edef#3{####1}%
#7% checks parent and sets if needed
\mult_interfaces_get_parameters{#1#3:}[##2]%
\the#5}%
- \processcommalist[##1]\mult_interfaces_with_comma_list_element
+ \processcommalist[##1]#9%
\else
\let#3\empty
\mult_interfaces_get_parameters{#1:}[##1]%
@@ -440,7 +470,8 @@
\expandafter\noexpand\csname everysetup#2\endcsname
\expandafter\noexpand\csname setupcurrent#2\endcsname
\expandafter\noexpand\csname check#2parent\endcsname
- \expandafter\noexpand\csname saved_setup_current#2\endcsname}}
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
\unexpanded\def\installbasicparameterhandler#1#2%
{\installparameterhandler {#1}{#2}%
@@ -486,25 +517,26 @@
%D We don't need colons for such simple cases.
-\unexpanded\def\mult_interfaces_install_direct_parameter_handler#1#2#3#4%
- {\def#2##1{\csname\ifcsname#1##1\endcsname#1##1\else\s!empty\fi\endcsname}%
- \def#3##1{\detokenize\expandafter\expandafter\expandafter{\csname#1##1\endcsname}}%
- \def#4##1{\csname#1##1\endcsname}}
+\unexpanded\def\mult_interfaces_install_direct_parameter_handler#1#2#3#4#5%
+ {\def#3##1{\csname\ifcsname#1##1\endcsname#1##1\else\s!empty\fi\endcsname}%
+ \def#4##1{\detokenize\expandafter\expandafter\expandafter{\csname#1##1\endcsname}}%
+ % \def#4##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
+ \def#5##1{\csname#1##1\endcsname}}
\unexpanded\def\installdirectparameterhandler#1#2%
{\normalexpanded
{\mult_interfaces_install_direct_parameter_handler
{\noexpand#1}%
+ \expandafter\noexpand\csname current#2\endcsname
\expandafter\noexpand\csname #2parameter\endcsname
\expandafter\noexpand\csname detokenized#2parameter\endcsname
\expandafter\noexpand\csname direct#2parameter\endcsname}}
-\unexpanded\def\mult_interfaces_install_direct_setup_handler#1#2#3#4%
+\unexpanded\def\mult_interfaces_install_direct_setup_handler#1#2#3#4#5%
{\unexpanded\def#2{\dosingleempty#3}%
- \newtoks#4%
- \def#3[##1]%
- {\mult_interfaces_get_parameters#1[##1]%
- \the#4}}
+ \newtoks#5%
+ \def#3[##1]{\mult_interfaces_get_parameters#1[##1]\the#5}%
+ \def#4{\mult_interfaces_get_parameters#1}}
\unexpanded\def\installdirectsetuphandler#1#2%
{\normalexpanded
@@ -512,6 +544,7 @@
{\noexpand#1}% \??aa
\expandafter\noexpand\csname setup#2\endcsname
\expandafter\noexpand\csname setup_#2\endcsname % semi-public
+ \expandafter\noexpand\csname setupcurrent#2\endcsname % no \every (we use 'current' for consistency)
\expandafter\noexpand\csname everysetup#2\endcsname}}
\unexpanded\def\mult_interfaces_install_direct_parameter_set_handler#1#2#3#4#5%
@@ -578,6 +611,34 @@
\unexpanded\def\relateparameterhandlers#1#2#3#4% {from} {instance} {to} {instance}
{\expandafter\edef\csname\csname#1namespace\endcsname#2:\s!parent\endcsname{\csname#3namespace\endcsname#4}}
+%D Here is another experiment:
+
+\unexpanded\def\installactionhandler#1%
+ {\normalexpanded
+ {\mult_interfaces_install_action_handler
+ {#1}%
+ \expandafter\noexpand\csname current#1\endcsname
+ \expandafter\noexpand\csname setupcurrent#1\endcsname
+ \expandafter\noexpand\csname #1_action\endcsname}}
+
+\unexpanded\def\mult_interfaces_install_action_handler#1#2#3#4%
+ {\unexpanded\expandafter\def\csname#1\endcsname{\dodoubleempty#4}%
+ \unexpanded\def#4[##1][##2]%
+ {\begingroup
+ \ifsecondargument
+ \edef#2{##1}%
+ #3[##2]%
+ \else\iffirstargument
+ \doifassignmentelse{##1}
+ {\let#2\empty
+ #3[##1]}%
+ {\edef#2{##1}}%
+ \else
+ \let#2\empty
+ \fi\fi
+ \directsetup{handler:action:#1}%
+ \endgroup}}
+
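+% A hedged usage sketch of the action handler above; the name whatever, the
+% instance and the key are illustrative, and it assumes a matching
+% \setupcurrentwhatever exists (the handler calls it for assignments):
+%
+% \installactionhandler{whatever}
+%
+% \startsetups[handler:action:whatever]
+%   % act here upon \currentwhatever and the parameters just set
+% \stopsetups
+%
+% \whatever[myinstance][style=bold]
+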
% First we had, in tune with the regular system variables:
%
% \starttyping
@@ -606,9 +667,7 @@
\newcount\c_mult_interfaces_n_of_namespaces
-\def\v_interfaces_prefix_template{\number \c_mult_interfaces_n_of_namespaces::}
-\def\v_interfaces_prefix_template{\characters\c_mult_interfaces_n_of_namespaces::}
-\def\v_interfaces_prefix_template{\number \c_mult_interfaces_n_of_namespaces>}
+%def\v_interfaces_prefix_template{\number \c_mult_interfaces_n_of_namespaces>}
\def\v_interfaces_prefix_template{\characters\c_mult_interfaces_n_of_namespaces>}
\def\v_interfaces_prefix_template % consistently %03i>
@@ -635,6 +694,10 @@
\ctxcommand{registernamespace(\number\c_mult_interfaces_n_of_namespaces,"#1")}%
\fi}
+\def\mult_interfaces_get_parameters_error#1#2#3% redefined
+ {\ctxcommand{showassignerror("#1","#2","#3",\the\inputlineno)}%
+ \waitonfatalerror}
+
% We install two core namespaces here, as we want nice error messages. Maybe
% we will reserve the first 9.
@@ -642,6 +705,34 @@
\installcorenamespace{fontinstancebasic}
\installcorenamespace{fontinstanceclass}
+%D The next one is handy for local assignments.
+
+\installcorenamespace{dummy}
+
+\letvalue\??dummy\empty
+
+ \def\dummyparameter #1{\csname\??dummy\ifcsname\??dummy#1\endcsname#1\fi\endcsname}
+ \def\directdummyparameter#1{\csname\??dummy#1\endcsname}
+\unexpanded\def\setdummyparameter #1{\expandafter\def\csname\??dummy#1\endcsname}
+\unexpanded\def\letdummyparameter #1{\expandafter\let\csname\??dummy#1\endcsname}
+
+% \unexpanded\def\getdummyparameters
+% {\mult_interfaces_get_parameters\??dummy}
+
+\unexpanded\def\getdummyparameters[#1%
+ {\if\noexpand#1]%
+ \expandafter\gobbleoneargument
+ \else
+ \let\m_mult_interfaces_namespace\??dummy
+ \expandafter\mult_interfaces_get_parameters_indeed
+ \fi#1}
+
+\mult_interfaces_install_style_and_color_handler
+ \directdummyparameter
+ \usedummystyleandcolor
+ \usedummystyleparameter
+ \usedummycolorparameter
+
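+% A hedged usage sketch of the dummy namespace; the keys and values are
+% illustrative:
+%
+% \getdummyparameters[alternative=a,offset=2pt]
+% \setdummyparameter{align}{middle}
+%
+% ... \dummyparameter{alternative} ... \directdummyparameter{offset} ...
+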
% Maybe a \definecorenamespace[name][directparameter,directsetup][parent]
% but we don't gain much. Actually we might just inline all definitions.
@@ -707,6 +798,48 @@
\expandafter\mult_interfaces_show_parent_chain\csname#1:\s!parent\endcsname
\fi}
+%D Another helper (needs to be applied):
+
+\unexpanded\def\doifelsecommandhandler#1#2% namespace name
+ {\ifcsname#1#2:\s!parent\endcsname
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\unexpanded\def\doifcommandhandler#1#2% namespace name
+ {\ifcsname#1#2:\s!parent\endcsname
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\unexpanded\def\doifnotcommandhandler#1#2% namespace name
+ {\ifcsname#1#2:\s!parent\endcsname
+ \expandafter\gobbleoneargument
+ \else
+ \expandafter\firstofoneargument
+ \fi}
+
+\let\doifcommandhandlerelse\doifelsecommandhandler
+
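+% A hedged usage sketch; \??whatever and myinstance are illustrative, and the
+% test simply checks for a :parent entry in the given namespace:
+%
+% \doifelsecommandhandler\??whatever{myinstance}
+%   {... instance is defined ...}
+%   {... instance is not defined ...}
+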
+% another set of (fast) helpers (grep for usage):
+
+\def\expandnamespaceparameter#1#2#3% \??xx \getp \c!xx \c!yy
+ {\csname#1\ifcsname#1\expandafter\expandafter\expandafter\mult_aux_expand_namespace_parameter#2#3}
+
+\def\mult_aux_expand_namespace_parameter#1#2% \cs \c!yy
+ {#1\endcsname#1\else#2\fi\endcsname}
+
+\def\expandnamespacemacro#1#2#3% \??xx \some_edefed_cs \c!yy
+ {\csname#1\ifcsname#1#2\endcsname#2\else#3\fi\endcsname}
+
+\def\expandnamespacevalue#1#2% \??xx {...} \c!yy == optimized \expandcheckedcsname
+ {\csname#1\ifcsname#1\normalexpanded{\noexpand\syst_helpers_expand_checked_value{#2}}}
+
+\def\syst_helpers_expand_checked_value#1#2%
+ {#1\endcsname#1\else#2\fi\endcsname}
+
%D Conventions:
%D
%D \starttyping
diff --git a/Master/texmf-dist/tex/context/base/mult-chk.lua b/Master/texmf-dist/tex/context/base/mult-chk.lua
index 43e7c51218e..2a2dfcd4bd9 100644
--- a/Master/texmf-dist/tex/context/base/mult-chk.lua
+++ b/Master/texmf-dist/tex/context/base/mult-chk.lua
@@ -9,9 +9,10 @@ if not modules then modules = { } end modules ['mult-chk'] = {
local format = string.format
local lpegmatch = lpeg.match
local type = type
-local make_settings_to_hash_pattern, settings_to_set = utilities.parsers.make_settings_to_hash_pattern, utilities.parsers.settings_to_set
-local allocate = utilities.storage.allocate
+local make_settings_to_hash_pattern = utilities.parsers.make_settings_to_hash_pattern
+local settings_to_set = utilities.parsers.settings_to_set
+local allocate = utilities.storage.allocate
local report_interface = logs.reporter("interface","checking")
@@ -22,7 +23,7 @@ interfaces.syntax = allocate {
}
function interfaces.invalidkey(category,key)
- report_interface("invalid key '%s' for '%s' in line %s",key,category,tex.inputlineno)
+ report_interface("invalid key %a for %a in line %a",key,category,tex.inputlineno)
end
function interfaces.setvalidkeys(category,list)
diff --git a/Master/texmf-dist/tex/context/base/mult-chk.mkiv b/Master/texmf-dist/tex/context/base/mult-chk.mkiv
index 9260d204066..1d02f166d1b 100644
--- a/Master/texmf-dist/tex/context/base/mult-chk.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-chk.mkiv
@@ -35,8 +35,8 @@
\registerctxluafile{mult-chk}{1.001}
-\def\setvalidparameterkeys{\dodoubleargument\mult_checkers_set_valid_parameter_keys}
-\def\addvalidparameterkeys{\dodoubleargument\mult_checkers_add_valid_parameter_keys}
+\unexpanded\def\setvalidparameterkeys{\dodoubleargument\mult_checkers_set_valid_parameter_keys}
+\unexpanded\def\addvalidparameterkeys{\dodoubleargument\mult_checkers_add_valid_parameter_keys}
\def\mult_checkers_set_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.setvalidkeys("#1",\!!bs#2\!!es)}}
\def\mult_checkers_add_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.addvalidkeys("#1",\!!bs#2\!!es)}}
@@ -61,12 +61,12 @@
\expandafter\mult_checkers_get_checked_parameters_nop_indeed
\fi{#3}#5}
-\def\mult_checkers_get_checked_parameters_nop_indeed#1#2]%
- {\def\p!dogetparameter{\p!doassign#1}%
- \xprocesscommaitem#2,],\@relax@}
+\def\mult_checkers_get_checked_parameters_nop_indeed#1#2]% needs checking with adapted syst-aux.mkiv
+ {\def\syst_helpers_get_parameters_assign{\syst_helpers_get_parameters_assign_indeed#1}% will change
+ \syst_helpers_process_comma_item#2,],\_e_o_p_}
-\def\disablecheckparameters{\let\getcheckedparameters\mult_checkers_get_checked_parameters_nop}
-\def\enablecheckparameters {\let\getcheckedparameters\mult_checkers_get_checked_parameters_yes}
+\unexpanded\def\disablecheckparameters{\let\getcheckedparameters\mult_checkers_get_checked_parameters_nop}
+\unexpanded\def\enablecheckparameters {\let\getcheckedparameters\mult_checkers_get_checked_parameters_yes}
\disablecheckparameters
diff --git a/Master/texmf-dist/tex/context/base/mult-de.mkii b/Master/texmf-dist/tex/context/base/mult-de.mkii
index dee614e5ec5..893a9d35832 100644
--- a/Master/texmf-dist/tex/context/base/mult-de.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-de.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{kapitel}
\setinterfacevariable{character}{buchstabe}
\setinterfacevariable{characters}{buchstaben}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{farbe}
\setinterfacevariable{column}{column}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{nummerierung}
\setinterfacevariable{environment}{umgebung}
\setinterfacevariable{even}{gerade}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{extern}
\setinterfacevariable{fact}{gegeben}
\setinterfacevariable{february}{februar}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{linkerseite}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legende}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{zeile}
\setinterfacevariable{linenote}{linenote}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{montag}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{monat}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{name}
\setinterfacevariable{narrow}{schmall}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{untervorwaerts}
\setinterfacevariable{subject}{thema}
\setinterfacevariable{subpage}{unterseite}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{unterabsatz}
\setinterfacevariable{subsubject}{unterthema}
\setinterfacevariable{subsubsection}{unterunterabsatz}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{unten}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{abstandunten}
\setinterfaceconstant{bottomframe}{untenrahmen}
\setinterfaceconstant{bottomoffset}{untenoffset}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{ersteseite}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusout}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{letzteseite}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{links}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{linkersubsatz}
\setinterfaceconstant{lefttext}{linkertext}
\setinterfaceconstant{leftwidth}{linkerbreite}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{niveau}
\setinterfaceconstant{levels}{niveaus}
\setinterfaceconstant{limittext}{limittext}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{beschriftung}
\setinterfaceconstant{marstyle}{beschrstil}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{maxhoehe}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{menue}
\setinterfaceconstant{method}{methode}
\setinterfaceconstant{middle}{mittig}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{middlespeech}
\setinterfaceconstant{middletext}{mittigertext}
\setinterfaceconstant{midsentence}{midsentence}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{minhoehe}
\setinterfaceconstant{minwidth}{minbreite}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{name}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{seperator}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{objektabstand}
\setinterfaceconstant{sidealign}{sidealign}
\setinterfaceconstant{sidemethod}{sidemethod}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{zeichen}
\setinterfaceconstant{size}{groesse}
\setinterfaceconstant{small}{klein}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
\setinterfaceconstant{source}{quelle}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{toleranz}
\setinterfaceconstant{top}{oben}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{obenabstand}
\setinterfaceconstant{topframe}{obenrahmen}
\setinterfaceconstant{topoffset}{obenoffset}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{switchtorawfont}
\setinterfacecommand{sym}{sym}
\setinterfacecommand{symbol}{symbol}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{synchronisationsbalken}
\setinterfacecommand{synchronize}{synchronisieren}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-def.lua b/Master/texmf-dist/tex/context/base/mult-def.lua
index 1a4921a28ca..fdb8803bf33 100644
--- a/Master/texmf-dist/tex/context/base/mult-def.lua
+++ b/Master/texmf-dist/tex/context/base/mult-def.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['mult-def'] = {
comment = "companion to mult-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
return {
@@ -5888,6 +5889,10 @@ return {
["pe"]="نم",
["ro"]="sim",
},
+ ["symoffset"]={
+ ["en"]="symoffset",
+ ["nl"]="symoffset",
+ },
["symbol"]={
["cs"]="symbol",
["de"]="symbol",
@@ -6449,6 +6454,18 @@ return {
},
},
["constants"]={
+ ["less"]={
+ ["en"]="less",
+ ["nl"]="minder",
+ },
+ ["more"]={
+ ["en"]="more",
+ ["nl"]="meer",
+ },
+ ["solution"]={
+ ["en"]="solution",
+ ["nl"]="oplossing",
+ },
["anchor"]={
["en"]="anchor",
["nl"]="anker",
@@ -6613,6 +6630,18 @@ return {
["en"]="layout",
["nl"]="layout",
},
+ ["topcommand"]={
+ ["en"]="topcommand",
+ ["nl"]="bovencommando",
+ },
+ ["middlecommand"]={
+ ["en"]="middlecommand",
+ ["nl"]="middencommando",
+ },
+ ["bottomcommand"]={
+ ["en"]="bottomcommand",
+ ["nl"]="ondercommando",
+ },
["action"]={
["cs"]="akce",
["de"]="aktion",
@@ -7805,6 +7834,16 @@ return {
["pe"]="تمرکز",
["ro"]="focus",
},
+ ["firstpage"]={
+ ["cs"]="prvnistranka",
+ ["de"]="ersteseite",
+ ["en"]="firstpage",
+ ["fr"]="premierepage",
+ ["it"]="primapagina",
+ ["nl"]="eerstepagina",
+ ["pe"]="صفحه‌اول",
+ ["ro"]="primapagina",
+ },
["focusin"]={
["cs"]="focusin",
["de"]="focusin",
@@ -8362,6 +8401,16 @@ return {
["pe"]="برچسب",
["ro"]="eticheta",
},
+ ["lastpage"]={
+ ["cs"]="poslednistrana",
+ ["de"]="letzteseite",
+ ["en"]="lastpage",
+ ["fr"]="dernierepage",
+ ["it"]="ultimapagina",
+ ["nl"]="laatstepagina",
+ ["pe"]="صفحه‌آخر",
+ ["ro"]="ultimapagina",
+ },
["left"]={
["cs"]="vlevo",
["de"]="links",
@@ -10023,6 +10072,10 @@ return {
["pe"]="بارگذاریها",
["ro"]="setups",
},
+ ["shrink"]={
+ ["en"]="shrink",
+ ["nl"]="krimp",
+ },
["side"]={
["cs"]="pocitat",
["de"]="objektabstand",
@@ -10310,6 +10363,9 @@ return {
["pe"]="بست",
["ro"]="strut",
},
+ ["mathstyle"]={
+ ["en"]="mathstyle",
+ },
["style"]={
["cs"]="pismeno",
["de"]="stil",
@@ -11269,6 +11325,14 @@ return {
},
},
["variables"]={
+ ["more"]={
+ ["en"]="more",
+ ["nl"]="meer",
+ },
+ ["less"]={
+ ["en"]="less",
+ ["nl"]="minder",
+ },
["embed"]={
["en"]="embed",
["nl"]="sluitin",
@@ -11277,6 +11341,9 @@ return {
["en"]="end",
["nl"]="eind",
},
+ ["export"] = {
+ ["en"]="export",
+ },
["begin"]={
["en"]="begin",
["nl"]="begin",
@@ -12542,6 +12609,10 @@ return {
["pe"]="حرفها",
["ro"]="caractere",
},
+ ["chemistry"]={
+ ["en"]="chemistry",
+ ["nl"]="chemie",
+ },
["color"]={
["cs"]="barevne",
["de"]="farbe",
@@ -15721,6 +15792,9 @@ return {
["pe"]="زیرصفحه",
["ro"]="subpagina",
},
+ ["subs"]={
+ ["en"]="subs",
+ },
["subsection"]={
["cs"]="podsekce",
["de"]="unterabsatz",
diff --git a/Master/texmf-dist/tex/context/base/mult-def.mkiv b/Master/texmf-dist/tex/context/base/mult-def.mkiv
index f6f50ae7252..069d48d2186 100644
--- a/Master/texmf-dist/tex/context/base/mult-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-def.mkiv
@@ -13,21 +13,85 @@
\unprotect
-\setvalue{\??mi :czech}{cs}
-\setvalue{\??mi :german}{de}
-\setvalue{\??mi :english}{en}
-\setvalue{\??mi :french}{fr}
-\setvalue{\??mi :italian}{it}
-\setvalue{\??mi :dutch}{nl}
-\setvalue{\??mi :persian}{pe}
-\setvalue{\??mi:romanian}{ro}
-
-\def\userinterfacetag{\ifcsname\??mi:\currentinterface\endcsname\csname\??mi:\currentinterface\endcsname\else en\fi}
-\def\userresponsestag{\ifcsname\??mi:\currentresponses\endcsname\csname\??mi:\currentresponses\endcsname\else en\fi}
+\installcorenamespace{multilingual}
+
+\setvalue{\??multilingual czech}{cs}
+\setvalue{\??multilingual german}{de}
+\setvalue{\??multilingual english}{en}
+\setvalue{\??multilingual french}{fr}
+\setvalue{\??multilingual italian}{it}
+\setvalue{\??multilingual dutch}{nl}
+\setvalue{\??multilingual persian}{pe}
+\setvalue{\??multilingual romanian}{ro}
+
+\def\userinterfacetag{\ifcsname\??multilingual\currentinterface\endcsname\csname\??multilingual\currentinterface\endcsname\else en\fi}
+\def\userresponsestag{\ifcsname\??multilingual\currentresponses\endcsname\csname\??multilingual\currentresponses\endcsname\else en\fi}
% \input mult-\userinterfacetag \relax
% \input mult-m\userresponsestag \relax
\ctxlua{interfaces.setuserinterface("\userinterfacetag","\userresponsestag")}
+% start todo:
+
+\def\c!language {language}
+\def\c!compressseparator{compressseparator}
+\def\c!renderingsetup {renderingsetup}
+\def\c!filler {filler}
+\def\c!resources {resources}
+\def\c!first {first}
+\def\c!last {last}
+\def\c!quotechar {quotechar}
+\def\c!commentchar {commentchar}
+\def\c!symbolcommand {symbolcommand}
+\def\c!xmlsetup {xmlsetup}
+\def\c!comma {comma}
+\def\c!period {period}
+\def\c!monthconversion {monthconversion}
+\def\c!comment {comment}
+\def\c!textalign {textalign}
+\def\c!up {up}
+\def\c!down {down}
+\def\c!instance {instance}
+
+\def\v!compressseparator{compressseparator}
+\def\v!notation {notation}
+\def\v!endnote {endnote}
+\def\v!interactive {interactive}
+\def\v!autopunctuation {autopunctuation}
+\def\v!integral {integral}
+\def\v!shiftup {shiftup}
+\def\v!shiftdown {shiftdown}
+\def\v!construction {construction}
+\def\v!unframed {unframed}
+\def\v!chemical {chemical}
+\def\v!chemicals {chemicals}
+\def\v!words {words}
+
+\def\s!lcgreek {lcgreek}
+\def\s!ucgreek {ucgreek}
+\def\s!sygreek {sygreek}
+\def\s!italics {italics}
+\def\s!integral {integral}
+\def\s!insert {insert} % maybe insertclass
+\def\s!marker {marker}
+
+\def\s!double {double}
+\def\s!decimal {decimal}
+
+\def\s!current {current}
+
+\def\c!HL {HL}
+\def\c!VL {VL}
+\def\c!NL {NL}
+
+\ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations; should be an e! actually
+\ifdefined\v!stretched \else \def\v!stretched {stretched} \fi
+\ifdefined\v!vulgarfraction\else \def\v!vulgarfraction{vulgarfraction} \fi
+\ifdefined\v!block \else \def\v!block {block} \fi
+\ifdefined\v!rule \else \def\v!rule {rule} \fi
+\ifdefined\v!oldstyle \else \def\v!oldstyle {oldstyle} \fi
+
+% stop todo
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/mult-dim.mkvi b/Master/texmf-dist/tex/context/base/mult-dim.mkvi
index 0889b411e92..ffc04b523f8 100644
--- a/Master/texmf-dist/tex/context/base/mult-dim.mkvi
+++ b/Master/texmf-dist/tex/context/base/mult-dim.mkvi
@@ -35,6 +35,7 @@
\installcorenamespace{dimensionnormal}
\setvalue{\??dimensionnormal \v!none }#value#dimension#small#medium#big{#dimension\zeropoint}
+\setvalue{\??dimensionnormal \empty }#value#dimension#small#medium#big{#dimension\zeropoint}
\setvalue{\??dimensionnormal \v!small }#value#dimension#small#medium#big{#dimension=#small\relax}
\setvalue{\??dimensionnormal \v!medium}#value#dimension#small#medium#big{#dimension=#medium\relax}
\setvalue{\??dimensionnormal \v!big }#value#dimension#small#medium#big{#dimension=#big\relax}
@@ -118,7 +119,7 @@
\setbox\b_assign_width\emptybox}
\setvalue{\??dimensionwidth\v!broad }#value#dimension#content#extra{\setbox\b_assign_width\hbox{#content}#dimension\dimexpr\wd\b_assign_width+#extra\relax
\setbox\b_assign_width\emptybox}
-\setvalue{\??dimensionwidth\v!unknown}#value#dimension#content#extra{#dimension=#value\relax}
+\setvalue{\??dimensionwidth\s!unknown}#value#dimension#content#extra{#dimension=#value\relax}
\unexpanded\def\assignwidth#value%
{\csname\??dimensionwidth\ifcsname\??dimensionwidth#value\endcsname#value\else\s!unknown\fi\endcsname{#value}}
diff --git a/Master/texmf-dist/tex/context/base/mult-en.mkii b/Master/texmf-dist/tex/context/base/mult-en.mkii
index b801ebc5fe4..c3ab2fc16d0 100644
--- a/Master/texmf-dist/tex/context/base/mult-en.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-en.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{chapter}
\setinterfacevariable{character}{character}
\setinterfacevariable{characters}{characters}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{color}
\setinterfacevariable{column}{column}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{enumeration}
\setinterfacevariable{environment}{environment}
\setinterfacevariable{even}{even}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{external}
\setinterfacevariable{fact}{fact}
\setinterfacevariable{february}{february}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{leftpage}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legend}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{line}
\setinterfacevariable{linenote}{linenote}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{monday}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{month}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{name}
\setinterfacevariable{narrow}{narrow}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{subforward}
\setinterfacevariable{subject}{subject}
\setinterfacevariable{subpage}{subpage}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{subsection}
\setinterfacevariable{subsubject}{subsubject}
\setinterfacevariable{subsubsection}{subsubsection}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{bottom}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{bottomdistance}
\setinterfaceconstant{bottomframe}{bottomframe}
\setinterfaceconstant{bottomoffset}{bottomoffset}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{firstpage}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusout}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{lastpage}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{left}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{leftsubsentence}
\setinterfaceconstant{lefttext}{lefttext}
\setinterfaceconstant{leftwidth}{leftwidth}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{level}
\setinterfaceconstant{levels}{levels}
\setinterfaceconstant{limittext}{limittext}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{marking}
\setinterfaceconstant{marstyle}{marstyle}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{maxheight}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{menu}
\setinterfaceconstant{method}{method}
\setinterfaceconstant{middle}{middle}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{middlespeech}
\setinterfaceconstant{middletext}{middletext}
\setinterfaceconstant{midsentence}{midsentence}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{minheight}
\setinterfaceconstant{minwidth}{minwidth}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{name}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{separator}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{side}
\setinterfaceconstant{sidealign}{sidealign}
\setinterfaceconstant{sidemethod}{sidemethod}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{sign}
\setinterfaceconstant{size}{size}
\setinterfaceconstant{small}{small}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
\setinterfaceconstant{source}{source}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{tolerance}
\setinterfaceconstant{top}{top}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{topdistance}
\setinterfaceconstant{topframe}{topframe}
\setinterfaceconstant{topoffset}{topoffset}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{switchtorawfont}
\setinterfacecommand{sym}{sym}
\setinterfacecommand{symbol}{symbol}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{synchronizationbar}
\setinterfacecommand{synchronize}{synchronize}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-fr.mkii b/Master/texmf-dist/tex/context/base/mult-fr.mkii
index 5717cf97b19..1ba4f3c8cf1 100644
--- a/Master/texmf-dist/tex/context/base/mult-fr.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-fr.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{chapitre}
\setinterfacevariable{character}{caractere}
\setinterfacevariable{characters}{caracteres}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{couleur}
\setinterfacevariable{column}{colonne}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{enumeration}
\setinterfacevariable{environment}{environement}
\setinterfacevariable{even}{paire}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{external}
\setinterfacevariable{fact}{fait}
\setinterfacevariable{february}{fevrier}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{pagegauche}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legende}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{ligne}
\setinterfacevariable{linenote}{noteligne}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{lundi}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{mois}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{nom}
\setinterfacevariable{narrow}{etroit}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{sousavance}
\setinterfacevariable{subject}{sujet}
\setinterfacevariable{subpage}{souspage}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{soussection}
\setinterfacevariable{subsubject}{soussujet}
\setinterfacevariable{subsubsection}{soussoussection}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{inf}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{distanceinf}
\setinterfaceconstant{bottomframe}{cadreinf}
\setinterfaceconstant{bottomoffset}{decalageinf}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{premierepage}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusout}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{etiquette}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{dernierepage}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{gauche}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{sousphrasegauche}
\setinterfaceconstant{lefttext}{textegauche}
\setinterfaceconstant{leftwidth}{largeurgauche}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{niveau}
\setinterfaceconstant{levels}{niveaux}
\setinterfaceconstant{limittext}{limittext}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{marquage}
\setinterfaceconstant{marstyle}{stylemarquage}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{hauteurmax}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{menu}
\setinterfaceconstant{method}{methode}
\setinterfaceconstant{middle}{milieu}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{middlespeech}
\setinterfaceconstant{middletext}{textecentre}
\setinterfaceconstant{midsentence}{midsentence}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{hauteurmin}
\setinterfaceconstant{minwidth}{largeurmin}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nom}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{separateur}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{reglages}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{cote}
\setinterfaceconstant{sidealign}{sidealign}
\setinterfaceconstant{sidemethod}{sidemethod}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{signe}
\setinterfaceconstant{size}{dimension}
\setinterfaceconstant{small}{petit}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
\setinterfaceconstant{source}{origine}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{tolerance}
\setinterfaceconstant{top}{sup}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{distancesup}
\setinterfaceconstant{topframe}{cadresup}
\setinterfaceconstant{topoffset}{decalagesup}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{changepolicebrute}
\setinterfacecommand{sym}{sym}
\setinterfacecommand{symbol}{symbole}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{barresynchronisation}
\setinterfacecommand{synchronize}{synchronise}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-fun.lua b/Master/texmf-dist/tex/context/base/mult-fun.lua
new file mode 100644
index 00000000000..a661c53bb30
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/mult-fun.lua
@@ -0,0 +1,101 @@
+return {
+ internals = {
+ --
+ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel",
+ "shadefactor",
+ "textextoffset",
+ "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent",
+ "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent",
+ "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
+-- "originlength", "tickstep ", "ticklength",
+-- "autoarrows", "ahfactor",
+-- "angleoffset", anglelength", anglemethod",
+ "metapostversion",
+ "maxdimensions",
+ },
+ commands = {
+ --
+ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
+ "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
+ "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "paired", "tripled",
+ "unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
+ -- "halfcircle", "quartercircle",
+ "llcircle", "lrcircle", "urcircle", "ulcircle",
+ "tcircle", "bcircle", "lcircle", "rcircle",
+ "lltriangle", "lrtriangle", "urtriangle", "ultriangle",
+ "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened",
+ "punked", "curved", "unspiked", "simplified", "blownup", "stretched",
+ "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
+ "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed",
+ "llenlarged", "lrenlarged", "urenlarged", "ulenlarged",
+ "llmoved", "lrmoved", "urmoved", "ulmoved",
+ "rightarrow", "leftarrow", "centerarrow",
+ "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox",
+ "bottomboundary", "leftboundary", "topboundary", "rightboundary",
+ "xsized", "ysized", "xysized", "sized", "xyscaled",
+ "intersection_point", "intersection_found", "penpoint",
+ "bbwidth", "bbheight",
+ "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto",
+ "withcircularshade", "withlinearshade",
+ "cmyk", "spotcolor", "multitonecolor", "namedcolor",
+ "drawfill", "undrawfill",
+ "inverted", "uncolored", "softened", "grayed", "greyed",
+ "onlayer",
+ "along",
+ "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage",
+ "colordecimals", "ddecimal", "dddecimal", "ddddecimal",
+ "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign",
+ "transparent", "withtransparency",
+ "property", "properties", "withproperties",
+ "asgroup",
+ "infont", -- redefined usign textext
+ -- "property", "withproperties", "properties", -- not yet
+ "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade",
+ "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade",
+ "space", "CRLF",
+ "grayscale", "greyscale", "withgray", "withgrey",
+ "colorpart",
+ "readfile",
+ "clearxy", "unitvector", "center", -- redefined
+ "epsed", "anchored",
+ "originpath", "infinite",
+ "break",
+ "xstretched", "ystretched", "snapped",
+ --
+ "pathconnectors", "function", "constructedpath", "constructedpairs",
+ "punkedfunction", "curvedfunction", "tightfunction",
+ "punkedpath", "curvedpath", "tightpath",
+ "punkedpairs", "curvedpairs", "tightpairs",
+ --
+ "evenly", "oddly",
+ --
+ "condition",
+ --
+ "pushcurrentpicture", "popcurrentpicture",
+ --
+ "arrowpath",
+-- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
+-- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
+-- "showgrid",
+-- "phantom",
+-- "xshifted", "yshifted",
+-- "drawarrowpath", "midarrowhead", "arrowheadonpath",
+-- "drawxticks", "drawyticks", "drawticks",
+-- "pointarrow",
+-- "thefreelabel", "freelabel", "freedotlabel",
+-- "anglebetween", "colorcircle",
+-- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
+-- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
+ "tensecircle", "roundedsquare",
+ "colortype", "whitecolor", "blackcolor",
+ --
+-- "swappointlabels",
+ "normalfill", "normaldraw", "visualizepaths", "naturalizepaths",
+ "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox",
+ "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
+ "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
+ --
+ "decorated", "redecorated", "undecorated",
+ },
+}
diff --git a/Master/texmf-dist/tex/context/base/mult-ini.lua b/Master/texmf-dist/tex/context/base/mult-ini.lua
index d14f38ad5f6..5571ee3b948 100644
--- a/Master/texmf-dist/tex/context/base/mult-ini.lua
+++ b/Master/texmf-dist/tex/context/base/mult-ini.lua
@@ -6,16 +6,18 @@ if not modules then modules = { } end modules ['mult-ini'] = {
license = "see context related readme files"
}
-local format, gmatch, gsub = string.format, string.gmatch, string.gsub
+local format, gmatch, match = string.format, string.gmatch, string.match
local lpegmatch = lpeg.match
local serialize = table.serialize
-local allocate = utilities.storage.allocate
-local mark = utilities.storage.mark
-local contextsprint = context.sprint
-local setmetatableindex = table.setmetatableindex
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
+local prtcatcodes = catcodes.numbers.prtcatcodes
+local contextsprint = context.sprint
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
-local report_interface = logs.reporter("interface","initialization")
+local report_interface = logs.reporter("interface","initialization")
interfaces = interfaces or { }
interfaces.constants = mark(interfaces.constants or { })
@@ -25,27 +27,38 @@ interfaces.formats = mark(interfaces.formats or { })
interfaces.translations = mark(interfaces.translations or { })
interfaces.corenamespaces = mark(interfaces.corenamespaces or { })
-storage.register("interfaces/constants", interfaces.constants, "interfaces.constants")
-storage.register("interfaces/variables", interfaces.variables, "interfaces.variables")
-storage.register("interfaces/elements", interfaces.elements, "interfaces.elements")
-storage.register("interfaces/formats", interfaces.formats, "interfaces.formats")
-storage.register("interfaces/translations", interfaces.translations, "interfaces.translations")
-storage.register("interfaces/corenamespaces", interfaces.corenamespaces, "interfaces.corenamespaces")
+local registerstorage = storage.register
+local sharedstorage = storage.shared
+
+local constants = interfaces.constants
+local variables = interfaces.variables
+local elements = interfaces.elements
+local formats = interfaces.formats
+local translations = interfaces.translations
+local corenamespaces = interfaces.corenamespaces
+local reporters = { } -- just an optimization
+
+registerstorage("interfaces/constants", constants, "interfaces.constants")
+registerstorage("interfaces/variables", variables, "interfaces.variables")
+registerstorage("interfaces/elements", elements, "interfaces.elements")
+registerstorage("interfaces/formats", formats, "interfaces.formats")
+registerstorage("interfaces/translations", translations, "interfaces.translations")
+registerstorage("interfaces/corenamespaces", corenamespaces, "interfaces.corenamespaces")
interfaces.interfaces = {
"cs", "de", "en", "fr", "it", "nl", "ro", "pe",
}
-storage.shared.currentinterface = storage.shared.currentinterface or "en"
-storage.shared.currentresponse = storage.shared.currentresponse or "en"
+sharedstorage.currentinterface = sharedstorage.currentinterface or "en"
+sharedstorage.currentresponse = sharedstorage.currentresponse or "en"
-local currentinterface = storage.shared.currentinterface
-local currentresponse = storage.shared.currentresponse
+local currentinterface = sharedstorage.currentinterface
+local currentresponse = sharedstorage.currentresponse
local complete = allocate()
interfaces.complete = complete
-local function resolve(t,k) -- one access needed to get loaded
+local function resolve(t,k) -- one access needed to get loaded (not stored!)
report_interface("loading interface definitions from 'mult-def.lua'")
complete = dofile(resolvers.findfile("mult-def.lua"))
report_interface("loading interface messages from 'mult-mes.lua'")
@@ -56,14 +69,6 @@ end
setmetatableindex(complete, resolve)
-local constants = interfaces.constants
-local variables = interfaces.variables
-local elements = interfaces.elements
-local formats = interfaces.formats
-local translations = interfaces.translations
-local corenamespaces = interfaces.corenamespaces
-local reporters = { } -- just an optimization
-
local function valueiskey(t,k) -- will be helper
t[k] = k
return k
@@ -75,7 +80,7 @@ setmetatableindex(elements, valueiskey)
setmetatableindex(formats, valueiskey)
setmetatableindex(translations, valueiskey)
-function commands.registernamespace(n,namespace)
+function interfaces.registernamespace(n,namespace)
corenamespaces[n] = namespace
end
@@ -85,7 +90,7 @@ local function resolve(t,k)
return v
end
-setmetatableindex(reporters, resolve)
+setmetatableindex(reporters,resolve)
for category, _ in next, translations do
-- We pre-create reporters for already defined messages
@@ -122,9 +127,10 @@ end
-- the old method:
+local replacer = lpeg.replacer { { "--", "%%a" } }
+
local function fulltag(category,tag)
- tag = gsub(tag,"%-%-","%%s")
- return format("%s:%s",category,tag)
+ return formatters["%s:%s"](category,lpegmatch(replacer,tag))
end
function interfaces.setmessages(category,str)
@@ -132,13 +138,13 @@ function interfaces.setmessages(category,str)
if tag == "title" then
translations[tag] = translations[tag] or tag
else
- formats[fulltag(category,tag)] = gsub(message,"%-%-","%%s")
+ formats[fulltag(category,tag)] = lpegmatch(replacer,message)
end
end
end
function interfaces.setmessage(category,tag,message)
- formats[fulltag(category,tag)] = gsub(message,"%-%-","%%s")
+ formats[fulltag(category,tag)] = lpegmatch(replacer,message)
end
function interfaces.getmessage(category,tag,default)
@@ -146,7 +152,7 @@ function interfaces.getmessage(category,tag,default)
end
function interfaces.doifelsemessage(category,tag)
- return commands.testcase(formats[fulltag(category,tag)])
+ return formats[fulltag(category,tag)]
end
local splitter = lpeg.splitat(",")
@@ -184,45 +190,38 @@ end
logs.setmessenger(context.verbatim.ctxreport)
--- status
-
-function commands.writestatus(category,message,...)
- local r = reporters[category]
- r(message,...)
-end
-
-- initialization
function interfaces.setuserinterface(interface,response)
- storage.shared.currentinterface, currentinterface = interface, interface
- storage.shared.currentresponse, currentresponse = response, response
+ sharedstorage.currentinterface, currentinterface = interface, interface
+ sharedstorage.currentresponse, currentresponse = response, response
if environment.initex then
local nofconstants = 0
for given, constant in next, complete.constants do
constant = constant[interface] or constant.en or given
constants[constant] = given -- breedte -> width
- contextsprint("\\do@sicon{",given,"}{",constant,"}")
+ contextsprint(prtcatcodes,"\\ui_c{",given,"}{",constant,"}") -- user interface constant
nofconstants = nofconstants + 1
end
local nofvariables = 0
for given, variable in next, complete.variables do
variable = variable[interface] or variable.en or given
variables[given] = variable -- ja -> yes
- contextsprint("\\do@sivar{",given,"}{",variable,"}")
+ contextsprint(prtcatcodes,"\\ui_v{",given,"}{",variable,"}") -- user interface variable
nofvariables = nofvariables + 1
end
local nofelements = 0
for given, element in next, complete.elements do
element = element[interface] or element.en or given
elements[element] = given
- contextsprint("\\do@siele{",given,"}{",element,"}")
+ contextsprint(prtcatcodes,"\\ui_e{",given,"}{",element,"}") -- user interface element
nofelements = nofelements + 1
end
local nofcommands = 0
for given, command in next, complete.commands do
command = command[interface] or command.en or given
if command ~= given then
- contextsprint("\\do@sicom{",given,"}{",command,"}")
+ contextsprint(prtcatcodes,"\\ui_m{",given,"}{",command,"}") -- user interface macro
end
nofcommands = nofcommands + 1
end
@@ -236,7 +235,7 @@ function interfaces.setuserinterface(interface,response)
translations[given] = translation[interface] or translation.en or given
noftranslations = noftranslations + 1
end
- report_interface("definitions: %s constants, %s variables, %s elements, %s commands, %s formats, %s translations",
+ report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations",
nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations)
end
end
@@ -244,12 +243,12 @@ end
interfaces.cachedsetups = interfaces.cachedsetups or { }
interfaces.hashedsetups = interfaces.hashedsetups or { }
-storage.register("interfaces/cachedsetups", interfaces.cachedsetups, "interfaces.cachedsetups")
-storage.register("interfaces/hashedsetups", interfaces.hashedsetups, "interfaces.hashedsetups")
-
local cachedsetups = interfaces.cachedsetups
local hashedsetups = interfaces.hashedsetups
+storage.register("interfaces/cachedsetups", cachedsetups, "interfaces.cachedsetups")
+storage.register("interfaces/hashedsetups", hashedsetups, "interfaces.hashedsetups")
+
function interfaces.cachesetup(t)
local hash = serialize(t)
local done = hashedsetups[hash]
@@ -271,3 +270,40 @@ function interfaces.interfacedcommand(name)
local command = complete.commands[name]
return command and command[currentinterface] or name
end
+
+-- interface
+
+function commands.writestatus(category,message,...)
+ local r = reporters[category]
+ if r then
+ r(message,...)
+ end
+end
+
+commands.registernamespace = interfaces.registernamespace
+commands.setinterfaceconstant = interfaces.setconstant
+commands.setinterfacevariable = interfaces.setvariable
+commands.setinterfaceelement = interfaces.setelement
+commands.setinterfacemessage = interfaces.setmessage
+commands.setinterfacemessages = interfaces.setmessages
+commands.showmessage = interfaces.showmessage
+
+function commands.doifelsemessage(category,tag)
+ commands.doifelse(interfaces.doifelsemessage(category,tag))
+end
+
+function commands.getmessage(category,tag,default)
+ context(interfaces.getmessage(category,tag,default))
+end
+
+function commands.showassignerror(namespace,key,value,line)
+ local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
+ if ns then
+ namespace = corenamespaces[tonumber(ns)] or ns
+ end
+ if instance then
+ context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
+ else
+ context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
+ end
+end
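
The mult-ini.lua hunks above swap the old gsub based handling of "--" placeholders in interface messages for a precompiled lpeg.replacer and switch the directive to ConTeXt's %a formatter. As a reading aid (not part of the patch), here is a minimal plain-Lua approximation of that message pipeline; it uses string.gsub and string.format with %s because lpeg.replacer and string.formatters (with %a) are ConTeXt extensions from l-lpeg.lua and util-str.lua, and the helper names setmessage/getmessage below are illustrative stand-ins only:

-- sketch: approximate the message pipeline of mult-ini.lua in plain Lua;
-- the real code replaces "--" with "%a" via lpeg.replacer and formats
-- with string.formatters, here approximated with "%s" and string.format
local formats = { }

local function fulltag(category, tag)
    return string.format("%s:%s", category, tag)
end

local function setmessage(category, tag, message)
    -- "--" in an interface message marks an argument slot
    formats[fulltag(category, tag)] = (message:gsub("%-%-", "%%s"))
end

local function getmessage(category, tag, ...)
    local fmt = formats[fulltag(category, tag)]
    return fmt and string.format(fmt, ...)
end

setmessage("columns", "8", "balanced in -- step(s)")
print(getmessage("columns", "8", 3)) -- balanced in 3 step(s)
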
diff --git a/Master/texmf-dist/tex/context/base/mult-ini.mkiv b/Master/texmf-dist/tex/context/base/mult-ini.mkiv
index f0d47103dc4..52f9255c8ec 100644
--- a/Master/texmf-dist/tex/context/base/mult-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-ini.mkiv
@@ -101,24 +101,31 @@
%D
%D \starttyping
%D \hrule width 10pt height 2pt depth 1pt
-%D \hrule \!!width 10pt \!!height 2pt \!!depth 1pt
+%D \hrule \s!width 10pt \s!height 2pt \s!depth 1pt
%D \stoptyping
%D
-%D One condition is that we have defined \type{\!!height},
-%D \type{\!!width} and \type{\!!depth} as respectively
-%D \type{height}, \type{width} and \type{depth}. Using this
+%D One condition is that we have defined \type {\s!height},
+%D \type {\s!width} and \type {\s!depth} as respectively
+%D \type {height}, \type {width} and \type {depth}. Using this
%D scheme therefore only makes sense when a token sequence is
%D used more than once. Savings like this should of course be
%D implemented in english, just because \TEX\ is english.
-\def\!!width {width}
-\def\!!height{height}
-\def\!!depth {depth}
-\def\!!plus {plus}
-\def\!!minus {minus}
-\def\!!fill {fill}
-\def\!!to {to}
-\def\!!spread{spread}
+\def\s!width {width} \let\!!width \s!width % obsolete
+\def\s!height{height} \let\!!height\s!height % obsolete
+\def\s!depth {depth} \let\!!depth \s!depth % obsolete
+\def\s!spread{spread} \let\!!spread\s!spread % obsolete
+\def\s!plus {plus} \let\!!plus \s!plus % obsolete
+\def\s!minus {minus} \let\!!minus \s!minus % obsolete
+\def\s!fil {fil}
+\def\s!fill {fill} \let\!!fill \s!fill % obsolete
+\def\s!filll {filll}
+\def\s!to {to} \let\!!to \s!to % obsolete
+\def\s!attr {attr}
+
+\def\s!bottom{bottom}
+\def\s!top {top}
+\def\s!both {both}
%D Kind of special:
@@ -361,7 +368,7 @@
\doifinsetelse{#1}{\currentresponses,all}\mult_messages_start_yes\mult_messages_start_nop{#2}}
\def\mult_messages_start_yes#1#2\stopmessages
- {\ctxlua{interfaces.setmessages("#1",\!!bs#2\!!es)}%
+ {\ctxcommand{setinterfacemessages("#1",\!!bs#2\!!es)}%
\egroup}
\def\mult_messages_start_nop#1#2\stopmessages
@@ -371,12 +378,12 @@
\unexpanded\def\setinterfacemessage#1#2#3%
{\ifcsname\m!prefix!#1\endcsname\else\setgvalue{\m!prefix!#1}{#1}\fi
- \ctxlua{interfaces.setmessage("#1","#2",\!!bs#3\!!es)}}
+ \ctxcommand{setinterfacemessage("#1","#2",\!!bs#3\!!es)}}
-\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\cldcontext{interfaces.getmessage("#1","#2")}}}
-\unexpanded\def\getmessage #1#2{\cldcontext{interfaces.getmessage("#1","#2")}}
-\unexpanded\def\doifelsemessage #1#2{\ctxlua{interfaces.doifelsemessage("#1","#2")}}
-\unexpanded\def\showmessage #1#2#3{\ctxlua{interfaces.showmessage("#1","#2",\!!bs#3\!!es)}}
+\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\ctxcommand{getmessage("#1","#2")}}}
+\unexpanded\def\getmessage #1#2{\ctxcommand{getmessage("#1","#2")}}
+\unexpanded\def\doifelsemessage #1#2{\ctxcommand{doifelsemessage("#1","#2")}}
+\unexpanded\def\showmessage #1#2#3{\ctxcommand{showmessage("#1","#2",\!!bs#3\!!es)}}
\unexpanded\def\writestatus #1#2{\ctxcommand{writestatus("#1",\!!bs#2\!!es)}}
%D \macros
@@ -723,11 +730,11 @@
% temporary mkiv hack (we can best just store the whole table in memory)
\unexpanded\def\setinterfaceconstant#1#2%
- {\ctxlua{interfaces.setconstant("#1","#2")}%
+ {\ctxcommand{setinterfaceconstant("#1","#2")}%
\expandafter\def\csname\c!prefix!#1\endcsname{#1}}
\unexpanded\def\setinterfacevariable#1#2%
- {\ctxlua{interfaces.setvariable("#1","#2")}%
+ {\ctxcommand{setinterfacevariable("#1","#2")}%
\expandafter\def\csname\v!prefix!#1\endcsname{#2}}
%D \macros
@@ -756,7 +763,7 @@
%D part is needed, we use a \type{-}:
\unexpanded\def\setinterfaceelement#1#2%
- {\ctxlua{interfaces.setelement("#1","#2")}%
+ {\ctxcommand{setinterfaceelement("#1","#2")}%
\ifcsname\e!prefix!#1\endcsname
\doifnotvalue{\e!prefix!#1}{#2}{\setvalue{\e!prefix!#1}{#2}}%
\else
@@ -776,15 +783,27 @@
%D For at the \LUA\ end (experiment):
-\def\do@sicon#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}%
- \expandafter\gdef\csname\k!prefix!#2\endcsname{#1}} % backmapping from non english
-\def\do@sivar#1#2{\expandafter\gdef\csname\v!prefix!#1\endcsname{#2}}
-\def\do@siele#1#2{\expandafter\gdef\csname\e!prefix!#1\endcsname{#2}}
-\def\do@sicom#1#2{\expandafter\gdef\csname#2\expandafter\endcsname\expandafter{\csname#1\endcsname}}
+% \def\do@sicon#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}%
+% \expandafter\gdef\csname\k!prefix!#2\endcsname{#1}} % backmapping from non english
+% \def\do@sivar#1#2{\expandafter\gdef\csname\v!prefix!#1\endcsname{#2}}
+% \def\do@siele#1#2{\expandafter\gdef\csname\e!prefix!#1\endcsname{#2}}
+% \def\do@sicom#1#2{\expandafter\gdef\csname#2\expandafter\endcsname\expandafter{\csname#1\endcsname}}
+%
+% \startinterface english
+%
+% \def\do@sicon#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}}
+%
+% \stopinterface
+
+\def\ui_c#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}%
+ \expandafter\gdef\csname\k!prefix!#2\endcsname{#1}} % backmapping from non english
+\def\ui_v#1#2{\expandafter\gdef\csname\v!prefix!#1\endcsname{#2}}
+\def\ui_e#1#2{\expandafter\gdef\csname\e!prefix!#1\endcsname{#2}}
+\def\ui_m#1#2{\expandafter\gdef\csname#2\expandafter\endcsname\expandafter{\csname#1\endcsname}}
\startinterface english
- \def\do@sicon#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}}
+ \def\ui_c#1#2{\expandafter\gdef\csname\c!prefix!#1\endcsname{#1}}
\stopinterface
@@ -800,7 +819,7 @@
\def\contextbanner
{ConTeXt \space
- ver: \contextversion \space \contextmark \space \space
+ ver: \contextversion \space \contextmark \space \contextkind \space \space
fmt: \formatversion \space \space
int: \currentinterface/\currentresponses}
@@ -814,7 +833,8 @@
\def\contextversion {unknown}
\def\contextversionnumber{0}
\else
- \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi#2\ifnum#3<10 0\fi#3 #4:#5}
+ %\def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi#2\ifnum#3<10 0\fi#3 #4:#5}
+ \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi\purenumber{#2}\ifnum#3<10 0\fi\purenumber{#3} #4:#5}
\edef\contextversionnumber{\expandafter\contextversionnumber\contextversion\relax\space\contextmark}
\fi
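
At the TeX end this file now routes calls through \ctxcommand, which resolves into the commands table that mult-ini.lua fills at its bottom; \writestatus, for instance, ends up in commands.writestatus, which forwards to a lazily created per-category reporter. Outside the patch, a small plain-Lua sketch of that pattern follows; print() stands in for the logs.reporter back end, and the metatable below mimics what table.setmetatableindex plus the resolve function do in the real code:

-- sketch: lazily created per-category reporters, as used by commands.writestatus;
-- print() is a stand-in for logs.reporter
local reporters = setmetatable({ }, {
    __index = function(t, category)
        local reporter = function(fmt, ...)
            print(string.format("%s > " .. fmt, category, ...))
        end
        t[category] = reporter -- cache it, later lookups hit the table directly
        return reporter
    end,
})

local commands = { }

function commands.writestatus(category, message, ...)
    local r = reporters[category]
    if r then
        r(message, ...)
    end
end

commands.writestatus("interface", "loading %s definitions", "english")
-- interface > loading english definitions
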
diff --git a/Master/texmf-dist/tex/context/base/mult-it.mkii b/Master/texmf-dist/tex/context/base/mult-it.mkii
index 88a371ac76e..0d1ea911d1a 100644
--- a/Master/texmf-dist/tex/context/base/mult-it.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-it.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{capitolo}
\setinterfacevariable{character}{lettera}
\setinterfacevariable{characters}{lettere}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{colore}
\setinterfacevariable{column}{colonna}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{enumerazione}
\setinterfacevariable{environment}{ambiente}
\setinterfacevariable{even}{pari}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{esterno}
\setinterfacevariable{fact}{fatto}
\setinterfacevariable{february}{febbraio}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{paginasinistra}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legenda}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{riga}
\setinterfacevariable{linenote}{linenote}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{lunedi}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{mese}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{nome}
\setinterfacevariable{narrow}{stretto}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{sottoavanti}
\setinterfacevariable{subject}{argomento}
\setinterfacevariable{subpage}{sottopagina}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{sottocapoverso}
\setinterfacevariable{subsubject}{sottoargomento}
\setinterfacevariable{subsubsection}{sottosottocapoverso}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{fondo}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{distanzafondo}
\setinterfaceconstant{bottomframe}{cornicefondo}
\setinterfaceconstant{bottomoffset}{offsetfondo}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusout}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{etichetta}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{ultimapagina}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{sinistra}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{sottofrasesinistra}
\setinterfaceconstant{lefttext}{testosinistro}
\setinterfaceconstant{leftwidth}{ampiezzasinistra}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{livello}
\setinterfaceconstant{levels}{livelli}
\setinterfaceconstant{limittext}{limittext}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{marcatura}
\setinterfaceconstant{marstyle}{stilemarcatura}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{altezzamax}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{menu}
\setinterfaceconstant{method}{metodo}
\setinterfaceconstant{middle}{centro}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{middlespeech}
\setinterfaceconstant{middletext}{testocentro}
\setinterfaceconstant{midsentence}{midsentence}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{altezzamin}
\setinterfaceconstant{minwidth}{ampiezzamin}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nome}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{separatore}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{lato}
\setinterfaceconstant{sidealign}{sidealign}
\setinterfaceconstant{sidemethod}{sidemethod}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{segno}
\setinterfaceconstant{size}{dimensione}
\setinterfaceconstant{small}{piccolo}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
\setinterfaceconstant{source}{origine}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{tolleranza}
\setinterfaceconstant{top}{cima}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{distanzacima}
\setinterfaceconstant{topframe}{cornicecima}
\setinterfaceconstant{topoffset}{offsetcima}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{passaafontgrezzo}
\setinterfacecommand{sym}{sim}
\setinterfacecommand{symbol}{simbolo}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{barrasincronizzazione}
\setinterfacecommand{synchronize}{sincronizza}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-low.lua b/Master/texmf-dist/tex/context/base/mult-low.lua
index 7d61d0427e3..d3c5add106f 100644
--- a/Master/texmf-dist/tex/context/base/mult-low.lua
+++ b/Master/texmf-dist/tex/context/base/mult-low.lua
@@ -17,6 +17,7 @@ return {
"zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint",
"points", "halfpoint",
"zeroskip",
+ "zeromuskip", "onemuskip",
"pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi",
"normalpagebox",
-- --
@@ -94,13 +95,15 @@ return {
"startcomponent", "stopcomponent", "component",
"startproduct", "stopproduct", "product",
"startproject", "stopproject", "project",
- "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable",
- "startmodule", "stopmodule", "usemodule",
+ "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
+ "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule",
--
"startTEXpage", "stopTEXpage",
-- "startMPpage", "stopMPpage", -- already catched by nested lexer
--
- "enablemode", "disablemode", "preventmode", "pushmode", "popmode",
+ "enablemode", "disablemode", "preventmode",
+ "globalenablemode", "globaldisablemode", "globalpreventmode",
+ "pushmode", "popmode",
--
"typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix",
--
@@ -111,6 +114,8 @@ return {
"constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument",
--
"continueifinputfile",
+ --
+ "luastringsep", "!!bs", "!!es",
},
["helpers"] = {
--
@@ -122,8 +127,10 @@ return {
"startlocalsetups", "stoplocalsetups",
"starttexdefinition", "stoptexdefinition",
"starttexcode", "stoptexcode",
+ "startcontextcode", "stopcontextcode",
--
"doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
+ "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
--
"newmode", "setmode", "resetmode",
"newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode",
@@ -134,13 +141,22 @@ return {
--
"then",
--
- "donothing", "dontcomplain",
+ "firstargumentfalse", "firstargumenttrue",
+ "secondargumentfalse", "secondargumenttrue",
+ "thirdargumentfalse", "thirdargumenttrue",
+ "fourthargumentfalse", "fourthargumenttrue",
+ "fifthargumentfalse", "fifthsargumenttrue",
+ "sixthargumentfalse", "sixtsargumenttrue",
+ --
+ "doglobal", "dodoglobal", "redoglobal", "resetglobal",
+ --
+ "donothing", "dontcomplain", "forgetall",
--
"donetrue", "donefalse",
--
"htdp",
"unvoidbox",
- "vfilll",
+ "hfilll", "vfilll",
--
"mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha",
--
@@ -158,6 +174,8 @@ return {
"ruledhglue", "ruledvglue", "normalhglue", "normalvglue",
"ruledpenalty",
--
+ "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk",
+ --
"scratchcounter", "globalscratchcounter",
"scratchdimen", "globalscratchdimen",
"scratchskip", "globalscratchskip",
@@ -165,10 +183,15 @@ return {
"scratchtoks", "globalscratchtoks",
"scratchbox", "globalscratchbox",
--
+ "availablehsize", "localhsize", "setlocalhsize",
+ --
"nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
--
"scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance",
"scratchhsize", "scratchvsize",
+ "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset",
+ "scratchxposition", "scratchyposition",
+ "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset",
--
"scratchcounterone", "scratchcountertwo", "scratchcounterthree",
"scratchdimenone", "scratchdimentwo", "scratchdimenthree",
@@ -176,6 +199,10 @@ return {
"scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree",
"scratchtoksone", "scratchtokstwo", "scratchtoksthree",
"scratchboxone", "scratchboxtwo", "scratchboxthree",
+ "scratchnx", "scratchny", "scratchmx", "scratchmy",
+ "scratchunicode",
+ --
+ "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip",
--
"doif", "doifnot", "doifelse",
"doifinset", "doifnotinset", "doifinsetelse",
@@ -184,33 +211,42 @@ return {
"doifelsevalue", "doifvalue", "doifnotvalue",
"doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
"doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing",
- "doifdimensionelse", "doifnumberelse",
+ "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber",
"doifcommonelse", "doifcommon", "doifnotcommon",
"doifinstring", "doifnotinstring", "doifinstringelse",
- "doifassignmentelse",
+ "doifassignmentelse", "docheckassignment",
--
"tracingall", "tracingnone", "loggingall",
--
- "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
+ "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
--
- "endgraf", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "normalspace",
+ "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace",
+ "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
+ "normalspace",
--
"executeifdefined",
--
"singleexpandafter", "doubleexpandafter", "tripleexpandafter",
--
- "dontleavehmode", "removelastspace", "removeunwantedspaces",
+ "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces",
--
"wait", "writestatus", "define", "redefine",
--
- "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "measure",
+ "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure",
+ --
+ "installcorenamespace",
--
"getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue",
"resetvalue", "undefinevalue", "ignorevalue",
"setuvalue", "setuevalue", "setugvalue", "setuxvalue",
- "globallet", "glet",
+ --
+ "globallet", "glet", "udef", "ugdef", "uedef", "uxdef",
+ --
"getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters",
--
+ "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter",
+ "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter",
+ --
"processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist",
"processaction", "processallactions", "processfirstactioninset", "processallactionsinset",
--
@@ -232,10 +268,12 @@ return {
--
"newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
"newmacro", "setnewmacro", "newfraction",
+ "newsignal",
--
"dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty",
- "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument",
+ "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument",
"dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty",
+ "permitspacesbetweengroups", "dontpermitspacesbetweengroups",
--
"nopdfcompression", "maximumpdfcompression", "normalpdfcompression",
--
@@ -245,6 +283,62 @@ return {
--
"startnointerference", "stopnointerference",
--
- "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "begstrut", "endstrut",
+ "twodigits","threedigits",
+ --
+ "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ --
+ "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
+ "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
+ --
+ "opordspacing", "opopspacing", "opbinspacing", "oprelspacing",
+ "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing",
+ --
+ "binordspacing", "binopspacing", "binbinspacing", "binrelspacing",
+ "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing",
+ --
+ "relordspacing", "relopspacing", "relbinspacing", "relrelspacing",
+ "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing",
+ --
+ "openordspacing", "openopspacing", "openbinspacing", "openrelspacing",
+ "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing",
+ --
+ "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing",
+ "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing",
+ --
+ "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing",
+ "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing",
+ --
+ "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing",
+ "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing",
+ --
+ "normalreqno",
+ --
+ "startimath", "stopimath", "normalstartimath", "normalstopimath",
+ "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath",
+ --
+ "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
+ "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
+ "mathtext", "setmathsmalltextbox", "setmathtextbox",
+ --
+ "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle",
+ "triggeruncrampedstyle", "triggercrampedstyle",
+ "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle",
+ "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle",
+ --
+ "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse",
+ --
+ "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport",
+ "ctxlua", "luacode", "lateluacode", "directluacode",
+ "registerctxluafile", "ctxloadluafile",
+ "luaversion", "luamajorversion", "luaminorversion",
+ "ctxluacode", "luaconditional", "luaexpanded",
+ "startluaparameterset", "stopluaparameterset", "luaparameterset",
+ "definenamedlua",
+ "obeylualines", "obeyluatokens",
+ "startluacode", "stopluacode", "startlua", "stoplua",
+ --
+ "carryoverpar",
+ --
+ "Umathbotaccent",
}
}
diff --git a/Master/texmf-dist/tex/context/base/mult-mes.lua b/Master/texmf-dist/tex/context/base/mult-mes.lua
index 473c86d3547..aed417c92b0 100644
--- a/Master/texmf-dist/tex/context/base/mult-mes.lua
+++ b/Master/texmf-dist/tex/context/base/mult-mes.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['mult-mes'] = {
comment = "companion to mult-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
-- floatsblocks -> floats
@@ -15,67 +16,67 @@ if not modules then modules = { } end modules ['mult-mes'] = {
return {
formats = {
- ["adapted: preamble '%s', state '%s', order '%s'"] = {
- nl = "aangepast: preamble '%s', status '%s', volgorde '%s'",
+ ["adapted: preamble %a, state %a, order %a"] = {
+ nl = "aangepast: preamble %a, status %a, volgorde %a",
},
- ["already loaded: '%s'"] = {
- nl = "al geladen: '%s'",
+ ["already loaded: %a"] = {
+ nl = "al geladen: %a",
},
["check:1"] = {
- cs = "postradam '=' po '%s' na radku %s",
- de = "Fehlendes '=' nach '%s' in Zeile %s",
- en = "missing or ungrouped '=' after '%s' in line %s",
- fr = "manquant ou dégroupé '=' après '%s' à la ligne %s",
- it = "'=' mancante o non raggruppato dopo '%s' alla riga %s",
- nl = "'=' ontbreekt of zonder {} na '%s' in regel %s",
- no = "manglende '=' etter '%s' i linje %s",
- ro = "lipseste '=' dupa '%s' in linia %s",
+ cs = "postradam '=' po %a na radku %a",
+ de = "Fehlendes '=' nach %a in Zeile %a",
+ en = "missing or ungrouped '=' after %a in line %a",
+ fr = "manquant ou dégroupé '=' après %a à la ligne %a",
+ it = "'=' mancante o non raggruppato dopo %a alla riga %a",
+ nl = "'=' ontbreekt of zonder {} na %a in regel %a",
+ no = "manglende '=' etter %a i linje %a",
+ ro = "lipseste '=' dupa %a in linia %a",
},
["check:2"] = {
- cs = "ocekavam %s argument(y) na radku %s",
- de = "%s Argument(e) in Zeile %s erwartet",
- en = "%s argument(s) expected in line %s",
- fr = "%s argument(s) attendu(s) à la ligne %s",
- it = "%s argomento/i attesi alla riga %s",
- nl = "%s argument(en) verwacht in regel %s",
- no = "%s argument forventet i linje %s",
- ro = "argumentul(ele) %s sunt asteptate in linia %s",
+ cs = "ocekavam %a argument(y) na radku %a",
+ de = "%a Argument(e) in Zeile %a erwartet",
+ en = "%a argument(s) expected in line %a",
+ fr = "%a argument(s) attendu(s) à la ligne %a",
+ it = "%a argomento/i attesi alla riga %a",
+ nl = "%a argument(en) verwacht in regel %a",
+ no = "%a argument forventet i linje %a",
+ ro = "argumentul(ele) %a sunt asteptate in linia %a",
},
["check:3"] = {
- cs = "%s %s nahrazuje makro, uzijte VERZALKY!",
- de = "%s %s ersetzt ein Makro, verwende VERSALIEN!",
- en = "%s %s replaces a macro, use CAPITALS!",
- fr = "%s %s remplace une macro, utilisez des MAJUSCULES !",
- it = "%s %s sostituisce una macro, usare le MAIUSCOLE!",
- nl = "%s %s vervangt een macro, gebruik HOOFDLETTERS!",
- no = "%s %s overskygger en makro, bruk STORE BOKSTAVER!",
- ro = "%s %s inlocuieste un macro, folositi MAJUSCULE!",
+ cs = "%a %a nahrazuje makro, uzijte VERZALKY!",
+ de = "%a %a ersetzt ein Makro, verwende VERSALIEN!",
+ en = "%a %a replaces a macro, use CAPITALS!",
+ fr = "%a %a remplace une macro, utilisez des MAJUSCULES !",
+ it = "%a %a sostituisce una macro, usare le MAIUSCOLE!",
+ nl = "%a %a vervangt een macro, gebruik HOOFDLETTERS!",
+ no = "%a %a overskygger en makro, bruk STORE BOKSTAVER!",
+ ro = "%a %a inlocuieste un macro, folositi MAJUSCULE!",
},
- ["checking file: '%s'"] = {
- nl = "controleren file: '%s'",
+ ["checking file: %a"] = {
+ nl = "controleren file: %a",
},
- ["checking url: '%s'"] = {
- nl = "controleren url: '%s'",
+ ["checking url: %a"] = {
+ nl = "controleren url: %a",
},
["colors:1"] = {
- cs = "system %s je globalne aktivovana",
- de = "system %s ist global aktiviert",
- en = "system %s is global activated",
- fr = "le système %s est globalement activé",
- it = "sistema %s attivato globalmente",
- nl = "systeem %s is globaal actief",
- no = "system %s er aktivert globalt",
- ro = "sistem %s este activata global",
+ cs = "system %a je globalne aktivovana",
+ de = "system %a ist global aktiviert",
+ en = "system %a is global activated",
+ fr = "le système %a est globalement activé",
+ it = "sistema %a attivato globalmente",
+ nl = "systeem %a is globaal actief",
+ no = "system %a er aktivert globalt",
+ ro = "sistem %a este activata global",
},
["colors:10"] = {
- cs = "%s prostor barev je podporovan",
- de = "%s Farbraum wird unterstuetzt",
- en = "%s color space is supported",
- fr = "%s l'espace de couleur est supporté",
- it = "spazio dei colori %s supportato",
- nl = "%s kleurruimte wordt ondersteund",
- no = "%s fargerom er støttet",
- ro = "spatiul de culoare %s este suportat",
+ cs = "%a prostor barev je podporovan",
+ de = "%a Farbraum wird unterstuetzt",
+ en = "%a color space is supported",
+ fr = "%a l'espace de couleur est supporté",
+ it = "spazio dei colori %a supportato",
+ nl = "%a kleurruimte wordt ondersteund",
+ no = "%a fargerom er støttet",
+ ro = "spatiul de culoare %a este suportat",
},
["colors:11"] = {
cs = "barva je prevedena na sed",
@@ -88,104 +89,104 @@ return {
ro = "culoarea este convertita la gri",
},
["colors:12"] = {
- cs = "%s is registered",
- de = "%s is registered",
- en = "%s is registered",
- fr = "%s est enregistré",
- it = "%s is registered",
- nl = "%s is geregistreerd",
- no = "%s is registered",
- ro = "%s is registered",
+ cs = "%a is registered",
+ de = "%a is registered",
+ en = "%a is registered",
+ fr = "%a est enregistré",
+ it = "%a is registered",
+ nl = "%a is geregistreerd",
+ no = "%a is registered",
+ ro = "%a is registered",
},
["colors:2"] = {
- cs = "system %s je lokalne activovana",
- de = "system %s ist lokal aktiviert",
- en = "system %s is local activated",
- fr = "le système %s est localement activé",
- it = "sistema %s attivato localmente",
- nl = "systeem %s is lokaal actief",
- no = "system %s er aktivert lokalt",
- ro = "sistem %s este activata local",
+ cs = "system %a je lokalne activovana",
+ de = "system %a ist lokal aktiviert",
+ en = "system %a is local activated",
+ fr = "le système %a est localement activé",
+ it = "sistema %a attivato localmente",
+ nl = "systeem %a is lokaal actief",
+ no = "system %a er aktivert lokalt",
+ ro = "sistem %a este activata local",
},
["colors:3"] = {
- cs = "%s neni definovana %s",
- de = "%s ist undefiniert %s",
- en = "%s is not defined %s",
- fr = "%s n'est pas défini %s",
- it = "%s non definito %s",
- nl = "%s is niet gedefinieerd %s",
- no = "%s er udefinert %s",
- ro = "%s nu este definita %s",
+ cs = "%a neni definovana %a",
+ de = "%a ist undefiniert %a",
+ en = "%a is not defined %a",
+ fr = "%a n'est pas défini %a",
+ it = "%a non definito %a",
+ nl = "%a is niet gedefinieerd %a",
+ no = "%a er udefinert %a",
+ ro = "%a nu este definita %a",
},
["colors:4"] = {
- cs = "system %s je nacten",
- de = "system %s ist geladen",
- en = "system %s is loaded",
- fr = "le système %s est chargé",
- it = "sistema %s caricato",
- nl = "systeem %s wordt geladen",
- no = "system %s er lest inn",
- ro = "sistem %s este incarcata",
+ cs = "system %a je nacten",
+ de = "system %a ist geladen",
+ en = "system %a is loaded",
+ fr = "le système %a est chargé",
+ it = "sistema %a caricato",
+ nl = "systeem %a wordt geladen",
+ no = "system %a er lest inn",
+ ro = "sistem %a este incarcata",
},
["colors:5"] = {
- cs = "neznamy system %s",
- de = "unbekanntes System %s",
- en = "unknown system %s",
- fr = "système %s inconnu",
- it = "sistema %s sconosciuto",
- nl = "onbekend systeem %s",
- no = "ukjent system %s",
- ro = "sistem %s necunoscuta",
+ cs = "neznamy system %a",
+ de = "unbekanntes System %a",
+ en = "unknown system %a",
+ fr = "système %a inconnu",
+ it = "sistema %a sconosciuto",
+ nl = "onbekend systeem %a",
+ no = "ukjent system %a",
+ ro = "sistem %a necunoscuta",
},
["colors:6"] = {
- cs = "palette %s je k dispozici",
- de = "palette %s ist verfuegbar",
- en = "palette %s is available",
- fr = "la palette %s est disponible",
- it = "tavolozza %s resa disponibile",
- nl = "palet %s is beschikbaar",
- no = "palett %s er tilgjengelig",
- ro = "paleta %s este disponibila",
+ cs = "palette %a je k dispozici",
+ de = "palette %a ist verfuegbar",
+ en = "palette %a is available",
+ fr = "la palette %a est disponible",
+ it = "tavolozza %a resa disponibile",
+ nl = "palet %a is beschikbaar",
+ no = "palett %a er tilgjengelig",
+ ro = "paleta %a este disponibila",
},
["colors:7"] = {
- cs = "palette %s neni k dispozici",
- de = "palette %s ist nicht verfuegbar",
- en = "palette %s is not available",
- fr = "le palette %s n'est pas disponible",
- it = "tavolozza %s non disponibile",
- nl = "palet %s is niet beschikbaar",
- no = "palett %s er ikke tilgjengelig",
- ro = "palette %s nu este disponibila",
+ cs = "palette %a neni k dispozici",
+ de = "palette %a ist nicht verfuegbar",
+ en = "palette %a is not available",
+ fr = "le palette %a n'est pas disponible",
+ it = "tavolozza %a non disponibile",
+ nl = "palet %a is niet beschikbaar",
+ no = "palett %a er ikke tilgjengelig",
+ ro = "palette %a nu este disponibila",
},
["colors:8"] = {
- cs = "specifikace %s v barve %s bude cerna",
- de = "Spezifikation %s bei Farbe %s wird schwarz",
- en = "specification %s at color %s becomes black",
- fr = "la spécification %s de la couleur %s devient noire",
- it = "specifica %s del colore %s convertita in nero",
- nl = "specificatie %s bij %s wordt zwart",
- no = "spesifikasjon %s for farge %s gir kun svart",
- ro = "specificatia %s la culoarea %s devine neagra",
+ cs = "specifikace %a v barve %a bude cerna",
+ de = "Spezifikation %a bei Farbe %a wird schwarz",
+ en = "specification %a at color %a becomes black",
+ fr = "la spécification %a de la couleur %a devient noire",
+ it = "specifica %a del colore %a convertita in nero",
+ nl = "specificatie %a bij %a wordt zwart",
+ no = "spesifikasjon %a for farge %a gir kun svart",
+ ro = "specificatia %a la culoarea %a devine neagra",
},
["colors:9"] = {
- cs = "%s prostor barev neni podporovan",
- de = "%s Farbraum wird nicht unterstuetzt",
- en = "%s color space is not supported",
- fr = "l'espace de couleur %s n'est pas supporté",
- it = "spazio dei colori %s non supportato",
- nl = "%s kleurruimte wordt niet ondersteund",
- no = "%s fargerom er ikke støttet",
- ro = "spatiul de culoare %s nu este suportat",
+ cs = "%a prostor barev neni podporovan",
+ de = "%a Farbraum wird nicht unterstuetzt",
+ en = "%a color space is not supported",
+ fr = "l'espace de couleur %a n'est pas supporté",
+ it = "spazio dei colori %a non supportato",
+ nl = "%a kleurruimte wordt niet ondersteund",
+ no = "%a fargerom er ikke støttet",
+ ro = "spatiul de culoare %a nu este suportat",
},
["columns:1"] = {
- cs = "je mozno pouze %s sloupcu",
- de = "nur %s Spalten moeglich",
- en = "only %s columns possible",
- fr = "seules %s colonnes possibles",
- it = "solo %s colonne possibili",
- nl = "maximaal %s kolommen",
- no = "maksimalt %s kolonner",
- ro = "este posibil numai %s coloane",
+ cs = "je mozno pouze %a sloupcu",
+ de = "nur %a Spalten moeglich",
+ en = "only %a columns possible",
+ fr = "seules %a colonnes possibles",
+ it = "solo %a colonne possibili",
+ nl = "maximaal %a kolommen",
+ no = "maksimalt %a kolonner",
+ ro = "este posibil numai %a coloane",
},
["columns:10"] = {
cs = "zbyl (mene nez) 1 radek",
@@ -208,14 +209,14 @@ return {
ro = "blocul este prea lat pentru coloana",
},
["columns:12"] = {
- cs = "plovouci objekt je presunut do nasledujiciho sloupce / %s",
- de = "Gleitobjekt in naechste Zeile verschoben / %s",
- en = "float moved to next column / %s",
- fr = "flottant déplacé à la colonne suivante / %s",
- it = "oggetto mobile spostata alla colonna successiva / %s",
- nl = "plaatsblok verplaatst naar volgende kolom / %s",
- no = "flytblokk forskjøvet til neste kolonne / %s",
- ro = "blocul este mutat pe urmatoarea coloana / %s",
+ cs = "plovouci objekt je presunut do nasledujiciho sloupce / %a",
+ de = "Gleitobjekt in naechste Zeile verschoben / %a",
+ en = "float moved to next column / %a",
+ fr = "flottant déplacé à la colonne suivante / %a",
+ it = "oggetto mobile spostata alla colonna successiva / %a",
+ nl = "plaatsblok verplaatst naar volgende kolom / %a",
+ no = "flytblokk forskjøvet til neste kolonne / %a",
+ ro = "blocul este mutat pe urmatoarea coloana / %a",
},
["columns:13"] = {
cs = "siroky plovouci objekt je presunut nad sloupce",
@@ -268,14 +269,14 @@ return {
ro = "cadrele bottom (bottom float) nu sunt inca suportate",
},
["columns:6"] = {
- cs = "%s plovouci objekt(y) odlozeny",
- de = "%s Gleitobjekt(e) verschoben",
- en = "%s float(s) postponed",
- fr = "%s flottant(s) reporté(s)",
- it = "%s float(s) posticipate",
- nl = "%s plaatsblok(en) opgeschort",
- no = "%s flytblokk forskjøvet",
- ro = "%s blocurile sunt amanate",
+ cs = "%a plovouci objekt(y) odlozeny",
+ de = "%a Gleitobjekt(e) verschoben",
+ en = "%a float(s) postponed",
+ fr = "%a flottant(s) reporté(s)",
+ it = "%a float(s) posticipate",
+ nl = "%a plaatsblok(en) opgeschort",
+ no = "%a flytblokk forskjøvet",
+ ro = "%a blocurile sunt amanate",
},
["columns:7"] = {
cs = "vyvazovani ukonceno po 100 krocich",
@@ -288,14 +289,14 @@ return {
ro = "alinierea este oprita dupa 100 de incercari",
},
["columns:8"] = {
- cs = "vyvazeno v %s krocich",
- de = "ausgeglichen nach %s Schritt(en)",
- en = "balanced in %s step(s)",
- fr = "équilibré en %s pas",
- it = "bilanciamento in %s passo/i",
- nl = "gebalanceerd in %s stap(pen)",
- no = "balansert etter %s iterasjoner",
- ro = "aliniat in %s pas(i)",
+ cs = "vyvazeno v %a krocich",
+ de = "ausgeglichen nach %a Schritt(en)",
+ en = "balanced in %a step(s)",
+ fr = "équilibré en %a pas",
+ it = "bilanciamento in %a passo/i",
+ nl = "gebalanceerd in %a stap(pen)",
+ no = "balansert etter %a iterasjoner",
+ ro = "aliniat in %a pas(i)",
},
["columns:9"] = {
cs = "kontrola nerovnost",
@@ -308,156 +309,156 @@ return {
ro = "verificat alinierea",
},
["databases:1"] = {
- cs = "%s",
- de = "%s",
- en = "%s",
- fr = "%s",
- it = "%s",
- nl = "%s",
- no = "%s",
- ro = "%s",
+ cs = "%a",
+ de = "%a",
+ en = "%a",
+ fr = "%a",
+ it = "%a",
+ nl = "%a",
+ no = "%a",
+ ro = "%a",
},
["databases:2"] = {
- cs = "local file %s",
- de = "lokale Datei %s",
- en = "local file %s",
- fr = "fichier local %s",
- it = "file locale %s",
- nl = "lokaal bestand %s",
- no = "lokal fil %s",
- ro = "fisier local %s",
+ cs = "local file %a",
+ de = "lokale Datei %a",
+ en = "local file %a",
+ fr = "fichier local %a",
+ it = "file locale %a",
+ nl = "lokaal bestand %a",
+ no = "lokal fil %a",
+ ro = "fisier local %a",
},
["databases:3"] = {
- cs = "global file %s",
- de = "globale Datei %s",
- en = "global file %s",
- fr = "fichier global %s",
- it = "file globale %s",
- nl = "globaal bestand %s",
- no = "global fil %s",
- ro = "fisier global %s",
+ cs = "global file %a",
+ de = "globale Datei %a",
+ en = "global file %a",
+ fr = "fichier global %a",
+ it = "file globale %a",
+ nl = "globaal bestand %a",
+ no = "global fil %a",
+ ro = "fisier global %a",
},
["databases:4"] = {
- cs = "unknown file %s",
- de = "unbekannte Datei %s",
- en = "unknown file %s",
- fr = "fichier inconnu %s",
- it = "file sconosciuto %s",
- nl = "onbekend bestand %s",
- no = "ukjent fil %s",
- ro = "fisier necunoscut %s",
- },
- ["extended: preamble '%s', state '%s', order '%s'"] = {
- nl = "uitgebreid: preamble '%s', status '%s', volgorde '%s'",
+ cs = "unknown file %a",
+ de = "unbekannte Datei %a",
+ en = "unknown file %a",
+ fr = "fichier inconnu %a",
+ it = "file sconosciuto %a",
+ nl = "onbekend bestand %a",
+ no = "ukjent fil %a",
+ ro = "fisier necunoscut %a",
+ },
+ ["extended: preamble %a, state %a, order %a"] = {
+ nl = "uitgebreid: preamble %a, status %a, volgorde %a",
},
["figures:1"] = {
- cs = "obraz %s nelze nalezt",
- de = "Abbildung %s kann nicht gefunden werden",
- en = "figure %s can not be found",
- fr = "la figure %s ne peut être trouvée",
- it = "figura %s non trovata",
- nl = "figuur %s is niet te vinden",
- ro = "figura %s nu poate fi gasita",
+ cs = "obraz %a nelze nalezt",
+ de = "Abbildung %a kann nicht gefunden werden",
+ en = "figure %a can not be found",
+ fr = "la figure %a ne peut être trouvée",
+ it = "figura %a non trovata",
+ nl = "figuur %a is niet te vinden",
+ ro = "figura %a nu poate fi gasita",
},
["figures:2"] = {
- cs = "obraz %s nepritomen",
- de = "Abbildung %s wird nicht erstellt",
- en = "figure %s is not preset",
- fr = "la figure %s n'est pas pré-sélectionnée",
- it = "la figura %s non è preimpostata",
- nl = "figuur %s wordt niet preset",
- ro = "figura %s nu este presetata",
+ cs = "obraz %a nepritomen",
+ de = "Abbildung %a wird nicht erstellt",
+ en = "figure %a is not preset",
+ fr = "la figure %a n'est pas pré-sélectionnée",
+ it = "la figura %a non è preimpostata",
+ nl = "figuur %a wordt niet preset",
+ ro = "figura %a nu este presetata",
},
["figures:3"] = {
- cs = "dimensions of %s are determined externally",
- de = "dimensions of %s are determined externally",
- en = "dimensions of %s are determined externally",
- fr = "dimensions of %s are determined externally",
- it = "dimensions of %s are determined externally",
- nl = "maten van %s worden extern vastgesteld",
- ro = "dimensions of %s are determined externally",
+ cs = "dimensions of %a are determined externally",
+ de = "dimensions of %a are determined externally",
+ en = "dimensions of %a are determined externally",
+ fr = "dimensions of %a are determined externally",
+ it = "dimensions of %a are determined externally",
+ nl = "maten van %a worden extern vastgesteld",
+ ro = "dimensions of %a are determined externally",
},
["figures:4"] = {
- cs = "dimenze obrazu %s nacteny primo z jeho souboru",
- de = "Dimensionen von %s geladen aus der Abbildungsdatei selbst",
- en = "dimensions of %s loaded from figurefile itself",
- fr = "les dimensions de %s chargées implicitement à partir du fichier de figure",
- it = "dimensioni di %s caricate dal file di immagini stesso",
- nl = "maten van %s geladen uit figuurfile zelf",
- ro = "dimensiunea figurii %s se incarca din fisierul insusi",
+ cs = "dimenze obrazu %a nacteny primo z jeho souboru",
+ de = "Dimensionen von %a geladen aus der Abbildungsdatei selbst",
+ en = "dimensions of %a loaded from figurefile itself",
+ fr = "les dimensions de %a chargées implicitement à partir du fichier de figure",
+ it = "dimensioni di %a caricate dal file di immagini stesso",
+ nl = "maten van %a geladen uit figuurfile zelf",
+ ro = "dimensiunea figurii %a se incarca din fisierul insusi",
},
["figures:5"] = {
- cs = "dimensions of %s are unknown",
- de = "Dimensions of %s are unknown",
- en = "dimensions of %s are unknown",
- fr = "dimensions of %s are unknown",
- it = "dimensions of %s are unknown",
- nl = "maten van %s zijn onbekend",
- ro = "dimensions of %s are unknown",
+ cs = "dimensions of %a are unknown",
+ de = "Dimensions of %a are unknown",
+ en = "dimensions of %a are unknown",
+ fr = "dimensions of %a are unknown",
+ it = "dimensions of %a are unknown",
+ nl = "maten van %a zijn onbekend",
+ ro = "dimensions of %a are unknown",
},
["figures:6"] = {
- cs = "dimenze obrazu %s spocteny programem rlxtools",
- de = "Dimensionen von %s ausgerechnet durch rlxtools",
- en = "dimensions of %s calculated by rlxtools",
- fr = "les dimensions de %s calculées par rlxtools",
- it = "dimensioni di %s calcolate da rlxtools",
- nl = "maten van %s berekend door rlxtools",
- ro = "dimensiunea figurii %s este calculata de rlxtools",
+ cs = "dimenze obrazu %a spocteny programem rlxtools",
+ de = "Dimensionen von %a ausgerechnet durch rlxtools",
+ en = "dimensions of %a calculated by rlxtools",
+ fr = "les dimensions de %a calculées par rlxtools",
+ it = "dimensioni di %a calcolate da rlxtools",
+ nl = "maten van %a berekend door rlxtools",
+ ro = "dimensiunea figurii %a este calculata de rlxtools",
},
["figures:8"] = {
- cs = "obrazovy objekt %s je znovu pouzit",
- de = "Abbildungobjekt %s wurde wiederverwandt",
- en = "figureobject %s is reused",
- fr = "figureobject %s est réutilisé",
- it = "oggetto-figura %s riutilizzato",
- nl = "figuurobject %s wordt opnieuw gebruikt",
- ro = "obiectul figura %s este refolosit",
+ cs = "obrazovy objekt %a je znovu pouzit",
+ de = "Abbildungobjekt %a wurde wiederverwandt",
+ en = "figureobject %a is reused",
+ fr = "figureobject %a est réutilisé",
+ it = "oggetto-figura %a riutilizzato",
+ nl = "figuurobject %a wordt opnieuw gebruikt",
+ ro = "obiectul figura %a este refolosit",
},
["files:1"] = {
- cs = "synonymum souboru %s je jiz pouzito pro %s",
- de = "Dateisynonym %s wird bereits fuer %s benutzt",
- en = "file synonym %s is already used for %s",
- fr = "le synonyme de fichier %s est déjà utilisé pour %s",
- it = "sinonimo file %s già in uso per %s",
- nl = "file synoniem %s is al in gebruik voor %s",
- no = "filesynonym %s er allerede brukt for %s",
- ro = "sinonimul fisierelor %s este folosit deja pentru %s",
+ cs = "synonymum souboru %a je jiz pouzito pro %a",
+ de = "Dateisynonym %a wird bereits fuer %a benutzt",
+ en = "file synonym %a is already used for %a",
+ fr = "le synonyme de fichier %a est déjà utilisé pour %a",
+ it = "sinonimo file %a già in uso per %a",
+ nl = "file synoniem %a is al in gebruik voor %a",
+ no = "filesynonym %a er allerede brukt for %a",
+ ro = "sinonimul fisierelor %a este folosit deja pentru %a",
},
["filters:1"] = {
- cs = "filter %s is loaded",
- de = "filter %s ist geladen",
- en = "filter %s is loaded",
- fr = "le filtre %s est chargé",
- it = "filtro %s caricato",
- nl = "filter %s wordt geladen",
+ cs = "filter %a is loaded",
+ de = "filter %a ist geladen",
+ en = "filter %a is loaded",
+ fr = "le filtre %a est chargé",
+ it = "filtro %a caricato",
+ nl = "filter %a wordt geladen",
},
["filters:2"] = {
- cs = "unknown filter %s",
- de = "unknown filter %s",
- en = "unknown filter %s",
- fr = "filtre %s inconnu",
- it = "filtro sconosciuto %s",
- nl = "onbekend filter %s",
+ cs = "unknown filter %a",
+ de = "unknown filter %a",
+ en = "unknown filter %a",
+ fr = "filtre %a inconnu",
+ it = "filtro sconosciuto %a",
+ nl = "onbekend filter %a",
},
["floatblocks:1"] = {
- cs = "%s precislovano / %s => %s",
- de = "%s neu nummeriert / %s => %s",
- en = "%s renumbered / %s => %s",
- fr = "%s renuméroté / %s => %s",
- it = "%s rinumerato / %s => %s",
- nl = "%s hernummerd / %s => %s",
- no = "%s renummerert / %s => %s",
- ro = "%s renumerotat / %s => %s",
+ cs = "%a precislovano / %a => %a",
+ de = "%a neu nummeriert / %a => %a",
+ en = "%a renumbered / %a => %a",
+ fr = "%a renuméroté / %a => %a",
+ it = "%a rinumerato / %a => %a",
+ nl = "%a hernummerd / %a => %a",
+ no = "%a renummerert / %a => %a",
+ ro = "%a renumerotat / %a => %a",
},
["floatblocks:10"] = {
- cs = "%s omezeno",
- de = "%s begrenzt",
- en = "%s limited",
- fr = "%s limité",
- it = "%s limitato",
- nl = "%s begrensd",
- no = "%s begrenset",
- ro = "%s limitat",
+ cs = "%a omezeno",
+ de = "%a begrenzt",
+ en = "%a limited",
+ fr = "%a limité",
+ it = "%a limitato",
+ nl = "%a begrensd",
+ no = "%a begrenset",
+ ro = "%a limitat",
},
["floatblocks:11"] = {
cs = "nedan zadny blok",
@@ -490,34 +491,34 @@ return {
ro = "there is nothing to split",
},
["floatblocks:2"] = {
- cs = "%s ulozeno",
- de = "%s gespeichert",
- en = "%s saved",
- fr = "%s sauvegardé",
- it = "%s salvato",
- nl = "%s bewaard",
- no = "%s lagret",
- ro = "%s salvat",
+ cs = "%a ulozeno",
+ de = "%a gespeichert",
+ en = "%a saved",
+ fr = "%a sauvegardé",
+ it = "%a salvato",
+ nl = "%a bewaard",
+ no = "%a lagret",
+ ro = "%a salvat",
},
["floatblocks:3"] = {
- cs = "%s presunuto",
- de = "%s verschoben",
- en = "%s moved",
- fr = "%s déplacé",
- it = "%s mosso",
- nl = "%s verplaatst",
- no = "%s flyttet",
- ro = "%s mutat",
+ cs = "%a presunuto",
+ de = "%a verschoben",
+ en = "%a moved",
+ fr = "%a déplacé",
+ it = "%a mosso",
+ nl = "%a verplaatst",
+ no = "%a flyttet",
+ ro = "%a mutat",
},
["floatblocks:4"] = {
- cs = "%s umisteno",
- de = "%s plaziert",
- en = "%s placed",
- fr = "%s placé",
- it = "%s sistemato",
- nl = "%s geplaatst",
- no = "%s plassert",
- ro = "%s plasat",
+ cs = "%a umisteno",
+ de = "%a plaziert",
+ en = "%a placed",
+ fr = "%a placé",
+ it = "%a sistemato",
+ nl = "%a geplaatst",
+ no = "%a plassert",
+ ro = "%a plasat",
},
["floatblocks:5"] = {
cs = "poradi prizpusobeno",
@@ -530,34 +531,34 @@ return {
ro = "ordinea adaptata",
},
["floatblocks:6"] = {
- cs = "pocet hornich plovoucich objektu je omezen na %s",
- de = "Anz. der oberen Gleitobjekte beschraengt auf %s",
- en = "n of top floats limited to %s",
- fr = "n flottants de haut de page limité à %s",
- it = "n di top floats limitato a %s",
- nl = "maximaal %s boven",
- no = "maksimalt %s flytblokker øverst",
- ro = "nr. cadrelor de sus limitat la %s",
+ cs = "pocet hornich plovoucich objektu je omezen na %a",
+ de = "Anz. der oberen Gleitobjekte beschraengt auf %a",
+ en = "n of top floats limited to %a",
+ fr = "n flottants de haut de page limité à %a",
+ it = "n di top floats limitato a %a",
+ nl = "maximaal %a boven",
+ no = "maksimalt %a flytblokker øverst",
+ ro = "nr. cadrelor de sus limitat la %a",
},
["floatblocks:7"] = {
- cs = "pocet spodnich plovoucich objektu je omezen na %s",
- de = "Anz. der unteren Gleitobjekte beschraengt auf %s",
- en = "n of bottom floats limited to %s",
- fr = "n flottants de bas de page limité à %s",
- it = "n di bottom floats limitato a %s",
- nl = "maximaal %s onder",
- no = "maksimalt %s flytblokker nederst",
- ro = "nr. blocurilor de jos limitat la %s",
+ cs = "pocet spodnich plovoucich objektu je omezen na %a",
+ de = "Anz. der unteren Gleitobjekte beschraengt auf %a",
+ en = "n of bottom floats limited to %a",
+ fr = "n flottants de bas de page limité à %a",
+ it = "n di bottom floats limitato a %a",
+ nl = "maximaal %a onder",
+ no = "maksimalt %a flytblokker nederst",
+ ro = "nr. blocurilor de jos limitat la %a",
},
["floatblocks:8"] = {
- cs = "radku je mene nez %s",
- de = "weniger als %s zeilen",
- en = "less than %s lines",
- fr = "moins de %s lignes",
- it = "meno di %s righe",
- nl = "minder dan %s regels",
- no = "mindre enn %s linjer",
- ro = "mai putin de %s linii",
+ cs = "radku je mene nez %a",
+ de = "weniger als %a zeilen",
+ en = "less than %a lines",
+ fr = "moins de %a lignes",
+ it = "meno di %a righe",
+ nl = "minder dan %a regels",
+ no = "mindre enn %a linjer",
+ ro = "mai putin de %a linii",
},
["floatblocks:9"] = {
cs = "poradi naruseno",
@@ -569,118 +570,118 @@ return {
no = "rekkefølge endret",
ro = "ordinea deranjata",
},
- ["flushed: preamble '%s'"] = {
- nl = "weggeschreven: preamble '%s'",
+ ["flushed: preamble %a"] = {
+ nl = "weggeschreven: preamble %a",
},
["fonts:1"] = {
- cs = "kodovani %s",
- de = "Kodierung %s",
- en = "coding %s",
- fr = "encodage %s",
- it = "codifica %s",
- nl = "codering %s",
- no = "koding %s",
- ro = "codificarea %s",
+ cs = "kodovani %a",
+ de = "Kodierung %a",
+ en = "coding %a",
+ fr = "encodage %a",
+ it = "codifica %a",
+ nl = "codering %a",
+ no = "koding %a",
+ ro = "codificarea %a",
},
["fonts:10"] = {
- cs = "neznamy font %s",
- de = "unbekanntes Font %s",
- en = "unknown font file %s",
- fr = "fichier de police %s inconnu",
- it = "file di font sconosciuto %s",
- nl = "onbekende font file %s",
- no = "ukjent fontfil %s",
- ro = "fisier font necunoscut %s",
+ cs = "neznamy font %a",
+ de = "unbekanntes Font %a",
+ en = "unknown font file %a",
+ fr = "fichier de police %a inconnu",
+ it = "file di font sconosciuto %a",
+ nl = "onbekende font file %a",
+ no = "ukjent fontfil %a",
+ ro = "fisier font necunoscut %a",
},
["fonts:14"] = {
- cs = "bodyfont %s is defined (can better be done global)",
- de = "Fliesstext %s wurde definiert (besser waere globale Definition)",
- en = "bodyfont %s is defined (can better be done global)",
- fr = "policecorps %s est défini (une définition globale pourrait être plus adéquat)",
- it = "corpo del testo %s definito (sarebbe meglio globale)",
- nl = "korps %s is gedefinieerd (kan beter globaal plaatsvinden)",
- no = "bodyfont %s is defined (can better be done global)",
- ro = "bodyfont %s is defined (can better be done global)",
+ cs = "bodyfont %a is defined (can better be done global)",
+ de = "Fliesstext %a wurde definiert (besser waere globale Definition)",
+ en = "bodyfont %a is defined (can better be done global)",
+ fr = "policecorps %a est défini (une définition globale pourrait être plus adéquat)",
+ it = "corpo del testo %a definito (sarebbe meglio globale)",
+ nl = "korps %a is gedefinieerd (kan beter globaal plaatsvinden)",
+ no = "bodyfont %a is defined (can better be done global)",
+ ro = "bodyfont %a is defined (can better be done global)",
},
["fonts:2"] = {
- cs = "varianta %s je nactena",
- de = "Variante %s ist geladen",
- en = "variant %s is loaded",
- fr = "la variante %s est chargée",
- it = "variante %s caricata",
- nl = "variant %s wordt geladen",
- no = "variant %s er lest inn",
- ro = "varianta %s este incarcata",
+ cs = "varianta %a je nactena",
+ de = "Variante %a ist geladen",
+ en = "variant %a is loaded",
+ fr = "la variante %a est chargée",
+ it = "variante %a caricata",
+ nl = "variant %a wordt geladen",
+ no = "variant %a er lest inn",
+ ro = "varianta %a este incarcata",
},
["fonts:3"] = {
- cs = "neznama varianta %s",
- de = "Unbekannte Variante %s",
- en = "unknown variant %s",
- fr = "variante %s inconnue",
- it = "variante sconosciuta %s",
- nl = "onbekende variant %s",
- no = "ukjent variant %s",
- ro = "varianta necunoscuta %s",
+ cs = "neznama varianta %a",
+ de = "Unbekannte Variante %a",
+ en = "unknown variant %a",
+ fr = "variante %a inconnue",
+ it = "variante sconosciuta %a",
+ nl = "onbekende variant %a",
+ no = "ukjent variant %a",
+ ro = "varianta necunoscuta %a",
},
["fonts:4"] = {
- cs = "zakladni font %s neni definovan",
- de = "Fliesstext %s ist nicht definiert",
- en = "bodyfont %s is not defined",
- fr = "policecorps %s n'est pas définie",
- it = "corpo del testo %s non definito",
- nl = "korps %s is niet gedefinieerd",
- no = "hovedfont %s er ikke definert",
- ro = "corpul de litere %s nu este definit",
+ cs = "zakladni font %a neni definovan",
+ de = "Fliesstext %a ist nicht definiert",
+ en = "bodyfont %a is not defined",
+ fr = "policecorps %a n'est pas définie",
+ it = "corpo del testo %a non definito",
+ nl = "korps %a is niet gedefinieerd",
+ no = "hovedfont %a er ikke definert",
+ ro = "corpul de litere %a nu este definit",
},
["fonts:5"] = {
- cs = "styl %s neni definovan",
- de = "Stil %s ist nicht definiert",
- en = "style %s is not defined",
- fr = "le style %s n'est pas défini",
- it = "stile %s non definito",
- nl = "stijl %s is niet gedefinieerd",
- no = "stil %s er ikke definert",
- ro = "stilul %s nu este definit",
+ cs = "styl %a neni definovan",
+ de = "Stil %a ist nicht definiert",
+ en = "style %a is not defined",
+ fr = "le style %a n'est pas défini",
+ it = "stile %a non definito",
+ nl = "stijl %a is niet gedefinieerd",
+ no = "stil %a er ikke definert",
+ ro = "stilul %a nu este definit",
},
["fonts:6"] = {
- cs = "%s je nacten",
- de = "%s ist geladen",
- en = "%s is loaded",
- fr = "%s est chargé",
- it = "%s caricato",
- nl = "%s wordt geladen",
- no = "%s er lest inn",
- ro = "%s este incarcat",
+ cs = "%a je nacten",
+ de = "%a ist geladen",
+ en = "%a is loaded",
+ fr = "%a est chargé",
+ it = "%a caricato",
+ nl = "%a wordt geladen",
+ no = "%a er lest inn",
+ ro = "%a este incarcat",
},
["fonts:7"] = {
- cs = "neznamy format %s",
- de = "unbekanntes Format %s",
- en = "unknown format %s",
- fr = "format %s inconnu",
- it = "formato sconosciuto %s",
- nl = "onbekend formaat %s",
- no = "ukjent format %s",
- ro = "format necunoscut %s",
+ cs = "neznamy format %a",
+ de = "unbekanntes Format %a",
+ en = "unknown format %a",
+ fr = "format %a inconnu",
+ it = "formato sconosciuto %a",
+ nl = "onbekend formaat %a",
+ no = "ukjent format %a",
+ ro = "format necunoscut %a",
},
["fonts:8"] = {
- cs = "styl %s definovan",
- de = "Stil %s definiert",
- en = "style %s defined",
- fr = "style %s défini",
- it = "stile %s definito",
- nl = "stijl %s gedefinieerd",
- no = "stil %s definert",
- ro = "stilul %s definit",
+ cs = "styl %a definovan",
+ de = "Stil %a definiert",
+ en = "style %a defined",
+ fr = "style %a défini",
+ it = "stile %a definito",
+ nl = "stijl %a gedefinieerd",
+ no = "stil %a definert",
+ ro = "stilul %a definit",
},
["interactions:1"] = {
- cs = "pomer %s x %s (s x v)",
- de = "Seitenverhaeltnis %s x %s (B x H)",
- en = "aspect ratio %s x %s (b x h)",
- fr = "ratio d'aspect %s x %s (b x h)",
- it = "rapporto %s x %s (b x a)",
- nl = "aspect ratio %s x %s (b x h)",
- no = "forholdstall %s x %s (b x h)",
- ro = "aspectul %s x %s (b x h)",
+ cs = "pomer %a x %a (s x v)",
+ de = "Seitenverhaeltnis %a x %a (B x H)",
+ en = "aspect ratio %a x %a (b x h)",
+ fr = "ratio d'aspect %a x %a (b x h)",
+ it = "rapporto %a x %a (b x a)",
+ nl = "aspect ratio %a x %a (b x h)",
+ no = "forholdstall %a x %a (b x h)",
+ ro = "aspectul %a x %a (b x h)",
},
["interactions:2"] = {
cs = "aktivni",
@@ -693,14 +694,14 @@ return {
ro = "activ",
},
["interactions:21"] = {
- cs = "%s kod vlozen",
- de = "%s Code eingefuegt",
- en = "%s code inserted",
- fr = "%s code inseré",
- it = "codice %s inserito",
- nl = "%s code tussengevoegd",
- no = "%s kode satt inn / tilføyd",
- ro = "%s cod inserat",
+ cs = "%a kod vlozen",
+ de = "%a Code eingefuegt",
+ en = "%a code inserted",
+ fr = "%a code inseré",
+ it = "codice %a inserito",
+ nl = "%a code tussengevoegd",
+ no = "%a kode satt inn / tilføyd",
+ ro = "%a cod inserat",
},
["interactions:3"] = {
cs = "neaktivni",
@@ -713,184 +714,184 @@ return {
ro = "inactiv",
},
["interactions:4"] = {
- cs = "zadna strankova synchronizace (%s) v hmode",
- de = "keine Seitensynchronisation (%s) im hmode",
- en = "no pagesynchronisation (%s) in hmode",
- fr = "pas de synchronisation de page (%s) dans le hmode",
- it = "sincronizzazione di pagina (%s) non disponibile in hmode",
- nl = "geen paginasynchronisatie (%s) in hmode",
- no = "ingen sidesynkronisering (%s) i hmode",
- ro = "nu exista sincronizare pt. pagini (%s) in hmode",
+ cs = "zadna strankova synchronizace (%a) v hmode",
+ de = "keine Seitensynchronisation (%a) im hmode",
+ en = "no pagesynchronisation (%a) in hmode",
+ fr = "pas de synchronisation de page (%a) dans le hmode",
+ it = "sincronizzazione di pagina (%a) non disponibile in hmode",
+ nl = "geen paginasynchronisatie (%a) in hmode",
+ no = "ingen sidesynkronisering (%a) i hmode",
+ ro = "nu exista sincronizare pt. pagini (%a) in hmode",
},
["interactions:5"] = {
- cs = "unknown attachment %s",
- de = "unknown attachment %s",
- en = "unknown attachment %s",
- fr = "le fichier joint %s est inconnu",
- it = "unknown attachment %s",
- nl = "onbekend attachment %s",
- no = "unknown attachment %s",
- ro = "unknown attachment %s",
+ cs = "unknown attachment %a",
+ de = "unknown attachment %a",
+ en = "unknown attachment %a",
+ fr = "le fichier joint %a est inconnu",
+ it = "unknown attachment %a",
+ nl = "onbekend attachment %a",
+ no = "unknown attachment %a",
+ ro = "unknown attachment %a",
},
["interactions:6"] = {
- cs = "attachment file %s does not exist",
- de = "attachment file %s does not exist",
- en = "attachment file %s does not exist",
- fr = "le fichier joint %s n'existe pas",
- it = "attachment file %s does not exist",
- nl = "attachment file %s bestaat niet",
- no = "attachment file %s does not exist",
- ro = "attachment file %s does not exist",
+ cs = "attachment file %a does not exist",
+ de = "attachment file %a does not exist",
+ en = "attachment file %a does not exist",
+ fr = "le fichier joint %a n'existe pas",
+ it = "attachment file %a does not exist",
+ nl = "attachment file %a bestaat niet",
+ no = "attachment file %a does not exist",
+ ro = "attachment file %a does not exist",
},
["languages:1"] = {
- cs = "vzory %s pro %s nacteny (n=%s,e=%s,m=%s)",
- de = "Trennmuster %s fuer %s geladen (n=%s,e=%s,m=%s)",
- en = "patterns %s for %s loaded (n=%s,e=%s,m=%s)",
- fr = "les motifs %s pour %s sont chargés (n=%s,e=%s,m=%s)",
- it = "schemi %s per %s caricati (n=%s,e=%s,m=%s)",
- nl = "afbreekpatronen %s voor %s geladen (n=%s,e=%s,m=%s)",
- no = "orddelingsmønster %s for %s er lest inn (n=%s,e=%s,m=%s)",
- ro = "sablonul %s pentru %s s-a incarcat (n=%s,e=%s,m=%s)",
+ cs = "vzory %a pro %a nacteny (n=%a,e=%a,m=%a)",
+ de = "Trennmuster %a fuer %a geladen (n=%a,e=%a,m=%a)",
+ en = "patterns %a for %a loaded (n=%a,e=%a,m=%a)",
+ fr = "les motifs %a pour %a sont chargés (n=%a,e=%a,m=%a)",
+ it = "schemi %a per %a caricati (n=%a,e=%a,m=%a)",
+ nl = "afbreekpatronen %a voor %a geladen (n=%a,e=%a,m=%a)",
+ no = "orddelingsmønster %a for %a er lest inn (n=%a,e=%a,m=%a)",
+ ro = "sablonul %a pentru %a s-a incarcat (n=%a,e=%a,m=%a)",
},
["languages:10"] = {
- cs = "vzory %s nacteny",
- de = "Trennmuster %s geladen",
- en = "patterns %s loaded",
- fr = "motifs %s chargés",
- it = "schemi %s caricati",
- nl = "patronen %sgeladen",
- no = "orddelingsmønster %s er lest inn",
- ro = "sabloanele %s incarcate",
+ cs = "vzory %a nacteny",
+ de = "Trennmuster %a geladen",
+ en = "patterns %a loaded",
+ fr = "motifs %a chargés",
+ it = "schemi %a caricati",
+ nl = "patronen %ageladen",
+ no = "orddelingsmønster %a er lest inn",
+ ro = "sabloanele %a incarcate",
},
["languages:2"] = {
- cs = "zadne vzory %s pro %s (n=%s,e=%s,m=%s) (%s,%s)",
- de = "Keine Trennmuster %s fuer %s (n=%s,e=%s,m=%s) (%s,%s)",
- en = "no patterns %s for %s (n=%s,e=%s,m=%s) (%s,%s)",
- fr = "pas de motifs %s pour %s (n=%s,e=%s,m=%s) (%s,%s)",
- it = "niente schemi %s per %s (n=%s,e=%s,m=%s) (%s,%s)",
- nl = "geen afbreekpatronen %s voor %s (n=%s,e=%s,m=%s) (%s,%s)",
- no = "ingen orddelingsmønster %s for %s (n=%s,e=%s,m=%s) (%s,%s)",
- ro = "nu exista sabloane %s pentru %s (n=%s,e=%s,m=%s) (%s,%s)",
+ cs = "zadne vzory %a pro %a (n=%a,e=%a,m=%a) (%a,%a)",
+ de = "Keine Trennmuster %a fuer %a (n=%a,e=%a,m=%a) (%a,%a)",
+ en = "no patterns %a for %a (n=%a,e=%a,m=%a) (%a,%a)",
+ fr = "pas de motifs %a pour %a (n=%a,e=%a,m=%a) (%a,%a)",
+ it = "niente schemi %a per %a (n=%a,e=%a,m=%a) (%a,%a)",
+ nl = "geen afbreekpatronen %a voor %a (n=%a,e=%a,m=%a) (%a,%a)",
+ no = "ingen orddelingsmønster %a for %a (n=%a,e=%a,m=%a) (%a,%a)",
+ ro = "nu exista sabloane %a pentru %a (n=%a,e=%a,m=%a) (%a,%a)",
},
["languages:3"] = {
- cs = "deleni slov %s pro %s nacteno (n=%s,e=%s,m=%s)",
- de = "Trenndefinitionen %s fuer %s geladen (n=%s,e=%s,m=%s)",
- en = "hyphenations %s for %s loaded (n=%s,e=%s,m=%s)",
- fr = "hyphenations %s pour %s chargés (n=%s,e=%s,m=%s)",
- it = "sillabazione %s per %s caricata (n=%s,e=%s,m=%s)",
- nl = "afbreekdefinities %s voor %s geladen (n=%s,e=%s,m=%s)",
- no = "orddelingsdefinisjon %s for %s er lest inn (n=%s,e=%s,m=%s)",
- ro = "despartirea in silabe %s pentru %s s-a incarcat (n=%s,e=%s,m=%s)",
+ cs = "deleni slov %a pro %a nacteno (n=%a,e=%a,m=%a)",
+ de = "Trenndefinitionen %a fuer %a geladen (n=%a,e=%a,m=%a)",
+ en = "hyphenations %a for %a loaded (n=%a,e=%a,m=%a)",
+ fr = "hyphenations %a pour %a chargés (n=%a,e=%a,m=%a)",
+ it = "sillabazione %a per %a caricata (n=%a,e=%a,m=%a)",
+ nl = "afbreekdefinities %a voor %a geladen (n=%a,e=%a,m=%a)",
+ no = "orddelingsdefinisjon %a for %a er lest inn (n=%a,e=%a,m=%a)",
+ ro = "despartirea in silabe %a pentru %a s-a incarcat (n=%a,e=%a,m=%a)",
},
["languages:4"] = {
- cs = "zadne deleni slov %s pro %s (n=%s,e=%s,m=%s)",
- de = "Keine Trenndefinitionen %s fuer %s (n=%s,e=%s,m=%s)",
- en = "no hyphenations %s for %s (n=%s,e=%s,m=%s)",
- fr = "pas d'hyphenations %s pour %s (n=%s,e=%s,m=%s)",
- it = "niente sillabazione %s per %s (n=%s,e=%s,m=%s)",
- nl = "geen afbreekdefinities %s voor %s (n=%s,e=%s,m=%s)",
- no = "ingen orddelingsdefinisjon %s for %s (n=%s,e=%s,m=%s)",
- ro = "nu exista despartire in silabe %s pentru %s (n=%s,e=%s,m=%s)",
+ cs = "zadne deleni slov %a pro %a (n=%a,e=%a,m=%a)",
+ de = "Keine Trenndefinitionen %a fuer %a (n=%a,e=%a,m=%a)",
+ en = "no hyphenations %a for %a (n=%a,e=%a,m=%a)",
+ fr = "pas d'hyphenations %a pour %a (n=%a,e=%a,m=%a)",
+ it = "niente sillabazione %a per %a (n=%a,e=%a,m=%a)",
+ nl = "geen afbreekdefinities %a voor %a (n=%a,e=%a,m=%a)",
+ no = "ingen orddelingsdefinisjon %a for %a (n=%a,e=%a,m=%a)",
+ ro = "nu exista despartire in silabe %a pentru %a (n=%a,e=%a,m=%a)",
},
["languages:5"] = {
- cs = "vzory pro %s nenacteny",
- de = "Trennmuster fuer %s nicht geladen",
- en = "patterns for %s not loaded",
- fr = "les motifs pour %s ne sont pas chargés",
- it = "schemi per %s non caricati",
- nl = "afbreekpatronen voor %s niet geladen",
- no = "orddelingsmønster for %s er ikke lest inn",
- ro = "sabloanele pentru %s nu sunt incarcate",
+ cs = "vzory pro %a nenacteny",
+ de = "Trennmuster fuer %a nicht geladen",
+ en = "patterns for %a not loaded",
+ fr = "les motifs pour %a ne sont pas chargés",
+ it = "schemi per %a non caricati",
+ nl = "afbreekpatronen voor %a niet geladen",
+ no = "orddelingsmønster for %a er ikke lest inn",
+ ro = "sabloanele pentru %a nu sunt incarcate",
},
["languages:6"] = {
- cs = "jazyk %s neni definovan",
- de = "Sprache %s ist undefiniert",
- en = "language %s is undefined",
- fr = "langue %s non définie",
- it = "lingua %s non definita",
- nl = "taal %s is niet gedefinieerd",
- no = "spràk %s er udefinert",
- ro = "limba %s nu este definita",
+ cs = "jazyk %a neni definovan",
+ de = "Sprache %a ist undefiniert",
+ en = "language %a is undefined",
+ fr = "langue %a non définie",
+ it = "lingua %a non definita",
+ nl = "taal %a is niet gedefinieerd",
+ no = "spràk %a er udefinert",
+ ro = "limba %a nu este definita",
},
["languages:7"] = {
- cs = "specificke volby jazyka [%s] zavadeji %s (zavlecenou) mezeru",
- de = "Sprachenspezifische Option [%s] fuegt eine Luecke von %s ein",
- en = "language specific options [%s] introduce a %s skip",
- fr = "les options spécifiques de langue [%s] introduisent un %s saut",
- it = "opzioni specifiche per la lingua [%s] introducono un salto %s",
- nl = "taal specifieke opties [%s] introduceren een skip van %s",
- no = "spràk spesifikk opsjon [%s] introduserer et %s hopp",
- ro = "optiunile specifice ale limbii [%s] introduc un spatiu %s",
+ cs = "specificke volby jazyka [%a] zavadeji %a (zavlecenou) mezeru",
+ de = "Sprachenspezifische Option [%a] fuegt eine Luecke von %a ein",
+ en = "language specific options [%a] introduce a %a skip",
+ fr = "les options spécifiques de langue [%a] introduisent un %a saut",
+ it = "opzioni specifiche per la lingua [%a] introducono un salto %a",
+ nl = "taal specifieke opties [%a] introduceren een skip van %a",
+ no = "spràk spesifikk opsjon [%a] introduserer et %a hopp",
+ ro = "optiunile specifice ale limbii [%a] introduc un spatiu %a",
},
["languages:8"] = {
- cs = "specificke volby jazyka [%s] bez mezer pripojeny",
- de = "Sprachenspezifische Option [%s] nahtlos hinzugefuegt",
- en = "language specific options [%s] seamless appended",
- fr = "les options spécifiques de langue [%s] sont ajoutés en douceur",
- it = "opzioni specifiche per la lingua [%s] aggiunte trasparentemente",
- nl = "taal specifieke opties [%s] naadloos toegevoegd",
- no = "spràk spesifikk opsjon [%s] problemfritt tilføyd",
- ro = "optiunile specifice ale limbii [%s] adaugate",
+ cs = "specificke volby jazyka [%a] bez mezer pripojeny",
+ de = "Sprachenspezifische Option [%a] nahtlos hinzugefuegt",
+ en = "language specific options [%a] seamless appended",
+ fr = "les options spécifiques de langue [%a] sont ajoutés en douceur",
+ it = "opzioni specifiche per la lingua [%a] aggiunte trasparentemente",
+ nl = "taal specifieke opties [%a] naadloos toegevoegd",
+ no = "spràk spesifikk opsjon [%a] problemfritt tilføyd",
+ ro = "optiunile specifice ale limbii [%a] adaugate",
},
["languages:9"] = {
- cs = "language %s is active",
- de = "Sprache %s ist aktiv",
- en = "language %s is active",
- fr = "la langue %s est active",
- it = "lingua %s attiva",
- nl = "taal %s is actief",
- no = "spràk %s er aktivt",
- ro = "limba %s este activa",
+ cs = "language %a is active",
+ de = "Sprache %a ist aktiv",
+ en = "language %a is active",
+ fr = "la langue %a est active",
+ it = "lingua %a attiva",
+ nl = "taal %a is actief",
+ no = "spràk %a er aktivt",
+ ro = "limba %a este activa",
},
["layouts:1"] = {
- cs = "vyska textu prizpusobena s %s na strane %s",
- de = "Texthoehe angepasst mit %s auf Seite %s",
- en = "textheight adapted with %s at page %s",
- fr = "hauteurtexte adaptée avec %s à la page %s",
- it = "altezza del testo adattata con %s a pagina %s",
- nl = "teksthoogte aangepast met %s op pagina %s",
- no = "teksthøyde tilpasset med %s på side %s",
- ro = "textheight adaptat cu %s la pagina %s",
+ cs = "vyska textu prizpusobena s %a na strane %a",
+ de = "Texthoehe angepasst mit %a auf Seite %a",
+ en = "textheight adapted with %a at page %a",
+ fr = "hauteurtexte adaptée avec %a à la page %a",
+ it = "altezza del testo adattata con %a a pagina %a",
+ nl = "teksthoogte aangepast met %a op pagina %a",
+ no = "teksthøyde tilpasset med %a på side %a",
+ ro = "textheight adaptat cu %a la pagina %a",
},
["layouts:10"] = {
- cs = "%s a %s nedava dohromady 1.0",
- de = "%s und %s ergeben zusammen nicht 1.0",
- en = "%s and %s don't add up to 1.0",
- fr = "%s et %s ne sont pas ajoutés à 1.0",
- it = "%s e %s non sommano a 1.0",
- nl = "%s en %s tellen niet op tot 1.0",
- no = "%s og %s er ikke 1.0 til sammen",
- ro = "%s si %s nu se adauga pana la 1.0",
+ cs = "%a a %a nedava dohromady 1.0",
+ de = "%a und %a ergeben zusammen nicht 1.0",
+ en = "%a and %a don't add up to 1.0",
+ fr = "%a et %a ne sont pas ajoutés à 1.0",
+ it = "%a e %a non sommano a 1.0",
+ nl = "%a en %a tellen niet op tot 1.0",
+ no = "%a og %a er ikke 1.0 til sammen",
+ ro = "%a si %a nu se adauga pana la 1.0",
},
["layouts:11"] = {
- cs = "svisla mezera %s neni povolena v pevnem radkovem rejstriku",
- de = "Zwischenraum %s nicht im Grittermoduserlau",
- en = "spacing %s not permitted in gridmode",
- fr = "espacement %s non permis en modegrille",
- it = "spaziatura %s non permessa in modo griglia",
- nl = "interlinie %s niet toegestaan in gridmode",
- no = "mellomrom %s ikke tillatt i gridmodus",
- ro = "spatierea %s nu este permisa in gridmode",
+ cs = "svisla mezera %a neni povolena v pevnem radkovem rejstriku",
+ de = "Zwischenraum %a nicht im Grittermoduserlau",
+ en = "spacing %a not permitted in gridmode",
+ fr = "espacement %a non permis en modegrille",
+ it = "spaziatura %a non permessa in modo griglia",
+ nl = "interlinie %a niet toegestaan in gridmode",
+ no = "mellomrom %a ikke tillatt i gridmodus",
+ ro = "spatierea %a nu este permisa in gridmode",
},
["layouts:2"] = {
- cs = "%s krat odlozeny text umisten",
- de = "%s mal verschobener Text plaziert",
- en = "%s times postponed text placed",
- fr = "%s times postponed text placed",
- it = "posizionato testo posticipato %s volte",
- nl = "%s maal uitgestelde tekst tussengevoegd",
- no = "%s ganger forskjøvet tekst plassert",
- ro = "textul amanat de %s ori a fost plasat",
+ cs = "%a krat odlozeny text umisten",
+ de = "%a mal verschobener Text plaziert",
+ en = "%a times postponed text placed",
+ fr = "%a times postponed text placed",
+ it = "posizionato testo posticipato %a volte",
+ nl = "%a maal uitgestelde tekst tussengevoegd",
+ no = "%a ganger forskjøvet tekst plassert",
+ ro = "textul amanat de %a ori a fost plasat",
},
["layouts:3"] = {
- cs = "%s krat text odlozen",
- de = "%s mal Text verschoben",
- en = "%s times text postponed",
- fr = "%s times text postponed",
- it = "testo posticipato %s volte",
- nl = "%s maal tekst plaatsen uitstellen",
- no = "%s ganger tekst forskjøvet",
- ro = "textul amanat de %s ori",
+ cs = "%a krat text odlozen",
+ de = "%a mal Text verschoben",
+ en = "%a times text postponed",
+ fr = "%a times text postponed",
+ it = "testo posticipato %a volte",
+ nl = "%a maal tekst plaatsen uitstellen",
+ no = "%a ganger tekst forskjøvet",
+ ro = "textul amanat de %a ori",
},
["layouts:4"] = {
cs = "okrajove bloky aktivni",
@@ -913,14 +914,14 @@ return {
ro = "blocuri marginale inactive",
},
["layouts:6"] = {
- cs = "sada stran %s zpracovana (velikost %s)",
- de = "Unterseitenfolge %s verarbeitet (Groesse %s)",
- en = "subpage set %s processed (size %s)",
- fr = "jeu de souspage %s traité (taille %s)",
- it = "gruppo di sottopagine %s elaborato (dimensione %s)",
- nl = "subpagina reeks %s verwerkt (aantal %s)",
- no = "delside sett %s behandlet (størrelse %s)",
- ro = "setul %s de subpagini procesat (dimensiunea %s)",
+ cs = "sada stran %a zpracovana (velikost %a)",
+ de = "Unterseitenfolge %a verarbeitet (Groesse %a)",
+ en = "subpage set %a processed (size %a)",
+ fr = "jeu de souspage %a traité (taille %a)",
+ it = "gruppo di sottopagine %a elaborato (dimensione %a)",
+ nl = "subpagina reeks %a verwerkt (aantal %a)",
+ no = "delside sett %a behandlet (størrelse %a)",
+ ro = "setul %a de subpagini procesat (dimensiunea %a)",
},
["layouts:7"] = {
cs = "pocita se misto pro logo",
@@ -943,201 +944,201 @@ return {
ro = "se calculeaza fundalurile",
},
["layouts:9"] = {
- cs = "aktualne ne vice nez %s urovne/urovni vyctu",
- de = "z.Z. nicht mehr als %s Ebenen in Aufzaehlungen",
- en = "currently no more than %s levels in itemizations",
- fr = "pas plus de %s niveaux pour l'instant dans les élémentarisations",
- it = "attualmente non più di %s livelli di elencazione",
- nl = "momenteel maximaal %s niveaus in opsommingen",
- no = "for øyeblikket maksimalt %s nivåer i opplisting",
- ro = "acum nu se supota mai mult de %s nivele de adancime la iteratii",
+ cs = "aktualne ne vice nez %a urovne/urovni vyctu",
+ de = "z.Z. nicht mehr als %a Ebenen in Aufzaehlungen",
+ en = "currently no more than %a levels in itemizations",
+ fr = "pas plus de %a niveaux pour l'instant dans les élémentarisations",
+ it = "attualmente non più di %a livelli di elencazione",
+ nl = "momenteel maximaal %a niveaus in opsommingen",
+ no = "for øyeblikket maksimalt %a nivåer i opplisting",
+ ro = "acum nu se supota mai mult de %a nivele de adancime la iteratii",
},
- ["loaded: '%s'"] = {
- nl = "geladen: '%s'",
+ ["loaded: %a"] = {
+ nl = "geladen: %a",
},
- ["loaded: library '%s'"] = {
- nl = "geladen: bibliotheek '%s'",
+ ["loaded: library %a"] = {
+ nl = "geladen: bibliotheek %a",
},
- ["loading: '%s'"] = {
- nl = "laden: '%s'",
+ ["loading: %a"] = {
+ nl = "laden: %a",
},
- ["locating: '%s'"] = {
- nl = "zoeken: '%s'",
+ ["locating: %a"] = {
+ nl = "zoeken: %a",
},
- ["not found: '%s'"] = {
- nl = "niet gevonden: '%s'",
+ ["not found: %a"] = {
+ nl = "niet gevonden: %a",
},
- ["number 1: %s, number 2: %s"] = {
- en = "number 1: (%s), number 2: (%s)",
- nl = "nummer 1: (%s), nummer 2: (%s)",
+ ["number 1: %a, number 2: %a"] = {
+ en = "number 1: (%a), number 2: (%a)",
+ nl = "nummer 1: (%a), nummer 2: (%a)",
},
- ["popping level: %s"] = {
- nl = "niveau omlaag: %s",
+ ["popping level: %a"] = {
+ nl = "niveau omlaag: %a",
},
["publications:1"] = {
- en = "file %s not found, unknown style ignored",
+ en = "file %a not found, unknown style ignored",
},
["publications:2"] = {
- en = "file %s not found, waiting for bibtex",
+ en = "file %a not found, waiting for bibtex",
},
["publications:3"] = {
en = "wrote a new auxiliary file \\jobname.aux",
},
["publications:4"] = {
- en = "loading database from %s",
+ en = "loading database from %a",
},
["publications:5"] = {
- en = "warning: unknown cite argument %s on line \\the\\inputlineno",
+ en = "warning: unknown cite argument %a on line \\the\\inputlineno",
},
["publications:6"] = {
- en = "loading formatting style from %s",
+ en = "loading formatting style from %a",
},
["publications:7"] = {
en = "placing all entries, use 'text' to be more selective",
},
- ["pushing level: %s"] = {
- nl = "niveau omhoog: %s",
+ ["pushing level: %a"] = {
+ nl = "niveau omhoog: %a",
},
["references:1"] = {
- cs = "neznama reference %s",
- de = "unbekannte Referenz %s",
- en = "unknown reference %s",
- fr = "réference %s inconnue",
- it = "riferimento sconosciuto %s",
- nl = "onbekende verwijzing %s",
- no = "ukjent referanse %s",
- ro = "referinta necunoscuta %s",
+ cs = "neznama reference %a",
+ de = "unbekannte Referenz %a",
+ en = "unknown reference %a",
+ fr = "réference %a inconnue",
+ it = "riferimento sconosciuto %a",
+ nl = "onbekende verwijzing %a",
+ no = "ukjent referanse %a",
+ ro = "referinta necunoscuta %a",
},
["references:2"] = {
- cs = "duplicitni reference %s na strane %s",
- de = "doppelte Referenz %s auf Seite %s",
- en = "duplicate reference %s on page %s",
- fr = "réference %s dupliquée à la page %s",
- it = "riferimento duplicato %s a pagina %s",
- nl = "dubbele verwijzing %s op pagina %s",
- no = "duplikat referanse %s pø side %s",
- ro = "referinta duplicat %s la pagina %s",
+ cs = "duplicitni reference %a na strane %a",
+ de = "doppelte Referenz %a auf Seite %a",
+ en = "duplicate reference %a on page %a",
+ fr = "réference %a dupliquée à la page %a",
+ it = "riferimento duplicato %a a pagina %a",
+ nl = "dubbele verwijzing %a op pagina %a",
+ no = "duplikat referanse %a pø side %a",
+ ro = "referinta duplicat %a la pagina %a",
},
["references:21"] = {
- cs = "dokument %s nacten",
- de = "Dokument %s geladen",
- en = "document %s loaded",
- fr = "document %s chargé",
- it = "documento %s caricato",
- nl = "document %s geladen",
- no = "dokument %s er lest inn",
- ro = "documentul %s este incarcat",
+ cs = "dokument %a nacten",
+ de = "Dokument %a geladen",
+ en = "document %a loaded",
+ fr = "document %a chargé",
+ it = "documento %a caricato",
+ nl = "document %a geladen",
+ no = "dokument %a er lest inn",
+ ro = "documentul %a este incarcat",
},
["references:22"] = {
- cs = "dokument %s neni interaktivni",
- de = "Dokument %s ist nicht aktiv",
- en = "document %s is not interactive",
- fr = "le document %s n'est pas interactif",
- it = "il documento %s non ø interattivo",
- nl = "document %s is niet interactief",
- no = "dokument %s er ikke interaktivt",
- ro = "documentul %s nu este interactiv",
+ cs = "dokument %a neni interaktivni",
+ de = "Dokument %a ist nicht aktiv",
+ en = "document %a is not interactive",
+ fr = "le document %a n'est pas interactif",
+ it = "il documento %a non ø interattivo",
+ nl = "document %a is niet interactief",
+ no = "dokument %a er ikke interaktivt",
+ ro = "documentul %a nu este interactiv",
},
["references:23"] = {
- cs = "obskurni (nejasna) reference %s (prefix=%s)",
- de = "Obskure Referenz %s (Prefix=%s)",
- en = "obscure reference %s (prefix=%s)",
- fr = "reference %s indéterminé (préfixe=%s)",
- it = "riferimento ambiguo %s (prefisso=%s)",
- nl = "onduidelijke verwijzing %s (prefix=%s)",
- no = "obskur referanse %s (Prefix=%s)",
- ro = "referinta obscura %s (prefix=%s)",
+ cs = "obskurni (nejasna) reference %a (prefix=%a)",
+ de = "Obskure Referenz %a (Prefix=%a)",
+ en = "obscure reference %a (prefix=%a)",
+ fr = "reference %a indéterminé (préfixe=%a)",
+ it = "riferimento ambiguo %a (prefisso=%a)",
+ nl = "onduidelijke verwijzing %a (prefix=%a)",
+ no = "obskur referanse %a (Prefix=%a)",
+ ro = "referinta obscura %a (prefix=%a)",
},
["references:24"] = {
- en = "references from document '%s' are not exported",
+ en = "references from document %a are not exported",
},
["references:25"] = {
- en = "references from document '%s' are not imported (export again)",
+ en = "references from document %a are not imported (export again)",
},
["references:26"] = {
- en = "references from document '%s' are imported",
+ en = "references from document %a are imported",
},
["references:3"] = {
- cs = "neznamy typ reference %s",
- de = "unbekannte Referenz Typ %s",
- en = "unknown reference type %s",
- fr = "type %s de réference inconnu",
- it = "riferimento di tipo sconosciuto %s",
- nl = "type verwijzing %s onbekend",
- no = "ukjent referansetype %s",
- ro = "tip necunoscut de referinta %s",
+ cs = "neznamy typ reference %a",
+ de = "unbekannte Referenz Typ %a",
+ en = "unknown reference type %a",
+ fr = "type %a de réference inconnu",
+ it = "riferimento di tipo sconosciuto %a",
+ nl = "type verwijzing %a onbekend",
+ no = "ukjent referansetype %a",
+ ro = "tip necunoscut de referinta %a",
},
["references:30"] = {
- cs = "neznamy objekt %s",
- de = "unbekanntes Object %s",
- en = "unknown object %s",
- fr = "objet %s inconnu",
- it = "oggetto sconosciuto %s",
- nl = "onbekend object %s",
- no = "ukjent objekt %s",
- ro = "obiect necunoscut %s",
+ cs = "neznamy objekt %a",
+ de = "unbekanntes Object %a",
+ en = "unknown object %a",
+ fr = "objet %a inconnu",
+ it = "oggetto sconosciuto %a",
+ nl = "onbekend object %a",
+ no = "ukjent objekt %a",
+ ro = "obiect necunoscut %a",
},
["references:31"] = {
- cs = "duplicitni object %s",
- de = "doppeltes Object %s",
- en = "duplicate object %s",
- fr = "objet %s dupliqué",
- it = "oggetto duplicato %s",
- nl = "dubbel object %s",
- no = "duplikat objekt %s",
- ro = "obiect duplicat %s",
+ cs = "duplicitni object %a",
+ de = "doppeltes Object %a",
+ en = "duplicate object %a",
+ fr = "objet %a dupliqué",
+ it = "oggetto duplicato %a",
+ nl = "dubbel object %a",
+ no = "duplikat objekt %a",
+ ro = "obiect duplicat %a",
},
["references:4"] = {
- cs = "nedovolena reference %s",
- de = "illegale Referenz %s",
- en = "illegal reference %s",
- fr = "réference %s inconnue",
- it = "riferimento illecito %s",
- nl = "verboden verwijzing %s",
- no = "ulovlig referanse %s",
- ro = "referinta eronata %s",
- },
- -- ["number of unknown references: %s"] = {
+ cs = "nedovolena reference %a",
+ de = "illegale Referenz %a",
+ en = "illegal reference %a",
+ fr = "réference %a inconnue",
+ it = "riferimento illecito %a",
+ nl = "verboden verwijzing %a",
+ no = "ulovlig referanse %a",
+ ro = "referinta eronata %a",
+ },
+ -- ["number of unknown references: %a"] = {
-- },
["references:6"] = {
- en = "number of illegal references: %s",
+ en = "number of illegal references: %a",
},
- ["start: '%s'"] = {
+ ["start: %a"] = {
},
- ["stop: '%s'"] = {
+ ["stop: %a"] = {
},
- ["stored: preamble '%s', state '%s', order '%s'"] = {
- nl = "opgeslagen: preamble '%s', status '%s', volgorde '%s'",
+ ["stored: preamble %a, state %a, order %a"] = {
+ nl = "opgeslagen: preamble %a, status %a, volgorde %a",
},
["structures:1"] = {
- cs = "zacatek oddilu (sekce) %s",
- de = "Begin des Abschnittsblocks %s",
- en = "begin of sectionblock %s",
- fr = "début de blocsection %s",
- it = "inizio del blocco (sezione) %s",
- nl = "begin van sectieblok %s",
- no = "starten av blokk %s (seksjon)",
- ro = "inceput de bloc sectiune %s",
+ cs = "zacatek oddilu (sekce) %a",
+ de = "Begin des Abschnittsblocks %a",
+ en = "begin of sectionblock %a",
+ fr = "début de blocsection %a",
+ it = "inizio del blocco (sezione) %a",
+ nl = "begin van sectieblok %a",
+ no = "starten av blokk %a (seksjon)",
+ ro = "inceput de bloc sectiune %a",
},
["structures:2"] = {
- cs = "konec oddilu (sekce) %s",
- de = "Ende des Abschnittsblocks %s",
- en = "end of sectionblock %s",
- fr = "fin de blocsection %s",
- it = "fine del blocco (sezione) %s",
- nl = "eind van sectieblok %s",
- no = "slutten av blokk %s (seksjon)",
- ro = "sfarsit de bloc sectiune %s",
+ cs = "konec oddilu (sekce) %a",
+ de = "Ende des Abschnittsblocks %a",
+ en = "end of sectionblock %a",
+ fr = "fin de blocsection %a",
+ it = "fine del blocco (sezione) %a",
+ nl = "eind van sectieblok %a",
+ no = "slutten av blokk %a (seksjon)",
+ ro = "sfarsit de bloc sectiune %a",
},
["symbols:1"] = {
- cs = "nacita se soubor symbolu %s",
- de = "Lade Symboldatei %s",
- en = "loading symbolset %s",
- fr = "chargement du jeu de symbole %s",
- it = "caricamento gruppo di simboli %s",
- nl = "symboolset %s wordt geladen",
- no = "leser inn symbolsett %s",
- ro = "se incarca setul de simboluri %s",
+ cs = "nacita se soubor symbolu %a",
+ de = "Lade Symboldatei %a",
+ en = "loading symbolset %a",
+ fr = "chargement du jeu de symbole %a",
+ it = "caricamento gruppo di simboli %a",
+ nl = "symboolset %a wordt geladen",
+ no = "leser inn symbolsett %a",
+ ro = "se incarca setul de simboluri %a",
},
["system:1"] = {
cs = "nacteni pomocneho souboru odlozeno (typemode)",
@@ -1150,14 +1151,14 @@ return {
ro = "se incarca utilitarul-fisierul este amanat (typemode)",
},
["system:10"] = {
- cs = "nepouzivejte em v %s",
- de = "Benutzte kein em in %s",
- en = "don't use em in %s",
- fr = "n'utilisez pas em dans %s",
- it = "non usare em in %s",
- nl = "gebruik geen em in %s",
- no = "ikke bruk em i %s",
- ro = "nu folositi em in %s",
+ cs = "nepouzivejte em v %a",
+ de = "Benutzte kein em in %a",
+ en = "don't use em in %a",
+ fr = "n'utilisez pas em dans %a",
+ it = "non usare em in %a",
+ nl = "gebruik geen em in %a",
+ no = "ikke bruk em i %a",
+ ro = "nu folositi em in %a",
},
["system:11"] = {
cs = "vytvarim jednoduchy pomocny soubor",
@@ -1180,94 +1181,94 @@ return {
ro = "fisierul utilitar nu este sortat, folositi texutil",
},
["system:13"] = {
- cs = "znacka %s definovana %s",
- de = "Beschriftung %s definiert %s",
- en = "mark %s defined %s",
- fr = "marquage %s defini %s",
- it = "marcatura %s definita %s",
- nl = "markering %s gedefinieerd %s",
- no = "markering %s definert %s",
- ro = "marcajul %s definit %s",
+ cs = "znacka %a definovana %a",
+ de = "Beschriftung %a definiert %a",
+ en = "mark %a defined %a",
+ fr = "marquage %a defini %a",
+ it = "marcatura %a definita %a",
+ nl = "markering %a gedefinieerd %a",
+ no = "markering %a definert %a",
+ ro = "marcajul %a definit %a",
},
["system:14"] = {
- cs = "vynucena nova stranka v seznamu na %s",
- de = "Erzwungendes Seitenumbruch in Liste bei %s",
- en = "forced newpage in list at %s",
- fr = "nouvellepage forcée dans la liste à %s",
- it = "nuova pagina obbligata in lista a %s",
- nl = "geforceerde paginaovergang in lijst voor %s",
- no = "tvunget sideskift i liste ved %s",
- ro = "s-a fortat trecere pa pagina noua in lista la %s",
+ cs = "vynucena nova stranka v seznamu na %a",
+ de = "Erzwungendes Seitenumbruch in Liste bei %a",
+ en = "forced newpage in list at %a",
+ fr = "nouvellepage forcée dans la liste à %a",
+ it = "nuova pagina obbligata in lista a %a",
+ nl = "geforceerde paginaovergang in lijst voor %a",
+ no = "tvunget sideskift i liste ved %a",
+ ro = "s-a fortat trecere pa pagina noua in lista la %a",
},
["system:15"] = {
- cs = "uklada se buffer %s",
- de = "Speichere Buffer %s",
- en = "saving buffer %s",
- fr = "sauvegarde du tampon (buffer) %s",
- it = "salvataggio del buffer %s",
- nl = "wegschrijven buffer %s",
- no = "lagrer Buffer %s",
- ro = "buffer salvat %s",
+ cs = "uklada se buffer %a",
+ de = "Speichere Buffer %a",
+ en = "saving buffer %a",
+ fr = "sauvegarde du tampon (buffer) %a",
+ it = "salvataggio del buffer %a",
+ nl = "wegschrijven buffer %a",
+ no = "lagrer Buffer %a",
+ ro = "buffer salvat %a",
},
["system:16"] = {
- cs = "sazi se buffer %s",
- de = "Setzte Buffer %s",
- en = "typesetting buffer %s",
- fr = "composition du tampon (buffer) %s",
- it = "composizione del buffer %s",
- nl = "inlezen buffer %s",
- no = "tegnsetter buffer %s",
- ro = "buffer-ul %s s-a cules",
+ cs = "sazi se buffer %a",
+ de = "Setzte Buffer %a",
+ en = "typesetting buffer %a",
+ fr = "composition du tampon (buffer) %a",
+ it = "composizione del buffer %a",
+ nl = "inlezen buffer %a",
+ no = "tegnsetter buffer %a",
+ ro = "buffer-ul %a s-a cules",
},
["system:17"] = {
- cs = "sazi se doslovny (verbatim) buffer %s",
- de = "Setzte tippen-Buffer %s",
- en = "typesetting verbatim buffer %s",
- fr = "composition textuelle du tampon (buffer) %s",
- it = "composizione verbatim del buffer %s",
- nl = "verbatim inlezen buffer %s",
- no = "tegnsetter verbatim-buffer %s",
- ro = "se culege buffer-ul verbatim %s",
+ cs = "sazi se doslovny (verbatim) buffer %a",
+ de = "Setzte tippen-Buffer %a",
+ en = "typesetting verbatim buffer %a",
+ fr = "composition textuelle du tampon (buffer) %a",
+ it = "composizione verbatim del buffer %a",
+ nl = "verbatim inlezen buffer %a",
+ no = "tegnsetter verbatim-buffer %a",
+ ro = "se culege buffer-ul verbatim %a",
},
["system:18"] = {
- cs = "synonymum %s %s neexistuje",
- de = "Synonym %s %s existiert nicht",
- en = "synonym %s %s does not exist",
- fr = "le synonyme %s %s n'existe pas",
- it = "sinonimo %s %s non esistente",
- nl = "synoniem %s %s bestaat niet",
- no = "synonym %s %s eksisterer ikke",
- ro = "sinonimul %s %s nu exista",
+ cs = "synonymum %a %a neexistuje",
+ de = "Synonym %a %a existiert nicht",
+ en = "synonym %a %a does not exist",
+ fr = "le synonyme %a %a n'existe pas",
+ it = "sinonimo %a %a non esistente",
+ nl = "synoniem %a %a bestaat niet",
+ no = "synonym %a %a eksisterer ikke",
+ ro = "sinonimul %a %a nu exista",
},
["system:19"] = {
- cs = "vyznam (synonyma) %s nacten",
- de = "Bedeutung (synonyme) von %s geladen",
- en = "meaning (synonyms) of %s loaded",
- fr = "signification (synonymes) de %s chargée",
- it = "significato (sinonimi) di %s caricato",
- nl = "betekenissen (synoniemen) van %s geladen",
- no = "betydning (synonymer) av %s er lest inn",
- ro = "intelesul (sinonimele) pentru %s incarcat",
+ cs = "vyznam (synonyma) %a nacten",
+ de = "Bedeutung (synonyme) von %a geladen",
+ en = "meaning (synonyms) of %a loaded",
+ fr = "signification (synonymes) de %a chargée",
+ it = "significato (sinonimi) di %a caricato",
+ nl = "betekenissen (synoniemen) van %a geladen",
+ no = "betydning (synonymer) av %a er lest inn",
+ ro = "intelesul (sinonimele) pentru %a incarcat",
},
["system:2"] = {
- cs = "%s nacteno",
- de = "%s geladen",
- en = "%s loaded",
- fr = "%s chargé",
- it = "%s caricato",
- nl = "%s geladen",
- no = "%s er lest inn",
- ro = "%s s-a incarcat",
+ cs = "%a nacteno",
+ de = "%a geladen",
+ en = "%a loaded",
+ fr = "%a chargé",
+ it = "%a caricato",
+ nl = "%a geladen",
+ no = "%a er lest inn",
+ ro = "%a s-a incarcat",
},
["system:20"] = {
- cs = "vyznam (trideni) %s nacten",
- de = "Bedeutung (sortieren) von %s geladen",
- en = "meaning (sorts) of %s loaded",
- fr = "signification (tris) de %s chargée",
- it = "significato (specie) di %s caricato",
- nl = "betekenissen (sorteren) van %s geladen",
- no = "betydning (sorterer) av %s er lest inn",
- ro = "intelesul (ordinea) pentru %s incarcat",
+ cs = "vyznam (trideni) %a nacten",
+ de = "Bedeutung (sortieren) von %a geladen",
+ en = "meaning (sorts) of %a loaded",
+ fr = "signification (tris) de %a chargée",
+ it = "significato (specie) di %a caricato",
+ nl = "betekenissen (sorteren) van %a geladen",
+ no = "betydning (sorterer) av %a er lest inn",
+ ro = "intelesul (ordinea) pentru %a incarcat",
},
["system:21"] = {
cs = "pomocny soubor necten",
@@ -1290,14 +1291,14 @@ return {
ro = "folositi un fisier utilitar valid",
},
["system:23"] = {
- cs = "%s upraveno na %s",
- de = "%s angeordnet auf %s",
- en = "%s arranged at %s",
- fr = "%s arrangé à %s",
- it = "%s sistemato a %s",
- nl = "%s gearrangeerd op %s",
- no = "%s arrangert på %s",
- ro = "%s aranjat la %s",
+ cs = "%a upraveno na %a",
+ de = "%a angeordnet auf %a",
+ en = "%a arranged at %a",
+ fr = "%a arrangé à %a",
+ it = "%a sistemato a %a",
+ nl = "%a gearrangeerd op %a",
+ no = "%a arrangert på %a",
+ ro = "%a aranjat la %a",
},
["system:24"] = {
cs = "plovouci bloky",
@@ -1340,54 +1341,54 @@ return {
ro = "Versiune",
},
["system:4"] = {
- cs = "prikaz %s je jiz definovan",
- de = "Befehl %s ist bereits definiert",
- en = "command %s is already defined",
- fr = "la commande %s est déjà définie",
- it = "comando %s già definito",
- nl = "commando %s is al gedefinieerd",
- no = "kommando %s er allerede definert",
- ro = "comanda %s este deja definita",
+ cs = "prikaz %a je jiz definovan",
+ de = "Befehl %a ist bereits definiert",
+ en = "command %a is already defined",
+ fr = "la commande %a est déjà définie",
+ it = "comando %a già definito",
+ nl = "commando %a is al gedefinieerd",
+ no = "kommando %a er allerede definert",
+ ro = "comanda %a este deja definita",
},
["system:41"] = {
- cs = "externi soubor %s ve skupine %s neexistuje",
- de = "Externe Datei %s in Gruppe %s existiert nicht",
- en = "external file %s in group %s does not exist",
- fr = "le fichier externe %s du groupe %s n'existe pas",
- it = "il file esterno %s del gruppo %s non esiste",
- nl = "externe file %s in groep %s bestaat niet",
- no = "ekstern fil %s i gruppe %s eksisterer ikke",
- ro = "fisierul extern %s din grupul %s nu exista",
+ cs = "externi soubor %a ve skupine %a neexistuje",
+ de = "Externe Datei %a in Gruppe %a existiert nicht",
+ en = "external file %a in group %a does not exist",
+ fr = "le fichier externe %a du groupe %a n'existe pas",
+ it = "il file esterno %a del gruppo %a non esiste",
+ nl = "externe file %a in groep %a bestaat niet",
+ no = "ekstern fil %a i gruppe %a eksisterer ikke",
+ ro = "fisierul extern %a din grupul %a nu exista",
},
["system:5"] = {
- cs = "makra z %s nactena",
- de = "Modul %s geladen",
- en = "module %s loaded",
- fr = "module %s chargé",
- it = "macro del modulo %s caricate",
- nl = "module %s geladen",
- no = "makroene i modul %s er lest inn",
- ro = "macro-urile din modulul %s s-au incarcat",
+ cs = "makra z %a nactena",
+ de = "Modul %a geladen",
+ en = "module %a loaded",
+ fr = "module %a chargé",
+ it = "macro del modulo %a caricate",
+ nl = "module %a geladen",
+ no = "makroene i modul %a er lest inn",
+ ro = "macro-urile din modulul %a s-au incarcat",
},
["system:6"] = {
- cs = "zadna makra v %s nenalezena",
- de = "Modul %s gefunden",
- en = "module %s not found",
- fr = "module %s non trouvé",
- it = "nessuna macro trovata nel modulo %s",
- nl = "geen module %s gevonden",
- no = "ingen makroer funnet i modul %s-",
- ro = "nu s-au gasit macro-uri in modulul %s",
+ cs = "zadna makra v %a nenalezena",
+ de = "Modul %a gefunden",
+ en = "module %a not found",
+ fr = "module %a non trouvé",
+ it = "nessuna macro trovata nel modulo %a",
+ nl = "geen module %a gevonden",
+ no = "ingen makroer funnet i modul %a-",
+ ro = "nu s-au gasit macro-uri in modulul %a",
},
["system:7"] = {
- cs = "makra z %s jsou jiz nactena",
- de = "Modul %s bereits geladen",
- en = "module %s already loaded",
- fr = "module %s déjà chargé",
- it = "macro del modulo %s già caricate",
- nl = "module %s reeds geladen",
- no = "makroene i modul %s er allerede lest inn",
- ro = "macro-urile din modulul %s s-au incarcat deja",
+ cs = "makra z %a jsou jiz nactena",
+ de = "Modul %a bereits geladen",
+ en = "module %a already loaded",
+ fr = "module %a déjà chargé",
+ it = "macro del modulo %a già caricate",
+ nl = "module %a reeds geladen",
+ no = "makroene i modul %a er allerede lest inn",
+ ro = "macro-urile din modulul %a s-au incarcat deja",
},
["system:8"] = {
cs = "nova verze pomocneho souboru, je treba druheho behu",
@@ -1400,18 +1401,18 @@ return {
ro = "o noua versiune de fisier utilitar, este necesara o noua trecere",
},
["system:9"] = {
- cs = "%s nenalezeno/nezpracovano",
- de = "%s nicht gefunden/verarbeitet",
- en = "%s not found/processed",
- fr = "%s non trouvé/traité",
- it = "%s non trovato/elaborato",
- nl = "%s niet gevonden/geplaatst",
- no = "%s ikke funnet/behandlet",
- ro = "%s nu este gasit/procesat",
+ cs = "%a nenalezeno/nezpracovano",
+ de = "%a nicht gefunden/verarbeitet",
+ en = "%a not found/processed",
+ fr = "%a non trouvé/traité",
+ it = "%a non trovato/elaborato",
+ nl = "%a niet gevonden/geplaatst",
+ no = "%a ikke funnet/behandlet",
+ ro = "%a nu este gasit/procesat",
},
["system:91"] = {
- en = "papertray %s",
- nl = "papierlade %s",
+ en = "papertray %a",
+ nl = "papierlade %a",
},
["textblocks:1"] = {
cs = "nova verze, je treba druhy beh",
@@ -1424,54 +1425,54 @@ return {
ro = "o noua versiune, este nevoie de inca o trecere",
},
["textblocks:10"] = {
- cs = "%s nacteno a zpracovano",
- de = "%s geladen und verarbeitet",
- en = "%s loaded and processed",
- fr = "%s chargé et traité",
- it = "%s caricato ed elaborato",
- nl = "%s geladen en verwerkt",
- no = "%s lest inn og behandlet",
- ro = "%s incarcat si procesat",
+ cs = "%a nacteno a zpracovano",
+ de = "%a geladen und verarbeitet",
+ en = "%a loaded and processed",
+ fr = "%a chargé et traité",
+ it = "%a caricato ed elaborato",
+ nl = "%a geladen en verwerkt",
+ no = "%a lest inn og behandlet",
+ ro = "%a incarcat si procesat",
},
["textblocks:11"] = {
- cs = "%s nacteno a vysazeno",
- de = "%s geladen und gesetzt",
- en = "%s loaded and typeset",
- fr = "%s chargé et composé",
- it = "%s caricato e composto",
- nl = "%s geladen en geplaatst",
- no = "%s lest inn og tegnsatt",
- ro = "%s incarcat si cules",
+ cs = "%a nacteno a vysazeno",
+ de = "%a geladen und gesetzt",
+ en = "%a loaded and typeset",
+ fr = "%a chargé et composé",
+ it = "%a caricato e composto",
+ nl = "%a geladen en geplaatst",
+ no = "%a lest inn og tegnsatt",
+ ro = "%a incarcat si cules",
},
["textblocks:12"] = {
- cs = "%s preskoceno",
- de = "%s ausgelassen",
- en = "%s skipped",
- fr = "%s sauté",
- it = "%s saltato",
- nl = "%s overgeslagen",
- no = "%s utelatt",
- ro = "%s sarit peste",
+ cs = "%a preskoceno",
+ de = "%a ausgelassen",
+ en = "%a skipped",
+ fr = "%a sauté",
+ it = "%a saltato",
+ nl = "%a overgeslagen",
+ no = "%a utelatt",
+ ro = "%a sarit peste",
},
["textblocks:2"] = {
- cs = "zapisuji bloky do %s",
- de = "schreibe Bloecke zu %s",
- en = "writing blocks to %s",
- fr = "ecriture des blocs vers %s",
- it = "scrittura dei blocchi su %s",
- nl = "wegschrijven blokken naar %s",
- no = "skriver blokker til %s",
- ro = "se scriu blocurile in %s",
+ cs = "zapisuji bloky do %a",
+ de = "schreibe Bloecke zu %a",
+ en = "writing blocks to %a",
+ fr = "ecriture des blocs vers %a",
+ it = "scrittura dei blocchi su %a",
+ nl = "wegschrijven blokken naar %a",
+ no = "skriver blokker til %a",
+ ro = "se scriu blocurile in %a",
},
["textblocks:3"] = {
- cs = "ctu bloky z %s",
- de = "lese Bloecke von %s",
- en = "reading blocks from %s",
- fr = "lecture des blocs en provenance de %s",
- it = "lettura dei blocchi da %s",
- nl = "inlezen blokken uit %s",
- no = "leser blokker fra %s",
- ro = "se citesc blocurile din %s",
+ cs = "ctu bloky z %a",
+ de = "lese Bloecke von %a",
+ en = "reading blocks from %a",
+ fr = "lecture des blocs en provenance de %a",
+ it = "lettura dei blocchi da %a",
+ nl = "inlezen blokken uit %a",
+ no = "leser blokker fra %a",
+ ro = "se citesc blocurile din %a",
},
["textblocks:4"] = {
cs = "je treba druhy beh",
@@ -1484,85 +1485,85 @@ return {
ro = "este nevoie de inca o trecere",
},
["textblocks:5"] = {
- cs = "%s neni skryto",
- de = "%s nicht verborgen",
- en = "%s not hidden",
- fr = "%s non caché",
- it = "%s non nascosto",
- nl = "%s niet verborgen",
- no = "%s ikke skjult",
- ro = "%s nu este ascuns",
+ cs = "%a neni skryto",
+ de = "%a nicht verborgen",
+ en = "%a not hidden",
+ fr = "%a non caché",
+ it = "%a non nascosto",
+ nl = "%a niet verborgen",
+ no = "%a ikke skjult",
+ ro = "%a nu este ascuns",
},
["textblocks:6"] = {
- cs = "%s skryto a zpracovano",
- de = "%s verborgen und verarbeitet",
- en = "%s hidden and processed",
- fr = "%s caché et traité",
- it = "%s nascosto ed elaborato",
- nl = "%s verborgen en verwerkt",
- no = "%s skjult og behandlet",
- ro = "%s ascuns si procesat",
+ cs = "%a skryto a zpracovano",
+ de = "%a verborgen und verarbeitet",
+ en = "%a hidden and processed",
+ fr = "%a caché et traité",
+ it = "%a nascosto ed elaborato",
+ nl = "%a verborgen en verwerkt",
+ no = "%a skjult og behandlet",
+ ro = "%a ascuns si procesat",
},
["textblocks:7"] = {
- cs = "%s skryto",
- de = "%s verborgen",
- en = "%s hidden",
- fr = "%s caché",
- it = "%s nascosto",
- nl = "%s verborgen",
- no = "%s skjult",
- ro = "%s ascuns",
+ cs = "%a skryto",
+ de = "%a verborgen",
+ en = "%a hidden",
+ fr = "%a caché",
+ it = "%a nascosto",
+ nl = "%a verborgen",
+ no = "%a skjult",
+ ro = "%a ascuns",
},
["textblocks:8"] = {
- cs = "%s vysazeno",
- de = "%s gesetzt",
- en = "%s typeset",
- fr = "%s composé",
- it = "%s composto",
- nl = "%s gehandhaafd",
- no = "%s tegnsatt",
- ro = "%s cules",
+ cs = "%a vysazeno",
+ de = "%a gesetzt",
+ en = "%a typeset",
+ fr = "%a composé",
+ it = "%a composto",
+ nl = "%a gehandhaafd",
+ no = "%a tegnsatt",
+ ro = "%a cules",
},
["textblocks:9"] = {
- cs = "%s nevysazeno",
- de = "%s nicht gesetzt",
- en = "%s not typeset",
- fr = "%s non composé",
- it = "%s non composto",
- nl = "%s niet gehandhaafd",
- no = "%s ikke tegnsatt",
- ro = "%s nu este cules",
+ cs = "%a nevysazeno",
+ de = "%a nicht gesetzt",
+ en = "%a not typeset",
+ fr = "%a non composé",
+ it = "%a non composto",
+ nl = "%a niet gehandhaafd",
+ no = "%a ikke tegnsatt",
+ ro = "%a nu este cules",
},
- ["unknown: library '%s'"] = {
- nl = "onbekend: bibliotheek '%s'",
+ ["unknown: library %a"] = {
+ nl = "onbekend: bibliotheek %a",
},
- ["used: code '%s'"] = {
- nl = "gebruikt: code '%s'",
+ ["used: code %a"] = {
+ nl = "gebruikt: code %a",
},
- ["used: code '%s', preamble '%s'"] = {
- nl = "gebruikt: code '%s', preamble '%s'",
+ ["used: code %a, preamble %a"] = {
+ nl = "gebruikt: code %a, preamble %a",
},
- ["used: function '%s'"] = {
- nl = "gebruikt: functie '%s'",
+ ["used: function %a"] = {
+ nl = "gebruikt: functie %a",
},
- ["used: preamble '%s', state '%s', order '%s'"] = {
- nl = "gebruikt: preamble '%s', status '%s', volgorde '%s'",
+ ["used: preamble %a, state %a, order %a"] = {
+ nl = "gebruikt: preamble %a, status %a, volgorde %a",
},
- ["vector '%s' is loaded"] = {
- nl = "vector '%s' is geladen",
+ ["vector %a is loaded"] = {
+ nl = "vector %a is geladen",
},
- ["vector '%s' is unknown"] = {
- nl = "onbekende vector '%s'",
+ ["vector %a is unknown"] = {
+ nl = "onbekende vector %a",
},
["verbatims:1"] = {
- cs = "soubor %s neexistuje",
- de = "Datei %s existiert nicht",
- en = "file %s does not exist",
- fr = "le fichier %s n'existe pas",
- it = "il file %s non esiste",
- nl = "file %s bestaat niet",
- no = "fil %s eksisterer ikke",
- ro = "fisierul %s nu exista",
+ cs = "soubor %a neexistuje",
+ de = "Datei %a existiert nicht",
+ en = "file %a does not exist",
+ fr = "le fichier %a n'existe pas",
+ it = "il file %a non esiste",
+ nl = "file %a bestaat niet",
+ no = "fil %a eksisterer ikke",
+ ro = "fisierul %a nu exista",
},
},
translations = {
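(Note: the hunks above consistently replace the %s directive with %a in the localized message strings, and keys such as ["loaded: '%s'"] become ["loaded: %a"], dropping the explicit quotes around the placeholder. A minimal, self-contained Lua sketch of that idea follows; it is not ConTeXt's actual logging code, and it assumes, as the dropped quotes suggest, that %a is meant to quote its argument by itself.)

-- hypothetical illustration only, not the ConTeXt implementation
local messages = {
  ["fonts:10"] = {
    en = "unknown font file %a",
    nl = "onbekende font file %a",
  },
}

local function report(key, language, ...)
  local entry = messages[key]
  local fmt   = entry and (entry[language] or entry.en) or key
  local args  = { ... }
  local i     = 0
  -- expand each %a by quoting the next argument
  local line  = fmt:gsub("%%a", function()
    i = i + 1
    return string.format("%q", tostring(args[i]))
  end)
  print(line)
end

report("fonts:10", "nl", "lmroman10-missing.otf")
-- prints: onbekende font file "lmroman10-missing.otf"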
diff --git a/Master/texmf-dist/tex/context/base/mult-mps.lua b/Master/texmf-dist/tex/context/base/mult-mps.lua
index 2f2c419742e..59411cd97c4 100644
--- a/Master/texmf-dist/tex/context/base/mult-mps.lua
+++ b/Master/texmf-dist/tex/context/base/mult-mps.lua
@@ -31,7 +31,7 @@ return {
"def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
"tertiary", "primarydef", "secondarydef", "tertiarydef",
"randomseed", "also", "contour", "doublepath",
- "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif",
+ "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
"forsuffixes", "downto", "upto", "step", "until",
"charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
"boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
@@ -53,6 +53,9 @@ return {
"redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
"rgbcolor", "cmykcolor", "greycolor", "graycolor",
"colormodel", "graypart",
+ "dashpart", "penpart",
+-- "colorpart",
+ "stroked", "filled", "textual", "clipped", "bounded",
"expandafter",
},
commands = {
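(Note: the mult-mps.lua hunks above only extend the MetaPost keyword arrays, adding "within", "dashpart", "penpart", "stroked", "filled", "textual", "clipped" and "bounded". Keyword arrays like these are typically converted into lookup sets before use, for example by a lexer or pretty-printer; a small, hypothetical Lua sketch, not taken from the ConTeXt sources:)

-- hypothetical sketch: turn a keyword array into a constant-time lookup set
local primitives = {
  "withcolor", "withpen", "dashed", "within",
  "dashpart", "penpart", "stroked", "filled", "textual", "clipped", "bounded",
}

local is_primitive = {}
for _, word in ipairs(primitives) do
  is_primitive[word] = true
end

print(is_primitive["within"] and "keyword" or "not a keyword")  -- keyword
print(is_primitive["foobar"] and "keyword" or "not a keyword")  -- not a keyword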
diff --git a/Master/texmf-dist/tex/context/base/mult-nl.mkii b/Master/texmf-dist/tex/context/base/mult-nl.mkii
index 01f7fa20435..5f1bada7afd 100644
--- a/Master/texmf-dist/tex/context/base/mult-nl.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-nl.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{hoofdstuk}
\setinterfacevariable{character}{letter}
\setinterfacevariable{characters}{letters}
+\setinterfacevariable{chemistry}{chemie}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{kleur}
\setinterfacevariable{column}{kolom}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{doornummering}
\setinterfacevariable{environment}{omgeving}
\setinterfacevariable{even}{even}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{extern}
\setinterfacevariable{fact}{gegeven}
\setinterfacevariable{february}{februari}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{linkerpagina}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legenda}
+\setinterfacevariable{less}{minder}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{regel}
\setinterfacevariable{linenote}{regelnoot}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{maandag}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{maand}
+\setinterfacevariable{more}{meer}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{naam}
\setinterfacevariable{narrow}{smal}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{subvooruit}
\setinterfacevariable{subject}{onderwerp}
\setinterfacevariable{subpage}{subpagina}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{subparagraaf}
\setinterfacevariable{subsubject}{subonderwerp}
\setinterfacevariable{subsubsection}{subsubparagraaf}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{onder}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{ondercommando}
\setinterfaceconstant{bottomdistance}{onderafstand}
\setinterfaceconstant{bottomframe}{onderkader}
\setinterfaceconstant{bottomoffset}{onderoffset}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{eerstepagina}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusuit}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{laatstepagina}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{links}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{linkersubzin}
\setinterfaceconstant{lefttext}{linkertekst}
\setinterfaceconstant{leftwidth}{linkerbreedte}
+\setinterfaceconstant{less}{minder}
\setinterfaceconstant{level}{niveau}
\setinterfaceconstant{levels}{niveaus}
\setinterfaceconstant{limittext}{limiettekst}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{markering}
\setinterfaceconstant{marstyle}{marletter}
\setinterfaceconstant{mask}{masker}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{maxhoogte}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{menu}
\setinterfaceconstant{method}{methode}
\setinterfaceconstant{middle}{midden}
+\setinterfaceconstant{middlecommand}{middencommando}
\setinterfaceconstant{middlespeech}{middenuitspraak}
\setinterfaceconstant{middletext}{middentekst}
\setinterfaceconstant{midsentence}{middenzin}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{minhoogte}
\setinterfaceconstant{minwidth}{minbreedte}
\setinterfaceconstant{monthconversion}{maandconversie}
+\setinterfaceconstant{more}{meer}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{naam}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{scheider}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
+\setinterfaceconstant{shrink}{krimp}
\setinterfaceconstant{side}{zij}
\setinterfaceconstant{sidealign}{zijuitlijnen}
\setinterfaceconstant{sidemethod}{zijmethode}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{teken}
\setinterfaceconstant{size}{formaat}
\setinterfaceconstant{small}{klein}
+\setinterfaceconstant{solution}{oplossing}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sortering}
\setinterfaceconstant{source}{bron}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{tolerantie}
\setinterfaceconstant{top}{boven}
+\setinterfaceconstant{topcommand}{bovencommando}
\setinterfaceconstant{topdistance}{bovenafstand}
\setinterfaceconstant{topframe}{bovenkader}
\setinterfaceconstant{topoffset}{bovenoffset}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{switchtorawfont}
\setinterfacecommand{sym}{sym}
\setinterfacecommand{symbol}{symbool}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{synchronisatiebalk}
\setinterfacecommand{synchronize}{synchroniseer}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-pe.mkii b/Master/texmf-dist/tex/context/base/mult-pe.mkii
index 6dcd2fef6f6..f55a7ab5936 100644
--- a/Master/texmf-dist/tex/context/base/mult-pe.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-pe.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{فصل}
\setinterfacevariable{character}{حرف}
\setinterfacevariable{characters}{حرفها}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{رنگ}
\setinterfacevariable{column}{ستون}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{شماره‌بندی}
\setinterfacevariable{environment}{محیط}
\setinterfacevariable{even}{زوج}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{خارجی}
\setinterfacevariable{fact}{fact}
\setinterfacevariable{february}{فوریه}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{صفحه‌چپ}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{راهنما}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{شکست‌کلمات‌کمتر}
\setinterfacevariable{line}{خط}
\setinterfacevariable{linenote}{خط‌نوشت}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{دوشنبه}
\setinterfacevariable{mono}{مونو}
\setinterfacevariable{month}{ماه}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{شکست‌کلمات‌بیشتر}
\setinterfacevariable{name}{نام}
\setinterfacevariable{narrow}{نازک}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{زیرجلوگرد}
\setinterfacevariable{subject}{موضوع}
\setinterfacevariable{subpage}{زیرصفحه}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{زیربخش}
\setinterfacevariable{subsubject}{زیرموضوع}
\setinterfacevariable{subsubsection}{زیرزیربخش}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{پایین}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{فاصله‌پایین}
\setinterfaceconstant{bottomframe}{قالب‌پایین}
\setinterfaceconstant{bottomoffset}{آفست‌پایین}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{صفحه‌اول}
\setinterfaceconstant{focus}{تمرکز}
\setinterfaceconstant{focusin}{تمرکزدرون}
\setinterfaceconstant{focusout}{تمرکزبیرون}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{برچسب}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{صفحه‌آخر}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{چپ}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{زیرجمله‌چپ}
\setinterfaceconstant{lefttext}{متن‌چپ}
\setinterfaceconstant{leftwidth}{عرض‌خط}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{مرحله}
\setinterfaceconstant{levels}{مرحله‌ها}
\setinterfaceconstant{limittext}{مرزمتن}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{نشانه‌گذاری}
\setinterfaceconstant{marstyle}{سبک‌حاش}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{بیشترین}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{بیشترین‌ارتفاع}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{منو}
\setinterfaceconstant{method}{روش}
\setinterfaceconstant{middle}{میان}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{سخنرانی‌میانی}
\setinterfaceconstant{middletext}{متن‌میانی}
\setinterfaceconstant{midsentence}{جمله‌میانی}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{کمترین‌ارتفاع}
\setinterfaceconstant{minwidth}{کمترین‌عرض}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{نام}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{جداکننده}
\setinterfaceconstant{set}{قراربده}
\setinterfaceconstant{setups}{بارگذاریها}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{کنار}
\setinterfaceconstant{sidealign}{تنظیم‌کنار}
\setinterfaceconstant{sidemethod}{روش‌کنار}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{علامت}
\setinterfaceconstant{size}{اندازه}
\setinterfaceconstant{small}{کوچک}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{ترتیب‌تایپ}
\setinterfaceconstant{source}{منبع}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{بردباری}
\setinterfaceconstant{top}{بالا}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{فاصله‌بالا}
\setinterfaceconstant{topframe}{قالب‌راست}
\setinterfaceconstant{topoffset}{آفست‌بالا}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{تغییربه‌قلم‌خام}
\setinterfacecommand{sym}{نم}
\setinterfacecommand{symbol}{نماد}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{میله‌تطابق}
\setinterfacecommand{synchronize}{تطابق}
\setinterfacecommand{tab}{تب}
diff --git a/Master/texmf-dist/tex/context/base/mult-prm.lua b/Master/texmf-dist/tex/context/base/mult-prm.lua
index 1775fe0bc5c..e6fa4abccf3 100644
--- a/Master/texmf-dist/tex/context/base/mult-prm.lua
+++ b/Master/texmf-dist/tex/context/base/mult-prm.lua
@@ -1,3 +1,5 @@
+-- the tex table has overlap
+
return {
["aleph"]={
"AlephVersion",
@@ -76,13 +78,14 @@ return {
"widowpenalties",
},
["luatex"]={
+ "Uchar",
"Udelcode",
"Udelcodenum",
"Udelimiter",
"Udelimiterover",
"Udelimiterunder",
"Umathaccent",
- "Umathaccents",
+--"Umathaccents",
"Umathaxis",
"Umathbinbinspacing",
"Umathbinclosespacing",
@@ -92,7 +95,7 @@ return {
"Umathbinordspacing",
"Umathbinpunctspacing",
"Umathbinrelspacing",
- "Umathbotaccent",
+--"Umathbotaccent",
"Umathchar",
"Umathchardef",
"Umathcharnum",
@@ -410,7 +413,7 @@ return {
"Udelimiterover",
"Udelimiterunder",
"Umathaccent",
- "Umathaccents",
+--"Umathaccents",
"Umathaxis",
"Umathbinbinspacing",
"Umathbinclosespacing",
@@ -420,7 +423,7 @@ return {
"Umathbinordspacing",
"Umathbinpunctspacing",
"Umathbinrelspacing",
- "Umathbotaccent",
+--"Umathbotaccent",
"Umathchar",
"Umathchardef",
"Umathcharnum",
@@ -1024,6 +1027,7 @@ return {
"skewchar",
"skip",
"skipdef",
+--"skipexpr",
"spacefactor",
"spaceskip",
"span",
@@ -1111,4 +1115,4 @@ return {
["xetex"]={
"XeTeXversion",
},
-} \ No newline at end of file
+}
diff --git a/Master/texmf-dist/tex/context/base/mult-prm.mkiv b/Master/texmf-dist/tex/context/base/mult-prm.mkiv
index 7e5bc1edda7..e385341c2c6 100644
--- a/Master/texmf-dist/tex/context/base/mult-prm.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-prm.mkiv
@@ -1,3 +1,18 @@
+%D \module
+%D [ file=mult-prm,
+%D version=2011.09.18, % actually older
+%D title=\CONTEXT\ Multilingual Macros,
+%D subtitle=Primitives,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore
+%C copyrighted by \PRAGMA. See mreadme.pdf for details.
+
+%D This file is only a helper for generating files that can be used in an
+%D editor for syntax highlighting.
+
\startluacode
context.starttext()
@@ -22,8 +37,11 @@
luatex = {
},
aleph = {
+ "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion",
},
omega = {
+ "Omegaminorversion", "Omegarevision", "Omegaversion",
+ "omathcode", "odelcode", "omathchardef", "omathchar", "omathaccent", "odelimiter", "oradical",
},
xetex = {
"XeTeXversion",
diff --git a/Master/texmf-dist/tex/context/base/mult-ro.mkii b/Master/texmf-dist/tex/context/base/mult-ro.mkii
index 1eef26e0dd5..34dd385a37b 100644
--- a/Master/texmf-dist/tex/context/base/mult-ro.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-ro.mkii
@@ -120,6 +120,7 @@
\setinterfacevariable{chapter}{capitol}
\setinterfacevariable{character}{caracter}
\setinterfacevariable{characters}{caractere}
+\setinterfacevariable{chemistry}{chemistry}
\setinterfacevariable{cite}{cite}
\setinterfacevariable{color}{culoare}
\setinterfacevariable{column}{coloana}
@@ -157,6 +158,7 @@
\setinterfacevariable{enumeration}{enumerare}
\setinterfacevariable{environment}{mediu}
\setinterfacevariable{even}{par}
+\setinterfacevariable{export}{export}
\setinterfacevariable{external}{extern}
\setinterfacevariable{fact}{fapt}
\setinterfacevariable{february}{februarie}
@@ -258,6 +260,7 @@
\setinterfacevariable{leftpage}{paginastanga}
\setinterfacevariable{lefttoright}{lefttoright}
\setinterfacevariable{legend}{legenda}
+\setinterfacevariable{less}{less}
\setinterfacevariable{lesshyphenation}{lesshyphenation}
\setinterfacevariable{line}{linie}
\setinterfacevariable{linenote}{linenote}
@@ -298,6 +301,7 @@
\setinterfacevariable{monday}{luni}
\setinterfacevariable{mono}{mono}
\setinterfacevariable{month}{luna}
+\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
\setinterfacevariable{name}{nume}
\setinterfacevariable{narrow}{ingust}
@@ -455,6 +459,7 @@
\setinterfacevariable{subforward}{subavans}
\setinterfacevariable{subject}{subiect}
\setinterfacevariable{subpage}{subpagina}
+\setinterfacevariable{subs}{subs}
\setinterfacevariable{subsection}{subsectiune}
\setinterfacevariable{subsubject}{subsubiect}
\setinterfacevariable{subsubsection}{subsubsectiune}
@@ -592,6 +597,7 @@
\setinterfaceconstant{bottom}{jos}
\setinterfaceconstant{bottomafter}{bottomafter}
\setinterfaceconstant{bottombefore}{bottombefore}
+\setinterfaceconstant{bottomcommand}{bottomcommand}
\setinterfaceconstant{bottomdistance}{distantajos}
\setinterfaceconstant{bottomframe}{framejos}
\setinterfaceconstant{bottomoffset}{offsetjos}
@@ -684,6 +690,7 @@
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
+\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
\setinterfaceconstant{focusin}{focusin}
\setinterfaceconstant{focusout}{focusout}
@@ -750,6 +757,7 @@
\setinterfaceconstant{label}{eticheta}
\setinterfaceconstant{labeloffset}{labeloffset}
\setinterfaceconstant{lastnamesep}{lastnamesep}
+\setinterfaceconstant{lastpage}{ultimapagina}
\setinterfaceconstant{lastpubsep}{lastpubsep}
\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{stanga}
@@ -770,6 +778,7 @@
\setinterfaceconstant{leftsubsentence}{subpropozitiestanga}
\setinterfaceconstant{lefttext}{textstanga}
\setinterfaceconstant{leftwidth}{latimestanga}
+\setinterfaceconstant{less}{less}
\setinterfaceconstant{level}{nivel}
\setinterfaceconstant{levels}{nivele}
\setinterfaceconstant{limittext}{limittext}
@@ -792,6 +801,7 @@
\setinterfaceconstant{marking}{marcaje}
\setinterfaceconstant{marstyle}{stilmarcaj}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
\setinterfaceconstant{maxheight}{inaltimemaxima}
@@ -800,6 +810,7 @@
\setinterfaceconstant{menu}{meniu}
\setinterfaceconstant{method}{metoda}
\setinterfaceconstant{middle}{mijloc}
+\setinterfaceconstant{middlecommand}{middlecommand}
\setinterfaceconstant{middlespeech}{middlespeech}
\setinterfaceconstant{middletext}{textmijloc}
\setinterfaceconstant{midsentence}{midsentence}
@@ -808,6 +819,7 @@
\setinterfaceconstant{minheight}{inaltimeminima}
\setinterfaceconstant{minwidth}{latimeminima}
\setinterfaceconstant{monthconversion}{monthconversion}
+\setinterfaceconstant{more}{more}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nume}
\setinterfaceconstant{namesep}{namesep}
@@ -962,6 +974,7 @@
\setinterfaceconstant{separator}{separator}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
+\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{parte}
\setinterfaceconstant{sidealign}{sidealign}
\setinterfaceconstant{sidemethod}{sidemethod}
@@ -970,6 +983,7 @@
\setinterfaceconstant{sign}{semn}
\setinterfaceconstant{size}{dimensiune}
\setinterfaceconstant{small}{mic}
+\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
\setinterfaceconstant{source}{sursa}
@@ -1037,6 +1051,7 @@
\setinterfaceconstant{toffset}{toffset}
\setinterfaceconstant{tolerance}{toleranta}
\setinterfaceconstant{top}{sus}
+\setinterfaceconstant{topcommand}{topcommand}
\setinterfaceconstant{topdistance}{distantasus}
\setinterfaceconstant{topframe}{framesus}
\setinterfaceconstant{topoffset}{offsetsus}
@@ -1691,6 +1706,7 @@
\setinterfacecommand{switchtorawfont}{trecilafontraw}
\setinterfacecommand{sym}{sim}
\setinterfacecommand{symbol}{simbol}
+\setinterfacecommand{symoffset}{symoffset}
\setinterfacecommand{synchronizationbar}{barasincronizare}
\setinterfacecommand{synchronize}{sincronizeaza}
\setinterfacecommand{tab}{tab}
diff --git a/Master/texmf-dist/tex/context/base/mult-sys.mkiv b/Master/texmf-dist/tex/context/base/mult-sys.mkiv
index 9b67ab4bad5..135d8bcf7bd 100644
--- a/Master/texmf-dist/tex/context/base/mult-sys.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-sys.mkiv
@@ -11,20 +11,19 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D In boring module we define a lot of obscure but useful
-%D system constants. By doing so we save lots of memory while
-%D at the same time we prevent ourself from typing errors.
+%D In this boring module we define a lot of obscure but useful system constants.
+%D By doing so we save lots of memory while at the same time we prevent
+%D ourselves from making typing errors.
\writestatus{loading}{ConTeXt Multilingual Macros / System}
\unprotect
-%D This file is mostly the same as the \MKII\ variant but we
-%D keep extending \MKIV, so it was bout time to have a dedicated
-%D variant.
+%D This file is mostly the same as the \MKII\ variant but we keep extending
+%D \MKIV, so it was about time to have a dedicated variant.
%D
-%D The constants are grouped in such a way that there is a
-%D minimal change of conflicts.
+%D The constants are grouped in such a way that there is a minimal chance of
+%D conflicts.
%D
%D \starttyping
%D \definesystemconstants {word}
@@ -36,8 +35,8 @@
\definesystemconstant {hans}
\definesystemconstant {taco}
-%D First we define some system constants used for both the
-%D multi||lingual interface and multi||linguag typesetting.
+%D First we define some system constants used for both the multi||lingual
+%D interface and multi||lingual typesetting.
\definesystemconstant {afrikaans} \definesystemconstant {af}
\definesystemconstant {arabic} \definesystemconstant {ar}
@@ -83,8 +82,7 @@
\definesystemconstant {japanese} \definesystemconstant {ja}
\definesystemconstant {korean} \definesystemconstant {kr}
-%D For proper \UNICODE\ support we need a few font related
-%D constants.
+%D For proper \UNICODE\ support we need a few font related constants.
\definesystemconstant {BoldItalic}
\definesystemconstant {BoldSlanted}
@@ -102,8 +100,8 @@
\definesystemconstant {Support}
\definesystemconstant {Type}
-\definesystemconstant {Math}
-\definesystemconstant {MathBold}
+\definesystemconstant {Math} % not used
+\definesystemconstant {MathBold} % not used
\definesystemconstant {MathRoman}
\definesystemconstant {MathRomanBold}
@@ -115,7 +113,7 @@
\definesystemconstant {SerifSlanted}
\definesystemconstant {SerifBoldSlanted}
\definesystemconstant {SerifCaps}
-% \definesystemconstant {SerifCapsSlanted}
+%definesystemconstant {SerifCapsSlanted}
\definesystemconstant {Sans}
\definesystemconstant {SansBold}
@@ -155,6 +153,9 @@
%definesystemconstant {mnem} % kind of generic short tag
+\definesystemconstant {otr}
+\definesystemconstant {box}
+
\definesystemconstant {file}
\definesystemconstant {name}
\definesystemconstant {spec}
@@ -173,6 +174,7 @@
\definesystemconstant {fraktur}
\definesystemconstant {blackboard}
+\definesystemconstant {mi} % maybe some day a special default vector
\definesystemconstant {tf}
\definesystemconstant {sl}
\definesystemconstant {it}
@@ -220,174 +222,179 @@
\definesystemconstant {second}
\definesystemconstant {third}
-%D Net come some \CONTEXT\ constants, used in the definition
-%D of private commands:
-
-\definesystemconstant {tex}
-\definesystemconstant {xml}
-\definesystemconstant {lua}
-
-\definesystemconstant {next}
-\definesystemconstant {pickup}
-\definesystemconstant {ascii}
-\definesystemconstant {default}
-\definesystemconstant {unknown}
-\definesystemconstant {action}
-\definesystemconstant {compare}
-\definesystemconstant {do}
-\definesystemconstant {dodo}
-\definesystemconstant {complex}
-\definesystemconstant {simple}
-\definesystemconstant {start}
-\definesystemconstant {stop}
-\definesystemconstant {dummy}
-\definesystemconstant {local}
-\definesystemconstant {global}
-\definesystemconstant {done}
-\definesystemconstant {font}
-\definesystemconstant {link}
-\definesystemconstant {parent}
-\definesystemconstant {child}
-\definesystemconstant {clone}
-\definesystemconstant {section}
-\definesystemconstant {handler}
-\definesystemconstant {counter}
-\definesystemconstant {single}
-\definesystemconstant {multi}
-\definesystemconstant {indeed}
-
-\definesystemconstant {hasnumber}
-\definesystemconstant {hastitle}
-\definesystemconstant {hascaption}
-\definesystemconstant {haslevel}
-
-\definesystemconstant {mkiv}
-\definesystemconstant {mkii}
-
-\definesystemconstant {normal}
-\definesystemconstant {bold}
-\definesystemconstant {italic}
-\definesystemconstant {slanted}
-
-\definesystemconstant {default}
-\definesystemconstant {smallcaps}
-
-\definesystemconstant {run}
-
-\definesystemconstant {mode}
-\definesystemconstant {setup}
-\definesystemconstant {environment}
-\definesystemconstant {document}
-
-%definesystemconstant {fam}
-\definesystemconstant {text}
-\definesystemconstant {script}
-\definesystemconstant {scriptscript}
-
-\definesystemconstant {lefthyphenmin}
-\definesystemconstant {righthyphenmin}
-\definesystemconstant {lefthyphenchar}
-\definesystemconstant {righthyphenchar}
-
-%definesystemconstant {skewchar}
-%definesystemconstant {hyphenchar}
-\definesystemconstant {catcodes}
-%definesystemconstant {encoding}
-%definesystemconstant {entities}
-%definesystemconstant {resource}
-%definesystemconstant {mapping}
-\definesystemconstant {language}
-\definesystemconstant {patterns}
-\definesystemconstant {rname}
-\definesystemconstant {rscale}
-%definesystemconstant {handling}
-\definesystemconstant {features}
-\definesystemconstant {direction}
-\definesystemconstant {fallbacks}
-\definesystemconstant {goodies}
-\definesystemconstant {designsize}
-%definesystemconstant {background}
-%definesystemconstant {ucmap}
-
-%definesystemconstant {property}
-%definesystemconstant {overprint}
-%definesystemconstant {layer}
-\definesystemconstant {effect} % todo s
-%definesystemconstant {negative}
-%definesystemconstant {color}
-%definesystemconstant {transparency}
-
-\definesystemconstant {black}
-\definesystemconstant {white}
-
-\definesystemconstant {system} % not yet interfaces messages
-\definesystemconstant {user}
-\definesystemconstant {check}
-\definesystemconstant {reset}
-\definesystemconstant {set}
-\definesystemconstant {empty}
-
-\definesystemconstant {realpage}
-\definesystemconstant {userpage}
-\definesystemconstant {subpage}
-
-\definesystemconstant {page}
-\definesystemconstant {subpage}
-\definesystemconstant {leftpage}
-\definesystemconstant {rightpage}
-\definesystemconstant {somewhere}
-
-\definesystemconstant {userdata}
-\definesystemconstant {command}
-\definesystemconstant {simple}
-
-\definesystemconstant {full}
-\definesystemconstant {text}
-\definesystemconstant {paragraph}
-\definesystemconstant {margintext}
-\definesystemconstant {line}
-
-\definesystemconstant {number}
-\definesystemconstant {symbol}
-\definesystemconstant {format}
-\definesystemconstant {data}
-\definesystemconstant {float}
-\definesystemconstant {extensions}
-\definesystemconstant {initializations}
-
-\definesystemconstant {rgb}
-\definesystemconstant {cmyk}
-\definesystemconstant {gray}
-\definesystemconstant {spot}
-\definesystemconstant {all}
-\definesystemconstant {dtp}
-\definesystemconstant {none}
-
-\definesystemconstant {map}
-\definesystemconstant {special}
-\definesystemconstant {size}
-
-\definesystemconstant {depth}
-\definesystemconstant {nodepth}
+%D Next come some \CONTEXT\ constants, used in the definition of private commands:
+
+\definesystemconstant {tex}
+\definesystemconstant {xml}
+\definesystemconstant {lua}
+
+\definesystemconstant {next}
+\definesystemconstant {pickup}
+\definesystemconstant {ascii}
+\definesystemconstant {default}
+\definesystemconstant {unknown}
+\definesystemconstant {action}
+\definesystemconstant {compare}
+\definesystemconstant {do}
+\definesystemconstant {dodo}
+\definesystemconstant {complex}
+\definesystemconstant {simple}
+\definesystemconstant {start}
+\definesystemconstant {stop}
+\definesystemconstant {dummy}
+\definesystemconstant {local}
+\definesystemconstant {global}
+\definesystemconstant {done}
+\definesystemconstant {font}
+\definesystemconstant {link}
+\definesystemconstant {parent}
+\definesystemconstant {child}
+\definesystemconstant {clone}
+\definesystemconstant {section}
+\definesystemconstant {handler}
+\definesystemconstant {counter}
+\definesystemconstant {single}
+\definesystemconstant {multi}
+\definesystemconstant {indeed}
+
+% \def\s!parent{->} % 1% faster / => does not work in assignments
+% \def\s!child {<-} % 1% faster / <= does not work in assignments
+
+\definesystemconstant {hasnumber}
+\definesystemconstant {hastitle}
+\definesystemconstant {hascaption}
+\definesystemconstant {haslevel}
+
+\definesystemconstant {mkiv}
+\definesystemconstant {mkii}
+
+\definesystemconstant {normal}
+\definesystemconstant {bold}
+\definesystemconstant {italic}
+\definesystemconstant {slanted}
+
+\definesystemconstant {default}
+\definesystemconstant {smallcaps}
+
+\definesystemconstant {run}
+
+\definesystemconstant {mode}
+\definesystemconstant {setup}
+\definesystemconstant {environment}
+\definesystemconstant {document}
+
+%definesystemconstant {fam}
+\definesystemconstant {display}
+\definesystemconstant {text}
+\definesystemconstant {script}
+\definesystemconstant {scriptscript}
+\definesystemconstant {uncramped}
+\definesystemconstant {cramped}
+
+\definesystemconstant {lefthyphenmin}
+\definesystemconstant {righthyphenmin}
+\definesystemconstant {lefthyphenchar}
+\definesystemconstant {righthyphenchar}
+
+%definesystemconstant {skewchar}
+%definesystemconstant {hyphenchar}
+\definesystemconstant {catcodes}
+%definesystemconstant {encoding}
+%definesystemconstant {entities}
+%definesystemconstant {resource}
+\definesystemconstant {mapping}
+\definesystemconstant {language}
+\definesystemconstant {patterns}
+\definesystemconstant {rname}
+\definesystemconstant {rscale}
+%definesystemconstant {handling}
+\definesystemconstant {features}
+\definesystemconstant {direction}
+\definesystemconstant {fallbacks}
+\definesystemconstant {goodies}
+\definesystemconstant {designsize}
+%definesystemconstant {background}
+%definesystemconstant {ucmap}
+
+%definesystemconstant {property}
+%definesystemconstant {overprint}
+%definesystemconstant {layer}
+\definesystemconstant {effect} % todo s
+%definesystemconstant {negative}
+%definesystemconstant {color}
+%definesystemconstant {transparency}
+
+\definesystemconstant {black}
+\definesystemconstant {white}
+
+\definesystemconstant {system} % not yet interfaces messages
+\definesystemconstant {user}
+\definesystemconstant {check}
+\definesystemconstant {reset}
+\definesystemconstant {set}
+\definesystemconstant {empty}
+
+\definesystemconstant {realpage}
+\definesystemconstant {userpage}
+\definesystemconstant {subpage}
+
+\definesystemconstant {page}
+\definesystemconstant {subpage}
+\definesystemconstant {leftpage}
+\definesystemconstant {rightpage}
+\definesystemconstant {somewhere}
+
+\definesystemconstant {userdata}
+\definesystemconstant {command}
+\definesystemconstant {simple}
+
+\definesystemconstant {full}
+\definesystemconstant {text}
+\definesystemconstant {paragraph}
+\definesystemconstant {margintext}
+\definesystemconstant {line}
+
+\definesystemconstant {number}
+\definesystemconstant {symbol}
+\definesystemconstant {format}
+\definesystemconstant {data}
+\definesystemconstant {float}
+\definesystemconstant {extensions}
+\definesystemconstant {initializations}
+
+\definesystemconstant {rgb}
+\definesystemconstant {cmyk}
+\definesystemconstant {gray}
+\definesystemconstant {spot}
+\definesystemconstant {all}
+\definesystemconstant {dtp}
+\definesystemconstant {none}
+
+\definesystemconstant {map}
+\definesystemconstant {special}
+\definesystemconstant {size}
+
+\definesystemconstant {depth}
+\definesystemconstant {nodepth}
%D Just to be complete we define the standard \TEX\ units.
-\definesystemconstant {cm}
-\definesystemconstant {em}
-\definesystemconstant {ex}
-\definesystemconstant {mm}
-\definesystemconstant {pt}
-\definesystemconstant {sp}
-\definesystemconstant {bp}
-\definesystemconstant {in}
-\definesystemconstant {pc}
-\definesystemconstant {dd}
-\definesystemconstant {cc}
-\definesystemconstant {nd}
-\definesystemconstant {nc}
-
-%D As the name of their define command states, the next set of
-%D constants is used in the message macro's.
+\definesystemconstant {cm}
+\definesystemconstant {em}
+\definesystemconstant {ex}
+\definesystemconstant {mm}
+\definesystemconstant {pt}
+\definesystemconstant {sp}
+\definesystemconstant {bp}
+\definesystemconstant {in}
+\definesystemconstant {pc}
+\definesystemconstant {dd}
+\definesystemconstant {cc}
+\definesystemconstant {nd}
+\definesystemconstant {nc}
+
+%D As the name of their define command states, the next set of constants is used in
+%D the message macros.
\definemessageconstant {check}
\definemessageconstant {colors}
@@ -415,9 +422,8 @@
\definemessageconstant {chemicals}
\definemessageconstant {publications}
-%D When we use numbers and dimensions the same applies as
-%D with the keywords like \type{width} and \type{plus}
-%D mentioned earlier.
+%D When we use numbers and dimensions the same applies as with the keywords like
+%D \type {width} and \type {plus} mentioned earlier.
\def\!!ten {10}
\def\!!twelve {12}
@@ -447,138 +453,39 @@
\def\__unknown__ {\string\\//} % unlikely value
-%D Variables are composed of a command specific tag and a user
-%D supplied variable (system constant). The first tag \type{ag}
-%D for instance is available as \type{\??ag} and expands to
-%D \type{@@ag} in composed variables.
-
-\definesystemvariable {ab} % AlignedBoxes
-\definesystemvariable {ac} % ACcent
-\definesystemvariable {ae} % AttributEs
-\definesystemvariable {al} % ALinea's
-\definesystemvariable {an} % ANchor
-\definesystemvariable {as} % AlignmentSwitch
-\definesystemvariable {bg} % BleedinG
-\definesystemvariable {bm} % BookMark
-\definesystemvariable {bp} % BreakPoint
-\definesystemvariable {bx} % BackendExport
-\definesystemvariable {cb} % CollectBox
-\definesystemvariable {cp} % CliP
-\definesystemvariable {da} % DAte
-\definesystemvariable {db} % Labels
-\definesystemvariable {dd} % DoorDefinieren
-\definesystemvariable {de} % DEel
-\definesystemvariable {dl} % DunneLijnen
-\definesystemvariable {dn} % DoorNummeren
-\definesystemvariable {dm} % DefineMeasure
-\definesystemvariable {du} % DUmmy
-\definesystemvariable {ef} % ExternFiguur
-\definesystemvariable {en} % ENvironments
-%definesystemvariable {er} % external resources
-\definesystemvariable {et} % EffecT
-\definesystemvariable {ex} % ExterneFiguren
-\definesystemvariable {fc} % FramedContent
-\definesystemvariable {fi} % FIle Once
-\definesystemvariable {fo} % xml FO (xtag)
-\definesystemvariable {fu} % FontSolution
+%D Variables are composed of a command-specific tag and a user-supplied variable
+%D (system constant). The tag \type {du} for instance is available as \type {\??du}
+%D and expands to \type {@@du} in composed variables.
+
+\definesystemvariable {du} % dummy, will stay
+
+% bibl:
+
+\definesystemvariable {pv} % PublicationVariable
+\definesystemvariable {pb} % PuBlication
+
+% needs checking (namespaces now)
+
\definesystemvariable {fw} % simpleFonts by Wolfgang
-\definesystemvariable {fx} % FoXet
-\definesystemvariable {gb} % Graphic Bitmaps
-\definesystemvariable {gv} % Graphic Variable
-\definesystemvariable {ha} % HAng
-\definesystemvariable {id} % Index
-\definesystemvariable {ih} % InHoudsopgave
-\definesystemvariable {il} % stelInvulRegelsin
-\definesystemvariable {ip} % InsertPages
-\definesystemvariable {is} % Items
-\definesystemvariable {it} % stelInTerliniein
-\definesystemvariable {iv} % stelInvulLijnenin
-\definesystemvariable {ka} % KAntlijn
-\definesystemvariable {kl} % KoLommen
-\definesystemvariable {km} % KenMerk
-\definesystemvariable {kp} % KopPelteken
-\definesystemvariable {ks} % KolomSpan
+
+% old pragma
+
\definesystemvariable {kt} % KonTakten
\definesystemvariable {kw} % KontaktWaarde
-\definesystemvariable {le} % LinetablE
-\definesystemvariable {lf} % LocalFigures
-\definesystemvariable {lg} % taal (LanGuage)
-\definesystemvariable {lk} % LinK
-\definesystemvariable {ll} % Layers
-\definesystemvariable {lr} % LayeR
-\definesystemvariable {lu} % LUacode
-\definesystemvariable {lx} % LayerteXt
-\definesystemvariable {ma} % MargeAchtergrond
-\definesystemvariable {mc} % MultiColumn
-\definesystemvariable {mi} % MultilingualInterface
-\definesystemvariable {ml} % MultilingualLabel
-\definesystemvariable {mm} % MultilingualMath
-\definesystemvariable {mt} % inline MaTh
-\definesystemvariable {mt} % multi column (!!! double usage)
-\definesystemvariable {mo} % Math Options
-\definesystemvariable {mp} % MetaPost
-\definesystemvariable {nn} % structurenumbering
-\definesystemvariable {nm} % Nummering
-\definesystemvariable {np} % NaastPlaatsen
-\definesystemvariable {nr} % Nummeren
-\definesystemvariable {ob} % OBjects
-\definesystemvariable {oi} % OmlijndInstellingen
-\definesystemvariable {ol} % OmLijnd
-\definesystemvariable {od} % Omlijnd Defaults (simple)
-\definesystemvariable {ox} % OffsetBox
-\definesystemvariable {pb} % PuBlication
-\definesystemvariable {pc} % PageComment
-\definesystemvariable {ph} % ParagrapH
-\definesystemvariable {pn} % PaginaNummer
-\definesystemvariable {pr} % PRogrammas
-\definesystemvariable {ps} % PoSitioneren
-\definesystemvariable {px} % Parallel
-\definesystemvariable {py} % PropertYs
-\definesystemvariable {pv} % PublicationVariable
-\definesystemvariable {ql} % catcode table let % already defined
-\definesystemvariable {qd} % catcode table def % already defined
-\definesystemvariable {qu} % catcode table ued % already defined
-\definesystemvariable {qm} % catcode table meaning % already defined
-\definesystemvariable {rf} % ReFerencing
-\definesystemvariable {rn} % RegelNummer
-\definesystemvariable {rs} % RaSters
-\definesystemvariable {rt} % RoosTers
-\definesystemvariable {rw} % RenderingWindow
-\definesystemvariable {sb} % SectieBlok
-\definesystemvariable {sd} % SounD
-\definesystemvariable {se} % SEctie
-\definesystemvariable {sh} % ShapeText
-\definesystemvariable {si} % SplIt
-\definesystemvariable {sp} % SelecteerPapier
\definesystemvariable {st} % STickers
-\definesystemvariable {sx} % Selector
-\definesystemvariable {ta} % TAb
-\definesystemvariable {tb} % TekstBlokken
-\definesystemvariable {te} % TEmplate
-\definesystemvariable {ti} % TabelInstellingen
-\definesystemvariable {tl} % TekstLijnen
-\definesystemvariable {tt} % TabulaTe
-\definesystemvariable {tx} % TeXtflow
-\definesystemvariable {ur} % URl
-\definesystemvariable {vn} % VoetNoten
-\definesystemvariable {xf} % XML File (xtag)
-\definesystemvariable {xp} % XML Processing (xtag, so still needed)
-\definesystemvariable {za} % layout adapt % ZetspiegelAanpassing
-\definesystemvariable {zc} % columns
-
-% still used but defined locally
+\definesystemvariable {km} % KenMerk
-\definesystemvariable {cs} % CharacterSpacing
-\definesystemvariable {ts} % TypeScript
-\definesystemvariable {kk} % Kapitalen
+% mkii
-% obsolete but kept for a while (core-obs)
+\definesystemvariable {xf} % XML File (xtag)
+\definesystemvariable {xp} % XML Processing (xtag, so still needed)
+\definesystemvariable {fo} % xml FO (xtag)
+\definesystemvariable {fx} % FoXet
-\definesystemvariable {fr} % Division
+% mkiv
-%D Next we define some language independant one letter
-%D variables and keywords. We can actually make these
-%D system variables.
+%D Next we define some language independent one-letter variables and keywords. We can
+%D actually make these system variables.
\defineinterfaceconstant {x} {x}
\defineinterfaceconstant {y} {y}
@@ -628,27 +535,25 @@
\def\v!oddeven#1{\ifodd#1\v!odd\else\v!even\fi}
-%D The names of files and their extensions are fixed.
-%D \CONTEXT\ uses as less files as possible. Utility files can
-%D be recognized by the first two characters of the extension:
-%D \type{tu}.
+%D The names of files and their extensions are fixed. \CONTEXT\ uses as few
+%D files as possible. Utility files can be recognized by the first two
+%D characters of the extension: \type {tu}.
%definefileconstant {utilityfilename} {texutil}
%definefileconstant {blockextension} {tub}
%definefileconstant {figureextension} {tuf}
%definefileconstant {inputextension} {tui}
%definefileconstant {outputextension} {tuo} % tup for previous run
-\definefileconstant {optionextension} {top}
-\definefileconstant {temporaryextension} {tmp}
+%definefileconstant {optionextension} {top}
+%definefileconstant {temporaryextension} {tmp}
%definefileconstant {patternsextension} {pat}
%definefileconstant {hyphensextension} {hyp}
%definefileconstant {fontmapextension} {map}
\definefileconstant {bibextension} {bbl}
-%D These files are loaded at start||up. They may contain system
-%D specific setups (or calls to other files), old macro's, to
-%D garantee compatibility and new macro's noy yet present in
-%D the format.
+%D These files are loaded at start||up. They may contain system specific setups (or
+%D calls to other files), old macros to guarantee compatibility, and new macros not
+%D yet present in the format.
\definefileconstant {errfilename} {cont-err}
\definefileconstant {sysfilename} {cont-sys}
@@ -656,9 +561,8 @@
\definefileconstant {locfilename} {cont-loc}
\definefileconstant {expfilename} {cont-exp}
-%D The setup files for the language, font, color and special
-%D subsystems have a common prefix. This means that we have at
-%D most three characters for unique filenames.
+%D The setup files for the language, font, color and special subsystems have a common
+%D prefix. This means that we have at most three characters for unique filenames.
\definefileconstant {colorprefix} {colo-}
%definefileconstant {encodingprefix} {enco-}
@@ -717,20 +621,32 @@
%defineinterfaceconstant {tmp} {tmp}
%defineinterfaceconstant {cld} {cld}
-%D A careful reader will have noticed that in the module
-%D \type{mult-ini} we defined \type{\selectinterface}. We were
-%D not yet able to actually select an interface, because we
-%D still had to define the constants and variables. Now we've
-%D done so, selection is permitted.
+%D A careful reader will have noticed that in the module \type {mult-ini} we defined
+%D \type {\selectinterface}. We were not yet able to actually select an interface,
+%D because we still had to define the constants and variables. Now that we've done so,
+%D selection is permitted.
\selectinterface
-%D Ok, here are some more, because we've got ouselves some
-%D extensions to \CONTEXT.
+%D Ok, here are some more, because we've got ourselves some extensions to \CONTEXT.
\definemessageconstant {addresses}
\definemessageconstant {documents}
-\protect
+%D Nicer than being undefined:
+
+\let\p_align \empty
+\let\p_aligntitle\empty
+\let\p_continue \empty
+\let\p_footer \empty
+\let\p_label \empty
+\let\p_number \empty
+\let\p_page \empty
+\let\p_state \empty
+\let\p_strut \empty
+\let\p_text \empty
+\let\p_tolerance \empty
+
+% more will follow
-\endinput
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/node-acc.lua b/Master/texmf-dist/tex/context/base/node-acc.lua
index d91bb921b69..4380ec3a451 100644
--- a/Master/texmf-dist/tex/context/base/node-acc.lua
+++ b/Master/texmf-dist/tex/context/base/node-acc.lua
@@ -13,8 +13,6 @@ local tasks = nodes.tasks
local traverse_nodes = node.traverse
local traverse_id = node.traverse_id
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
local copy_node = node.copy
local free_nodelist = node.flush_list
@@ -46,17 +44,17 @@ local function injectspaces(head)
g.components = nil
g.subtype = 256
end
- local a = has_attribute(n,a_characters)
+ local a = n[a_characters]
local s = copy_node(n.spec)
g.char, n.spec = 32, s
p.next, g.prev = g, p
g.next, n.prev = n, g
s.width = s.width - g.width
if a then
- set_attribute(g,a_characters,a)
+ g[a_characters] = a
end
- set_attribute(s,a_characters,0)
- set_attribute(n,a_characters,0)
+ s[a_characters] = 0
+ n[a_characters] = 0
end
--~ end
elseif id == hlist_code or id == vlist_code then
@@ -110,7 +108,7 @@ nodes.handlers.accessibility = injectspaces
--~ hyphenated[str] = hsh
--~ codes[hsh] = str
--~ end
---~ set_attribute(n,a_hyphenated,hsh)
+--~ n[a_hyphenated] = hsh
--~ end
--~ elseif id == hlist_code or id == vlist_code then
--~ injectspans(n.list)
@@ -127,7 +125,7 @@ nodes.handlers.accessibility = injectspaces
--~ for n in traverse_nodes(head) do
--~ local id = n.id
--~ if id == disc then
---~ local a = has_attribute(n,a_hyphenated)
+--~ local a = n[a_hyphenated]
--~ if a then
--~ local str = codes[a]
--~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
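The node-acc.lua hunk above and the node-aux.lua / node-bck.lua hunks below replace the node.has_attribute / node.set_attribute helpers with direct indexing of nodes by attribute number. A small sketch of the two idioms, assuming a LuaTeX whose node userdata accepts attribute numbers as keys (which is what the patched code relies on); the attribute number here is purely illustrative:

-- old style: explicit helper calls
-- new style: index the node by the attribute number (nil when unset)
local a_demo = 1234                     -- illustrative attribute number
local n = node.new("glyph")

node.set_attribute(n, a_demo, 0)        -- old
local v1 = node.has_attribute(n, a_demo)

n[a_demo] = 0                           -- new
local v2 = n[a_demo]

assert(v1 == v2)
node.free(n)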
diff --git a/Master/texmf-dist/tex/context/base/node-aux.lua b/Master/texmf-dist/tex/context/base/node-aux.lua
index 43624adfd3a..e3fc7ad6fe8 100644
--- a/Master/texmf-dist/tex/context/base/node-aux.lua
+++ b/Master/texmf-dist/tex/context/base/node-aux.lua
@@ -12,13 +12,15 @@ local type, tostring = type, tostring
local nodes, node = nodes, node
-local utfvalues = string.utfvalues
+local utfvalues = utf.values
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
+local attributelist_code = nodecodes.attributelist -- temporary
+local math_code = nodecodes.math
local nodepool = nodes.pool
@@ -29,20 +31,22 @@ local traverse_nodes = node.traverse
local traverse_id = node.traverse_id
local free_node = node.free
local hpack_nodes = node.hpack
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local get_attribute = node.get_attribute
local unset_attribute = node.unset_attribute
local first_glyph = node.first_glyph or node.first_character
local copy_node = node.copy
+local copy_node_list = node.copy_list
local slide_nodes = node.slide
local insert_node_after = node.insert_after
local isnode = node.is_node
-local current_font = font.current()
+local unsetvalue = attributes.unsetvalue
+
+local current_font = font.current
local texbox = tex.box
+local report_error = logs.reporter("node-aux:error")
+
function nodes.repackhlist(list,...)
--~ nodes.showsimplelist(list)
local temp, b = hpack_nodes(list,...)
@@ -54,7 +58,7 @@ end
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- set_attribute(n,attr,value)
+ n[attr] = value
local id = n.id
if id == hlist_node or id == vlist_node then
set_attributes(n.list,attr,value)
@@ -64,8 +68,8 @@ end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not has_attribute(n,attr) then
- set_attribute(n,attr,value)
+ if not n[attr] then
+ n[attr] = value
end
local id = n.id
if id == hlist_code or id == vlist_code then
@@ -76,7 +80,7 @@ end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- unset_attribute(n,attr)
+ n[attr] = unsetvalue
local id = n.id
if id == hlist_code or id == vlist_code then
unset_attributes(n.list,attr)
@@ -84,16 +88,16 @@ local function unset_attributes(head,attr)
end
end
+nodes.setattribute = node.set_attribute
+nodes.getattribute = node.has_attribute
+nodes.unsetattribute = node.unset_attribute
+nodes.has_attribute = node.has_attribute
+
nodes.firstglyph = first_glyph
-nodes.setattribute = set_attribute
-nodes.getattribute = has_attribute
-nodes.unsetattribute = unset_attribute
nodes.setattributes = set_attributes
nodes.setunsetattributes = set_unset_attributes
nodes.unsetattributes = unset_attributes
-nodes.has_attribute = has_attribute
-
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -200,23 +204,33 @@ function nodes.firstcharinbox(n)
return 0
end
---~ local function firstline(n)
---~ while n do
---~ local id = n.id
---~ if id == hlist_code then
---~ if n.subtype == line_code then
---~ return n
---~ else
---~ return firstline(n.list)
---~ end
---~ elseif id == vlist_code then
---~ return firstline(n.list)
---~ end
---~ n = n.next
---~ end
---~ end
-
---~ nodes.firstline = firstline
+if not node.end_of_math then
+ function node.end_of_math(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+ end
+end
+
+nodes.endofmath = node.end_of_math
+
+-- local function firstline(n)
+-- while n do
+-- local id = n.id
+-- if id == hlist_code then
+-- if n.subtype == line_code then
+-- return n
+-- else
+-- return firstline(n.list)
+-- end
+-- elseif id == vlist_code then
+-- return firstline(n.list)
+-- end
+-- n = n.next
+-- end
+-- end
+
+-- nodes.firstline = firstline
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -264,7 +278,7 @@ end
nodes.tonodes = tonodes
-local function link(head,tail,list,currentfont,currentattr)
+local function link(list,currentfont,currentattr,head,tail)
for i=1,#list do
local n = list[i]
if n then
@@ -272,6 +286,9 @@ local function link(head,tail,list,currentfont,currentattr)
if not tn then
local tn = type(n)
if tn == "number" then
+ if not currentfont then
+ currentfont = current_font()
+ end
local h, t = tonodes(tostring(n),currentfont,currentattr)
if not h then
-- skip
@@ -282,7 +299,10 @@ local function link(head,tail,list,currentfont,currentattr)
end
elseif tn == "string" then
if #tn > 0 then
- local h, t = tonodes(n,font.current(),currentattr)
+ if not currentfont then
+ currentfont = current_font()
+ end
+ local h, t = tonodes(n,currentfont,currentattr)
if not h then
-- skip
elseif not head then
@@ -293,7 +313,10 @@ local function link(head,tail,list,currentfont,currentattr)
end
elseif tn == "table" then
if #tn > 0 then
- head, tail = link(head,tail,n,currentfont,currentattr)
+ if not currentfont then
+ currentfont = current_font()
+ end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
end
elseif not head then
@@ -303,6 +326,14 @@ local function link(head,tail,list,currentfont,currentattr)
else
tail = n
end
+ elseif n.id == attributelist_code then
+ -- weird case
+ report_error("weird node type in list at index %s:",i)
+ for i=1,#list do
+ local l = list[i]
+ report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
+ end
+ os.exit()
else
tail.next = n
n.prev = tail
@@ -319,10 +350,7 @@ local function link(head,tail,list,currentfont,currentattr)
return head, tail
end
-function nodes.link(...)
- local currentfont = font.current
- return link(nil,nil,{...},currentfont,currentattr)
-end
+nodes.link = link
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
@@ -342,7 +370,7 @@ end
nodes.locate = locate
-function nodes.concat(list) -- no slide !
+function nodes.concat(list)
local head, tail
for i=1,#list do
local li = list[i]
@@ -351,10 +379,10 @@ function nodes.concat(list) -- no slide !
elseif head then
tail.next = li
li.prev = tail
- tail = li
+ tail = li.next and slide_nodes(li) or li
else
head = li
- tail = li
+ tail = li.next and slide_nodes(li) or li
end
end
return head, tail
diff --git a/Master/texmf-dist/tex/context/base/node-bck.lua b/Master/texmf-dist/tex/context/base/node-bck.lua
index a0044eb7898..feaa2c6849d 100644
--- a/Master/texmf-dist/tex/context/base/node-bck.lua
+++ b/Master/texmf-dist/tex/context/base/node-bck.lua
@@ -11,116 +11,151 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
-local nodecodes = nodes.nodecodes
+local nodecodes = nodes.nodecodes
+local listcodes = nodes.listcodes
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local cell_code = listcodes.cell
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local traverse = node.traverse
+local traverse = node.traverse
+local traverse_id = node.traverse_id
-local nodepool = nodes.pool
-local tasks = nodes.tasks
+local nodepool = nodes.pool
+local tasks = nodes.tasks
-local new_rule = nodepool.rule
-local new_glue = nodepool.glue
+local new_rule = nodepool.rule
+local new_glue = nodepool.glue
-local a_color = attributes.private('color')
-local a_transparency = attributes.private('transparency')
-local a_colorspace = attributes.private('colormodel')
-local a_background = attributes.private('background')
+local a_color = attributes.private('color')
+local a_transparency = attributes.private('transparency')
+local a_colorspace = attributes.private('colormodel')
+local a_background = attributes.private('background')
+local a_alignbackground = attributes.private('alignbackground')
-local function add_backgrounds(head)
- local id = head.id
- if id == vlist_code or id == hlist_code then
- local current = head.list
- while current do
- local id = current.id
- if id == hlist_code then -- and current.list
- local background = has_attribute(current,a_background)
+local function add_backgrounds(head) -- rather old code .. to be redone
+ local current = head
+ while current do
+ local id = current.id
+ if id == hlist_code or id == vlist_code then
+ local list = current.list
+ if list then
+ local head = add_backgrounds(list)
+ if head then
+ current.list = head
+ list = head
+ end
+ end
+ local width = current.width
+ if width > 0 then
+ local background = current[a_background]
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = has_attribute(current,a_colorspace)
+ local mode = current[a_colorspace]
if mode then
- local glue = new_glue(-current.width)
- local rule = new_rule(current.width,current.height,current.depth)
- local color = has_attribute(current,a_color)
- local transparency = has_attribute(current,a_transparency)
- set_attribute(rule,a_colorspace, mode)
+ local height = current.height
+ local depth = current.depth
+ local skip = id == hlist_code and width or (height + depth)
+ local glue = new_glue(-skip)
+ local rule = new_rule(width,height,depth)
+ local color = current[a_color]
+ local transparency = current[a_transparency]
+ rule[a_colorspace] = mode
if color then
- set_attribute(rule,a_color, color)
+ rule[a_color] = color
end
if transparency then
- set_attribute(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
rule.next = glue
- glue.next = current.list
- current.list = rule
- end
- else
- -- temporary hack for aligments
- local list, background, found = current.list, nil, nil
- for l in traverse(list) do
- background = has_attribute(l,a_background)
- if background then
- found = l
- break
+ glue.prev = rule
+ if list then
+ glue.next = list
+ list.prev = glue
end
+ current.list = rule
end
+ end
+ end
+ end
+ current = current.next
+ end
+ return head, true
+end
+
+local function add_alignbackgrounds(head)
+ local current = head
+ while current do
+ local id = current.id
+ if id == hlist_code then
+ local list = current.list
+ if not list then
+ -- no need to look
+ elseif current.subtype == cell_code then
+ local background = nil
+ local found = nil
+ -- for l in traverse(list) do
+ -- background = l[a_alignbackground]
+ -- if background then
+ -- found = l
+ -- break
+ -- end
+ -- end
+ -- we know that it's a fake hlist (could be user node)
+ -- but we cannot store tables in user nodes yet
+ for l in traverse_id(hpack_code,list) do
+ background = l[a_alignbackground]
if background then
- local mode = has_attribute(found,a_colorspace)
+ found = l
+ end
+ break
+ end
+ --
+ if background then
+ -- current has subtype 5 (cell)
+ local width = current.width
+ if width > 0 then
+ local mode = found[a_colorspace]
if mode then
- local glue = new_glue(-current.width)
- local rule = new_rule(current.width,current.height,current.depth)
- local color = has_attribute(found,a_color)
- local transparency = has_attribute(found,a_transparency)
- set_attribute(rule,a_colorspace, mode)
+ local glue = new_glue(-width)
+ local rule = new_rule(width,current.height,current.depth)
+ local color = found[a_color]
+ local transparency = found[a_transparency]
+ rule[a_colorspace] = mode
if color then
- set_attribute(rule,a_color, color)
+ rule[a_color] = color
end
if transparency then
- set_attribute(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
rule.next = glue
- glue.next = list
+ glue.prev = rule
+ if list then
+ glue.next = list
+ list.prev = glue
+ end
current.list = rule
end
- else
- add_backgrounds(current)
- end
- end
- elseif id == vlist_code then -- and current.list
- -- direct to vbox
- local background = has_attribute(current,a_background)
- if background then
- local mode = has_attribute(current,a_colorspace)
- if mode then
- local glue = new_glue(-current.height-current.depth)
- local rule = new_rule(current.width,current.height,current.depth)
- local color = has_attribute(current,a_color)
- local transparency = has_attribute(current,a_transparency)
- set_attribute(rule,a_colorspace, mode)
- if color then
- set_attribute(rule,a_color, color)
- end
- if transparency then
- set_attribute(rule,a_transparency,transparency)
- end
- rule.next = glue
- glue.next = current.list
- current.list = rule
end
end
- add_backgrounds(current)
+ else
+ add_alignbackgrounds(list)
+ end
+ elseif id == vlist_code then
+ local list = current.list
+ if list then
+ add_alignbackgrounds(list)
end
- current = current.next
end
+ current = current.next
end
return head, true
end
-nodes.handlers.backgrounds = add_backgrounds
+nodes.handlers.backgrounds = add_backgrounds
+nodes.handlers.alignbackgrounds = add_alignbackgrounds
tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
+tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
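The background handlers above all rely on the same trick: prepend a rule with the box's own dimensions to its list, followed by glue of minus that amount, so the rule is painted behind the content without changing the box's size. A simplified sketch using the same nodes.pool helpers the patch uses, with colors, attributes and vlists left out (illustrative only):

-- paint a plain background rule behind an hbox
local new_rule = nodes.pool.rule
local new_glue = nodes.pool.glue

local function paint_background(hbox)
    local width = hbox.width
    if width > 0 then
        local rule = new_rule(width, hbox.height, hbox.depth)
        local glue = new_glue(-width)   -- step back over the rule
        rule.next, glue.prev = glue, rule
        glue.next = hbox.list
        if hbox.list then
            hbox.list.prev = glue
        end
        hbox.list = rule
    end
    return hbox
end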
diff --git a/Master/texmf-dist/tex/context/base/node-bck.mkiv b/Master/texmf-dist/tex/context/base/node-bck.mkiv
index 858bd40b76f..25739c56042 100644
--- a/Master/texmf-dist/tex/context/base/node-bck.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-bck.mkiv
@@ -13,8 +13,9 @@
\writestatus{loading}{ConTeXt Node Support / Backgrounds}
-%D This is first attempt to replacing backgrounds in a few
-%D tables mechanisms.
+%D This is a first attempt at replacing backgrounds in a few table
+%D mechanisms. When used more frequently, we can store the color
+%D spec in the attribute.
\unprotect
@@ -24,6 +25,10 @@
{\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.backgrounds")}%
\glet\node_backgrounds_boxes_initialize\donothing}
+\def\node_backgrounds_align_initialize % will move to lua
+ {\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.alignbackgrounds")}%
+ \glet\node_backgrounds_align_initialize\donothing}
+
% \backgroundvbox[green] {\input tufte } \par
% \backgroundvbox[blue] {\input ward } \par
% \backgroundvbox[red] {\input knuth } \par
@@ -38,7 +43,7 @@
\def\thecolorattr#1%
{attr \colormodelattribute \attribute\colormodelattribute
- attr \colorattribute \csname\??colorattribute #1\endcsname
+ attr \colorattribute \csname\??colorattribute#1\endcsname
attr \transparencyattribute \thetransparencyattribute{#1} } % can be optimized
\def\backgroundcolorattr#1%
@@ -49,10 +54,16 @@
\fi\fi}
\def\thebackgroundcolorattr#1%
- {attr \backgroundattribute \plusone
- attr \colormodelattribute \attribute\colormodelattribute
- attr \colorattribute \csname\??colorattribute#1\endcsname
- attr \transparencyattribute \thetransparencyattribute{#1} } % can be optimized
+ {attr \backgroundattribute \plusone
+ attr \colormodelattribute \attribute\colormodelattribute
+ attr \colorattribute \csname\??colorattribute#1\endcsname
+ attr \transparencyattribute \thetransparencyattribute{#1} } % can be optimized
+
+\def\thealignbackgroundcolorattr#1%
+ {attr \alignbackgroundattribute \plusone
+ attr \colormodelattribute \attribute\colormodelattribute
+ attr \colorattribute \csname\??colorattribute#1\endcsname
+ attr \transparencyattribute \thetransparencyattribute{#1} } % can be optimized
\unexpanded\def\backgroundhbox{\node_backgrounds_boxes_add\hbox}
\unexpanded\def\backgroundvbox{\node_backgrounds_boxes_add\vbox}
@@ -84,7 +95,6 @@
% \thebackgroundcolorattr{#2}%
% \fi\fi}
-
% \def\backgroundvbox[#1]{\vbox \backgroundcolorattr{#1}}
% \def\backgroundvtop[#1]{\vtop \backgroundcolorattr{#1}}
% \def\backgroundhbox[#1]{\hbox \backgroundcolorattr{#1}}
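The MkIV macros above only attach attributes to a box; the drawing itself happens when the Lua handlers from the previous file pick the attributes up at shipout time. A rough sketch (not part of the patch) of the Lua-side view, reusing the attribute locals already visible in the first hunk:

-- sketch: what the attr specifications amount to once the handler sees the box
local function hasbackground(box)
    local mode = box[a_colorspace]             -- set via \colormodelattribute
    if box[a_background] and mode then         -- set via \backgroundattribute
        return mode, box[a_color], box[a_transparency]
    end
end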
diff --git a/Master/texmf-dist/tex/context/base/node-dir.lua b/Master/texmf-dist/tex/context/base/node-dir.lua
index 970313d96f3..6ee5cd4b860 100644
--- a/Master/texmf-dist/tex/context/base/node-dir.lua
+++ b/Master/texmf-dist/tex/context/base/node-dir.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['node-mir'] = {
+if not modules then modules = { } end modules ['node-dir'] = {
version = 1.001,
comment = "companion to node-ini.mkiv",
author = "Taco Hoekwater and Hans Hagen",
diff --git a/Master/texmf-dist/tex/context/base/node-ext.lua b/Master/texmf-dist/tex/context/base/node-ext.lua
index 5c4eec8de11..82ec04ee592 100644
--- a/Master/texmf-dist/tex/context/base/node-ext.lua
+++ b/Master/texmf-dist/tex/context/base/node-ext.lua
@@ -13,14 +13,14 @@ use external applications to process node lists.</p>
--ldx]]--
function nodes.show(stack)
--- texio.write_nl(table.serialize(stack))
+-- logs.writer(table.serialize(stack))
end
function nodes.save(stack,name) -- *.ltn : luatex node file
-- if name then
-- file.savedata(name,table.serialize(stack))
-- else
--- texio.write_nl('log',table.serialize(stack))
+-- logs.writer(table.serialize(stack))
-- end
end
diff --git a/Master/texmf-dist/tex/context/base/node-fin.lua b/Master/texmf-dist/tex/context/base/node-fin.lua
index 5c1cc9ad57a..2e62ebcb5aa 100644
--- a/Master/texmf-dist/tex/context/base/node-fin.lua
+++ b/Master/texmf-dist/tex/context/base/node-fin.lua
@@ -3,17 +3,18 @@ if not modules then modules = { } end modules ['node-fin'] = {
comment = "companion to node-fin.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
-- this module is being reconstructed
+-- local functions, only slightly slower
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local has_attribute = node.has_attribute
local copy_node = node.copy
+local find_tail = node.slide
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -28,13 +29,15 @@ local vlist_code = nodecodes.vlist
local pdfliteral_code = whatcodes.pdfliteral
-local states = attributes.states
-local numbers = attributes.numbers
-local trigger = attributes.private('trigger')
-local triggering = false
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
@@ -69,7 +72,7 @@ end
-- end
--
-- function states.check()
--- texio.write_nl(concat(collected,"\n"))
+-- logs.report("states",concat(collected,"\n"))
-- end
-- we used to do the main processor loop here and call processor for each node
@@ -77,97 +80,105 @@ end
-- so that we moved looping to the processor itself; this may lead to a bit of
-- duplicate code once that we have more state handlers
-local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer
- local namespace = plugin.namespace
- if namespace.enabled ~= false then -- this test will go away
+-- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer
+-- local namespace = plugin.namespace
+-- if namespace.enabled ~= false then -- this test will go away
+-- starttiming(attributes) -- in principle we could delegate this to the main caller
+-- local done, used, ok = false, nil, false
+-- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute
+-- local processor = plugin.processor
+-- if processor then
+-- local initializer = plugin.initializer
+-- local resolver = plugin.resolver
+-- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
+-- if initializer then
+-- initializer(namespace,attribute,head)
+-- end
+-- head, ok = processor(namespace,attribute,head,inheritance)
+-- if ok then
+-- local finalizer = plugin.finalizer
+-- if finalizer then
+-- head, ok, used = finalizer(namespace,attribute,head)
+-- if used then
+-- local flusher = plugin.flusher
+-- if flusher then
+-- head = flusher(namespace,attribute,head,used)
+-- end
+-- end
+-- end
+-- done = true
+-- end
+-- end
+-- stoptiming(attributes)
+-- return head, done
+-- else
+-- return head, false
+-- end
+-- end
+--
+-- function nodes.installattributehandler(plugin) -- we need to avoid this nested function
+-- return function(head)
+-- return process_attribute(head,plugin)
+-- end
+-- end
+
+-- An experiment: lean and mean functions. It is not really faster but
+-- with upcoming functionality it might make a difference, e.g. features
+-- like 'casing' and 'italics' can be called a lot so there it makes sense.
+
+nodes.plugindata = nil
+
+local template = [[
+local plugin = nodes.plugindata
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local namespace = plugin.namespace
+local attribute = namespace.attribute or attributes.numbers[plugin.name]
+local processor = plugin.processor
+local initializer = plugin.initializer
+local resolver = plugin.resolver
+local finalizer = plugin.finalizer
+local flusher = plugin.flusher
+if not processor then
+ return function(head)
+ return head, false
+ end
+elseif initializer or finalizer or resolver then
+ return function(head)
starttiming(attributes)
local done, used, ok = false, nil, false
- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute
- local processor = plugin.processor
- if processor then
- local initializer = plugin.initializer
- local resolver = plugin.resolver
- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
- if initializer then
- initializer(namespace,attribute,head)
- end
- head, ok = processor(namespace,attribute,head,inheritance)
- if ok then
- local finalizer = plugin.finalizer
- if finalizer then
- head, ok, used = finalizer(namespace,attribute,head)
- if used then
- local flusher = plugin.flusher
- if flusher then
- head = flusher(namespace,attribute,head,used)
- end
- end
+ local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
+ if initializer then
+ initializer(namespace,attribute,head)
+ end
+ head, ok = processor(namespace,attribute,head,inheritance)
+ if ok then
+ if finalizer then
+ head, ok, used = finalizer(namespace,attribute,head)
+ if used and flusher then
+ head = flusher(namespace,attribute,head,used)
end
- done = true
end
+ done = true
end
stoptiming(attributes)
return head, done
- else
- return head, false
end
-end
-
--- nodes.process_attribute = process_attribute
-
-function nodes.installattributehandler(plugin) -- we need to avoid this nested function
+else
return function(head)
- return process_attribute(head,plugin)
+ starttiming(attributes)
+ local head, done = processor(namespace,attribute,head)
+ stoptiming(attributes)
+ return head, done
end
end
+nodes.plugindata = nil
+]]
---~ experiment (maybe local to function makes more sense)
---~
---~ plugindata = { }
---~
---~ local template = [[
---~ local plugin = plugindata["%s"]
---~ local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
---~ local namespace = plugin.namespace
---~ local attribute = namespace.attribute
---~ local processor = plugin.processor
---~ local initializer = plugin.initializer
---~ local resolver = plugin.resolver
---~ local finalizer = plugin.finalizer
---~ local flusher = plugin.flusher
---~ return function (head)
---~ if namespace.enabled then
---~ starttiming(attributes)
---~ local done, used, ok = false, nil, false
---~ if procesxsor then
---~ local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
---~ if initializer then
---~ initializer(namespace,attribute,head)
---~ end
---~ head, ok = processor(namespace,attribute,head,inheritance)
---~ if ok then
---~ if finalizer then
---~ head, ok, used = finalizer(namespace,attribute,head)
---~ if used and flusher then
---~ head = flusher(namespace,attribute,head,used)
---~ end
---~ end
---~ done = true
---~ end
---~ end
---~ stoptiming(attributes)
---~ return head, done
---~ else
---~ return head, false
---~ end
---~ end
---~ ]]
---~
---~ function nodes.installattributehandler(plugin) -- we need to avoid this nested function
---~ plugindata[plugin.name] = plugin
---~ local str = format(template,plugin.name)
---~ return loadstring(str)()
---~ end
+function nodes.installattributehandler(plugin)
+ nodes.plugindata = plugin
+ return loadstripped(template)()
+end
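The old per-plugin closure is replaced by a string template that is compiled once per installed plugin, with the plugin handed over through nodes.plugindata. A hedged sketch (not part of the patch) of a consumer; the plugin name, attribute and processor here are invented for illustration:

-- sketch: a hypothetical plugin as consumed by the template above
local a_demo = attributes.private("demo")          -- hypothetical attribute

local plugin = {
    name      = "demo",
    namespace = { attribute = a_demo },
    processor = function(namespace,attribute,head,inheritance)
        -- walk head, act on nodes carrying the attribute, return head plus a done flag
        return head, false
    end,
}

nodes.handlers.demo = nodes.installattributehandler(plugin)
-- with no initializer/finalizer/resolver the compiled handler is the lean last branch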
-- the injectors
@@ -176,6 +187,7 @@ local insert_node_after = node.insert_after
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
+local nsbegin, nsend
function states.initialize(namespace,attribute,head)
nsdata = namespace.data
@@ -183,10 +195,17 @@ function states.initialize(namespace,attribute,head)
nsforced = namespace.forced
nsselector = namespace.selector
nslistwise = namespace.listwise
- nstrigger = triggering and namespace.triggering and trigger
+ nstrigger = triggering and namespace.triggering and a_trigger
current = 0
current_selector = 0
done = false -- todo: done cleanup
+ nsstep = namespace.resolve_step
+ if nsstep then
+ nsbegin = namespace.resolve_begin
+ nsend = namespace.resolve_end
+ nspush = namespace.push
+ nspop = namespace.pop
+ end
end
function states.finalize(namespace,attribute,head) -- is this one ok?
@@ -205,15 +224,351 @@ function states.finalize(namespace,attribute,head) -- is this one ok?
return head, false, false
end
+-- disc nodes can be ignored
+-- we need to deal with literals too (reset as well as oval)
+-- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- -- here ? compare selective
+-- if id == glue_code then --leader
+-- -- same as *list
+-- local content = stack.leader
+-- if content then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- current = savedcurrent
+-- done = done or ok
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- done = done or ok
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+
+-- local function check()
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- return c
+-- end
+
+-- local function nested(content)
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- return process(namespace,attribute,content,inheritance,outer)
+-- else
+-- return process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- return process(namespace,attribute,content,inheritance,default)
+-- end
+-- end
+
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+
+-- local ok = false
+-- stack.leader, ok = nested(content)
+-- done = done or ok
+
+-- current = savedcurrent
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+
+-- local ok = false
+-- stack.list, ok = nested(content)
+-- done = done or ok
+
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- -- begin special to this check
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- -- begin nested --
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- -- end nested --
+-- done = done or ok
+-- current = savedcurrent
+-- -- end special to this check
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- -- begin nested --
+-- local ok
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- -- end nested --
+-- done = done or ok
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
local function process(namespace,attribute,head,inheritance,default) -- one attribute
- local stack, done = head, false
+ local stack = head
+ local done = false
+ local check = false
+ local leader = nil
while stack do
local id = stack.id
- -- we need to deal with literals too (reset as well as oval)
- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
- if id == glyph_code -- or id == disc_code
- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
- local c = has_attribute(stack,attribute)
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = stack.list
+ if content then
+ -- begin nested --
+ local ok
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ -- end nested --
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = stack.width ~= 0
+ end
+ -- much faster this way than using a check() and nested() function
+ if check then
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
@@ -226,33 +581,31 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = c
done = true
end
- -- here ? compare selective
- if id == glue_code then --leader
- -- same as *list
- local content = stack.leader
- if content then
- local savedcurrent = current
- local ci = content.id
- if ci == hlist_code or ci == vlist_code then
- -- else we reset inside a box unneeded, okay, the downside is
- -- that we trigger color in each repeated box, so there is room
- -- for improvement here
- current = 0
- end
- local ok = false
- if nstrigger and has_attribute(stack,nstrigger) then
- local outer = has_attribute(stack,attribute)
- if outer ~= inheritance then
- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
- else
- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
- end
+ if leader then
+ local savedcurrent = current
+ local ci = leader.id
+ if ci == hlist_code or ci == vlist_code then
+ -- else we reset inside a box unneeded, okay, the downside is
+ -- that we trigger color in each repeated box, so there is room
+ -- for improvement here
+ current = 0
+ end
+ -- begin nested --
+ local ok = false
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
else
- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
- current = savedcurrent
- done = done or ok
+ else
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
+ -- end nested --
+ done = done or ok
+ current = savedcurrent
+ leader = false
end
elseif default and inheritance then
if current ~= default then
@@ -265,22 +618,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
done = true
end
- elseif id == hlist_code or id == vlist_code then
- local content = stack.list
- if content then
- local ok = false
- if nstrigger and has_attribute(stack,nstrigger) then
- local outer = has_attribute(stack,attribute)
- if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- done = done or ok
- end
+ check = false
end
stack = stack.next
end
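Compared with the commented-out versions above, the live loop only decides per node id whether anything needs attention (the check flag, plus a remembered leader), so the attribute comparison exists once per iteration instead of being repeated per branch. A stripped-down sketch (not part of the patch) of that control flow, using the same code locals:

-- sketch: the flattened walk shared by process/selective/stacked
local function walk(head, visit)
    local stack  = head
    local check  = false
    local leader = nil
    while stack do
        local id = stack.id
        if id == glyph_code then
            check = true
        elseif id == glue_code then
            leader = stack.leader
            check  = leader ~= nil
        elseif id == rule_code then
            check = stack.width ~= 0
        end
        if check then
            visit(stack, leader)   -- single shared attribute check
            leader = nil
            check  = false
        end
        stack = stack.next
    end
    return head
end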
@@ -295,88 +633,271 @@ states.process = process
-- state changes while the main state stays the same (like two glyphs following
-- each other with the same color but different color spaces e.g. \showcolor)
+-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- -- we need to deal with literals too (reset as well as oval)
+-- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- if id == glyph_code -- or id == disc_code
+-- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- else
+-- local s = stack[nsselector]
+-- if current ~= c or current_selector ~= s then
+-- local data = nsdata[c]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = c
+-- current_selector = s
+-- done = true
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current, current_selector, done = 0, 0, true
+-- end
+-- if id == glue_code then -- leader
+-- -- same as *list
+-- local content = stack.leader
+-- if content then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- current = savedcurrent
+-- done = done or ok
+-- end
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- done = done or ok
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+-- local stack, done = head, false
+
+-- local function check()
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- else
+-- local s = stack[nsselector]
+-- if current ~= c or current_selector ~= s then
+-- local data = nsdata[c]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = c
+-- current_selector = s
+-- done = true
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current, current_selector, done = 0, 0, true
+-- end
+-- return c
+-- end
+
+-- local function nested(content)
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- return selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- return selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- return selective(namespace,attribute,content,inheritance,default)
+-- end
+-- end
+
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- -- local savedcurrent = current
+-- -- local ci = content.id
+-- -- if ci == hlist_code or ci == vlist_code then
+-- -- -- else we reset inside a box unneeded, okay, the downside is
+-- -- -- that we trigger color in each repeated box, so there is room
+-- -- -- for improvement here
+-- -- current = 0
+-- -- end
+
+-- local ok = false
+-- stack.leader, ok = nested(content)
+-- done = done or ok
+
+-- -- current = savedcurrent
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+
+-- local ok = false
+-- stack.list, ok = nested(content)
+-- done = done or ok
+
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
local function selective(namespace,attribute,head,inheritance,default) -- two attributes
- local stack, done = head, false
+ local stack = head
+ local done = false
+ local check = false
+ local leader = nil
while stack do
local id = stack.id
- -- we need to deal with literals too (reset as well as oval)
- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
- if id == glyph_code -- or id == disc_code
- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
- local c = has_attribute(stack,attribute)
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = stack.list
+ if content then
+ local ok = false
+ -- begin nested
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ -- end nested
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = stack.width ~= 0
+ end
+
+ if check then
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or has_attribute(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = default
done = true
end
else
- local s = has_attribute(stack,nsselector)
+ local s = stack[nsselector]
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copy_node(data[nsforced or has_attribute(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = c
current_selector = s
done = true
end
end
- elseif default and inheritance then
- if current ~= default then
- local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or has_attribute(stack,nsselector) or nsselector]))
- current = default
- done = true
- end
- elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
- current, current_selector, done = 0, 0, true
- end
- if id == glue_code then -- leader
- -- same as *list
- local content = stack.leader
- if content then
- local savedcurrent = current
- local ci = content.id
- if ci == hlist_code or ci == vlist_code then
- -- else we reset inside a box unneeded, okay, the downside is
- -- that we trigger color in each repeated box, so there is room
- -- for improvement here
- current = 0
- end
+ if leader then
local ok = false
- if nstrigger and has_attribute(stack,nstrigger) then
- local outer = has_attribute(stack,attribute)
+ -- begin nested
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer)
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
else
- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
else
- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
- current = savedcurrent
+ -- end nested
done = done or ok
+ leader = false
end
- end
- elseif id == hlist_code or id == vlist_code then
- local content = stack.list
- if content then
- local ok = false
- if nstrigger and has_attribute(stack,nstrigger) then
- local outer = has_attribute(stack,attribute)
- if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ elseif default and inheritance then
+ if current ~= default then
+ local data = nsdata[default]
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ current = default
+ done = true
end
- done = done or ok
+ elseif current > 0 then
+ head = insert_node_before(head,stack,copy_node(nsnone))
+ current, current_selector, done = 0, 0, true
end
+ check = false
end
+
stack = stack.next
end
return head, done
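selective keys the injected literal on two attributes at once: the color attribute selects the per-color data table, and the selector (color model) attribute, possibly overridden by nsforced, picks the entry inside it. Restated compactly (not part of the patch, names as above):

-- sketch: how selective() picks the literal to inject
local function pick(c, n)
    local data     = nsdata[c]                             -- per-color table
    local selector = nsforced or n[nsselector] or nsselector
    return copy_node(data[selector])                       -- one literal per color model
end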
@@ -387,48 +908,109 @@ states.selective = selective
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
-- (as used in the stepper). In the stepper we cannot use the box branch as it involves
--- paragraph lines and then getsmixed up. A messy business (esp since we want to be
+-- paragraph lines and then gets mixed up. A messy business (esp since we want to be
-- efficient).
+--
+-- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not
+-- entirely trivial and a generic solution is nicer (compares to the exporter).
+
+-- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+-- local stack, done = head, false
+-- local current, depth = default or 0, 0
+--
+-- local function check()
+-- local a = stack[attribute]
+-- if a then
+-- if current ~= a then
+-- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+-- depth = depth + 1
+-- current, done = a, true
+-- end
+-- elseif default > 0 then
+-- --
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- depth = depth - 1
+-- current, done = 0, true
+-- end
+-- return a
+-- end
+--
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- local ok = false
+-- stack.leader, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- -- the problem is that broken lines gets the attribute which can be a later one
+-- if nslistwise then
+-- local a = stack[attribute]
+-- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
+-- local p = current
+-- current, done = a, true
+-- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+-- stack.list = stacked(namespace,attribute,content,current)
+-- head, stack = insert_node_after(head,stack,copy_node(nsnone))
+-- current = p
+-- else
+-- local ok = false
+-- stack.list, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- else
+-- local ok = false
+-- stack.list, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- while depth > 0 do
+-- head = insert_node_after(head,stack,copy_node(nsnone))
+-- depth = depth - 1
+-- end
+-- return head, done
+-- end
local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
- local stack, done = head, false
- local current, depth = default or 0, 0
+ local stack = head
+ local done = false
+ local current = default or 0
+ local depth = 0
+ local check = false
+ local leader = false
while stack do
local id = stack.id
- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
- local c = has_attribute(stack,attribute)
- if c then
- if current ~= c then
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
- depth = depth + 1
- current, done = c, true
- end
- if id == glue_code then
- local content = stack.leader
- if content then -- unchecked
- local ok = false
- stack.leader, ok = stacked(namespace,attribute,content,current)
- done = done or ok
- end
- end
---~ elseif default then
- elseif default > 0 then
- --
- elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
- depth = depth - 1
- current, done = 0, true
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
end
elseif id == hlist_code or id == vlist_code then
local content = stack.list
if content then
-- the problem is that broken lines gets the attribute which can be a later one
if nslistwise then
- local c = has_attribute(stack,attribute)
- if c and current ~= c and nslistwise[c] then -- viewerlayer
+ local a = stack[attribute]
+ if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current, done = c, true
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+ current, done = a, true
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
stack.list = stacked(namespace,attribute,content,current)
head, stack = insert_node_after(head,stack,copy_node(nsnone))
current = p
@@ -443,18 +1025,196 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
done = done or ok
end
end
+ elseif id == rule_code then
+ check = stack.width ~= 0
end
+
+ if check then
+ local a = stack[attribute]
+ if a then
+ if current ~= a then
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ depth = depth + 1
+ current, done = a, true
+ end
+ if leader then
+ local ok = false
+                stack.leader, ok = stacked(namespace,attribute,leader,current) -- was content, which is out of scope here
+ done = done or ok
+ leader = false
+ end
+ elseif default > 0 then
+ --
+ elseif current > 0 then
+ head = insert_node_before(head,stack,copy_node(nsnone))
+ depth = depth - 1
+ current, done = 0, true
+ end
+ check = false
+ end
+
stack = stack.next
end
while depth > 0 do
head = insert_node_after(head,stack,copy_node(nsnone))
- depth = depth -1
+ depth = depth - 1
end
return head, done
end
states.stacked = stacked
+-- experimental
+
+-- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+-- nsbegin()
+-- local current, previous, done, okay = head, head, false, false
+-- local attrib = default or unsetvalue
+--
+-- local function check()
+-- local a = current[attribute] or unsetvalue
+-- if a ~= attrib then
+-- local n = nsstep(a)
+-- if n then
+-- -- !!!! TEST CODE !!!!
+-- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+-- head = insert_node_before(head,current,n) -- a
+-- end
+-- attrib, done, okay = a, true, true
+-- end
+-- return a
+-- end
+--
+-- while current do
+-- local id = current.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = current.leader
+-- if content and check() then
+-- -- tricky as a leader has to be a list so we cannot inject before
+-- local _, ok = stacker(namespace,attribute,content,attrib)
+-- done = done or ok
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = current.list
+-- if not content then
+-- -- skip
+-- elseif nslistwise then
+-- local a = current[attribute]
+-- if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+-- done = true
+-- head = insert_node_before(head,current,copy_node(nsdata[a]))
+-- current.list = stacker(namespace,attribute,content,a)
+-- head, current = insert_node_after(head,current,copy_node(nsnone))
+-- else
+-- local ok = false
+-- current.list, ok = stacker(namespace,attribute,content,attrib)
+-- done = done or ok
+-- end
+-- else
+-- local ok = false
+-- current.list, ok = stacker(namespace,attribute,content,default)
+-- done = done or ok
+-- end
+-- elseif id == rule_code then
+-- if current.width ~= 0 then
+-- check()
+-- end
+-- end
+-- previous = current
+-- current = current.next
+-- end
+-- if okay then
+-- local n = nsend()
+-- if n then
+-- -- !!!! TEST CODE !!!!
+-- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+-- head = insert_node_after(head,previous,n)
+-- end
+-- end
+-- return head, done
+-- end
+
+local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+ nsbegin()
+ local current = head
+ local previous = head
+ local done = false
+ local okay = false
+ local attrib = default or unsetvalue
+ local check = false
+ local leader = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = current.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = current.list
+ if not content then
+ -- skip
+ elseif nslistwise then
+ local a = current[attribute]
+ if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+ done = true
+ head = insert_node_before(head,current,copy_node(nsdata[a]))
+ current.list = stacker(namespace,attribute,content,a)
+ head, current = insert_node_after(head,current,copy_node(nsnone))
+ else
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,attrib)
+ done = done or ok
+ end
+ else
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,default)
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = current.width ~= 0
+ end
+
+ if check then
+ local a = current[attribute] or unsetvalue
+ if a ~= attrib then
+ local n = nsstep(a)
+ if n then
+ -- !!!! TEST CODE !!!!
+ -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+ head = insert_node_before(head,current,n) -- a
+ end
+ attrib, done, okay = a, true, true
+ if leader then
+ -- tricky as a leader has to be a list so we cannot inject before
+ local _, ok = stacker(namespace,attribute,leader,attrib)
+ done = done or ok
+ leader = false
+ end
+ end
+ check = false
+ end
+
+ previous = current
+ current = current.next
+ end
+ if okay then
+ local n = nsend()
+ if n then
+ -- !!!! TEST CODE !!!!
+ -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+ head = insert_node_after(head,previous,n)
+ end
+ end
+ return head, done
+end
+
+states.stacker = stacker
+
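The new stacker leans on a small resolver protocol that the namespace is expected to provide (resolve_begin/resolve_step/resolve_end, picked up in states.initialize above): nsstep gets the new attribute value and may return a node to inject, nsend may return a closing node. A hedged sketch (not part of the patch) of such a namespace; makeliteral is a hypothetical node factory:

-- sketch: the shape of the resolver protocol stacker expects
local function makeresolver(makeliteral)
    return {
        resolve_begin = function()
            -- reset whatever bookkeeping the stacker run needs
        end,
        resolve_step = function(a)       -- attribute value just encountered
            return makeliteral(a)        -- node (or nil) inserted before the current node
        end,
        resolve_end = function()
            return makeliteral(false)    -- node (or nil) appended after the last node
        end,
    }
end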
-- -- --
statistics.register("attribute processing time", function()
diff --git a/Master/texmf-dist/tex/context/base/node-fnt.lua b/Master/texmf-dist/tex/context/base/node-fnt.lua
index 66cd9916fe5..54359117e81 100644
--- a/Master/texmf-dist/tex/context/base/node-fnt.lua
+++ b/Master/texmf-dist/tex/context/base/node-fnt.lua
@@ -3,7 +3,7 @@ if not modules then modules = { } end modules ['node-fnt'] = {
comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
if not context then os.exit() end -- generic function in node-dum
@@ -24,7 +24,6 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
local traverse_id = node.traverse_id
-local has_attribute = node.has_attribute
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
local nodecodes = nodes.nodecodes
@@ -41,7 +40,7 @@ local setmetatableindex = table.setmetatableindex
-- potential speedup: check for subtype < 256 so that we can remove that test
-- elsewhere, danger: injected nodes will not be dealt with but that does not
--- happen often; we could consider processing sublists but that might need mor
+-- happen often; we could consider processing sublists but that might need more
-- checking later on; the current approach also permits variants
local run = 0
@@ -85,9 +84,10 @@ function handlers.characters(head)
report_fonts()
local n = head
while n do
- if n.id == glyph_code then
+ local id = n.id
+ if id == glyph_code then
local font = n.font
- local attr = has_attribute(n,0) or 0
+ local attr = n[0] or 0
report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
else
report_fonts("[%s]",nodecodes[n.id])
@@ -96,9 +96,9 @@ function handlers.characters(head)
end
end
for n in traverse_id(glyph_code,head) do
--- if n.subtype<256 then
+ -- if n.subtype<256 then -- all are 1
local font = n.font
- local attr = has_attribute(n,0) or 0 -- zero attribute is reserved for fonts in context
+ local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
if attr > 0 then
local used = attrfonts[font]
@@ -133,7 +133,7 @@ function handlers.characters(head)
prevfont = font
prevattr = attr
end
--- end
+ -- end
end
if trace_fontrun then
report_fonts()
@@ -141,8 +141,9 @@ function handlers.characters(head)
report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
report_fonts()
end
- -- we could combine these and just make the attribute nil
- if u == 1 then
+ if u == 0 then
+ -- skip
+ elseif u == 1 then
local font, processors = next(usedfonts)
local n = #processors
if n > 0 then
@@ -157,7 +158,7 @@ function handlers.characters(head)
end
end
end
- elseif u > 0 then
+ else
for font, processors in next, usedfonts do
local n = #processors
local h, d = processors[1](head,font,0)
@@ -172,7 +173,9 @@ function handlers.characters(head)
end
end
end
- if a == 1 then
+ if a == 0 then
+ -- skip
+ elseif a == 1 then
local font, dynamics = next(attrfonts)
for attribute, processors in next, dynamics do -- attr can switch in between
local n = #processors
@@ -191,7 +194,7 @@ function handlers.characters(head)
end
end
end
- elseif a > 0 then
+ else
for font, dynamics in next, attrfonts do
for attribute, processors in next, dynamics do -- attr can switch in between
local n = #processors
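The characters handler now dispatches on how many fonts (u) and dynamic font attributes (a) were actually seen, skipping the generic loops for the common zero and one cases. Roughly (not part of the patch, return convention as in the surrounding code):

-- sketch: the u-based dispatch over collected fonts
if u == 0 then
    -- no simple fonts in this run
elseif u == 1 then
    local font, processors = next(usedfonts)   -- only one entry, no loop needed
    for i=1,#processors do
        local h, d = processors[i](head,font,0)
        if h then head = h end
    end
else
    for font, processors in next, usedfonts do
        for i=1,#processors do
            local h, d = processors[i](head,font,0)
            if h then head = h end
        end
    end
end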
diff --git a/Master/texmf-dist/tex/context/base/node-ini.lua b/Master/texmf-dist/tex/context/base/node-ini.lua
index 0ed17a1b5ef..5a3986c3a4b 100644
--- a/Master/texmf-dist/tex/context/base/node-ini.lua
+++ b/Master/texmf-dist/tex/context/base/node-ini.lua
@@ -13,7 +13,6 @@ modules.</p>
-- this module is being reconstructed
-local utf = unicode.utf8
local next, type = next, type
local format, match, gsub = string.format, string.match, string.gsub
local concat, remove = table.concat, table.remove
@@ -73,25 +72,29 @@ nodes.handlers = nodes.handlers or { }
-- there will be more of this:
local skipcodes = allocate {
- [ 0] = "userskip",
- [ 1] = "lineskip",
- [ 2] = "baselineskip",
- [ 3] = "parskip",
- [ 4] = "abovedisplayskip",
- [ 5] = "belowdisplayskip",
- [ 6] = "abovedisplayshortskip",
- [ 7] = "belowdisplayshortskip",
- [ 8] = "leftskip",
- [ 9] = "rightskip",
- [10] = "topskip",
- [11] = "splittopskip",
- [12] = "tabskip",
- [13] = "spaceskip",
- [14] = "xspaceskip",
- [15] = "parfillskip",
- [16] = "thinmuskip",
- [17] = "medmuskip",
- [18] = "thickmuskip",
+ [ 0] = "userskip",
+ [ 1] = "lineskip",
+ [ 2] = "baselineskip",
+ [ 3] = "parskip",
+ [ 4] = "abovedisplayskip",
+ [ 5] = "belowdisplayskip",
+ [ 6] = "abovedisplayshortskip",
+ [ 7] = "belowdisplayshortskip",
+ [ 8] = "leftskip",
+ [ 9] = "rightskip",
+ [ 10] = "topskip",
+ [ 11] = "splittopskip",
+ [ 12] = "tabskip",
+ [ 13] = "spaceskip",
+ [ 14] = "xspaceskip",
+ [ 15] = "parfillskip",
+ [ 16] = "thinmuskip",
+ [ 17] = "medmuskip",
+ [ 18] = "thickmuskip",
+ [100] = "leaders",
+ [101] = "cleaders",
+ [102] = "xleaders",
+ [103] = "gleaders",
}
local penaltycodes = allocate { -- unfortunately not used
@@ -153,6 +156,20 @@ local fillcodes = allocate {
[4] = "filll",
}
+local margincodes = allocate {
+ [0] = "left",
+ [1] = "right",
+}
+
+local disccodes = allocate {
+ [0] = "discretionary", -- \discretionary
+ [1] = "explicit", -- \-
+ [2] = "automatic", -- following a -
+ [3] = "regular", -- simple
+ [4] = "first", -- hard first item
+ [5] = "second", -- hard second item
+}
+
local function simplified(t)
local r = { }
for k, v in next, t do
@@ -164,27 +181,31 @@ end
local nodecodes = simplified(node.types())
local whatcodes = simplified(node.whatsits())
-skipcodes = allocate(swapped(skipcodes, skipcodes ))
-noadcodes = allocate(swapped(noadcodes, noadcodes ))
-nodecodes = allocate(swapped(nodecodes, nodecodes ))
-whatcodes = allocate(swapped(whatcodes, whatcodes ))
-listcodes = allocate(swapped(listcodes, listcodes ))
-glyphcodes = allocate(swapped(glyphcodes, glyphcodes))
-kerncodes = allocate(swapped(kerncodes, kerncodes ))
-penaltycodes = allocate(swapped(penaltycodes, penaltycodes ))
-mathcodes = allocate(swapped(mathcodes, mathcodes ))
-fillcodes = allocate(swapped(fillcodes, fillcodes ))
-
-nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
+skipcodes = allocate(swapped(skipcodes,skipcodes))
+noadcodes = allocate(swapped(noadcodes,noadcodes))
+nodecodes = allocate(swapped(nodecodes,nodecodes))
+whatcodes = allocate(swapped(whatcodes,whatcodes))
+listcodes = allocate(swapped(listcodes,listcodes))
+glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
+kerncodes = allocate(swapped(kerncodes,kerncodes))
+penaltycodes = allocate(swapped(penaltycodes,penaltycodes))
+mathcodes = allocate(swapped(mathcodes,mathcodes))
+fillcodes = allocate(swapped(fillcodes,fillcodes))
+margincodes = allocate(swapped(margincodes,margincodes))
+disccodes = allocate(swapped(disccodes,disccodes))
+
+nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
nodes.noadcodes = noadcodes
nodes.nodecodes = nodecodes
-nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official
+nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official
nodes.listcodes = listcodes
nodes.glyphcodes = glyphcodes
nodes.kerncodes = kerncodes
nodes.penaltycodes = kerncodes
nodes.mathcodes = mathcodes
nodes.fillcodes = fillcodes
+nodes.margincodes = margincodes
+nodes.disccodes = disccodes nodes.discretionarycodes = disccodes
listcodes.row = listcodes.alignment
listcodes.column = listcodes.alignment
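swapped(t,t) turns each code table into a two-way lookup, so nodecodes.glyph gives the id number while nodecodes[id] still gives the name. A small sketch (not part of the patch; this reimplementation of swapped is only illustrative, ConTeXt has its own in its table utilities):

-- sketch: the effect of swapped(t,t) on the code tables
local function swapped(t, s)
    local n = { }
    if s then
        for k, v in next, s do n[k] = v end   -- keep number -> name
    end
    for k, v in next, t do n[v] = k end       -- add name -> number
    return n
end

local codes = { [0] = "userskip", [1] = "lineskip" }
codes = swapped(codes, codes)
-- codes[0] == "userskip" and codes.userskip == 0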
@@ -192,17 +213,24 @@ listcodes.column = listcodes.alignment
kerncodes.italiccorrection = kerncodes.userkern
kerncodes.kerning = kerncodes.fontkern
-nodes.codes = allocate {
+nodes.codes = allocate { -- mostly for listing
+ glue = skipcodes,
+ noad = noadcodes,
+ node = nodecodes,
hlist = listcodes,
vlist = listcodes,
glyph = glyphcodes,
- glue = skipcodes,
kern = kerncodes,
- whatsit = whatcodes,
+ penalty = penaltycodes,
math = mathnodes,
- noad = noadcodes,
+ fill = fillcodes,
+ margin = margincodes,
+ disc = disccodes,
+ whatsit = whatcodes,
}
+local report_codes = logs.reporter("nodes","codes")
+
function nodes.showcodes()
local t = { }
for name, codes in sortedhash(nodes.codes) do
@@ -216,7 +244,7 @@ function nodes.showcodes()
end
formatcolumns(t)
for k=1,#t do
- texio.write_nl(t[k])
+ report_codes (t[k])
end
end
@@ -253,19 +281,18 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glue_code = nodecodes.glue
---~ if t.id == glue_code then
---~ local s = t.spec
---~ print(t)
---~ print(s,s and s.writable)
---~ if s and s.writable then
---~ free_node(s)
---~ end
---~ t.spec = nil
---~ end
+-- if t.id == glue_code then
+-- local s = t.spec
+-- print(t)
+-- print(s,s and s.writable)
+-- if s and s.writable then
+-- free_node(s)
+-- end
+-- t.spec = nil
+-- end
local function remove(head, current, free_too)
local t = current
---~ print(t)
head, current = remove_node(head,current)
if t then
if free_too then
diff --git a/Master/texmf-dist/tex/context/base/node-ini.mkiv b/Master/texmf-dist/tex/context/base/node-ini.mkiv
index 79e02ff46e4..39d48a00a96 100644
--- a/Master/texmf-dist/tex/context/base/node-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-ini.mkiv
@@ -23,14 +23,14 @@
\registerctxluafile{node-aux}{1.001}
\registerctxluafile{node-tst}{1.001}
\registerctxluafile{node-tra}{1.001} % we might split it off (module)
+\registerctxluafile{node-snp}{1.001}
\registerctxluafile{node-tsk}{1.001}
\registerctxluafile{node-tex}{1.001}
\registerctxluafile{node-pro}{1.001}
-%registerctxluafile{node-shp}{1.001} % moved to node-fin.mkiv
\registerctxluafile{node-ser}{1.001}
\registerctxluafile{node-ext}{1.001}
-%registerctxluafile{node-inj}{1.001} % we might split it off
\registerctxluafile{node-acc}{1.001} % experimental
+%registerctxluafile{node-prp}{1.001} % makes no sense (yet)
\newcount\c_node_tracers_show_box % box number
diff --git a/Master/texmf-dist/tex/context/base/node-inj.lua b/Master/texmf-dist/tex/context/base/node-inj.lua
index 246aaade2c0..3e1687426f4 100644
--- a/Master/texmf-dist/tex/context/base/node-inj.lua
+++ b/Master/texmf-dist/tex/context/base/node-inj.lua
@@ -3,15 +3,16 @@ if not modules then modules = { } end modules ['node-inj'] = {
comment = "companion to node-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
-- This is very experimental (this will change when we have luatex > .50 and
-- a few pending thingies are available. Also, Idris needs to make a few more
-- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help.
+-- that can be of help. Some optimizations can go away when we have faster machines.
local next = next
+local utfchar = utf.char
local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
@@ -31,42 +32,27 @@ local nodepool = nodes.pool
local newkern = nodepool.kern
local traverse_id = node.traverse_id
-local unset_attribute = node.unset_attribute
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local copy_node = node.copy
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
-local markbase = attributes.private('markbase')
-local markmark = attributes.private('markmark')
-local markdone = attributes.private('markdone')
-local cursbase = attributes.private('cursbase')
-local curscurs = attributes.private('curscurs')
-local cursdone = attributes.private('cursdone')
-local kernpair = attributes.private('kernpair')
-local ligacomp = attributes.private('ligacomp')
-local fontkern = attributes.private('fontkern')
-
-if context then
-
- local kern = nodes.pool.register(newkern())
-
- set_attribute(kern,fontkern,1) -- we can have several, attributes are shared
-
- newkern = function(k)
- local c = copy_node(kern)
- c.kern = k
- return c
- end
-
-end
+local a_kernpair = attributes.private('kernpair')
+local a_ligacomp = attributes.private('ligacomp')
+local a_markbase = attributes.private('markbase')
+local a_markmark = attributes.private('markmark')
+local a_markdone = attributes.private('markdone')
+local a_cursbase = attributes.private('cursbase')
+local a_curscurs = attributes.private('curscurs')
+local a_cursdone = attributes.private('cursdone')
-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
-- that this code is not 100% okay but examples are needed to figure things out.
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
local cursives = { }
local marks = { }
local kerns = { }
@@ -82,8 +68,8 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- set_attribute(start,cursbase,bound)
- set_attribute(nxt,curscurs,bound)
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -92,14 +78,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = has_attribute(current,kernpair)
+ local bound = current[a_kernpair]
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- set_attribute(current,kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -111,7 +97,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- set_attribute(current,kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -121,27 +107,27 @@ end
function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = has_attribute(base,markbase) -- fails again we should pass it
-local index = 1
+ local bound = base[a_markbase] -- fails again we should pass it
+ local index = 1
if bound then
local mb = marks[bound]
if mb then
-- if not index then index = #mb + 1 end
-index = #mb + 1
+ index = #mb + 1
mb[index] = { dx, dy, rlmode }
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
+ start[a_markmark] = bound
+ start[a_markdone] = index
return dx, dy, bound
else
- report_injections("possible problem, U+%05X is base mark without data (id: %s)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
end
end
-- index = index or 1
index = index or 1
bound = #marks + 1
- set_attribute(base,markbase,bound)
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
marks[bound] = { [index] = { dx, dy, rlmode } }
return dx, dy, bound
end
@@ -154,44 +140,45 @@ local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
- local kp = has_attribute(n,kernpair)
- local mb = has_attribute(n,markbase)
- local mm = has_attribute(n,markmark)
- local md = has_attribute(n,markdone)
- local cb = has_attribute(n,cursbase)
- local cc = has_attribute(n,curscurs)
- report_injections("char U+%05X, font=%s",n.char,n.font)
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",char,n.font,char)
if kp then
local k = kerns[kp]
if k[3] then
- report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
else
- report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
end
end
if mb then
- report_injections(" markbase: bound=%s",mb)
+ report_injections(" markbase: bound %a",mb)
end
if mm then
local m = marks[mm]
if mb then
local m = m[mb]
if m then
- report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
else
- report_injections(" markmark: bound=%s, missing index",mm)
+ report_injections(" markmark: bound %a, missing index",mm)
end
else
m = m[1]
- report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m and m[1] or "?",m and m[2] or "?")
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
end
end
if cb then
- report_injections(" cursbase: bound=%s",cb)
+ report_injections(" cursbase: bound %a",cb)
end
if cc then
local c = cursives[cc]
- report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
end
end
end
@@ -225,7 +212,7 @@ function injections.handler(head,where,keep)
if tm then
mk[n] = tm[n.char]
end
- local k = has_attribute(n,kernpair)
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
@@ -274,9 +261,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = has_attribute(n,cursbase)
+ local n_cursbase = n[a_cursbase]
if p_cursbase then
- local n_curscurs = has_attribute(n,curscurs)
+ local n_curscurs = n[a_curscurs]
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -337,14 +324,14 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = has_attribute(p,markbase)
+ local p_markbase = p[a_markbase]
if p_markbase then
local mrks = marks[p_markbase]
local nofmarks = #mrks
for n in traverse_id(glyph_code,p.next) do
- local n_markmark = has_attribute(n,markmark)
+ local n_markmark = n[a_markmark]
if p_markbase == n_markmark then
- local index = has_attribute(n,markdone) or 1
+ local index = n[a_markdone] or 1
local d = mrks[index]
if d then
local rlmode = d[3]
@@ -352,16 +339,13 @@ function injections.handler(head,where,keep)
-- new per 2010-10-06, width adapted per 2010-02-03
-- we used to negate the width of marks because in tfm
-- that makes sense but we no longer do that so as a
- -- consequence the sign of p.width was changed (we need
- -- to keep an eye on it as we don't have that many fonts
- -- that enter this branch .. I'm still not sure if this
- -- one is right
+ -- consequence the sign of p.width was changed
local k = wx[p]
if k then
- n.xoffset = p.xoffset + p.width + d[1] - k[2]
+ -- brill roman: A\char"0300 (but ugly anyway)
+ n.xoffset = p.xoffset - p.width + d[1] - k[2] -- was + p.width
else
- -- n.xoffset = p.xoffset + p.width + d[1]
- -- lucida U\char"032F (default+mark)
+ -- lucida: U\char"032F (default+mark)
n.xoffset = p.xoffset - p.width + d[1] -- 01-05-2011
end
else
@@ -450,7 +434,7 @@ function injections.handler(head,where,keep)
end
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
- local k = has_attribute(n,kernpair)
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
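
The bulk of this file's change, repeated all through the patch, is the move away
from node.has_attribute/node.set_attribute towards indexing node userdata directly
with an (a_-prefixed) attribute number. A minimal sketch of the two equivalent
idioms, with an illustrative attribute name:

    local a_example = attributes.private("example") -- name only for illustration

    local function mark(n)      -- n is a node userdata
        -- old style: node.set_attribute(n,a_example,1)
        n[a_example] = 1
    end

    local function marked(n)
        -- old style: return node.has_attribute(n,a_example)
        return n[a_example]     -- nil when the attribute is not set
    end
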
diff --git a/Master/texmf-dist/tex/context/base/node-mig.lua b/Master/texmf-dist/tex/context/base/node-mig.lua
index 1384f6024d0..9fc35a04820 100644
--- a/Master/texmf-dist/tex/context/base/node-mig.lua
+++ b/Master/texmf-dist/tex/context/base/node-mig.lua
@@ -10,8 +10,6 @@ local format = string.format
local attributes, nodes, node = attributes, nodes, node
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
local remove_nodes = nodes.remove
local nodecodes = nodes.nodecodes
@@ -22,7 +20,7 @@ local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
local mark_code = nodecodes.mark
-local migrated = attributes.private("migrated")
+local a_migrated = attributes.private("migrated")
local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
@@ -70,15 +68,15 @@ function nodes.handlers.migrate(head,where)
local done = false
if head then
if trace_migrations then
- report_nodes("migration sweep '%s'",where)
+ report_nodes("migration sweep %a",where)
end
local current = head
while current do
local id = current.id
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not has_attribute(current,migrated) then
- set_attribute(current,migrated,1)
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
+ current[a_migrated] = 1
t_sweeps = t_sweeps + 1
local h = current.list
local first, last, ni, nm
@@ -92,7 +90,7 @@ function nodes.handlers.migrate(head,where)
if first then
t_inserts, t_marks = t_inserts + ni, t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
- report_nodes("sweep %s, container %s, %s inserts and %s marks migrated outwards during '%s'",
+ report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
-- inserts after head
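
The logging in this file (and in most files below) also switches from plain %s
slots to the %a and %p directives of the reporter format strings. A small sketch,
assuming %a serializes its argument (strings come out quoted) and %p renders a
number of scaled points as a dimension:

    local report_example = logs.reporter("nodes","example")

    -- assumed semantics of the directives used above:
    --   %a  'any' argument, serialized (strings show up quoted)
    --   %p  scaled points printed as a dimension
    report_example("sweep %a, container %a, width %p","page","vlist",65536)
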
diff --git a/Master/texmf-dist/tex/context/base/node-par.lua b/Master/texmf-dist/tex/context/base/node-par.lua
deleted file mode 100644
index 8eafd1f6e52..00000000000
--- a/Master/texmf-dist/tex/context/base/node-par.lua
+++ /dev/null
@@ -1,118 +0,0 @@
-if not modules then modules = { } end modules ['node-par'] = {
- version = 1.001,
- comment = "companion to node-par.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local builders, nodes, node = builders, nodes, node
-
-builders.paragraphs = builders.paragraphs or { }
-local parbuilders = builders.paragraphs
-
-parbuilders.constructors = parbuilders.constructors or { }
-local constructors = parbuilders.constructors
-
-constructors.names = constructors.names or { }
-local names = constructors.names
-
-constructors.numbers = constructors.numbers or { }
-local numbers = constructors.numbers
-
-constructors.methods = constructors.methods or { }
-local methods = constructors.methods
-
-local p_attribute = attributes.numbers['parbuilder'] or 999
-constructors.attribute = p_attribute
-
-local has_attribute = node.has_attribute
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-
-storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names")
-storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers")
-
-local report_parbuilders = logs.reporter("parbuilders")
-
-local mainconstructor = nil -- not stored in format
-
-function constructors.register(name,number)
- names[number] = name
- numbers[name] = number
-end
-
-function constructors.set(name)
- mainconstructor = numbers[name]
-end
-
--- return values:
---
--- true : tex will break itself
--- false : idem but dangerous
--- head : list of valid vmode nodes with last being hlist
-
-function constructors.handler(head,followed_by_display)
- if type(head) == "boolean" then
- return head
- else
- local attribute = has_attribute(head,p_attribute) or mainconstructor
- if attribute then
- local method = names[attribute]
- if method then
- local handler = methods[method]
- if handler then
- return handler(head,followed_by_display)
- else
- report_parbuilders("contructor method '%s' is not defined",tostring(method))
- return true -- let tex break
- end
- end
- end
- return true -- let tex break
- end
-end
-
--- just for testing
-
-function constructors.methods.default(head,followed_by_display)
- return true -- let tex break
-end
-
--- also for testing (no surrounding spacing done)
-
-function constructors.methods.oneline(head,followed_by_display)
- return node.hpack(head)
-end
-
--- It makes no sense to have a sequence here as we already have
--- pre and post hooks and only one parbuilder makes sense, so no:
---
--- local actions = nodes.tasks.actions("parbuilders")
---
--- yet (maybe some day).
---
--- todo: enable one as main
-
-local actions = constructors.handler
-local enabled = false
-
-function constructors.enable () enabled = true end
-function constructors.disable() enabled = false end
-
-local function processor(head,followed_by_display)
- if enabled then
- starttiming(parbuilders)
- local head = actions(head,followed_by_display)
- stoptiming(parbuilders)
- return head
- else
- return true -- let tex do the work
- end
-end
-
-callbacks.register('linebreak_filter', processor, "breaking paragraps into lines")
-
-statistics.register("linebreak processing time", function()
- return statistics.elapsedseconds(parbuilders)
-end)
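
The file removed here exposed a small plug-in interface for paragraph builders.
Purely as an illustration of that interface, a builder could have been hooked in
roughly as follows; the name "myoneliner" and the number 4 are made up:

    local constructors = builders.paragraphs.constructors

    constructors.register("myoneliner",4)  -- the number is only for illustration

    constructors.methods.myoneliner = function(head,followed_by_display)
        return node.hpack(head)            -- same idea as the oneline test method
    end

    constructors.set("myoneliner")         -- make it the main constructor
    constructors.enable()                  -- otherwise tex keeps breaking lines itself
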
diff --git a/Master/texmf-dist/tex/context/base/node-par.mkiv b/Master/texmf-dist/tex/context/base/node-par.mkiv
deleted file mode 100644
index 685167e8a90..00000000000
--- a/Master/texmf-dist/tex/context/base/node-par.mkiv
+++ /dev/null
@@ -1,82 +0,0 @@
-%D \module
-%D [ file=node-par,
-%D version=2008.09.30,
-%D title=\CONTEXT\ Node Macros,
-%D subtitle=Paragraph Building,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
-
-%D This is very experimental, undocumented, subjected to changes, etc. just as
-%D the underlying interfaces.
-
-% \enableparbuilders
-%
-% \startparbuilder[default]
-% \input tufte \par
-% \startparbuilder[oneline]
-% \input tufte \par
-% \stopparbuilder
-% \input tufte \par
-% \stopparbuilder
-
-\unprotect
-
-\registerctxluafile{node-par}{1.001}
-
-\definesystemattribute[parbuilder][public]
-
-% todo auto-enable
-
-% management (enable/disable) is global and will move to lua
-
-\installcorenamespace {parbuilder}
-
-\newcount\c_node_paragraphs_n_of_builders
-
-\let\m_node_paragraphs_current_builder\empty
-
-\unexpanded\def\defineparbuilder[#1]%
- {\global\advance\c_node_paragraphs_n_of_builders\plusone
- \ctxlua{builders.paragraphs.constructors.register("#1",\number\c_node_paragraphs_n_of_builders)}%
- \setxvalue{\??parbuilder#1}{\attribute\parbuilderattribute\number\c_node_paragraphs_n_of_builders}}
-
-\unexpanded\def\startparbuilder[#1]%
- {\edef\m_node_paragraphs_current_builder{\number\attribute\parbuilderattribute}%
- \globalpushmacro\m_node_paragraphs_current_builder
- \getvalue{\??parbuilder#1}\relax
- \node_paragraphs_builders_check}
-
-\unexpanded\def\stopparbuilder
- {\ifhmode\par\fi
- \globalpopmacro\m_node_paragraphs_current_builder
- \attribute\parbuilderattribute\m_node_paragraphs_current_builder\relax
- \node_paragraphs_builders_check}
-
-\unexpanded\def\setmainparbuilder[#1]%
- {\ctxlua{builders.paragraphs.constructors.set("#1")}}
-
-% no high level interface, after all implementing a linebreaker is not something that
-% the average user will do
-
-\defineparbuilder[default] % just for testing
-\defineparbuilder[oneline] % just for testing
-\defineparbuilder[basic] % just for testing
-
-\def\enableparbuilders {\ctxlua{builders.paragraphs.constructors.enable ()}}
-\def\disableparbuilders{\ctxlua{builders.paragraphs.constructors.disable()}}
-
-\def\node_paragraphs_builders_check % can be made more efficient as we don't want to do this too often
- {\ifcase\attribute\parbuilderattribute
- \disableparbuilders
- \else
- \enableparbuilders
- \fi}
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/node-pro.lua b/Master/texmf-dist/tex/context/base/node-pro.lua
index 73d22697e05..ab5b77f908c 100644
--- a/Master/texmf-dist/tex/context/base/node-pro.lua
+++ b/Master/texmf-dist/tex/context/base/node-pro.lua
@@ -6,7 +6,6 @@ if not modules then modules = { } end modules ['node-pro'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
local utfchar = utf.char
local format, concat = string.format, table.concat
@@ -57,9 +56,9 @@ local function tracer(what,state,head,groupcode,before,after,show)
end
n = n + 1
if show then
- report_nodes("%s %s: %s, group: %s, nodes: %s -> %s, string: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
else
- report_nodes("%s %s: %s, group: %s, nodes: %s -> %s",what,n,state,groupcode,before,after)
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
end
end
@@ -68,21 +67,21 @@ processors.tracer = tracer
processors.enabled = true -- this will become a proper state (like trackers)
function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
- local first, found = first_glyph(head)
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
- local head, done = actions(head,groupcode,size,packtype,direction)
+ local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
local after = nodes.count(head,true)
if done then
tracer("pre_linebreak","changed",head,groupcode,before,after,true)
else
tracer("pre_linebreak","unchanged",head,groupcode,before,after,true)
end
- return (done and head) or true
+ return done and head or true
else
- local head, done = actions(head,groupcode,size,packtype,direction)
- return (done and head) or true
+ local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
+ return done and head or true
end
elseif trace_callbacks then
local n = nodes.count(head,false)
@@ -95,7 +94,7 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- local first, found = first_glyph(head)
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -128,8 +127,8 @@ function nodes.fasthpack(...) -- todo: pass explicit arguments
return hp, b
end
-callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter,"all kind of horizontal manipulations (before par break)")
-callbacks.register('hpack_filter' , processors.hpack_filter,"all kind of horizontal manipulations")
+callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
+callbacks.register('hpack_filter' , processors.hpack_filter, "all kind of horizontal manipulations (before hbox creation)")
local actions = tasks.actions("finalizers") -- head, where
@@ -141,27 +140,20 @@ local actions = tasks.actions("finalizers") -- head, where
-- something weird here .. group mvl when making a vbox
function processors.post_linebreak_filter(head,groupcode)
---~ local first, found = first_glyph(head)
---~ if found then
- if trace_callbacks then
- local before = nodes.count(head,true)
- local head, done = actions(head,groupcode)
- local after = nodes.count(head,true)
- if done then
- tracer("post_linebreak","changed",head,groupcode,before,after,true)
- else
- tracer("post_linebreak","unchanged",head,groupcode,before,after,true)
- end
- return (done and head) or true
+ if trace_callbacks then
+ local before = nodes.count(head,true)
+ local head, done = actions(head,groupcode)
+ local after = nodes.count(head,true)
+ if done then
+ tracer("post_linebreak","changed",head,groupcode,before,after,true)
else
- local head, done = actions(head,groupcode)
- return (done and head) or true
+ tracer("post_linebreak","unchanged",head,groupcode,before,after,true)
end
---~ elseif trace_callbacks then
---~ local n = nodes.count(head,false)
---~ tracer("post_linebreak","no chars",head,groupcode,n,n)
---~ end
---~ return true
+ return (done and head) or true
+ else
+ local head, done = actions(head,groupcode)
+ return (done and head) or true
+ end
end
callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kind of horizontal manipulations (after par break)")
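
Worth noting in the rewritten filters above: the action chains return a head plus
a done flag, while the callback itself must hand back either the (possibly new)
head or true. A minimal, self-contained sketch of that convention; myaction merely
stands in for a chain built with nodes.tasks.actions:

    local function myaction(head,groupcode)
        -- a real chain may replace the head and report what it did
        return head, false
    end

    local function myfilter(head,groupcode)
        local head, done = myaction(head,groupcode)
        return done and head or true   -- new head when changed, true when untouched
    end
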
diff --git a/Master/texmf-dist/tex/context/base/node-ref.lua b/Master/texmf-dist/tex/context/base/node-ref.lua
index dc14102827f..09e066434a6 100644
--- a/Master/texmf-dist/tex/context/base/node-ref.lua
+++ b/Master/texmf-dist/tex/context/base/node-ref.lua
@@ -1,6 +1,6 @@
-if not modules then modules = { } end modules ['node-bck'] = {
+if not modules then modules = { } end modules ['node-ref'] = {
version = 1.001,
- comment = "companion to node-bck.mkiv",
+ comment = "companion to node-ref.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -68,29 +68,28 @@ local nodepool = nodes.pool
local new_kern = nodepool.kern
-local has_attribute = node.has_attribute
local traverse = node.traverse
local find_node_tail = node.tail or node.slide
local tosequence = nodes.tosequence
-local function dimensions(parent,start,stop)
- stop = stop and stop.next
- if parent then
- if stop then
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
- else
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
- end
- else
- if stop then
- return list_dimensions(start,stop)
- else
- return list_dimensions(start)
- end
- end
-end
-
---~ more compact
+-- local function dimensions(parent,start,stop)
+-- stop = stop and stop.next
+-- if parent then
+-- if stop then
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
+-- else
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
+-- end
+-- else
+-- if stop then
+-- return list_dimensions(start,stop)
+-- else
+-- return list_dimensions(start)
+-- end
+-- end
+-- end
+--
+-- -- more compact
local function dimensions(parent,start,stop)
if parent then
@@ -111,14 +110,14 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
if result and resolved then
if head == first then
if trace_backend then
- report_area("head: %04i %s %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
result.next = first
first.prev = result
return result, last
else
if trace_backend then
- report_area("middle: %04i %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
local prev = first.prev
if prev then
@@ -183,7 +182,7 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
-- todo: only when width is ok
if result and resolved then
if trace_backend then
- report_area("box: %04i %s %s: w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
end
if not first then
current.list = result
@@ -212,21 +211,12 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
txtdir = txtdir or "==="
while current do
local id = current.id
- local r = has_attribute(current,attribute)
- if id == whatsit_code then
- local subtype = current.subtype
- if subtype == localpar_code then
- pardir = current.dir
- elseif subtype == dir_code then
- txtdir = current.dir
- end
- elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
- --
- elseif id == hlist_code or id == vlist_code then
--- somehow reference is true so the following fails (second one not done) in
--- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
--- so let's wait till this fails again
--- if not reference and r and (not skip or r > skip) then -- > or ~=
+ if id == hlist_code or id == vlist_code then
+ local r = current[attribute]
+ -- somehow reference is true so the following fails (second one not done) in
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- so let's wait till this fails again
+ -- if not reference and r and (not skip or r > skip) then -- > or ~=
if r and (not skip or r > skip) then -- > or ~=
inject_list(id,current,r,make,stack,pardir,txtdir)
end
@@ -241,19 +231,31 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
if r then
done[r] = done[r] - 1
end
- elseif not r then
- -- just go on, can be kerns
- elseif not reference then
- reference, first, last, firstdir = r, current, current, txtdir
- elseif r == reference then
- last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
- if not skip or r > skip then -- maybe no > test
- head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
- reference, first, last, firstdir = nil, nil, nil, nil
+ elseif id == whatsit_code then
+ local subtype = current.subtype
+ if subtype == localpar_code then
+ pardir = current.dir
+ elseif subtype == dir_code then
+ txtdir = current.dir
end
+ elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ --
else
- reference, first, last, firstdir = r, current, current, txtdir
+ local r = current[attribute]
+ if not r then
+ -- just go on, can be kerns
+ elseif not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ last = current
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
+ end
+ else
+ reference, first, last, firstdir = r, current, current, txtdir
+ end
end
current = current.next
end
@@ -271,23 +273,29 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
local current = head
while current do
local id = current.id
- local r = has_attribute(current,attribute)
- if id == whatsit_code then
+ if id == hlist_code or id == vlist_code then
+ local r = current[attribute]
+ if r and not done[r] then
+ done[r] = true
+ inject_list(id,current,r,make,stack,pardir,txtdir)
+ end
+ local list = current.list
+ if list then
+ current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+ end
+ elseif id == whatsit_code then
local subtype = current.subtype
if subtype == localpar_code then
pardir = current.dir
elseif subtype == dir_code then
txtdir = current.dir
end
- elseif id == hlist_code or id == vlist_code then
+ else
+ local r = current[attribute]
if r and not done[r] then
done[r] = true
- inject_list(id,current,r,make,stack,pardir,txtdir)
+ head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
- current.list = inject_area(current.list,attribute,make,stack,done,current,pardir,txtdir)
- elseif r and not done[r] then
- done[r] = true
- head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
current = current.next
end
@@ -305,8 +313,8 @@ local new_kern = nodepool.kern
local set_attribute = node.set_attribute
local register_color = colors.register
-local a_colormodel = attributes.private('colormodel')
local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
local a_transparency = attributes.private('transparency')
local u_transparency = nil
local u_colors = { }
@@ -331,18 +339,18 @@ local function colorize(width,height,depth,n,reference,what)
end
if width == 0 then
-- probably a strut as placeholder
- report_area("%s %s has no horizontal dimensions: width=%s, height=%s, depth=%s",what,reference,width,height,depth)
+ report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"horizontal",width,height,depth)
width = 65536
end
if height + depth <= 0 then
- report_area("%s %s has no vertical dimensions: width=%s, height=%s, depth=%s",what,reference,n,width,height,depth)
+ report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"vertical",width,height,depth)
height = 65536/2
depth = height
end
local rule = new_rule(width,height,depth)
- set_attribute(rule,a_colormodel,1) -- gray color model
- set_attribute(rule,a_color,u_color)
- set_attribute(rule,a_transparency,u_transparency)
+ rule[a_colormodel] = 1 -- gray color model
+ rule[a_color] = u_color
+ rule[a_transparency] = u_transparency
if width < 0 then
local kern = new_kern(width)
rule.width = -width
@@ -395,7 +403,7 @@ local function makereference(width,height,depth,reference)
local sr = stack[reference]
if sr then
if trace_references then
- report_reference("resolving attribute %s",reference)
+ report_reference("resolving attribute %a",reference)
end
local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5]
if ht then
@@ -423,10 +431,10 @@ local function makereference(width,height,depth,reference)
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
- report_reference("unable to resolve annotation %s",reference)
+ report_reference("unable to resolve annotation %a",reference)
end
elseif trace_references then
- report_reference("unable to resolve attribute %s",reference)
+ report_reference("unable to resolve attribute %a",reference)
end
end
@@ -462,7 +470,7 @@ local function makedestination(width,height,depth,reference)
local sr = stack[reference]
if sr then
if trace_destinations then
- report_destination("resolving attribute %s",reference)
+ report_destination("resolving attribute %a",reference)
end
local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
if ht then
@@ -493,12 +501,12 @@ local function makedestination(width,height,depth,reference)
for n=1,#name do
local annot = nodeinjections.destination(width,height,depth,name[n],view)
if not result then
- result, current = annot, annot
+ result = annot
else
current.next = annot
annot.prev = current
- current = annot
end
+ current = find_node_tail(annot)
end
if result then
-- some internal error
@@ -508,7 +516,7 @@ local function makedestination(width,height,depth,reference)
if cleanupdestinations then stack[reference] = nil end
return result, resolved
elseif trace_destinations then
- report_destination("unable to resolve attribute %s",reference)
+ report_destination("unable to resolve attribute %a",reference)
end
end
@@ -544,6 +552,9 @@ function references.injectcurrentset(h,d) -- used inside doifelse
end
end
+commands.injectreference = references.inject
+commands.injectcurrentreference = references.injectcurrentset
+
--
local function checkboth(open,close)
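
One functional fix above: a destination injection may deliver a small node list
rather than a single node, so the chaining now advances to the tail of what was
appended. A sketch of that append step as a helper (the function name is made up):

    local find_node_tail = node.tail or node.slide

    -- append a possibly multi-node list 'annot' after 'current' and return the
    -- new tail, mirroring the corrected loop in makedestination
    local function appendlist(current,annot)
        current.next = annot
        annot.prev   = current
        return find_node_tail(annot)
    end
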
diff --git a/Master/texmf-dist/tex/context/base/node-res.lua b/Master/texmf-dist/tex/context/base/node-res.lua
index 4522bef98ea..768aac404e2 100644
--- a/Master/texmf-dist/tex/context/base/node-res.lua
+++ b/Master/texmf-dist/tex/context/base/node-res.lua
@@ -33,6 +33,8 @@ local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
+local allocate = utilities.storage.allocate
+
local reserved, nofreserved = { }, 0
local function register_node(n)
@@ -44,7 +46,9 @@ end
pool.register = register_node
function pool.cleanup(nofboxes) -- todo
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ end
local nl, nr = 0, nofreserved
for i=1,nofreserved do
local ri = reserved[i]
@@ -83,7 +87,6 @@ local glue = register_node(new_node("glue")) -- glue.spec = nil
local glue_spec = register_node(new_node("glue_spec"))
local glyph = register_node(new_node("glyph",0))
local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
-local rule = register_node(new_node("rule"))
local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
local special = register_node(new_node("whatsit",whatsitcodes.special))
local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
@@ -99,6 +102,12 @@ local rightskip = register_node(new_node("glue",skipcodes.rightskip))
local temp = register_node(new_node("temp",0))
local noad = register_node(new_node("noad"))
+-- the dir field needs to be set otherwise crash:
+
+local rule = register_node(new_node("rule")) rule .dir = "TLT"
+local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
+local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
+
function pool.zeroglue(n)
local s = n.spec
return not writable or (
@@ -223,7 +232,7 @@ function pool.textdir(dir)
return t
end
-function pool.rule(width,height,depth,dir)
+function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
local n = copy_node(rule)
if width then n.width = width end
if height then n.height = height end
@@ -251,7 +260,7 @@ function pool.leftmarginkern(glyph,width)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
elseif glyph.id ~= glyph_code then
- report_nodes("invalid node type %s for left margin glyph node",nodecodes[glyph])
+ report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
n.glyph = glyph
end
@@ -266,7 +275,7 @@ function pool.rightmarginkern(glyph,width)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
elseif glyph.id ~= glyph_code then
- report_nodes("invalid node type %s for right margin glyph node",nodecodes[p])
+ report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
n.glyph = glyph
end
@@ -284,6 +293,14 @@ function pool.noad()
return copy_node(noad)
end
+function pool.hlist()
+ return copy_node(hlist)
+end
+
+function pool.vlist()
+ return copy_node(vlist)
+end
+
--[[
<p>At some point we ran into a problem that the glue specification
of the zeropoint dimension was overwritten when adapting a glue spec
@@ -312,7 +329,7 @@ end
-- local num = userids["my id"]
-- local str = userids[num]
-local userids = utilities.storage.allocate() pool.userids = userids
+local userids = allocate() pool.userids = userids
local lastid = 0
setmetatable(userids, {
@@ -345,7 +362,7 @@ end
function pool.userlist(id,list)
local n = copy_node(user_l)
if list then
- n.user_id, n.value =id, list
+ n.user_id, n.value = id, list
else
n.value = id
end
@@ -355,7 +372,7 @@ end
function pool.userstring(id,str)
local n = copy_node(user_s)
if str then
- n.user_id, n.value =id, str
+ n.user_id, n.value = id, str
else
n.value = id
end
@@ -365,7 +382,7 @@ end
function pool.usertokens(id,tokens)
local n = copy_node(user_t)
if tokens then
- n.user_id, n.value =id, tokens
+ n.user_id, n.value = id, tokens
else
n.value = id
end
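
The pool in this file keeps registered prototype nodes that are copied on demand;
the new hlist and vlist prototypes get their dir field preset because copying one
without it crashes (per the comment above). A small usage sketch with made-up
dimensions:

    local nodepool = nodes.pool

    local r = nodepool.rule(65536)   -- 1pt wide, height/depth left nil so they adapt
    local h = nodepool.hlist()       -- fresh copy of the prototype, dir already "TLT"

    h.list   = r
    h.width  = 65536
    h.height = 65536
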
diff --git a/Master/texmf-dist/tex/context/base/node-rul.lua b/Master/texmf-dist/tex/context/base/node-rul.lua
index 7f49edffc79..09300964ed3 100644
--- a/Master/texmf-dist/tex/context/base/node-rul.lua
+++ b/Master/texmf-dist/tex/context/base/node-rul.lua
@@ -43,12 +43,12 @@ function nodes.striprange(first,last) -- todo: dir
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
-local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
-if prev then
- last = last.prev
-else
- break
-end
+ local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
+ if prev then
+ last = last.prev
+ else
+ break
+ end
end
end
if not last then
@@ -62,59 +62,57 @@ end
local floor = math.floor
-local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end)
-
-local report_ruled = logs.reporter("nodes","rules")
+local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end)
+local report_ruled = logs.reporter("nodes","rules")
-local n_tostring = nodes.idstostring
-local n_tosequence = nodes.tosequence
+local n_tostring = nodes.idstostring
+local n_tosequence = nodes.tosequence
-local a_ruled = attributes.private('ruled')
-local a_color = attributes.private('color')
-local a_transparency = attributes.private('transparency')
-local a_colorspace = attributes.private('colormodel')
+local a_ruled = attributes.private('ruled')
+local a_color = attributes.private('color')
+local a_transparency = attributes.private('transparency')
+local a_colorspace = attributes.private('colormodel')
-local insert_before = node.insert_before
-local insert_after = node.insert_after
-local striprange = nodes.striprange
-local list_dimensions = node.dimensions
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local striprange = nodes.striprange
+local list_dimensions = node.dimensions
-local hpack_nodes = node.hpack
+local hpack_nodes = node.hpack
-local fontdata = fonts.hashes.identifiers
-local variables = interfaces.variables
-local dimenfactor = fonts.helpers.dimenfactor
+local fontdata = fonts.hashes.identifiers
+local variables = interfaces.variables
+local dimenfactor = fonts.helpers.dimenfactor
+local splitdimen = number.splitdimen
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local whatcodes = nodes.whatcodes
-local kerncodes = nodes.kerncodes
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
+local kerncodes = nodes.kerncodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local kern_code = nodecodes.kern
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
-local userskip_code = skipcodes.userskip
-local spaceskip_code = skipcodes.spaceskip
-local xspaceskip_code = skipcodes.xspaceskip
+local userskip_code = skipcodes.userskip
+local spaceskip_code = skipcodes.spaceskip
+local xspaceskip_code = skipcodes.xspaceskip
-local dir_code = whatcodes.dir
+local dir_code = whatcodes.dir
-local kerning_code = kerncodes.kern
+local kerning_code = kerncodes.kern
-local nodepool = nodes.pool
+local nodepool = nodes.pool
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-local new_glue = nodepool.glue
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
-- we can use this one elsewhere too
--
@@ -142,7 +140,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
while n do
local id = n.id
if id == glyph_code or id == rule_code then
- local aa = has_attribute(n,attribute)
+ local aa = n[attribute]
if aa then
if aa == a then
if not f then -- ?
@@ -195,7 +193,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
local subtype = n.subtype
- if continue and has_attribute(n,attribute) and
+ if continue and n[attribute] and
(subtype == userskip_code or subtype == spaceskip_code or subskip == xspaceskip_code) then
l = n
else
@@ -249,7 +247,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local before = n_tosequence(f,l,true)
f, l = striprange(f,l)
local after = n_tosequence(f,l,true)
- report_ruled("range stripper: %s -> %s",before,after)
+ report_ruled("range stripper, before %a, after %a",before,after)
else
f, l = striprange(f,l)
end
@@ -258,53 +256,69 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
return head
end
local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
- local method, offset, continue, dy, rulethickness, unit, order, max, ma, ca, ta =
- d.method, d.offset, d.continue, d.dy, d.rulethickness, d.unit, d.order, d.max, d.ma, d.ca, d.ta
- local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
- local colorspace = (ma > 0 and ma) or has_attribute(f,a_colorspace) or 1
- local color = (ca > 0 and ca) or has_attribute(f,a_color)
- local transparency = (ta > 0 and ta) or has_attribute(f,a_transparency)
+ local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
+ local rulethickness, unit = d.rulethickness, d.unit
+ local ma, ca, ta = d.ma, d.ca, d.ta
+ local colorspace = (ma > 0 and ma) or f[a_colorspace] or 1
+ local color = (ca > 0 and ca) or f[a_color]
+ local transparency = (ta > 0 and ta) or f[a_transparency]
local foreground = order == variables.foreground
- rulethickness= rulethickness/2
+
+ local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
+
+ local rt = tonumber(rulethickness)
+ if rt then
+ rulethickness = e * rulethickness / 2
+ else
+ local n, u = splitdimen(rulethickness)
+ if n and u then -- we need to intercept ex and em and % and ...
+ rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
+ else
+ rulethickness = 1/5
+ end
+ end
+
if level > max then
level = max
end
if method == 0 then -- center
offset = 2*offset
- m = (offset+(level-1)*dy+rulethickness)*e/2
+-- m = (offset+(level-1)*dy+rulethickness)*e/2
+ m = (offset+(level-1)*dy)*e/2 + rulethickness/2
else
m = 0
end
for i=1,level do
- local ht = (offset+(i-1)*dy+rulethickness)*e - m
- local dp = -(offset+(i-1)*dy-rulethickness)*e + m
+-- local ht = (offset+(i-1)*dy+rulethickness)*e - m
+-- local dp = -(offset+(i-1)*dy-rulethickness)*e + m
+ local ht = (offset+(i-1)*dy)*e + rulethickness - m
+ local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = has_attribute(f,a_viewerlayer)
+ local v = f[a_viewerlayer]
-- quick hack
if v then
- set_attribute(r,a_viewerlayer,v)
+ r[a_viewerlayer] = v
end
--
if color then
- set_attribute(r,a_colorspace,colorspace)
- set_attribute(r,a_color,color)
+ r[a_colorspace] = colorspace
+ r[a_color] = color
end
if transparency then
- set_attribute(r,a_transparency,transparency)
+ r[a_transparency] = transparency
end
local k = new_kern(-w)
if foreground then
- insert_after(head,l,k)
- insert_after(head,k,r)
+ insert_node_after(head,l,k)
+ insert_node_after(head,k,r)
l = r
else
- head = insert_before(head,f,r)
- insert_after(head,r,k)
+ head = insert_node_before(head,f,r)
+ insert_node_after(head,r,k)
end
if trace_ruled then
- report_ruled("level: %s, width: %i, height: %i, depth: %i, nodes: %s, text: %s",
+ report_ruled("level %a, width %p, height %p, depth %p, nodes %a, text %a",
level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true))
- -- level,r.width,r.height,r.depth,n_tostring(f,l),n_tosequence(f,l,true))
end
end
return head
@@ -361,7 +375,7 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
list.shift, list.height, list.depth = raise, height, depth
if trace_shifted then
- report_shifted("width: %s, nodes: %s, text: %s",width,n_tostring(first,last),n_tosequence(first,last,true))
+ report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
return head
end
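
New in this file is that rulethickness may now be a bare number (scaled by the
dimenfactor of the configured unit) or a dimension with its own unit, split with
number.splitdimen. The same resolution pulled out as a helper; the function name
is made up and tfmdata stands for a font table from fonts.hashes.identifiers:

    local splitdimen  = number.splitdimen
    local dimenfactor = fonts.helpers.dimenfactor

    local function resolvedthickness(rulethickness,unit,tfmdata)
        local n = tonumber(rulethickness)
        if n then
            return n * dimenfactor(unit,tfmdata) / 2   -- bare number: use the unit
        end
        local v, u = splitdimen(rulethickness)         -- e.g. "10pt" -> 10, "pt"
        if v and u then
            return v * dimenfactor(u,tfmdata) / 2      -- explicit unit wins
        end
        return 1/5                                     -- same fallback as above
    end
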
diff --git a/Master/texmf-dist/tex/context/base/node-rul.mkiv b/Master/texmf-dist/tex/context/base/node-rul.mkiv
index 1f1ac20ec01..54a43a006a8 100644
--- a/Master/texmf-dist/tex/context/base/node-rul.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-rul.mkiv
@@ -21,15 +21,14 @@
%D overstrike,overstrikes,
%D setupbar}
%D
-%D In the rare case that we need undelined words, for instance
-%D because all font alternatives are already in use, one can
-%D use \type{\underbar} and \type{\overstrike} and their plural
-%D forms.
+%D In the rare case that we need underlined words, for instance because all font
+%D alternatives are already in use, one can use \type {\underbar} and \type
+%D {\overstrike} and their plural forms.
%D
%D \startbuffer
-%D \underbars{drawing \underbar{bars} under words is a typewriter leftover}
-%D \overstrikes{striking words makes them \overstrike{unreadable} but
-%D sometimes even \overbar{top lines} come into view.}
+%D \underbars {drawing \underbar{bars} under words is a typewriter leftover}
+%D \overstrikes {striking words makes them \overstrike {unreadable} but
+%D sometimes even \overbar {top lines} come into view.}
%D \stopbuffer
%D
%D \typebuffer
@@ -38,9 +37,9 @@
%D \getbuffer
%D \stoplines
%D
-%D The next macros are derived from the \PLAIN\ \TEX\ one, but
-%D also supports nesting. The \type{$} keeps us in horizontal
-%D mode and at the same time applies grouping.
+%D The next macros are derived from the \PLAIN\ \TEX\ one, but also support
+%D nesting. The \type{$} keeps us in horizontal mode and at the same time
+%D applies grouping.
%D
%D \showsetup{underbar}
%D \showsetup{underbars}
@@ -51,9 +50,19 @@
%D
%D \showsetup{setupunderbar}
%D
-%D Nested bars can be configured by appending \type {:<index>} to the
-%D category.
-
+%D Nested bars can be configured by appending \type {:<index>} to the category.
+%D Normally the unit is combined with a unitless rulethickness specification,
+%D but a value carrying its own unit can be used there too.
+%D
+%D \startbuffer
+%D \setupbars[unit=mm,rulethickness=1] bar\startbar[underbar]foo\stopbar bar\blank
+%D \setupbars[unit=ex,rulethickness=1] bar\startbar[underbar]foo\stopbar bar\blank
+%D \setupbars[unit=pt,rulethickness=1] bar\startbar[underbar]foo\stopbar bar\blank
+%D \setupbars[unit=pt,rulethickness=10pt] bar\startbar[underbar]foo\stopbar bar
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+%D
%D As with many early usage of \LUA\ in \MKIV\ this mechanism explores a way
%D to deal with local settings at the \TEX\ end and remembering parameters
%D at the \LUA\ end. We might do things differently now, but as settings normally
@@ -107,7 +116,7 @@
offset = \barparameter\c!offset,
continue = "\barparameter\c!continue",
dy = \barparameter\c!dy,
- rulethickness = \barparameter\c!rulethickness,
+ rulethickness = "\barparameter\c!rulethickness",
unit = "\barparameter\c!unit",
order = "\barparameter\c!order",
max = \barparameter\c!max,
@@ -286,9 +295,6 @@
\c!style=,
\c!color=]
-\def\v!shiftup {shiftup}
-\def\v!shiftdown{shiftdown}
-
\defineshift [\v!shiftup] [\c!method=0,\c!dy=-1,\c!unit=ex,\c!continue=\v!yes,\c!style=\txx,\c!color=]
\defineshift [\v!shiftdown] [\c!method=1,\c!dy=.3,\c!unit=ex,\c!continue=\v!yes,\c!style=\txx,\c!color=]
diff --git a/Master/texmf-dist/tex/context/base/node-ser.lua b/Master/texmf-dist/tex/context/base/node-ser.lua
index 63690d00ade..b0a6e9952aa 100644
--- a/Master/texmf-dist/tex/context/base/node-ser.lua
+++ b/Master/texmf-dist/tex/context/base/node-ser.lua
@@ -10,15 +10,17 @@ if not modules then modules = { } end modules ['node-ser'] = {
-- of luatex; this is pretty old code that needs an overhaul
local type, format, rep = type, string.format, string.rep
-local concat, tohash, sortedkeys = table.concat, table.tohash, table.sortedkeys
+local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print
local allocate = utilities.storage.allocate
local nodes, node = nodes, node
local traverse = node.traverse
+local is_node = node.is_node
local nodecodes = nodes.nodecodes
+local noadcodes = nodes.noadcodes
local nodefields = nodes.fields
local hlist_code = nodecodes.hlist
@@ -39,6 +41,7 @@ local expand = allocate ( tohash {
"leader", -- leader_ptr
"action", -- action_ptr
"value", -- user_defined nodes with subtype 'a' en 'n'
+ "head",
} )
-- page_insert: "height", "last_ins_ptr", "best_ins_ptr"
@@ -69,7 +72,7 @@ nodes.ignorablefields = ignore
-- not ok yet:
-function nodes.astable(n,sparse) -- not yet ok
+local function astable(n,sparse) -- not yet ok
local f, t = nodefields(n), { }
for i=1,#f do
local v = f[i]
@@ -92,11 +95,15 @@ function nodes.astable(n,sparse) -- not yet ok
return t
end
+nodes.astable = astable
+
+setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end)
+
-- under construction:
-local function totable(n,flat,verbose) -- todo: no attributes
+local function totable(n,flat,verbose,noattributes)
-- todo: no local function
- local function to_table(n,flat,verbose)
+ local function to_table(n,flat,verbose,noattributes) -- no need to pass
local f = nodefields(n)
local tt = { }
for k=1,#f do
@@ -105,6 +112,8 @@ local function totable(n,flat,verbose) -- todo: no attributes
if nv then
if ignore[v] then
-- skip
+ elseif noattributes and v == "attr" then
+ -- skip
elseif expand[v] then
if type(nv) == "number" or type(nv) == "string" then
tt[v] = nv
@@ -128,14 +137,14 @@ local function totable(n,flat,verbose) -- todo: no attributes
local t, tn = { }, 0
while n do
tn = tn + 1
- t[tn] = to_table(n,flat,verbose)
+ t[tn] = to_table(n,flat,verbose,noattributes)
n = n.next
end
return t
else
local t = to_table(n)
if n.next then
- t.next = totable(n.next,flat,verbose)
+ t.next = totable(n.next,flat,verbose,noattributes)
end
return t
end
@@ -154,7 +163,7 @@ end
-- todo: adapt to nodecodes etc
-local function serialize(root,name,handle,depth,m)
+local function serialize(root,name,handle,depth,m,noattributes)
handle = handle or print
if depth then
depth = depth .. " "
@@ -188,6 +197,11 @@ local function serialize(root,name,handle,depth,m)
local k = fld[f]
if k == "ref_count" then
-- skip
+ elseif noattributes and k == "attr" then
+ -- skip
+ elseif k == "id" then
+ local v = root[k]
+ handle(format("%s id=%s,",depth,nodecodes[v] or noadcodes[v] or v))
elseif k then
local v = root[k]
local t = type(v)
@@ -206,12 +220,12 @@ local function serialize(root,name,handle,depth,m)
elseif t == "boolean" then
handle(format("%s %s=%q,",depth,key(k),tostring(v)))
elseif v then -- userdata or table
- serialize(v,k,handle,depth,m+1)
+ serialize(v,k,handle,depth,m+1,noattributes)
end
end
end
if root['next'] then -- userdata or table
- serialize(root['next'],'next',handle,depth,m+1)
+ serialize(root['next'],'next',handle,depth,m+1,noattributes)
end
end
if m and m > 0 then
@@ -221,13 +235,13 @@ local function serialize(root,name,handle,depth,m)
end
end
-function nodes.serialize(root,name)
+function nodes.serialize(root,name,noattributes)
local t, n = { }, 0
local function flush(s)
n = n + 1
t[n] = s
end
- serialize(root, name, flush, nil, 0)
+ serialize(root,name,flush,nil,0,noattributes)
return concat(t,"\n")
end
@@ -263,7 +277,7 @@ end
function nodes.print(head,n)
while head do
local id = head.id
- texio.write_nl(rep(" ",n or 0) .. tostring(head))
+ logs.writer(string.formatters["%w%S"],n or 0,head)
if id == hlist_code or id == vlist_code then
nodes.print(head.list,(n or 0)+1)
end
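
Two additions in this serializer help when debugging: nodes.serialize now takes a
third noattributes argument that suppresses the attr fields, and the setinspector
hook makes the generic inspect() helper print a node as a table. A usage sketch,
assuming box register 0 has been filled:

    local b = tex.box[0]
    if b then
        print(nodes.serialize(b,"box0",true))   -- true: skip the attr fields
        -- inspect(b)                           -- now also prints a table view
    end
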
diff --git a/Master/texmf-dist/tex/context/base/node-shp.lua b/Master/texmf-dist/tex/context/base/node-shp.lua
index 6fad0f495fb..8f7a411a719 100644
--- a/Master/texmf-dist/tex/context/base/node-shp.lua
+++ b/Master/texmf-dist/tex/context/base/node-shp.lua
@@ -8,20 +8,27 @@ if not modules then modules = { } end modules ['node-shp'] = {
local nodes, node = nodes, node
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
+local next, type = next, type
+local format = string.format
+local concat, sortedpairs = table.concat, table.sortedpairs
+local setmetatableindex = table.setmetatableindex
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local disc_code = nodecodes.disc
-local mark_code = nodecodes.mark
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+local handlers = nodes.handlers
-local texbox = tex.box
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local mark_code = nodecodes.mark
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
-local free_node = node.free
-local remove_node = node.remove
+local texbox = tex.box
+
+local free_node = node.free
+local remove_node = node.remove
+local traverse_nodes = node.traverse
local function cleanup(head) -- rough
local start = head
@@ -50,21 +57,92 @@ directives.register("backend.cleanup", function()
tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
end)
-function nodes.handlers.cleanuppage(head)
+function handlers.cleanuppage(head)
-- about 10% of the nodes make no sense for the backend
return cleanup(head), true
end
local actions = tasks.actions("shipouts") -- no extra arguments
-function nodes.handlers.finalize(head) -- problem, attr loaded before node, todo ...
+function handlers.finalize(head) -- problem, attr loaded before node, todo ...
return actions(head)
end
---~ nodes.handlers.finalize = actions
+-- handlers.finalize = actions
-- interface
function commands.finalizebox(n)
actions(texbox[n])
end
+
+-- just in case we want to optimize lookups:
+
+local frequencies = { }
+
+nodes.tracers.frequencies = frequencies
+
+local data = { }
+local done = false
+
+setmetatableindex(data,function(t,k)
+ local v = { }
+ setmetatableindex(v,function(t,k)
+ local v = { }
+ t[k] = v
+ setmetatableindex(v,function(t,k)
+ t[k] = 0
+ return 0
+ end)
+ return v
+ end)
+ t[k] = v
+ return v
+end)
+
+local function count(head,data,subcategory)
+    -- no components, pre, post, replace .. can maybe become an option .. but
+    -- we use this for optimization so it makes sense to look at the
+ -- main node only
+ for n in traverse_nodes(head) do
+ local id = n.id
+ local dn = data[nodecodes[n.id]]
+ dn[subcategory] = dn[subcategory] + 1
+ if id == hlist_code or id == vlist_code then
+ count(n.list,data,subcategory)
+ end
+ end
+end
+
+local function register(category,subcategory)
+ return function(head)
+ done = true
+ count(head,data[category],subcategory)
+ return head, false
+ end
+end
+
+frequencies.register = register
+frequencies.filename = nil
+
+trackers.register("nodes.frequencies",function(v)
+ if type(v) == "string" then
+ frequencies.filename = v
+ end
+ handlers.frequencies_shipouts_before = register("shipouts", "begin")
+ handlers.frequencies_shipouts_after = register("shipouts", "end")
+ handlers.frequencies_processors_before = register("processors", "begin")
+ handlers.frequencies_processors_after = register("processors", "end")
+ tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before")
+ tasks.appendaction ("shipouts", "after", "nodes.handlers.frequencies_shipouts_after")
+ tasks.prependaction("processors", "before", "nodes.handlers.frequencies_processors_before")
+ tasks.appendaction ("processors", "after", "nodes.handlers.frequencies_processors_after")
+end)
+
+statistics.register("node frequencies", function()
+ if done then
+ local filename = frequencies.filename or (tex.jobname .. "-frequencies.lua")
+ io.savedata(filename,table.serialize(data,true))
+ return format("saved in %q",filename)
+ end
+end)
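
The frequencies tracer added here is switched on like any other tracker; once
enabled, counting passes are prepended and appended to the shipout and processor
tasks and the totals end up in <jobname>-frequencies.lua via the statistics hook.
Assuming the usual trackers.enable helper, from Lua that is just:

    trackers.enable("nodes.frequencies")   -- counts are saved at the end of the run
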
diff --git a/Master/texmf-dist/tex/context/base/node-snp.lua b/Master/texmf-dist/tex/context/base/node-snp.lua
new file mode 100644
index 00000000000..31c7771ac92
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/node-snp.lua
@@ -0,0 +1,66 @@
+if not modules then modules = { } end modules ['node-snp'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not nodes then
+ nodes = { } -- also loaded in mtx-timing
+end
+
+local snapshots = { }
+nodes.snapshots = snapshots
+
+local nodeusage = nodes.pool and nodes.pool.usage
+local clock = os.gettimeofday or os.clock -- should go in environment
+local lasttime = clock()
+local samples = { }
+
+local parameters = {
+ "cs_count",
+ "dyn_used",
+ "elapsed_time",
+ "luabytecode_bytes",
+ "luastate_bytes",
+ "max_buf_stack",
+ "obj_ptr",
+ "pdf_mem_ptr",
+ "pdf_mem_size",
+ "pdf_os_cntr",
+-- "pool_ptr", -- obsolete
+ "str_ptr",
+}
+
+function snapshots.takesample(comment)
+ if nodeusage then
+ local c = clock()
+ local t = {
+ elapsed_time = c - lasttime,
+ node_memory = nodeusage(),
+ comment = comment,
+ }
+ for i=1,#parameters do
+ local parameter = parameters[i]
+ local ps = status[parameter]
+ if ps then
+ t[parameter] = ps
+ end
+ end
+ samples[#samples+1] = t
+ lasttime = c
+ end
+end
+
+function snapshots.getsamples()
+ return samples -- one return value !
+end
+
+function snapshots.resetsamples()
+ samples = { }
+end
+
+function snapshots.getparameters()
+ return parameters
+end
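
A hedged usage sketch for the snapshot helpers defined above. It only does something inside a LuaTeX/ConTeXt run, because takesample relies on nodes.pool.usage and the global status table (both come from the surrounding framework, not from this file):

-- illustrative only: sample around some work and inspect what was recorded
nodes.snapshots.takesample("before work")
-- ... typesetting or other work happens here ...
nodes.snapshots.takesample("after work")

local samples = nodes.snapshots.getsamples()
for i=1,#samples do
    local s = samples[i]
    print(i, s.comment, s.elapsed_time, s.node_memory)
end

nodes.snapshots.resetsamples() -- start a fresh series

Each sample also carries whatever fields from the parameters list were present in the LuaTeX status table at sampling time.
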
diff --git a/Master/texmf-dist/tex/context/base/node-spl.lua b/Master/texmf-dist/tex/context/base/node-spl.lua
deleted file mode 100644
index 3b208e0e787..00000000000
--- a/Master/texmf-dist/tex/context/base/node-spl.lua
+++ /dev/null
@@ -1,619 +0,0 @@
-if not modules then modules = { } end modules ['node-spl'] = {
- version = 1.001,
- comment = "companion to node-spl.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module is dedicated to the oriental tex project and for
--- the moment is too experimental to be publicly supported.
---
--- We could cache solutions: say that we store the featureset and
--- all 'words' -> replacement ... so we create a large solution
--- database (per font)
---
--- This module can be optimized by using a dedicated dynamics handler
--- but I'll only do that when the rest of the code is stable.
---
--- Todo: bind setups to paragraph.
-
-local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove
-local next, tostring, tonumber = next, tostring, tonumber
-local utfchar = utf.char
-local random = math.random
-
-local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end)
-local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end)
-local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end)
-local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
-
-local report_solutions = logs.reporter("fonts","solutions")
-local report_splitters = logs.reporter("nodes","splitters")
-local report_optimizers = logs.reporter("nodes","optimizers")
-
-local nodes, node = nodes, node
-
-local variables = interfaces.variables
-
-local settings_to_array = utilities.parsers.settings_to_array
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local new_node = node.new
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local traverse_nodes = node.traverse
-local traverse_ids = node.traverse_id
-local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
-local hpack_nodes = node.hpack
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local repack_hlist = nodes.repackhlist
-
-local setnodecolor = nodes.tracers.colors.set
-
-local nodecodes = nodes.nodecodes
-local whatsitcodes = nodes.whatsitcodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local hlist_code = nodecodes.hlist
-local whatsit_code = nodecodes.whatsit
-
-local localpar_code = whatsitcodes.localpar
-local dir_code = whatsitcodes.dir
-local userdefined_code = whatsitcodes.userdefined
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-local usernodeids = nodepool.userids
-
-local new_textdir = nodepool.textdir
-local new_usernumber = nodepool.usernumber
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local process_characters = nodes.handlers.characters
-local inject_kerns = nodes.injections.handler
-local fontdata = fonts.hashes.identifiers
-local setfontdynamics = fonts.hashes.setdynamics
-local fontprocesses = fonts.hashes.processes
-
-local parbuilders = builders.paragraphs
-parbuilders.solutions = parbuilders.solutions or { }
-parbuilders.solutions.splitters = parbuilders.solutions.splitters or { }
-
-local splitters = parbuilders.solutions.splitters
-
-local preroll = true
-local variant = "normal"
-local split = attributes.private('splitter')
-local cache = { }
-local solutions = { } -- attribute sets
-local variants = { }
-local max_less = 0
-local max_more = 0
-local criterium = 0
-local randomseed = nil
-local optimize = nil -- set later
-
-function splitters.setup(setups)
- local method = settings_to_hash(setups.method or "")
- if method[variables.preroll] then
- preroll = true
- else
- preroll = false
- end
- for k, v in next, method do
- if variants[k] then
- optimize = variants[k]
- end
- end
- randomseed = tonumber(setups.randomseed)
- criterium = tonumber(setups.criterium) or criterium
-end
-
-local contextsetups = fonts.specifiers.contextsetups
-
-local function convert(featuresets,name,set,what)
- local list, numbers, nofnumbers = set[what], { }, 0
- if list then
- for i=1,#list do
- local feature = list[i]
- local fs = featuresets[feature]
- local fn = fs and fs.number
- if not fn then
- -- fall back on global features
- fs = contextsetups[feature]
- fn = fs and fs.number
- end
- if fn then
- nofnumbers = nofnumbers + 1
- numbers[nofnumbers] = fn
- if trace_goodies or trace_optimize then
- report_solutions("solution %s of '%s' uses feature '%s' with number %s",i,name,feature,fn)
- end
- else
- report_solutions("solution %s has an invalid feature reference '%s'",i,name,tostring(feature))
- end
- end
- return nofnumbers > 0 and numbers
- end
-end
-
-local function initialize(goodies)
- local solutions = goodies.solutions
- if solutions then
- local featuresets = goodies.featuresets
- local goodiesname = goodies.name
- if trace_goodies or trace_optimize then
- report_solutions("checking solutions in '%s'",goodiesname)
- end
- for name, set in next, solutions do
- set.less = convert(featuresets,name,set,"less")
- set.more = convert(featuresets,name,set,"more")
- end
- end
-end
-
-fonts.goodies.register("solutions",initialize)
-
-function splitters.define(name,parameters)
- local settings = settings_to_hash(parameters) -- todo: interfacing
- local goodies, solution, less, more = settings.goodies, settings.solution, settings.less, settings.more
- local less_set, more_set
- local l = less and settings_to_array(less)
- local m = more and settings_to_array(more)
- if goodies then
- goodies = fonts.goodies.load(goodies) -- also in tfmdata
- if goodies then
- local featuresets = goodies.featuresets
- local solution = solution and goodies.solutions[solution]
- if l and #l > 0 then
- less_set = convert(featuresets,name,settings,"less") -- take from settings
- else
- less_set = solution and solution.less -- take from goodies
- end
- if m and #m > 0 then
- more_set = convert(featuresets,name,settings,"more") -- take from settings
- else
- more_set = solution and solution.more -- take from goodies
- end
- end
- else
- if l then
- local n = #less_set
- for i=1,#l do
- local ss = contextsetups[l[i]]
- if ss then
- n = n + 1
- less_set[n] = ss.number
- end
- end
- end
- if m then
- local n = #more_set
- for i=1,#m do
- local ss = contextsetups[m[i]]
- if ss then
- n = n + 1
- more_set[n] = ss.number
- end
- end
- end
- end
- if trace_optimize then
- report_solutions("defining solutions '%s', less: '%s', more: '%s'",name,concat(less_set or {}," "),concat(more_set or {}," "))
- end
- local nofsolutions = #solutions + 1
- solutions[nofsolutions] = {
- solution = solution,
- less = less_set or { },
- more = more_set or { },
- settings = settings, -- for tracing
- }
- context(nofsolutions)
-end
-
-local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
-
-local splitter_one = usernodeids["splitters.one"]
-local splitter_two = usernodeids["splitters.two"]
-
-function splitters.split(head)
- -- quite fast
- local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
- cache, max_less, max_more = { }, 0, 0
- local function flush() -- we can move this
- local font = start.font
- local last = stop.next
- local list = last and copy_nodelist(start,last) or copy_nodelist(start)
- local n = #cache + 1
- local user_one = new_usernumber(splitter_one,n)
- local user_two = new_usernumber(splitter_two,n)
- head, start = insert_node_before(head,start,user_one)
- insert_node_after(head,stop,user_two)
- if rlmode == "TRT" or rlmode == "+TRT" then
- local dirnode = new_textdir("+TRT")
- list.prev = dirnode
- dirnode.next = list
- list = dirnode
- end
- local c = {
- original = list,
- attribute = attribute,
- direction = rlmode,
- font = font
- }
- if trace_split then
- report_splitters("cached %4i: font: %s, attribute: %s, word: %s, direction: %s", n,
- font, attribute, nodes.listtoutf(list,true), rlmode)
- end
- cache[n] = c
- local solution = solutions[attribute]
- local l, m = #solution.less, #solution.more
- if l > max_less then max_less = l end
- if m > max_more then max_more = m end
- start, stop, done = nil, nil, true
- end
- while current do
- local id = current.id
- if id == glyph_code and current.subtype < 256 then
- local a = has_attribute(current,split)
- if not a then
- start, stop = nil, nil
- elseif not start then
- start, stop, attribute = current, current, a
- elseif a ~= attribute then
- start, stop = nil, nil
- else
- stop = current
- end
- current = current.next
- elseif id == disc_code then
- start, stop, current = nil, nil, current.next
- elseif id == whatsit_code then
- if start then
- flush()
- end
- local subtype = current.subtype
- if subtype == dir_code or subtype == localpar_code then
- rlmode = current.dir
- end
- current = current.next
- else
- if start then
- flush()
- end
- current = current.next
- end
- end
- if start then
- flush()
- end
- nofparagraphs = nofparagraphs + 1
- nofwords = nofwords + #cache
- return head, done
-end
-
-local function collect_words(list)
- local words, w, word = { }, 0, nil
- for current in traverse_ids(whatsit_code,list) do
- if current.subtype == userdefined_code then
- local user_id = current.user_id
- if user_id == splitter_one then
- word = { current.value, current, current }
- w = w + 1
- words[w] = word
- elseif user_id == splitter_two then
- word[3] = current
- end
- end
- end
- return words -- check for empty (elsewhere)
-end
-
--- we could avoid a hpack but hpack is not that slow
-
-local function doit(word,list,best,width,badness,line,set,listdir)
- local changed = 0
- local n = word[1]
- local found = cache[n]
- if found then
- local original, attribute, direction = found.original, found.attribute, found.direction
- local solution = solutions[attribute]
- local features = solution and solution[set]
- if features then
- local featurenumber = features[best] -- not ok probably
- if featurenumber then
- noftries = noftries + 1
- local first = copy_nodelist(original)
- if not trace_colors then
- for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- set_attribute(n,0,featurenumber) -- this forces dynamics
- end
- elseif set == "less" then
- for n in traverse_nodes(first) do
- setnodecolor(n,"font:isol")
- set_attribute(n,0,featurenumber)
- end
- else
- for n in traverse_nodes(first) do
- setnodecolor(n,"font:medi")
- set_attribute(n,0,featurenumber)
- end
- end
- local font = found.font
- -- local dynamics = found.dynamics
- -- local shared = fontdata[font].shared
- -- if not dynamics then -- we cache this
- -- dynamics = shared.dynamics
- -- found.dynamics = dynamics
- -- end
- -- local processors = found[featurenumber]
- -- if not processors then -- we cache this too
- -- processors = fonts.handlers.otf.setdynamics(font,featurenumber)
- -- found[featurenumber] = processors
- -- end
- local setdynamics = setfontdynamics[font]
- if setdynamics then
- local processes = setdynamics(font,featurenumber)
- for i=1,#processes do -- often more than 1
- first = processes[i](first,font,featurenumber)
- end
- else
- report_solutions("fatal error, no dynamics for font %s",font)
- end
- first = inject_kerns(first)
- local h = word[2].next -- head of current word
- local t = word[3].prev -- tail of current word
- if first.id == whatsit_code then
- local temp = first
- first = first.next
- free_node(temp)
- end
- local last = find_node_tail(first)
- -- replace [u]h->t by [u]first->last
- local next, prev = t.next, h.prev
- prev.next, first.prev = first, prev
- if next then
- last.next, next.prev = next, last
- end
- -- check new pack
- local temp, b = repack_hlist(list,width,'exactly',listdir)
- if b > badness then
- if trace_optimize then
- report_optimizers("line %s, badness before: %s, after: %s, criterium: %s -> quit",line,badness,b,criterium)
- end
- -- remove last insert
- prev.next, h.prev = h, prev
- if next then
- t.next, next.prev = next, t
- else
- t.next = nil
- end
- last.next = nil
- free_nodelist(first)
- else
- if trace_optimize then
- report_optimizers("line %s, badness before: %s, after: %s, criterium: %s -> continue",line,badness,b,criterium)
- end
- -- free old h->t
- t.next = nil
- free_nodelist(h)
- changed, badness = changed + 1, b
- end
- if b <= criterium then
- return true, changed
- end
- end
- end
- end
- return false, changed
-end
-
--- We repeat some code but adding yet another layer of indirectness is not
--- making things better.
-
-variants[variables.normal] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- for i=1,#words do
- local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-variants[variables.reverse] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- for i=#words,1,-1 do
- local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-variants[variables.random] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- while #words > 0 do
- local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-optimize = variants.normal -- the default
-
-local function show_quality(current,what,line)
- local set = current.glue_set
- local sign = current.glue_sign
- local order = current.glue_order
- local amount = set * ((sign == 2 and -1) or 1)
- report_optimizers("line %s, %s, amount %s, set %s, sign %s (%s), order %s",line,what,amount,set,sign,how,order)
-end
-
-function splitters.optimize(head)
- local nc = #cache
- if nc > 0 then
- starttiming(splitters)
- local listdir = nil -- todo ! ! !
- if randomseed then
- math.setrandomseedi(randomseed)
- randomseed = nil
- end
- local line = 0
- local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
- tex.hbadness, tex.hfuzz = 10000, number.maxdimen
- if trace_optimize then
- report_optimizers("preroll: %s, variant: %s, preroll criterium: %s, cache size: %s",
- tostring(preroll),variant,criterium,nc)
- end
- for current in traverse_ids(hlist_code,head) do
- -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
- line = line + 1
- local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
- local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
- if badness > 0 then
- if sign == 0 then
- if trace_optimize then
- report_optimizers("line %s, badness %s, okay",line,badness)
- end
- else
- local set, max
- if sign == 1 then
- if trace_optimize then
- report_optimizers("line %s, badness %s, underfull, trying more",line,badness)
- end
- set, max = "more", max_more
- else
- if trace_optimize then
- report_optimizers("line %s, badness %s, overfull, trying less",line,badness)
- end
- set, max = "less", max_less
- end
- -- we can keep the best variants
- local lastbest, lastbadness = nil, badness
- if preroll then
- local bb, base
- for i=1,max do
- if base then
- free_nodelist(base)
- end
- base = copy_nodelist(list)
- local words = collect_words(base) -- beware: words is adapted
- for j=i,max do
- local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir)
- base = temp
- if trace_optimize then
- report_optimizers("line %s, alternative: %s.%s, changes: %s, badness %s",line,i,j,changes,b)
- end
- bb = b
- if b <= criterium then
- break
- end
- -- if done then
- -- break
- -- end
- end
- if bb and bb > criterium then -- needs checking
- if not lastbest then
- lastbest, lastbadness = i, bb
- elseif bb > lastbadness then
- lastbest, lastbadness = i, bb
- end
- else
- break
- end
- end
- free_nodelist(base)
- end
- local words = collect_words(list)
- for best=lastbest or 1,max do
- local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- current.list = temp
- if trace_optimize then
- report_optimizers("line %s, alternative: %s, changes: %s, badness %s",line,best,changes,b)
- end
- if done then
- if b <= criterium then -- was == 0
- protect_glyphs(list)
- break
- end
- end
- end
- end
- else
- if trace_optimize then
- report_optimizers("line %s, not bad enough",line)
- end
- end
- -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- current.list = hpack_nodes(current.list,width,'exactly',listdir)
- -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
- end
- for i=1,nc do
- local ci = cache[i]
- free_nodelist(ci.original)
- end
- cache = { }
- tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
- stoptiming(splitters)
- end
-end
-
-statistics.register("optimizer statistics", function()
- if nofwords > 0 then
- local elapsed = statistics.elapsedtime(splitters)
- local average = noftries/elapsed
- return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second",
- nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average)
- end
-end)
-
-function splitters.enable()
- tasks.enableaction("processors", "builders.paragraphs.solutions.splitters.split")
- tasks.enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-end
-
-function splitters.disable()
- tasks.disableaction("processors", "builders.paragraphs.solutions.splitters.split")
- tasks.disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-end
diff --git a/Master/texmf-dist/tex/context/base/node-tra.lua b/Master/texmf-dist/tex/context/base/node-tra.lua
index 70e4639b896..916b2143d03 100644
--- a/Master/texmf-dist/tex/context/base/node-tra.lua
+++ b/Master/texmf-dist/tex/context/base/node-tra.lua
@@ -12,354 +12,76 @@ might become a runtime module instead. This module will be cleaned up!</p>
--ldx]]--
local utfchar = utf.char
-local concat = table.concat
local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep
local lpegmatch = lpeg.match
-local write_nl = texio.write_nl
+local clock = os.gettimeofday or os.clock -- should go in environment
local report_nodes = logs.reporter("nodes","tracing")
-fonts = fonts or { }
nodes = nodes or { }
-local fonts, nodes, node, context = fonts, nodes, node, context
+local nodes, node, context = nodes, node, context
-nodes.tracers = nodes.tracers or { }
-local tracers = nodes.tracers
+local tracers = nodes.tracers or { }
+nodes.tracers = tracers
-nodes.tasks = nodes.tasks or { }
-local tasks = nodes.tasks
+local tasks = nodes.tasks or { }
+nodes.tasks = tasks
-nodes.handlers = nodes.handlers or { }
-local handlers = nodes.handlers
+local handlers = nodes.handlers or {}
+nodes.handlers = handlers
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
+local injections = nodes.injections or { }
+nodes.injections = injections
-tracers.characters = tracers.characters or { }
-tracers.steppers = tracers.steppers or { }
+local traverse_nodes = node.traverse
+local traverse_by_id = node.traverse_id
+local count_nodes = nodes.count
-local char_tracers = tracers.characters
-local step_tracers = tracers.steppers
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local skipcodes = nodes.skipcodes
+local fillcodes = nodes.fillcodes
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local free_node_list = node.flush_list
-local traverse_nodes = node.traverse
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local spec_code = nodecodes.glue_spec
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local skipcodes = nodes.skipcodes
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
+local nodepool = nodes.pool
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-
-local new_glyph = nodepool.glyph
-
-function char_tracers.collect(head,list,tag,n)
- local fontdata = fonts.hashes.identifiers
- n = n or 0
- local ok, fn = false, nil
- while head do
- local id = head.id
- if id == glyph_code then
- local f = head.font
- if f ~= fn then
- ok, fn = false, f
- end
- local c = head.char
- local i = fontdata[f].indices[c] or 0
- if not ok then
- ok = true
- n = n + 1
- list[n] = list[n] or { }
- list[n][tag] = { }
- end
- local l = list[n][tag]
- l[#l+1] = { c, f, i }
- elseif id == disc_code then
- -- skip
- else
- ok = false
- end
- head = head.next
- end
-end
-
-function char_tracers.equal(ta, tb)
- if #ta ~= #tb then
- return false
- else
- for i=1,#ta do
- local a, b = ta[i], tb[i]
- if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
- return false
- end
- end
- end
- return true
-end
-
-function char_tracers.string(t)
- local tt = { }
- for i=1,#t do
- tt[i] = utfchar(t[i][1])
- end
- return concat(tt,"")
-end
-
-function char_tracers.unicodes(t,decimal)
- local tt = { }
- for i=1,#t do
- local n = t[i][1]
- if n == 0 then
- tt[i] = "-"
- elseif decimal then
- tt[i] = n
- else
- tt[i] = format("U+%04X",n)
- end
- end
- return concat(tt," ")
-end
-
-function char_tracers.indices(t,decimal)
- local tt = { }
- for i=1,#t do
- local n = t[i][3]
- if n == 0 then
- tt[i] = "-"
- elseif decimal then
- tt[i] = n
- else
- tt[i] = format("U+%04X",n)
- end
- end
- return concat(tt," ")
-end
-
-function char_tracers.start()
- local npc = handlers.characters
- local list = { }
- function handlers.characters(head)
- local n = #list
- char_tracers.collect(head,list,'before',n)
- local h, d = npc(head)
- char_tracers.collect(head,list,'after',n)
- if #list > n then
- list[#list+1] = { }
- end
- return h, d
- end
- function char_tracers.stop()
- tracers.list['characters'] = list
- local variables = {
- ['title'] = 'ConTeXt Character Processing Information',
- ['color-background-one'] = lmx.get('color-background-yellow'),
- ['color-background-two'] = lmx.get('color-background-purple'),
- }
- lmx.show('context-characters.lmx',variables)
- handlers.characters = npc
- tasks.restart("processors", "characters")
- end
- tasks.restart("processors", "characters")
-end
-
-local stack = { }
-
-function tracers.start(tag)
- stack[#stack+1] = tag
- local tracer = tracers[tag]
- if tracer and tracer.start then
- tracer.start()
- end
-end
-function tracers.stop()
- local tracer = stack[#stack]
- if tracer and tracer.stop then
- tracer.stop()
- end
- stack[#stack] = nil
-end
-
--- experimental
-
-local collection, collecting, messages = { }, false, { }
-
-function step_tracers.start()
- collecting = true
-end
-
-function step_tracers.stop()
- collecting = false
-end
-
-function step_tracers.reset()
- for i=1,#collection do
- local c = collection[i]
- if c then
- free_node_list(c)
- end
- end
- collection, messages = { }, { }
-end
-
-function step_tracers.nofsteps()
- return context(#collection)
-end
-
-function step_tracers.glyphs(n,i)
- local c = collection[i]
- if c then
- tex.box[n] = hpack_node_list(copy_node_list(c))
- end
-end
-
-function step_tracers.features()
- -- we cannot use first_glyph here as it only finds characters with subtype < 256
- local fontdata = fonts.hashes.identifiers
- local f = collection[1]
- while f do
- if f.id == glyph_code then
- local tfmdata, t = fontdata[f.font], { }
- for feature, value in table.sortedhash(tfmdata.shared.features) do
- if feature == "number" or feature == "features" then
- -- private
- elseif type(value) == "boolean" then
- if value then
- t[#t+1] = format("%s=yes",feature)
- else
- -- skip
- end
- else
- t[#t+1] = format("%s=%s",feature,value)
- end
- end
- if #t > 0 then
- context(concat(t,", "))
- else
- context("no features")
- end
- return
- end
- f = f.next
- end
-end
-
-function tracers.fontchar(font,char)
- local fontchar = fonts.hashes.characters
- local n = new_glyph()
- n.font, n.char, n.subtype = font, char, 256
- context(n)
-end
-
-function step_tracers.codes(i,command)
- local fontdata = fonts.hashes.identifiers
- local c = collection[i]
- while c do
- local id = c.id
- if id == glyph_code then
- if command then
- local f, c = c.font,c.char
- local d = fontdata[f].descriptions
- local d = d and d[c]
- context[command](f,c,d and d.class or "")
- else
- context("[%s:U+%04X]",c.font,c.char)
- end
- elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
- context("[%s]",c.dir)
- else
- context("[%s]",nodecodes[id])
- end
- c = c.next
- end
-end
-
-function step_tracers.messages(i,command,split)
- local list = messages[i] -- or { "no messages" }
- if list then
- for i=1,#list do
- local l = list[i]
- if not command then
- context("(%s)",l)
- elseif split then
- local a, b = match(l,"^(.-)%s*:%s*(.*)$")
- context[command](a or l or "",b or "")
- else
- context[command](l)
- end
- end
- end
-end
-
--- hooks into the node list processor (see otf)
-
-function step_tracers.check(head)
- if collecting then
- step_tracers.reset()
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
- collection[1] = n
- end
-end
-
-function step_tracers.register(head)
- if collecting then
- local nc = #collection+1
- if messages[nc] then
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
- collection[nc] = n
- end
- end
-end
-
-function step_tracers.message(str,...)
- str = format(str,...)
- if collecting then
- local n = #collection + 1
- local m = messages[n]
- if not m then m = { } messages[n] = m end
- m[#m+1] = str
- end
- return str -- saves an intermediate var in the caller
-end
+local dimenfactors = number.dimenfactors
+local formatters = string.formatters
-- this will be reorganized:
function nodes.showlist(head, message)
if message then
- write_nl(message)
+ report_nodes(message)
end
for n in traverse_nodes(head) do
- write_nl(tostring(n))
+ report_nodes(tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
local t = { }
- for g in traverse_id(glyph_code,head) do
- t[#t+1] = format("U+%04X:%s",g.char,g.subtype)
+ for g in traverse_by_id(glyph_code,head) do
+ t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
end
if #t > 0 then
if message and message ~= "" then
- report_nodes("%s, %s glyphs: %s",message,#t,concat(t," "))
+ report_nodes("%s, %s glyphs: % t",message,#t,t)
else
- report_nodes("%s glyphs: %s",#t,concat(t," "))
+ report_nodes("%s glyphs: % t",#t,t)
end
end
return false
@@ -374,10 +96,12 @@ function nodes.handlers.checkforleaks(sparse)
end
node.flush_list(q)
for k, v in next, l do
- write_nl(format("%s * %s", v, k))
+ write_nl(formatters["%s * %s"](v,k))
end
end
+local f_sequence = formatters["U+%04X:%s"]
+
local function tosequence(start,stop,compact)
if start then
local t = { }
@@ -392,7 +116,7 @@ local function tosequence(start,stop,compact)
t[#t+1] = utfchar(c)
end
else
- t[#t+1] = format("U+%04X:%s",c,utfchar(c))
+ t[#t+1] = f_sequence(c,utfchar(c))
end
elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
t[#t+1] = "[" .. start.dir .. "]"
@@ -428,19 +152,7 @@ end
nodes.tosequence = tosequence
function nodes.report(t,done)
- if done then
- if status.output_active then
- report_nodes("output, changed, %s nodes",nodes.count(t))
- else
- write_nl("nodes","normal, changed, %s nodes",nodes.count(t))
- end
- else
- if status.output_active then
- report_nodes("output, unchanged, %s nodes",nodes.count(t))
- else
- write_nl("nodes","normal, unchanged, %s nodes",nodes.count(t))
- end
- end
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
end
function nodes.packlist(head)
@@ -461,9 +173,9 @@ function nodes.idstostring(head,tail)
last_n = last_n + 1
else
if last_n > 1 then
- t[#t+1] = format("[%s*%s]",last_n,nodecodes[last_id] or "?")
+ t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
else
- t[#t+1] = format("[%s]",nodecodes[last_id] or "?")
+ t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
end
last_id, last_n = id, 1
end
@@ -474,47 +186,47 @@ function nodes.idstostring(head,tail)
if not last_id then
t[#t+1] = "no nodes"
elseif last_n > 1 then
- t[#t+1] = format("[%s*%s]",last_n,nodecodes[last_id] or "?")
+ t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
else
- t[#t+1] = format("[%s]",nodecodes[last_id] or "?")
+ t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
end
return concat(t," ")
end
---~ function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
---~ local n = head
---~ while n.next do
---~ n = n.next
---~ end
---~ local t, last_id, last_n = { }, nil, 0
---~ while n do
---~ local id = n.id
---~ if not last_id then
---~ last_id, last_n = id, 1
---~ elseif last_id == id then
---~ last_n = last_n + 1
---~ else
---~ if last_n > 1 then
---~ t[#t+1] = format("[%s*%s]",last_n,nodecodes[last_id] or "?")
---~ else
---~ t[#t+1] = format("[%s]",nodecodes[last_id] or "?")
---~ end
---~ last_id, last_n = id, 1
---~ end
---~ if n == head then
---~ break
---~ end
---~ n = n.prev
---~ end
---~ if not last_id then
---~ t[#t+1] = "no nodes"
---~ elseif last_n > 1 then
---~ t[#t+1] = format("[%s*%s]",last_n,nodecodes[last_id] or "?")
---~ else
---~ t[#t+1] = format("[%s]",nodecodes[last_id] or "?")
---~ end
---~ return table.concat(table.reversed(t)," ")
---~ end
+-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
+-- local n = head
+-- while n.next do
+-- n = n.next
+-- end
+-- local t, last_id, last_n = { }, nil, 0
+-- while n do
+-- local id = n.id
+-- if not last_id then
+-- last_id, last_n = id, 1
+-- elseif last_id == id then
+-- last_n = last_n + 1
+-- else
+-- if last_n > 1 then
+-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+-- else
+-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+-- end
+-- last_id, last_n = id, 1
+-- end
+-- if n == head then
+-- break
+-- end
+-- n = n.prev
+-- end
+-- if not last_id then
+-- t[#t+1] = "no nodes"
+-- elseif last_n > 1 then
+-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+-- else
+-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+-- end
+-- return table.concat(table.reversed(t)," ")
+-- end
local function showsimplelist(h,depth,n)
while h do
@@ -539,7 +251,7 @@ end
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
-local function listtoutf(h,joiner,textonly)
+local function listtoutf(h,joiner,textonly,last)
local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
@@ -551,19 +263,23 @@ local function listtoutf(h,joiner,textonly)
end
elseif id == disc_code then
local pre, rep, pos = h.pre, h.replace, h.post
- w[#w+1] = format("[%s|%s|%s]",
+ w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or "",
mid and listtoutf(mid,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code and h.width > 0 then
+ if id == glue_code and h.spec and h.spec.width > 0 then
w[#w+1] = " "
end
else
w[#w+1] = "[-]"
end
- h = h.next
+ if h == last then
+ break
+ else
+ h = h.next
+ end
end
return concat(w)
end
@@ -586,99 +302,127 @@ end
nodes.showboxes = showboxes
-local threshold = 65536
+local ptfactor = dimenfactors.pt
+local bpfactor = dimenfactors.bp
+local stripper = lpeg.patterns.stripzeros
-local function toutf(list,result,nofresult,stopcriterium)
- if list then
- local fontchar = fonts.hashes.characters
- for n in traverse_nodes(list) do
- local id = n.id
- if id == glyph_code then
- local components = n.components
- if components then
- result, nofresult = toutf(components,result,nofresult)
- else
- local c = n.char
- local fc = fontchar[n.font]
- if fc then
- local u = fc[c].tounicode
- if u then
- for s in gmatch(u,"....") do
- nofresult = nofresult + 1
- result[nofresult] = utfchar(tonumber(s,16))
- end
- else
- nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
- end
- else
- nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
- end
- end
- elseif id == disc_code then
- result, nofresult = toutf(n.replace,result,nofresult) -- needed?
- elseif id == hlist_code or id == vlist_code then
---~ if nofresult > 0 and result[nofresult] ~= " " then
---~ nofresult = nofresult + 1
---~ result[nofresult] = " "
---~ end
- result, nofresult = toutf(n.list,result,nofresult)
- elseif id == glue_code then
- if nofresult > 0 and result[nofresult] ~= " " then
- nofresult = nofresult + 1
- result[nofresult] = " "
- end
- elseif id == kern_code and n.kern > threshold then
- if nofresult > 0 and result[nofresult] ~= " " then
- nofresult = nofresult + 1
- result[nofresult] = " "
- end
- end
- if n == stopcriterium then
- break
- end
+-- start redefinition
+--
+-- -- if fmt then
+-- -- return formatters[fmt](n*dimenfactors[unit],unit)
+-- -- else
+-- -- return match(formatters["%.20f"](n*dimenfactors[unit]),"(.-0?)0*$") .. unit
+-- -- end
+--
+-- redefined:
+
+local dimenfactors = number.dimenfactors
+
+local function numbertodimen(d,unit,fmt,strip)
+ if not d then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local t = type(d)
+ if t == 'string' then
+ return d
+ end
+ if unit == true then
+ unit = "pt"
+ fmt = "%0.5f%s"
+ else
+ unit = unit or 'pt'
+ if not fmt then
+ fmt = "%s%s"
+ elseif fmt == true then
+ fmt = "%0.5f%s"
end
end
- if nofresult > 0 and result[nofresult] == " " then
- result[nofresult] = nil
- nofresult = nofresult - 1
+ if t == "number" then
+ local str = formatters[fmt](d*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local id = d.id -- d can be a kern or glue node here
+ if id == kern_code then
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ if id == glue_code then
+ d = d.spec
+ end
+ if not d or not d.id == spec_code then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local width = d.width
+ local plus = d.stretch_order
+ local minus = d.shrink_order
+ local stretch = d.stretch
+ local shrink = d.shrink
+ if plus ~= 0 then
+ plus = " plus " .. stretch/65536 .. fillcodes[plus]
+ elseif stretch ~= 0 then
+ plus = formatters[fmt](stretch*dimenfactors[unit],unit)
+ plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+ else
+ plus = ""
+ end
+ if minus ~= 0 then
+ minus = " minus " .. shrink/65536 .. fillcodes[minus]
+ elseif shrink ~= 0 then
+ minus = formatters[fmt](shrink*dimenfactors[unit],unit)
+ minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+ else
+ minus = ""
end
- return result, nofresult
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
-function nodes.toutf(list,stopcriterium)
- local result, nofresult = toutf(list,{},0,stopcriterium)
- return concat(result)
-end
+number.todimen = numbertodimen
--- this will move elsewhere
+function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
+function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
+function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
+function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
+function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
+function number.toscaledpoints(n) return n .. "sp" end
+function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end
+function number.topicas (n,fmt) return numbertodimen(n,"pc",fmt) end
+function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end
+function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
+function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
+function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
-local ptfactor = number.dimenfactors.pt
-local bpfactor = number.dimenfactors.bp
-local stripper = lpeg.patterns.stripzeros
+-- stop redefinition
local points = function(n)
if not n or n == 0 then
return "0pt"
+ elseif type(n) == "number" then
+ return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter
else
- return lpegmatch(stripper,format("%.5fpt",n*ptfactor))
+ return numbertodimen(n,"pt",true,true) -- also deals with nodes
end
end
local basepoints = function(n)
if not n or n == 0 then
return "0bp"
+ elseif type(n) == "number" then
+ return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter
else
- return lpegmatch(stripper,format("%.5fbp",n*bpfactor))
+ return numbertodimen(n,"bp",true,true) -- also deals with nodes
end
end
local pts = function(n)
if not n or n == 0 then
return "0pt"
+ elseif type(n) == "number" then
+ return format("%.5fpt",n*ptfactor) -- faster than formatter
else
- return format("%.5fpt",n*ptfactor)
+ return numbertodimen(n,"pt",true) -- also deals with nodes
end
end
@@ -686,7 +430,7 @@ local nopts = function(n)
if not n or n == 0 then
return "0"
else
- return format("%.5f",n*ptfactor)
+ return format("%.5f",n*ptfactor) -- faster than formatter
end
end
@@ -695,37 +439,91 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
---~ function nodes.thespec(s)
---~ local stretch_order = s.stretch_order
---~ local shrink_order = s.shrink_order
---~ local stretch_unit = (stretch_order ~= 0) and ("fi".. string.rep("l",stretch_order)) or "sp"
---~ local shrink_unit = (shrink_order ~= 0) and ("fi".. string.rep("l",shrink_order)) or "sp"
---~ return string.format("%ssp+ %ssp - %ssp",s.width,s.stretch,stretch_unit,s.shrink,shrink_unit)
---~ end
-
local colors = { }
tracers.colors = colors
-local get_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local unset_attribute = node.unset_attribute
+local unsetvalue = attributes.unsetvalue
-local attribute = attributes.private('color')
-local colormodel = attributes.private('colormodel')
-local mapping = attributes.list[attribute] or { }
+local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
+local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
- local mc = mapping[c]
+ local mc = m_color[c]
if not mc then
- unset_attribute(n,attribute)
+ n[a_color] = unsetvalue
else
- if not get_attribute(n,colormodel) then
- set_attribute(n,colormodel,s or 1)
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
+ end
+ return n
+end
+
+function colors.setlist(n,c,s)
+ local f = n
+ while n do
+ local mc = m_color[c]
+ if not mc then
+ n[a_color] = unsetvalue
+ else
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
end
- set_attribute(n,attribute,mc)
+ n = n.next
end
+ return f
end
function colors.reset(n)
- unset_attribute(n,attribute)
+ n[a_color] = unsetvalue
+ return n
+end
+
+-- maybe
+
+local transparencies = { }
+tracers.transparencies = transparencies
+
+local a_transparency = attributes.private('transparency')
+local m_transparency = attributes.list[a_transparency] or { }
+
+function transparencies.set(n,t)
+ local mt = m_transparency[t]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ return n
+end
+
+function transparencies.setlist(n,c,s)
+ local f = n
+ while n do
+ local mt = m_transparency[c]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ n = n.next
+ end
+ return f
+end
+
+function transparencies.reset(n)
+ n[a_transparency] = unsetvalue
+ return n
+end
+
+-- for the moment here
+
+nodes.visualizers = { }
+
+function nodes.visualizers.handler(head)
+ return head, false
end
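
A small sketch of how the number.todimen family defined above behaves: plain numbers are taken to be scaled points (65536sp = 1pt), passing true as the format selects "%0.5f%s", and glue nodes are rendered via their spec together with any plus/minus parts. The outputs in the comments are approximations under the default dimenfactors table, inside a ConTeXt/LuaTeX run where these helpers exist:

-- illustrative only
print(number.topoints(65536, true))       -- something like "1.00000pt"
print(number.tomillimeters(65536, true))  -- roughly "0.35146mm"
print(number.points(2 * 65536))           -- trailing zeros stripped, e.g. "2pt"

-- nodes also work: a glue node goes through its spec, so the result can carry
-- "plus"/"minus" components, e.g. number.todimen(someglue, "pt", true, true)
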
diff --git a/Master/texmf-dist/tex/context/base/node-tsk.lua b/Master/texmf-dist/tex/context/base/node-tsk.lua
index efc51913c12..596ac765ae2 100644
--- a/Master/texmf-dist/tex/context/base/node-tsk.lua
+++ b/Master/texmf-dist/tex/context/base/node-tsk.lua
@@ -14,9 +14,9 @@ local format = string.format
local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end)
-local report_tasks = logs.reporter("tasks")
+local report_tasks = logs.reporter("tasks")
-local allocate = utilities.storage.allocate
+local allocate = utilities.storage.allocate
local nodes = nodes
@@ -60,7 +60,7 @@ end
local function valid(name)
local data = tasksdata[name]
if not data then
- report_tasks("unknown task %s",name)
+ report_tasks("unknown task %a",name)
else
return data
end
@@ -69,17 +69,17 @@ end
local function validgroup(name,group,what)
local data = tasksdata[name]
if not data then
- report_tasks("unknown task %s",name)
+ report_tasks("unknown task %a",name)
else
local frozen = data.frozen[group]
if frozen then
if frozengroup == "no" then
-- default
elseif frozengroup == "strict" then
- report_tasks("warning: group %s of task %s is frozen, %s applied but not supported",group,name,what)
+ report_tasks("warning: group %a of task %a is frozen, %a applied but not supported",group,name,what)
return
else -- if frozengroup == "tolerant" then
- report_tasks("warning: group %s of task %s is frozen, %s ignored",group,name,what)
+ report_tasks("warning: group %a of task %a is frozen, %a ignored",group,name,what)
end
end
return data
@@ -159,7 +159,7 @@ end
function tasks.showactions(name,group,action,where,kind)
local data = valid(name)
if data then
- report_tasks("task %s, list:\n%s",name,nodeprocessor(data.list))
+ report_tasks("task %a, list:\n%s",name,nodeprocessor(data.list))
end
end
@@ -189,7 +189,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s'",name)
+ report_tasks("creating runner %a",name)
end
runner = compile(data.list,data.processor,0)
data.runner = runner
@@ -203,7 +203,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with 1 extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,1)
end
runner = compile(data.list,data.processor,1)
data.runner = runner
@@ -217,7 +217,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with 2 extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,2)
end
runner = compile(data.list,data.processor,2)
data.runner = runner
@@ -231,7 +231,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with 3 extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,3)
end
runner = compile(data.list,data.processor,3)
data.runner = runner
@@ -245,7 +245,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with 4 extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,4)
end
runner = compile(data.list,data.processor,4)
data.runner = runner
@@ -259,7 +259,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with 5 extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,5)
end
runner = compile(data.list,data.processor,5)
data.runner = runner
@@ -273,7 +273,7 @@ function tasks.actions(name) -- we optimize for the number or arguments (no ...)
if not runner then
created = created + 1
if trace_tasks then
- report_tasks("creating runner '%s' with n extra arguments",name)
+ report_tasks("creating runner %a with %s extra arguments",name,n)
end
runner = compile(data.list,data.processor,"n")
data.runner = runner
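
The logging changes in this file swap hand-quoted '%s' placeholders for ConTeXt's %a directive, which does the quoting itself. A hedged sketch of the difference, assuming ConTeXt's string.formatters (from util-str.lua) is loaded; the exact quoting style is the library's choice:

local formatters = string.formatters   -- ConTeXt extension, not plain Lua

-- old style: quotes written into the template by hand
print(string.format("creating runner '%s'", "mvlists"))

-- new style: %a lets the formatter quote (and otherwise prettify) the argument,
-- producing output along the lines of: creating runner 'mvlists'
print(formatters["creating runner %a"]("mvlists"))
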
diff --git a/Master/texmf-dist/tex/context/base/node-typ.lua b/Master/texmf-dist/tex/context/base/node-typ.lua
index 5f8df2b4443..6e1a31643f2 100644
--- a/Master/texmf-dist/tex/context/base/node-typ.lua
+++ b/Master/texmf-dist/tex/context/base/node-typ.lua
@@ -6,13 +6,14 @@ if not modules then modules = { } end modules ['node-typ'] = {
license = "see context related readme files"
}
-local utfvalues = string.utfvalues
+local utfvalues = utf.values
local currentfont = font.current
local fontparameters = fonts.hashes.parameters
local hpack = node.hpack
local vpack = node.vpack
+local fast_hpack = nodes.fasthpack
local nodepool = nodes.pool
@@ -45,7 +46,9 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
next = newglyph(fontid or 1,c)
spacedone = false
end
- if not head then
+ if not next then
+ -- nothing
+ elseif not head then
head = next
else
prev.next = next
@@ -62,6 +65,10 @@ function typesetters.hpack(str,fontid,spacing)
return hpack(tonodes(str,fontid,spacing),"exactly")
end
+function typesetters.fast_hpack(str,fontid,spacing)
+ return fast_hpack(tonodes(str,fontid,spacing),"exactly")
+end
+
function typesetters.vpack(str,fontid,spacing)
   -- vpack is just a hack, and a proper implementation is on the agenda
-- as it needs more info etc than currently available
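
A hedged usage sketch for the typesetters helpers above; it assumes a LuaTeX/ConTeXt run where the typesetters table from this file, font.current and tex.box are available, and the spacing value (in scaled points) is only an illustrative choice:

-- illustrative only: turn a string into a packed horizontal box
local fontid  = font.current()
local spacing = 4 * 65536                      -- about 4pt between words (assumption)
local box     = typesetters.hpack("hello world", fontid, spacing)
tex.box[255]  = box                            -- hand the result to TeX as \box255

typesetters.fast_hpack is the variant built on nodes.fasthpack, and typesetters.vpack stacks the material vertically (still a rough hack, as the comment above notes).
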
diff --git a/Master/texmf-dist/tex/context/base/norm-ctx.mkiv b/Master/texmf-dist/tex/context/base/norm-ctx.mkiv
index ff16767433c..6fc012c452e 100644
--- a/Master/texmf-dist/tex/context/base/norm-ctx.mkiv
+++ b/Master/texmf-dist/tex/context/base/norm-ctx.mkiv
@@ -1,8 +1,8 @@
%D \module
%D [ file=norm-ctx,
%D version=2009.03.19,
-%D title=\CONTEXT\ Norm Macros,
-%D subtitle=\ALEPH\ and \OMEGA,
+%D title=\CONTEXT\ Normal Macros,
+%D subtitle=\CONTEXT,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -11,8 +11,77 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+\unprotect
+
%D A few more might end up here (like the weird ones in syst-ini).
\let\normalreqno\normaleqno
-\endinput
+% more friendly in setups:
+
+\let\ordordspacing \Umathordordspacing
+\let\ordopspacing \Umathordopspacing
+\let\ordbinspacing \Umathordbinspacing
+\let\ordrelspacing \Umathordrelspacing
+\let\ordopenspacing \Umathordopenspacing
+\let\ordclosespacing \Umathordclosespacing
+\let\ordpunctspacing \Umathordpunctspacing
+\let\ordinnerspacing \Umathordinnerspacing
+\let\opordspacing \Umathopordspacing
+\let\opopspacing \Umathopopspacing
+\let\opbinspacing \Umathopbinspacing
+\let\oprelspacing \Umathoprelspacing
+\let\opopenspacing \Umathopopenspacing
+\let\opclosespacing \Umathopclosespacing
+\let\oppunctspacing \Umathoppunctspacing
+\let\opinnerspacing \Umathopinnerspacing
+\let\binordspacing \Umathbinordspacing
+\let\binopspacing \Umathbinopspacing
+\let\binbinspacing \Umathbinbinspacing
+\let\binrelspacing \Umathbinrelspacing
+\let\binopenspacing \Umathbinopenspacing
+\let\binclosespacing \Umathbinclosespacing
+\let\binpunctspacing \Umathbinpunctspacing
+\let\bininnerspacing \Umathbininnerspacing
+\let\relordspacing \Umathrelordspacing
+\let\relopspacing \Umathrelopspacing
+\let\relbinspacing \Umathrelbinspacing
+\let\relrelspacing \Umathrelrelspacing
+\let\relopenspacing \Umathrelopenspacing
+\let\relclosespacing \Umathrelclosespacing
+\let\relpunctspacing \Umathrelpunctspacing
+\let\relinnerspacing \Umathrelinnerspacing
+\let\openordspacing \Umathopenordspacing
+\let\openopspacing \Umathopenopspacing
+\let\openbinspacing \Umathopenbinspacing
+\let\openrelspacing \Umathopenrelspacing
+\let\openopenspacing \Umathopenopenspacing
+\let\openclosespacing \Umathopenclosespacing
+\let\openpunctspacing \Umathopenpunctspacing
+\let\openinnerspacing \Umathopeninnerspacing
+\let\closeordspacing \Umathcloseordspacing
+\let\closeopspacing \Umathcloseopspacing
+\let\closebinspacing \Umathclosebinspacing
+\let\closerelspacing \Umathcloserelspacing
+\let\closeopenspacing \Umathcloseopenspacing
+\let\closeclosespacing\Umathcloseclosespacing
+\let\closepunctspacing\Umathclosepunctspacing
+\let\closeinnerspacing\Umathcloseinnerspacing
+\let\punctordspacing \Umathpunctordspacing
+\let\punctopspacing \Umathpunctopspacing
+\let\punctbinspacing \Umathpunctbinspacing
+\let\punctrelspacing \Umathpunctrelspacing
+\let\punctopenspacing \Umathpunctopenspacing
+\let\punctclosespacing\Umathpunctclosespacing
+\let\punctpunctspacing\Umathpunctpunctspacing
+\let\punctinnerspacing\Umathpunctinnerspacing
+\let\innerordspacing \Umathinnerordspacing
+\let\inneropspacing \Umathinneropspacing
+\let\innerbinspacing \Umathinnerbinspacing
+\let\innerrelspacing \Umathinnerrelspacing
+\let\inneropenspacing \Umathinneropenspacing
+\let\innerclosespacing\Umathinnerclosespacing
+\let\innerpunctspacing\Umathinnerpunctspacing
+\let\innerinnerspacing\Umathinnerinnerspacing
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/pack-bar.mkiv b/Master/texmf-dist/tex/context/base/pack-bar.mkiv
index 6967173e2e8..06eeebd1411 100644
--- a/Master/texmf-dist/tex/context/base/pack-bar.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-bar.mkiv
@@ -13,55 +13,80 @@
\writestatus{loading}{ConTeXt Packaging Macros / Bars}
-%D This code has been moved from scrn-int to here (was some old
-%D experimental code). It could be in scrn-bar but it's static.
\unprotect
+%D This code has been moved from scrn-int to here (was some old
+%D experimental code). It could be in scrn-bar but it's static. In
+%D the meantime the interface has been adapted to a key|/|value one.
+%D
%D \startbuffer
-%D \dorecurse{10}
-%D {\horizontalpositionbar
-%D \pos\recurselevel \min1 \max10
-%D \token\framed{\recurselevel}%
-%D \\}
+%D \dorecurse{10}{
+%D \ruledhbox{\horizontalpositionbar[n=#1,min=1,max=10,text=!,color=red]}
+%D \par
+%D }
+%D \stopbuffer
+%D
+%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
%D
-%D \hbox to 15em
-%D {\hss
-%D \dorecurse{10}
-%D {\verticalpositionbar\pos\recurselevel\min1\max10\token\blackrule\\
-%D \hss}}
+%D \startbuffer
+%D \dorecurse{10}{
+%D \ruledhbox{\horizontalgrowingbar[n=#1,min=1,max=10,text=!,color=red]}
+%D \par
+%D }
%D \stopbuffer
+%D
+%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
+
+\installcorenamespace{positionbar}
+
+\installsimplecommandhandler \??positionbar {positionbar}
+
+\setuppositionbar
+ [\c!min=1,
+ \c!max=1,
+ \c!n=1,
+ \c!text=?,
+ \c!width=\emwidth,
+ \c!height=\strutheight,
+ \c!depth=\strutdepth]
-\def\horizontalpositionbar\pos#1\min#2\max#3\token#4\\%
+\unexpanded\def\horizontalpositionbar[#1]%
{\hbox to \hsize
- {\hskip\zeropoint\!!plus #1\!!fill
- \hskip\zeropoint\!!plus-#2\!!fill
- #4\relax
- \hskip\zeropoint\!!plus #3\!!fill
- \hskip\zeropoint\!!plus-#1\!!fill}}
+ {\setuppositionbar[#1]%
+ \usepositionbarstyleandcolor\c!style\c!color
+ \hskip\zeropoint\s!plus \positionbarparameter\c!n \s!fill
+ \hskip\zeropoint\s!plus-\positionbarparameter\c!min\s!fill
+ \positionbarparameter\c!text\relax
+ \hskip\zeropoint\s!plus \positionbarparameter\c!max\s!fill
+ \hskip\zeropoint\s!plus-\positionbarparameter\c!n \s!fill}}
-\def\verticalpositionbar\pos#1\min#2\max#3\token#4\\%
+\unexpanded\def\verticalpositionbar[#1]%
{\vbox to \vsize
- {\vskip\zeropoint\!!plus #1\!!fill
- \vskip\zeropoint\!!plus-#2\!!fill
- \hbox{#4}\relax
- \vskip\zeropoint\!!plus #3\!!fill
- \vskip\zeropoint\!!plus-#1\!!fill}}
+ {\setuppositionbar[#1]%
+ \usepositionbarstyleandcolor\c!style\c!color
+ \vskip\zeropoint\s!plus \positionbarparameter\c!n \s!fill
+ \vskip\zeropoint\s!plus-\positionbarparameter\c!min\s!fill
+ \positionbarparameter\c!text\relax
+ \vskip\zeropoint\s!plus \positionbarparameter\c!max\s!fill
+ \vskip\zeropoint\s!plus-\positionbarparameter\c!n \s!fill}}
-\def\horizontalgrowingbar\pos#1\min#2\max#3\height#4\depth#5\\%
+\unexpanded\def\horizontalgrowingbar[#1]%
{\hbox to \hsize
- {\scratchcounter\numexpr#1-#2+\plusone\relax
- \leaders\vrule\hskip\zeropoint\!!plus \scratchcounter\!!fill
- \vrule\!!width\zeropoint\!!height#4\!!depth#5%
- \hskip\zeropoint\!!plus #3\!!fill
- \hskip\zeropoint\!!plus-#1\!!fill}}
+ {\setuppositionbar[#1]%
+ \usepositionbarstyleandcolor\c!style\c!color
+ \leaders\vrule\hskip\zeropoint\s!plus \numexpr\positionbarparameter\c!n-\positionbarparameter\c!min+\plusone\relax\s!fill
+ \vrule\s!width\zeropoint\s!height\positionbarparameter\c!height\s!depth\positionbarparameter\c!depth
+ \hskip\zeropoint\s!plus \positionbarparameter\c!max\s!fill
+ \hskip\zeropoint\s!plus-\positionbarparameter\c!n \s!fill}}
-\def\verticalgrowingbar\pos#1\min#2\max#3\width#4\\%
+\unexpanded\def\verticalgrowingbar[#1]%
{\vbox to \vsize
- {\scratchcounter\numexpr#1-#2+\plusone\relax
- \leaders\hrule\vskip\zeropoint\!!plus\scratchcounter\!!fill
- \hrule\!!width#4\!!height\zeropoint\!!depth\zeropoint
- \vskip\zeropoint\!!plus #3\!!fill
- \vskip\zeropoint\!!plus-#1\!!fill}}
+ {\setuppositionbar[#1]%
+ \usepositionbarstyleandcolor\c!style\c!color
+ \leaders\hrule\vskip\zeropoint\s!plus\numexpr\positionbarparameter\c!n-\positionbarparameter\c!min+\plusone\relax\s!fill
+ \hrule\s!width\positionbarparameter\c!width\s!height\zeropoint\s!depth\zeropoint
+ \vskip\zeropoint\s!plus \positionbarparameter\c!max\s!fill
+ \vskip\zeropoint\s!plus-\positionbarparameter\c!n \s!fill}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/pack-bck.mkvi b/Master/texmf-dist/tex/context/base/pack-bck.mkvi
index fbe704fabbc..72eafd282ee 100644
--- a/Master/texmf-dist/tex/context/base/pack-bck.mkvi
+++ b/Master/texmf-dist/tex/context/base/pack-bck.mkvi
@@ -209,7 +209,7 @@
% \c!color=,
% \c!background=\v!screen,
% \c!backgroundcolor=\backgroundparameter\c!color,
-% \c!screen=\@@rsscreen,
+% \c!screen=\defaultbackgroundscreen,
%
\c!background=\v!color,
\c!backgroundcolor=lightgray,
@@ -240,8 +240,7 @@
%D \macros
%D {backgroundline}
%D
-%D For the moment an undocumented feature, but a candidate
-%D for going public.
+%D For the moment an undocumented feature, but a candidate for going public.
\unexpanded\def\backgroundline[#color]%
{\dontleavehmode
diff --git a/Master/texmf-dist/tex/context/base/pack-box.mkiv b/Master/texmf-dist/tex/context/base/pack-box.mkiv
index 41a17953bb1..f8b36691c75 100644
--- a/Master/texmf-dist/tex/context/base/pack-box.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-box.mkiv
@@ -13,36 +13,24 @@
\writestatus{loading}{ConTeXt Packaging Macros / Boxes}
-% to be cleaned up
-
-%D This module contains all kind of macros for moving content
-%D around. Many macros here come from other modules, but
-%D depencies made it more clear to isolate them.
-
-% \placeornament
+%D This module contains all kinds of macros for moving content around. Many
+%D macros here come from other modules, but dependencies made it clearer
+%D to isolate them. We invite users to document the macros. They can be
+%D handy shortcuts for otherwise complex tasks.
\unprotect
-% we need to set the size, else we get dimensions depending
-% on the content, which in itself is ok, but can lead to loops
-% due to rounding errors (happened in demo-obv)
+%D We need to set the size, else we get dimensions depending on the content,
+%D which in itself is ok, but can lead to loops due to rounding errors (happened
+%D in demo-obv).
\definelayer[\v!text-2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
\definelayer[\v!text-1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
\definelayer[\v!text+1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
\definelayer[\v!text+2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-% \unexpanded\def\positionregionlayer#1#2%
-% {\composedlayer{#2}}
-%
-% \def\internaltextoverlay#1% will become more generic and installable
-% {\startoverlay % i.e. probably an overlay by itself
-% {\positionregionoverlay\textanchor{\v!text#1}}% see later
-% {\positionregionlayer \textanchor{\v!text#1}}%
-% \stopoverlay}
-
-\def\internaltextoverlay#1% will become more generic and installable
- {\startoverlay % i.e. probably an overlay by itself
+\unexpanded\def\internaltextoverlay#1% will become more generic and installable
+ {\startoverlay % i.e. probably an overlay by itself
{\positionregionoverlay\textanchor{\v!text#1}}% see later
{\composedlayer {\v!text#1}}%
\stopoverlay}
@@ -52,75 +40,6 @@
\defineoverlay[\v!text+1][\internaltextoverlay{+1}]
\defineoverlay[\v!text+2][\internaltextoverlay{+2}]
-% to be documented
-
-% \definelayer[anchor]
-%
-% \unexpanded\def\defineanchor
-% {\doquadrupleempty\dodefineanchor}
-%
-% \def\dodefineanchor[#1][#2][#3][#4]%
-% {\setvalue{\??an#1}{\dodefinedanchor[#2][#3][#4]}}
-%
-% \def\dodefinedanchor[#1][#2][#3]%
-% {\def\docommand[##1][##2]%
-% {\ifsecondargument
-% \def\next{\dodoanchorT[#1][#2,##1][#3,##2]}%
-% \else\iffirstargument
-% \def\next{\dodoanchorT[#1][#2,##1][#2,##1]}%
-% \else
-% \def\next{\dodoanchorT[#1][#2][#3]}%
-% \fi\fi
-% \next}%
-% \dodoubleempty\docommand}
-%
-% \unexpanded\def\anchor
-% {\dosingleargument\pack_anchor}
-%
-% \def\pack_anchor[#1]%
-% {\ifcsname\??an#1\endcsname\@EA\nonoanchor\else\@EA\dodoanchor\fi[#1]}
-%
-% \def\nonoanchor[#1]%
-% {\csname\??an#1\endcsname}
-%
-% \def\dodoanchor[#1]%
-% {\dotripleempty\dododoanchor[#1]}
-%
-% \def\dododoanchor
-% {\ifthirdargument
-% \expandafter\dodoanchorT
-% \else
-% \expandafter\dodoanchorS
-% \fi}
-%
-% \def\dodoanchorS[#1][#2][#3]%
-% {\dodoanchorT[#1][#2][#2]}
-%
-% \def\dodoanchorT[#1][#2][#3]% brrr: we need to apply offset only once .. a bit messy
-% {\dowithnextbox
-% {\bgroup
-% % \checktextbackgrounds
-% \setbox\scratchbox\emptyhbox
-% \wd\scratchbox\nextboxwd
-% \ht\scratchbox\nextboxht
-% \dp\scratchbox\nextboxdp
-% \setlayer
-% [anchor]
-% [\c!width=\wd\scratchbox,
-% \c!height=\ht\scratchbox,
-% \c!offset=\!!zeropoint,
-% #2,#3]
-% {\setlayer[#1]{\flushnextbox}}%
-% \framed
-% [#2,
-% \c!background=anchor,
-% \c!offset=\v!overlay,
-% \c!frame=\v!off,
-% #3]
-% {\box\scratchbox}%
-% \egroup}%
-% \vbox}
-
\installcorenamespace {anchor}
\unexpanded\def\defineanchor
@@ -194,7 +113,7 @@
\c!offset=\zeropoint,
#2,#3]
   {\setlayer[#1]{\box\b_pack_anchors}}% % #1 uses overlaywidth/height
- \framed
+ \framed % could be a predefined framed
[\c!background=anchor,
\c!offset=\v!overlay,
\c!frame=\v!off,
@@ -204,157 +123,182 @@
% collectors
-\def\@@collectorbox{@@collectorbox}
+\installcorenamespace{collectorbox}
+\installcorenamespace{collector}
+
+\installcommandhandler \??collector {collector} \??collector
+
+\setupcollector
+ [\c!state=\v!start,
+ \c!x=\zeropoint,
+ \c!y=\zeropoint,
+ \c!offset=\zeropoint,
+   \c!rotation=, % not 0 !
+ \c!hoffset=\zeropoint,
+ \c!voffset=\zeropoint,
+ \c!location=rb,
+ \c!corner=]
+
+\appendtoks
+ \ifcsname\??collectorbox\currentcollector\endcsname \else
+ \expandafter\newbox\csname\??collectorbox\currentcollector\endcsname
+ \fi
+\to \everydefinecollector
-\unexpanded\def\definecollector
- {\dodoubleargument\dodefinecollector}
+\unexpanded\def\resetcollector[#1]%
+ {\ifcsname\??collectorbox#1\endcsname
+ \global\setbox\csname\??collectorbox#1\endcsname\emptybox
+ \fi}
-\def\dodefinecollector[#1][#2]%
- {\ifcsname\@@collectorbox#1\endcsname \else
- \expandafter\newbox\csname\@@collectorbox#1\endcsname
- \fi
- \resetcollector[#1]%
- \setupcollector
- [#1]
- [\c!state=\v!start,
- \c!x=\!!zeropoint,\c!y=\!!zeropoint,
- \c!offset=\!!zeropoint,\c!rotation=, % geen 0 !
- \c!hoffset=\!!zeropoint,\c!voffset=\!!zeropoint,
- \c!location=rb,\c!corner=,#2]}
-
-\unexpanded\def\setupcollector
- {\dodoubleargument\dosetupcollector}
-
-\def\dosetupcollector[#1][#2]%
- {\def\docommand##1{\getparameters[\??cb##1][#2]}%
- \processcommalist[#1]\docommand}
-
-\def\setcollector
- {\dodoubleargument\dosetcollector}
-
-\def\dosetcollector[#1][#2]%
+\newconditional\c_pack_boxes_collector_valid_box
+
+\let\b_pack_boxes_collector\scratchbox
+
+\def\pack_boxes_collector_check_box#1%
+ {\edef\currentcollector{#1}%
+ \ifcsname\??collectorbox\currentcollector\endcsname
+ \settrue\c_pack_boxes_collector_valid_box
+ \expandafter\let\expandafter\b_pack_boxes_collector\csname\??collectorbox\currentcollector\endcsname
+ \else
+ \setfalse\c_pack_boxes_collector_valid_box
+ \writestatus{collector}{unknown collector \currentcollector}%
+ \fi}
+
+\unexpanded\def\setcollector
{\bgroup
- \forgetall
+ \dodoubleargument\pack_boxes_collector}
+
+\def\pack_boxes_collector[#1][#2]% todo: keep reference point
+ {\pack_boxes_collector_check_box{#1}%
+ \ifconditional\c_pack_boxes_collector_valid_box
+ \setupcurrentcollector[#2]%
+ \expandafter\pack_boxes_collector_yes
+ \else
+ \expandafter\pack_boxes_collector_nop
+ \fi}
+
+\def\pack_boxes_collector_yes
+ {\forgetall
\dontcomplain
- \dowithnextbox
- {\ifcsname\@@collectorbox#1\endcsname
- \dodosetcollector[#1][#2]%
- \else
- \writestatus{collector}{unknown layer #1}%
- \fi
- \egroup}
- \hbox}
+ \dowithnextboxcs\pack_boxes_collector_finish\hbox}
-\def\collectorparameter#1{\csname\??cb\currentcollector#1\endcsname}
-
-\def\dodosetcollector[#1][#2]% todo: keep reference point
- {\def\currentcollector{#1}%
- \chardef\collectorbox\csname\@@collectorbox#1\endcsname
- \getparameters[\??cb#1][#2]%
- \d_pack_layers_x_size\wd\collectorbox
- \d_pack_layers_y_size\ht\collectorbox
- \doifvaluesomething{\??cb#1\c!rotation}
- {\setbox\nextbox\hbox
- {\rotate
- [\c!location=\v!high,
- \c!rotation=\collectorparameter\c!rotation]
- {\flushnextbox}}}%
- \advance\d_pack_layers_y_size\dp\collectorbox
- \d_pack_layers_x_position\collectorparameter\c!x
- \advance\d_pack_layers_x_position\collectorparameter\c!hoffset
- \d_pack_layers_y_position\collectorparameter\c!y
- \advance\d_pack_layers_y_position\collectorparameter\c!voffset
- \doifelse\v!middle{\collectorparameter\c!corner}
- {\ifdim\d_pack_layers_x_size>\zeropoint
- \advance\d_pack_layers_x_position.5\d_pack_layers_x_size
- \fi
- \ifdim\d_pack_layers_y_size>\zeropoint
- \advance\d_pack_layers_y_position.5\d_pack_layers_y_size
- \fi}%
- {\normalexpanded{\noexpand\doifinset{\v!bottom}{\collectorparameter\c!corner}}
- {\ifdim\d_pack_layers_y_size>\zeropoint
- \advance\d_pack_layers_y_position-\d_pack_layers_y_size
- \d_pack_layers_y_position-\d_pack_layers_y_position
- \fi}%
- \normalexpanded{\noexpand\doifinset{\v!right}{\collectorparameter\c!corner}}
- {\ifdim\d_pack_layers_x_size>\zeropoint
- \advance\d_pack_layers_x_position-\d_pack_layers_x_size
- \d_pack_layers_x_position-\d_pack_layers_x_position
- \fi}}%
+\def\pack_boxes_collector_nop
+ {\egroup}
+
+\installcorenamespace{collectorcorners}
+
+\setvalue{\??collectorcorners\v!middle}%
+ {\ifdim\d_pack_layers_x_size>\zeropoint
+ \advance\d_pack_layers_x_position.5\d_pack_layers_x_size
+ \fi
+ \ifdim\d_pack_layers_y_size>\zeropoint
+ \advance\d_pack_layers_y_position.5\d_pack_layers_y_size
+ \fi}
+
+\setvalue{\??collectorcorners\v!bottom}%
+ {\ifdim\d_pack_layers_y_size>\zeropoint
+ \advance\d_pack_layers_y_position-\d_pack_layers_y_size
+ \d_pack_layers_y_position-\d_pack_layers_y_position
+ \fi}
+
+\setvalue{\??collectorcorners\v!right}%
+ {\ifdim\d_pack_layers_x_size>\zeropoint
+ \advance\d_pack_layers_x_position-\d_pack_layers_x_size
+ \d_pack_layers_x_position-\d_pack_layers_x_position
+ \fi}
+
+\def\pack_boxes_collector_check_corner#1%
+ {\ifcsname\??collectorcorners#1\endcsname
+ \csname\??collectorcorners#1\endcsname
+ \fi}
+
+\def\pack_boxes_collector_finish
+ {\edef\p_collector_rotation{\collectorparameter\c!rotation}%
+ \edef\p_collector_corner {\collectorparameter\c!corner}%
+ \ifx\p_collector_rotation\empty \else
+ \setbox\nextbox\hbox
+ {\rotate
+ [\c!location=\v!high,
+ \c!rotation=\p_collector_rotation]
+ {\box\nextbox}}%
+ \fi
+ \d_pack_layers_x_size\wd\b_pack_boxes_collector
+ \d_pack_layers_y_size\htdp\b_pack_boxes_collector
+ \d_pack_layers_x_position\dimexpr\collectorparameter\c!x+\collectorparameter\c!hoffset\relax
+ \d_pack_layers_y_position\dimexpr\collectorparameter\c!y+\collectorparameter\c!voffset\relax
+ \rawprocesscommacommand[\p_collector_corner]\pack_boxes_collector_check_corner
\setbox\nextbox\hbox
- {\alignedbox[\collectorparameter\c!location]\vbox{\flushnextbox}}%
+ {\alignedbox[\collectorparameter\c!location]\vbox{\box\nextbox}}%
\boxmaxdepth\zeropoint % really needed, nice example
\global\advance\boxhdisplacement\d_pack_layers_x_position
\ifdim\boxhdisplacement<\zeropoint
- \global\setbox\collectorbox\hbox
- {\hskip-\boxhdisplacement
- \box\collectorbox}%
+ \global\setbox\b_pack_boxes_collector\hbox
+ {\kern-\boxhdisplacement
+ \box\b_pack_boxes_collector}%
\fi
\global\advance\boxvdisplacement\d_pack_layers_y_position
\ifdim\boxvdisplacement<\zeropoint
- \global\setbox\collectorbox\hbox
+ \global\setbox\b_pack_boxes_collector\hbox
{\lower-\boxvdisplacement
- \box\collectorbox}%
+ \box\b_pack_boxes_collector}%
\fi
- \d_pack_layers_x_size\wd\collectorbox
- \d_pack_layers_y_size\ht\collectorbox
- \advance\d_pack_layers_y_size\dp\collectorbox
- \global\setbox\collectorbox\hbox
- {\box\collectorbox
- \hskip-\d_pack_layers_x_size
- \hskip\d_pack_layers_x_position\relax
- \ifdim\boxhdisplacement<\zeropoint
- \hskip-\boxhdisplacement
- \fi
+ \d_pack_layers_x_size\wd\b_pack_boxes_collector
+ \d_pack_layers_y_size\htdp\b_pack_boxes_collector
+ \global\setbox\b_pack_boxes_collector\hbox
+ {\box\b_pack_boxes_collector
+ \kern\dimexpr
+ -\d_pack_layers_x_size
+ +\d_pack_layers_x_position
+ \ifdim\boxhdisplacement<\zeropoint
+ -\boxhdisplacement
+ \fi
+ \relax
\lower\d_pack_layers_y_position\hbox
{\ifdim\boxvdisplacement<\zeropoint
- \lower-\boxvdisplacement\flushnextbox
- \else
- \flushnextbox
- \fi}}%
+ \lower-\boxvdisplacement
+ \fi
+ \box\nextbox}}%
% combine height and depth into depth only (later flushed as height)
- \global\setbox\collectorbox\hbox
- {\lower\ht\collectorbox\box\collectorbox}%
+ \global\setbox\b_pack_boxes_collector\hbox
+ {\lower\ht\b_pack_boxes_collector\box\b_pack_boxes_collector}%
% just to be sure
- \ifdim\wd\collectorbox<\d_pack_layers_x_size
- \wd\collectorbox\d_pack_layers_x_size
- \fi}
+ \ifdim\wd\b_pack_boxes_collector<\d_pack_layers_x_size
+ \wd\b_pack_boxes_collector\d_pack_layers_x_size
+ \fi
+ \egroup}
-\def\flushcollector[#1]%
- {\ifcsname\@@collectorbox#1\endcsname
- \doifnotvalue{\??cb#1\c!state}\v!stop
- {\vbox
- {\hbox
- {\doifelsevalue{\??cb#1\c!state}\v!repeat
- {\let\next\copy}{\let\next\box}%
- \raise\dp\csname\@@collectorbox#1\endcsname
- \next\csname\@@collectorbox#1\endcsname}}}%
- \else
- \writestatus{collector}{unknown collector #1}%
- \fi}
+\unexpanded\def\flushcollector[#1]%
+ {\bgroup
+ \pack_boxes_collector_check_box{#1}%
+ \ifconditional\c_pack_boxes_collector_valid_box
+ \edef\p_collector_state{\collectorparameter\c!state}%
+ \ifx\p_collector_state\v!stop \else
+ \vbox{\hbox{\raise
+ \dp\b_pack_boxes_collector
+ \ifx\p_collector_state\v!repeat\copy\else\box\fi\b_pack_boxes_collector}}%
+ \fi
+ \fi
+ \egroup}
-\def\composedcollector#1{\flushcollector[#1]}
+\unexpanded\def\composedcollector#1% no [], handy as argument
+ {\flushcollector[#1]}
-\def\resetcollector[#1]%
- {\ifcsname\@@collectorbox#1\endcsname
- \global\setbox\csname\@@collectorbox#1\endcsname\emptybox
- \fi}
+\unexpanded\def\adaptcollector
+ {\dodoubleargument\pack_boxes_collector_adapt}
-\def\adaptcollector
- {\dodoubleargument\doadaptcollector}
-
-\def\doadaptcollector[#1][#2]% % a typical case where \global\wd looks better in the code
- {\bgroup
- \def\currentcollector{#1}%
- \chardef\collectorbox\csname\@@collectorbox#1\endcsname
- \getparameters[\??cb#1][\c!voffset=\zeropoint,\c!hoffset=\zeropoint,#2]%
- \scratchdimen\wd\collectorbox
- \advance\scratchdimen\collectorparameter\c!hoffset
- \global\wd\collectorbox\scratchdimen
- \scratchdimen\ht\collectorbox
- \advance\scratchdimen\collectorparameter\c!voffset
- \global\ht\collectorbox\scratchdimen
- \egroup}
+\def\pack_boxes_collector_adapt[#1][#2]% % a typical case where \global\wd looks better in the code
+ {\begingroup
+ \pack_boxes_collector_check_box{#1}%
+ \ifconditional\c_pack_boxes_collector_valid_box
+ \letcollectorparameter\c!voffset\zeropoint
+     \letcollectorparameter\c!hoffset\zeropoint
+ \ifsecondargument
+ \setupcurrentcollector[#2]%
+ \fi
+ \global\wd\b_pack_boxes_collector\dimexpr\wd\b_pack_boxes_collector+\collectorparameter\c!hoffset\relax
+ \global\ht\b_pack_boxes_collector\dimexpr\ht\b_pack_boxes_collector+\collectorparameter\c!voffset\relax
+ \fi
+ \endgroup}
%\definecollector[test]
%\setcollector[test]
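The commented test lines above hint at the intended flow; a slightly fuller sketch, using only parameter keys listed in the \setupcollector defaults and illustrative content:

\starttyping
\definecollector[test]
\setcollector     [test]              {\framed{one}}
\setcollector     [test][location=rb] {\framed{two}}
\composedcollector{test}
\stoptyping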
@@ -368,23 +312,23 @@
\definecollector
[caption]
-\def\collectedtext
- {\dodoubleempty\docollectedtext}
-
-\def\docollectedtext[#1][#2]#3%
+\unexpanded\def\collectedtext % for captions
{\bgroup
- \dowithnextbox
+ \dodoubleempty\pack_boxes_collector_text}
+
+\def\pack_boxes_collector_text[#1][#2]#3%
+ {\dowithnextbox
{\setcollector
[caption]
- {\flushnextbox}%
+ {\box\nextbox}%
\setcollector
[caption][#1]
- {\getparameters[\??du][\c!style=,\c!color=,#2]%
- \dousestyleparameter\@@dustyle
+ {\letdummyparameter\c!style\empty
+ \letdummyparameter\c!color\empty
+ \getdummyparameters[#2]%
+ \dousestyleparameter{\directdummyparameter\c!style}%
\setupinterlinespace
- \framed % watch the special setting of kader/overlay
- [\c!frame=\v!overlay,\c!foregroundcolor=\@@ducolor,\c!foregroundstyle=\@@dustyle,#2]
- {#3}}%
+ \normalexpanded{\framed[\c!foregroundcolor=\directdummyparameter\c!color,\c!foregroundstyle=\directdummyparameter\c!style},\c!frame=\v!overlay,#2]{#3}}%
\composedcollector{caption}%
\egroup}%
\hbox}
@@ -406,51 +350,6 @@
% [frame=on,offset=0pt]
% {gans}
% {\externalfigure[koe][width=3cm]}
-
-% lean and mean:
-%
-% \installcorenamespace {layeredtext}
-%
-% \newdimen\d_pack_layeredtexts_width
-% \newdimen\d_pack_layeredtexts_height
-%
-% \definelayer
-% [\??layeredtextlayer]
-%
-% \setuplayer
-% [\??layeredtextlayer]%
-% [\c!width=\d_pack_layeredtexts_width,\c!height=\d_pack_layeredtexts_height]%
-%
-% \unexpanded\def\layeredtext
-% {\dodoubleempty\dolayeredtext}
-%
-% \def\dolayeredtext[#1][#2]#3%
-% {\bgroup
-% \dowithnextbox
-% {\d_pack_layeredtexts_width \wd\nextbox
-% \d_pack_layeredtexts_height\ht\nextbox
-% \begingroup % preserve \nextbox
-% \setlayer
-% [\??layeredtextlayer]%
-% [#1]%
-% {\getparameters[\??du][\c!style=,\c!color=,#2]%
-% \dousestyleparameter\@@dustyle
-% \setupinterlinespace
-% \framed
-% [\c!frame=\v!overlay,\c!foregroundcolor=\@@ducolor,\c!foregroundstyle=\@@dustyle,#2]%
-% {#3}}%
-% \endgroup
-% \framed
-% [\c!offset=\v!overlay,
-% \c!frame=\v!off,
-% \c!background={\v!foreground,\??layeredtextlayer},
-% \c!width=\d_pack_layeredtexts_width,
-% \c!height=\d_pack_layeredtexts_height]%
-% {\flushnextbox}%
-% \egroup}%
-% \hbox}
-%
-% maybe faster but more code:
\installcorenamespace {layeredtext}
\installcorenamespace {layeredtextlayer}
@@ -531,15 +430,16 @@
% [frame=on,offset=0pt]
% {gans}
% {\externalfigure[koe][width=3cm]}
-
-\def\ornamenttext
- {\dodoubleempty\doornamenttext}
-\def\doornamenttext[#1][#2]%
+\unexpanded\def\ornamenttext
+ {\dodoubleempty\pack_ornament_text}
+
+\def\pack_ornament_text[#1][#2]%
{\bgroup
\doifassignmentelse{#1}
- {\getparameters[\s!dummy][\c!alternative=\v!a,#1]%
- \doifelse\dummyalternative\v!a
+ {\letdummyparameter\c!alternative\v!a
+ \getdummyparameters[#1]%
+ \doifelse{\directdummyparameter\c!alternative}\v!a
{\egroup\collectedtext}%
{\egroup\layeredtext }%
[#1][#2]}%
@@ -549,7 +449,7 @@
{\dotripleempty\dodefineornament}
\def\dodefineornament[#1][#2][#3]%
- {\setvalue{#1}{\doornamenttext[#2][#3]}}
+ {\setuvalue{#1}{\pack_ornament_text[#2][#3]}}
% \defineornament
% [affiliation]
@@ -582,13 +482,12 @@
% [background=color,style=\ss\tfxx,backgroundcolor=white,offset=0pt]
%
% \affiliation{drawing}{\externalfigure[hakker][width=3cm]}
-
-% watch out: adapt to the new layer corner anchors and columnset
-\newcount\nofbleeds % per pag
+\newcount\c_pack_boxes_bleeds
+
+\installcorenamespace {bleeding}
-\unexpanded\def\setupbleeding
- {\dodoubleempty\getparameters[\??bg]}
+\installdirectcommandhandler \??bleeding {bleeding} % \??bleeding
\setupbleeding
[\c!location=l,
@@ -597,92 +496,117 @@
\c!height=3cm,
\c!offset=2mm,
\c!page=\v!no,
- \c!voffset=\@@bgoffset,
- \c!hoffset=\@@bgoffset]
+ \c!voffset=\scratchoffset, % is set to \bleedingparameter\c!offset
+ \c!hoffset=\scratchoffset] % which often saves one resolve
-\unexpanded\def\bleed
- {\dosingleempty\pack_boxes_bleed}
+\def\bleedwidth {\the\hsize} % these are global !
+\def\bleedheight{\the\vsize} % these are global !
-\def\bleedwidth {\the\hsize}%
-\def\bleedheight{\the\vsize}%
+\newconditional\c_pack_boxes_l
+\newconditional\c_pack_boxes_r
+\newconditional\c_pack_boxes_t
+\newconditional\c_pack_boxes_b
-\def\pack_boxes_bleed[#1]#2%
+\unexpanded\def\bleed
{\hbox\bgroup
+ \dosingleempty\pack_boxes_bleed}
+
+\installcorenamespace{bleedinglocation}
+
+\setvalue{\??bleedinglocation t}{\settrue\c_pack_boxes_t\scratchhoffset\zeropoint}
+\setvalue{\??bleedinglocation b}{\settrue\c_pack_boxes_b\scratchhoffset\zeropoint}
+\setvalue{\??bleedinglocation l}{\settrue\c_pack_boxes_l\scratchvoffset\zeropoint}
+\setvalue{\??bleedinglocation r}{\settrue\c_pack_boxes_r\scratchvoffset\zeropoint}
+\setvalue{\??bleedinglocation bl}{\settrue\c_pack_boxes_l\settrue\c_pack_boxes_b}
+\setvalue{\??bleedinglocation lb}{\settrue\c_pack_boxes_l\settrue\c_pack_boxes_b}
+\setvalue{\??bleedinglocation br}{\settrue\c_pack_boxes_r\settrue\c_pack_boxes_b}
+\setvalue{\??bleedinglocation rb}{\settrue\c_pack_boxes_r\settrue\c_pack_boxes_b}
+\setvalue{\??bleedinglocation tl}{\settrue\c_pack_boxes_l\settrue\c_pack_boxes_t}
+\setvalue{\??bleedinglocation lt}{\settrue\c_pack_boxes_l\settrue\c_pack_boxes_t}
+\setvalue{\??bleedinglocation tr}{\settrue\c_pack_boxes_r\settrue\c_pack_boxes_t}
+\setvalue{\??bleedinglocation rt}{\settrue\c_pack_boxes_r\settrue\c_pack_boxes_t}
+
+\def\pack_boxes_bleed[#1]%
+ {\global\advance\c_pack_boxes_bleeds\plusone
+ %
\xdef\bleedwidth {\the\hsize}%
\xdef\bleedheight{\the\vsize}%
- \global\advance\nofbleeds\plusone
- \getparameters[\??bg][#1]%
- \!!doneafalse % left
- \!!donebfalse % right
- \!!donecfalse % top
- \!!donedfalse % bottom
- % replace this part ! todo: default location
- \processaction
- [\@@bglocation]
- [ t=>\!!donectrue\let\@@bghoffset\!!zeropoint,
- b=>\!!donedtrue\let\@@bghoffset\!!zeropoint,
- l=>\!!doneatrue\let\@@bgvoffset\!!zeropoint,
- r=>\!!donebtrue\let\@@bgvoffset\!!zeropoint,
- bl=>\!!doneatrue\!!donedtrue,
- lb=>\!!doneatrue\!!donedtrue,
- br=>\!!donebtrue\!!donedtrue,
- rb=>\!!donebtrue\!!donedtrue,
- tl=>\!!doneatrue\!!donectrue,
- lt=>\!!doneatrue\!!donectrue,
- tr=>\!!donebtrue\!!donectrue,
- rt=>\!!donebtrue\!!donectrue]%
- \doifelse\@@bgstretch\v!yes\donetrue\donefalse
- \scratchdimen\@@bgwidth
- \edef\currentbgposition {\??bg:\number\nofbleeds}%
+ %
+ \edef\currentbgposition {bleed:\number\c_pack_boxes_bleeds}%
\edef\currentpageposition{page:0}% todo: per page
- \ifdone
- \if!!donea
- \advance\scratchdimen\dimexpr \MPx\currentbgposition-\MPx\currentpageposition\relax
- \else\if!!doneb
- \scratchdimen\dimexpr\paperwidth-\MPx\currentbgposition+\MPx\currentpageposition\relax % not checked
- \fi\fi
- \fi
- \advance\scratchdimen\@@bghoffset
- \xdef\bleedwidth{\the\scratchdimen}%
- \scratchdimen\@@bgheight
- \ifdone
- \if!!donec
- \scratchdimen\dimexpr\paperheight-\MPy\currentbgposition+\MPy\currentpageposition\relax % not checked
- \else\if!!doned
- \advance\scratchdimen\dimexpr \MPy\currentbgposition-\MPy\currentpageposition\relax % not checked
- \fi\fi
- \fi
- \advance\scratchdimen\@@bgvoffset
- \xdef\bleedheight{\the\scratchdimen}%
%
- \bgroup
- \hsize\bleedwidth
- \vsize\bleedheight
- \global\setbox\globalscratchbox\hbox{#2}%
- \egroup
- \setbox\scratchbox\box\globalscratchbox
+ \setupcurrentbleeding[#1]%
%
- \doif\@@bgpage\v!yes
- {\setbox\scratchbox\topskippedbox{\box\scratchbox}}%
- \setbox\scratchbox\hbox to \@@bgwidth
- {\if!!donea\hss\fi\box\scratchbox\if!!doneb\hss\fi}%
- \if!!doned
- \setbox\scratchbox\hbox
- {\lower\bleedheight\hbox{\raise\@@bgheight\box\scratchbox}}%
+ \scratchwidth \bleedingparameter\c!width
+ \scratchheight \bleedingparameter\c!height
+ \scratchoffset \bleedingparameter\c!offset
+ \scratchhoffset\bleedingparameter\c!hoffset
+ \scratchvoffset\bleedingparameter\c!voffset
+ %
+ \setfalse\c_pack_boxes_l % left
+ \setfalse\c_pack_boxes_r % right
+ \setfalse\c_pack_boxes_t % top
+ \setfalse\c_pack_boxes_b % bottom
+ %
+ \csname\??bleedinglocation\bleedingparameter\c!location\endcsname
+ %
+ \doifelse{\bleedingparameter\c!stretch}\v!yes\donetrue\donefalse
+ %
+ \xdef\bleedwidth{\dimexpr
+ \ifdone
+ \ifconditional\c_pack_boxes_l
+ \scratchwidth+\MPx\currentbgposition-\MPx\currentpageposition
+ \else\ifconditional\c_pack_boxes_r
+ \paperwidth -\MPx\currentbgposition+\MPx\currentpageposition % not checked
+ \else
+ \scratchwidth
+ \fi\fi
+ \else
+ \scratchwidth
+ \fi+\scratchhoffset}%
+ \xdef\bleedheight{\dimexpr
+ \ifdone
+ \ifconditional\c_pack_boxes_t
+ \paperheight -\MPy\currentbgposition+\MPy\currentpageposition % not checked
+ \else\ifconditional\c_pack_boxes_b
+ \scratchheight+\MPy\currentbgposition-\MPy\currentpageposition % not checked
+ \else
+ \scratchheight
+ \fi\fi
+ \else
+ \scratchheight
+ \fi+\scratchvoffset}%
+ \dowithnextboxcontentcs\pack_boxes_bleed_settings\pack_boxes_bleed_finish\hbox}
+
+\def\pack_boxes_bleed_settings
+ {\hsize\bleedwidth
+ \vsize\bleedheight}
+
+\def\pack_boxes_bleed_finish
+ {\doif{\bleedingparameter\c!page}\v!yes
+ {\setbox\nextbox\topskippedbox{\box\nextbox}}%
+ \setbox\nextbox\hbox to \scratchwidth
+ {\ifconditional\c_pack_boxes_l\hss\fi
+ \box\nextbox
+ \ifconditional\c_pack_boxes_r\hss\fi}%
+ \ifconditional\c_pack_boxes_b
+ \setbox\nextbox\hbox
+ {\lower\bleedheight\hbox{\raise\scratchheight\box\nextbox}}%
\fi
- \wd\scratchbox\@@bgwidth
- \ht\scratchbox\@@bgheight
- \dp\scratchbox\zeropoint
+ \wd\nextbox\scratchwidth
+ \ht\nextbox\scratchheight
+ \dp\nextbox\zeropoint
\ifdone
- \hpos\currentbgposition{\box\scratchbox}%
+ \hpos\currentbgposition{\box\nextbox}%
\else
- \box\scratchbox
+ \box\nextbox
\fi
\egroup}
-\setupbleeding[\c!stretch=\v!yes]
-
-\defineexternalfigure[bleed][\c!width=\bleedwidth,\c!height=\bleedheight] % should be \v!bleed
+\defineexternalfigure
+ [bleed] % should be \v!bleed
+ [\c!width=\bleedwidth,
+ \c!height=\bleedheight]
% \placefigure[left]{none}
% {\bleed[width=5cm,height=3cm,location=lt]{\externalfigure[koe][bleed]}}
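Relating the commented figure example to the setup defaults above, a hedged sketch that only uses keys visible in this file (width, height, location, offset, stretch); the figure name is the placeholder used elsewhere in the examples:

\starttyping
\setupbleeding[offset=3mm,stretch=yes]
\placefigure[left]{none}
  {\bleed[width=5cm,height=3cm,location=lt]{\externalfigure[cow][bleed]}}
\stoptyping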
@@ -717,7 +641,7 @@
% tricky: offsets apply to both the layer and the framed; it makes sense to
% only apply the offset to ...
-\def\setlayerframed
+\unexpanded\def\setlayerframed
{\dotripleempty\pack_layers_set_framed}
\def\pack_layers_set_framed
@@ -741,24 +665,22 @@
{\setlayer[#1][#2]%
\normalframedwithsettings[#3]}
-\def\setlayertext
+\unexpanded\def\setlayertext
{\dotripleempty\pack_layers_set_text}
\def\pack_layers_set_text[#1][#2][#3]%
{\bgroup
- \getparameters
- [\??lx]
- [\c!align=,
- \c!width=\hsize,
- \c!color=,
- \c!style=,
- #3]%
+ \letdummyparameter\c!align\empty
+ \letdummyparameter\c!width\hsize
+ \letdummyparameter\c!color\empty
+ \letdummyparameter\c!style\empty
+ \getdummyparameters[#3]%
\dowithnextboxcontent
{\forgetall
- \hsize\@@lxwidth
- \normalexpanded{\setupalign[\@@lxalign]}%
- \dousestyleparameter\@@lxstyle}
- {\setlayer[#1][#2]{\strut\dousecolorparameter\@@lxcolor\flushnextbox}%
+ \hsize\directdummyparameter\c!width
+ \normalexpanded{\setupalign[\directdummyparameter\c!align]}%
+ \dousestyleparameter{\directdummyparameter\c!style}}
+ {\setlayer[#1][#2]{\strut\dousecolorparameter{\directdummyparameter\c!color}\flushnextbox}% maybe expand the color
\egroup}%
\vtop}
@@ -804,22 +726,6 @@
[\v!middle]
[\c!corner=\v!middle,\c!location=\v!middle]
-% \definelayerpreset
-% [\v!middle\v!top]
-% [\c!location=\v!bottom,\c!hoffset=.5\layerwidth]
-
-% \definelayerpreset
-% [\v!middle\v!bottom]
-% [\c!location=\v!top,\c!hoffset=.5\layerwidth,\c!voffset=\layerheight]
-
-% \definelayerpreset
-% [\v!middle\v!left]
-% [\c!location=\v!right,\c!voffset=.5\layerheight]
-
-% \definelayerpreset
-% [\v!middle\v!right]
-% [\c!location=\v!left,\c!hoffset=\layerwidth,\c!voffset=.5\layerheight]
-
\definelayerpreset
[\v!middle\v!top]
[\c!location=\v!bottom,\c!corner=\v!top,\c!dx=.5\layerwidth]
@@ -901,93 +807,133 @@
% left/right/top/bottomoffset -> dimensions change
% x/y | method=fixed -> dimensions don't change
+\installcorenamespace{offsetbox}
+
+\installautocommandhandler \??offsetbox {offsetbox} \??offsetbox
+
+\setupoffsetbox
+ [\c!x=\zeropoint,
+ \c!y=\zeropoint,
+ \c!width=\wd\nextbox,
+ \c!height=\ht\nextbox,
+ \c!depth=\dp\nextbox,
+ \c!location=,
+ \c!leftoffset=\zeropoint,
+ \c!rightoffset=\zeropoint,
+ \c!topoffset=\zeropoint,
+ \c!bottomoffset=\zeropoint,
+ \c!method=]
+
+\let\setupoffset\setupoffsetbox
+
\unexpanded\def\offsetbox{\dosingleempty\pack_boxes_offset_box}
\unexpanded\def\offset {\dosingleempty\pack_boxes_offset}
-\def\pack_boxes_offset_box[#1]{\bgroup\dowithnextbox{\pack_boxes_offsetfinish{#1}}}
-\def\pack_boxes_offset [#1]{\bgroup\dowithnextbox{\pack_boxes_offsetfinish{#1}}\hbox}
-
-\def\pack_boxes_offsetfinish#1%
- {\getparameters[\??ox]
- [\c!x=\zeropoint,
- \c!y=\zeropoint,
- \c!width=\nextboxwd,
- \c!height=\nextboxht,
- \c!depth=\nextboxdp,
- \c!location=,
- \c!leftoffset=\zeropoint,
- \c!rightoffset=\zeropoint,
- \c!topoffset=\zeropoint,
- \c!bottomoffset=\zeropoint,
- \c!method=,
- #1]%
- \donefalse
- \ifdim\@@oxleftoffset =\zeropoint\else\donetrue\fi
- \ifdim\@@oxrightoffset =\zeropoint\else\donetrue\fi
- \ifdim\@@oxtopoffset =\zeropoint\else\donetrue\fi
- \ifdim\@@oxbottomoffset=\zeropoint\else\donetrue\fi
+\def\pack_boxes_offset_box[#1]%
+ {\bgroup
+ \pack_boxes_offset_check[#1]%
+ \dowithnextboxcs\pack_boxes_offsetfinish}
+
+\def\pack_boxes_offset[#1]%
+ {\bgroup
+ \pack_boxes_offset_check[#1]%
+ \dowithnextboxcs\pack_boxes_offsetfinish\hbox}
+
+\newcount\c_pack_boxes_offset_level
+
+\def\pack_boxes_offset_check
+ {\advance\c_pack_boxes_offset_level\plusone
+ \edef\currentoffsetbox{\the\c_pack_boxes_offset_level}%
+ \checkoffsetboxparent
+ \setupcurrentoffsetbox}
+
+\def\pack_boxes_offsetfinish
+ {\donefalse
+ \scratchxposition \offsetboxparameter\c!x
+ \scratchyposition \offsetboxparameter\c!y
+ \scratchleftoffset \offsetboxparameter\c!leftoffset
+ \scratchrightoffset \offsetboxparameter\c!rightoffset
+ \scratchtopoffset \offsetboxparameter\c!topoffset
+ \scratchbottomoffset\offsetboxparameter\c!bottomoffset
+ \relax % really needed
+ \ifdim\scratchleftoffset =\zeropoint\else\donetrue\fi
+ \ifdim\scratchrightoffset =\zeropoint\else\donetrue\fi
+ \ifdim\scratchtopoffset =\zeropoint\else\donetrue\fi
+ \ifdim\scratchbottomoffset=\zeropoint\else\donetrue\fi
\ifdone
- \doif\@@oxmethod\v!fixed % new
- {\ifdim\@@oxleftoffset=\zeropoint
- \ifdim\@@oxrightoffset=\zeropoint \else
- \edef\@@oxx{\the\dimexpr-\@@oxrightoffset}%
- \let\@@oxrightoffset\zeropoint
- \fi
- \else
- \let\@@oxx\@@oxleftoffset
- \let\@@oxleftoffset\zeropoint
- \fi
- \ifdim\@@oxtopoffset=\zeropoint
- \ifdim\@@oxbottomoffset=\zeropoint \else
- \edef\@@oxy{\the\dimexpr-\@@oxbottomoffset}%
- \let\@@oxbottomoffset\zeropoint
- \fi
- \else
- \let\@@oxy\@@oxtopoffset
- \let\@@oxtopoffset\zeropoint
- \fi
- \donefalse}%
+ \edef\p_method{\offsetboxparameter\c!method}%
+ \ifx\p_method\v!fixed % new
+ \ifdim\scratchleftoffset=\zeropoint
+ \ifdim\scratchrightoffset=\zeropoint \else
+ \scratchxposition-\scratchrightoffset
+ \scratchrightoffset\zeropoint
+ \fi
+ \else
+ \scratchxposition\scratchleftoffset
+ \scratchleftoffset\zeropoint
+ \fi
+ \ifdim\scratchtopoffset=\zeropoint
+ \ifdim\scratchbottomoffset=\zeropoint \else
+ \scratchyposition-\scratchbottomoffset
+ \scratchbottomoffset\zeropoint
+ \fi
+ \else
+ \scratchyposition\scratchtopoffset
+ \scratchtopoffset\zeropoint
+ \fi
+ \donefalse
+ \fi
\fi
\ifdone
\setbox\nextbox\vbox
{\forgetall % already done
\offinterlineskip
- \vskip\@@oxtopoffset
+ \kern\scratchtopoffset
\hbox
- {\hskip\@@oxleftoffset
+ {\kern\scratchleftoffset
\box\nextbox
- \hskip\@@oxrightoffset}%
- \vskip\@@oxbottomoffset}%
+ \kern\scratchrightoffset}%
+ \kern\scratchbottomoffset}%
\ht\nextbox\htdp\nextbox
\dp\nextbox\zeropoint
\fi
- \freezedimenmacro\@@oxwidth
- \freezedimenmacro\@@oxheight
- \freezedimenmacro\@@oxdepth
+ \scratchwidth \offsetboxparameter\c!width
+ \scratchheight\offsetboxparameter\c!height
+ \scratchdepth \offsetboxparameter\c!depth
+ \edef\p_location{\offsetboxparameter\c!location}%
\setbox\nextbox\hbox
- {\hskip\@@oxx\lower\@@oxy\hbox
- {\doifelsenothing\@@oxlocation
- {\box\nextbox}
- {\alignedbox[\@@oxlocation]\hbox{\box\nextbox}}}}%
- \wd\nextbox\@@oxwidth
- \ht\nextbox\@@oxheight
- \dp\nextbox\@@oxdepth
+ {\kern\scratchxposition
+ \lower\scratchyposition\hbox
+ {\ifx\p_location\empty
+ \box\nextbox
+ \else
+ \alignedbox[\p_location]\hbox{\box\nextbox}%
+ \fi}}%
+ \wd\nextbox\scratchwidth
+ \ht\nextbox\scratchheight
+ \dp\nextbox\scratchdepth
\box\nextbox
\egroup}
+%D \starttyping
+%D \framed[offset=overlay]{\offset[leftoffset=1cm]
+%D {\framed[offset=overlay]{\offset[rightoffset=1cm]
+%D {\externalfigure[koe][width=1cm]}}}}
+%D
+%D \blank
+%D
+%D \framed[offset=overlay]{\offset[leftoffset=1cm] {\externalfigure[koe][width=5cm]}} \blank
+%D \framed[offset=overlay]{\offset[rightoffset=1cm] {\externalfigure[koe][width=5cm]}} \blank
+%D \framed[offset=overlay]{\offset[topoffset=1cm] {\externalfigure[koe][width=5cm]}} \blank
+%D \framed[offset=overlay]{\offset[bottomoffset=1cm]{\externalfigure[koe][width=5cm]}} \blank
+%D \stoptyping
+
% \useMPlibrary[pre] \setupbackgrounds[page][background=pagegrid]
%
-% \placefigure[left,none]{}{\offset[leftoffset=1cm]{\externalfigure[koe][breedte=3cm]}}
-% \input tufte
-% \placefigure[left,none]{}{\offset[rightoffset=1cm]{\externalfigure[koe][breedte=3cm]}}
-% \input tufte
-% \placefigure[left,none]{}{\offset[topoffset=1cm]{\externalfigure[koe][breedte=3cm]}}
-% \input tufte
-% \placefigure[left,none]{}{\offset[bottomoffset=1cm]{\externalfigure[koe][breedte=3cm]}}
-% \input tufte
-
-%\ruledhbox{\offsetbox[x=-1cm,y=-1cm,location=c]
-% {\framed[width=4cm,height=4cm]{x}}}
+% \placefigure[left,none]{}{\offset[leftoffset=1cm] {\externalfigure[cow][width=3cm]}} \input tufte
+% \placefigure[left,none]{}{\offset[rightoffset=1cm] {\externalfigure[cow][width=3cm]}} \input tufte
+% \placefigure[left,none]{}{\offset[topoffset=1cm] {\externalfigure[cow][width=3cm]}} \input tufte
+% \placefigure[left,none]{}{\offset[bottomoffset=1cm]{\externalfigure[cow][width=3cm]}} \input tufte
% Some old code:
%
@@ -1020,15 +966,14 @@
\unexpanded\def\phantombox[#1]% == \framed[\c!empty=\v!yes,\c!offset=\v!overlay,#1]{}
{\hbox\bgroup
- \getparameters
- [\??ol] % brrr
- [\c!width=\zeropoint,%
- \c!height=\zeropoint,%
- \c!depth=\zeropoint,#1]%
+ \letdummyparameter\c!width \zeropoint
+ \letdummyparameter\c!height\zeropoint
+ \letdummyparameter\c!depth \zeropoint
+ \getdummyparameters[#1]%
\setbox\scratchbox\emptyhbox
- \wd\scratchbox\@@olwidth
- \ht\scratchbox\@@olheight
- \dp\scratchbox\@@oldepth
+ \wd\scratchbox\directdummyparameter\c!width
+ \ht\scratchbox\directdummyparameter\c!height
+ \dp\scratchbox\directdummyparameter\c!depth
\box\scratchbox
\egroup}
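A minimal sketch of \phantombox, using only the keys defaulted above (the values are illustrative); \ruledhbox is used here just to make the otherwise invisible box visible:

\starttyping
\ruledhbox{\phantombox[width=3cm,height=1cm,depth=0pt]}
\stoptyping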
@@ -1044,17 +989,17 @@
\ifcase#1\relax
% just one
\else
- \scratchdimen#2\divide\scratchdimen\wd\nextbox\count0\scratchdimen\advance\count0\plusone
- \scratchdimen#3\divide\scratchdimen\ht\nextbox\count2\scratchdimen\advance\count2\plusone
+ \scratchdimen#2\divide\scratchdimen\wd\nextbox\scratchnx\scratchdimen\advance\scratchnx\plusone\relax
+ \scratchdimen#3\divide\scratchdimen\ht\nextbox\scratchny\scratchdimen\advance\scratchny\plusone\relax
% to be considered: methods
\ifcase#1%
\or % x and y
- \setbox\nextbox\hbox{\dorecurse{\count0}{\copy\nextbox}}%
- \setbox\nextbox\vbox{\dorecurse{\count2}{\copy\nextbox\endgraf}}%
+ \setbox\nextbox\hbox{\dorecurse\scratchnx{\copy\nextbox}}%
+ \setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
\or % x
- \setbox\nextbox\hbox{\dorecurse{\count0}{\copy\nextbox}}%
+ \setbox\nextbox\hbox{\dorecurse\scratchnx{\copy\nextbox}}%
\or % y
- \setbox\nextbox\vbox{\dorecurse{\count2}{\copy\nextbox\endgraf}}%
+ \setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
\fi
\fi
\ifdim\wd\nextbox>#2\relax
diff --git a/Master/texmf-dist/tex/context/base/pack-com.mkiv b/Master/texmf-dist/tex/context/base/pack-com.mkiv
index 4d50bf7c70c..d12c0d90aef 100644
--- a/Master/texmf-dist/tex/context/base/pack-com.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-com.mkiv
@@ -152,6 +152,11 @@
%
% faster
+\unexpanded\def\pack_common_content_start{\bgroup\ignorespaces}
+\unexpanded\def\pack_common_content_stop {\removeunwantedspaces\egroup}
+\unexpanded\def\pack_common_caption_start{\bgroup\ignorespaces}
+\unexpanded\def\pack_common_caption_stop {\removeunwantedspaces\egroup}
+
\unexpanded\def\stopcombination
{\bgroup\normalexpanded{\egroup{}\ctxcommand{ntimes("{}{}",\number\c_pack_combinations_n)}}%
\dostoptagged
@@ -177,10 +182,10 @@
%
\forgetall
%
- \let\startcontent\bgroup
- \let\stopcontent \egroup
- \let\startcaption\bgroup
- \let\stopcaption \egroup
+ \let\startcontent\pack_common_content_start
+ \let\stopcontent \pack_common_content_stop
+ \let\startcaption\pack_common_caption_start
+ \let\stopcaption \pack_common_caption_stop
%
\edef\p_height {\combinationparameter\c!height}%
\edef\p_width {\combinationparameter\c!width}%
@@ -210,7 +215,7 @@
\alignmark\alignmark
\m_pack_combinations_rightfiller
\aligntab
- \tabskip\zeropoint \!!plus 1fill
+ \tabskip\zeropoint \s!plus 1fill
\alignmark\alignmark
\cr
\pack_combinations_pickup}
@@ -286,7 +291,7 @@
{\aligntab
\aligntab
\aligntab
- \hskip\p_distance
+ \kern\p_distance
\aligntab
\pack_combinations_pickup}
@@ -562,18 +567,26 @@
\edef\p_location{\pairedboxparameter\c!location}%
\edef\p_n {\pairedboxparameter\c!n}%
%
- \let\startcontent\bgroup
- \let\stopcontent \egroup
- \let\startcaption\bgroup
- \let\stopcaption \egroup
+ \let\startcontent\pack_common_content_start
+ \let\stopcontent \pack_common_content_stop
+ \let\startcaption\pack_common_caption_start
+ \let\stopcaption \pack_common_caption_stop
%
\global\setsystemmode{pairedbox}%
\pack_pairedboxes_before
- \dowithnextboxcs\pack_pairedboxes_first\hbox}
+ \dogotopar\pack_pairedboxes_first_pickup}
+
+\def\pack_pairedboxes_first_pickup
+ {\dowithnextboxcs\pack_pairedboxes_first\hbox
+ \bgroup
+ \let\next=}
\def\pack_pairedboxes_first
{\pack_pairedboxes_between
- \dowithnextboxcs\pack_pairedboxes_second\vbox
+ \dogotopar\pack_pairedboxes_second_pickup}
+
+\def\pack_pairedboxes_second_pickup
+ {\dowithnextboxcs\pack_pairedboxes_second\vbox
\bgroup
\pack_pairedboxes_inside_second
\let\next=}
@@ -614,12 +627,12 @@
\def\pack_pairedboxes_flush_left
{\box\b_pack_pairedboxes_second
- \hskip\pairedboxparameter\c!distance
+ \kern\pairedboxparameter\c!distance
\box\b_pack_pairedboxes_first}
\def\pack_pairedboxes_flush_right
{\box\b_pack_pairedboxes_first
- \hskip\pairedboxparameter\c!distance
+ \kern\pairedboxparameter\c!distance
\box\b_pack_pairedboxes_second}
\def\pack_pairedboxes_flush_top
@@ -663,8 +676,8 @@
\setvalue{\??pairedboxalign\v!middle}% 4
{\let\pack_pairedboxes_align_l\hss
\let\pack_pairedboxes_align_r\hss
- \let\pack_pairedboxes_align_t\hss
- \let\pack_pairedboxes_align_b\hss}
+ \let\pack_pairedboxes_align_t\vss
+ \let\pack_pairedboxes_align_b\vss}
\setvalue{\??pairedboxalign\v!bottom}{\getvalue{\??pairedboxalign\v!low }}
\setvalue{\??pairedboxalign \v!top}{\getvalue{\??pairedboxalign\v!high}}
@@ -789,10 +802,10 @@
\unexpanded\def\placeontopofeachother{\bgroup\dowithnextboxcs\pack_topofeachother_one\hbox}
\unexpanded\def\placesidebyside {\bgroup\dowithnextboxcs\pack_sidebyside_one \hbox}
-\def\pack_topofeachother_one{\bgroup\setbox0\box\nextbox\dowithnextboxcs\pack_topofeach_two \hbox}
-\def\pack_sidebyside_one {\bgroup\setbox0\box\nextbox\dowithnextboxcs\pack_sidebyside_two\hbox}
+\def\pack_topofeachother_one{\bgroup\setbox0\box\nextbox\dowithnextboxcs\pack_topofeachother_two\hbox}
+\def\pack_sidebyside_one {\bgroup\setbox0\box\nextbox\dowithnextboxcs\pack_sidebyside_two \hbox}
-\def\pack_topofeachother_two{\setbox2\box\nextbox\halign{\hss####\hss\cr\box0\cr\box2\cr}\egroup\egroup}
-\def\pack_sidebyside_two {\setbox2\box\nextbox\valign{\vss####\vss\cr\box0\cr\box2\cr}\egroup\egroup}
+\def\pack_topofeachother_two{\setbox2\box\nextbox\halign{\hss##\hss\cr\box0\cr\box2\cr}\egroup\egroup}
+\def\pack_sidebyside_two {\setbox2\box\nextbox\valign{\vss##\vss\cr\box0\cr\box2\cr}\egroup\egroup}
\protect \endinput
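A usage sketch for the two stacking helpers defined above; each grabs two braced boxes, and the \framed content is purely illustrative:

\starttyping
\placeontopofeachother {\framed{one}} {\framed{two}}
\placesidebyside       {\framed{one}} {\framed{two}}
\stoptyping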
diff --git a/Master/texmf-dist/tex/context/base/pack-cut.mkiv b/Master/texmf-dist/tex/context/base/pack-cut.mkiv
new file mode 100644
index 00000000000..63f4524a8dd
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/pack-cut.mkiv
@@ -0,0 +1,163 @@
+%D \module
+%D [ file=pack-cut, % comes from core-vis/trac-vis
+%D version=1996.06.01,
+%D title=\CONTEXT\ Packaging Macros,
+%D subtitle=Cut boxes,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+%D \macros
+%D {makecutbox, cuthbox, cutvbox, cutvtop}
+%D
+%D Although mainly used for marking the page, these macros can
+%D also serve local use.
+%D
+%D \startbuffer
+%D \setbox0=\vbox{a real \crlf vertical box} \makecutbox0
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D This marked \type{\vbox} shows up as:
+%D
+%D \startlinecorrection
+%D \getbuffer
+%D \stoplinecorrection
+%D
+%D The alternative macros are used as:
+%D
+%D \startbuffer
+%D \cuthbox{a made cut box}
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D This is typeset as:
+%D
+%D \startlinecorrection
+%D \getbuffer
+%D \stoplinecorrection
+%D
+%D By setting the next macros one can influence the length of
+%D the marks as well as the horizontal and vertical divisions.
+
+\newdimen\d_pack_cutmarks_width
+\newdimen\d_pack_cutmarks_height
+\newdimen\d_pack_cutmarks_depth
+
+\newcount\horizontalcutmarks \horizontalcutmarks \plustwo
+\newcount\verticalcutmarks \verticalcutmarks \plustwo
+\newcount\cutmarkoffset \cutmarkoffset \plusone
+
+\let\cutmarksymbol \relax
+\let\cutmarktoptext \empty
+\let\cutmarkbottomtext \empty
+\let\cutmarkhoffset \empty
+\let\cutmarkvoffset \empty
+\def\cutmarklength {2\bodyfontsize}
+\def\cutmarkrulethickness{\onepoint}
+
+\unexpanded\def\horizontalcuts
+ {\normalhbox to \d_pack_cutmarks_width
+ {\dorecurse\horizontalcutmarks{\vrule\s!width\cutmarkrulethickness\s!height\cutmarklength\normalhfill}%
+ \unskip}}
+
+\unexpanded\def\verticalcuts
+ {\normalvbox to \dimexpr\d_pack_cutmarks_height+\d_pack_cutmarks_depth\relax
+ {\hsize\cutmarklength
+ \dorecurse\verticalcutmarks{\vrule\s!height\cutmarkrulethickness\s!width\hsize\normalvfill}%
+ \unskip}}
+
+\unexpanded\def\baselinecuts
+ {\ifdim\d_pack_cutmarks_depth>\zeropoint
+ \normalvbox to \dimexpr\d_pack_cutmarks_height+\d_pack_cutmarks_depth\relax
+ {\hsize\dimexpr\cutmarklength/2\relax
+ \normalvskip\zeropoint\s!plus\d_pack_cutmarks_height
+ \vrule\s!height\cutmarkrulethickness\s!width\hsize
+ \normalvskip\zeropoint\s!plus\d_pack_cutmarks_depth}%
+ \fi}
+
+\unexpanded\def\cutmarksymbols#1%
+ {\normalhbox to \d_pack_cutmarks_width
+ {\setbox\scratchbox\normalhbox to \cutmarklength
+ {\normalhss\infofont\cutmarksymbol\normalhss}%
+ \normalhss
+ \normalvbox to \cutmarklength
+ {\scratchdimen\dimexpr\cutmarklength/2\relax
+ \scratchskip \ifx\cutmarkhoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkhoffset\fi
+ \normalvss
+ \hbox to \d_pack_cutmarks_width
+ {\llap{\copy\scratchbox\normalhskip\scratchskip}%
+ \normalhskip\scratchdimen\hss\infofont#1\hss\normalhskip\scratchdimen
+ \rlap{\normalhskip\scratchskip\copy\scratchbox}}%
+ \normalvss}%
+ \normalhss}}
+
+\unexpanded\def\makecutbox#1%
+ {\bgroup
+ \d_pack_cutmarks_height\ht#1%
+ \d_pack_cutmarks_depth \dp#1%
+ \d_pack_cutmarks_width \wd#1%
+ \setbox#1\normalhbox
+ {\dontcomplain
+ \forgetall
+ \boxmaxdepth\maxdimen
+ \offinterlineskip
+ \scratchdimen\dimexpr\cutmarklength/2\relax
+ \hsize\d_pack_cutmarks_width
+ \setbox\scratchbox\normalvbox
+ {\setbox\scratchbox\normalhbox{\horizontalcuts}%
+ \scratchskip\ifx\cutmarkvoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkvoffset\fi
+ \tlap{\copy\scratchbox\normalvskip\scratchskip}%
+ \hbox to \d_pack_cutmarks_width
+ {\scratchskip\ifx\cutmarkhoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkhoffset\fi
+ \setbox\scratchbox\normalhbox{\verticalcuts}%
+ \llap{\copy\scratchbox\normalhskip\scratchskip}%
+ \ifdim\d_pack_cutmarks_depth=\zeropoint
+ \normalhfill
+ \else
+ \bgroup
+ \setbox\scratchbox\normalhbox{\baselinecuts}%
+ \llap{\copy\scratchbox\normalhskip\scratchskip}%
+ \normalhfill
+ \rlap{\normalhskip\scratchskip\copy\scratchbox}%
+ \egroup
+ \fi
+ \rlap{\normalhskip\scratchskip\copy\scratchbox}}%
+ \blap{\normalvskip\scratchskip\copy\scratchbox}}%
+ \ht\scratchbox\d_pack_cutmarks_height
+ \dp\scratchbox\d_pack_cutmarks_depth
+ \wd\scratchbox\zeropoint
+ \startcolor[\defaulttextcolor]%
+ \box\scratchbox
+ \ifx\cutmarksymbol\relax \else
+ \setbox\scratchbox\normalvbox
+ {\scratchskip\ifx\cutmarkvoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkvoffset\fi
+ \vskip-\dimexpr\scratchskip+\cutmarklength\relax
+ \normalhbox{\cutmarksymbols\cutmarktoptext}%
+ \vskip\dimexpr\scratchskip+\d_pack_cutmarks_height+\d_pack_cutmarks_depth+\scratchskip\relax
+ \normalhbox{\cutmarksymbols\cutmarkbottomtext}}%
+ \ht\scratchbox\d_pack_cutmarks_height
+ \dp\scratchbox\d_pack_cutmarks_depth
+ \wd\scratchbox\zeropoint
+ \box\scratchbox
+ \fi
+ \stopcolor
+ \box#1}%
+ \wd#1\d_pack_cutmarks_width
+ \ht#1\d_pack_cutmarks_height
+ \dp#1\d_pack_cutmarks_depth
+ \egroup}
+
+\unexpanded\def\cuthbox{\normalhbox\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalhbox}
+\unexpanded\def\cutvbox{\normalvbox\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalvbox}
+\unexpanded\def\cutvtop{\normalvtop\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalvtop}
+
+\protect \endinput
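Beyond the buffer examples in the documentation above, the counts and macros defined in this file are the tunable knobs; a small sketch with illustrative values:

\starttyping
\horizontalcutmarks\plusfour   % four tick marks along the width
\def\cutmarklength{1cm}        % longer marks than the 2\bodyfontsize default
\cuthbox{a made cut box}
\stoptyping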
diff --git a/Master/texmf-dist/tex/context/base/pack-lyr.mkiv b/Master/texmf-dist/tex/context/base/pack-lyr.mkiv
index 5b23a255d42..a891c998d4a 100644
--- a/Master/texmf-dist/tex/context/base/pack-lyr.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-lyr.mkiv
@@ -223,7 +223,7 @@
\def\pack_layers_set[#1][#2][#3]% #4 == box do \fi is ok
{\bgroup
\edef\currentlayer{#1}%
- \edef\p_pack_layers_state{\layerparameter{#1}\c!state}%
+ \edef\p_pack_layers_state{\layerparameter\c!state}%
\ifx\p_pack_layers_state\v!stop
\dowithnextboxcs\egroup\hbox
\else\ifthirdargument
diff --git a/Master/texmf-dist/tex/context/base/pack-mrl.mkiv b/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
index f3f3d11d9a8..7c3f08825b4 100644
--- a/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
@@ -13,26 +13,24 @@
\writestatus{loading}{ConTeXt Packaging Macros / More Rules}
-%D This module needs an overhaul.
+%D The (re)implementation of margin rules has been moved elsewhere.
\unprotect
%D \macros
%D {setupblackrules,blackrule}
%D
-%D The graphic capabilities of \TEX\ do not go beyond simple
-%D filled rules, except of course when using specials. Let's
-%D start with a warning: using this commands is far more slower
-%D than using the \TEX\ primitives \type{\hrule} and
-%D \type{\vrule}, but they save us some tokens. The
+%D The graphic capabilities of \TEX\ do not go beyond simple filled rules, except of
+%D course when using specials or, in \MKIV, manipulating node lists. Let's start with
+%D a warning: using these commands is far slower than using the \TEX\ primitives
+%D \type {\hrule} and \type {\vrule}, but they save us some tokens. The
 %D characteristics of these rule drawing commands can be set by:
%D
%D \showsetup{setupblackrules}
%D
-%D The simple command draws only one rule. Its optional
-%D argument can be used to specify the dimensions. By setting
-%D the width, height or depth to \type {max}, one gets the
-%D natural dimensions.
+%D The simple command draws only one rule. Its optional argument can be used to
+%D specify the dimensions. By setting the width, height or depth to \type {max}, one
+%D gets the natural dimensions.
%D
%D \showsetup{blackrule}
@@ -59,9 +57,9 @@
\fi\fi
\useblackrulesstyleandcolor\c!style\c!color
\vrule
- \!!width \ifx\p_width \v!max\emwidth\else\p_width \fi
- \!!height\ifx\p_height\v!max\strutht\else\p_height\fi
- \!!depth \ifx\p_depth \v!max\strutdp\else\p_depth \fi
+ \s!width \ifx\p_width \v!max\emwidth\else\p_width \fi
+ \s!height\ifx\p_height\v!max\strutht\else\p_height\fi
+ \s!depth \ifx\p_depth \v!max\strutdp\else\p_depth \fi
\egroup}
\setupblackrules
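A usage sketch of \blackrule with the keys documented above; max falls back to the natural (strut and em based) dimensions, and the explicit values are illustrative:

\starttyping
\blackrule[width=2cm,height=1ex,depth=0pt]
\blackrule[width=max,height=max,depth=max]
\stoptyping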
@@ -73,8 +71,8 @@
%D \macros
%D {blackrules}
%D
-%D One can call for a sequence of black rules, if needed
-%D equally spaced over the given width.
+%D One can call for a sequence of black rules, if needed equally spaced over the
+%D given width.
%D
%D \showsetup{blackrules}
%D
@@ -93,9 +91,8 @@
%D \getbuffer
%D \stoplines
%D
-%D We could of course have implemented this macro using
-%D \type{\leaders}, but this would probably have taken more
-%D tokens.
+%D We could of course have implemented this macro using \type {\leaders}, but this
+%D would probably have taken more tokens.
\unexpanded\def\blackrules % probably never used
{\hbox\bgroup
@@ -122,9 +119,9 @@
\useblackrulesstyleandcolor\c!style\c!color
   \dorecurse\scratchcounter % a typical case where we can use a simple loop
{\vrule
- \!!width \scratchwidth
- \!!height\scratchheight
- \!!depth \scratchdepth
+ \s!width \scratchwidth
+ \s!height\scratchheight
+ \s!depth \scratchdepth
\hskip\scratchdistance}%
\unskip
\egroup}
@@ -135,169 +132,51 @@
\c!distance=.25\emwidth,
\c!color=]
-%D Marginrules will either become obsolete or be redone.
-
-%D The next commands can be used to draw margin rules. We
-%D support two methods: \marginrule{one for in||line use} and
-%D one that acts on a paragraph. Drawing a margin rule is
-%D rather straightforward because we can use the commands that
-%D put text in the margin.
-
-\def\dodrawmarginrule
- {\setbox\scratchbox\hbox
- {\vrule\!!depth\strutdepth\!!height\strutheight\!!width\@@karulethickness}%
- \smashbox\scratchbox % no \vsmash !!!
- \box\scratchbox}
-
-\def\drawmarginrule
- {\strut\inleft{\dodrawmarginrule}}
-
-%D \macros
-%D {marginrule}
-%D
-%D The first method gobbles words and simply puts a bar in the
-%D margin. This method is not entirely robust.
-%D
-%D \showsetup{marginrule}
-
-\definecomplexorsimple\marginrule
-
-\def\simplemarginrule
- {\let\processword\drawmarginrule
- \processwords}
-
-\def\complexmarginrule[#1]%
- {\ifnum#1<\@@kalevel\relax \else
- \def\@@kadefaultwidth{#1}%
- \expandafter\simplemarginrule
- \fi}
-
-%D We need an auxiliary variable
-
-\def\@@kadefaultwidth{1}
-
-%D \macros
-%D {setupmarginrules}
-%D
-%D This macro definitions show us that we can pass an optional
-%D level, which is matched against the previous set one. The
-%D level can be set up with
-%D
-%D \showsetup{setupmarginrules}
-
-\unexpanded\def\setupmarginrules
- {\dodoubleargument\getparameters[\??ka]}
-
-%D \macros
-%D {startmarginrule}
-%D
-%D The second method collects text and reformats it afterwards,
-%D using the shapebox macros. We prevent local margin rules.
-%D
-%D \showsetup{startmarginrule}
-
-\definecomplexorsimple\startmarginrule
-
-\def\simplestartmarginrule
- {\bgroup
- \let\drawmarginrule\relax
- \let\stopmarginrule\dostopmarginrule
- \beginofshapebox}
-
-\def\complexstartmarginrule[#1]%
- {\bgroup
- \let\drawmarginrule\relax
- \ifnum#1<\@@kalevel\relax
- \let\stopmarginrule\egroup
- \else
- \def\@@kadefaultwidth{#1}%
- \let\stopmarginrule\dostopmarginrule
- \expandafter\beginofshapebox
- \fi}
-
-\def\dostopmarginrule
- {\endofshapebox
- \reshapebox
- {\hbox{\inleftmargin{\dodrawmarginrule}\box\shapebox}}%
- \flushshapebox
- \egroup}
-
-%D \startbuffer
-%D \setupmarginrules[level=5]
-%D
-%D \startmarginrule[1]
-%D First we set the level at~5. Next we typeset this first
-%D paragraph as a level~1 one. As expected no rule show up.
-%D \stopmarginrule
-%D
-%D \startmarginrule[5]
-%D The second paragraph is a level~5 one. As we can see here,
-%D the marginal rule gets a width according to its level.
-%D \stopmarginrule
-%D
-%D \startmarginrule[8]
-%D It will of course be no surprise that this third paragraph
-%D has a even thicker margin rule. This behavior can be
-%D overruled by specifying the width explictly.
-%D \stopmarginrule
-%D \stopbuffer
-%D
-%D In next example we show most features. Watch the rule
-%D thickness adapting itself to the level.
-%D
-%D \startexample
-%D \getbuffer
-%D \stopexample
-%D
-%D We just said:
-%D
-%D \typebuffer
-
%D \macros
%D {vl, hl}
%D
-%D The command \type{\vl} draws a vertical rule \vl\ with strut
-%D dimensions, multiplied with the factor specified in the
-%D optional argument. The height and depth are clipped \vl[3]
-%D to the baselinedistance. Its horizontal counterpart
-%D \type{\hl} draws a horizontal rule \hl\ with a width of 1em,
-%D multiplied with the optional factor. The horizontal rule is
-%D drawn on top of the baseline.
+%D The command \type {\vl} draws a vertical rule \vl\ with strut dimensions,
+%D multiplied by the factor specified in the optional argument. The height and
+%D depth are clipped \vl [3] to the baselinedistance. Its horizontal counterpart
+%D \type {\hl} draws a horizontal rule \hl\ with a width of 1em, multiplied by the
+%D optional factor. The horizontal rule is drawn on top of the baseline.
%D
%D \showsetup{vl}
%D \showsetup{hl}
-\unexpanded\def\dovlwdhtdp#1#2#3% used elsewhere
+\unexpanded\def\pack_rule_vl_indeed#1#2#3%
{\bgroup
\setbox\scratchbox\hbox
{\vrule
- \!!width #1\linewidth
- \!!height#2\strutht
- \!!depth #3\strutdp}%
+ \s!width #1\linewidth
+ \s!height#2\strutht
+ \s!depth #3\strutdp}%
\dp\scratchbox\strutdp
\ht\scratchbox\strutht
\box\scratchbox
\egroup}
-\def\complexvl[#1]%
- {\dovlwdhtdp\plusone{#1}{#1}}
+\def\pack_rule_vl[#1]%
+ {\pack_rule_vl_indeed{#1}{#1}{#1}}
-\def\complexhl[#1]%
+\def\pack_rule_hl[#1]%
{\hbox
{\vrule
- \!!width #1\emwidth
- \!!height\linewidth
- \!!depth \zeropoint}}
+ \s!width #1\emwidth
+ \s!height\linewidth
+ \s!depth \zeropoint}}
+
+\unexpanded\def\vl{\dosingleempty\pack_rule_vl}
+\unexpanded\def\hl{\dosingleempty\pack_rule_hl}
-\definecomplexorsimple\vl \def\simplevl{\complexvl[\plusone]}
-\definecomplexorsimple\hl \def\simplehl{\complexhl[\plusone]}
+\let\dovlwdhtdp\pack_rule_vl_indeed % used elsewhere
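A short sketch of both commands, with and without the optional factor described above:

\starttyping
a vertical rule \vl\ or a taller one \vl[3],
and a horizontal rule \hl\ or a wider one \hl[3]
\stoptyping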
%D \macros
%D {hairline, thinrule, thinrules, setupthinrules}
%D
-%D Drawing thin lines can of course easily be accomplished by
-%D the \TEX\ primitives \type{\hrule} and \type{\vrule}. The
-%D next few macros however free us from some specifications.
+%D Drawing thin lines can of course easily be accomplished by the \TEX\
+%D primitives \type{\hrule} and \type{\vrule}. The next few macros however
+%D free us from some specifications.
%D
%D \startbuffer
%D some text
@@ -338,112 +217,158 @@
%D
%D which looks like: \thinrules[n=2]
-\newconstant\ruletype
+\installcorenamespace{thinrules}
+\installcorenamespace{thinrulealternatives}
-\def\thinrule
+\installdirectcommandhandler \??thinrules {thinrules}
+
+\setupthinrules
+ [\c!interlinespace=\v!small,
+ \c!n=3,
+ \c!before=,
+ \c!inbetween={\blank[\v!white]},
+ \c!after=,
+ \c!color=,
+ \c!height=.5\linewidth,
+ \c!depth=.5\linewidth,
+ \c!frame=\v!on, % compatible with textbackgrounds
+ \c!alternative=\v!b,
+ \c!backgroundcolor=,
+ \c!background=,
+ \c!rulethickness=\linewidth]
+
+\letvalue{\??thinrulealternatives\v!a }\zerocount
+\letvalue{\??thinrulealternatives\v!b }\plusone
+\letvalue{\??thinrulealternatives\v!c }\plustwo
+\letvalue{\??thinrulealternatives\v!none}\zerocount
+
+\newconstant\c_pack_thinrules_type
+
+\unexpanded\def\thinrule
{\strut
\bgroup
- \ruletype\plusone
- \processaction
- [\@@dlalternative]
- [ \v!a=>\ruletype\zerocount,% no line
- %\v!b=>\ruletype\plusone ,% height/depth
- \v!c=>\ruletype\plustwo ,% topheight/botdepth
- % 11=>\ruletype\plusone ,% fallback for backgrounds
- 0=>\ruletype\zerocount,% compatible with backgrounds
- % 1=>\ruletype\plusone ,% compatible with backgrounds
- 2=>\ruletype\plustwo ]% compatible with backgrounds
- \doifsomething\@@dlrulethickness
- {\linewidth\@@dlrulethickness}%
+ \edef\p_height {\directthinrulesparameter\c!height}%
+ \edef\p_depth {\directthinrulesparameter\c!depth}%
+ \edef\p_background{\directthinrulesparameter\c!background}%
+ \edef\p_frame {\directthinrulesparameter\c!frame}%
+ \linewidth\dimexpr\directthinrulesparameter\c!rulethickness/\plustwo\relax
\ifdim\linewidth=\zeropoint
- \ruletype\zerocount
+ \c_pack_thinrules_type\zerocount
+ \else\ifx\p_frame\v!on
+ \c_pack_thinrules_type\expandnamespaceparameter\??thinrulealternatives\directthinrulesparameter\c!alternative\v!b\relax
\else
- \doifnot\@@dlframe\v!on{\ruletype\zerocount}%
+ \c_pack_thinrules_type\zerocount
+ \fi\fi
+ \ifnum\c_pack_thinrules_type=\plusone
+ \ifx\p_height\v!max
+ \scratchheight\strutht
+ \else
+ \setdimensionwithunit\scratchheight\p_height\strutht
+ \fi
+ \ifx\p_depth\v!max
+ \scratchdepth\strutdp
+ \else
+ \setdimensionwithunit\scratchdepth\p_depth\strutdp
+ \fi
+ \else
+ \scratchheight\strutht
+ \scratchdepth \strutdp
\fi
- \ifnum\ruletype=\plusone
- \doif\@@dlheight\v!max{\let\@@dlheight\!!plusone}%
- \doif\@@dldepth \v!max{\let\@@dldepth \!!plusone}%
+ \ifx\p_background\v!color
+ \startcolor[\directthinrulesparameter\c!backgroundcolor]%
+ \ifnum\c_pack_thinrules_type=\plustwo % prevent overshoot due to rounding
+ \leaders
+ \hrule
+ \s!height\dimexpr\scratchheight-\linewidth\relax
+ \s!depth \dimexpr\scratchdepth -\linewidth\relax
+ \hfill
+ \else
+ \leaders
+ \hrule
+ \s!height\scratchheight
+ \s!depth \scratchdepth
+ \hfill
+ \fi
+ \stopcolor
+ \ifcase\c_pack_thinrules_type
+ % no rule
+ \or
+ \startcolor[\directthinrulesparameter\c!color]%
+ \hfillneg
+ \leaders
+ \hrule
+ \s!height\linewidth
+ \s!depth \linewidth
+ \hfill
+ \stopcolor
+ \or
+ \startcolor[\directthinrulesparameter\c!color]%
+ \hfillneg
+ \leaders
+ \hrule
+ \s!height\dimexpr-\scratchdepth+\linewidth\relax
+ \s!depth \scratchdepth
+ \hfill
+ \hfillneg
+ \leaders
+ \hrule
+ \s!height\scratchheight
+ \s!depth \dimexpr-\scratchheight+\linewidth\relax
+ \hfill
+ \stopcolor
+ \fi
\else
- \let\@@dlheight\!!plusone
- \let\@@dldepth\!!plusone
+ \ifcase\c_pack_thinrules_type
+ % no rule
+ \else
+ \startcolor[\directthinrulesparameter\c!color]%
+ \leaders
+ \hrule
+ \s!height\scratchheight
+ \s!depth \scratchdepth
+ \hfill
+ \stopcolor
+ \fi
\fi
- \freezedimensionwithunit\@@dlheight\strutht
- \freezedimensionwithunit\@@dldepth\strutdp
- \divide\linewidth \plustwo
- \doifelse\@@dlbackground\v!color
- {\startcolor[\@@dlbackgroundcolor]%
- \ifnum\ruletype=\plustwo % prevent overshoot due to rounding
- \leaders
- \hrule
- \!!height\dimexpr\@@dlheight-.5\linewidth\relax
- \!!depth \dimexpr\@@dldepth -.5\linewidth\relax
- \hfill
- \else
- \leaders
- \hrule
- \!!height\@@dlheight
- \!!depth \@@dldepth
- \hfill
- \fi
- \stopcolor
- \ifcase\ruletype
- % no rule
- \or
- \startcolor[\@@dlcolor]%
- \hfillneg
- \leaders\hrule\!!height\linewidth\!!depth\linewidth\hfill
- \stopcolor
- \or
- \startcolor[\@@dlcolor]%
- \hfillneg\leaders\hrule\!!height\dimexpr-\@@dldepth+\linewidth\relax\!!depth\@@dldepth\hfill
- \hfillneg\leaders\hrule\!!height\@@dlheight\!!depth\dimexpr-\@@dlheight+\linewidth\relax\hfill
- \stopcolor
- \fi}
- {\ifcase\ruletype \else
- \startcolor[\@@dlcolor]%
- \leaders\hrule\!!height\@@dlheight\!!depth\@@dldepth\hfill
- \stopcolor
- \fi}%
\strut
\carryoverpar\egroup}
-\def\hairline
+\unexpanded\def\hairline
{\endgraf
\thinrule
\endgraf}
-\def\dosetupthinrules[#1]%
- {\getparameters[\??dl][#1]}
+\unexpanded\def\thinrules
+ {\dosingleempty\pack_thinrules}
-\unexpanded\def\setupthinrules
- {\dosingleargument\dosetupthinrules}
-
-\def\dothinrules[#1]%
+\def\pack_thinrules[#1]%
{\bgroup
- \dosetupthinrules[#1]%
- \@@dlbefore
- \assignvalue\@@dlinterlinespace\@@dlinterlinespace{1.0}{1.5}{2.0}%
- \spacing\@@dlinterlinespace
- \dorecurse\@@dln
- {\ifnum\recurselevel=\@@dln \dothinrulesnobreak \else
- \ifnum\recurselevel=2 \dothinrulesnobreak \fi\fi
- \thinrule
- \ifnum\recurselevel<\@@dln\relax
+ \setupcurrentthinrules[#1]%
+ \assignvalue{\directthinrulesparameter\c!interlinespace}\m_pack_thinrules_interlinespace{1.0}{1.5}{2.0}%
+ \spacing\m_pack_thinrules_interlinespace
+ \edef\p_after {\directthinrulesparameter\c!after}%
+ \edef\p_inbetween{\directthinrulesparameter\c!inbetween}%
+ \directthinrulesparameter\c!before
+ \scratchcounter\directthinrulesparameter\c!n\relax
+ \dorecurse\scratchcounter
+ {\ifnum\recurselevel=\scratchcounter \penalty500 \else
+ \ifnum\recurselevel=\plustwo \penalty500 \fi\fi
+ \thinrule
+ \ifnum\recurselevel<\scratchcounter\relax
% test needed, else messed up whitespace
- \ifx\@@dlinbetween\empty
- \softbreak
+ \ifx\p_inbetween\empty
+ \softbreak % \ifhmode \hskip \parfillskip \break \fi
\else
\endgraf
\nowhitespace
- \@@dlinbetween
+ \p_inbetween
\fi
\fi}%
- \doifelsenothing\@@dlafter
- {\carryoverpar\egroup}
- {\@@dlafter\egroup}}
-
-\def\thinrules
- {\dosingleempty\dothinrules}
+ \ifx\p_after\empty
+ \carryoverpar\egroup
+ \else
+ \p_after\egroup
+ \fi{}}
%D A couple of examples are given below.
%D
@@ -464,11 +389,10 @@
%D
%D \typebuffer {\getbuffer}
%D
-%D There are a couple of alternative ways to visualize rules
-%D using backgrounds. At first sight these may look strange,
-%D but they make sense in educational settings. The
-%D alternatives are more or less compatible with the more
-%D advanced \METAPOST\ based implementation.
+%D There are a couple of alternative ways to visualize rules using backgrounds. At
+%D first sight these may look strange, but they make sense in educational settings.
+%D The alternatives are more or less compatible with the more advanced \METAPOST\
+%D based implementation.
%D
%D \startbuffer[a]
%D \setupthinrules
@@ -507,22 +431,10 @@
%D \typebuffer[b] \getbuffer[a,b]
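
For reference, a minimal invocation that exercises these background alternatives could look as follows; the keys shown (n, frame, alternative, background, backgroundcolor) all occur in the setups in this module, while the concrete values and the colour name are only assumptions for the sketch:

  \setupthinrules
    [n=3,
     frame=on,
     alternative=b,
     background=color,
     backgroundcolor=gray] % assumed colour name

  \thinrules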
%D \macros
-%D {optimizethinrules}
-%D
-%D By saying \type {\thinrulestrue} or \type {-false}, we
-%D can influence the way dangling lines are handled.
-
-\newif\ifoptimizethinrules \optimizethinrulestrue
-
-\def\dothinrulesnobreak
- {\ifoptimizethinrules\penalty500\fi}
-
-%D \macros
%D {textrule, starttextrule, setuptextrules}
%D
-%D Putting rules before and after a paragraph is very space
-%D sensitive, but the next command handles that quite well. It
-%D comes in two disguises:
+%D Putting rules before and after a paragraph is very space sensitive, but the
+%D next command handles that quite well. It comes in two disguises:
%D
%D \startbuffer
%D \textrule[top]{fragments}
@@ -562,132 +474,176 @@
%D \showsetup{starttextrule}
%D \showsetup{setuptextrules}
%D
-%D The implementation looks a bit complicated due to the
-%D optional arguments.
+%D The implementation looks a bit complicated due to the optional arguments.
-\unexpanded\def\setuptextrules
- {\dodoubleargument\getparameters[\??tl]}
+\installcorenamespace{textrules}
+\installcorenamespace{textrulealternatives}
-\def\complextextrule[#1]% if needed we can make it installable
- {\let\next\dobottomtextrule
- \processaction
- [#1]
- [ \v!top=>\let\next\dotoptextrule,
- \v!middle=>\let\next\domiddletextrule,
- \v!bottom=>\let\next\dobottomtextrule]%
- \dosinglegroupempty\next}
+\installdirectcommandhandler \??textrules {textrules}
-\definecomplexorsimple\textrule
+\setuptextrules
+ [\c!location=\v!left,
+ \c!before=\blank,
+ \c!after=\blank,
+ \c!inbetween=,
+ \c!width=2\emwidth,
+ \c!style=\v!bold,
+ \c!color=,
+ \c!rulecolor=,
+ \c!bodyfont=,
+ \c!depthcorrection=\v!on,
+ \c!rulethickness=\linewidth,
+ \c!distance=.5\emwidth]
-\def\simpletextrule
- {\dosinglegroupempty\dounknowntextrule}
+\unexpanded\def\textrule
+ {\dosingleempty\pack_textrule}
-\def\docomplextextrule#1%
+\def\pack_textrule
+ {\iffirstargument
+ \expandafter\pack_textrule_yes
+ \else
+ \expandafter\pack_textrule_nop
+ \fi}
+
+\def\pack_textrule_yes[#1]%
+ {\expandnamespacevalue\??textrulealternatives{#1}\v!bottom}
+
+\def\pack_textrule_nop[#1]%
+ {\dosinglegroupempty\pack_textrule_nop_indeed}
+
+\def\pack_textrule_nop_indeed
+ {\iffirstargument
+ \expandafter\pack_textrule_nop_indeed_yes
+ \else
+ \expandafter\pack_textrule_nop_indeed_nop
+ \fi}
+
+\def\pack_textrule_nop_indeed_yes
+ {\csname\??textrulealternatives\v!top\endcsname}
+
+\def\pack_textrule_nop_indeed_nop
+ {\csname\??textrulealternatives\v!bottom\endcsname\empty}
+
+%D\startbuffer
+%D\showstruts
+%D
+%D\setupwhitespace[none]
+%D
+%D\textrule[top]{test} xxxxx\smash{\strut} \textrule[bottom]{test}
+%D\textrule[top]{test} xxxxx\strut \textrule[bottom]{test}
+%D
+%D\setupwhitespace[big]
+%D
+%D\textrule[top]{test} xxxxx\smash{\strut} \textrule[bottom]{test}
+%D\textrule[top]{test} xxxxx\strut \textrule[bottom]{test}
+%D\stopbuffer
+%D
+%D \typebuffer \start \getbuffer \stop
+
+\setvalue{\??textrulealternatives\v!top}#1%
+ {\page[\v!preference] % interferes
+ \directtextrulesparameter\c!before\relax
+ \blank[\v!samepage,\v!nowhite]%
+ \pack_textrule_with_text{#1}%
+ \blank[\v!samepage,\v!nowhite]%
+ \directtextrulesparameter\c!inbetween\relax
+ \endgraf}
+
+\setvalue{\??textrulealternatives\v!bottom}#1%
+ {\blank[\v!samepage,\v!nowhite]%
+ \pack_textrule_following{#1}%
+ \blank[\v!samepage,\v!nowhite]%
+ \directtextrulesparameter\c!after\relax
+ \page[\v!preference]}
+
+\setvalue{\??textrulealternatives\v!middle}#1%
+ {\blank[\v!samepage,\v!nowhite]%
+ \directtextrulesparameter\c!inbetween\relax
+ \pack_textrule_following{#1}%
+ \blank[\v!samepage,\v!nowhite]%
+ \directtextrulesparameter\c!inbetween\relax
+ \page[\v!preference]}
+
+\def\pack_textrule_with_text#1%
{\bgroup
- \advance\hsize\dimexpr-\rightskip-\leftskip\relax
- \setbox\scratchbox\hbox to \hsize
- {\dimen4\dimexpr .5ex+.5\linewidth\relax
- \dimen6\dimexpr-.5ex+.5\linewidth\relax
+ \setbox\scratchbox\hbox to \availablehsize
+ {\scratchwidth \directtextrulesparameter\c!rulethickness\relax
+ \scratchheight\dimexpr .5\exheight+.5\scratchwidth\relax
+ \scratchdepth \dimexpr-.5\exheight+.5\scratchwidth\relax
\doifsomething{#1}
- {\doifelse\@@tllocation\v!inmargin
+ {\doifelse{\directtextrulesparameter\c!location}\v!inmargin
{\llap
- {\dousestyleparameter\@@tlstyle
- \dousecolorparameter\@@tlcolor
+ {\usetextrulesstyleandcolor\c!style\c!color
#1%
\hskip\leftmargindistance}}
- {\color[\@@tlrulecolor]
- {\vrule\!!height\dimen4\!!depth\dimen6\!!width\@@tlwidth}%
- \hbox spread 2\dimexpr\@@tldistance\relax
+ {\color[\directtextrulesparameter\c!rulecolor]
+ {\vrule\s!height\scratchheight\s!depth\scratchdepth\s!width\directtextrulesparameter\c!width}%
+ \hbox spread 2\dimexpr\directtextrulesparameter\c!distance\relax
{\hss
- \dousestyleparameter\@@tlstyle
- \dousecolorparameter\@@tlcolor
+ \usetextrulesstyleandcolor\c!style\c!color
\strut#1%
\hss}}}%
- \color[\@@tlrulecolor]
- {\leaders\hrule\!!height\dimen4\!!depth\dimen6\hfill}}%
+ \color[\directtextrulesparameter\c!rulecolor]
+ {\leaders\hrule\s!height\scratchheight\s!depth\scratchdepth\hfill}}%
\ht\scratchbox\strutht
\dp\scratchbox\strutdp
\noindent\box\scratchbox
-%\nobreak\verticalstrut\kern-\struttotal
-% evt \witruimte
\egroup}
-\def\dotoptextrule#1%
- {\page[\v!preference] % interferes
- %\whitespace % no
- \@@tlbefore
- \docomplextextrule{#1}%
-% todo, option: \doifnothing{#1}{\ruledvskip-.5ex}
- \nowhitespace
- \@@tlinbetween
- \endgraf}
-
-\def\dodobottomtextrule#1#2%
- {\ifhmode
- \endgraf
- \fi
- \dimen0\strutdp
- \ifdim\prevdepth>\strutdp\else % was <\strutdp
- \ifdim\prevdepth>\zeropoint
- \advance\dimen0 -\prevdepth
+\def\pack_textrule_correct_depth_yes
+ {\vskip\dimexpr
+ \strutdp +.5\exheight
+ \ifdim\prevdepth>\strutdp\else
+ \ifdim\prevdepth>\zeropoint
+ -\prevdepth
+ \fi
\fi
- \fi
- \advance\dimen0 .5ex
- \vskip\dimen0
-% ==
-% \vskip\dimexpr \strutdp + .5ex
-% \ifdim\prevdepth>\strutdp\else\ifdim\prevdepth>\zeropoint-\prevdepth\fi\fi\relax
-%
- \@@tlinbetween
- \doifelsenothing{#2}
- {\bgroup
- \advance\hsize\dimexpr-\rightskip-\leftskip\relax
+ \relax
+ \relax}
+
+\def\pack_textrule_correct_depth_nop
+ {\vskip\dimexpr
+ \strutdp +.5\exheight
+ \relax
+ \relax}
+
+\def\pack_textrule_following#1%
+ {\doifelsenothing{#1}
+ {\ifhmode
+ \endgraf
+ \fi
+ \doifelse{\directtextrulesparameter\c!depthcorrection}\v!on\pack_textrule_correct_depth_yes\pack_textrule_correct_depth_nop
\nointerlineskip
- \moveleft-\leftskip\vbox
- {\color[\@@tlrulecolor]
- {\hrule\!!depth\linewidth\!!height\zeropoint\!!width\hsize}}%
- \egroup}
- {\docomplextextrule{#2}}%
- \ifvmode\prevdepth\zeropoint\fi
- #1%
- \page[\v!preference]}
-
-\def\dobottomtextrule
- {\dodobottomtextrule\@@tlafter}
-
-\def\domiddletextrule
- {\dodobottomtextrule\@@tlinbetween}
-
-\def\dounknowntextrule
- {\iffirstargument
- \@EA\dotoptextrule
- \else
- \@EA\dobottomtextrule\@EA\empty
+ \dontleavehmode\vbox
+ {\color[\directtextrulesparameter\c!rulecolor]
+ {\hrule\s!depth\directtextrulesparameter\c!rulethickness\s!height\zeropoint\s!width\availablehsize}}}
+ {\pack_textrule_with_text{#1}}%
+ \ifvmode
+ \prevdepth\zeropoint
\fi}
%D The grouped commands also support bodyfont switching:
\unexpanded\def\starttextrule#1%
{\bgroup
- \def\dounknowntextrule{\domiddletextrule}
- \dotoptextrule{#1}
+ \def\pack_textrule_nop_indeed{\csname\??textrulealternatives\v!middle\endcsname}%
+ \csname\??textrulealternatives\v!top\endcsname{#1}%
\bgroup
- \doifsomething\@@tlbodyfont{\switchtobodyfont[\@@tlbodyfont]}}
+ \doifsomething{\directtextrulesparameter\c!bodyfont}{\switchtobodyfont[\directtextrulesparameter\c!bodyfont]}}
\unexpanded\def\stoptextrule
{\par
\egroup
- \dobottomtextrule\empty
+ \csname\??textrulealternatives\v!bottom\endcsname\empty
\egroup}
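
A hedged sketch of that bodyfont switching; the small value and the sample text are illustrative assumptions only:

  \setuptextrules[bodyfont=small]

  \starttextrule{fragments}
    some text between a top and a bottom rule
  \stoptextrule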
%D \macros
%D {fillinrules, setupfillinrules}
%D
-%D The next few commands do not really deserve a place in a
-%D core module, because they deal with specific typography.
-%D Nevertheless I decided to make them part of the core,
-%D because they permit us to make questionaires. Let's start
-%D with some examples.
+%D The next few commands do not really deserve a place in a core module, because
+%D they deal with specific typography. Nevertheless I decided to make them part of
+%D the core, because they permit us to make questionnaires. Let's start with some
+%D examples.
%D
%D \fillinrules[n=2,width=fit]{first}
%D \fillinrules[n=2,width=broad]{first}
@@ -696,85 +652,112 @@
%D \fillinrules[n=2]{first}{last}
%D \fillintext{first}{last} \input reich \par
%D
-%D The main command is \type{\fillinrules}. This command takes
-%D one and an optional second argument and sets a paragraph with
-%D empty visualized lines.
+%D The main command is \type{\fillinrules}. This command takes one argument and an
+%D optional second one, and sets a paragraph with empty visualized lines.
%D
%D \showsetup{fillinrules}
%D \showsetup{setupfillinrules}
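
A small usage sketch, combining the setup command with direct calls; the keys shown (n, width, separator, distance) all occur in the defaults below, but the concrete values and the labels are assumptions:

  \setupfillinrules[n=2,distance=.5em,separator={:}]

  \fillinrules{name}
  \fillinrules[width=fit]{name}{date}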
-\unexpanded\def\setupfillinrules
- {\dodoubleargument\getparameters[\??il]}
-\definecomplexorsimpleempty\fillinrules
+\installcorenamespace{fillinrules}
-\def\complexfillinrules[#1]%
- {\def\docomplexfillinrules##1##2%
- {\dodocomplexfillinrules[#1]{##1}{##2}{\thinrules
- [\c!n=\@@iln,\c!interlinespace=\@@ilinterlinespace,\c!before=,\c!after=]}}%
- \dodoublegroupempty\docomplexfillinrules}
+\installdirectcommandhandler \??fillinrules {fillinrules}
-\def\dodocomplexfillinrules[#1]#2#3#4%
+\setupfillinrules
+ [\c!width=\v!broad,
+ \c!distance=\emwidth,
+ \c!before=\blank,
+ \c!after=\blank,
+ \c!n=\plusone,
+ \c!interlinespace=\v!small,
+ \c!separator=,
+ \c!style=,
+ \c!color=]
+
+\unexpanded\def\fillinrules
+ {\dosingleempty\pack_fillinrules}
+
+\def\pack_fillinrules[#1]%
{\endgraf
- \@@ilbefore
\begingroup
- \setupfillinrules[#1]%
+ \setupcurrentfillinrules[#1]%
+ \let\pack_fillinrules_rule\thinrules
+ \dodoublegroupempty\pack_fillinrules_indeed}
+
+\def\pack_fillinrules_indeed#1#2%
+ {\directfillinrulesparameter\c!before
+ \setupcurrentthinrules
+ [\c!n=\directfillinrulesparameter\c!n,
+ \c!interlinespace=\directfillinrulesparameter\c!interlinespace,
+ \c!before=,
+ \c!after=]%
+ \scratchdistance\directfillinrulesparameter\c!distance\relax
+ \edef\m_fillinrules_one{#1}%
+ \edef\m_fillinrules_two{#2}%
\noindent
- \doifsomething{#2}
- {\doifelse\@@ilwidth\v!fit
- {\let\@@ildistance\!!zeropoint
- \hbox}
- {\doifelse\@@ilwidth\v!broad
- {\hbox}
- {\hbox to \@@ilwidth}}%
- \bgroup
- \dousestyleparameter\@@ilstyle
- \dousecolorparameter\@@ilcolor
- \strut#2\hfill\@@ilseparator\hskip\@@ildistance
- \egroup}%
- %\hangindent=\wd0\relax % tzt hang=yes,n
- %\parindent=\hangindent
- %\box0\relax
+ \ifx\m_fillinrules_one\empty \else
+ \edef\p_width{\directfillinrulesparameter\c!width}%
+ \ifx\p_width\v!fit
+ \scratchdistance\zeropoint
+ \hbox
+ \else\ifx\p_width\v!broad
+ \hbox
+ \else
+ \hbox to \directfillinrulesparameter\c!width
+ \fi\fi
+ \bgroup
+ \usefillinrulesstyleandcolor\c!style\c!color
+ \strut
+ \m_fillinrules_one
+ \hfill\directfillinrulesparameter\c!separator
+ \hskip\scratchdistance
+ \egroup
+ \fi
\setupwhitespace[\v!big]%
\ignorespaces
- #4%
- \doifsomething{#3}
- {\kern\@@ildistance
- \dousestyleparameter\@@ilstyle
- \dousecolorparameter\@@ilcolor
- #3\strut}%
- \endgroup
+ \pack_fillinrules_rule
+ \ifx\m_fillinrules_two\empty \else
+ \kern\scratchdistance
+ \usefillinrulesstyleandcolor\c!style\c!color
+ \m_fillinrules_two
+ \strut
+ \fi
\endgraf
- \@@ilafter}
+ \directfillinrulesparameter\c!after
+ \endgroup}
%D \macros
%D {fillintext}
%D
-%D To provide compatible layouts when texts and lines are
-%D mixed, one can typeset a paragraph by using the command
-%D \type{\fillintext}.
+%D To provide compatible layouts when texts and lines are mixed, one can typeset
+%D a paragraph by using the command \type {\fillintext}.
%D
%D \showsetup{fillintext}
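
A usage sketch, mirroring the example given earlier in this file:

  \fillintext{first}{last} \input reich \par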
-\definecomplexorsimpleempty\fillintext
+\unexpanded\def\fillintext
+ {\dosingleempty\pack_fillintext}
+
+\def\pack_fillintext[#1]% ugly
+ {\endgraf
+ \begingroup
+ \setupcurrentfillinrules[#1]%
+ \dodoublegroupempty\pack_fillintext_indeed}
-\def\complexfillintext[#1]% rather rough, using an \unhbox is suboptimal
- {\def\docomplexfillintext##1##2%
- {\dowithnextbox
- {\dodocomplexfillinrules[#1]{##1}{\hfill##2}{\unhbox\nextbox\unskip}}%
- \hbox\bgroup\let\par\egroup\ignorespaces}%
- \dodoublegroupempty\docomplexfillintext}
+\def\pack_fillintext_indeed#1#2%
+ {\def\pack_fillinrules_rule{\unhbox\nextbox\unskip}%
+ \dowithnextbox{\pack_fillinrules_indeed{#1}{\hfill#2}}%
+ \hbox\bgroup\let\par\egroup\ignorespaces}
%D \macros
%D {fillinline, setupfillinlines}
%D
-%D Another member of the family takes care of putting a (often
-%D small) rule after a piece of text, like
+%D Another member of the family takes care of putting an (often small) rule after
+%D a piece of text, like
%D
%D \startbuffer
%D \fillinline \input reich \par
%D \fillinline[margin=0cm] \input reich \par
%D \stopbuffer
%D
%D \startexample
%D \getbuffer
@@ -789,82 +772,71 @@
%D \showsetup{fillinline}
%D \showsetup{setupfillinlines}
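
A short sketch of both call forms (the widths are assumptions; reich is the sample text used throughout this file):

  \setupfillinlines[width=2cm]

  \fillinline \input reich \par
  \fillinline[margin=0cm] \input reich \par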
-\unexpanded\def\setupfillinlines
- {\dodoubleargument\getparameters[\??iv]}
-
-\definecomplexorsimpleempty\fillinline
+\installcorenamespace{fillinlines}
-\def\complexfillinline[#1]%
- {%\endgraf % interferes with \definedescription cum suis
- \@@ivbefore
- \begingroup
- \setupfillinlines[#1]%
- \advance\rightskip \@@ivmargin
- \parfillskip\zeropoint
- \def\par % very dangerous
- {\let\par\endgraf % -)
- \ifhmode\unskip\hfill\fi
- \scratchdimen\dimexpr\@@ivwidth-\@@ivdistance\relax
- \ifdim\scratchdimen>\@@ivmargin\else\expandafter\rlap\fi
- {\kern\@@ivdistance
- \vrule
- \!!width \scratchdimen
- \!!height.5\linewidth
- \!!depth .5\linewidth}%
- \endgraf % !
- \endgroup
- \endgraf % !
- \@@ilafter}}
-
-%D Will move up:
-
-\setupmarginrules
- [\c!level=0,
- \c!rulethickness=\@@kadefaultwidth\linewidth]
+\installdirectcommandhandler \??fillinlines {fillinlines}
-\setupthinrules
- [\c!interlinespace=\v!small,
- \c!n=3,
- \c!before=,
- \c!inbetween={\blank[\v!white]},
- \c!after=,
+\setupfillinlines
+ [\c!width=8\emwidth, % was 3cm
+ \c!margin=\directfillinlinesparameter\c!width,
+ \c!rulethickness=\linewidth,
\c!color=,
- \c!height=.5\linewidth,
- \c!depth=.5\linewidth,
- \c!frame=\v!on, % compatible with textbackgrounds
- \c!alternative=\v!b,
- \c!backgroundcolor=,
- \c!background=,
- \c!rulethickness=]
-
-\setuptextrules
- [\c!location=\v!left,
+ \c!distance=\emwidth,
\c!before=\blank,
- \c!after=\blank,
- \c!inbetween=,
- \c!width=2em,
- \c!style=\v!bold,
- \c!color=,
- \c!rulecolor=,
- \c!bodyfont=,
- \c!distance=.5em]
+ \c!after=\blank]
-\setupfillinrules
- [\c!width=\v!broad,
- \c!distance=1em,
- \c!before=\blank,
- \c!after=\blank,
- \c!n=1,
- \c!interlinespace=\v!small,
- \c!separator=,
- \c!style=\v!normal,
- \c!color=]
+\unexpanded\def\fillinline
+ {\dosingleempty\pack_fillinline}
+
+\ifdefined\endpar % experiment with \endpar
+
+ \def\pack_fillinline[#1]%
+ {% \endpar % no, as it interferes with \definedescription cum suis
+ \begingroup
+ \setupcurrentfillinlines[#1]%
+ \directfillinlinesparameter\c!before
+ \begingroup
+ \advance\rightskip \directfillinlinesparameter\c!margin\relax
+ \parfillskip\zeropoint
+ \pushmacro\endpar
+ \def\endpar
+ {\popmacro\endpar
+ \ifhmode\unskip\hfill\fi
+ \scratchwidth\dimexpr\directfillinlinesparameter\c!width-\directfillinlinesparameter\c!distance\relax
+ \ifdim\scratchwidth>\directfillinlinesparameter\c!margin\else\expandafter\rlap\fi
+ {\kern\directfillinlinesparameter\c!distance
+ \scratchheight\dimexpr\directfillinlinesparameter\c!rulethickness/\plustwo\relax
+ \color[\directfillinlinesparameter\c!color]{\vrule\s!width\scratchwidth\s!height\scratchheight\s!depth\scratchheight}}%
+ \endpar
+ \endgroup
+ \endpar
+ \directfillinlinesparameter\c!after
+ \endgroup}} % carryover ?
+
+\else
+
+ \def\pack_fillinline[#1]%
+ {%\endgraf % no, as it interferes with \definedescription cum suis
+ \begingroup
+ \setupcurrentfillinlines[#1]%
+ \directfillinlinesparameter\c!before
+ \begingroup
+ \advance\rightskip \directfillinlinesparameter\c!margin\relax
+ \parfillskip\zeropoint
+ \def\par
+ {\let\par\endgraf
+ \ifhmode\unskip\hfill\fi
+ \scratchwidth\dimexpr\directfillinlinesparameter\c!width-\directfillinlinesparameter\c!distance\relax
+ \ifdim\scratchwidth>\directfillinlinesparameter\c!margin\else\expandafter\rlap\fi
+ {\kern\directfillinlinesparameter\c!distance
+ \scratchheight\dimexpr\directfillinlinesparameter\c!rulethickness/\plustwo\relax
+ \color[\directfillinlinesparameter\c!color]{\vrule\s!width\scratchwidth\s!height\scratchheight\s!depth\scratchheight}}%
+ \endgraf
+ \endgroup
+ \endgraf
+ \directfillinlinesparameter\c!after
+ \endgroup}} % carryover ?
-\setupfillinlines
- [\c!width=3cm,
- \c!margin=\@@ivwidth,
- \c!distance=1em,
- \c!before=\blank,
- \c!after=\blank]
+\fi
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/pack-obj.lua b/Master/texmf-dist/tex/context/base/pack-obj.lua
index c580aaa6287..1e4e0f59e5b 100644
--- a/Master/texmf-dist/tex/context/base/pack-obj.lua
+++ b/Master/texmf-dist/tex/context/base/pack-obj.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['pack-obj'] = {
reusable components.</p>
--ldx]]--
+local commands, context = commands, context
+
local texcount = tex.count
local allocate = utilities.storage.allocate
@@ -46,15 +48,30 @@ end
function jobobjects.number(tag,default)
local o = collected[tag] or tobesaved[tag]
- context((o and o[1]) or default)
+ return o and o[1] or default
end
function jobobjects.page(tag,default)
local o = collected[tag] or tobesaved[tag]
- context((o and o[2]) or default)
+ return o and o[2] or default
+end
+
+-- interface
+
+commands.saveobject = jobobjects.save
+commands.setobject = jobobjects.set
+
+function commands.objectnumber(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ context(o and o[1] or default)
+end
+
+function commands.objectpage(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ context(o and o[2] or default)
end
-function jobobjects.doifelse(tag)
- commands.testcase(collected[tag] or tobesaved[tag])
+function commands.doifobjectreferencefoundelse(tag)
+ commands.doifelse(collected[tag] or tobesaved[tag])
end
diff --git a/Master/texmf-dist/tex/context/base/pack-obj.mkiv b/Master/texmf-dist/tex/context/base/pack-obj.mkiv
index 6c9848a0135..356a0b7ebf8 100644
--- a/Master/texmf-dist/tex/context/base/pack-obj.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-obj.mkiv
@@ -105,29 +105,27 @@
%
% \everyobject{\the\pdfbackendeveryxform}
%
-% \let\doresetobjects\relax
-%
-% \def\setobject #1#2{\begingroup\objectoff\objectoffset\inobjecttrue\the\everyobject\dowithnextbox{\dosetobject{#1}{#2}}}
-% \def\settightobject#1#2{\begingroup\objectoff\zeropoint \inobjecttrue\the\everyobject\dowithnextbox{\dosetobject{#1}{#2}}}
+% \unexpanded\def\setobject #1#2{\begingroup\objectoff\objectoffset\inobjecttrue\the\everyobject\dowithnextbox{\pack_objects_set{#1}{#2}}}
+% \unexpanded\def\settightobject#1#2{\begingroup\objectoff\zeropoint \inobjecttrue\the\everyobject\dowithnextbox{\pack_objects_set{#1}{#2}}}
%
% \let\objectsetvbox\vbox %\def\objectsetvbox{\ruledvbox}
% \let\objectgetvbox\vbox %\def\objectgetvbox{\ruledvbox}
% \let\objectsethbox\hbox %\def\objectsethbox{\ruledhbox}
% \let\objectgethbox\hbox %\def\objectgethbox{\ruledhbox}
%
-% \def\dosetobject#1#2%
+% \unexpanded\def\pack_objects_set#1#2%
% {\objectwd\wd\nextbox
% \objectht\ht\nextbox
% \objectdp\dp\nextbox
% \ifdim\objectoff=\zeropoint\relax
% \setbox\objectbox\box\nextbox
% \else
-% \setbox\objectbox\objectsetvbox spread 2\objectoff{\vss\objectsethbox spread 2\objectoff{\hss\flushnextbox\hss}\vss}%
+% \setbox\objectbox\objectsetvbox spread 2\objectoff{\vss\objectsethbox spread 2\objectoff{\hss\box\nextbox\hss}\vss}%
% \fi
% \ctxlua{objects.register("#1::#2")}%
% \endgroup}
%
-% \def\getobject#1#2%
+% \unexpanded\def\getobject#1#2%
% {\begingroup
% \ctxlua{objects.restore("#1::#2")}%
% \ifdim\objectoff=\zeropoint\relax \else
@@ -140,7 +138,7 @@
% \box\objectbox
% \endgroup}
%
-% \def\getpageobject#1#2%
+% \unexpanded\def\getpageobject#1#2%
% {\begingroup
% \ctxlua{objects.restore("#1::#2")}%
% \ifdim\objectoff=\zeropoint\relax
@@ -155,25 +153,25 @@
% \box\objectbox
% \endgroup}
%
-% \def\setobjectdirectly #1#2{\ctxlua{objects.register("#1::#2")}}
-% \def\getobjectdirectly #1#2{\ctxlua{objects.restore ("#1::#2")}}
-% \def\getobjectdimensions #1#2{\ctxlua{objects.restore ("#1::#2")}}
-% \def\doifobjectfoundelse #1#2{\ctxlua{objects.doifelse("#1::#2")}}
-% \def\doifobjectreferencefoundelse#1#2{\ctxlua{objects.doifelse("#1::#2")}}
+% \unexpanded\def\setobjectdirectly #1#2{\ctxlua{objects.register("#1::#2")}}
+% \unexpanded\def\getobjectdirectly #1#2{\ctxlua{objects.restore ("#1::#2")}}
+% \unexpanded\def\getobjectdimensions #1#2{\ctxlua{objects.restore ("#1::#2")}}
+% \unexpanded\def\doifobjectfoundelse #1#2{\ctxlua{objects.doifelse("#1::#2")}}
+% \unexpanded\def\doifobjectreferencefoundelse#1#2{\ctxlua{objects.doifelse("#1::#2")}}
%
% \let\objectreferenced\relax
% \let\driverreferenced\relax
%
-% \def\doregisterobjectreference{\writestatus{objects}{obsolete: register object reference}\gobblethreearguments}
-% \def\dooverloadobjectreference{\writestatus{objects}{obsolete: overload object reference}\gobblethreearguments}
-% \def\dosetobjectreference {\writestatus{objects}{obsolete: set object reference}\gobblethreearguments}
-% \def\dosetdriverreference {\writestatus{objects}{obsolete: set driver reference}\gobblethreearguments}
+% \unexpanded\def\pack_objects_register_reference{\writestatus{objects}{obsolete: register object reference}\gobblethreearguments}
+% \unexpanded\def\pack_objects_overload_reference{\writestatus{objects}{obsolete: overload object reference}\gobblethreearguments}
+% \unexpanded\def\dosetobjectreference {\writestatus{objects}{obsolete: set object reference}\gobblethreearguments}
+% \unexpanded\def\dosetdriverreference {\writestatus{objects}{obsolete: set driver reference}\gobblethreearguments}
%
% \def\defaultobjectreference{0}
% \def\defaultobjectpage {\realfolio}
%
-% \def\dogetobjectreference #1#2#3{\xdef#3{\ctxlua{objects.reference("#1::#2)}}}
-% \def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxlua{objects.page("#1::#2))}}}
+% \unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\ctxlua{objects.reference("#1::#2)}}}
+% \unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxlua{objects.page("#1::#2))}}}
%
% \protect
%
@@ -219,26 +217,27 @@
%D housekeeping to the driver. The current approach permits
%D us to keep the box characteristic too.
-\newif\ifinobject
+\installcorenamespace {objects}
+
+\newif\ifinobject % public (might become a conditional)
-\def\objectplaceholder{NOT YET FLUSHED}%
+\def\objectplaceholder{NOT YET FLUSHED}
-\def\presetobject#1#2% \global added
- {\ifcsname\??ob:#1::#2\endcsname\else
- \global\@EA\let\csname\??ob:#1::#2\endcsname\objectplaceholder
+\unexpanded\def\presetobject#1#2% \global added
+ {\ifcsname\??objects#1::#2\endcsname\else
+ \global\expandafter\let\csname\??objects#1::#2\endcsname\objectplaceholder
\fi}
-\def\dosetobject#1#2#3% \initializepaper this will move to \everyshipout
- {% \initializepaper
- \ifcsname\??ob:#2::#3\endcsname
+\unexpanded\def\pack_objects_set#1#2#3%
+ {\ifcsname\??objects#2::#3\endcsname
\expandafter\gobblefivearguments
\else % tzt, overload internal referenced objects to save entries
- \expandafter\dodosetobject
+ \expandafter\pack_objects_set_indeed
\fi
{#1}{#2}{#3}}
-\def\resetobject#1#2%
- {\letbeundefined{\??ob:#1::#2}}
+\unexpanded\def\resetobject#1#2%
+ {\letbeundefined{\??objects#1::#2}}
%D \macros
%D {finalizeobjectbox}
@@ -246,7 +245,7 @@
%D This one provides a hook for last minute object box processing;
%D we need this in \MKIV.
-\ifx\finalizeobjectbox\undefined
+\ifdefined\finalizeobjectbox \else
\let\finalizeobjectbox\gobbleoneargument
\fi
@@ -257,47 +256,54 @@
\def\objectoffset{1cm}
-\def\dodosetobject#1#2#3%
+\unexpanded\def\pack_objects_set_indeed#1#2#3%
{\bgroup
- \globalpushmacro\crossreferenceobject \objectreferenced
+ \globalpushmacro\crossreferenceobject
+ \objectreferenced
\inobjecttrue
\dowithnextbox
{\globalpopmacro\crossreferenceobject
- \dododosetobject{#1}{#2}{#3}\egroup}}
+ \pack_objects_set_indeed_indeed{#1}{#2}{#3}%
+ \egroup}}
% in luatex version < 66 we had a 1bp compensation (hardcoded in luatex)
-\def\dododosetobject#1#2#3%
+\let\pack_objects_handle\relax
+
+\unexpanded\def\pack_objects_set_indeed_indeed#1#2#3%
{\begingroup
\scratchdimen\objectoffset
- \@EA\xdef\csname\??ob:#2::#3\endcsname
- {\noexpand\dohandleobject{#2}{#3}%
+ \expandafter\xdef\csname\??objects#2::#3\endcsname
+ {\pack_objects_handle
+ {#2}%
+ {#3}%
{\ifhbox\nextbox\hbox\else\vbox\fi}%
- {\number\nextboxwd}{\number\nextboxht}{\number\nextboxdp}%
+ {\number\wd\nextbox}%
+ {\number\ht\nextbox}%
+ {\number\dp\nextbox}%
{\number\scratchdimen}}%
\expanded % freeze the dimensions since \dostartobject may use \nextbox
- {\dostartobject{#2}{#3}{\the\nextboxwd}{\the\nextboxht}{\the\nextboxdp}}%
+ {\dostartobject{#2}{#3}{\the\wd\nextbox}{\the\ht\nextbox}{\the\dp\nextbox}}%
\ifcase#1\relax\else \ifdim\objectoffset>\zeropoint
- \setbox\nextbox\vbox spread 2\scratchdimen
+ \setbox\nextbox\vbox \s!spread 2\scratchdimen
{\forgetall \offinterlineskip
- \vss\hbox spread 2\scratchdimen{\hss\flushnextbox\hss}\vss}%
+ \vss\hbox \s!spread 2\scratchdimen{\hss\box\nextbox\hss}\vss}%
\fi \fi
- \flushnextbox
+ \box\nextbox
\dostopobject
\endgroup}
-\def\getobject#1#2%
- {\ifcsname\??ob:#1::#2\endcsname
+\unexpanded\def\getobject#1#2%
+ {\ifcsname\??objects#1::#2\endcsname
\begingroup
- \let\dohandleobject\dogetobject
- \csname\??ob:#1::#2\expandafter\endcsname
+ \let\pack_objects_handle\pack_objects_get
+ \csname\??objects#1::#2\expandafter\endcsname
\else
{\infofont[object #1::#2]}%
\fi}
-\def\dogetobject#1#2#3#4#5#6#7% don't change this, should work for dvi & pdf
- {% \initializepaper
- \forgetall
+\unexpanded\def\pack_objects_get#1#2#3#4#5#6#7% don't change this, should work for dvi & pdf
+ {\forgetall
% todo: if no attr then faster
\setbox\scratchbox\vbox attr \viewerlayerattribute \attribute\viewerlayerattribute
{\doinsertobject{#1}{#2}}%
@@ -323,18 +329,18 @@
%D The results are reported in \type {\objectwidth}, \type
%D {\objectheight} and \type {\objectdepth}.
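
A hedged sketch of how these reporting macros are typically combined with a stored object; the class demo and the tag one are made-up names:

  \setobject{demo}{one}\hbox{some boxed content}
  \getobjectdimensions{demo}{one}
  width \objectwidth, height \objectheight, depth \objectdepth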
-\def\dogetobjectdimensions#1#2#3#4#5#6#7%
+\unexpanded\def\pack_objects_get_dimensions#1#2#3#4#5#6#7%
{\def\objectwidth {#4\s!sp}%
\def\objectheight{#5\s!sp}%
\def\objectdepth {#6\s!sp}%
\def\objectmargin{#7\s!sp}}
-\def\getobjectdimensions#1#2%
- {\let\dohandleobject\dogetobjectdimensions
+\unexpanded\def\getobjectdimensions#1#2%
+ {\let\pack_objects_handle\pack_objects_get_dimensions
\let\objectwidth \!!zeropoint
\let\objectheight\!!zeropoint
\let\objectdepth \!!zeropoint
- \csname\??ob:#1::#2\endcsname}
+ \csname\??objects#1::#2\endcsname}
%D Apart from this kind of object, which has typeset content,
%D we can have low level driver specific objects. Both types
@@ -352,35 +358,35 @@
%D These commands are to be called by the \type{\startobject},
%D \type{\stopobject} and \type{\insertobject} specials.
-\def\objectreferenced{\global\chardef\crossreferenceobject\plusone}
-\def\driverreferenced{\global\chardef\crossreferenceobject\zerocount}
+\unexpanded\def\objectreferenced{\global\chardef\crossreferenceobject\plusone}
+\unexpanded\def\driverreferenced{\global\chardef\crossreferenceobject\zerocount}
\objectreferenced
% no undefined test ! ! ! ! (pdftex fails on undefined objects)
-\def\doregisterobjectreference#1#2#3{\normalexpanded{\noexpand\ctxlatelua{job.objects.save("#1::#2",#3,\noexpand\the\realpageno)}}}
-\def\dooverloadobjectreference#1#2#3{\ctxlua{job.objects.set("#1::#2",#3,\the\realpageno)}}
+\unexpanded\def\pack_objects_register_reference#1#2#3{\normalexpanded{\noexpand\ctxlatecommand{saveobject("#1::#2",#3,\noexpand\the\realpageno)}}}
+\unexpanded\def\pack_objects_overload_reference#1#2#3{\ctxcommand{setobject("#1::#2",#3,\the\realpageno)}}
-\def\dosetobjectreference
+\unexpanded\def\dosetobjectreference
{\ifcase\crossreferenceobject
\objectreferenced
- \expandafter\dooverloadobjectreference
+ \expandafter\pack_objects_overload_reference
\else
- \expandafter\doregisterobjectreference
+ \expandafter\pack_objects_register_reference
\fi}
-\def\dosetdriverreference
+\unexpanded\def\dosetdriverreference
{\driverreferenced\dosetobjectreference}
\def\defaultobjectreference#1#2{0} % driver dependent
\def\defaultobjectpage #1#2{\realfolio}
-\def\dogetobjectreference #1#2#3{\xdef#3{\ctxlua{job.objects.number("#1::#2","\defaultobjectreference{#1}{#2}")}}}
-\def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxlua{job.objects.page("#1::#2","\defaultobjectpage{#1}{#2}")}}}
+\unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\ctxcommand{objectnumber("#1::#2","\defaultobjectreference{#1}{#2}")}}}
+\unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxcommand{objectpage("#1::#2","\defaultobjectpage{#1}{#2}")}}}
-\def\setobject {\driverreferenced\dosetobject1}
-\def\settightobject{\driverreferenced\dosetobject0}
+\unexpanded\def\setobject {\driverreferenced\pack_objects_set1}
+\unexpanded\def\settightobject{\driverreferenced\pack_objects_set0}
%D \macros
%D {doifobjectfoundelse,doifobjectreferencefoundelse}
@@ -393,13 +399,14 @@
%D \doifobjectreferencefoundelse{class}{object}{do then}{do else}
%D \stoptyping
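
For instance, a \getobject call can be guarded with this test (class and object names are again made up for the sketch):

  \doifobjectfoundelse{demo}{one}
    {\getobject{demo}{one}}
    {\hbox{[object demo::one not flushed yet]}}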
-\def\doifobjectfoundelse#1#2%
- {\ifcsname\??ob:#1::#2\endcsname
+\unexpanded\def\doifobjectfoundelse#1#2%
+ {\ifcsname\??objects#1::#2\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\def\doifobjectreferencefoundelse#1#2{\ctxlua{job.objects.doifelse("#1::#2")}}
+\unexpanded\def\doifobjectreferencefoundelse#1#2%
+ {\ctxcommand{doifobjectreferencefoundelse("#1::#2")}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/pack-pos.mkiv b/Master/texmf-dist/tex/context/base/pack-pos.mkiv
index fab73bc4a3b..f92ceb78ab2 100644
--- a/Master/texmf-dist/tex/context/base/pack-pos.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-pos.mkiv
@@ -23,114 +23,142 @@
% \position[ystep=relative](3,-1){test}
% \position(10,10){test}
% \stoppositioning}
+%
+% watch out: rather global
-\newdimen\positioningxposition \newdimen\positioningyposition
-\newdimen\positioningxdimension \newdimen\positioningydimension
-\newdimen\positioningxoffset \newdimen\positioningyoffset
+\installcorenamespace {positioning}
-\newbox\positioningbox
+\installcommandhandler \??positioning {positioning} \??positioning
-\unexpanded\def\startpositioning
- {\dosingleempty\dostartpositioning}
+\setuppositioning
+ [\c!state=\v!start,
+ \c!unit=\s!cm,
+ \c!factor=\plusone,
+ \c!scale =\plusone,
+ \c!xfactor=\positioningparameter\c!factor,
+ \c!yfactor=\positioningparameter\c!factor,
+ \c!xscale=\positioningparameter\c!scale,
+ \c!yscale=\positioningparameter\c!scale,
+ \c!xstep=\v!absolute,
+ \c!ystep=\v!absolute,
+ \c!xoffset=\zeropoint,
+ \c!yoffset=\zeropoint]
+
+\newdimen\d_pack_positioning_x_position
+\newdimen\d_pack_positioning_y_position
+\newdimen\d_pack_positioning_x_dimension
+\newdimen\d_pack_positioning_y_dimension
+\newdimen\d_pack_positioning_x_offset
+\newdimen\d_pack_positioning_y_offset
-\def\dostartpositioning[#1]%
+\newbox\b_pack_positioning
+
+\unexpanded\def\startpositioning
{\bgroup
- \getparameters[\??ps][#1]%
- \positioningxposition \zeropoint \positioningyposition \zeropoint
- \positioningxdimension\zeropoint \positioningydimension\zeropoint
- \positioningxoffset \zeropoint \positioningyoffset \zeropoint
- \hfuzz \paperwidth \vfuzz \paperheight
- \setbox\positioningbox\hbox\bgroup
+ \dodoubleempty\pack_positioning_start}
+
+\def\pack_positioning_start[#1][#2]%
+ {\ifsecondargument
+ \edef\currentpositioning{#1}%
+ \setupcurrentpositioning[#2]%
+ \else\iffirstargument
+ \doifassignmentelse{#1}
+ {\let\currentpositioning\empty
+ \setupcurrentpositioning[#1]}%
+ {\edef\currentpositioning{#1}}%
+ \else
+ \let\currentpositioning\empty
+ \fi\fi
+ \d_pack_positioning_x_position \zeropoint
+ \d_pack_positioning_y_position \zeropoint
+ \d_pack_positioning_x_dimension\zeropoint
+ \d_pack_positioning_y_dimension\zeropoint
+ \d_pack_positioning_x_offset \zeropoint
+ \d_pack_positioning_y_offset \zeropoint
+ \hfuzz\paperwidth
+ \vfuzz\paperheight
+ \setbox\b_pack_positioning\hbox\bgroup
\ignorespaces}
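
Judging from the argument handling above, the reworked \startpositioning appears to accept an optional instance name and/or a settings list; a tentative sketch of both forms, in which the instance name and all values are assumptions:

  \startpositioning[unit=mm,factor=1]
    \position(10,10){test}
  \stoppositioning

  \startpositioning[myinstance][xoffset=1cm]
    \position(1,1){A}
  \stoppositioning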
\unexpanded\def\stoppositioning
{\removeunwantedspaces
- \doifnot\@@psoffset\v!yes
- {\global\positioningxoffset\zeropoint
- \global\positioningyoffset\zeropoint}%
- \global\advance\positioningxdimension \positioningxoffset
- \global\advance\positioningydimension \positioningyoffset
+ \doifnot{\positioningparameter\c!offset}\v!yes
+ {\global\d_pack_positioning_x_offset\zeropoint
+ \global\d_pack_positioning_y_offset\zeropoint}%
+ \global\advance\d_pack_positioning_x_dimension\d_pack_positioning_x_offset
+ \global\advance\d_pack_positioning_y_dimension\d_pack_positioning_y_offset
\egroup
- \vbox to \positioningydimension
- {\vskip\positioningyoffset
- \hbox to \positioningxdimension
- {\hskip\positioningxoffset
- \box\positioningbox
+ \vbox to \d_pack_positioning_y_dimension
+ {\vskip\d_pack_positioning_y_offset
+ \hbox to \d_pack_positioning_x_dimension
+ {\hskip\d_pack_positioning_x_offset
+ \box\b_pack_positioning
\hfill}
\vfill}%
\egroup}
-\def\resetpositioning
- {\let\@@psstate \v!start
- \let\@@psunit \s!cm
- \let\@@psfactor \plusone
- \let\@@psscale \plusone
- \def\@@psxfactor{\@@psfactor}%
- \def\@@psyfactor{\@@psfactor}%
- \def\@@psxscale {\@@psscale}%
- \def\@@psyscale {\@@psscale}%
- \let\@@psxstep \v!absolute
- \let\@@psystep \v!absolute
- \let\@@psxoffset \zeropoint
- \let\@@psyoffset \zeropoint}
-
-\resetpositioning
-
-\unexpanded\def\setuppositioning
- {\resetpositioning
- \dodoubleargument\getparameters[\??ps]}
-
-\def\calculateposition#1#2#3#4#5#6#7#8#9%
- {\setdimensionwithunit\scratchdimen{#1}\@@psunit
- \scratchdimen#8\scratchdimen
- \scratchdimen#9\scratchdimen
- \advance\scratchdimen #4\relax
+\unexpanded\def\pack_positioning_calculate#1#2#3#4#5#6#7#8#9%
+ {\setdimensionwithunit\scratchdimen{#1}{\positioningparameter\c!unit}%
+ \scratchdimen\positioningparameter#8\scratchdimen
+ \scratchdimen\positioningparameter#9\scratchdimen
+ \advance\scratchdimen\positioningparameter#4\relax
% == \scratchdimen\dimexpr#8\dimexpr#9\scratchdimen\relax+#4\relax
- \doif{#2}\v!relative
+ \doif{\positioningparameter#2}\v!relative
{\advance\scratchdimen#3%
- \let#4\zeropoint}%
+ \letpositioningparameter#4\zeropoint}%
#3\scratchdimen
- \doifnot\@@psstate\v!overlay
- {\scratchdimen\dimexpr#5+#3\relax
+ \doifnot{\positioningparameter\c!state}\v!overlay
+ {\scratchdimen\dimexpr#5\nextbox+#3\relax
\ifdim #3<-#7\relax \global#7-#3\relax \fi
\ifdim\scratchdimen> #6\relax \global#6\scratchdimen\fi}}
-\def\position
- {\dosingleempty\doposition}
-
-\def\doposition[#1]#2(#3,#4)%
- {\removeunwantedspaces
- \dowithnextbox{\dodoposition{#1}{#2}{#3}{#4}}\hbox}
-
-\def\dodoposition#1#2#3#4%
+\unexpanded\def\position
{\bgroup
- \dontcomplain
- \getparameters[\??ps][#1]%
- \calculateposition{#3}\@@psxstep\positioningxposition\@@psxoffset\nextboxwd \positioningxdimension\positioningxoffset\@@psxscale\@@psxfactor
- \calculateposition{#4}\@@psystep\positioningyposition\@@psyoffset\nextboxhtdp\positioningydimension\positioningyoffset\@@psyscale\@@psyfactor
+ \dosingleempty\pack_positioning_position}
+
+\def\pack_positioning_position[#1]#2(#3,#4)%
+ {\iffirstargument
+ \setupcurrentpositioning[#1]%
+ \fi
+ \removeunwantedspaces
+ \dowithnextbox{\pack_positioning_position_indeed{#3}{#4}}\hbox}
+
+\def\pack_positioning_position_indeed#1#2%
+ {\dontcomplain
+ \pack_positioning_calculate
+ {#1}%
+ \c!xstep
+ \d_pack_positioning_x_position
+ \c!xoffset
+ \wd
+ \d_pack_positioning_x_dimension
+ \d_pack_positioning_x_offset
+ \c!xscale
+ \c!xfactor
+ \pack_positioning_calculate
+ {#2}%
+ \c!ystep
+ \d_pack_positioning_y_position
+ \c!yoffset
+ \htdp
+ \d_pack_positioning_y_dimension
+ \d_pack_positioning_y_offset
+ \c!yscale
+ \c!yfactor
\vbox to \zeropoint
- {\vskip\positioningyposition
+ {\offinterlineskip % else we get an empty line
+ \vskip\d_pack_positioning_y_position
\hbox to \zeropoint
- {\hskip\positioningxposition
- \flushnextbox
+ {\hskip\d_pack_positioning_x_position
+ \box\nextbox
\hss}
\vss}%
\normalexpanded
{\egroup
- \positioningxposition\the\positioningxposition
- \positioningyposition\the\positioningyposition
- \def\noexpand\@@psxoffset{\the\dimexpr\@@psxoffset}%
- \def\noexpand\@@psyoffset{\the\dimexpr\@@psyoffset}}%
+ \d_pack_positioning_x_position\the\d_pack_positioning_x_position
+ \d_pack_positioning_y_position\the\d_pack_positioning_y_position
+ \setexpandedpositioningparameter\c!xoffset{\the\dimexpr\positioningparameter\c!xoffset}%
+ \setexpandedpositioningparameter\c!yoffset{\the\dimexpr\positioningparameter\c!yoffset}}%
\ignorespaces}
-\setuppositioning
- [\c!unit=\s!cm,
- \c!factor=\plusone,
- \c!scale=\plusone,
- \c!xstep=\v!absolute,
- \c!ystep=\v!absolute,
- \c!offset=\v!yes,
- \c!xoffset=\zeropoint,
- \c!yoffset=\zeropoint]
-
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/pack-rul.mkiv b/Master/texmf-dist/tex/context/base/pack-rul.mkiv
index 480997919f7..7b040a40038 100644
--- a/Master/texmf-dist/tex/context/base/pack-rul.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-rul.mkiv
@@ -13,31 +13,27 @@
\writestatus{loading}{ConTeXt Packaging Macros / Ruled Content}
-%D The code here is expanded lots of time as framed is used in
-%D many places. This is why the code here is (and gets) optimized
-%D as much as possible. Also, by avoiding packaging and expansion
-%D we also keep tracing reasonable. For instance, multiple stacked
-%D backgrounds can slow down a run if not optimized this way.
-
-% eventually this will use the commandhandler code (same trick as
-% with itemize)
+%D The code here is expanded lots of times as framed is used in many places. This is
+%D why the code here is (and gets) optimized as much as possible. Also, by avoiding
+%D packaging and expansion we keep tracing reasonable. For instance, multiple
+%D stacked backgrounds can slow down a run if not optimized this way.
\registerctxluafile{pack-rul}{1.001}
\unprotect
+\definesystemvariable {ol} % OmLijnd -> check scrn-fld too
+
%D \macros
%D {linewidth, setuplinewidth}
%D
-%D This module deals with rules (lines) in several ways. First
-%D we introduce two macros that can be used to set some common
-%D characteristics.
+%D This module deals with rules (lines) in several ways. First we introduce two
+%D macros that can be used to set some common characteristics.
%D
%D \showsetup{setuplinewidth}
%D
-%D The linewidth is available in \type{\linewidth}. The
-%D preset value of .4pt equals the default hard coded \TEX\
-%D rule width.
+%D The linewidth is available in \type{\linewidth}. The preset value of .4pt equals
+%D the default hard coded \TEX\ rule width.
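
A hedged example of changing it, assuming the single-value interface of \setuplinewidth; the value is arbitrary:

  \setuplinewidth[0.6pt]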
\newdimen\linewidth
@@ -54,8 +50,16 @@
%D
%D \showsetup{setupscreens}
-\unexpanded\def\setupscreens
- {\dodoubleargument\getparameters[\??rs]}
+\installcorenamespace{screens}
+
+\installsetuponlycommandhandler \??screens {screens}
+
+\appendtoks
+ \edef\defaultbackgroundscreen{\directscreensparameter\c!screen}
+\to \everysetupscreens
+
+\setupscreens
+ [\c!screen=.90] % was .95 but that's hardly visible
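
A screened background would then be requested roughly like this (the .85 and the sample text are assumptions):

  \setupscreens[screen=.85]

  \framed[background=screen]{with a light gray background}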
%D The parameter handler:
@@ -65,41 +69,26 @@
\installcorenamespace{framedleft}
\installcorenamespace{framedright}
-\let\currentframed\s!unknown % brrr must have a value
-
-% \def\framedparameter #1{\csname\doframedparameter\currentframed{#1}\endcsname}
-% \def\framedparameterhash#1{\doframedparameterhash \currentframed#1}
-
-\def\framedparameter #1{\csname\ifcsname\currentframed#1\endcsname\currentframed#1\else\expandafter\doframedparentparameter\csname\currentframed\s!parent\endcsname{#1}\fi\endcsname}
-\def\framedparameterhash#1{\ifcsname\currentframed#1\endcsname\currentframed\else\expandafter\doframedparentparameterhash\csname\currentframed\s!parent\endcsname#1\fi}
+\installcorenamespace{regularframed}
+\installcorenamespace{simplifiedframed}
-\def\doframedparameter #1#2{\ifcsname#1#2\endcsname#1#2\else\expandafter\doframedparentparameter \csname#1\s!parent\endcsname{#2}\fi}
-\def\doframedparameterhash#1#2{\ifcsname#1#2\endcsname #1\else\expandafter\doframedparentparameterhash\csname#1\s!parent\endcsname#2\fi}
+\installcommandhandler \??framed {framed} \??framed
-\def\doframedparentparameter #1#2{\ifx#1\relax\s!empty\else\doframedparameter #1{#2}\fi}
-\def\doframedparentparameterhash#1#2{\ifx#1\relax \else\doframedparameterhash#1#2\fi}
+\let\pack_framed_framedparameter \framedparameter
+\let\pack_framed_framedparameterhash\framedparameterhash
+\let\pack_framed_setupcurrentframed \setupcurrentframed
-\def\doframedparentparameter#1#2{\ifx#1\relax\doframedrootparameter#2\else\doframedparameter#1{#2}\fi}
-\def\doframedrootparameter #1{\ifcsname\??framed#1\endcsname\??framed#1\else\s!empty\fi}
+\def\pack_framed_initialize
+ {\let\framedparameter \pack_framed_framedparameter
+ \let\framedparameterhash\pack_framed_framedparameterhash
+ \let\setupcurrentframed \pack_framed_setupcurrentframed
+ \inframedtrue}
-\def\useframedstyleandcolor#1#2% style color
- {\edef\currentstyleparameter{\framedparameter#1}%
- \edef\currentcolorparameter{\framedparameter#2}%
- \ifx\currentstyleparameter\empty\else\dousestyleparameter\currentstyleparameter\fi
- \ifx\currentcolorparameter\empty\else\dousecolorparameter\currentcolorparameter\fi}
+%D A helper:
\def\frameddimension#1{\the\dimexpr\framedparameter{#1}\relax}
-% \unexpanded\def\installsomebackground#1#2{\inheritlocalframed[\??ma#1#2][\??od]}
-
-\let\normalframedparameter \framedparameter
-\let\normalframedparameterhash\framedparameterhash
-
-\def\pack_framed_initialize#1% will be inlined
- {\inframedtrue
- \edef\currentframed{#1}%
- \let\framedparameter \normalframedparameter
- \let\framedparameterhash\normalframedparameterhash}
+%D Inheritance:
\def\installinheritedframed#1%
{\normalexpanded{\doinstallinheritedframed
@@ -109,19 +98,21 @@
\expandafter\noexpand\csname do#1parameter\endcsname
\expandafter\noexpand\csname do#1parentparameter\endcsname
\expandafter\noexpand\csname do#1rootparameter\endcsname
+ \expandafter\noexpand\csname setupcurrent#1\endcsname
\expandafter\noexpand\csname inherited#1framed\endcsname
\noexpand\??framed}} % if needed we can have a variant
-\unexpanded\def\doinstallinheritedframed#1#2#3#4#5#6#7#8%
- {\def#5##1##2{\ifx##1\relax#6{##2}\else#4##1{##2}\fi}%
- \def#6##1{\ifcsname#8##1\endcsname#8##1\else\s!empty\fi}%
- \unexpanded\def#7%
+\unexpanded\def\doinstallinheritedframed#1#2#3#4#5#6#7#8#9%
+ {\def#5##1##2{\ifx##1\relax#6{##2}\else#4{##1}{##2}\fi}%
+ \def#6##1{\ifcsname#9:##1\endcsname#9:##1\else\s!empty\fi}% root
+ \unexpanded\def#8%
{\bgroup
\bgroup
\inframedtrue
- \let\currentframed #1% not used (more for tracing)
+ \let\currentframed #1%
\let\framedparameter #2%
\let\framedparameterhash#3%
+ \let\setupcurrentframed #7%
\pack_framed_process_indeed}}
\unexpanded\def\installframedcommandhandler#1#2#3%
@@ -138,12 +129,11 @@
% for regular framed
-\getparameters
- [\??framed]
+\setupframed
[\c!width=\v!fit,
\c!height=\v!broad,
%\c!lines=,
- \c!offset=0.25ex, % \defaultframeoffset
+ \c!offset=.25ex, % \defaultframeoffset
\c!empty=\v!no,
\c!frame=\v!on,
%\c!topframe=,
@@ -158,11 +148,12 @@
%\c!foregroundstyle=,
%\c!background=,
%\c!backgroundscreen=,
+ \c!backgroundscreen=\defaultbackgroundscreen,
%\c!backgroundcolor=,
\c!backgroundoffset=\zeropoint,
%\c!framecolor=,
\c!frameoffset=\zeropoint,
- \c!backgroundcorner=\framedparameter\c!corner,
+ \c!backgroundcorner=\framedparameter\c!corner, % use \p_ here
\c!backgroundradius=\framedparameter\c!radius,
\c!backgrounddepth=\framedparameter\c!depth,
\c!framecorner=\framedparameter\c!corner,
@@ -184,21 +175,21 @@
\c!toffset=\zeropoint,
\c!boffset=\zeropoint]
-% for backgrounds
+%D For backgrounds and such:
-\getparameters
- [\??od] % for fast version
+\defineframed
+ [\??simplifiedframed]
[\c!frame=\v!off,
\c!depth=\zeropoint,
\c!offset=\v!overlay,
- %\c!component=,
- %\c!region=,
+ \c!component=,
+ \c!region=,
\c!radius=.5\bodyfontsize,
\c!rulethickness=\linewidth,
\c!corner=\v!rectangular,
\c!backgroundoffset=\zeropoint,
\c!frameoffset=\zeropoint,
- \c!backgroundcorner=\framedparameter\c!corner,
+ \c!backgroundcorner=\framedparameter\c!corner, % use \p_ here
\c!backgroundradius=\framedparameter\c!radius,
\c!backgrounddepth=\framedparameter\c!depth,
\c!framecorner=\framedparameter\c!corner,
@@ -210,13 +201,19 @@
\c!toffset=\zeropoint,
\c!boffset=\zeropoint]
-%D We will communicate through module specific variables, current
-%D framed parameters and some reserved dimension registers.
+\unexpanded\def\definesimplifiedframed[#1]% no settings
+ {\defineframed[#1][\??simplifiedframed]%
+ \expandafter\let\csname#1\endcsname\undefined}
+
+\expandafter\let\csname\??simplifiedframed\endcsname\undefined
+
+%D We will communicate through module specific variables, current framed
+%D parameters and some reserved dimension registers.
\newdimen\d_framed_target_wd
\newdimen\d_framed_target_ht
\newdimen\d_framed_target_dp
-\newdimen\d_framed_linewidth
+\newdimen\d_framed_linewidth \let\ruledlinewidth\d_framed_linewidth % needed at lua end
\let\p_framed_frame \empty % \framedparameter\c!frame
\let\p_framed_backgroundoffset\empty
@@ -255,9 +252,9 @@
\def\pack_framed_filled_box_normal
{\vrule
- \!!width \d_framed_target_wd
- \!!height\d_framed_target_ht
- \!!depth \d_framed_target_dp
+ \s!width \d_framed_target_wd
+ \s!height\d_framed_target_ht
+ \s!depth \d_framed_target_dp
\relax}
\def\pack_framed_filled_box_radius
@@ -269,16 +266,10 @@
\fi}
\def\pack_framed_filled_box_round
- {\normalexpanded{\doovalbox
- {\the\d_framed_target_wd}%
- {\the\d_framed_target_ht}%
- {\the\d_framed_target_dp}%
- {\the\dimexpr\d_framed_linewidth\relax}%
- {\the\dimexpr\p_framed_backgroundradius\relax}%
- {0}%
- {1}%
- {\ifx\p_framed_backgroundcorner\v!round0\else\number\p_framed_backgroundcorner\fi}%
- }}
+ {\back_ovalbox
+ \d_framed_target_wd \d_framed_target_ht \d_framed_target_dp
+ \d_framed_linewidth \p_framed_backgroundradius
+ \zerocount \plusone {\ifx\p_framed_backgroundcorner\v!round0\else\number\p_framed_backgroundcorner\fi}}
\def\pack_framed_stroked_box
{\edef\p_framed_framecorner{\framedparameter\c!framecorner}%
@@ -292,25 +283,17 @@
{\edef\p_framed_frameradius{\framedparameter\c!frameradius}%
\ifzeropt\dimexpr\p_framed_frameradius\relax % just in case of .x\bodyfontsize
\pack_framed_stroked_box_normal
- \else
- \ifx\p_framed_frame\v!on
- \pack_framed_stroked_box_round
- \fi
- \fi}
+ \else\ifx\p_framed_frame\v!on
+ \pack_framed_stroked_box_round
+ \fi\fi}
% \pack_framed_stroked_box_normal % later
\def\pack_framed_stroked_box_round
- {\normalexpanded{\doovalbox
- {\the\d_framed_target_wd}%
- {\the\d_framed_target_ht}%
- {\the\d_framed_target_dp}%
- {\the\dimexpr\d_framed_linewidth\relax}%
- {\the\dimexpr\p_framed_frameradius\relax}%
- {1}%
- {0}%
- {\ifx\p_framed_framecorner\v!round0\else\number\p_framed_framecorner\fi}%
- }}
+ {\back_ovalbox
+ \d_framed_target_wd \d_framed_target_ht \d_framed_target_dp
+ \d_framed_linewidth \p_framed_frameradius
+ \plusone \zerocount {\ifx\p_framed_framecorner\v!round0\else\number\p_framed_framecorner\fi}}
% a lot of weird corners
%
@@ -370,8 +353,8 @@
\def\pack_framed_background_box_gray_indeed % can be more direct but who cares, just compatibility
{\colored[s=\p_framed_backgroundscreen]{\pack_framed_filled_box}}
-%D It won't be a surprise that we not only provide gray boxes,
-%D but also colored ones. Here it is:
+%D It won't be a surprise that we not only provide gray boxes, but also colored
+%D ones. Here it is:
\def\pack_framed_background_box_color
{\edef\p_framed_backgroundcolor{\framedparameter\c!backgroundcolor}%
@@ -387,10 +370,9 @@
%D overlaywidth, overlayheight, overlaydepth,
%D overlaycolor, overlaylinecolor, overlaylinewidth}
%D
-%D Before we define the macro that actually takes card of the
-%D backgrounds, we introduce overlays. An overlay is something
-%D that contrary to its name lays {\em under} the text. An
-%D example of an overlay definition is:
+%D Before we define the macro that actually takes care of the backgrounds, we
+%D introduce overlays. An overlay is something that, contrary to its name, lies
+%D {\em under} the text. An example of an overlay definition is:
%D
%D \startbuffer[tmp-1]
%D \defineoverlay
@@ -420,17 +402,16 @@
%D
%D \showsetup{defineoverlay}
%D
-%D This macro's definition is a bit obscure, due the many
-%D non||used arguments and the two step call that enable the
-%D setting of the width, height and depth variables.
-%D Multiple backgrounds are possible and are specified as:
+%D This macro's definition is a bit obscure, due to the many non||used arguments and
+%D the two||step call that enables the setting of the width, height and depth
+%D variables. Multiple backgrounds are possible and are specified as:
%D
%D \starttyping
%D \framed[background={one,two,three}]{Three backgrounds!}
%D \stoptyping
%D
-%D Most drawing packages only know width and height. Therefore
-%D the dimensions have a slightly different meaning here:
+%D Most drawing packages only know width and height. Therefore the dimensions have a
+%D slightly different meaning here:
%D
%D \startitemize[packed]
%D \item \type{\overlaywidth }: width of the overlay
@@ -452,9 +433,8 @@
\newtoks\everyoverlay
-%D An example of an initialization is the following (overlays
-%D can contain text and be executed under an regime where
-%D interlineskip is off).
+%D An example of an initialization is the following (overlays can contain text
+%D and be executed under a regime where interlineskip is off).
\installcorenamespace{overlay}
\installcorenamespace{overlaybuiltin}
@@ -515,8 +495,8 @@
\expandafter\secondoftwoarguments
\fi}
-%D The content of the box will be (temporary) saved in a box. We
-%D also have an extra box for backgrounds.
+%D The content of the box will be (temporarily) saved in a box. We also have an
+%D extra box for backgrounds.
\newbox\b_framed_normal
\newbox\b_framed_extra
@@ -572,8 +552,8 @@
\expandafter\pack_framed_process_backgrounds
\fi#2}
-% beware, a backgroundbox can be empty which is another reason
-% why we set the width to zero instead of back-skipping
+%D Beware, a backgroundbox can be empty, which is another reason why we set the
+%D width to zero instead of back-skipping.
\newdimen\framedbackgroundwidth
\newdimen\framedbackgroundheight
@@ -588,7 +568,7 @@
\def\pack_framed_add_background
{\setbox\b_framed_normal\hbox % was vbox
- {\pack_framed_forgetall % can be relaxed
+ {%\pack_framed_forgetall % can be relaxed
\boxmaxdepth\maxdimen
\framedbackgroundoffset\d_framed_backgroundoffset
\framedbackgroundwidth \wd\b_framed_normal
@@ -612,8 +592,7 @@
\egroup}}
\def\pack_framed_overlay_initialize_indeed
- {%\writestatus{!!!!}{<<<<<}%
- \edef\overlaywidth {\the\d_framed_target_wd\space}%
+ {\edef\overlaywidth {\the\d_framed_target_wd\space}%
\edef\overlayheight {\the\dimexpr\d_framed_target_ht+\d_framed_target_dp\relax\space}%
\edef\overlaydepth {\the\d_framed_target_dp\space}%
\edef\overlaycolor {\framedparameter\c!backgroundcolor}% let ?
@@ -624,8 +603,8 @@
\edef\overlayoffset {\the\framedbackgroundoffset\space}% \backgroundoffset % we steal this one
\let\pack_framed_overlay_initialize\relax}
-%D One can explictly insert the foreground box. For that
-%D purpose we introduce the overlay \type {foreground}.
+%D One can explicitly insert the foreground box. For that purpose we introduce the
+%D overlay \type {foreground}.
%D
%D We predefine two already familiar backgrounds:
@@ -633,14 +612,13 @@
\letvalue{\??overlaybuiltin\v!color }\pack_framed_background_box_color
\letvalue{\??overlaybuiltin\v!foreground}\pack_framed_background_box_content % replaces: \defineoverlay[\v!foreground][\foregroundbox]
-%D We can specify overlays as a comma separated list of
-%D overlays, a sometimes handy feature.
-
-%D Besides backgrounds (overlays) we also need some macros to
-%D draw outlines (ruled borders). Again we have to deal with
-%D square and round corners. The first category can be handled
-%D by \TEX\ itself, the latter one depends on the driver. This
-%D macro also support a negative offset.
+%D We can specify overlays as a comma separated list of overlays, a sometimes
+%D handy feature.
+%D
+%D Besides backgrounds (overlays) we also need some macros to draw outlines (ruled
+%D borders). Again we have to deal with square and round corners. The first category
+%D can be handled by \TEX\ itself, the latter one depends on the driver. This macro
+%D also supports a negative offset.
\def\pack_framed_add_outline
{\setbox\b_framed_normal\hbox % rules on top of box
@@ -687,10 +665,10 @@
\dp\scratchbox\d_framed_target_dp
\box\scratchbox}
-\def\pack_framed_t_rule{\hrule\!!height\d_framed_linewidth\kern-\d_framed_linewidth}
-\def\pack_framed_b_rule{\kern-\d_framed_linewidth\hrule\!!height\d_framed_linewidth}
-\def\pack_framed_r_rule{\kern-\d_framed_linewidth\vrule\!!width\d_framed_linewidth}
-\def\pack_framed_l_rule{\vrule\!!width\d_framed_linewidth\kern-\d_framed_linewidth}
+\def\pack_framed_t_rule{\hrule\s!height\d_framed_linewidth\kern-\d_framed_linewidth}
+\def\pack_framed_b_rule{\kern-\d_framed_linewidth\hrule\s!height\d_framed_linewidth}
+\def\pack_framed_r_rule{\kern-\d_framed_linewidth\vrule\s!width\d_framed_linewidth}
+\def\pack_framed_l_rule{\vrule\s!width\d_framed_linewidth\kern-\d_framed_linewidth}
\letvalue{\??framedtop \v!on \v!on}\pack_framed_t_rule
\letvalue{\??framedtop \v!off\v!on}\pack_framed_t_rule
@@ -710,10 +688,10 @@
% no overlapping rules
-\def\pack_framed_t_rules{\hbox{\kern\d_framed_linewidth\vrule\!!width\dimexpr\d_framed_target_wd-2\d_framed_linewidth\relax\!!height\d_framed_linewidth}\nointerlineskip\kern-\d_framed_linewidth}
-\def\pack_framed_b_rules{\kern-\d_framed_linewidth\nointerlineskip\hbox{\kern\d_framed_linewidth\vrule\!!width\dimexpr\d_framed_target_wd-2\d_framed_linewidth\relax\!!height\d_framed_linewidth}}
-\def\pack_framed_r_rules{\kern-\d_framed_linewidth\vrule\!!height\dimexpr\d_framed_target_ht-\d_framed_linewidth\relax\!!depth-\d_framed_linewidth\!!width\d_framed_linewidth}
-\def\pack_framed_l_rules{\vrule\!!height\dimexpr\d_framed_target_ht-\d_framed_linewidth\relax\!!depth-\d_framed_linewidth\!!width\d_framed_linewidth\kern-\d_framed_linewidth}
+\def\pack_framed_t_rules{\hbox{\kern\d_framed_linewidth\vrule\s!width\dimexpr\d_framed_target_wd-2\d_framed_linewidth\relax\s!height\d_framed_linewidth}\nointerlineskip\kern-\d_framed_linewidth}
+\def\pack_framed_b_rules{\kern-\d_framed_linewidth\nointerlineskip\hbox{\kern\d_framed_linewidth\vrule\s!width\dimexpr\d_framed_target_wd-2\d_framed_linewidth\relax\s!height\d_framed_linewidth}}
+\def\pack_framed_r_rules{\kern-\d_framed_linewidth\vrule\s!height\dimexpr\d_framed_target_ht-\d_framed_linewidth\relax\s!depth-\d_framed_linewidth\s!width\d_framed_linewidth}
+\def\pack_framed_l_rules{\vrule\s!height\dimexpr\d_framed_target_ht-\d_framed_linewidth\relax\s!depth-\d_framed_linewidth\s!width\d_framed_linewidth\kern-\d_framed_linewidth}
\letvalue{\??framedtop \v!small\v!small}\pack_framed_t_rules
\letvalue{\??framedtop \v!off \v!small}\pack_framed_t_rules
@@ -744,11 +722,10 @@
% frame=off,rightframe=small,leftframe=small,topframe=small,bottomframe=on]
% {}
-%D The next few macros are probably the most misused ones in
-%D \CONTEXT. They deal with putting rules around boxes, provide
-%D backgrounds, offer alignment features, and some more. We
-%D start with defining some booleans. These give an impression
-%D of what we are going to take into account.
+%D The next few macros are probably the most misused ones in \CONTEXT. They deal
+%D with putting rules around boxes, provide backgrounds, offer alignment features,
+%D and some more. We start with defining some booleans. These give an impression of
+%D what we are going to take into account.
% todo : \c_framed_hasoffset
% faster : \let\c_framed_hasoffset\falseconditional
@@ -766,67 +743,72 @@
%D \macros
%D {framed, setupframed}
%D
-%D Ruled boxes are typeset using \type{\framed}. This command
-%D is quite versatile and, although some users will probably
-%D seldom use it, one cannot overlook its features.
+%D Ruled boxes are typeset using \type{\framed}. This command is quite versatile
+%D and, although some users will probably seldom use it, one cannot overlook its
+%D features.
%D
-%D \showsetup{setupframed}
-%D \showsetup{framed}
+%D \showsetup{setupframed}
+%D \showsetup{framed}
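+%D
+%D For readers new to these commands a minimal usage sketch may help (an editorial
+%D addition; the keys shown are regular \type {\framed} keys):
+%D
+%D \starttyping
+%D \setupframed [framecolor=darkblue]
+%D \framed [width=4cm,align=middle] {a centered box, four centimeters wide}
+%D \stoptyping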
%D
-%D This general macro is a special version of an even more
-%D general case, that can easily be linked into other macros
-%D that need some kind of framing. The local version is called
-%D with an extra parameter: the variable identifier. The reason
-%D for passing this identifier between brackets lays in the
-%D mere fact that this way we can use the optional argument
-%D grabbers.
+%D This general macro is a special version of an even more general case that can
+%D easily be linked into other macros that need some kind of framing. The local
+%D version is called with an extra parameter: the variable identifier. The reason
+%D for passing this identifier between brackets lies in the mere fact that this way
+%D we can use the optional argument grabbers.
\def\defaultframeoffset{.25ex}
-\def\presetlocalframed [#1]{\letvalue {#1\s!parent}\??framed}
-\def\inheritlocalframed[#1]#2[#3]{\setevalue{#1\s!parent}{#3}}
+\installcorenamespace{regularframedlevel}
+
+\unexpanded\def\installregularframed#1%
+ {\defineframed[#1]}
-\presetlocalframed[\??ol]
+\unexpanded\def\presetlocalframed[#1]%
+ {\defineframed[#1]}
-\newcount\framednesting
+% \presetlocalframed[\??framed]
+
+\newcount\c_pack_framed_nesting
+
+\unexpanded\def\pack_framed_process_framed[#1]%
+ {\bgroup
+ \iffirstargument % faster
+ \setupcurrentframed[#1]% here !
+ \fi
+ \pack_framed_process_indeed}
\unexpanded\def\framed
{\bgroup
- \advance\framednesting\plusone
- \expandafter\let\csname\??ol:\the\framednesting\s!parent\endcsname\??ol
- \dodoubleempty\pack_framed_process[\??ol:\the\framednesting]}
+ \advance\c_pack_framed_nesting\plusone
+ \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+ \edef\currentframed{>\the\c_pack_framed_nesting}%
+ \pack_framed_initialize
+ \dosingleempty\pack_framed_process_framed}
\unexpanded\def\startframed
{\dosingleempty\pack_framed_start_framed}
\def\pack_framed_start_framed[#1]%
{\bgroup
- \advance\framednesting\plusone
- \expandafter\let\csname\??ol:\the\framednesting\s!parent\endcsname\??ol
+ \advance\c_pack_framed_nesting\plusone
+ \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
\iffirstargument\secondargumenttrue\fi % dirty trick
- \pack_framed_process[\??ol:\the\framednesting][#1]%
+ \edef\currentframed{>\the\c_pack_framed_nesting}%
+ \pack_framed_initialize
+ \pack_framed_process_framed[#1]% can be inlined
\bgroup}
\let\stopframed\egroup
-\unexpanded\def\setupframed
- {\dodoubleempty\dosetupframed}
-
-\unexpanded\def\normalframedwithsettings
+\unexpanded\def\normalframedwithsettings[#1]%
{\bgroup
- \advance\framednesting\plusone
- \expandafter\let\csname\??ol:\the\framednesting\s!parent\endcsname\??ol
- \pack_framed_process[\??ol:\the\framednesting]}
-
-% we can consider setting the parent of the regular framed to
-% something else in the otr so that we isolate it there
-
-\def\dosetupframed[#1][#2]%
- {\ifsecondargument
- \getparameters[\??ol#1][#2]%
- \else
- \getparameters[\??ol][#1]%
- \fi}
+ \advance\c_pack_framed_nesting\plusone
+ \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+ \bgroup
+ \edef\currentframed{>\the\c_pack_framed_nesting}%
+ \pack_framed_initialize
+ \setupcurrentframed[#1]%
+ \pack_framed_process_indeed}
%D \startbuffer
%D \setupframed [framecolor=yellow] \framed{A}
@@ -846,13 +828,11 @@
%D \macros
%D {ifinframed}
%D
-%D The normal case first presets all parameters and next starts
-%D looking for the user supplied ones. The first step is
-%D omitted in the local case, because these are preset at
-%D declaration time and keep their values unless explictly
-%D changed. By presetting the variables everytime the normal
-%D command is called, we can use this command nested, without
-%D the unwanted side effect of inheritance. The boolean is
+%D The normal case first presets all parameters and next starts looking for the
+%D user supplied ones. The first step is omitted in the local case, because these
+%D are preset at declaration time and keep their values unless explicitly changed.
+%D By presetting the variables every time the normal command is called, we can use
+%D this command nested, without the unwanted side effect of inheritance. The boolean is
%D used to speed up the color stack.
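+%D
+%D A small editorial sketch of that nesting behaviour: the outer adaptation does
+%D not leak into the inner frame, which simply gets the (re)preset values.
+%D
+%D \starttyping
+%D \framed
+%D   [framecolor=darkred,offset=1em]
+%D   {outer \framed{inner} outer}
+%D \stoptyping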
\newif\ifinframed
@@ -862,7 +842,8 @@
\unexpanded\def\fastlocalframed[#1]#2[#3]#4% 3-4
{\bgroup
- \pack_framed_initialize{#1}%
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
\setbox\b_framed_normal\hbox{#4}%
\edef\p_framed_region{\framedparameter\c!region}%
\ifx\p_framed_region\v!yes % maybe later named
@@ -880,8 +861,7 @@
\d_framed_linewidth\p_framed_rulethickness\relax
\fi
\pack_framed_add_outline % real or invisible frame
- \fi \fi
- \fi
+ \fi\fi
\ifx\p_framed_background\empty \else
\edef\p_framed_backgroundoffset{\framedparameter\c!backgroundoffset}%
\d_framed_backgroundoffset
@@ -897,12 +877,13 @@
\box\b_framed_normal
\egroup}
-%D The next macro uses a box and takes its natural width and
-%D height so these can better be correct.
+%D The next macro uses a box and takes its natural width and height, so these
+%D had better be correct.
-\def\localbackgroundframed#1#2#3% namespace component box
+\unexpanded\def\localbackgroundframed#1#2#3% namespace component box
{\bgroup
- \pack_framed_initialize{#1}%
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
\setbox\b_framed_normal\box#3%
\edef\p_framed_region{\framedparameter\c!region}%
\ifx\p_framed_region\v!yes % maybe later named
@@ -917,7 +898,7 @@
\d_framed_linewidth\p_framed_rulethickness\relax
\fi
\pack_framed_add_outline % real or invisible frame
- \fi \fi
+ \fi\fi
\ifx\p_framed_background\empty \else
\edef\p_framed_backgroundoffset{\framedparameter\c!backgroundoffset}%
\d_framed_backgroundoffset
@@ -955,35 +936,36 @@
\unexpanded\def\localframed
{\bgroup
- \dodoubleempty\pack_framed_process}
+ \dodoubleempty\pack_framed_local}
-\unexpanded\def\pack_framed_process[#1][#2]% assumes a \dodoubleempty (slows down), also should have leading \bgroup
+\unexpanded\def\pack_framed_local[#1][#2]%
{\bgroup
- \pack_framed_initialize{#1}%
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
\ifsecondargument % faster
- \getparameters[\currentframed][#2]% here !
+ \setupcurrentframed[#2]% here !
\fi
\pack_framed_process_indeed}
\unexpanded\def\directlocalframed[#1]% no optional
{\bgroup
\bgroup
- \pack_framed_initialize{#1}%
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
\pack_framed_process_indeed}
\unexpanded\def\localframedwithsettings[#1][#2]% no checking (so no spaces between)
{\bgroup
\bgroup
- \pack_framed_initialize{#1}%
- \getparameters[\currentframed][#2]% here !
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
+ \setupcurrentframed[#2]% here !
\pack_framed_process_indeed}
% done
\def\c!fr!analyze{fr:analyze} % private option
-% we can make macros for the offset, width, and height branches or do an \csname
-
\unexpanded\def\pack_framed_process_indeed
{\d_framed_frameoffset\framedparameter\c!frameoffset
\edef\p_framed_backgroundoffset{\framedparameter\c!backgroundoffset}%
@@ -1032,84 +1014,42 @@
\else
\d_framed_linewidth\zeropoint
\fi
+ % 2013/03/12: a change of order (sizes before align)
+ \ifx\localwidth\v!local
+ \setlocalhsize
+ \fi
+ %
+ \forgetall % should happen after \localwidth but before align
+ %
\ifx\localformat\empty
\setfalse\c_framed_has_format
\else
\settrue\c_framed_has_format
\dosetraggedcommand\localformat % not that fast
\fi
- \ifx\localoffset\v!none
- \setfalse\c_framed_has_offset
- \setfalse\c_framed_has_strut
- \setfalse\c_framed_is_overlaid
- \d_framed_local_offset\d_framed_linewidth
- \else\ifx\localoffset\v!overlay
- % \ifx\p_framed_frame\v!no \setfalse\c_framed_has_frame \fi % test first
- \setfalse\c_framed_has_offset
- \setfalse\c_framed_has_strut
- \settrue \c_framed_is_overlaid
- \d_framed_local_offset\zeropoint
-% \else\ifx\localoffset\v!strut
-% \setfalse\c_framed_has_offset
-% \settrue \c_framed_has_strut
-% \settrue \c_framed_is_overlaid
-% \d_framed_local_offset\zeropoint
- \else
- \settrue \c_framed_has_offset
- \settrue \c_framed_has_strut
- \setfalse\c_framed_is_overlaid
- \ifx\localoffset\v!default % new per 2-6-2000
- \let\localoffset\defaultframeoffset
- \letframedparameter\c!offset\defaultframeoffset % brrr
- \else
- \let\defaultframeoffset\localoffset
- \fi
- \d_framed_local_offset\dimexpr\localoffset+\d_framed_linewidth\relax
- \fi\fi % \fi
- \d_framed_height\zeropoint
- \d_framed_width \zeropoint
- \ifx\localwidth\empty % fallback to fit
- \ifconditional\c_framed_has_format
- \settrue\c_framed_has_width
- \d_framed_width\hsize
+ %
+ \csname\??framedoffsetalternative
+ \ifcsname\??framedoffsetalternative\localoffset\endcsname
+ \localoffset
\else
- \setfalse\c_framed_has_width
+ \s!unknown
\fi
- \else\ifx\localwidth\v!fit
- \ifconditional\c_framed_has_format
- \settrue\c_framed_has_width
- \d_framed_width\hsize
+ \endcsname
+ \csname\??framedwidthalternative
+ \ifcsname\??framedwidthalternative\localwidth\endcsname
+ \localwidth
\else
- \setfalse\c_framed_has_width
+ \s!unknown
\fi
- \else\ifx\localwidth\v!fixed % equals \v!fit but no shapebox
- \ifconditional\c_framed_has_format
- \settrue\c_framed_has_width
- \d_framed_width\hsize
+ \endcsname
+ \csname\??framedheightalternative
+ \ifcsname\??framedheightalternative\localheight\endcsname
+ \localheight
\else
- \setfalse\c_framed_has_width
+ \s!unknown
\fi
- \else\ifx\localwidth\v!broad
- \settrue\c_framed_has_width
- \d_framed_width\hsize
- \else\ifx\localwidth\v!local
- \settrue\c_framed_has_width
- \setlocalhsize
- \d_framed_width\localhsize
- \else
- \settrue\c_framed_has_width
- \d_framed_width\localwidth
- \fi\fi\fi\fi\fi
- \ifx\localheight\empty % fallback to fit
- \setfalse\c_framed_has_height
- \else\ifx\localheight\v!fit
- \setfalse\c_framed_has_height
- \else\ifx\localheight\v!broad
- \setfalse\c_framed_has_height
- \else
- \settrue\c_framed_has_height
- \d_framed_height\localheight
- \fi\fi\fi
+ \endcsname
+ % the next check could move to heightalternative
\ifconditional\c_framed_has_height
% obey user set height, also downward compatible
\else
@@ -1132,15 +1072,14 @@
% i.e. disable (colsetbackgroundproblemintechniek)
\advance\d_framed_width -2\d_framed_local_offset
\advance\d_framed_height -2\d_framed_local_offset
- \ifx\localstrut\v!no
- \setfalse\c_framed_has_strut
- \else\ifx\localstrut\v!global
- \setstrut
- \else\ifx\localstrut\v!local
- \setfontstrut
- \else
- \setstrut
- \fi\fi\fi
+ \csname\??framedstrutalternative
+ \ifcsname\??framedstrutalternative\localstrut\endcsname
+ \localstrut
+ \else
+ \s!unknown
+ \fi
+ \endcsname
+ % the next check could move to strutalternative
\ifconditional\c_framed_has_strut
\let\localbegstrut\begstrut
\let\localendstrut\endstrut
@@ -1202,12 +1141,12 @@
\fi
\pack_framed_check_extra_offsets
\edef\p_framed_background{\framedparameter\c!background}%
- \ifx\p_framed_background\empty
- \let\pack_framed_forgetall\forgetall
- \else
- \let\pack_framed_forgetall\relax
- \forgetall
- \fi
+% \ifx\p_framed_background\empty
+% \let\pack_framed_forgetall\forgetall
+% \else
+% \let\pack_framed_forgetall\relax
+% \forgetall
+% \fi
\edef\framedwidth {\the\ifdim\d_framed_width >\zeropoint \d_framed_width \else\zeropoint\fi}% public
\edef\framedheight{\the\ifdim\d_framed_height>\zeropoint \d_framed_height\else\zeropoint\fi}% public
\edef\framedoffset{\the\dimexpr\ifconditional\c_framed_has_offset\localoffset \else\zeropoint\fi}% public
@@ -1219,6 +1158,136 @@
\afterassignment\pack_framed_restart
\setbox\b_framed_normal\next}
+% alternatives for width, height, strut and offset
+
+\installcorenamespace{framedwidthalternative}
+\installcorenamespace{framedheightalternative}
+\installcorenamespace{framedstrutalternative}
+\installcorenamespace{framedoffsetalternative}
+
+% widths
+
+\setvalue{\??framedwidthalternative\empty}%
+ {\ifconditional\c_framed_has_format
+ \settrue\c_framed_has_width
+ \d_framed_width\hsize
+ \else
+ \setfalse\c_framed_has_width
+ \d_framed_width\zeropoint
+ \fi}
+
+\setvalue{\??framedwidthalternative\v!fit}%
+ {\ifconditional\c_framed_has_format
+ \settrue\c_framed_has_width
+ \d_framed_width\hsize
+ \else
+ \setfalse\c_framed_has_width
+ \d_framed_width\zeropoint
+ \fi}
+
+\setvalue{\??framedwidthalternative\v!fixed}% equals \v!fit but no shapebox
+ {\ifconditional\c_framed_has_format
+ \settrue\c_framed_has_width
+ \d_framed_width\hsize
+ \else
+ \setfalse\c_framed_has_width
+ \d_framed_width\zeropoint
+ \fi}
+
+\setvalue{\??framedwidthalternative\v!broad}%
+ {\settrue\c_framed_has_width
+ \d_framed_width\hsize}
+
+\setvalue{\??framedwidthalternative\v!max}% idem broad
+ {\settrue\c_framed_has_width
+ \d_framed_width\hsize}
+
+\setvalue{\??framedwidthalternative\v!local}%
+ {\settrue\c_framed_has_width
+ %\setlocalhsize
+ \d_framed_width\localhsize}
+
+\setvalue{\??framedwidthalternative\s!unknown}%
+ {\settrue\c_framed_has_width
+ \d_framed_width\localwidth}
+
+% heights
+
+\setvalue{\??framedheightalternative\empty}%
+ {\setfalse\c_framed_has_height
+ \d_framed_height\zeropoint}
+
+\setvalue{\??framedheightalternative\v!fit}%
+ {\setfalse\c_framed_has_height
+ \d_framed_height\zeropoint}
+
+\setvalue{\??framedheightalternative\v!broad}%
+ {\setfalse\c_framed_has_height
+ \d_framed_height\zeropoint}
+
+\setvalue{\??framedheightalternative\v!max}%
+ {\settrue\c_framed_has_height
+ \d_framed_height\vsize}
+
+\setvalue{\??framedheightalternative\s!unknown}%
+ {\settrue\c_framed_has_height
+ \d_framed_height\localheight}
+
+% struts
+
+\setvalue{\??framedstrutalternative\v!no}%
+ {\setfalse\c_framed_has_strut}
+
+\setvalue{\??framedstrutalternative\v!global}%
+ {\setstrut}
+
+\setvalue{\??framedstrutalternative\v!local}%
+ {\setfontstrut}
+
+\setvalue{\??framedstrutalternative\v!yes}%
+ {\setstrut}
+
+\setvalue{\??framedstrutalternative\s!unknown}%
+ {\setstrut}
+
+% offsets
+
+\setvalue{\??framedoffsetalternative\v!none}%
+ {\setfalse\c_framed_has_offset
+ \setfalse\c_framed_has_strut
+ \setfalse\c_framed_is_overlaid
+ \d_framed_local_offset\d_framed_linewidth}
+
+\setvalue{\??framedoffsetalternative\v!overlay}%
+ {% \ifx\p_framed_frame\v!no \setfalse\c_framed_has_frame \fi % test first
+ \setfalse\c_framed_has_offset
+ \setfalse\c_framed_has_strut
+ \settrue \c_framed_is_overlaid
+ \d_framed_local_offset\zeropoint}
+
+% \setvalue{\??framedoffsetalternative\v!strut}%
+% {\setfalse\c_framed_has_offset
+% \settrue \c_framed_has_strut
+% \settrue \c_framed_is_overlaid
+% \d_framed_local_offset\zeropoint}
+
+\setvalue{\??framedoffsetalternative\v!default}% new per 2-6-2000
+ {\settrue \c_framed_has_offset
+ \settrue \c_framed_has_strut
+ \setfalse\c_framed_is_overlaid
+ \let\localoffset\defaultframeoffset
+ \letframedparameter\c!offset\defaultframeoffset % brrr
+ \d_framed_local_offset\dimexpr\localoffset+\d_framed_linewidth\relax}
+
+\setvalue{\??framedoffsetalternative\s!unknown}%
+ {\settrue \c_framed_has_offset
+ \settrue \c_framed_has_strut
+ \setfalse\c_framed_is_overlaid
+ \let\defaultframeoffset\localoffset
+ \d_framed_local_offset\dimexpr\localoffset+\d_framed_linewidth\relax}
+
+% so far for alternatives
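+
+%D The keyword values handled by these alternative tables can be mixed freely. An
+%D editorial sketch (the dimensions are arbitrary):
+%D
+%D \starttyping
+%D \framed [width=fit]           {as wide as the content}
+%D \framed [width=broad]         {as wide as the available width}
+%D \framed [height=2cm,strut=no] {a fixed height, no strut}
+%D \framed [offset=overlay]      {no offset applied at all}
+%D \stoptyping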
+
\let\pack_framed_stop_orientation\relax
\def\pack_framed_restart
@@ -1227,18 +1296,17 @@
\def\pack_framed_do_top {\raggedtopcommand\framedparameter\c!top}
\def\pack_framed_do_bottom{\framedparameter\c!bottom\raggedbottomcommand}
-%D Carefull analysis of this macro will learn us that not all
-%D branches in the last conditionals can be encountered, that
-%D is, some assignments to \type{\next} will never occur.
-%D Nevertheless we implement the whole scheme, if not for
-%D future extensions.
+%D Careful analysis of this macro will teach us that not all branches in the last
+%D conditionals can be encountered, that is, some assignments to \type{\next} will
+%D never occur. Nevertheless we implement the whole scheme, if only for future
+%D extensions.
%D \macros
%D {doassigncheckedframeoffset}
%D
%D Offset helper (see menus):
-\def\doassigncheckedframeoffset#1#2%
+\def\doassigncheckedframeoffset#1#2% could be a fast \csname .. \endcsname
{\edef\checkedframeoffset{#2}%
#1%
\ifx\checkedframeoffset\empty \zeropoint\else
@@ -1253,11 +1321,10 @@
%D \macros
%D {ifreshapeframebox}
%D
-%D The last few lines tell what to do after the content of the
-%D box is collected and passed to the next macro. In the case
-%D of a fixed width and centered alignment, the content is
-%D evaluated and used to determine the most natural width. The
-%D rest of the code deals with backgrounds and frames.
+%D The last few lines tell what to do after the content of the box is collected and
+%D passed to the next macro. In the case of a fixed width and centered alignment,
+%D the content is evaluated and used to determine the most natural width. The rest
+%D of the code deals with backgrounds and frames.
\newif\ifreshapeframebox \reshapeframeboxtrue
@@ -1323,8 +1390,8 @@
\else
\pack_framed_finish_c
\fi\fi\fi
- \ifconditional\boxcontentneedsprocessing
- \mkdoprocessboxcontents\b_framed_normal
+ \ifconditional\page_postprocessors_needed_box
+ \page_postprocessors_linenumbers_box\b_framed_normal
\fi
\else
\pack_framed_finish_c
@@ -1570,9 +1637,9 @@
\def\pack_framed_stop_orientation_even
{\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}}
-%D The last conditional takes care of the special situation of
-%D in||line \inframed[height=3cm]{framed} boxes. Such boxes have
-%D to be \inframed{aligned} with the running text.
+%D The last conditional takes care of the special situation of in||line \inframed
+%D [height=3cm] {framed} boxes. Such boxes have to be \inframed {aligned} with the
+%D running text.
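+%D
+%D A short editorial sketch of such in||line use:
+%D
+%D \starttyping
+%D some \inframed {framed} words and an \inframed [height=1cm] {aligned} one in
+%D the running text
+%D \stoptyping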
\unexpanded\def\inframed
{\dosingleempty\pack_framed_inline}
@@ -1585,15 +1652,14 @@
\def\pack_framed_inline[%
{\framed[\c!location=\v!low,}
-%D When we set \type{empty} to \type{yes}, we get
-%D ourselves a frame and/or background, but no content, so
-%D actually we have a sort of phantom framed box.
+%D When we set \type{empty} to \type{yes}, we get ourselves a frame and/or background,
+%D but no content, so actually we have a sort of phantom framed box.
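+%D
+%D A minimal editorial sketch; such a phantom can for instance reserve space in a
+%D layout without showing anything but the frame and background:
+%D
+%D \starttyping
+%D \framed [empty=yes,width=3cm,height=1cm] {}
+%D \stoptyping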
%D \macros
%D {mframed, inmframed}
%D
-%D When Tobias asked how to frame mathematical elements in
-%D formulas, Taco's posted the next macro:
+%D When Tobias asked how to frame mathematical elements in formulas, Taco posted the
+%D next macro:
%D
%D \starttyping
%D \def\mframed#1%
@@ -1605,9 +1671,8 @@
%D \fi}
%D \stoptyping
%D
-%D Because \type {\ifinner} does not (always) reports what
-%D one would expect, we move the test to the outer level. We
-%D also want to pass arguments,
+%D Because \type {\ifinner} does not (always) report what one would expect, we move the
+%D test to the outer level. We also want to pass arguments,
%D
%D \starttyping
%D \def\mframed%
@@ -1626,8 +1691,8 @@
%D \fi}
%D \stoptyping
%D
-%D Still better is the next alternative, if only because it
-%D takes care of setting the super- and subscripts styles
+%D Still better is the next alternative, if only because it takes care of setting the
+%D super- and subscript styles.
\newcount\c_framed_mstyle
@@ -1637,22 +1702,6 @@
\vphantom{(}%
\Ustopmath}
-% \unexpanded\def\mframed {\dosingleempty\pack_framed_math_display}
-% \unexpanded\def\inmframed{\dosingleempty\pack_framed_math_inline }
-%
-% \def\pack_framed_math_inline[#1]#2%
-% {\begingroup
-% \c_framed_mstyle\mathstyle
-% \inframed[#1]{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}%
-% \endgroup}
-%
-% \def\pack_framed_math_display[#1]#2%
-% {\begingroup
-% \c_framed_mstyle\mathstyle
-% \let\normalstrut\pack_framed_math_strut
-% \framed[#1]{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}%
-% \endgroup}
-
\installcorenamespace{mathframed}
\installframedcommandhandler \??mathframed {mathframed} \??mathframed
@@ -1678,8 +1727,7 @@
\definemathframed[mframed]
\definemathframed[inmframed][\c!location=\v!low]
-%D So instead of the rather versatile \type {\framed}, we ue
-%D the \type {\mframed}.
+%D So instead of the rather versatile \type {\framed}, we use \type {\mframed}:
%D
%D \startbuffer
%D \startformula
@@ -1700,11 +1748,9 @@
%D
%D \typebuffer \getbuffer
%D
-%D As usual, one can specify in what way the text should be
-%D framed. One should be aware of the fact that, inorder to
-%D preserve the proper spacing, the \type {offset} is set to
-%D \type {overlay} and \type {frameoffset} is used used
-%D instead.
+%D As usual, one can specify in what way the text should be framed. One should be
+%D aware of the fact that, in order to preserve the proper spacing, the \type
+%D {offset} is set to \type {overlay} and \type {frameoffset} is used instead.
%D
%D \startbuffer
%D \startformula
@@ -1714,13 +1760,11 @@
%D
%D \typebuffer \getbuffer
%D
-%D For inline use, we also provide the \type {\inmframed}
-%D alternative: we want $x \times \inmframed{y}$ in inline
-%D math, right?
+%D For inline use, we also provide the \type {\inmframed} alternative: we want $x
+%D \times \inmframed{y}$ in inline math, right?
-%D This previous framing macros needs a lot of alternatives for
-%D putting rules around boxes, inserting offsets and aligning
-%D text. Each step is handled by separate macros.
+%D The previous framing macro needs a lot of alternatives for putting rules around
+%D boxes, inserting offsets and aligning text. Each step is handled by separate macros.
\newdimen\d_framed_applied_offset
\newdimen\d_framed_loffset
@@ -1728,7 +1772,7 @@
\newdimen\d_framed_toffset
\newdimen\d_framed_boffset
-\def\pack_framed_check_extra_offsets
+\def\pack_framed_check_extra_offsets % we could check h and v independently
{\setfalse\c_framed_has_extra_offset
\d_framed_loffset\framedparameter\c!loffset\relax
\d_framed_roffset\framedparameter\c!roffset\relax
@@ -1760,8 +1804,8 @@
\hbox{\kern\d_framed_applied_offset\box\b_framed_normal\kern\d_framed_applied_offset}%
\kern\d_framed_applied_offset}}
-%D Let's hope that the next few examples show us enough of
-%D what needs to be done by the auxiliary macros.
+%D Let's hope that the next few examples show us enough of what needs to be
+%D done by the auxiliary macros.
%D
%D \startbuffer
%D \framed[height=1cm,offset=.5cm] {rule based learning}
@@ -1815,25 +1859,21 @@
%D \hbox{\dontcomplain\getbuffer}
%D \stoplinecorrection
%D
-%D So now we're ready for the complicated stuff. We distinguish
-%D between borders with straight lines and those with round
-%D corners. When using the first alternative it is possible to
-%D turn off one or more lines. More fancy shapes are also
-%D possible by specifying dedicated backgrounds. Turning lines
-%D on and off is implemented as efficient as possible and as a
-%D result is interface language dependant. This next
-%D implementation evolved from simpler ones. It puts for
-%D instance the rules on top of the content and provides
-%D additional offset capabilities. The lot of calls to other
-%D macros makes this mechanism not that easy to comprehend.
-
-%D We handle left, right or middle alignment as well as fixed
-%D or free widths and heights. Each combination gets its own
-%D macro.
-
-%D The following code handles one-liners: \type{align={line,flushright}}.
-%D Beware, since we entered a group and either or not grab the next
-%D bgroup token, we need to finish the group in the oneliner mode.
+%D So now we're ready for the complicated stuff. We distinguish between borders with
+%D straight lines and those with round corners. When using the first alternative it
+%D is possible to turn off one or more lines. More fancy shapes are also possible by
+%D specifying dedicated backgrounds. Turning lines on and off is implemented as
+%D efficiently as possible and as a result is interface language dependent. This next
+%D implementation evolved from simpler ones. It puts, for instance, the rules on top
+%D of the content and provides additional offset capabilities. The many calls to
+%D other macros make this mechanism not that easy to comprehend.
+%D
+%D We handle left, right or middle alignment as well as fixed or free widths and
+%D heights. Each combination gets its own macro.
+%D
+%D The following code handles one-liners: \type {align={line,flushright}}. Beware,
+%D since we entered a group and may or may not grab the next bgroup token, we need
+%D to finish the group in the oneliner mode.
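+%D
+%D Two small editorial sketches: switching individual frame lines, and the
+%D one||liner alignment mentioned above:
+%D
+%D \starttyping
+%D \framed [frame=off,topframe=on,bottomframe=on] {only horizontal rules}
+%D \framed [width=8cm,align={line,flushright}]    {a one liner, flushed right}
+%D \stoptyping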
\ifdefined\raggedonelinerstate \else \newconditional\raggedonelinerstate \fi
@@ -1864,17 +1904,7 @@
%D The handlers:
-\def\pack_framed_forgetall{\forgetall}
-
-% test: (saves one forgetall)
-%
-% \edef\framedbackground{\framedparameter\c!background}%
-% \ifx\framedbackground\empty
-% \let\pack_framed_forgetall\forgetall
-% \else
-% \let\pack_framed_forgetall\relax
-% \forgetall
-% \fi
+% \def\pack_framed_forgetall{\forgetall}
\def\pack_framed_set_foregroundcolor
{\edef\p_framed_foregroundcolor{\framedparameter\c!foregroundcolor}%
@@ -1889,7 +1919,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -1910,7 +1940,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -1931,7 +1961,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -1950,7 +1980,7 @@
{\vbox
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -1970,7 +2000,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -1990,7 +2020,7 @@
{\hbox to \d_framed_width
\bgroup
\let\postprocessframebox\relax
- \pack_framed_forgetall
+% \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2013,11 +2043,10 @@
\localstrut
\doformatonelinerbox}
-%D On the next page we show some examples of how these macros
-%D come into action. The examples show us how
-%D \type {fit}, \type {broad} dimensions influence the
-%D formatting. Watch the visualized struts. \footnote {Here we
-%D used \type {\showstruts}.}
+%D On the next page we show some examples of how these macros come into action. The
+%D examples show us how the \type {fit} and \type {broad} dimensions influence the
+%D formatting. Watch the visualized struts. \footnote {Here we used \type
+%D {\showstruts}.}
%D
%D \startpostponing
%D \bgroup
@@ -2072,9 +2101,8 @@
%D \macros
%D {framednoflines, framedlastlength}
%D
-%D It is possible to let the frame macro calculate the width
-%D of a centered box automatically (\type {fit}). When
-%D doing so, we need to reshape the box:
+%D It is possible to let the frame macro calculate the width of a centered box
+%D automatically (\type {fit}). When doing so, we need to reshape the box:
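+%D
+%D An editorial sketch of such an automatically calculated width; the resulting
+%D box ends up as wide as its widest line:
+%D
+%D \starttyping
+%D \framed
+%D   [width=fit,align=middle]
+%D   {short\\a considerably longer line\\short}
+%D \stoptyping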
\newcount\framednoflines
\newdimen\framedfirstheight
@@ -2106,13 +2134,12 @@
% \startformula \startalign \NC \int_01 \NC B \NR \intertext{test} \NC \int_01 \NC D \NR \stopalign \stopformula
% \stopTEXpage
-%D The examples on the next page show how one can give the
-%D frame as well as the background an additional offset and
-%D even a bit more depth. The blue outline is the frame, the
-%D red box is the background and the small black outline is the
-%D visualization of the resulting box, that is, we applied
-%D \type{\ruledhbox} to the result.
-
+%D The examples on the next page show how one can give the frame as well as the
+%D background an additional offset and even a bit more depth. The blue outline is
+%D the frame, the red box is the background and the small black outline is the
+%D visualization of the resulting box, that is, we applied \type {\ruledhbox} to
+%D the result.
+%D
%D \startpostponing
%D \bgroup
%D \unprotect
@@ -2162,9 +2189,8 @@
%D \egroup
%D \stoppostponing
-%D We can draw lines from left to right and top to bottom by
-%D using the normal \type{\hairline} command. Both directions
-%D need a different treatment.
+%D We can draw lines from left to right and top to bottom by using the normal \type
+%D {\hairline} command. Both directions need a different treatment.
%D
%D \startbuffer
%D \framed[width=4cm] {alfa\hairline beta\hairline gamma}
@@ -2178,26 +2204,8 @@
%D \hbox{\getbuffer}
%D \stoplinecorrection
%D
-%D These macros try to adapt their behaviour as good as
-%D possible to the circumstances and act as natural as
-%D possible.
-
-% \unexpanded\def\pack_framed_vboxed_hairline
-% {\bgroup
-% \dimen2=\ifconditional\c_framed_has_offset \localoffset \else \zeropoint \fi
-% \dimen4=\dimexpr\dimen2+\d_framed_linewidth\relax
-% \setbox0\vbox
-% {\advance\hsize 2\dimen4
-% \vskip\dimen2
-% \hrule
-% \!!height\d_framed_linewidth
-% \!!depth\zeropoint
-% \!!width\hsize
-% \vskip\dimen2}%
-% \endgraf\obeydepth\nointerlineskip
-% \moveleft\dimen4\box0
-% \endgraf\nointerlineskip\localbegstrut % beware, we might kill it in a style using \vskip\lineheight
-% \egroup} % so this must not be changed
+%D These macros try to adapt their behaviour as well as possible to the circumstances
+%D and act as naturally as possible.
\unexpanded\def\pack_framed_vboxed_hairline % nasty overlay mess .. needed for autowidth
{\begingroup
@@ -2207,13 +2215,13 @@
\nointerlineskip
\kern\scratchoffset
\dontleavehmode
- \hrule\!!height\d_framed_linewidth\!!depth\zeropoint
+ \hrule\s!height\d_framed_linewidth\s!depth\zeropoint
\par
\kern-\d_framed_linewidth
\dontleavehmode
- \hbox to \zeropoint{\normalhss\vrule\!!height\d_framed_linewidth\!!depth\zeropoint\!!width\scratchwidth}%
+ \hbox to \zeropoint{\normalhss\vrule\s!height\d_framed_linewidth\s!depth\zeropoint\s!width\scratchwidth}%
\hfill
- \hbox to \zeropoint{\vrule\!!height\d_framed_linewidth\!!depth\zeropoint\!!width\scratchwidth\normalhss}%
+ \hbox to \zeropoint{\vrule\s!height\d_framed_linewidth\s!depth\zeropoint\s!width\scratchwidth\normalhss}%
\par
\nointerlineskip
\kern\scratchoffset
@@ -2223,32 +2231,29 @@
\localbegstrut
\endgroup}
-% todo:
-
\unexpanded\def\pack_framed_hboxed_hairline % use framed dimen
{\bgroup
- \dimen2=\ifconditional\c_framed_has_offset \localoffset \else \zeropoint \fi
+ \scratchoffset\ifconditional\c_framed_has_offset \localoffset \else \zeropoint \fi
\ifconditional\c_framed_has_height
- \dimen4\dimexpr\localheight/2+\strutdp-2\d_framed_linewidth\relax
- \dimen6\dimexpr\localheight/2-\strutdp+2\d_framed_linewidth\relax
+ \dimen\scratchheight\dimexpr\localheight/\plustwo+\strutdp-\plustwo\d_framed_linewidth\relax
+ \dimen\scratchdepth \dimexpr\localheight/\plustwo-\strutdp+\plustwo\d_framed_linewidth\relax
\else
- \dimen4\dimexpr\strutht+\dimen2\relax
- \dimen6\dimexpr\strutdp+\dimen2\relax
+ \dimen\scratchheight\dimexpr\strutht+\scratchoffset\relax
+ \dimen\scratchdepth \dimexpr\strutdp+\scratchoffset\relax
\fi
\unskip
\setbox\scratchbox\hbox
- {\hskip\dimen2
- \vrule\!!height\dimen4\!!depth\dimen6\!!width\d_framed_linewidth
- \hskip\dimen2}%
+ {\kern\scratchoffset
+ \vrule\s!height\dimen\scratchheight\s!depth\dimen\scratchdepth\s!width\d_framed_linewidth
+ \kern\scratchoffset}%
\ht\scratchbox\strutht
\dp\scratchbox\strutdp
\box\scratchbox
\ignorespaces
\egroup}
-%D The argument of the frame command accepts \type{\\} as a
-%D sort of newline signal. In horizontal boxes it expands to a
-%D space.
+%D The argument of the frame command accepts \type{\\} as a sort of newline signal. In
+%D horizontal boxes it expands to a space.
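+%D
+%D A minimal editorial sketch of the vertical case:
+%D
+%D \starttyping
+%D \framed [width=4cm] {alfa\\beta\\gamma}
+%D \stoptyping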
\unexpanded\def\pack_framed_vboxed_newline
{\endgraf\ignorespaces}
@@ -2256,10 +2261,9 @@
\unexpanded\def\pack_framed_hboxed_newline
{\unskip\normalspace\ignorespaces}
-%D We can set each rule on or off. The default setting is
-%D inherited from \type{frame}. An earlier implementation
-%D use a bit different approach, but the new one seems more
-%D natural:
+%D We can set each rule on or off. The default setting is inherited from
+%D \type {frame}. An earlier implementation used a slightly different approach, but
+%D the new one seems more natural:
%D
%D \bgroup
%D \setuptyping[margin=0pt]
@@ -2299,10 +2303,9 @@
%D \macros
%D {startframedtext, setupframedtexts, defineframedtext}
%D
-%D The general framing command we discussed previously, is not
-%D entirely suited for what we call framed texts, as for
-%D instance used in intermezzo's. The next examples show what
-%D we have in mind.
+%D The general framing command we discussed previously is not entirely suited for
+%D what we call framed texts, as for instance used in intermezzos. The next
+%D examples show what we have in mind.
%D
%D \startbuffer[framed-0]
%D \setupframedtexts
@@ -2364,8 +2367,8 @@
%D \bgroup \setuptyping[margin=0pt] \getbuffer[framed-3] \egroup
%D \bgroup \setuptyping[margin=0pt] \getbuffer[framed-4] \egroup
%D
-%D Here we can see that we have a predefined framed text class
-%D as well as the tools for defining our own. So we have:
+%D Here we can see that we have a predefined framed text class as well as the
+%D tools for defining our own. So we have:
%D
%D \showsetup{setupframedtexts}
%D
@@ -2388,7 +2391,7 @@
\let\setupframedtexts\setupframedtext
\setupframedtext
- [\c!width=0.75\hsize,
+ [\c!width=.75\hsize,
\c!height=\v!fit,
\c!align=\v!yes,
%\c!top=,
@@ -2415,7 +2418,7 @@
%\c!foregroundstyle=,
%\c!background=,
%\c!backgroundcolor=,
- \c!backgroundscreen=\@@rsscreen,
+ \c!backgroundscreen=\defaultbackgroundscreen,
\c!linecorrection=\v!on,
\c!depthcorrection=\v!on,
\c!margin=\v!standard]
@@ -2461,7 +2464,6 @@
\hsize\localhsize
% \insidefloattrue % ? better
\normalexpanded{\switchtobodyfont[\framedtextparameter\c!bodyfont]}%
-% \startcolor[\framedtextparameter\c!color]%
\letframedtextparameter\c!strut\v!no
\inheritedframedtextframed\bgroup
\let\\=\endgraf
@@ -2471,9 +2473,8 @@
\setupindenting[\framedtextparameter\c!indenting]%
\useframedtextstyleandcolor\c!style\c!color}
-%D The \type {none} option is handy for nested usage, as
-%D in the presentation styles, where we don't want
-%D interference.
+%D The \type {none} option is handy for nested usage, as in the presentation
+%D styles, where we don't want interference.
\defineplacement[\??framedtext][\s!parent=\??framedtext\currentframedtext]
@@ -2482,7 +2483,6 @@
\removelastskip
\doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
\stopboxedcontent
-% \stopcolor
\ifconditional\c_framed_text_location_none
\egroup
\box\b_framed_normal
@@ -2551,8 +2551,8 @@
%D \egroup
%D \stoptyping
-%D The simple brace (or group) delimited case is typeset
-%D slightly different and is not aligned.
+%D The simple brace (or group) delimited case is typeset slightly differently
+%D and is not aligned.
\unexpanded\def\pack_framed_text_direct#1%
{\bgroup
@@ -2579,7 +2579,8 @@
\egroup
\egroup}
-\defineframedtext[\v!framedtext]
+\defineframedtext
+ [\v!framedtext]
%D \macros
%D {defineframed}
@@ -2588,47 +2589,44 @@
%D
%D \showsetup{defineframed}
%D
-%D As suggested by Wolfgang we can now use the new \MKIV\ inheritance
-%D model instead of passing a combination of arguments. This also
-%D also simplified the \type {\setupframed} command. There are
-%D certainly more places where such improvements can be made.
-
-% actually, this can be another command handler .. todo
+%D As suggested by Wolfgang we can now use the new \MKIV\ inheritance model instead
+%D of passing a combination of arguments. This also simplified the \type
+%D {\setupframed} command. There are certainly more places where such improvements
+%D can be made.
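+%D
+%D An editorial sketch of a definition using this inheritance; the name
+%D \type {important} is just a hypothetical example and the colors assume the
+%D default color definitions:
+%D
+%D \starttyping
+%D \defineframed
+%D   [important] % hypothetical instance name
+%D   [framecolor=darkred,
+%D    offset=1ex]
+%D
+%D \setupframed
+%D   [important]
+%D   [background=color,
+%D    backgroundcolor=lightgray]
+%D
+%D \important{watch out}
+%D \stoptyping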
-\unexpanded\def\defineframed
- {\dotripleempty\pack_framed_define}
+\appendtoks
+ \ifcsname\??regularframedlevel\currentframed\endcsname
+ % already defined, keeps settings
+ \else
+ \expandafter\newcount\csname\??regularframedlevel\currentframed\endcsname
+ \fi
+\to \everypresetframed
-\def\pack_framed_define[#1][#2][#3]%
- {\ifcsname\??ol:#1\endcsname
- % already defined, keeps settings
- \else
- \expandafter\newcount\csname\??ol:#1\endcsname % \the\everypresetframed
- \fi
- \ifsecondargument
- \doifassignmentelse{#2}
- {\getparameters[\??ol#1][\s!parent=\??ol,#2]}%
- {\ifcsname#2\endcsname
- \getparameters[\??ol#1][\s!parent=\??ol#2,#3]%
- \else
- \getparameters[\??ol#1][\s!parent=\??ol,#3]%
- \fi}%
- \else
- \getparameters[\??ol#1][\s!parent=\??ol,#2]%
- \fi
- \setuvalue{#1}{\pack_framed_defined_process[#1]}}% % \the\everydefineframed
+\appendtoks
+ \setuevalue\currentframed{\pack_framed_defined_process[\currentframed]}%
+\to \everydefineframed
-\newcount\c_framed_crap
+\newcount\c_temp_framed_crap
\unexpanded\def\pack_framed_defined_process[#1]% official (not much checking, todo: parent)
{\bgroup
- \ifcsname\??ol:#1\endcsname
- \expandafter\let\expandafter\c_framed_temp\csname\??ol:#1\endcsname
+ \ifcsname\??regularframedlevel#1\endcsname
+ \expandafter\let\expandafter\c_pack_framed_temp\csname\??regularframedlevel#1\endcsname
\else
- \let\c_framed_temp\c_framed_crap
+ \let\c_pack_framed_temp\c_temp_framed_crap
\fi
- \advance\c_framed_temp\plusone
- \expandafter\def\csname\??ol#1:\the\c_framed_temp\s!parent\endcsname{\??ol#1}% \inheritlocalframed
- \dodoubleempty\pack_framed_process[\??ol#1:\the\c_framed_temp]}
+ \advance\c_pack_framed_temp\plusone
+ \expandafter\def\csname\??framed#1>\the\c_pack_framed_temp:\s!parent\endcsname{\??framed#1}% \inheritlocalframed
+ \bgroup
+ \edef\currentframed{#1>\the\c_pack_framed_temp}%
+ \pack_framed_initialize
+ \dosingleempty\pack_framed_defined_process_indeed}
+
+\def\pack_framed_defined_process_indeed[#1]%
+ {\iffirstargument % faster
+ \setupcurrentframed[#1]% here !
+ \fi
+ \pack_framed_process_indeed}
\let\placeframed\pack_framed_defined_process % new per 2012/04/23
@@ -2640,8 +2638,8 @@
%D
%D but the existing one is ok as well (less csname messy too).
-%D New, for the moment private; let's see when GB finds out
-%D about this one and its obscure usage. It's used in:
+%D New, for the moment private; let's see when GB finds out about this one and its
+%D obscure usage. It's used in:
%D
%D \startbuffer
%D \defineframedtext
@@ -2675,21 +2673,17 @@
%D
%D \typebuffer
-% to be redone
-
-\def\framedcontentparameter #1{\csname\doframedcontentparameter{\??fc\currentframedcontent}#1\endcsname}
-\def\doframedcontentparameter #1#2{\ifcsname#1#2\endcsname#1#2\else\expandafter\doframedcontentparentparameter\csname#1\s!parent\endcsname#2\fi}
-\def\doframedcontentparentparameter#1#2{\ifx#1\relax\s!empty\else\doframedcontentparameter#1#2\fi}
-\def\letframedcontentparameter #1{\expandafter\let\csname\??fc\currentframedcontent#1\endcsname}
+\installcorenamespace{framedcontent}
-\presetlocalframed[\??fc]
+\installframedcommandhandler \??framedcontent {framedcontent} \??framedcontent
-\getparameters
- [\??fc]
+\setupframedcontent
[\c!leftoffset=\zeropoint,
- \c!rightoffset=\framedcontentparameter\c!leftoffset,
+ %\c!rightoffset=\framedcontentparameter\c!leftoffset,
+ \c!rightoffset=\scratchleftoffset,
\c!topoffset=\zeropoint,
- \c!bottomoffset=\framedcontentparameter\c!topoffset,
+ %\c!bottomoffset=\framedcontentparameter\c!topoffset,
+ \c!bottomoffset=\scratchtopoffset,
\c!strut=\v!no,
%\c!linecorrection=\v!no,
%\c!left=,
@@ -2697,19 +2691,6 @@
%\c!width=\v!fit,
\c!offset=\v!overlay]
-\unexpanded\def\defineframedcontent
- {\dodoubleempty\dodefineframedcontent}
-
-\def\dodefineframedcontent[#1][#2]%
- {\getparameters[\??fc#1][\s!parent=\??fc,#2]}
-
-\unexpanded\def\setupframedcontent
- {\dodoubleempty\dosetupframedcontent}
-
-\def\dosetupframedcontent[#1][#2]%
- {\def\docommand##1{\getparameters[\??fc##1][#2]}%
- \processcommacommand[#1]\docommand}
-
\unexpanded\def\startframedcontent
{\dosingleempty\pack_framed_start_content}
@@ -2727,21 +2708,25 @@
{\setbox\b_framed_normal\hbox\bgroup
\setlocalhsize
\hsize\localhsize
- \advance\hsize\dimexpr-\framedcontentparameter\c!leftoffset-\framedcontentparameter\c!rightoffset \relax
- \advance\vsize\dimexpr-\framedcontentparameter\c!topoffset -\framedcontentparameter\c!bottomoffset\relax
- \hskip\framedcontentparameter\c!leftoffset
- \vbox\bgroup
- \vskip\framedcontentparameter\c!topoffset
+ \scratchleftoffset \framedcontentparameter\c!leftoffset \relax
+ \scratchrightoffset \framedcontentparameter\c!rightoffset \relax
+ \scratchtopoffset \framedcontentparameter\c!topoffset \relax
+ \scratchbottomoffset\framedcontentparameter\c!bottomoffset\relax
+ \advance\hsize\dimexpr-\scratchleftoffset-\scratchrightoffset \relax
+ \advance\vsize\dimexpr-\scratchtopoffset -\scratchbottomoffset\relax
+ \kern\scratchleftoffset
\vbox\bgroup
- \forgetall
- \blank[\v!disable]}
+ \vskip\scratchtopoffset
+ \vbox\bgroup
+ \forgetall
+ \blank[\v!disable]}
\def\pack_framed_stop_content_indeed
- {\removelastskip
- \egroup
- \vskip\framedcontentparameter\c!bottomoffset
- \egroup
- \hskip\framedcontentparameter\c!rightoffset
+ {\removelastskip
+ \egroup
+ \vskip\scratchbottomoffset
+ \egroup
+ \kern\scratchrightoffset
\egroup
\doif{\framedcontentparameter\c!width}\v!fit
{\letframedcontentparameter\c!width\v!fixed}% no shapebox
@@ -2753,7 +2738,7 @@
% plaats ?
\ifdone\startlinecorrection\fi
\framedcontentparameter\c!left % new
- \localframed[\??fc\currentframedcontent]{\box\b_framed_normal}% hm
+ \inheritedframedcontentframed{\box\b_framed_normal}% hm
\framedcontentparameter\c!right % new
\ifdone\stoplinecorrection\fi
\egroup}
@@ -2763,57 +2748,15 @@
\setuplinewidth
[\v!medium]
-% We could omit the empty setings but that is some 10% slower due to
-% extra testing in the chain.
+%D A Goodie:
-\setupframed
- [\c!width=\v!fit,
- \c!height=\v!broad,
- %\c!lines=,
- \c!offset=0.25ex, % \defaultframeoffset
- \c!empty=\v!no,
- \c!frame=\v!on,
- %\c!topframe=,
- %\c!bottomframe=,
- %\c!leftframe=,
- %\c!rightframe=,
- \c!radius=.5\bodyfontsize,
- \c!rulethickness=\linewidth,
- \c!corner=\v!rectangular,
- \c!depth=\zeropoint,
- %\c!foregroundcolor=,
- %\c!foregroundstyle=,
- %\c!background=,
- \c!backgroundscreen=\@@rsscreen,
- %\c!backgroundcolor=,
- \c!backgroundoffset=\zeropoint,
- %\c!framecolor=,
- \c!frameoffset=\zeropoint,
- % somewhat messy
- \c!backgroundcorner=\framedparameter\c!corner,
- \c!backgroundradius=\framedparameter\c!radius,
- \c!backgrounddepth=\framedparameter\c!depth,
- \c!framecorner=\framedparameter\c!corner,
- \c!frameradius=\framedparameter\c!radius,
- \c!framedepth=\framedparameter\c!depth,
- %
- %\c!component=,
- %\c!region=,
- %\c!align=,
- \c!bottom=\vss,
- %\c!top=,
- \c!autostrut=\v!yes,
- \c!location=\v!normal,
- %\c!orientation=,
- \c!autowidth=\v!yes,
- %\c!setups=,
- \c!strut=\v!yes,
- %\c!loffset=\zeropoint,
- %\c!roffset=\zeropoint,
- %\c!toffset=\zeropoint,
- ]%\c!boffset=\zeropoint]
+\def\v!unframed{unframed}
-\setupscreens
- [\c!screen=0.95]
+\defineframed
+ [\v!unframed]
+ [\c!frame=\v!off,
+ \c!rulethickness=\zeropoint,
+ \c!foregroundstyle=\framedparameter\c!style,
+ \c!foregroundcolor=\framedparameter\c!color]
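+
+%D A usage sketch (editorial): this instance draws no rule, so it comes in handy
+%D when only the style and color hooks of \type {\framed} are wanted; the color
+%D assumes the default color definitions:
+%D
+%D \starttyping
+%D \unframed [style=bold,color=darkred] {just styled text, no frame}
+%D \stoptyping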
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-app.mkiv b/Master/texmf-dist/tex/context/base/page-app.mkiv
index df9607daaca..5f1c2f297b6 100644
--- a/Master/texmf-dist/tex/context/base/page-app.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-app.mkiv
@@ -34,6 +34,7 @@
[fittingpage]
[\c!textstate=\v!empty,
\c!doublesided=\v!no,
+ \c!location=\v!top, % no topskip
\c!pagestate=\fittingpageparameter\c!pagestate]%
\definelayout
@@ -180,20 +181,33 @@
%D \starttext \pagefigure[two.1] \stoptext
%D \stoptyping
+\defineexternalfigure[\v!page:\v!figure][\c!offset=\v!overlay] % we force a parent
+
\unexpanded\def\startpagefigure
{\dodoubleempty\page_figures_start}
+% this one:
+%
+% \def\page_figures_start[#1][#2]%
+% {\bgroup
+% \def\currentexternalfigure{\v!page:\v!figure}%
+% \setupcurrentexternalfigure[\c!offset=\v!overlay,#2]%
+% \startTEXpage[\c!offset=\externalfigureparameter\c!offset]%
+% \externalfigure[#1]\ignorespaces} % so we can put some text below the graphic
+%
+% or this one:
+
\def\page_figures_start[#1][#2]%
{\bgroup
- \getparameters[\??ex][\c!offset=\v!overlay,#2]%
- \startTEXpage[\c!offset=\@@exoffset]%
- \externalfigure[#1][#2]\ignorespaces} % so we can put some text below the graphic
+ \setupexternalfigure[\v!page:\v!figure][\c!offset=\v!overlay,#2]%
+ \startTEXpage[\c!offset=\namedexternalfigureparameter{\v!page:\v!figure}\c!offset]%
+ \externalfigure[#1]\ignorespaces} % so we can put some text below the graphic
\unexpanded\def\stoppagefigure
{\stopTEXpage
\egroup}
-\def\pagefigure
+\unexpanded\def\pagefigure
{\dodoubleempty\page_figure}
\def\page_figure[#1][#2]%
diff --git a/Master/texmf-dist/tex/context/base/page-bck.mkiv b/Master/texmf-dist/tex/context/base/page-bck.mkiv
index 100c186ddce..0246e8eb61b 100644
--- a/Master/texmf-dist/tex/context/base/page-bck.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-bck.mkiv
@@ -11,19 +11,18 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% todo: ma:r:c
-%
-% Currently the text cells are fakes and no (foreground) frames which
-% makes sense as order might matter e.g. is text sticks in other
-% cells. The page, text etc ares do support foreground order change.
+% Currently the text cells are fakes and not (foreground) frames, which makes sense
+% as order might matter, e.g. if text sticks into other cells. The page, text etc.
+% areas do support foreground order change.
\writestatus{loading}{ConTeXt Page Macros / Backgrounds}
\unprotect
-%D For special purposes, users can question the \type
-%D {*background} mode. This mode is only available when
-%D typesetting the pagebody.
+% maybe use \currentframedhash here
+
+%D For special purposes, users can question the \type {*background} mode. This mode
+%D is only available when typesetting the pagebody.
%D
%D \starttyping
%D \startmode[*background] ...
@@ -42,17 +41,23 @@
\fi
\to \everybeforepagebody
-%D We keep calculations and checks to a minimum and also
-%D try to minimize the amount of tracing due to expansion.
+\unexpanded\def\initializepagebackgrounds
+ {\setfalse\c_page_backgrounds_new
+ \setfalse\c_page_backgrounds_new_right
+ \setfalse\c_page_backgrounds_new_left
+ \setfalse\c_page_backgrounds_some}
+
+%D We keep calculations and checks to a minimum and also try to minimize
+%D the amount of tracing due to expansion.
\let\currentotrbackground\empty
-\def\page_backgrounds_set_yes{\expandafter\let\csname\currentotrbackground\endcsname\relax }
-\def\page_backgrounds_set_nop{\expandafter\let\csname\currentotrbackground\endcsname\undefined}
+%D This is the only spot where we have a low level dependency on the way
+%D parent chains are defined, but we want the speed.
\def\page_backgrounds_check_background
- {\ifcsname\currentotrbackground\c!background\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!background\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!background\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!background\endcsname}%
\ifx\page_background_temp\empty
\expandafter\expandafter\expandafter\page_backgrounds_check_frame
\else
@@ -63,8 +68,8 @@
\fi}
\def\page_backgrounds_check_frame
- {\ifcsname\currentotrbackground\c!frame\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!frame\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!frame\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!frame\endcsname}%
\ifx\page_background_temp\v!on
\page_backgrounds_set_yes
\else
@@ -75,8 +80,8 @@
\fi}
\def\page_backgrounds_check_leftframe
- {\ifcsname\currentotrbackground\c!leftframe\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!leftframe\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!leftframe\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!leftframe\endcsname}%
\ifx\page_background_temp\v!on
\page_backgrounds_set_yes
\else
@@ -87,8 +92,8 @@
\fi}
\def\page_backgrounds_check_rightframe
- {\ifcsname\currentotrbackground\c!rightframe\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!rightframe\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!rightframe\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!rightframe\endcsname}%
\ifx\page_background_temp\v!on
\page_backgrounds_set_yes
\else
@@ -99,8 +104,8 @@
\fi}
\def\page_backgrounds_check_topframe
- {\ifcsname\currentotrbackground\c!topframe\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!topframe\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!topframe\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!topframe\endcsname}%
\ifx\page_background_temp\v!on
\page_backgrounds_set_yes
\else
@@ -111,8 +116,8 @@
\fi}
\def\page_backgrounds_check_bottomframe
- {\ifcsname\currentotrbackground\c!bottomframe\endcsname
- \edef\page_background_temp{\csname\currentotrbackground\c!bottomframe\endcsname}%
+ {\ifcsname\??framed\currentotrbackground:\c!bottomframe\endcsname
+ \edef\page_background_temp{\csname\??framed\currentotrbackground:\c!bottomframe\endcsname}%
\ifx\page_background_temp\v!on
\page_backgrounds_set_yes
\else
@@ -122,58 +127,66 @@
\page_backgrounds_set_nop
\fi}
-\def\page_backgrounds_check#1%
- {\edef\currentotrbackground{\??ma#1}%
- \page_backgrounds_check_background}
+%D We don't use the commandhandler code as we want this multitude of backgrounds to
+%D be as fast as possible.
-\def\ifsomebackgroundfound#1%
- {\ifcsname\??ma#1\endcsname}
+\installcorenamespace{layoutbackgrounds}
+\installcorenamespace{layoutbackgroundcheck} % we need another hash as \??layoutbackgrounds<...> gets defined
+
+\def\page_backgrounds_set_yes{\expandafter\let\csname\currentotrbackground\endcsname\relax }
+\def\page_backgrounds_set_nop{\expandafter\let\csname\currentotrbackground\endcsname\undefined}
+
+\unexpanded\def\page_backgrounds_check#1%
+ {\edef\currentotrbackground{\??layoutbackgrounds#1}%
+ \page_backgrounds_check_background}
\def\doifsomebackgroundelse#1%
- {\ifcsname\??ma#1\endcsname
+ {\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
\def\doifsomebackground#1%
- {\ifcsname\??ma#1\endcsname
+ {\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
-%D The background mechanism falls back on the \type {\framed}
-%D macro. This means that all normal frame and overlay
-%D features can be used.
+%D The background mechanism falls back on the \type {\framed} macro. This means
+%D that all normal frame and overlay features can be used.
\def\page_backgrounds_add_to_box#1% area
- {\ifcsname\??ma#1\endcsname
+ {\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\page_backgrounds_add_to_box_indeed
\else
\expandafter\gobblefourarguments
\fi#1}
-% we don't need the dimensions here as this is a real framed but the question is: do we indeed
-% need a real framed or can we use a fake (i.e. no foreground, only for hidden)
+%D We don't need the dimensions here as this is a real framed but the question is:
+%D do we indeed need a real framed or can we use a fake (i.e. no foreground, only
+%D for hidden).
\def\page_backgrounds_add_to_box_indeed#1#2#3#4% area box width height / always non zero
- {\ifcsname\??ma#1\c!setups\endcsname % to be done
- \doprocesslocalsetups{\csname\??ma#1\c!setups\endcsname}% should not produce funny spaces !
+ {\edef\p_page_backgrounds_setups{\namedframedparameter{\??layoutbackgrounds#1}\c!setups}%
+ \ifx\p_page_backgrounds_setups\empty \else
+ \doprocesslocalsetups\p_page_backgrounds_setups % should not produce funny spaces !
\fi
% #2 has the right dimensions already
- \setbox#2\hbox{\localbackgroundframed{\??ma#1}#1#2}}% a real framed (including foreground)
+ \setbox#2\hbox{\localbackgroundframed{\??layoutbackgrounds#1}#1#2}}% a real framed (including foreground)
+
-%D There are quite some backgrounds. At the bottom layer,
-%D there is the {\em paper} background. This one is only
-%D used for special purposes, like annotations to documents.
+%D There are quite a few backgrounds. At the bottom layer, there is the {\em
+%D paper} background. This one is only used for special purposes, like
+%D annotations to documents.
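+%D
+%D At the user level these areas are normally reached through \type
+%D {\setupbackgrounds}; a minimal editorial sketch (assuming the default color
+%D definitions):
+%D
+%D \starttyping
+%D \setupbackgrounds
+%D   [page]
+%D   [background=color,
+%D    backgroundcolor=lightgray]
+%D \stoptyping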
\def\page_backgrounds_add_to_print#1%
{\page_backgrounds_add_to_box\v!paper#1\printpaperwidth\printpaperheight}
-%D The page backgrounds can be put behind the {\em left
-%D page}, the {\em right page} or {\em each page}. As with
-%D the paper background, these are calculated on each page.
+%D The page backgrounds can be put behind the {\em left page}, the {\em
+%D right page} or {\em each page}. As with the paper background, these are
+%D calculated on each page.
\def\page_backgrounds_add_to_paper#1%
{\doifbothsidesoverruled
@@ -182,18 +195,17 @@
{\page_backgrounds_add_to_box\v!leftpage #1\paperwidth\paperheight}%
\page_backgrounds_add_to_box\v!page#1\paperwidth\paperheight}
-%D Then there are the 25 areas that make up the layout: {\em
-%D top, header, text, footer, bottom} times {\em left edge,
-%D left margin, text, right margin, right edge}. These are
-%D only recalculated when they change or when the \type
-%D {status} is set to \type {repeat}.
+%D Then there are the 25 areas that make up the layout: {\em top, header, text,
+%D footer, bottom} times {\em left edge, left margin, text, right margin, right
+%D edge}. These are only recalculated when they change or when the \type {status}
+%D is set to \type {repeat}.
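+%D
+%D These cells too are set up with \type {\setupbackgrounds}, now with a vertical
+%D and a horizontal area specifier; an editorial sketch:
+%D
+%D \starttyping
+%D \setupbackgrounds
+%D   [header,text,footer]
+%D   [text]
+%D   [background=color,
+%D    backgroundcolor=lightgray]
+%D \stoptyping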
\newbox\leftbackground % todo: rename
\newbox\rightbackground % todo: rename
-%D Finaly there is an aditional {\em text} background, again
-%D useful for special purposes only. This one is calculated
-%D each time. The hidden backgrounds are not meant for users!
+%D Finally there is an additional {\em text} background, again useful for
+%D special purposes only. This one is calculated each time. The hidden
+%D backgrounds are not meant for users!
\newconditional\c_page_backgrounds_hidden_enabled
@@ -203,21 +215,19 @@
\fi
\page_backgrounds_add_to_box\v!text#1\makeupwidth\textheight}
-%D The next couple of macros implement the area backgrounds.
-%D As said, these are cached in dedicated boxes. The offsets
-%D and depth of the page are used for alignment purposes.
+%D The next couple of macros implement the area backgrounds. As said, these
+%D are cached in dedicated boxes. The offsets and depth of the page are used
+%D for alignment purposes.
-\newdimen\pageoffset % bleed
-\newdimen\pagedepth
+%newdimen\pageoffset % bleed
+%newdimen\pagedepth % built-in
-%D We need a bit more clever mechanism in order to handle
-%D layers well. This means that we cannot calculate both
-%D background at the same time since something may have
-%D changed halfway a page.
+%D We need a somewhat cleverer mechanism in order to handle layers well.
+%D This means that we cannot calculate both backgrounds at the same time
+%D since something may have changed halfway through a page.
-%D Margin swapping has been simplified: see mkii code in case of
-%D regression. Calculation is delayed till the page anyway so the
-%D state is known.
+%D Margin swapping has been simplified: see the mkii code in case of regression.
+%D Calculation is delayed till the page is built anyway, so the state is known.
\def\page_backgrounds_recalculate
{\global\settrue\c_page_backgrounds_new}
@@ -230,7 +240,7 @@
\page_backgrounds_set_boxes_a
\page_backgrounds_set_boxes_b
\page_backgrounds_set_boxes_c
- \ifx\@@mastate\v!repeat\else
+ \ifx\p_page_backgrounds_state\v!repeat\else
\global\setfalse\c_page_backgrounds_new
\fi}
@@ -273,20 +283,19 @@
\newdimen\pagebackgroundhoffset % THESE WILL BECOME OBSOLETE
\newdimen\pagebackgroundvoffset
+\newdimen\pagebackgroundoffset % used elsewhere
\newdimen\pagebackgrounddepth
\newdimen\pagebackgroundcompensation
-\newdimen\pagebackgroundoffset % used elsewhere
-
\def\page_backgrounds_set_offsets % used in menus (we can use ifcsname's here)
{\ifconditional\c_page_backgrounds_some \ifconditional\c_page_backgrounds_new
\page_backgrounds_set_offsets_indeed % indirect, less tracing
\fi \fi}
\def\page_backgrounds_set_offsets_indeed
- {\ifcsname\??ma\v!text\v!text\endcsname
+ {\ifcsname\??layoutbackgrounds\v!text\v!text\endcsname
\page_backgrounds_set_offsets_yes
- \else\ifcsname\??ma\v!text\endcsname
+ \else\ifcsname\??layoutbackgrounds\v!text\endcsname
\page_backgrounds_set_offsets_yes
\else
\page_backgrounds_set_offsets_nop
@@ -299,26 +308,27 @@
\global\pagebackgroundcompensation\zeropoint}
\def\page_backgrounds_set_offsets_yes
- {\global\pagebackgroundcompensation\csname\??ma\v!page\c!offset\endcsname\relax
+ {\global\pagebackgroundoffset \d_page_backgrounds_depth
+ \global\pagebackgroundcompensation\d_page_backgrounds_offset\relax
\ifzeropt\pagebackgroundcompensation
\page_backgrounds_set_offsets_nop
\else
- \ifcsname\??ma\v!top\v!text\endcsname
+ \ifcsname\??layoutbackgrounds\v!top\v!text\endcsname
\global\pagebackgroundhoffset\zeropoint
- \else\ifcsname\??ma\v!bottom\v!text\endcsname
+ \else\ifcsname\??layoutbackgrounds\v!bottom\v!text\endcsname
\global\pagebackgroundhoffset\zeropoint
\else
\global\pagebackgroundhoffset\pagebackgroundcompensation
\fi\fi
- \ifcsname\??ma\v!text\v!rightedge\endcsname
+ \ifcsname\??layoutbackgrounds\v!text\v!rightedge\endcsname
\global\pagebackgroundvoffset\zeropoint
\global\pagebackgrounddepth \zeropoint
- \else\ifcsname\??ma\v!text\v!leftedge\endcsname
+ \else\ifcsname\??layoutbackgrounds\v!text\v!leftedge\endcsname
\global\pagebackgroundvoffset\zeropoint
\global\pagebackgrounddepth \zeropoint
\else
\global\pagebackgroundvoffset\pagebackgroundcompensation
- \global\pagebackgrounddepth \csname\??ma\v!page\c!depth\endcsname\relax
+ \global\pagebackgrounddepth \d_page_backgrounds_depth
\fi\fi
\fi}
@@ -370,7 +380,7 @@
\bgroup\hbox\bgroup
\goleftonpage
\ifdim\leftedgewidth>\zeropoint
- \ifcsname\??ma#1\v!leftedge\endcsname
+ \ifcsname\??layoutbackgrounds#1\v!leftedge\endcsname
\page_backgrounds_set_box_cell#1\v!leftedge\leftedgewidth#2%
\else
\kern\leftedgewidth
@@ -378,21 +388,21 @@
\kern\leftedgedistance
\fi
\ifdim\leftmarginwidth>\zeropoint
- \ifcsname\??ma#1\v!leftmargin\endcsname
+ \ifcsname\??layoutbackgrounds#1\v!leftmargin\endcsname
\page_backgrounds_set_box_cell#1\v!leftmargin\leftmarginwidth#2%
\else
\kern\leftmarginwidth
\fi
\kern\leftmargindistance
\fi
- \ifcsname\??ma#1\v!text\endcsname
+ \ifcsname\??layoutbackgrounds#1\v!text\endcsname
\page_backgrounds_set_box_cell#1\v!text\makeupwidth#2%
\else
\kern\makeupwidth
\fi
\ifdim\rightmarginwidth>\zeropoint
\kern\rightmargindistance
- \ifcsname\??ma#1\v!rightmargin\endcsname
+ \ifcsname\??layoutbackgrounds#1\v!rightmargin\endcsname
\page_backgrounds_set_box_cell#1\v!rightmargin\rightmarginwidth#2%
\else
\kern\rightmarginwidth
@@ -400,7 +410,7 @@
\fi
\ifdim\rightedgewidth>\zeropoint
\kern\rightedgedistance
- \ifcsname\??ma#1\v!rightedge\endcsname
+ \ifcsname\??layoutbackgrounds#1\v!rightedge\endcsname
\page_backgrounds_set_box_cell#1\v!rightedge\rightedgewidth#2%
\else
\kern\rightedgewidth
@@ -415,30 +425,31 @@
\def\page_backgrounds_set_box_cell#1#2#3#4% pos pos width height
{\begingroup
- \ifcsname\??ma#1#2\c!setups\endcsname
- \doprocesslocalsetups{\csname\??ma#1#2\c!setups\endcsname}% should not produce funny spaces !
+ \edef\p_page_backgrounds_setups{\namedframedparameter{\??layoutbackgrounds#1#2}\c!setups}%
+ \ifx\p_page_backgrounds_setups\empty \else
+ \doprocesslocalsetups\p_page_backgrounds_setups % should not produce funny spaces !
\fi
- \ifcsname\??ma#1#2\c!command\endcsname
+ \edef\p_page_backgrounds_command{\namedframedparameter{\??layoutbackgrounds#1#2}\c!command}%
+ \ifx\p_page_backgrounds_command\empty
\expandafter\page_backgrounds_set_box_cell_nop
\else
\expandafter\page_backgrounds_set_box_cell_yes
- \fi#1#2#3#4%
- \localbackgroundframed{\??ma#1#2}{#1:#2}\scratchbox
+ \fi#3#4%
+ \localbackgroundframed{\??layoutbackgrounds#1#2}{#1:#2}\scratchbox
\endgroup}
-\def\page_backgrounds_set_box_cell_nop#1#2#3#4%
+\def\page_backgrounds_set_box_cell_nop#1#2%
{\setbox\scratchbox\emptyvbox
- \wd\scratchbox#3%
- \ht\scratchbox#4}
+ \wd\scratchbox#1%
+ \ht\scratchbox#2}
-\def\page_backgrounds_set_box_cell_yes#1#2#3#4%
- {\setbox\scratchbox\vbox to #4{\vss\hbox to#3{\hss\csname\??ma#1#2\c!command\endcsname\hss}\vss}%
+\def\page_backgrounds_set_box_cell_yes#1#2%
+ {\setbox\scratchbox\vbox to #2{\vss\hbox to#1{\hss\p_page_backgrounds_command\hss}\vss}%
\dp\scratchbox\zeropoint}
-%D The background mechanism is quite demanding in terms or
-%D resources. We used to delay these definitions till runtime
-%D usage, but since today's \TEX's are large, we now do the
-%D work on forehand.
+%D The background mechanism is quite demanding in terms of resources. We used to
+%D delay these definitions till runtime, but since today's \TEX\ engines are
+%D large, we now do the work beforehand.
%D
%D \starttyping
%D \setupbackgrounds [settings]
@@ -448,110 +459,115 @@
%D
%D \showsetup{setupbackgrounds}
%D
-%D Because the number of arguments runs from one to three,
-%D we need to check for it.
+%D Because the number of arguments runs from one to three, we need to check
+%D for it.
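%D The three call forms thus look as follows (just a sketch):
%D
%D \starttyping
%D \setupbackgrounds[state=repeat]                 % one argument: global settings
%D \setupbackgrounds[page][background=color]       % two arguments: main areas
%D \setupbackgrounds[text][text][background=color] % three arguments: layout cells
%D \stoptyping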
\newtoks\everybackgroundssetup
\unexpanded\def\setupbackgrounds
{\dotripleempty\page_backgrounds_setup}
-\def\page_backgrounds_setup[#1][#2][#3]%
+\def\page_backgrounds_setup
{\ifthirdargument
- \page_backgrounds_setup_double{#1}{#2}{#3}%
+ \expandafter\page_backgrounds_setup_double
\else\ifsecondargument
- \page_backgrounds_setup_single{#1}{#2}%
+ \doubleexpandafter\page_backgrounds_setup_single
\else\iffirstargument
- \page_backgrounds_setup_basics{#1}%
- \fi\fi\fi
- \the\everybackgroundssetup}
+ \tripleexpandafter\page_backgrounds_setup_basics
+ \fi\fi\fi}
+
+\newdimen\d_page_backgrounds_offset
+\newdimen\d_page_backgrounds_depth
\appendtoks
- \doifelsevalue{\??ma\v!page\c!offset}\v!overlay
- {\global\pageoffset\zeropoint}
- {\global\pageoffset\csname\??ma\v!page\c!offset\endcsname}%
- \global\pagedepth\csname\??ma\v!page\c!depth\endcsname
- \global\pagebackgroundoffset\pageoffset
- \global\pagebackgrounddepth\pagedepth
- \doifelse\@@mastate\v!stop
- {\global\setfalse\c_page_backgrounds_new}
- {\global\settrue \c_page_backgrounds_new}%
+ \edef\p_page_backgrounds_offset{\namedframedparameter{\??layoutbackgrounds\v!page}\c!offset}%
+ \edef\p_page_backgrounds_depth {\namedframedparameter{\??layoutbackgrounds\v!page}\c!depth }%
+ \edef\p_page_backgrounds_state {\namedframedparameter{\??layoutbackgrounds }\c!state }%
+ \global\d_page_backgrounds_offset\ifx\p_page_backgrounds_offset\empty\zeropoint\else\p_page_backgrounds_offset\fi
+ \global\d_page_backgrounds_depth \ifx\p_page_backgrounds_depth \empty\zeropoint\else\p_page_backgrounds_depth \fi
+ \global\pagebackgroundoffset\d_page_backgrounds_offset\relax
+ \global\pagebackgrounddepth \d_page_backgrounds_depth \relax
+ \ifx\p_page_backgrounds_state\v!stop
+ \global\setfalse\c_page_backgrounds_new
+ \else
+ \global\settrue \c_page_backgrounds_new
+ \fi
\to \everybackgroundssetup
\def\v_page_backgrounds_double_set{\v!paper,\v!page,\v!leftpage,\v!rightpage}
\def\v_page_backgrounds_single_set{\v!text,\v!hidden,\v!paper,\v!page,\v!leftpage,\v!rightpage}
\def\v_page_backgrounds_common_set{\v!leftedge,\v!leftmargin,\v!text,\v!rightmargin,\v!rightedge}
-\def\page_backgrounds_setup_double#1#2#3%
+\unexpanded\def\page_backgrounds_setup_double[#1][#2][#3]% if needed we can speed this up
{\global\settrue\c_page_backgrounds_some
- \def\docommand##1%
+ \def\page_backgrounds_setup_step##1%
{\doifinsetelse{##1}\v_page_backgrounds_double_set
{\page_backgrounds_setup_and_check{##1}{#3}}
- {\def\dodocommand####1{\page_backgrounds_setup_and_check{##1####1}{#3}}%
- \processcommalist[#2]\dodocommand}}%
- \processcommalist[#1]\docommand}
+ {\def\page_backgrounds_setup_step_nested####1{\page_backgrounds_setup_and_check{##1####1}{#3}}%
+ \processcommacommand[#2]\page_backgrounds_setup_step_nested}}%
+ \processcommacommand[#1]\page_backgrounds_setup_step
+ \the\everybackgroundssetup}
-\def\page_backgrounds_setup_single#1#2%
+\unexpanded\def\page_backgrounds_setup_single[#1][#2][#3]%
{\global\settrue\c_page_backgrounds_some
\doifcommonelse{#1}\v_page_backgrounds_single_set
- {\def\docommand##1{\page_backgrounds_setup_and_check{##1}{#2}}%
- \processcommalist[#1]\docommand}%
- {\page_backgrounds_setup_double{#1}\v_page_backgrounds_common_set{#2}}}
+ {\def\page_backgrounds_setup_step##1{\page_backgrounds_setup_and_check{##1}{#2}}%
+ \processcommacommand[#1]\page_backgrounds_setup_step
+ \the\everybackgroundssetup}%
+ {\page_backgrounds_setup_double[#1][\v_page_backgrounds_common_set][#2]}}
-\def\page_backgrounds_setup_basics#1%
- {\getparameters[\??ma][#1]}
+\unexpanded\def\page_backgrounds_setup_basics[#1][#2][#3]%
+ {\setupframed[\??layoutbackgrounds][#1]%
+ \the\everybackgroundssetup}
-\def\page_backgrounds_setup_and_check#1#2% tag settings
- {\edef\currentotrbackground{\??ma#1}%
- \getparameters[\currentotrbackground][#2]%
+\unexpanded\def\page_backgrounds_setup_and_check#1#2% tag settings
+ {\edef\currentotrbackground{\??layoutbackgrounds#1}%
+ \setupframed[\currentotrbackground][#2]%
\page_backgrounds_check_background}
-%D Each areas (currently there are $1+3+25+1=30$ of them)
-%D has its own low level framed object associated.
-
-% hm, we can delay them
+%D Each area (currently there are $1+3+25+1=30$ of them) has its own low level
+%D framed object associated with it.
+
+\definesimplifiedframed[\??layoutbackgrounds\v!paper]
+\definesimplifiedframed[\??layoutbackgrounds\v!page]
+\definesimplifiedframed[\??layoutbackgrounds\v!leftpage]
+\definesimplifiedframed[\??layoutbackgrounds\v!rightpage]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!hidden]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!top\v!leftedge]
+\definesimplifiedframed[\??layoutbackgrounds\v!top\v!leftmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!top\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!top\v!rightmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!top\v!rightedge]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!header\v!leftedge]
+\definesimplifiedframed[\??layoutbackgrounds\v!header\v!leftmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!header\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!header\v!rightmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!header\v!rightedge]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!text\v!leftedge]
+\definesimplifiedframed[\??layoutbackgrounds\v!text\v!leftmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!text\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!text\v!rightmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!text\v!rightedge]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!footer\v!leftedge]
+\definesimplifiedframed[\??layoutbackgrounds\v!footer\v!leftmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!footer\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!footer\v!rightmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!footer\v!rightedge]
+
+\definesimplifiedframed[\??layoutbackgrounds\v!bottom\v!leftedge]
+\definesimplifiedframed[\??layoutbackgrounds\v!bottom\v!leftmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!bottom\v!text]
+\definesimplifiedframed[\??layoutbackgrounds\v!bottom\v!rightmargin]
+\definesimplifiedframed[\??layoutbackgrounds\v!bottom\v!rightedge]
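%D Because each area is a (simplified) framed, the usual framed keys should work
%D on a cell as well; a sketch:
%D
%D \starttyping
%D \setupbackgrounds
%D   [text]
%D   [rightmargin]
%D   [frame=on,background=color,backgroundcolor=lightgray]
%D \stoptyping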
-\unexpanded\def\installsomebackground#1#2{\inheritlocalframed[\??ma#1#2][\??od]}
-
-\installsomebackground \v!paper \empty
-\installsomebackground \v!page \empty
-\installsomebackground \v!leftpage \empty
-\installsomebackground \v!rightpage \empty
-
-%D The stand alone text area inherits from the page too.
-
-\installsomebackground \v!text \empty
-\installsomebackground \v!hidden \empty
-
-%D We save some keying by defining the areas using a helper:
-
-\def\docommand#1%
- {\installsomebackground#1\v!leftedge
- \installsomebackground#1\v!leftmargin
- \installsomebackground#1\v!text
- \installsomebackground#1\v!rightmargin
- \installsomebackground#1\v!rightedge}
-
-\docommand \v!top
-\docommand \v!header
-\docommand \v!text
-\docommand \v!footer
-\docommand \v!bottom
-
-%D We need some cleanup now.
-
-\let\docommand\relax
-
-%D We now set up the individual areas to use reasonable
-%D defaults.
-
-\installsomebackground \v!paper \empty
-\installsomebackground \v!page \empty
-\installsomebackground \v!leftpage \empty
-\installsomebackground \v!rightpage \empty
-
-\getparameters
- [\??ma\v!page]
+\setupbackgrounds
+ [\v!page]
[\c!offset=\zeropoint, % hm, so we need to force overlay elsewhere
\c!depth=\zeropoint]
@@ -560,29 +576,23 @@
\setupbackgrounds
[\c!state=\c!start]
-%D The hidden layer can be populated by extending the
-%D following comma separated list. This only happens in core
-%D modules.
+%D The hidden layer can be populated by extending the following comma separated
+%D list. This only happens in core modules.
% todo page-2 .. page+2 achter pagina -> bleed
% spread-2 .. spread+2 achter spread -> spread (repeat 2 times)
-\def\enablehiddenbackground
+\unexpanded\def\enablehiddenbackground
{\global\settrue\c_page_backgrounds_hidden_enabled
\global\settrue\c_page_backgrounds_some
\page_backgrounds_recalculate}
-\def\disablehiddenbackground
+\unexpanded\def\disablehiddenbackground
{\global\setfalse\c_page_backgrounds_hidden_enabled}
\def\hiddenbackgroundlist
{\v!text-2,\v!text-1,\v!foreground,\v!text+1,\v!text+2}
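%D A core module could (hypothetically) hook an extra overlay into this list by
%D redefining it; the overlay name below is made up:
%D
%D \starttyping
%D \def\hiddenbackgroundlist
%D   {\v!text-2,\v!text-1,\v!foreground,\v!text+1,\v!text+2,mycoreoverlay} % hypothetical overlay
%D \stoptyping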
-% \defineoverlay[\v!text-2][\positionoverlay{\v!text-2}]
-% \defineoverlay[\v!text-1][\positionoverlay{\v!text-1}]
-% \defineoverlay[\v!text+1][\positionoverlay{\v!text+1}]
-% \defineoverlay[\v!text+2][\positionoverlay{\v!text+2}]
-
\defineoverlay[\v!text-2][\positionregionoverlay\textanchor{\v!text-2}] % no new anchor, we share text
\defineoverlay[\v!text-1][\positionregionoverlay\textanchor{\v!text-1}]
\defineoverlay[\v!text+1][\positionregionoverlay\textanchor{\v!text+1}]
@@ -592,13 +602,19 @@
[\v!hidden]
[\c!background=\hiddenbackgroundlist]
-% The next series is used in local (for instance floating)
-% backgrounds.
+%D Because we haven't really set up backgrounds yet, we set the main efficiency
+%D switch to false.
+
+\setfalse\c_page_backgrounds_some
+
+\protect \endinput
-% \installsomebackground \v!local \empty % not really a background, invisible for users
+% %D The next series is used in local (for instance floating) backgrounds.
+%
+% \installsimplifiedframed{\??layoutbackgrounds\v!local}
%
% \getparameters
-% [\??ma\v!local]
+% [\??layoutbackgrounds\v!local]
% [\c!component=local,
% \c!background=\localbackgroundlist]
%
@@ -618,7 +634,7 @@
% \fi}
%
% \def\page_backgrounds_add_local_to_box_indeed#1%
-% {\setbox#1\hbox{\localbackgroundframed{\??ma\v!local}\v!local#1}%
+% {\setbox#1\hbox{\localbackgroundframed{\??layoutbackgrounds\v!local}\v!local#1}%
% \global\advance\localpositionnumber\plusone} % afterwards !
%
% \let\page_backgrounds_add_local_to_box\gobbleoneargument
@@ -633,9 +649,3 @@
% \input tufte
% \stopcolumnset
-%D Because we haven't really set up backgrounds yet, we set
-%D the main efficiency switch to false.
-
-\setfalse\c_page_backgrounds_some
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-box.mkvi b/Master/texmf-dist/tex/context/base/page-box.mkvi
index 58a5627cbfc..8f5647a14bc 100644
--- a/Master/texmf-dist/tex/context/base/page-box.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-box.mkvi
@@ -99,7 +99,7 @@
{\setbox#box\vbox
{\offinterlineskip
\dorecurse{\layoutparameter\c!ny}
- {\hbox{\dorecurse{\layoutparameter\c!nx}{\copy#box\hskip\layoutparameter\c!dx}\unskip}%
+ {\hbox{\dorecurse{\layoutparameter\c!nx}{\copy#box\kern\layoutparameter\c!dx}\unskip}%
\vskip\layoutparameter\c!dy}
\unskip}}
@@ -194,7 +194,7 @@
\advance\scratchwidth \scratchoffset
\ifconditional#right\relax
\scratchdimen-\scratchoffset
- \hskip\scratchdimen
+ \kern\scratchdimen
\else
\scratchdimen\zeropoint
\fi
@@ -227,10 +227,10 @@
\doifbothsides
{\advance\scratchwidth\scratchoffset
\scratchdimen-\scratchoffset
- \hskip\scratchdimen}%
+ \kern\scratchdimen}%
{\scratchdimen\zeropoint}
{\scratchdimen-\scratchoffset
- \hskip\scratchdimen}%
+ \kern\scratchdimen}%
\lower\scratchoffset\hbox
{\clip
[\c!hoffset=\scratchdimen,
diff --git a/Master/texmf-dist/tex/context/base/page-brk.mkiv b/Master/texmf-dist/tex/context/base/page-brk.mkiv
index 72255676097..ac1fffd6b1c 100644
--- a/Master/texmf-dist/tex/context/base/page-brk.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-brk.mkiv
@@ -53,9 +53,9 @@
\def\page_breaks_handle#1%
{\edef\page_breaks_current_options{#1}% handy for tracing
- \processcommacommand[\page_breaks_current_options]\page_breaks_handle_indeed}
+ \processcommacommand[\page_breaks_current_options]\page_breaks_handle_step}
-\def\page_breaks_handle_indeed#1%
+\def\page_breaks_handle_step#1%
{\edef\page_breaks_current_option{#1}% can be used in handler
\ifcsname\??pagebreakmethod\page_breaks_current_option\endcsname
\csname\??pagebreakmethod\page_breaks_current_option\endcsname
@@ -80,7 +80,8 @@
{\setvalue{\??pagebreaks#1}{#2}}
\unexpanded\def\pagebreak
- {\dosingleempty\page_breaks_process}
+ {\par % else no vertical penalties
+ \dosingleempty\page_breaks_process}
\let\page\pagebreak
@@ -269,9 +270,9 @@
\def\page_breaks_columns_handle#1%
{\edef\page_breaks_columns_current_options{#1}%
- \processcommacommand[#1]\page_breaks_columns_handle_indeed}
+ \processcommacommand[#1]\page_breaks_columns_handle_step}
-\def\page_breaks_columns_handle_indeed#1%
+\def\page_breaks_columns_handle_step#1%
{\edef\page_breaks_columns_current_option{#1}%
\ifcsname\??columnbreakmethod\currentoutputroutine:\page_breaks_columns_current_option\endcsname
\csname\??columnbreakmethod\currentoutputroutine:\page_breaks_columns_current_option\endcsname
@@ -296,7 +297,8 @@
{\setvalue{\??columnbreaks#1}{#2}}
\unexpanded\def\columnbreak
- {\dosingleempty\page_breaks_columns_process}
+ {\par % else no vertical penalties
+ \dosingleempty\page_breaks_columns_process}
\let\column\columnbreak
diff --git a/Master/texmf-dist/tex/context/base/page-col.mkiv b/Master/texmf-dist/tex/context/base/page-col.mkiv
index 50a05d3510c..14b5124210f 100644
--- a/Master/texmf-dist/tex/context/base/page-col.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-col.mkiv
@@ -13,74 +13,36 @@
\writestatus{loading}{ConTeXt Page Macros / Column Helpers}
-%D Here we implement a coouple of helpers for dealing with
-%D columns. For the moment we keep the names.
+%D Here we implement a couple of helpers for dealing with columns. For
+%D the moment we keep the names. When the mul and set modules are redone
+%D these can be adapted or removed.
\unprotect
-%D The next two registers can be used to store pre column
-%D material as well as footnotes or so.
+%D We reserve a counter for the number of columns as well as the current
+%D column. Neither is to be changed by users!
-\newbox\precolumnbox \newdimen\precolumnboxheight
-\newbox\postcolumnbox \newdimen\postcolumnboxheight
+\newcount\nofcolumns \nofcolumns\plusone
+\newcount\mofcolumns \mofcolumns\plusone
-%D We reserve a counter for the number of columns as well as
-%D the current column. Both are not to be changed by users!
-
-\newcount\nofcolumns \nofcolumns = 1
-\newcount\mofcolumns \mofcolumns = 1
+\newconstant\columndirection % 0:lr 1:rl
\setnewconstant\maxnofcolumns 50
\setnewconstant\allocatednofcolumns 0
-%D The next dimensions reports the final column height
-
-\newdimen\finalcolumnheights
-\newcount\finalcolumnlines
-
-%D This register can be used as a temporary storage for page
-%D content.
-
-\newbox\restofpage
-
-%D A few more (some might go away):
-
-\newif\ifintermediatefootnotes
-\newif\ifcarryoverfootnotes %\carryoverfootnotestrue
-\newif\iflastcolumnfootnotes %\lastcolumnfootnotestrue
-\newif\ifbalancecolumns %\balancecolumnstrue
-\newif\ifbalancetoheight %\balancetoheighttrue
-\newif\ifforcecolumngrid \forcecolumngridtrue
-\newif\ifstretchcolumns \stretchcolumnsfalse
-\newif\ifinheritcolumns \inheritcolumnsfalse
-\newif\ifheightencolumns \heightencolumnsfalse
-
-\newif\ifbalancingcolumns
-\newif\ifcollectingcontent
-\newif\ifcolumnoverflow
-
-\newdimen\intercolumnwidth
-\newdimen\localcolumnwidth
-\newdimen\savedpagetotal
-
-\newconstant\columndirection % 0:lr 1:rl
-
-\def\minbalancetoplines {1}
-\def\minfreecolumnlines {2}
-
-\newif\ifrecentercolumnbox \recentercolumnboxtrue
-\newif\ifrerecentercolumnbox \rerecentercolumnboxtrue
-\newif\ifpackcolumnfloats \packcolumnfloatstrue
-
-%D During initialization the temporary boxes are allocated.
-%D This enables us to use as much columns as we want, without
-%D exhausting the pool of boxes too fast. We could have packed
-%D them in one box, but we've got enough boxes.
+%D During initialization the temporary boxes are allocated. This enables us to
+%D use as many columns as we want, without exhausting the pool of boxes too
+%D fast. We could have packed them in one box, but we've got enough boxes.
%D
-%D Two sets of boxes are declared, the txtboxes are used for
-%D the text, the topboxes are for moved column floats.
+%D Two sets of boxes are declared, the txtboxes are used for the text, the
+%D topboxes are for moved column floats.
+
+\installcorenamespace{columntext}
+\installcorenamespace{columnfooter}
+\installcorenamespace{columntop}
+\installcorenamespace{columnbottom}
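%D As a sketch (assuming \initializecolumns{3} or more has run), the text box of
%D column three can be filled and copied like this:
%D
%D \starttyping
%D \columnsettextbox\plusthree\vbox{some column content}
%D \columngettextbox\plusthree
%D \stoptyping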
-\def\initializecolumns#1%
+\unexpanded\def\initializecolumns#1%
{\ifnum#1>\maxnofcolumns
\showmessage\m!columns1\maxnofcolumns
\nofcolumns\maxnofcolumns
@@ -88,17 +50,23 @@
\nofcolumns#1\relax
\fi
\ifnum\nofcolumns>\allocatednofcolumns
- \dorecurse\nofcolumns
- {\ifnum\recurselevel>\allocatednofcolumns\relax
- % \newbox\next \letgvalue{\??zc-\recurselevel-t}=\next
- \expandafter\newbox\csname\??zc-\recurselevel-t\endcsname % text
- \expandafter\newbox\csname\??zc-\recurselevel-f\endcsname % foot
- \expandafter\newbox\csname\??zc-\recurselevel-h\endcsname % top insert
- \expandafter\newbox\csname\??zc-\recurselevel-l\endcsname % top insert
- \fi}%
- \global\allocatednofcolumns\nofcolumns
+ \page_columns_allocate
\fi}
+\def\page_columns_allocate
+ {\dorecurse\nofcolumns
+ {\ifnum\recurselevel>\allocatednofcolumns\relax
+ \ifcsname\??columntext\recurselevel\endcsname \else
+ \expandafter\newbox\csname\??columntext \recurselevel\endcsname % text
+ \expandafter\newbox\csname\??columnfooter\recurselevel\endcsname % footer
+ \expandafter\newbox\csname\??columntop \recurselevel\endcsname % top insert
+ \expandafter\newbox\csname\??columnbottom\recurselevel\endcsname % bottom insert
+ \fi
+ \fi}%
+ \global\allocatednofcolumns\nofcolumns}
+
+\def\currentcolumn {1}
+
\def\firstcolumnbox {\columntextbox\plusone}
\def\currentcolumnbox {\columntextbox\mofcolumns}
\def\lastcolumnbox {\columntextbox\nofcolumns}
@@ -107,50 +75,105 @@
\def\currenttopcolumnbox {\columntopbox \mofcolumns}
\def\lasttopcolumnbox {\columntopbox \nofcolumns}
-\def\columntextbox #1{\csname\??zc-\number#1-t\endcsname}
-\def\columnfootbox #1{\csname\??zc-\number#1-f\endcsname}
-\def\columntopbox #1{\csname\??zc-\number#1-h\endcsname}
-\def\columnbotbox #1{\csname\??zc-\number#1-l\endcsname}
-
-\def\columnsettextbox {\global\setbox\columntextbox}
-\def\columnsetfootbox {\global\setbox\columnfootbox}
-\def\columnsettopbox {\global\setbox\columntopbox}
-\def\columnsetbotbox {\global\setbox\columnbotbox}
-
-\def\columngettextbox {\copy\columntextbox}
-\def\columngetfootbox {\copy\columnfootbox}
-\def\columngettopbox {\copy\columntopbox}
-\def\columngetbotbox {\copy\columnbotbox}
-
-\def\columnerasetextboxes{\dorecurse\allocatednofcolumns{\columnsettextbox\recurselevel\emptybox}}
-\def\columnerasefootboxes{\dorecurse\allocatednofcolumns{\columnsetfootbox\recurselevel\emptybox}}
-\def\columnerasetopboxes {\dorecurse\allocatednofcolumns{\columnsettopbox \recurselevel\emptybox}}
-\def\columnerasebotboxes {\dorecurse\allocatednofcolumns{\columnsetbotbox \recurselevel\emptybox}}
-
-%D Without going in details we present two macro's which handle
-%D the columns. The action which is transfered by the the first
-%D and only parameter can do something with \type
-%D {\currentcolumnbox}. In case of the mid columns, \type
-%D {\firstcolumnbox} and \type {\lastcolumnbox} are handled
-%D outside these macro's.
-
-\def\dohandlecolumn#1%
- {\mofcolumns\recurselevel
- \let\currentcolumn\recurselevel
- #1\relax}
-
-\def\dohandleallcolumns#1%
- {\dorecurse\nofcolumns{\dohandlecolumn{#1}}}
-
-\def\dohandlerevcolumns#1%
- {\dostepwiserecurse\nofcolumns\plusone\minusone{\dohandlecolumn{#1}}}
-
-\def\dohandlemidcolumns#1%
- {\dohandleallcolumns
- {\ifnum\recurselevel>\plusone
- \ifnum\recurselevel<\nofcolumns
- \dohandlecolumn{#1}%
- \fi
- \fi}}
+\def\columntextbox #1{\csname\??columntext \number#1\endcsname}
+\def\columnfootbox #1{\csname\??columnfooter\number#1\endcsname}
+\def\columntopbox #1{\csname\??columntop \number#1\endcsname}
+\def\columnbotbox #1{\csname\??columnbottom\number#1\endcsname}
+
+\unexpanded\def\columnsettextbox {\global\setbox\columntextbox}
+\unexpanded\def\columnsetfootbox {\global\setbox\columnfootbox}
+\unexpanded\def\columnsettopbox {\global\setbox\columntopbox}
+\unexpanded\def\columnsetbotbox {\global\setbox\columnbotbox}
+
+\unexpanded\def\columngettextbox {\copy\columntextbox}
+\unexpanded\def\columngetfootbox {\copy\columnfootbox}
+\unexpanded\def\columngettopbox {\copy\columntopbox}
+\unexpanded\def\columngetbotbox {\copy\columnbotbox}
+
+\unexpanded\def\columnerasetextboxes{\dorecurse\allocatednofcolumns{\columnsettextbox\recurselevel\emptybox}}
+\unexpanded\def\columnerasefootboxes{\dorecurse\allocatednofcolumns{\columnsetfootbox\recurselevel\emptybox}}
+\unexpanded\def\columnerasetopboxes {\dorecurse\allocatednofcolumns{\columnsettopbox \recurselevel\emptybox}}
+\unexpanded\def\columnerasebotboxes {\dorecurse\allocatednofcolumns{\columnsetbotbox \recurselevel\emptybox}}
+
+%D Without going into detail we present the macros which handle the columns. The
+%D action which is transferred by the first and only parameter can do something
+%D with \type {\currentcolumnbox}. In the case of the mid columns, \type
+%D {\firstcolumnbox} and \type {\lastcolumnbox} are handled outside these macros.
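%D A sketched application: wipe the text box of every column in one pass, much
%D like \columnerasetextboxes does.
%D
%D \starttyping
%D \dohandleallcolumns
%D   {\global\setbox\currentcolumnbox\emptybox}
%D \stoptyping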
+
+% \unexpanded\def\dohandlecolumn#1%
+% {\mofcolumns\recurselevel
+% \let\currentcolumn\recurselevel
+% #1\relax}
+%
+% \unexpanded\def\dohandleallcolumns#1%
+% {\dorecurse\nofcolumns{\dohandlecolumn{#1}}}
+%
+% \unexpanded\def\dohandlerevcolumns#1%
+% {\dostepwiserecurse\nofcolumns\plusone\minusone{\dohandlecolumn{#1}}}
+%
+% \unexpanded\def\dohandlemidcolumns#1%
+% {\dohandleallcolumns
+% {\ifnum\recurselevel>\plusone
+% \ifnum\recurselevel<\nofcolumns
+% \dohandlecolumn{#1}%
+% \fi
+% \fi}}
+%
+% As we don't nest we can use a fast variant:
+
+\unexpanded\def\dohandleallcolumnscs#1{\let\page_columns_action #1\page_columns_all_indeed}
+\unexpanded\def\dohandleallcolumns #1{\def\page_columns_action{#1}\page_columns_all_indeed}
+
+\unexpanded\def\dohandlerevcolumnscs#1{\let\page_columns_action #1\page_columns_rev_indeed}
+\unexpanded\def\dohandlerevcolumns #1{\def\page_columns_action{#1}\page_columns_rev_indeed}
+
+\unexpanded\def\dohandlemidcolumnscs#1{\let\page_columns_action #1\page_columns_mid_indeed}
+\unexpanded\def\dohandlemidcolumns #1{\def\page_columns_action{#1}\page_columns_mid_indeed}
+
+\unexpanded\def\page_columns_all_indeed
+ {\mofcolumns\plusone
+ \edef\currentcolumn{\the\mofcolumns}%
+ \page_columns_action\relax
+ \ifnum\mofcolumns<\nofcolumns
+ \expandafter\page_columns_all_next
+ \fi}
+
+\unexpanded\def\page_columns_all_next
+ {\advance\mofcolumns\plusone
+ \edef\currentcolumn{\the\mofcolumns}%
+ \page_columns_action\relax
+ \ifnum\mofcolumns<\nofcolumns
+ \expandafter\page_columns_all_next
+ \fi}
+
+\unexpanded\def\page_columns_rev_indeed
+ {\mofcolumns\nofcolumns
+ \edef\currentcolumn{\the\mofcolumns}%
+ \page_columns_action\relax
+ \ifnum\mofcolumns>\plusone
+ \expandafter\page_columns_rev_prev
+ \fi}
+
+\unexpanded\def\page_columns_rev_prev
+ {\advance\mofcolumns\minusone
+ \edef\currentcolumn{\the\mofcolumns}%
+ \page_columns_action\relax
+ \ifnum\mofcolumns>\plusone
+ \expandafter\page_columns_rev_prev
+ \fi}
+
+\unexpanded\def\page_columns_mid_indeed
+ {\mofcolumns\plustwo
+ \ifnum\mofcolumns<\nofcolumns
+ \expandafter\page_columns_mid_next
+ \fi}
+
+\unexpanded\def\page_columns_mid_next
+ {\edef\currentcolumn{\the\mofcolumns}%
+ \page_columns_action\relax
+ \advance\mofcolumns\plusone
+ \ifnum\mofcolumns<\nofcolumns
+ \expandafter\page_columns_mid_next
+ \fi}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-com.mkiv b/Master/texmf-dist/tex/context/base/page-com.mkiv
index 80012dd1454..0ff8e3b54e4 100644
--- a/Master/texmf-dist/tex/context/base/page-com.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-com.mkiv
@@ -30,79 +30,141 @@
%D \stoppagecomment
%D \stoptyping
-\unexpanded\def\setuppagecomment
- {\dosingleempty\dosetuppagecomment}
-
-\def\dosetuppagecomment[#1]%
- {\getparameters[\??pc][#1]%
- \doifelse\@@pcstate\v!start
- {\doifinsetelse\@@pclocation{\v!bottom,\v!top}
- {\setuppapersize[\c!left=\hskip\@@pcoffset]%
- \edef\@@pcpaperheight{\the\dimexpr\paperheight+\@@pcoffset+\@@pcoffset+\@@pcdistance+\@@pcheight}%
- \edef\@@pcpaperwidth {\the\dimexpr\paperwidth +\@@pcoffset+\@@pcoffset}%
- \defineoverlay[\v!pagecomment][\placepagecommentTB]}
- {\setuppapersize[\c!top=\vskip\@@pcoffset]%
- \edef\@@pcpaperheight{\the\dimexpr\paperheight+\@@pcoffset+\@@pcoffset}%
- \edef\@@pcpaperwidth {\the\dimexpr\paperwidth +\@@pcoffset+\@@pcoffset+\@@pcdistance+\@@pcwidth}%
- \defineoverlay[\v!pagecomment][\placepagecommentLR]}%
- \processaction
- [\@@pclocation]
- [ \v!bottom=>{\setuplayout[\c!location=]\setuppapersize[\c!bottom=\vss,\c!top =\vskip\@@pcoffset]},
- \v!top=>{\setuplayout[\c!location=]\setuppapersize[\c!top =\vss,\c!bottom=\vskip\@@pcoffset]},
- \v!left=>{\setuplayout[\c!location=]\setuppapersize[\c!left =\hss,\c!right =\hskip\@@pcoffset]},
- \v!right=>{\setuplayout[\c!location=]\setuppapersize[\c!right =\hss,\c!left =\hskip\@@pcoffset]}]%
- \definepapersize
- [\v!pagecomment]
- [\c!height=\@@pcpaperheight,
- \c!width=\@@pcpaperwidth]%
- \let\@@pcprintpapersize\printpapersize
- \setuppapersize[\papersize][\v!pagecomment]%
- \setupbackgrounds[\v!paper][\c!background=\v!pagecomment]}% todo append if already set
- {\doif\@@pcstate\v!stop % else initialization invokes backgrounds
- {% this should be tested first
- % \normalexpanded{\noexpand\setuppapersize[\papersize][\@@pcprintpapersize]}%
- \setupbackgrounds[\v!paper][\c!background=]}}}
-
-\def\@@pcprintpapersize{\printpapersize}
-
-\unexpanded\def\placepagecommentTB
+\installcorenamespace{pagecomment}
+\installcorenamespace{pagecommentlocations}
+\installcorenamespace{pagecommentstates}
+
+\newdimen\d_page_comments_offset
+\newdimen\d_page_comments_page_width
+\newdimen\d_page_comments_page_height
+\newdimen\d_page_comments_width
+\newdimen\d_page_comments_height
+\newdimen\d_page_comments_distance
+
+\newconditional\c_page_comment_enabled
+
+\let\p_page_commands_location\v!none
+\let\p_page_commands_state \v!stop
+
+\installsetuponlycommandhandler \??pagecomment {pagecomment}
+
+\appendtoks
+ \edef\p_page_commands_location{\directpagecommentparameter\c!location}%
+ \edef\p_page_commands_state {\directpagecommentparameter\c!state}%
+ \expandnamespacemacro\??pagecommentstates\p_page_commands_state\v!none
+\to \everysetuppagecomment
+
+\ifdefined\scrn_canvas_synchronize_simple
+
+ \appendtoks
+ \ifx\p_page_commands_state\v!start
+ \scrn_canvas_synchronize_simple
+ \else
+ \scrn_canvas_synchronize_complex
+ \fi
+ \to \everyshipout
+
+\fi
+
+\setvalue{\??pagecommentstates\v!start}%
+ {\d_page_comments_offset \directpagecommentparameter\c!offset \relax
+ \d_page_comments_distance\directpagecommentparameter\c!distance\relax
+ \d_page_comments_width \directpagecommentparameter\c!width \relax
+ \d_page_comments_height \directpagecommentparameter\c!height \relax
+ \expandnamespacemacro\??pagecommentlocations\p_page_commands_location\v!none
+ \definepapersize[\v!pagecomment][\c!height=\d_page_comments_page_height,\c!width=\d_page_comments_page_width]%
+ \setuppapersize[\papersize][\v!pagecomment]%
+ \setupbackgrounds[\v!paper][\c!background=\v!pagecomment]} % maybe append if already set
+
+\setvalue{\??pagecommentstates\v!stop}%
+ {\setupbackgrounds[\v!paper][\c!background=]}
+
+\setvalue{\??pagecommentstates\v!none}%
+ {}
+
+\setvalue{\??pagecommentlocations\v!bottom}%
+ {\setuplayout[\c!location=]%
+ \setuppapersize[\c!top=\vskip\d_page_comments_offset,\c!bottom=\vss,\c!left=\hskip\d_page_comments_offset,\c!right=]%
+ \d_page_comments_page_height\dimexpr\paperheight+2\d_page_comments_offset+\d_page_comments_distance+\d_page_comments_height\relax
+ \d_page_comments_page_width \dimexpr\paperwidth +2\d_page_comments_offset\relax
+ \defineoverlay[\v!pagecomment][\page_comments_top_bottom]}
+
+\setvalue{\??pagecommentlocations\v!top}%
+ {\setuplayout[\c!location=]%
+ \setuppapersize[\c!top=\vss,\c!bottom=\vskip\d_page_comments_offset,\c!left=\hskip\d_page_comments_offset,\c!right=]%
+ \d_page_comments_page_height\dimexpr\paperheight+2\d_page_comments_offset+\d_page_comments_distance+\d_page_comments_height\relax
+ \d_page_comments_page_width \dimexpr\paperwidth +2\d_page_comments_offset\relax
+ \defineoverlay[\v!pagecomment][\page_comments_top_bottom]}
+
+\setvalue{\??pagecommentlocations\v!left}%
+ {\setuplayout[\c!location=]%
+ \setuppapersize[\c!top=\vskip\d_page_comments_offset,\c!bottom=,\c!left=\hss,\c!right=\hskip\d_page_comments_offset]%
+ \d_page_comments_page_height\dimexpr\paperheight+2\d_page_comments_offset\relax
+ \d_page_comments_page_width \dimexpr\paperwidth +2\d_page_comments_offset+\d_page_comments_distance+\d_page_comments_width\relax
+ \defineoverlay[\v!pagecomment][\page_comments_left_right]}
+
+\setvalue{\??pagecommentlocations\v!right}%
+ {\setuplayout[\c!location=]%
+ \setuppapersize[\c!top=\vskip\d_page_comments_offset,\c!bottom=,\c!left=\hskip\d_page_comments_offset,\c!right =\hss]%
+ \d_page_comments_page_height\dimexpr\paperheight+2\d_page_comments_offset\relax
+ \d_page_comments_page_width \dimexpr\paperwidth +2\d_page_comments_offset+\d_page_comments_distance+\d_page_comments_width\relax
+ \defineoverlay[\v!pagecomment][\page_comments_left_right]}
+
+\setvalue{\??pagecommentlocations\v!none}%
+ {}% \setuppapersize[\c!bottom=,\c!top=,\c!left=,\c!right=]}
+
+\unexpanded\def\page_comments_top_bottom
{\vbox to \printpaperheight
{%\forgetall
- \hsize\printpaperwidth
- \vskip\@@pcoffset
- \doifelse\@@pclocation\v!bottom{\vskip\dimexpr\paperheight+\@@pcdistance\relax}\vss
- \hskip\@@pcoffset
- \vbox to \@@pcheight
+ \hsize\printpaperwidth\relax
+ \ifx\p_page_commands_location\v!bottom
+ \vskip\dimexpr\paperheight+\d_page_comments_distance+\d_page_comments_offset\relax
+ \else
+ \vskip\d_page_comments_offset
+ \vss
+ \fi
+ \hskip\d_page_comments_offset
+ \vbox to \d_page_comments_height
{%\forgetall
\hsize\paperwidth
- \ifpagecomment
+ \ifconditional\c_page_comment_enabled
\getbuffer[\v!pagecomment]%
- \global\pagecommentfalse
+ \global\setfalse\c_page_comment_enabled
\fi}%
\hfill
- \doifelse\@@pclocation\v!bottom\vss{\vskip\dimexpr\paperheight+\@@pcdistance\relax}%
- \vskip\@@pcoffset}}
+ \ifx\p_page_commands_location\v!bottom
+ \vss
+ \vskip\d_page_comments_offset
+ \else
+ \vskip\dimexpr\paperheight+\d_page_comments_distance+\d_page_comments_offset\relax
+ \fi}}
-\unexpanded\def\placepagecommentLR
+\unexpanded\def\page_comments_left_right
{\hbox to \printpaperwidth
- {\hskip\@@pcoffset
- \doifelse\@@pclocation\v!right{\hskip\paperwidth\hskip\@@pcdistance}\hss
+ {\ifx\p_page_commands_location\v!right
+ \hskip\dimexpr\paperwidth+\d_page_comments_distance+\d_page_comments_offset\relax
+ \else
+ \hskip\d_page_comments_offset
+ \hss
+ \fi
\vbox to \printpaperheight
{%\forgetall
- \vskip\@@pcoffset
- \hsize\@@pcwidth
- \ifpagecomment
+ \vskip\d_page_comments_offset
+ \hsize\d_page_comments_width
+ \ifconditional\c_page_comment_enabled
\getbuffer[\v!pagecomment]%
- \global\pagecommentfalse
+ \global\setfalse\c_page_comment_enabled
\fi
\vss}%
- \doifelse\@@pclocation\v!right\hss{\hskip\paperwidth\hskip\@@pcdistance}%
- \hskip\@@pcoffset}}
-
-\newif\ifpagecomment
+ \ifx\p_page_commands_location\v!right
+ \hss
+ \hskip\d_page_comments_offset
+ \else
+ \hskip\dimexpr\paperwidth+\d_page_comments_distance+\d_page_comments_offset\relax
+ \fi}}
\setvalue{\e!start\v!pagecomment}%
- {\global\pagecommenttrue
+ {\global\settrue\c_page_comment_enabled
\grabbufferdatadirect\v!pagecomment{\e!start\v!pagecomment}{\e!stop\v!pagecomment}}
\setuppagecomment
diff --git a/Master/texmf-dist/tex/context/base/page-fac.mkiv b/Master/texmf-dist/tex/context/base/page-fac.mkiv
index fef1cd5ac35..88789abe9fb 100644
--- a/Master/texmf-dist/tex/context/base/page-fac.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-fac.mkiv
@@ -25,19 +25,21 @@
%D \stoptext
%D \stoptyping
-\newbox\b_page_facings_content
+\newbox \b_page_facings_content
+\newconditional\c_page_facings_busy
-\unexpanded\def\setupoppositeplacing
- {\dodoubleargument\getparameters[\??np]}
+\installcorenamespace{oppositeplacing}
+
+\installsetuponlycommandhandler \??oppositeplacing {oppositeplacing}
\unexpanded\def\startopposite
{\dowithnextboxcontent
{\hsize\makeupwidth}%
{\global\setbox\b_page_facings_content\vbox
{\ifvoid\b_page_facings_content
- \@@npbefore
+ \directoppositeplacingparameter\c!before
\else
- \@@npinbetween
+ \directoppositeplacingparameter\c!inbetween
\unvbox\b_page_facings_content
\fi
\box\nextbox}}%
@@ -47,26 +49,34 @@
{\egroup}
\def\page_facings_finish
- {\ifvoid\b_page_facings_content\else
+ {\ifvoid\b_page_facings_content \else
\global\setbox\b_page_facings_content\vbox to \makeupheight
{\unvbox\b_page_facings_content
- \@@npafter
+ \directoppositeplacingparameter\c!after
\vss}%
\fi}
\def\page_facings_flush
- {\doif\@@npstate\v!start
- {\ifvoid\b_page_facings_content\else
- \ifnum\realpageno>\plusone
- \begingroup
- \pageornamentstate\plusone % tricky
- \page_facings_finish
- \page_boxes_shipout{\page_boxes_constructed_page\box\b_page_facings_content}%
- \endgroup
- \else
- \global\setbox\b_page_facings_content\emptybox
- \fi
- \fi}}
+ {\ifconditional\c_page_facings_busy
+ \ifvoid\b_page_facings_content \else
+ \page_facings_flush_indeed
+ \fi
+ \fi}
+
+\def\page_facings_flush_indeed
+ {\ifnum\realpageno>\plusone
+ \begingroup
+ \pageornamentstate\plusone % tricky
+ \page_facings_finish
+ \page_boxes_shipout{\page_boxes_constructed_page\box\b_page_facings_content}%
+ \endgroup
+ \else
+ \global\setbox\b_page_facings_content\emptybox
+ \fi}
+
+\appendtoks
+ \doifelse{\directoppositeplacingparameter\c!state}\v!start\settrue\setfalse\c_page_facings_busy
+\to \everysetupoppositeplacing
\setupoppositeplacing
[\c!state=\v!start,
diff --git a/Master/texmf-dist/tex/context/base/page-flt.lua b/Master/texmf-dist/tex/context/base/page-flt.lua
index ecd7004ae9f..68383f175f9 100644
--- a/Master/texmf-dist/tex/context/base/page-flt.lua
+++ b/Master/texmf-dist/tex/context/base/page-flt.lua
@@ -112,12 +112,12 @@ function floats.save(which,data)
insert(stack,t)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("saving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
else
- report_floats("unable to save %s float %s (empty)",which,noffloats)
+ report_floats("ignoring empty, category %a, number %a",which,noffloats)
end
end
@@ -132,7 +132,7 @@ function floats.resave(which)
insert(stack,1,last)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("resaving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -148,7 +148,7 @@ function floats.flush(which,n,bylabel)
if t then
local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("flushing %s float %s from slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
else
interfaces.showmessage("floatblocks",3,t.n)
end
@@ -168,7 +168,7 @@ function floats.consult(which,n)
if t then
local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("consulting %s float %s in slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
end
return t, b, n
else
@@ -243,11 +243,10 @@ local label = P(":") * C((1-S(",*: "))^0)
local pattern = method * (
label * position
+ C("") * position
- + label
+ + label * C("") * C("")
+ C("") * C("") * C("")
) + C("") * C("") * C("") * C("")
-
-- inspect { lpegmatch(pattern,"somewhere:blabla,crap") }
-- inspect { lpegmatch(pattern,"somewhere:1*2") }
-- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") }
@@ -262,6 +261,9 @@ end
-- interface
+local context = context
+local setvalue = context.setvalue
+
commands.flushfloat = floats.flush
commands.savefloat = floats.save
commands.resavefloat = floats.resave
@@ -278,8 +280,8 @@ function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(..
function commands.analysefloatmethod(str)
local method, label, row, column = floats.analysemethod(str)
- context.setvalue("floatmethod",method)
- context.setvalue("floatlabel", label )
- context.setvalue("floatrow", row )
- context.setvalue("floatcolumn",column)
+ setvalue("floatmethod",method)
+ setvalue("floatlabel", label )
+ setvalue("floatrow", row )
+ setvalue("floatcolumn",column)
end
diff --git a/Master/texmf-dist/tex/context/base/page-flt.mkiv b/Master/texmf-dist/tex/context/base/page-flt.mkiv
index 18c0ad263c6..d641e1c7d94 100644
--- a/Master/texmf-dist/tex/context/base/page-flt.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-flt.mkiv
@@ -20,31 +20,42 @@
\unprotect
-%D To be checked and removed
+\ifdefined\s!topfloat \else \def\s!topfloat {topfloat} \fi
+\ifdefined\s!bottomfloat \else \def\s!bottomfloat{bottomfloat} \fi
-%def\dosavefloatinfo {\showmessage\m!floatblocks2{\the\totalnoffloats}}
-\def\doinsertfloatinfo {\showmessage\m!floatblocks4{\the\totalnoffloats}}
-\def\dofloatflushedinfo{\showmessage\m!floatblocks3{\the\numexpr\totalnoffloats-\savednoffloats\relax}}
+\defineinsertion[\s!topfloat]
+\defineinsertion[\s!bottomfloat]
+
+\newdimen \d_page_floats_inserted_bottom
+\newdimen \d_page_floats_inserted_top
+\newcount \c_page_floats_n_of_top \c_page_floats_n_of_top \plustwo
+\newcount \c_page_floats_n_of_bottom \c_page_floats_n_of_bottom\zerocount
+
+\newconstant\c_page_floats_insertions_topskip_mode % 1 = no topskip
+
+%def\page_floats_report_saved {\showmessage\m!floatblocks2{\the\totalnoffloats}}
+\def\page_floats_report_total {\showmessage\m!floatblocks4{\the\totalnoffloats}}
+\def\page_floats_report_flushed{\showmessage\m!floatblocks3{\the\numexpr\totalnoffloats-\savednoffloats\relax}}
%D Extra float registers.
-\newif\ifsomefloatwaiting \somefloatwaitingfalse
-\newif\ifroomforfloat \roomforfloattrue
-\newif\ifnofloatpermitted \nofloatpermittedfalse
-\newif\ifcenterfloatbox \centerfloatboxtrue
-\newif\iflocalcenterfloatbox \localcenterfloatboxfalse
-\newif\ifglobalcenterfloatbox \globalcenterfloatboxfalse
-\newif\ifflushingfloats \flushingfloatsfalse
-\newif\ifpackflushedfloats \packflushedfloatstrue % for the moment
-\newif\ifpackcolumnfloats \packcolumnfloatstrue
-
-\newcount\totalnoffloats
-\newcount\savednoffloats
-\newcount\noffloatinserts
+\newconditional\c_page_floats_room
+\newconditional\c_page_floats_some_waiting
+\newconditional\c_page_floats_not_permitted
+\newconditional\c_page_floats_flushing
+\newconditional\c_page_floats_center_box \settrue\c_page_floats_center_box
+\newconditional\c_page_floats_center_box_local
+\newconditional\c_page_floats_center_box_global
+\newconditional\c_page_floats_pack_flushed \settrue\c_page_floats_pack_flushed
+
+%D For the moment we keep these but they will become private too.
+
+\newcount\totalnoffloats % these will be redone ... handled at the lua end anyway
+\newcount\savednoffloats % these will be redone ... handled at the lua end anyway
\newcount\nofcollectedfloats % communication channel
-\newbox \floatlist
-\newbox \savedfloatlist
+\newcount\noffloatinserts % these will be redone ... handled at the lua end anyway
+
\newbox \floattext
\newdimen\floatwidth
@@ -65,55 +76,55 @@
\appendtoks
\ifcase\savednoffloats
- \global\somefloatwaitingfalse
+ \global\setfalse\c_page_floats_some_waiting
\else
- \global\somefloatwaitingtrue
+ \global\settrue\c_page_floats_some_waiting
\fi
\to \everyfloatscheck
-\def\dofloatsflush#1#2%
+\unexpanded\def\page_floats_flush#1#2%
{\ctxcommand{flushfloat("#1",\number#2)}%
\the\everyfloatscheck}
-\def\dofloatsflushbylabel#1#2%
+\unexpanded\def\page_floats_flush_by_label#1#2%
{\ctxcommand{flushfloat("#1","#2",true)}%
\the\everyfloatscheck}
-\def\dofloatssave#1%
+\unexpanded\def\page_floats_save#1%
{\ctxcommand{savefloat("#1")}%
\the\everyfloatscheck}
-\def\dofloatsresave#1%
+\unexpanded\def\page_floats_resave#1%
{\ctxcommand{resavefloat("#1")}%
\the\everyfloatscheck}
-\def\dopushsavedfloats
+\unexpanded\def\page_floats_push_saved
{\ctxcommand{pushfloat()}%
\the\everyfloatscheck}
-\def\dopopsavedfloats
+\unexpanded\def\page_floats_pop_saved
{\ctxcommand{popfloat()}%
\the\everyfloatscheck}
-\def\dofloatsgetinfo#1%
+\unexpanded\def\page_floats_get_info#1%
{\ctxcommand{consultfloat("#1")}}
-\def\doifelsesavedfloat#1%
+\unexpanded\def\page_floats_if_else#1%
{\ctxcommand{doifelsesavedfloat("#1")}}
-\def\dofloatscollect#1#2#3%
+\unexpanded\def\page_floats_collect#1#2#3%
{\ctxcommand{collectfloat("#1",\number\dimexpr#2,\number\dimexpr#3)}}
-\def\nofstackedfloatincategory#1%
+\unexpanded\def\nofstackedfloatincategory#1%
{\ctxcommand{nofstackedfloats("#1")}}
-\let\dopushcolumnfloats\dopushsavedfloats
-\let\dopopcolumnfloats \dopopsavedfloats
+\let\page_floats_column_push_saved\page_floats_push_saved % overloaded in page-mul
+\let\page_floats_column_pop_saved \page_floats_pop_saved % overloaded in page-mul
-\def\dofloatssavepagefloat#1#2%
+\unexpanded\def\page_floats_save_page_float#1#2%
{\ctxcommand{savefloat("#1", { specification = "#2" })}}
-\def\dofloatssavesomewherefloat#1#2% #1=method
+\unexpanded\def\page_floats_save_somewhere_float#1#2% #1=method
{\ctxcommand{savefloat("#1", { specification = "#2", label = "\floatlabel" })}}
%D This is an experimental new feature (for Alan Braslau), a prelude to more:
@@ -133,19 +144,19 @@
%D \placenamedfloat[figure][beta]
%D \stoptyping
-\def\placenamedfloat
- {\dodoubleargument\doplacenamedfloat}
+\unexpanded\def\placenamedfloat
+ {\dodoubleargument\page_floats_place_named}
-\def\doplacenamedfloat[#1][#2]%
+\def\page_floats_place_named[#1][#2]%
{\doloop
- {\dofloatsflushbylabel\s!somewhere{#2}%
+ {\page_floats_flush_by_label\s!somewhere{#2}%
\ifvoid\floatbox
\exitloop
\else
\def\currentfloat{#1}%
\blank[\rootfloatparameter\c!spacebefore]%
\box\floatbox
- \blank[\rootfloatparameter\c!spaceafter]
+ \blank[\rootfloatparameter\c!spaceafter]%
\fi}}
% \setupcaption [figure] [align=flushleft]
@@ -162,10 +173,10 @@
%
% \placefloatwithsetups[somefigure]{}{\externalfigure[dummy][width=5cm,height=2cm]}
-\def\placefloatwithsetups
- {\dotripleempty\doplacefloatwithsetups}
+\unexpanded\def\placefloatwithsetups
+ {\dotripleempty\page_floats_place_with_setups}
-\long\def\doplacefloatwithsetups[#1][#2][#3]#4%
+\def\page_floats_place_with_setups[#1][#2][#3]#4%
{\def\floatsetupcaption {#4}%
\def\floatsetupcontent {\copy\nextbox}%
\def\floatsetupwidth {\wd\nextbox}%
@@ -178,10 +189,10 @@
%D
%D First we reimplement some helpers.
-\def\dogetfloat
- {\ifsomefloatwaiting
- \dofloatsflush\s!text{1}%
- \ifcenterfloatbox
+\unexpanded\def\page_floats_get
+ {\ifconditional\c_page_floats_some_waiting
+ \page_floats_flush\s!text\plusone
+ \ifconditional\c_page_floats_center_box
\ifdim\wd\globalscratchbox<\hsize
\global\setbox\floatbox\hbox to \hsize{\hss\box\floatbox\hss}%
\else
@@ -198,40 +209,53 @@
\global\setbox\floatbox\emptybox
\fi}
-\def\dosavefloat
- {\dofloatssave\s!text
- \nonoindentation} % brrr nonoindentation here
-
-\def\doresavefloat
- {\dofloatsresave\s!text}
-
-\def\doreversesavefloat
- {\dofloatsresave\s!text}
-
-\def\doflushsavedfloats
+\unexpanded\def\page_floats_flush_saved
{\doloop
- {\ifsomefloatwaiting
+ {\ifconditional\c_page_floats_some_waiting
\page_otr_command_check_if_float_fits
- \ifroomforfloat
- \dogetfloat
+ \ifconditional\c_page_floats_room
+ \page_floats_get
\doplacefloatbox
\else
\exitloop
\fi
\else
-% \ifconditional\c_page_margin_blocks_present % not here, here just as many floats as fit
-% \page_otr_command_flush_margin_blocks
-% \else
+ % \ifconditional\c_page_margin_blocks_present % not here, here just as many floats as fit
+ % \page_otr_command_flush_margin_blocks
+ % \else
\exitloop
-% \fi
+ % \fi
\fi}}
+%D This is a future mechanism that will be integrated once we're sure about it:
+%D
+%D \starttyping
+%D \dorecurse{10}
+%D {\input thuan
+%D \placefigure{}{\framed[height=1.5cm]{test}}
+%D \placefloatplaceholder}
+%D \stoptyping
+
+\unexpanded\def\placefloatplaceholder
+ {\ifconditional\c_page_floats_room \else
+ \ifdim\dimexpr\pagegoal-\pagetotal-3\lineheight\relax>\zeropoint
+ \startlinecorrection[blank]
+ \mhbox{\inframed{\labeltexts{placeholder}{\lastcaptiontag}}}%
+ \stoplinecorrection
+ \else
+ \allowbreak
+ \fi
+ \fi}
+
+\setuplabeltext
+ [placeholder={\Word{\lastplacedfloat}~, moved}]
+
%D Page floats use different stacks.
-\newtoks \everybeforeflushedpagefloat
+\newtoks\everybeforeflushedpagefloat
-\def\doflushsomepagefloat#1% future releases can do more clever things
- {\dofloatsflush{#1}{1}%
+\def\page_floats_flush_page_floats_indeed#1% future releases can do more clever things
+ {\page_floats_flush{#1}\plusone
\edef\floatspecification{\ctxcommand{getfloatvariable("specification")}}% Is this okay?
\the\everybeforeflushedpagefloat
\vbox to \textheight
@@ -240,38 +264,15 @@
\doifnotinset\v!low\floatspecification\vfill}%
\page_otr_fill_and_eject_page}
-\def\doflushpagefloats
+\unexpanded\def\page_floats_flush_page_floats % used in postpone
{\edef\m_page_otf_checked_page_float{\ctxcommand{checkedpagefloat()}}% (true) for packed
\ifx\m_page_otf_checked_page_float\empty
% nothing
\else\ifx\m_page_otf_checked_page_float\v!empty
\emptyhbox \page_otr_fill_and_eject_page % why not dummy_page
\else
- \doflushsomepagefloat\m_page_otf_checked_page_float
+ \page_floats_flush_page_floats_indeed\m_page_otf_checked_page_float
\fi\fi}
-
-\def\uncenteredfloatbox % hm, where is this one used (was in save/restore, see old implementation)
- {\ifcenterfloatbox
- \ifhbox\floatbox\relax % remove centering
- \ifdim\wd\floatbox=\hsize
- \ifhbox\floatbox
- \setbox\scratchbox\hbox
- {\unhbox\floatbox
- \unskip\unskip
- \global\setbox\globalscratchbox\lastbox}%
- \box\globalscratchbox
- \else
- \box\floatbox
- \fi
- \else
- \box\floatbox
- \fi
- \else
- \box\floatbox
- \fi
- \else
- \box\floatbox
- \fi}
% temp hack, needed to prevent floatbox being forgotten during
% output, this will change to using another box for flushing
@@ -280,8 +281,6 @@
% \placefigure[top][]{Second}{\framed{bla 2}}
% \dorecurse {40}{text } \placefigure[top][]{Third} {\framed{bla 3}}
-\newbox\savedfloatbox
-
\appendtoks
\global\setbox\savedfloatbox\box\floatbox
\to \everybeforeoutput
@@ -291,3 +290,28 @@
\to \everyafteroutput
\protect \endinput
+
+% hm, where is this one used (was in save/restore, see old implementation)
+%
+% \unexpanded\def\uncenteredfloatbox
+% {\ifconditional\c_page_floats_center_box
+% \ifhbox\floatbox\relax % remove centering
+% \ifdim\wd\floatbox=\hsize
+% \ifhbox\floatbox
+% \setbox\scratchbox\hbox
+% {\unhbox\floatbox
+% \unskip\unskip
+% \global\setbox\globalscratchbox\lastbox}%
+% \box\globalscratchbox
+% \else
+% \box\floatbox
+% \fi
+% \else
+% \box\floatbox
+% \fi
+% \else
+% \box\floatbox
+% \fi
+% \else
+% \box\floatbox
+% \fi}
diff --git a/Master/texmf-dist/tex/context/base/page-flw.mkiv b/Master/texmf-dist/tex/context/base/page-flw.mkiv
index 9afacf5845f..ec1fa636df7 100644
--- a/Master/texmf-dist/tex/context/base/page-flw.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-flw.mkiv
@@ -14,93 +14,127 @@
\writestatus{loading}{ConTeXt Page Macros / Text Flows}
%D This is highly experimental and especially flushing may change (proper
-%D spacing is the driving force here).
+%D spacing is the driving force here). It's an old mechanism used for
+%D playing with stepping through document threads. As it's a simple and
+%D effective mechanism we keep it around. It's not to be confused with
+%D upcoming stream support.
+%D
+%D \starttyping
+%D \setuppapersize [S6]
+%D \setuptolerance [verytolerant,stretch]
+%D \setupfooter [strut=no]
+%D \setupwhitespace[big]
+%D
+%D \setuplayout
+%D [rightedge=5cm,width=fit,margin=0pt,edgedistance=1cm,
+%D footer=4cm,footerdistance=1cm,header=0cm]
+%D
+%D \setuptexttexts [edge][][\vbox{\flushtextflow{alfa}}]
+%D \setupfootertexts[edge][][\vbox{\flushtextflow{beta}}]
+%D \setupfootertexts [\vbox{\flushtextflow{gamma}}][]
+%D
+%D \definetextflow [alfa] [width=\rightedgewidth]
+%D \definetextflow [beta] [width=\rightedgewidth]
+%D \definetextflow [gamma] [width=\footerheight]
+%D
+%D \starttext
+%D
+%D \dorecurse{50}
+%D {\getrandomnumber{\funny}{0}{8}
+%D \ifcase\funny \starttextflow[alfa] \input tufte.tex \stoptextflow
+%D \or \starttextflow[beta] \input knuth.tex \stoptextflow
+%D \or \starttextflow[gamma] \input materie.tex \stoptextflow
+%D \or {\bf TUFTE}\quad \input tufte \par
+%D \or {\bf TUFTE}\quad \input tufte \par
+%D \or {\bf KNUTH}\quad \input knuth \par
+%D \or {\bf KNUTH}\quad \input knuth \par
+%D \or {\bf MATERIE}\quad \input materie \par
+%D \else {\bf MATERIE}\quad \input materie \par
+%D \fi}
+%D
+%D \stoptext
+%D \stoptyping
\unprotect
-\unexpanded\def\definetextflow
- {\dodoubleempty\dodefinetextflow}
+\installcorenamespace{textflow}
+\installcorenamespace{textflowbox}
-\def\dodefinetextflow[#1][#2]% flow settings
- {\iffirstargument
- \doiftextflowcollectorelse{#1}
- {\setbox\textflowcollector{#1}\emptybox}
- {\@EA\newbox\csname\??tx:c:#1\endcsname}%
- \getparameters[\??tx:p:#1]
- [\c!width=\hsize,\c!style=,#2]%
- \fi}
+\installcommandhandler \??textflow {textflow} \??textflow
-\def\textflowparameter#1#2{\csname\??tx:p:#1#2\endcsname}
-\def\textflowcollector #1{\csname\??tx:c:#1\endcsname}
+\setuptextflow
+ [%c!style=,
+ %c!color=,
+ \c!width=\availablehsize]
-\def\doiftextflowcollectorelse#1{\doifdefinedelse{\??tx:c:#1}}
+\appendtoks
+ \ifcsname\??textflowbox\currenttextflow\endcsname
+ \setbox\csname\??textflowbox\currenttextflow\endcsname\emptybox
+ \else
+ \expandafter\newbox\csname\??textflowbox\currenttextflow\endcsname
+ \fi
+\to \everydefinetextflow
-\def\doiftextflowelse#1%
- {\doiftextflowcollectorelse{#1}
- {\ifvoid\textflowcollector{#1}%
- \expandafter\secondoftwoarguments
- \else
- \expandafter\firstoftwoarguments
- \fi}
- {\secondoftwoarguments}}
+\let\b_page_textflow_box\zerocount
-\def\doiftextflow#1%
- {\doiftextflowelse{#1}\firstofoneargument\gobbleoneargument}
+\def\textflowcollector#1%
+ {\csname\??textflowbox#1\endcsname}
+
+\unexpanded\def\doiftextflowcollectorelse#1%
+ {\ifcsname\??textflowbox#1\endcsname
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\unexpanded\def\doiftextflowelse#1%
+ {\ifcsname\??textflowbox#1\endcsname
+ \ifvoid\csname\??textflowbox#1\endcsname
+ \doubleexpandafter\secondoftwoarguments
+ \else
+ \doubleexpandafter\firstoftwoarguments
+ \fi
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+% \unexpanded\def\doiftextflow#1%
+% {\doiftextflowelse{#1}\firstofoneargument\gobbleoneargument}
\unexpanded\def\starttextflow[#1]%
- {\doiftextflowcollectorelse{#1}
- {\global\setbox\textflowcollector{#1}\vbox
- \bgroup
- \unvbox\textflowcollector{#1}%
- \hsize\textflowparameter{#1}\c!width
- \dousestyleparameter{\textflowparameter{#1}\c!style}%
- \unexpanded\def\stoptextflow{\endgraf\egroup}}
- {\let\stoptextflow\relax}}
+ {\begingroup
+ \edef\currenttextflow{#1}%
+ \ifcsname\??textflowbox\currenttextflow\endcsname
+ \b_page_textflow_box\csname\??textflowbox\currenttextflow\endcsname
+ \global\setbox\b_page_textflow_box\vbox
+ \bgroup
+ \dontcomplain
+ \ifvoid\b_page_textflow_box\else
+ \unvbox\b_page_textflow_box
+ \fi
+ \hsize\textflowparameter\c!width\relax
+ \usetextflowstyleandcolor\c!style\c!color
+ \unexpanded\def\stoptextflow{\endgraf\egroup\endgroup}%
+ \else
+ \let\stoptextflow\endgroup
+ \fi}
-\def\flushtextflow#1%
- {\doiftextflow{#1}
- {\ifdim\ht\textflowcollector{#1}>\vsize
- \setbox\scratchbox\vsplit\textflowcollector{#1} to \vsize
- \unvbox\scratchbox
+\unexpanded\def\flushtextflow#1%
+ {\begingroup
+ \edef\currenttextflow{#1}%
+ \ifcsname\??textflowbox\currenttextflow\endcsname
+ \b_page_textflow_box\csname\??textflowbox\currenttextflow\endcsname
+ \ifvoid\b_page_textflow_box
+ % sorry
+ \else\ifdim\ht\b_page_textflow_box>\vsize
+ \setbox\scratchbox\vsplit\b_page_textflow_box to \vsize
+ \ifvoid\scratchbox\else
+ \unvbox\scratchbox
+ \fi
\else
- \unvbox\textflowcollector{#1}%
- \fi}}
+ \unvbox\b_page_textflow_box
+ \fi\fi
+ \fi
+ \endgroup}
\protect \endinput
-
-% Example (dutch)
-%
-% \stelpapierformaatin [S6]
-% \steltolerantiein [soepel,rek]
-% \stelkleurenin [status=start]
-% \stelvoetin [strut=nee]
-% \stelwitruimtein [groot]
-%
-% \stellayoutin
-% [rechterrand=5cm,breedte=passend,marge=0pt,randafstand=1cm,
-% voet=4cm,voetafstand=1cm,hoofd=0cm]
-%
-% \stelteksttekstenin[rand][][\vbox{\flushtextflow{alpha}}]
-% \stelvoettekstenin [rand][][\vbox{\flushtextflow{beta}}]
-% \stelvoettekstenin [\vbox{\flushtextflow{gamma}}][]
-%
-% \definetextflow [alfa] [breedte=\rechterrandbreedte]
-% \definetextflow [beta] [breedte=\rechterrandbreedte]
-% \definetextflow [gamma] [breedte=\voethoogte]
-%
-% \starttekst
-%
-% \dorecurse{50}
-% {\getrandomnumber{\funny}{0}{8}
-% \ifcase\funny \starttextflow[alfa] \input tufte.tex \stoptextflow
-% \or \starttextflow[beta] \input knuth.tex \stoptextflow
-% \or \starttextflow[gamma] \input materie.tex \stoptextflow
-% \or {\bf TUFTE}\quad \input tufte \par
-% \or {\bf TUFTE}\quad \input tufte \par
-% \or {\bf KNUTH}\quad \input knuth \par
-% \or {\bf KNUTH}\quad \input knuth \par
-% \or {\bf MATERIE}\quad \input materie \par
-% \else {\bf MATERIE}\quad \input materie \par
-% \fi}
-%
-% \stoptekst
diff --git a/Master/texmf-dist/tex/context/base/page-grd.mkiv b/Master/texmf-dist/tex/context/base/page-grd.mkiv
index e882978ca38..281d0bfbe3c 100644
--- a/Master/texmf-dist/tex/context/base/page-grd.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-grd.mkiv
@@ -70,9 +70,9 @@
{\hskip\layoutcolumnwidth
\ifnum\recurselevel<\layoutcolumns
\vrule
- \!!height\ht\scratchbox
- \!!depth\dp\scratchbox
- \!!width\layoutcolumndistance
+ \s!height\ht\scratchbox
+ \s!depth \dp\scratchbox
+ \s!width \layoutcolumndistance
\fi}}%
\hskip-\makeupwidth
\fi
diff --git a/Master/texmf-dist/tex/context/base/page-imp.mkiv b/Master/texmf-dist/tex/context/base/page-imp.mkiv
index 9d0f722fe28..c22e9e646c9 100644
--- a/Master/texmf-dist/tex/context/base/page-imp.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-imp.mkiv
@@ -71,7 +71,7 @@
\let\installpagehandler\installshipoutmethod % will go
\unexpanded\def\invokepagehandler#1%
- {\expandcheckedcsname{\??shipoutmethod}{#1}\v!normal}
+ {\expandnamespacevalue\??shipoutmethod{#1}\v!normal}
\def\page_shipouts_handle
{\csname\??shipoutmethod\ifcsname\??shipoutmethod\v_page_target_method\endcsname
@@ -90,18 +90,33 @@
\installshipoutmethod \v!none
{\page_shipouts_ignore}
+% extension mechanism
+
+\newcount\c_page_boxes_flush_n % set at the lua end
+
+\let\page_boxes_flush_before\relax
+\let\page_boxes_flush_after \relax
+
+% used here:
+
\def\page_boxes_shipout#1% or: \page_shipouts_apply
- {\dontcomplain % redundant
- \the\everybeforeshipout
- \ifcase\shipoutfinalizemethod
- \page_shipouts_handle{#1}%
- \else
- \setbox\shipoutscratchbox\hbox{#1}% just in case there are objects there, hook for testing (will go away)
- \finalizeshipoutbox\shipoutscratchbox
- \page_shipouts_handle{\box\shipoutscratchbox}%
- \fi
- \setnextrealpageno % so this comes before \everyaftershipout so in fact:
- \the\everyaftershipout} % at this point we're already on the next realpage
+ {\dontcomplain % redundant
+ \ifcase\c_page_boxes_flush_n\else
+ \page_boxes_flush_before
+ \fi
+ \the\everybeforeshipout
+ \ifcase\shipoutfinalizemethod
+ \page_shipouts_handle{#1}%
+ \else
+ \setbox\shipoutscratchbox\hbox{#1}% just in case there are objects there, hook for testing (will go away)
+ \finalizeshipoutbox\shipoutscratchbox
+ \page_shipouts_handle{\box\shipoutscratchbox}%
+ \fi
+ \setnextrealpageno % so this comes before \everyaftershipout so in fact:
+ \the\everyaftershipout % at this point we're already on the next realpage
+ \ifcase\c_page_boxes_flush_n\else
+ \page_boxes_flush_after
+ \fi}
\def\page_shipouts_ignore#1%
{\begingroup
@@ -148,8 +163,7 @@
\donetrue
\fi
\else % test, adapted / expanded needed ?
- \normalexpanded{\doifinsetelse{\the\shippedoutpages}{\pagestoshipout}}%
- \donetrue\donefalse
+ \normalexpanded{\doifinsetelse{\the\shippedoutpages}{\pagestoshipout}}\donetrue\donefalse
\fi
\ifdone
\setbox\shipoutscratchbox\hbox{#1}%
@@ -1092,7 +1106,73 @@
\or \handlearrangedpageXandY{#1}000\arrangedpageA % 12
\poparrangedpages
\fi}
-
+
+% % From Wolfgang for Mari (mail on list) ... yes or no in core .. time for
+% % delayed loading ...
+% %
+% % http://stamphenge.wordpress.com/minibooks/meander-accordion-folded-book/
+%
+% \installpagearrangement MEANDER16
+% {\dosetuparrangement{4}{4}{16}{5}{5}%
+% \pusharrangedpageMEANDERSIXTEEN\poparrangedpagesXY\relax}
+%
+% \def\pusharrangedpageMEANDERSIXTEEN#1%
+% {\advancearrangedpageN
+% \reportarrangedpage\arrangedpageN
+% \ifcase\arrangedpageN
+% \or \handlearrangedpageXandY{#1}000\arrangedpageA
+% \or \handlearrangedpageXandY{#1}010\arrangedpageA
+% \or \handlearrangedpageXandY{#1}020\arrangedpageA
+% \or \handlearrangedpageXandY{#1}030\arrangedpageA
+% \or \handlearrangedpageXandY{#1}131\arrangedpageA
+% \or \handlearrangedpageXandY{#1}121\arrangedpageA
+% \or \handlearrangedpageXandY{#1}111\arrangedpageA
+% \or \handlearrangedpageXandY{#1}101\arrangedpageA
+% \or \handlearrangedpageXandY{#1}002\arrangedpageA
+% \or \handlearrangedpageXandY{#1}012\arrangedpageA
+% \or \handlearrangedpageXandY{#1}022\arrangedpageA
+% \or \handlearrangedpageXandY{#1}032\arrangedpageA
+% \or \handlearrangedpageXandY{#1}133\arrangedpageA
+% \or \handlearrangedpageXandY{#1}123\arrangedpageA
+% \or \handlearrangedpageXandY{#1}113\arrangedpageA
+% \or \handlearrangedpageXandY{#1}103\arrangedpageA
+% \poparrangedpages
+% \fi}
+%
+% % \definepapersize[small][width=6cm,height=6cm]
+% % \definepapersize[big][width=30cm,height=30cm]
+% % \setuppapersize[small][big]
+% % \setuppagenumbering[location=]
+% % \setuparranging[MEANDER16]
+% % \setuplayout
+% % [location=middle,
+% % marking=on]
+% % \starttext
+% % \dorecurse{32}{\centerbox{\ssd\recurselevel}}
+% % \stoptext
+%
+% % By Willi:
+%
+% \installpagearrangement MEANDER9
+% {\dosetuparrangement{3}{3}{9}{4}{4}%
+% \pusharrangedpageMEANDERNINE\poparrangedpagesXY\relax}
+%
+% \def\pusharrangedpageMEANDERNINE#1%
+% {\advancearrangedpageN
+% \reportarrangedpage\arrangedpageN
+% \ifcase\arrangedpageN
+% \or \handlearrangedpageXandY{#1}000\arrangedpageA
+% \or \handlearrangedpageXandY{#1}010\arrangedpageA
+% \or \handlearrangedpageXandY{#1}020\arrangedpageA
+% \or \handlearrangedpageXandY{#1}121\arrangedpageA
+% \or \handlearrangedpageXandY{#1}111\arrangedpageA
+% \or \handlearrangedpageXandY{#1}101\arrangedpageA
+% \or \handlearrangedpageXandY{#1}002\arrangedpageA
+% \or \handlearrangedpageXandY{#1}012\arrangedpageA
+% \or \handlearrangedpageXandY{#1}022\arrangedpageA
+% \poparrangedpages
+% \fi}
+
% % handy for stickers etc, this way we can treat them as page
%
% \setuppapersize [XY][A4]
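
The extension mechanism added to \page_boxes_shipout above is deliberately cheap: the before/after hooks are only called when \c_page_boxes_flush_n is non-zero, and that counter is kept in sync with the pending cache at the Lua end (see page-inj.lua below). A minimal sketch of that bookkeeping in Lua, assuming a LuaTeX run where the count register exists as created by \newcount above; save and drop are illustrative names, not part of the module:

    local cache = { }

    local function save(entry)     -- queue an injection for a later shipout
        cache[#cache+1] = entry
        tex.setcount("global", "c_page_boxes_flush_n", #cache)
    end

    local function drop(remaining) -- after a flush pass, keep only what is left
        cache = remaining
        tex.setcount("global", "c_page_boxes_flush_n", #cache)
    end

When the counter is zero the \ifcase branches above fall through, so pages without pending injections pay nothing extra at shipout time.
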
diff --git a/Master/texmf-dist/tex/context/base/page-inf.mkiv b/Master/texmf-dist/tex/context/base/page-inf.mkiv
index 6c220fb310c..acfbf26f6ab 100644
--- a/Master/texmf-dist/tex/context/base/page-inf.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-inf.mkiv
@@ -24,7 +24,7 @@
\let\currentversioninfo\empty
-\def\setupversion
+\unexpanded\def\setupversion
{\dosingleargument\page_info_setup}
\let\version\setupversion
@@ -52,7 +52,7 @@
{\csname\??layoutinfo#1\endcsname}
\installversioninfo\v!concept
- {\vskip\!!sixpoint
+ {\vskip6\points
\hbox to \makeupwidth
{\infofont
\v!concept:\space\currentdate
@@ -60,7 +60,7 @@
\page_adapts_status_info}}
\installversioninfo\v!file
- {\vskip\!!sixpoint
+ {\vskip6\points
\hbox to \makeupwidth
{\infofont
\getmessage\m!system{27}:\space\currentdate\space
diff --git a/Master/texmf-dist/tex/context/base/page-ini.mkiv b/Master/texmf-dist/tex/context/base/page-ini.mkiv
index aad08ef3e61..fdffa552d5d 100644
--- a/Master/texmf-dist/tex/context/base/page-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-ini.mkiv
@@ -56,18 +56,18 @@
\newbox\pagebox
-\ifx\mkprocesscolumncontents\undefined\let\mkprocesscolumncontents\gobbleoneargument\fi
-\ifx\mkprocesspagecontents \undefined\let\mkprocesspagecontents \gobbleoneargument\fi
-\ifx\mkprocessboxcontents \undefined\let\mkprocessboxcontents \gobbleoneargument\fi
+\ifdefined\page_postprocessors_column \else \let\page_postprocessors_column\gobbleoneargument \fi
+\ifdefined\page_postprocessors_page \else \let\page_postprocessors_page \gobbleoneargument \fi
+\ifdefined\page_postprocessors_box \else \let\page_postprocessors_box \gobbleoneargument \fi
%D Floats.
\def\page_otr_flush_all_floats
{%\flushnotes already done
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\begingroup
- \noftopfloats\plusthousand
- \nofbotfloats\zerocount
+ \c_page_floats_n_of_top\plusthousand
+ \c_page_floats_n_of_bottom\zerocount
% this is needed in case a float that has been stored
% ends up at the current page; this border case occurs when
% the calculated room is 'eps' smaller that the room available
@@ -104,10 +104,11 @@
\inpagebodytrue % needed for enabling \blank ! brrr
\page_otr_command_flush_saved_floats
\page_otr_command_set_vsize % this is needed for interacting components, like floats and multicolumns
- \doincrementpageboundcounters % should hook into an every
+ \strc_pagenumbers_increment_counters % should hook into an every
\page_adapts_synchronize
\page_otr_check_for_pending_inserts
- \doflushspread
+ \page_floats_flush_page_floats % before postponed blocks
+ \page_spread_flush % defined later
\page_postponed_blocks_flush}
% Can't we get rid of this hackery? It's used in some widgets
@@ -215,7 +216,6 @@
{\vbox\bgroup % intercept spurious spaces
\the\everybeforepagebody
\starttextproperties
- \dontshowboxes
\checkmarginblocks
\the\beforeeverypage
\normalexpanded{\global\beforepage\emptytoks\the\beforepage}%
@@ -229,4 +229,32 @@
\the\everyafterpagebody
\egroup}
+\def\doiftopofpageelse
+ {\ifdim\pagegoal=\maxdimen
+ \expandafter\firstoftwoarguments
+ \else\ifdim\pagegoal=\vsize
+ \doubleexpandafter\firstoftwoarguments
+ \else
+ \doubleexpandafter\secondoftwoarguments
+ \fi\fi}
+
+% %D Idea:
+%
+% \newinsert\thispageinsert % <- installinsertion
+%
+% \def\flushatthispage
+% {\bgroup
+% \dowithnextbox{\insert\thispageinsert{\box\nextbox}\egroup}%
+% \hbox}
+%
+% \appendtoks
+% \ifvoid\thispageinsert\else\hbox{\smashedbox\thispageinsert}\fi
+% \to \everyshipout
+%
+% %D Idea:
+%
+% \definemarkedpage[nobackgrounds]
+% \markpage[nobackgrounds]
+% \doifmarkedpageelse{nobackgrounds}
+
\protect \endinput
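
The new \doiftopofpageelse above treats a page as still empty when \pagegoal equals \maxdimen (nothing has been contributed to the main vertical list yet) or when it still equals \vsize. The same test can be expressed at the Lua end; a sketch assuming a LuaTeX run where the page builder state is reachable as tex.pagegoal and tex.vsize (at_top_of_page is an illustrative name):

    local maxdimen = 1073741823    -- 16383.99998pt expressed in scaled points

    local function at_top_of_page()
        return tex.pagegoal == maxdimen or tex.pagegoal == tex.vsize
    end
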
diff --git a/Master/texmf-dist/tex/context/base/page-inj.lua b/Master/texmf-dist/tex/context/base/page-inj.lua
new file mode 100644
index 00000000000..5b450d60ece
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-inj.lua
@@ -0,0 +1,101 @@
+if not modules then modules = { } end modules ["page-inj"] = {
+ version = 1.000,
+ comment = "Page injections",
+ author = "Wolfgang Schuster & Hans Hagen",
+ copyright = "Wolfgang Schuster & Hans Hagen",
+ license = "see context related readme files",
+}
+
+-- Adapted a bit by HH: numbered states, tracking, delayed, order, etc.
+
+local injections = pagebuilders.injections or { }
+pagebuilders.injections = injections
+
+local report = logs.reporter("pagebuilder","injections")
+local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end)
+
+local variables = interfaces.variables
+
+local v_yes = variables.yes
+local v_previous = variables.previous
+local v_next = variables.next
+
+local order = 0
+local cache = { }
+
+function injections.save(specification) -- maybe not public, just commands.*
+ order = order + 1
+ cache[#cache+1] = {
+ order = order,
+ name = specification.name,
+ state = tonumber(specification.state) or specification.state,
+ parameters = specification.userdata,
+ }
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+end
+
+function injections.flushbefore() -- maybe not public, just commands.*
+ if #cache > 0 then
+ local delayed = { }
+ context.unprotect()
+ for i=1,#cache do
+ local c = cache[i]
+ local oldstate = c.state
+ if oldstate == v_previous then
+ if trace then
+ report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
+ end
+ context.page_injections_flush_saved(c.name,c.parameters)
+ elseif type(oldstate) == "number" and oldstate < 0 then
+ local newstate = oldstate + 1
+ if newstate >= 0 then
+ newstate = v_previous
+ end
+ if trace then
+ report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
+ end
+ c.state = newstate
+ delayed[#delayed+1] = c
+ else
+ delayed[#delayed+1] = c
+ end
+ end
+ context.protect()
+ cache = delayed
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+ end
+end
+
+function injections.flushafter() -- maybe not public, just commands.*
+ if #cache > 0 then
+ local delayed = { }
+ context.unprotect()
+ for i=1,#cache do
+ local c = cache[i]
+ local oldstate = c.state
+ if oldstate == v_next then
+ if trace then
+ report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
+ end
+ context.page_injections_flush_saved(c.name,c.parameters)
+ elseif type(oldstate) == "number" and oldstate > 0 then
+ local newstate = oldstate - 1
+ if newstate <= 0 then
+ newstate = v_next
+ end
+ if trace then
+ report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
+ end
+ c.state = newstate
+ delayed[#delayed+1] = c
+ end
+ end
+ context.protect()
+ cache = delayed
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+ end
+end
+
+commands.page_injections_save = injections.save
+commands.page_injections_flush_after = injections.flushafter
+commands.page_injections_flush_before = injections.flushbefore
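
The state handling in injections.flushbefore and injections.flushafter amounts to a small counting machine: entries saved with state previous or next flush on the matching side of the shipout, while numeric states step toward zero and then turn into previous (negative values) or next (positive values). A standalone sketch in plain Lua, with step and show as illustrative names and none of the context/tex plumbing:

    local v_previous, v_next = "previous", "next"

    local function step(cache,side,flush)   -- side is "before" or "after"
        local delayed = { }
        for i=1,#cache do
            local c     = cache[i]
            local state = c.state
            if side == "before" and state == v_previous then
                flush(c)                    -- injected before the current page
            elseif side == "after" and state == v_next then
                flush(c)                    -- injected after the current page
            else
                if side == "before" and type(state) == "number" and state < 0 then
                    c.state = state + 1 == 0 and v_previous or state + 1
                elseif side == "after" and type(state) == "number" and state > 0 then
                    c.state = state - 1 == 0 and v_next or state - 1
                end
                delayed[#delayed+1] = c     -- kept for a later shipout
            end
        end
        return delayed
    end

    local function show(c) print("flushing " .. c.name) end

    local cache = { { name = "dummy", state = 2 } }
    cache = step(cache,"after",show)        -- state becomes 1
    cache = step(cache,"after",show)        -- state becomes "next"
    cache = step(cache,"after",show)        -- prints: flushing dummy

This is also what makes the commented example in page-inj.mkvi below work, where an entry saved with state=+2 rides along for two shipouts before being flushed.
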
diff --git a/Master/texmf-dist/tex/context/base/page-inj.mkvi b/Master/texmf-dist/tex/context/base/page-inj.mkvi
new file mode 100644
index 00000000000..bee56468346
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-inj.mkvi
@@ -0,0 +1,217 @@
+%D \module
+%D [ file=page-inj,
+%D version=2013.02.10,
+%D title=\CONTEXT\ Page Module,
+%D subtitle=Injections,
+%D author=Wolfgang Schuster \& Hans Hagen,
+%D date=\currentdate,
+%D copyright=Wolfgang Schuster \& Hans Hagen,
+%D license=see context related readme files (gpl)]
+
+%D This module is based on an idea and prototype by Wolfgang but instead of
+%D injecting into a page constructor we inject it into the regular shipout
+%D handler because that way we get page numbering. It's a sort of variation
+%D on postponing but the content does not end up in the regular page flow, so
+%D it's closer to page figures but without the usual page builder.
+%D
+%D This module follows the mkvi way of rendering setups and configuration
+%D and is somewhat experimental. Probably more alternatives will be added. This
+%D could become a delay-loaded module at some point (no real consequences).
+
+\unprotect
+
+\registerctxluafile{page-inj}{1.001}
+
+\installcorenamespace {pageinjection}
+\installcorenamespace {pageinjectionalternative}
+\installcorenamespace {pageinjectionrenderings}
+
+\installframedcommandhandler \??pageinjection {pageinjection} \??pageinjection
+\installcommandhandler \??pageinjectionalternative {pageinjectionalternative} \??pageinjectionalternative
+
+\def\page_boxes_flush_before{\ctxcommand{page_injections_flush_before()}}
+\def\page_boxes_flush_after {\ctxcommand{page_injections_flush_after ()}}
+
+\def\page_injections_flush_saved#name#parameters%
+ {\begingroup
+ \edef\currentpageinjection{#name}%
+ \getdummyparameters[#parameters]%
+ \edef\currentpageinjectionalternative {\pageinjectionparameter \c!alternative }%
+ \edef\p_page_injectionalternative_rederingsetup{\pageinjectionalternativeparameter\c!renderingsetup}%
+ \page_injections_nextpage
+ \invokepagehandler\v!normal{\inheritedpageinjectionframed{\texsetup\p_page_injectionalternative_rederingsetup}}%
+ \endgroup}
+
+\unexpanded\def\pageinjection
+ {\begingroup
+ \dodoubleempty\page_injections_direct}
+
+\def\page_injections_direct[#1][#2]% name parameters | settings parameters | name | parameters
+ {\doifassignmentelse{#1}
+ {\doifassignmentelse{#2}
+ {\page_injections_direct_settings_parameters{#1}{#2}}
+ {\page_injections_direct_parameters {#1}}}
+ {\doifassignmentelse{#2}
+ {\page_injections_direct_name_parameters {#1}{#2}}
+ {\page_injections_direct_name {#1}}}}
+
+\def\page_injections_direct_settings_parameters#settings% #parameters%
+ {\let\currentpageinjection\empty
+ \checkpageinjectionparent
+ \setupcurrentpageinjection[#settings]%
+ \page_injections_direct_indeed}
+
+\def\page_injections_direct_parameters% #parameters%
+ {\let\currentpageinjection\empty
+ \checkpageinjectionparent
+ \page_injections_direct_indeed}
+
+\def\page_injections_direct_name_parameters#name% #parameters%
+ {\edef\currentpageinjection{#name}%
+ \checkpageinjectionparent
+ \page_injections_direct_indeed}
+
+\def\page_injections_direct_name#name%
+ {\edef\currentpageinjection{#name}%
+ \checkpageinjectionparent
+ \page_injections_direct_indeed{}}
+
+\def\page_injections_direct_indeed% #parameters%
+ {\edef\p_page_injections_state{\pageinjectionparameter\c!state}%
+ \edef\p_page_injections_delay{\pageinjectionparameter\c!delay}%
+ \edef\p_page_injections_page {\pageinjectionparameter\c!page }%
+ \ifx\p_page_injections_delay\v!yes
+ \expandafter\page_injections_direct_indeed_yes
+ \else
+ \expandafter\page_injections_direct_indeed_nop
+ \fi}
+
+\def\page_injections_direct_indeed_yes#parameters%
+ {\ctxcommand{page_injections_save{
+ name = "\currentpageinjection",
+ state = "\p_page_injections_state",
+ userdata = \!!bs\normalunexpanded{#parameters}\!!es
+ }}%
+ \endgroup}
+
+\def\page_injections_direct_indeed_nop#parameters%
+ {\ifx\p_page_injections_page\empty
+ \page
+ \else
+ \page[\p_page_injections_page]%
+ \fi
+ \getdummyparameters[#parameters]%
+ \page_injections_place
+ \endgroup}
+
+\def\page_injections_place
+ {\edef\currentpageinjectionalternative {\pageinjectionparameter \c!alternative }%
+ \edef\p_page_injectionalternative_rederingsetup{\pageinjectionalternativeparameter\c!renderingsetup}%
+ \page_injections_nextpage
+ \ifx\currentpageinjectionalternative\v!none \else % increment counter but don’t generate output
+ \invokepagehandler\v!normal{\inheritedpageinjectionframed{\texsetup\p_page_injectionalternative_rederingsetup}}%
+ \fi}
+
+\def\page_injections_nextpage
+ {\edef\p_page_injections_pagestate{\pageinjectionparameter\c!pagestate}%
+ \ifx\p_page_injections_pagestate\v!stop \else
+ \edef\p_page_injections_n{\pageinjectionparameter\c!n}%
+ \ifx\p_page_injections_n\empty
+ \let\p_page_injections_n\plusone
+ \fi
+ \dorecurse\p_page_injections_n
+ {\incrementcounter[\s!realpage]%
+ \incrementcounter[\s!userpage]}%
+ \fi}
+
+\definepageinjectionalternative[\v!figure][\c!renderingsetup=\??pageinjectionrenderings:\v!figure]
+\definepageinjectionalternative[\v!buffer][\c!renderingsetup=\??pageinjectionrenderings:\v!buffer]
+\definepageinjectionalternative[\v!setups][\c!renderingsetup=\??pageinjectionrenderings:\s!setup ]
+
+\startsetups[\??pageinjectionrenderings:\v!figure]
+ \dontleavehmode\externalfigure[\dummyparameter\c!name][\c!factor=\v!max]
+\stopsetups
+
+\startsetups[\??pageinjectionrenderings:\s!setup]
+ \setups[\dummyparameter\c!setup]% kind of redundant but fits in
+\stopsetups
+
+\startsetups[\??pageinjectionrenderings:\v!buffer]
+ \getbuffer[\dummyparameter\c!name]%
+\stopsetups
+
+% \setuppaper
+% [\c!method=\v!pageinjection]
+
+\setuppageinjection
+ [ \c!width=\paperwidth,
+ \c!height=\paperheight,
+ \c!frame=\v!off,
+ \c!offset=\v!overlay,
+ \c!delay=\v!no,
+ \c!state=\v!next,
+ \c!alternative=\v!figure]
+
+\definepageinjection[\v!previouspage][\c!delay=\v!yes,\c!state=\v!previous]
+\definepageinjection[\v!nextpage] [\c!delay=\v!yes,\c!state=\v!next]
+\definepageinjection[\v!here] % direct
+\definepageinjection[\v!empty] [\c!alternative=\v!none]
+
+%definepageinjection[\v!chapter] [\v!previouspage][\c!pagestate=\v!stop]
+
+\protect \endinput
+
+% \enabletrackers[pagebuilder.injections]
+%
+% \setuppageinjection[offset=2em]
+%
+% \definepageinjection[chapter][previouspage][pagestate=stop]
+% \definepageinjection[later] [delay=yes,state=+2]
+%
+% \useMPlibrary[dum]
+%
+% \starttext
+%
+% \startchapter[title={Insert pages before/after the current page}]
+%
+% \dorecurse{10}{one: \input ward\par}
+%
+% \pageinjection[previouspage][name=dummy-1]
+%
+% \dorecurse{20}{two: \input ward\par} \page
+%
+% \pageinjection[nextpage][name=dummy-2]
+% \pageinjection[later] [name=dummy-4]
+% \pageinjection[delay=yes,state=+2][name=dummy-5]
+%
+% \dorecurse{100}{three: \input ward\par}
+%
+% \stopchapter
+%
+% \startbuffer[whatever]
+% \dorecurse{10}{whatever #1 }
+% \stopbuffer
+%
+% \startchapter[title={Insert pages at the current location}]
+%
+% \dorecurse{10}{\input ward\par}
+%
+% \pageinjection[direct][name=dummy-3]
+% \pageinjection[direct][alternative=buffer,name=whatever]
+%
+% \dorecurse{10}{\input ward\par}
+%
+% \stopchapter
+%
+% \setuphead
+% [chapter]
+% [beforesection={\pageinjection[empty]},
+% insidesection={\pageinjection[chapter][name=\structureuservariable{image}]}]
+%
+% \startchapter[title={Combine both mechanisms}][name=dummy-4]
+%
+% \dorecurse{10}{\input ward\par}
+%
+% \stopchapter
+%
+% \stoptext
diff --git a/Master/texmf-dist/tex/context/base/page-ins.lua b/Master/texmf-dist/tex/context/base/page-ins.lua
new file mode 100644
index 00000000000..7f870735d27
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-ins.lua
@@ -0,0 +1,97 @@
+if not modules then modules = { } end modules ['page-ins'] = {
+ version = 1.001,
+ comment = "companion to page-mix.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ -- public = {
+ -- functions = {
+ -- "inserts.define",
+ -- "inserts.getdata",
+ -- },
+ -- commands = {
+ -- "defineinsertion",
+ -- "inserttionnumber",
+ -- }
+ -- }
+}
+
+-- Maybe we should only register in lua and forget about the tex end.
+
+structures = structures or { }
+structures.inserts = structures.inserts or { }
+local inserts = structures.inserts
+
+local report_inserts = logs.reporter("inserts")
+
+local allocate = utilities.storage.allocate
+
+inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the
+inserts.data = inserts.data or allocate { } -- bytecode storage pool
+
+local variables = interfaces.variables
+local v_page = variables.page
+local v_columns = variables.columns
+local v_firstcolumn = variables.firstcolumn
+local v_lastcolumn = variables.lastcolumn
+local v_text = variables.text
+
+storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
+
+local data = inserts.data
+local stored = inserts.stored
+
+for name, specification in next, stored do
+ data[specification.number] = specification
+ data[name] = specification
+end
+
+function inserts.define(name,specification)
+ specification.name = name
+ local number = specification.number or 0
+ data[name] = specification
+ data[number] = specification
+ -- only needed at runtime as this gets stored in a bytecode register
+ stored[name] = specification
+ if not specification.location then
+ specification.location = v_page
+ end
+ return specification
+end
+
+function inserts.setup(name,settings)
+ local specification = data[name]
+ for k, v in next, settings do
+ -- maybe trace change
+ specification[k] = v
+ end
+ return specification
+end
+
+function inserts.setlocation(name,location) -- a practical fast one
+ data[name].location = location
+end
+
+function inserts.getlocation(name,location)
+ return data[name].location or v_page
+end
+
+function inserts.getdata(name) -- or number
+ return data[name]
+end
+
+function inserts.getname(number)
+ return data[number].name
+end
+
+function inserts.getnumber(name)
+ return data[name].number
+end
+
+-- interface
+
+commands.defineinsertion = inserts.define
+commands.setupinsertion = inserts.setup
+commands.setinsertionlocation = inserts.setlocation
+commands.insertionnumber = function(name) context(data[name].number or 0) end
+
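
Note that inserts.define files each specification under two keys, the insertion's name and its \newinsert register number (allocated at the TeX end, see page-ins.mkiv below), so both ends reach the same table; the stored copy is what survives into the format via the storage mechanism. A plain Lua sketch of just that lookup scheme (define here is a stripped-down stand-in, not the function above):

    local data, stored = { }, { }

    local function define(name,specification)
        specification.name     = name
        specification.location = specification.location or "page"
        data[name]                 = specification
        data[specification.number] = specification -- same table, second key
        stored[name]               = specification -- survives into the format
        return specification
    end

    define("footnote", { number = 16 })

    print(data["footnote"].number)    -- 16
    print(data[16].name)              -- footnote
    print(data[16] == data.footnote)  -- true
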
diff --git a/Master/texmf-dist/tex/context/base/page-ins.mkiv b/Master/texmf-dist/tex/context/base/page-ins.mkiv
index 793e28d4c4b..a63de0b26f2 100644
--- a/Master/texmf-dist/tex/context/base/page-ins.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-ins.mkiv
@@ -13,81 +13,164 @@
\writestatus{loading}{ConTeXt Core Macros / Insertions}
-%D Insertions are special data collections that are associated
-%D to \TEX's internal page builder. When multiple footnote
-%D classes were introduced, I decided to isolate some of the
-%D functionality in a module.
+%D Insertions are special data collections that are associated to \TEX's internal
+%D page builder. When multiple footnote classes were introduced, I decided to
+%D isolate some of the functionality in a module.
+
+\registerctxluafile{page-ins}{1.001}
\unprotect
-\newtoks\@@insertionlist
+%D Because we need to deal with inserts at the \LUA\ end as well,
+%D we provide a proper installer.
-\def\processinsertions{\the\@@insertionlist}
+% Not yet used as we need to adapt some code to this.
-\let\doprocessinsert\gobbleoneargument
+\installcorenamespace{insertion}
+\installcorenamespace{insertionnumber}
-\def\installinsertion#1%
- {\ifdefined#1\else
- \let#1\relax
- \fi
- \ifx#1\relax % permits \csname...\endcsname
- \newinsert#1%
- \count#1\plusthousand
- \skip #1\zeropoint
- \dimen#1\maxdimen
- \appendtoks\doprocessinsert#1\to\@@insertionlist
- \fi}
+\installcommandhandler \??insertion {insertion} \??insertion
-\def\synchronizeinsertions
- {\def\doprocessinsert##1{\ifvoid##1\else\insert##1{\unvbox##1}\fi}%
- \processinsertions}
+\setupinsertion
+ [%c!n=\plusone,
+ %c!distance=\zeropoint,
+ %c!maxheight=\maxdimen,
+ %c!factor=\plusthousand,
+ \c!location=\v!page]
-%D For instance, when we postpone footnotes, we need to save
-%D some data related to the inserts. The next methods are
-%D far from ideal, but better than nothing. We save and
-%D restore box content and associated data independently.
-%D The box content is only restores when non||void.
+\newcount\currentinsertionnumber % This is a count and not a macro !
-\def\backupinsertion#1%
- {\csname\string#1\endcsname}
+\newtoks\t_page_inserts_list
-\def\installbackupinsertion#1%
- {\expandafter\newinsert\csname\string#1\endcsname
- \count\backupinsertion#1\zerocount
- \skip \backupinsertion#1\zeropoint
- \dimen\backupinsertion#1\maxdimen}
+\let\doprocessinsert\relax
-\def\saveinsertionbox#1%
- {\ifdim\ht#1>\zeropoint % hm, actually unknown
- \global\setbox\backupinsertion#1\box#1%
- \else
- \global\setbox\backupinsertion#1\emptybox
- \fi}
+%D Maybe some day we will move settings here.
-\def\restoreinsertionbox#1%
- {\ifvoid\backupinsertion#1\else % if void, we keep the content
- \global\setbox#1\box\backupinsertion#1%
- \fi}
+\unexpanded\def\setcurrentinsertion#1%
+ {\edef\currentinsertion{#1}%
+ \currentinsertionnumber\csname\??insertionnumber\currentinsertion\endcsname}
-\def\eraseinsertionbackup#1%
- {\global\setbox\backupinsertion#1\emptybox}
+\def\namedinsertionnumber#1{\csname\??insertionnumber#1\endcsname}
-\def\saveinsertiondata#1%
- {\global\skip \backupinsertion#1\skip #1%
- \global\count\backupinsertion#1\count#1%
- \global\dimen\backupinsertion#1\dimen#1}
+\unexpanded\def\page_inserts_synchronize_registers
+ {\currentinsertionnumber\csname\??insertionnumber\currentinsertion\endcsname}
-\def\restoreinsertiondata#1%
- {\global\skip #1\skip \backupinsertion#1%
- \global\count#1\count\backupinsertion#1%
- \global\dimen#1\dimen\backupinsertion#1}
+% for practical reasons we still set these elsewhere but that might change in the future
+%
+% \global\count\currentinsertionnumber\numexpr\insertionparameter\c!factor/\insertionparameter\c!n\relax
+% \global\skip \currentinsertionnumber\insertionparameter\c!distance \relax
+% \global\dimen\currentinsertionnumber\insertionparameter\c!maxheight\relax}
-%D Auxiliary macros:
+\appendtoks
+ \page_inserts_synchronize_registers
+\to \everysetupinsertion
+
+\unexpanded\def\page_inserts_process#1% beware, this adapts currentinsertion !
+ {\edef\currentinsertion{#1}%
+ \currentinsertionnumber\csname\??insertionnumber\currentinsertion\endcsname
+ \doprocessinsert\currentinsertionnumber} % old method
+
+\unexpanded\def\processinsertions
+ {\the\t_page_inserts_list}
-\def\addinsertionheight#1\to#2%
+\unexpanded\def\synchronizeinsertions
+ {\let\doprocessinsert\page_inserts_synchronize
+ \processinsertions}
+
+\unexpanded\def\page_inserts_synchronize#1% yes or no
{\ifvoid#1\else
- \advance#2 1\skip#1\relax
- \advance#2 \ht #1\relax
+ \insert#1{\unvbox#1}%
+ \fi}
+
+\unexpanded\def\doifinsertionelse#1%
+ {\ifcsname\??insertionnumber#1\endcsname
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
\fi}
+% \unexpanded\def\startinsertion[#1]%
+% {\insert\csname\??insertionnumber#1\endcsname\bgroup}
+%
+% \unexpanded\def\stopinsertion
+% {\egroup}
+
+% For the moment we use the regular insertion allocator so that users can
+% still define their own insertions (not that they will play nicely with
+% all context mechanisms then). We can use the dimensions at the \LUA\ end
+% so we don't need to pass them explicitly. Actually, when we see an
+% insertion node at that end, we already know the number.
+
+\appendtoks
+ \ifx\currentinsertionparent\empty
+ \ifcsname\??insertionnumber\currentinsertion\endcsname
+ % bad news
+ \else
+ \expandafter\newinsert\csname\??insertionnumber\currentinsertion\endcsname
+ \page_inserts_synchronize_registers
+ \ctxcommand{defineinsertion("\currentinsertion",{ number = \number\currentinsertionnumber })}%
+ \t_page_inserts_list\expandafter\expandafter\expandafter
+ {\expandafter\the\expandafter\t_page_inserts_list
+ \expandafter\page_inserts_process\csname\??insertionnumber\currentinsertion\endcsname}%
+ \count\currentinsertionnumber\plusthousand
+ \skip \currentinsertionnumber\zeropoint
+ \dimen\currentinsertionnumber\maxdimen
+ \fi
+ \else
+ \expandafter\let\csname\??insertionnumber\currentinsertion\expandafter\endcsname
+ \csname\??insertionnumber\currentinsertionparent\endcsname
+ \fi
+\to \everydefineinsertion
+
+\appendtoks
+ \ctxcommand{setupinsertion("\currentinsertion",{
+ location = "\insertionparameter\c!location",
+ })}%
+\to \everysetupinsertion
+
+\unexpanded\def\page_inserts_set_location#1#2% fast one
+ {\ctxcommand{setinsertionlocation("#1","#2")}}
+
+%D Auxiliary macros:
+
+\def\page_insert_insertion_height#1%
+ {\dimexpr\expandafter\page_insert_insertion_height_indeed\csname\??insertionnumber#1\endcsname\relax}
+
+\def\page_insert_insertion_height_indeed#1%
+ {\ifvoid#1\zeropoint\else1\skip#1+\ht#1\fi}
+
+%D Obsolete:
+
+% \installcorenamespace{insertionbackup}
+%
+% \unexpanded\def\installbackupinsertion#1%
+% {\ifcsname\??insertionbackup\string#1\endcsname \else
+% \expandafter\newinsert\csname\??insertionbackup\string#1\endcsname
+% \count\csname\??insertionbackup\string#1\endcsname\zerocount
+% \skip \csname\??insertionbackup\string#1\endcsname\zeropoint
+% \dimen\csname\??insertionbackup\string#1\endcsname\maxdimen
+% \fi}
+%
+% \unexpanded\def\saveinsertionbox#1% hm, actually unknown
+% {\global\setbox\csname\??insertionbackup\string#1\endcsname
+% \ifdim\ht#1>\zeropoint\box#1\else\emptybox\fi}
+%
+% \unexpanded\def\restoreinsertionbox#1%
+% {\ifvoid\backupinsertion#1\else % if void, we keep the content
+% \global\setbox#1\box\csname\??insertionbackup\string#1\endcsname
+% \fi}
+%
+% \unexpanded\def\eraseinsertionbackup#1%
+% {\global\setbox\csname\??insertionbackup\string#1\endcsname\emptybox}
+%
+% \unexpanded\def\saveinsertiondata#1%
+% {\global\skip \csname\??insertionbackup\string#1\endcsname\skip #1%
+% \global\count\csname\??insertionbackup\string#1\endcsname\count#1%
+% \global\dimen\csname\??insertionbackup\string#1\endcsname\dimen#1}
+%
+% \unexpanded\def\restoreinsertiondata#1%
+% {\global\skip #1\skip \csname\??insertionbackup\string#1\endcsname
+% \global\count#1\count\csname\??insertionbackup\string#1\endcsname
+% \global\dimen#1\dimen\csname\??insertionbackup\string#1\endcsname}
+
\protect \endinput
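
The auxiliary \page_insert_insertion_height above relies on the fact that prefixing a skip register with the factor 1 inside \dimexpr converts the glue to a dimension, so the expression adds the natural (fixed) part of the insert's skip to the height of its box. A Lua counterpart, assuming a LuaTeX run where the register accessors tex.box and tex.skip are available (insertion_height is an illustrative name, not part of the module):

    local function insertion_height(n)      -- n is the insertion class number
        local b = tex.box[n]
        if not b then
            return 0                        -- a void box contributes nothing
        end
        return b.height + tex.skip[n].width -- width is the fixed part of the glue
    end
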
diff --git a/Master/texmf-dist/tex/context/base/page-lay.mkiv b/Master/texmf-dist/tex/context/base/page-lay.mkiv
index 78839ea705a..bbe60eec5e2 100644
--- a/Master/texmf-dist/tex/context/base/page-lay.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-lay.mkiv
@@ -576,7 +576,7 @@
%\writestatus{layout target}{(\the\paperwidth,\the\paperheight) -> (\the\printpaperwidth,\the\printpaperheight)}%
\page_layouts_synchronize}
-\ifx\page_paper_set_offsets\undefined
+\ifdefined\page_paper_set_offsets \else
\def\page_paper_set_offsets % will move
{\global\paperoffset\v_page_target_offset
@@ -585,8 +585,13 @@
\fi
-\ifdefined\setups \else \unexpanded\def\setups[#1]{\setdefaultpenalties} \fi % still needed?
-\ifdefined\synchronizegridsnapping \else \let\synchronizegridsnapping\relax \fi
+\ifdefined\setups \else
+ \unexpanded\def\setups[#1]{\setdefaultpenalties} % still needed?
+\fi
+
+\ifdefined\synchronizegridsnapping \else
+ \let\synchronizegridsnapping\relax
+\fi
\let\p_page_layouts_width \empty
\let\p_page_layouts_height\empty
@@ -605,7 +610,7 @@
\global\topspace \layoutparameter\c!topspace
\page_layouts_set_dimensions
\synchronizegridsnapping
- \doprocesslocalsetups{\layoutparameter\c!setups}% depends on gridsnapping !
+ \usesetupsparameter\layoutparameter % depends on gridsnapping !
\synchronizewhitespace
\synchronizeblank
\setupinterlinespace[\v!reset]% \synchronizegloballinespecs
@@ -806,7 +811,7 @@
\page_layouts_check_revert
\fi\fi\fi\fi\fi\fi}
-\def\installlayoutmethod#1#2%
+\unexpanded\def\installlayoutmethod#1#2%
{\setgvalue{\??layoutmethod#1}{#2}}
\installlayoutmethod\v!default{\page_layouts_check_default}
@@ -926,8 +931,11 @@
%D \macros
%D {adaptlayout}
+\installcorenamespace{adaptlayout}
\installcorenamespace{pageadaptations}
+\installsetuponlycommandhandler \??adaptlayout {adaptlayout}
+
\newdimen\d_page_adepts_pushed_text_height
\newdimen\d_page_adepts_pushed_footer_height
\newdimen\d_page_adepts_height
@@ -946,27 +954,33 @@
\def\page_adapts_layout_register#1#2%
{\setgvalue{\??pageadaptations#2}{\page_adapts_layout_indeed{#1}}}
+\let\p_adapts_height\zeropoint
+\let\p_adapts_lines \zerocount
+
\def\page_adapts_layout_indeed#1%
- {\getparameters[\??za][\c!height=\zeropoint,\c!lines=0,#1]%
+ {\setupcurrentadaptlayout[\c!height=\zeropoint,\c!lines=\zerocount,#1]%
\page_adepts_push
- \doifelse\@@zaheight\v!max
- {\global\d_page_adepts_height\footerheight}
- {\global\d_page_adepts_height\dimexpr
- \ifnum\@@zalines=\zerocount
- \@@zaheight
+ \edef\p_adapts_height{\adaptlayoutparameter\c!height}%
+ \edef\p_adapts_lines {\adaptlayoutparameter\c!lines}%
+ \ifx\p_adapts_height\v!max
+ \global\d_page_adepts_height\footerheight
+ \else
+ \global\d_page_adepts_height\dimexpr
+ \ifnum\p_adapts_lines=\zerocount
+ \p_adapts_height
\else
- \@@zalines\openlineheight
+ \p_adapts_lines\openlineheight
\fi
\relax
\ifdim\d_page_adepts_height>\footerheight
\global\d_page_adepts_height\footerheight
- \fi}
+ \fi
+ \fi
\global\advance\textheight \d_page_adepts_height
\global\advance\footerheight-\d_page_adepts_height
\showmessage\m!layouts1{\the\d_page_adepts_height,\the\realpageno}%
% this will become a better one (do we need insert correction?)
\page_otr_command_set_vsize
- \global\pagegoal\vsize
%
\page_backgrounds_recalculate
\global\let\page_adepts_push\relax
@@ -1105,33 +1119,50 @@
% #single #left #right
-\def\doifoddpageelse {\ifodd\pagenoshift\expandafter\doifoddpageelseyes \else\expandafter\doifoddpageelsenop \fi}
-\def\doifoddpageelseyes{\ifodd\realpageno \expandafter\secondoftwoarguments\else\expandafter\firstoftwoarguments \fi}
-\def\doifoddpageelsenop{\ifodd\realpageno \expandafter\firstoftwoarguments \else\expandafter\secondoftwoarguments\fi}
+\def\doifoddpageelse
+ {\ifodd\pagenoshift
+ \expandafter\page_layouts_if_odd_else_yes
+ \else
+ \expandafter\page_layouts_if_odd_else_nop
+ \fi}
+
+\def\page_layouts_if_odd_else_yes
+ {\ifodd\realpageno
+ \expandafter\secondoftwoarguments
+ \else
+ \expandafter\firstoftwoarguments
+ \fi}
+
+\def\page_layouts_if_odd_else_nop
+ {\ifodd\realpageno
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
\let\doifonevenpaginaelse\doifoddpageelse
-\def\redoifoddpageelse#1{\doifoddpageelse}
+\def\page_layouts_if_odd_else_again#1{\doifoddpageelse}
\def\doifbothsidesoverruled
{\ifdoublesided
- \expandafter\redoifoddpageelse
+ \expandafter\page_layouts_if_odd_else_again
\else
\expandafter\firstofthreearguments
\fi}
\def\doifbothsides% #1 #2 #3
{\ifdoublesided
- \expandafter\doifbothsidesindeed
+ \expandafter\page_layouts_if_both_sides
\else
\expandafter\firstofthreearguments
\fi}
-\def\doifbothsidesindeed
+\def\page_layouts_if_both_sides
{\ifsinglesided
\expandafter\firstofthreearguments
\else
- \expandafter\redoifoddpageelse
+ \expandafter\page_layouts_if_odd_else_again
\fi}
\newdimen\texthoffset
@@ -1179,12 +1210,12 @@
\def\rightorleftpageaction
{\ifdoublesided
- \expandafter\rightorleftpageactionindeed
+ \expandafter\page_layouts_right_or_left_page_action
\else
\expandafter\firstoftwoarguments
\fi}
-\def\rightorleftpageactionindeed
+\def\page_layouts_right_or_left_page_action
{\ifsinglesided
\expandafter\firstoftwoarguments
\else
diff --git a/Master/texmf-dist/tex/context/base/page-lin.lua b/Master/texmf-dist/tex/context/base/page-lin.lua
index b830dbbb9ec..e6b500e8bc8 100644
--- a/Master/texmf-dist/tex/context/base/page-lin.lua
+++ b/Master/texmf-dist/tex/context/base/page-lin.lua
@@ -49,7 +49,6 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
local traverse = node.traverse
local copy_node = node.copy
@@ -70,7 +69,7 @@ local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
local id = n.id
if id == whatsit_code then -- why whatsit
- local a = has_attribute(n,a_linereference)
+ local a = n[a_linereference]
if a then
cross_references[a] = m
end
@@ -129,29 +128,35 @@ function boxed.register(configuration)
last = last + 1
data[last] = configuration
if trace_numbers then
- report_lines("registering setup %s",last)
+ report_lines("registering setup %a",last)
end
return last
end
+function commands.registerlinenumbering(configuration)
+ context(boxed.register(configuration))
+end
+
function boxed.setup(n,configuration)
local d = data[n]
if d then
if trace_numbers then
- report_lines("updating setup %s",n)
+ report_lines("updating setup %a",n)
end
for k,v in next, configuration do
d[k] = v
end
else
if trace_numbers then
- report_lines("registering setup %s (br)",n)
+ report_lines("registering setup %a (br)",n)
end
data[n] = configuration
end
return n
end
+commands.setuplinenumbering = boxed.setup
+
local function check_number(n,a,skip,sameline)
local d = data[a]
if d then
@@ -160,17 +165,17 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
end
context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
@@ -184,7 +189,7 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if has_attribute(n,a_linenumber) then
+ if n[a_linenumber] then
return list
end
end
@@ -224,7 +229,7 @@ function boxed.stage_one(n,nested)
-- skip funny hlists -- todo: check line subtype
else
local list = n.list
- local a = has_attribute(list,a_linenumber)
+ local a = list[a_linenumber]
if a and a > 0 then
if last_a ~= a then
local da = data[a]
@@ -239,12 +244,12 @@ function boxed.stage_one(n,nested)
report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
end
end
- if has_attribute(n,a_displaymath) then
+ if n[a_displaymath] then
if nodes.is_display_math(n) then
check_number(n,a,skip)
end
else
- local v = has_attribute(list,a_verbatimline)
+ local v = list[a_verbatimline]
if not v or v ~= last_v then
last_v = v
check_number(n,a,skip)
@@ -280,3 +285,6 @@ function boxed.stage_two(n,m)
end
end
end
+
+commands.linenumbersstageone = boxed.stage_one
+commands.linenumbersstagetwo = boxed.stage_two
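
The registration scheme in boxed.register and boxed.setup is the backbone of the reworked TeX interface below: every line numbering instance gets a numeric slot, that number travels back to TeX (and from there rides along as the attribute value on typeset lines), and later setups merge keys into the existing slot rather than allocating a new one. A standalone sketch of the bookkeeping (register and setup are simplified stand-ins):

    local data, last = { }, 0

    local function register(configuration)
        last = last + 1
        data[last] = configuration
        return last                  -- becomes the attribute value at the TeX end
    end

    local function setup(n,configuration)
        local d = data[n]
        if d then
            for k, v in next, configuration do
                d[k] = v             -- update an existing instance
            end
        else
            data[n] = configuration  -- tolerate a setup before a register
        end
        return n
    end

    local n = register { start = 1, step = 5, tag = "margin" }
    setup(n, { continue = "yes" })
    print(data[n].step, data[n].continue)  -- 5   yes
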
diff --git a/Master/texmf-dist/tex/context/base/page-lin.mkiv b/Master/texmf-dist/tex/context/base/page-lin.mkiv
index 7b1cbcd94aa..0f8b7839810 100644
--- a/Master/texmf-dist/tex/context/base/page-lin.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-lin.mkiv
@@ -26,20 +26,6 @@
%
% we should use normal counters but then we need to sync settings
-% not yet ok, we need to give the top line a proper height
-%
-% \newbox\locallinenumberbox
-%
-% \unexpanded\def\startlocallinenumbering
-% {\setbox\locallinenumberbox\vbox\bgroup
-% \startlinenumbering}
-%
-% \unexpanded\def\stoplocallinenumbering
-% {\stoplinenumbering
-% \egroup
-% \mkdoprocessdeepboxcontents\locallinenumberbox
-% \unvbox\locallinenumberbox}
-
% some line
%
% \startlocallinenumbering
@@ -55,137 +41,96 @@
\definesystemattribute[linenumber] [public]
\definesystemattribute[linereference][public]
-\appendtoksonce \attribute\linenumberattribute \attributeunsetvalue \to \everyforgetall
-\appendtoksonce \attribute\displaymathattribute\plusone \to \everybeforedisplayformula
+\appendtoksonce
+ \attribute\linenumberattribute\attributeunsetvalue
+\to \everyforgetall
\newcount \linenumber % not used
-\newbox \linenumberscratchbox
-\newcount \linenumberchunk
-\newcount \linerefcounter
-\newconstant\linenumbernesting
+\newbox \b_page_lines_scratch
+\newcount \c_page_lines_reference
+\newconstant\c_page_lines_nesting
\newconditional\tracelinenumbering
-\def\mkprocesstextlinenumbers#1#2%
- {\setbox\linenumberscratchbox\vbox
- {\forgetall
- \offinterlineskip
- \ctxlua{nodes.lines.boxed.stage_one(\number#1,\ifcase\linenumbernesting false\else true\fi)}}% #2
- \ctxlua{nodes.lines.boxed.stage_two(\number#1,\number\linenumberscratchbox)}}% can move to lua code
-
% id nr shift width leftskip dir
-\let\makelinenumber\gobblesevenarguments
+\installcorenamespace{linenumberinginstance}
-\newconditional\boxcontentneedsprocessing
+\let\makelinenumber\gobblesevenarguments % used at lua end
-\def\mkdoprocesspagecontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone \zerocount}
-\def\mkdoprocessboxcontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone \zerocount}
-\def\mkdoprocessdeepboxcontents#1{\mkaddtextlinenumbers{#1}\plusone \plusone \plusone }
-\def\mkdoprocesscolumncontents #1{\mkaddtextlinenumbers{#1}\currentcolumn\nofcolumns\zerocount}
+\newconditional\page_postprocessors_needed_box
-\def\mklinenumberparameters
- {continue = "\linenumberparameter\c!continue",
- start = \linenumberparameter\c!start,
- step = \linenumberparameter\c!step,
- method = "\linenumberparameter\c!method",
- tag = "\currentlinenumbering"}
+\unexpanded\def\page_postprocessors_linenumbers_page #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_box #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_deepbox#1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \plusone }
+\unexpanded\def\page_postprocessors_linenumbers_column #1{\page_lines_add_numbers_to_box{#1}\currentcolumn\nofcolumns\zerocount}
-\def\mklinenumberupdateparameters
- {continue = "\linenumberparameter\c!continue"}
+\def\page_lines_parameters_regular
+ {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi",
+ start = \number\linenumberingparameter\c!start,
+ step = \number\linenumberingparameter\c!step,
+ method = "\linenumberingparameter\c!method",
+ tag = "\currentlinenumbering"}
-\def\mkdefinetextlinenumbering
- {\setxvalue{ln:c:\currentlinenumbering}{\number\cldcontext{nodes.lines.boxed.register({\mklinenumberparameters})}}}
+\def\page_lines_parameters_update
+ {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi"}
-\def\mkupdatetextlinenumbering
- {\ctxlua{nodes.lines.boxed.setup(\getvalue{ln:c:\currentlinenumbering},{\mklinenumberupdateparameters})}}
+\def\page_lines_start_define
+ {\setxvalue{\??linenumberinginstance\currentlinenumbering}{\ctxcommand{registerlinenumbering({\page_lines_parameters_regular})}}}
-\def\mkstarttextlinenumbering#1#2% always when assignment
- {\globallet\mkprocesspagecontents \mkdoprocesspagecontents
- \globallet\mkprocesscolumncontents\mkdoprocesscolumncontents
- \global\settrue\boxcontentneedsprocessing % see core-rul.mkiv
- \edef\currentlinenumbering{#1}%
- \ifcase#2\relax
- \mkupdatetextlinenumbering % continue
- \or
- \mkdefinetextlinenumbering % only when assignment
- \fi
- \attribute\linenumberattribute\getvalue{ln:c:\currentlinenumbering}\relax}
+\def\page_lines_start_update
+ {\ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_update})}}
-\def\mksetuptextlinenumbering
- {\ifcsname ln:c:\currentlinenumbering\endcsname
- \ctxlua{nodes.lines.boxed.setup(\getvalue{ln:c:\currentlinenumbering},{\mklinenumberparameters})}%
+\def\page_lines_setup
+ {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
+ \ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_regular})}%
\fi}
-\def\mkstoptextlinenumbering
- {\attribute\linenumberattribute\attributeunsetvalue}
-
% we could make this a bit more efficient by putting the end reference
% in the same table as the start one but why make things complex ...
-\let\dofinishlinereference\dofinishfullreference
+\let\dofinishlinereference\dofinishfullreference % at lua end
-\def\mksomelinereference#1#2#3%
+\unexpanded\def\page_lines_some_reference#1#2#3%
{\dontleavehmode\begingroup
- \global\advance\linerefcounter\plusone
- \attribute\linereferenceattribute\linerefcounter
+ \global\advance\c_page_lines_reference\plusone
+ \attribute\linereferenceattribute\c_page_lines_reference
#3%
% for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
- \expanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberparameter\c!conversion}{\the\linerefcounter}}% kind labels userdata text
+ \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
\endgroup}
-\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{}\ignorespaces}
-\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{}}
+\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}\ignorespaces}
+\def\page_lines_reference_stop #1{\removeunwantedspaces\page_lines_some_reference{#1}{lr:e:#1}{}}
+
+% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
+% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
-\def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced
-\def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced
+\newif\ifnumberinglines % will change
+\newif\iftypesettinglines % will change
-% high level interface
+\installcorenamespace{linenumbering}
-\newif\ifnumberinglines
-\newif\iftypesettinglines
+\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
-\let\currentlinenumbering\empty
+\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
+\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
+\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
-\setnewconstant\linenumbermode \plusone % 0=continue, 1=restart
-\setnewconstant\linenumberlocation \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
-\setnewconstant\linenumberalignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
+\newdimen\d_page_lines_width
+\newdimen\d_page_lines_distance
\newevery \beforeeverylinenumbering \relax
\newevery \aftereverylinenumbering \relax
\newevery \everylinenumber \relax
-\newdimen\linenumberwidth
-\newdimen\linenumberdistance
-
-\unexpanded\def\definelinenumbering
- {\dosingleempty\dodefinelinenumbering}
+\appendtoks
+ \page_lines_setup
+\to \everysetuplinenumbering
-\def\dodefinelinenumbering[#1]%
- {\edef\currentlinenumbering{#1}%
- \mkdefinetextlinenumbering}
-
-\unexpanded\def\setuplinenumbering
- {\dodoubleempty\dosetuplinenumbering}
-
-\def\dosetuplinenumbering[#1][#2]%
- {\ifsecondargument
- \def\currentlinenumbering{#1}%
- \getparameters[\??rn#1][#2]%
- \else
- \let\currentlinenumbering\empty
- \getparameters[\??rn][#1]%
- \fi
- \mksetuptextlinenumbering}
-
-% some day commandhandler
-
-\def\linenumberparameter#1%
- {\csname\??rn\ifcsname\??rn\currentlinenumbering#1\endcsname\currentlinenumbering\fi#1\endcsname}
-
-\unexpanded\def\dolinenumberattributes#1#2%
- {\dousestyleparameter{\linenumberparameter#1}%
- \dousecolorparameter{\linenumberparameter#2}}
+\appendtoks
+ \page_lines_start_define
+\to \everydefinelinenumbering
\setuplinenumbering
[\c!conversion=\v!numbers,
@@ -196,7 +141,7 @@
\c!location=\v!left,
\c!style=,
\c!color=,
- \c!width=2em,
+ \c!width=2\emwidth,
\c!left=,
\c!right=,
\c!command=,
@@ -204,253 +149,314 @@
\c!align=\v!auto]
\definelinenumbering
-
-\unexpanded\def\startlinenumbering
- {\dodoubleempty\dostartlinenumbering}
+ []
% no intermediate changes in values, define a class, otherwise each range
% would need a number
% todo: text
-\expandafter\let\csname\??rn:l:\v!middle \endcsname \zerocount
-\expandafter\let\csname\??rn:l:\v!left \endcsname \plusone
-\expandafter\let\csname\??rn:l:\v!margin \endcsname \plusone
-\expandafter\let\csname\??rn:l:\v!inmargin \endcsname \plusone
-\expandafter\let\csname\??rn:l:\v!inleft \endcsname \plusone
-\expandafter\let\csname\??rn:l:\v!right \endcsname \plustwo
-\expandafter\let\csname\??rn:l:\v!inright \endcsname \plustwo
-\expandafter\let\csname\??rn:l:\v!inner \endcsname \plusthree
-\expandafter\let\csname\??rn:l:\v!outer \endcsname \plusfour
-\expandafter\let\csname\??rn:l:\v!text \endcsname \plusfive
-\expandafter\let\csname\??rn:l:\v!begin \endcsname \plussix
-\expandafter\let\csname\??rn:l:\v!end \endcsname \plusseven
-
-\expandafter\let\csname\??rn:a:\v!middle \endcsname \zerocount
-\expandafter\let\csname\??rn:a:\v!right \endcsname \plusone
-\expandafter\let\csname\??rn:a:\v!flushleft \endcsname \plusone
-\expandafter\let\csname\??rn:a:\v!left \endcsname \plustwo
-\expandafter\let\csname\??rn:a:\v!flushright\endcsname \plustwo
-\expandafter\let\csname\??rn:a:\v!auto \endcsname \plusfive
-
-\def\dostartlinenumbering[#1][#2]% todo: c!continue
+\installcorenamespace{linennumberinglocation}
+\installcorenamespace{linennumberingalternative}
+
+\expandafter\let\csname\??linennumberinglocation\v!middle \endcsname \zerocount
+\expandafter\let\csname\??linennumberinglocation\v!left \endcsname \plusone
+\expandafter\let\csname\??linennumberinglocation\v!margin \endcsname \plusone
+\expandafter\let\csname\??linennumberinglocation\v!inmargin \endcsname \plusone
+\expandafter\let\csname\??linennumberinglocation\v!inleft \endcsname \plusone
+\expandafter\let\csname\??linennumberinglocation\v!right \endcsname \plustwo
+\expandafter\let\csname\??linennumberinglocation\v!inright \endcsname \plustwo
+\expandafter\let\csname\??linennumberinglocation\v!inner \endcsname \plusthree
+\expandafter\let\csname\??linennumberinglocation\v!outer \endcsname \plusfour
+\expandafter\let\csname\??linennumberinglocation\v!text \endcsname \plusfive
+\expandafter\let\csname\??linennumberinglocation\v!begin \endcsname \plussix
+\expandafter\let\csname\??linennumberinglocation\v!end \endcsname \plusseven
+
+\expandafter\let\csname\??linennumberingalternative\v!middle \endcsname \zerocount
+\expandafter\let\csname\??linennumberingalternative\v!right \endcsname \plusone
+\expandafter\let\csname\??linennumberingalternative\v!flushleft \endcsname \plusone
+\expandafter\let\csname\??linennumberingalternative\v!left \endcsname \plustwo
+\expandafter\let\csname\??linennumberingalternative\v!flushright\endcsname \plustwo
+\expandafter\let\csname\??linennumberingalternative\v!auto \endcsname \plusfive
+
+% \startlinenumbering[<startvalue>|continue|settings|name]
+% \startlinenumbering[name][<startvalue>|continue|settings]
+
+\unexpanded\def\startlinenumbering
+ {\dodoubleempty\page_lines_start}
+
+\def\page_lines_start % we stay downward compatible
{\begingroup
- \linenumbermode\plusone
\ifsecondargument
- \def\currentlinenumbering{#1}%
- \doifassignmentelse{#2}
- {\getparameters[\??rn\currentlinenumbering][#2]}
- {\doifnumberelse{#2}% downward compatible
- {\setvalue{\??rn#1\c!start}{#2}}%
- {\doif{#2}\v!continue
- {\getparameters[\??rn\currentlinenumbering][\c!continue=\v!yes]%
- \linenumbermode\zerocount}}}%
+ \expandafter\page_lines_start_two
\else\iffirstargument
- \doifnumberelse{#1}% downward compatible
- {\let\currentlinenumbering\empty
- \setvalue{\??rn\c!start}{#1}}%
- {\doifelse{#1}\v!continue
- {\let\currentlinenumbering\empty
- \getparameters[\??rn\currentlinenumbering][\c!continue=\v!yes]%
- \linenumbermode\zerocount}
- {\def\currentlinenumbering{#1}}}%
- \fi\fi
- \doif{\linenumberparameter\c!continue}\v!yes
- {\linenumbermode\zerocount}%
- \numberinglinestrue
+ \doubleexpandafter\page_lines_start_one
+ \else
+ \doubleexpandafter\page_lines_start_zero
+ \fi\fi}
+
+\def\page_lines_start_zero[#1][#2]%
+ {\edef\m_argument{\linenumberingparameter\c!continue}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_one[#1][#2]% [continue|<number>|settings] % historic
+ {\edef\m_argument{#1}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \let\currentlinenumbering\empty
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty
+ \let\currentlinenumbering\empty
+ \else
+ \doifassignmentelse{#1}
+ {\let\currentlinenumbering\empty
+ \setupcurrentlinenumbering[#1]}
+ {\doifnumberelse\m_argument
+ {\let\currentlinenumbering\empty
+ \letlinenumberingparameter\c!start\m_argument}
+ {\let\currentlinenumbering\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_two[#1][#2]% [tag][continue|<number>|settings]
+ {\edef\currentlinenumbering{#1}%
+ \edef\m_argument{#2}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty \else
+ \doifassignmentelse{#2}
+ {\setupcurrentlinenumbering[#2]}
+ {\doifnumber\m_argument
+ {\letlinenumberingparameter\c!start\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_followup
+ {\numberinglinestrue
\the\beforeeverylinenumbering
- \mkstarttextlinenumbering\currentlinenumbering\linenumbermode}
+ \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
+ \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
+ \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
+ \ifcase\c_page_lines_mode\relax
+ \page_lines_start_update % continue
+ \or
+ \page_lines_start_define % only when assignment
+ \fi
+ \attribute\linenumberattribute\getvalue{\??linenumberinginstance\currentlinenumbering}\relax}
\unexpanded\def\stoplinenumbering
- {\mkstoptextlinenumbering
+ {\attribute\linenumberattribute\attributeunsetvalue
\the\aftereverylinenumbering
\endgroup}
% number placement .. will change into (the new) margin code
-\def\mkdoinnerlinenumber{\doifoddpageelse\mkdoleftlinenumber\mkdorightlinenumber}
-\def\mkdoouterlinenumber{\doifoddpageelse\mkdorightlinenumber\mkdoleftlinenumber}
+\def\page_lines_number_inner_indeed{\doifoddpageelse\page_lines_number_left_indeed\page_lines_number_right_indeed}
+\def\page_lines_number_outer_indeed{\doifoddpageelse\page_lines_number_right_indeed\page_lines_number_left_indeed}
-\def\mkleftlinenumber
- {\ifcase\linenumberlocation
- \expandafter\mkdoleftlinenumber
+\def\page_lines_number_left
+ {\ifcase\c_page_lines_location
+ \expandafter\page_lines_number_left_indeed
\or
- \expandafter\mkdoleftlinenumber
+ \expandafter\page_lines_number_left_indeed
\or
- \expandafter\mkdoleftlinenumber
+ \expandafter\page_lines_number_left_indeed
\or
- \expandafter\mkdoinnerlinenumber
+ \expandafter\page_lines_number_inner_indeed
\or
- \expandafter\mkdoouterlinenumber
+ \expandafter\page_lines_number_outer_indeed
\or
- \expandafter\mkdotextlinenumber
+ \expandafter\page_lines_number_text_indeed
\or
- \expandafter\mkdobeginlinenumber
+ \expandafter\page_lines_number_begin_indeed
\or
- \expandafter\mkdoendlinenumber
+ \expandafter\page_lines_number_end_indeed
\fi}
-\def\mkrightlinenumber
- {\ifcase\linenumberlocation
- \expandafter\mkdorightlinenumber
+\def\page_lines_number_right
+ {\ifcase\c_page_lines_location
+ \expandafter\page_lines_number_right_indeed
\or
- \expandafter\mkdorightlinenumber
+ \expandafter\page_lines_number_right_indeed
\or
- \expandafter\mkdorightlinenumber
+ \expandafter\page_lines_number_right_indeed
\or
- \expandafter\mkdoouterlinenumber
+ \expandafter\page_lines_number_outer_indeed
\or
- \expandafter\mkdoinnerlinenumber
+ \expandafter\page_lines_number_inner_indeed
\or
- \expandafter\mkdotextlinenumber
+ \expandafter\page_lines_number_text_indeed
\or
- \expandafter\mkdoendlinenumber
+ \expandafter\page_lines_number_end_indeed
\or
- \expandafter\mkdobeginlinenumber
+ \expandafter\page_lines_number_begin_indeed
\fi}
-\newconditional\faketextlinenumber
-\newconstant \linenumberbox
-\newconstant \linenumbercolumn
-\newconstant \linenumberlastcolumn
+\newconditional\c_page_lines_fake_number
+\newconstant \b_page_lines_number
+\newconstant \c_page_lines_column
+\newconstant \c_page_lines_last_column
-\def\mkaddtextlinenumbers#1#2#3#4% box col max nesting
+\def\page_lines_add_numbers_to_box#1#2#3#4% box col max nesting
{\bgroup
- \linenumberbox #1\relax
- \linenumbercolumn #2\relax
- \linenumberlastcolumn#3\relax
- \linenumbernesting #4\relax
+ \b_page_lines_number #1\relax
+ \c_page_lines_column #2\relax
+ \c_page_lines_last_column#3\relax
+ \c_page_lines_nesting #4\relax
\fullrestoreglobalbodyfont
- \let\makelinenumber\maketextlinenumber
- \mkprocesstextlinenumbers\linenumberbox\linenumbernesting
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \ctxcommand{linenumbersstageone(\number\b_page_lines_number,\ifcase\c_page_lines_nesting false\else true\fi)}}%
+ \ctxcommand{linenumbersstagetwo(\number\b_page_lines_number,\number\b_page_lines_scratch)}% can move to lua code
\egroup}
-\def\maketextlinenumber#1#2%
+\let\page_lines_make_number_indeed\relax
+
+\def\page_lines_make_number#1#2%
{\edef\currentlinenumbering{#1}%
\ifcase#2\relax
- \settrue \faketextlinenumber
+ \settrue \c_page_lines_fake_number
\else
- \setfalse\faketextlinenumber
+ \setfalse\c_page_lines_fake_number
\fi
- \linenumberlocation \executeifdefined{\??rn:l:\linenumberparameter\c!location}\plusone % left
- \linenumberalignment\executeifdefined{\??rn:a:\linenumberparameter\c!align }\plusfive % auto
- \ifcase\linenumberlastcolumn\relax
- \settrue \faketextlinenumber
+ \c_page_lines_location \executeifdefined{\??linennumberinglocation \linenumberingparameter\c!location}\plusone \relax % left
+ \c_page_lines_alignment\executeifdefined{\??linennumberingalternative\linenumberingparameter\c!align }\plusfive\relax % auto
+ \ifcase\c_page_lines_last_column\relax
+ \settrue \c_page_lines_fake_number
\or
% one column
- \ifcase\linenumberlocation
- \settrue \faketextlinenumber
- % hm
+ \ifcase\c_page_lines_location
+ \settrue \c_page_lines_fake_number
+ \let\page_lines_make_number_indeed\page_lines_number_fake_indeed
\or
- \let\domakelinenumber\mkleftlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_left
\or
- \let\domakelinenumber\mkrightlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_right
\or % inner
- \let\domakelinenumber\mkdoinnerlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_inner_indeed
\or % outer
- \let\domakelinenumber\mkdoouterlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_outer_indeed
\or % text
- \let\domakelinenumber\mkdotextlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_text_indeed
\or
- \let\domakelinenumber\mkdobeginlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_begin_indeed
\or
- \let\domakelinenumber\mkdoendlinenumber
+ \let\page_lines_make_number_indeed\page_lines_number_end_indeed
\fi
- \else\ifcase\linenumbercolumn\relax
- \settrue \faketextlinenumber
+ \else\ifcase\c_page_lines_column\relax
+ \settrue \c_page_lines_fake_number
\or
- \let\domakelinenumber\mkleftlinenumber
- \ifcase\linenumberlocation\or
- \linenumberlocation\plusone
+ \let\page_lines_make_number_indeed\page_lines_number_left
+ \ifcase\c_page_lines_location\or
+ \c_page_lines_location\plusone
\or
- \linenumberlocation\plustwo
+ \c_page_lines_location\plustwo
\else
- \linenumberlocation\plusone
+ \c_page_lines_location\plusone
\or
- \linenumberlocation\plusone
+ \c_page_lines_location\plusone
\or
- \linenumberlocation\plusone
+ \c_page_lines_location\plusone
\or
- \linenumberlocation\plusone % todo
+ \c_page_lines_location\plusone % todo
\or
- \linenumberlocation\plusone % todo
+ \c_page_lines_location\plusone % todo
\fi
\else
- \let\domakelinenumber\mkrightlinenumber
- \ifcase\linenumberlocation\or
- \linenumberlocation\plustwo
+ \let\page_lines_make_number_indeed\page_lines_number_right
+ \ifcase\c_page_lines_location\or
+ \c_page_lines_location\plustwo
\or
- \linenumberlocation\plusone
+ \c_page_lines_location\plusone
\or
- \linenumberlocation\plustwo
+ \c_page_lines_location\plustwo
\or
- \linenumberlocation\plustwo
+ \c_page_lines_location\plustwo
\or
- \linenumberlocation\plustwo % todo
+ \c_page_lines_location\plustwo % todo
\or
- \linenumberlocation\plustwo % todo
+ \c_page_lines_location\plustwo % todo
\fi
\fi\fi
- \domakelinenumber{#1}}
+ \page_lines_make_number_indeed{#1}}
-\def\mkdotextlinenumber #1#2#3#4#5#6% beware, one needs so compensate for this in the width !
- {\hbox{\dosomelinenumber{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}
+\let\page_lines_number_fake_indeed\gobblesixarguments % needs checking
+\def\page_lines_number_text_indeed#1#2#3#4#5#6% beware, one needs to compensate for this in the \hsize
+ {\hbox{\page_lines_number_construct{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}
-\def\mkdoleftlinenumber #1#2#3#4#5#6%
+\def\page_lines_number_left_indeed#1#2#3#4#5#6%
{\naturalhbox to \zeropoint
{\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \llap{\dosomelinenumber{#1}{2}{#2}{#5}\kern#3\scaledpoint}}}
+ \llap{\page_lines_number_construct{#1}{2}{#2}{#5}\kern#3\scaledpoint}}}
-\def\mkdorightlinenumber#1#2#3#4#5#6%
+\def\page_lines_number_right_indeed#1#2#3#4#5#6%
{\naturalhbox to \zeropoint
{\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \rlap{\hskip#4\scaledpoint\hskip#3\scaledpoint\dosomelinenumber{#1}{1}{#2}{#5}}}}
+ \rlap{\hskip\dimexpr#4\scaledpoint+#3\scaledpoint\relax\page_lines_number_construct{#1}{1}{#2}{#5}}}}
-\def\mkdobeginlinenumber #1#2#3#4#5#6%
+\def\page_lines_number_begin_indeed#1#2#3#4#5#6%
{\ifcase\istltdir#6\relax
- \linenumberlocation\plusone
- \expandafter\mkdoleftlinenumber
+ \c_page_lines_location\plusone
+ \expandafter\page_lines_number_left_indeed
\else
- \linenumberlocation\plustwo
- \expandafter\mkdorightlinenumber
+ \c_page_lines_location\plustwo
+ \expandafter\page_lines_number_left_indeed
\fi{#1}{#2}{#3}{#4}{#5}{#6}}
-\def\mkdoendlinenumber#1#2#3#4#5#6%
+\def\page_lines_number_end_indeed#1#2#3#4#5#6%
{\ifcase\istltdir#6\relax
- \linenumberlocation\plustwo
- \expandafter\mkdorightlinenumber
+ \c_page_lines_location\plustwo
+ \expandafter\page_lines_number_left_indeed
\else
- \linenumberlocation\plusone
- \expandafter\mkdoleftlinenumber
+ \c_page_lines_location\plusone
+ \expandafter\page_lines_number_left_indeed
\fi{#1}{#2}{#3}{#4}{#5}{#6}}
-\def\dosomelinenumber#1#2#3#4% tag 1=left|2=right linenumber leftskip
+\def\page_lines_number_construct#1#2#3#4% tag 1=left|2=right linenumber leftskip
{\begingroup
\def\currentlinenumbering{#1}%
\def\linenumber{#3}% unsafe
- \doifelse{\linenumberparameter\c!width}\v!margin
- {\linenumberwidth\leftmarginwidth}
- {\linenumberwidth\linenumberparameter\c!width}%
- \linenumberdistance\linenumberparameter\c!distance\relax
- \ifcase#2\relax\or\hskip\linenumberdistance\fi\relax
- \ifnum\linenumberlocation=\plusfive
- \scratchdimen\dimexpr#4\scaledpoint-\linenumberdistance\relax
- \linenumberlocation\plusone
+ \doifelse{\linenumberingparameter\c!width}\v!margin
+ {\d_page_lines_width\leftmarginwidth}
+ {\d_page_lines_width\linenumberingparameter\c!width}%
+ \d_page_lines_distance\linenumberingparameter\c!distance\relax
+ \ifcase#2\relax\or\hskip\d_page_lines_distance\fi\relax
+ \ifnum\c_page_lines_location=\plusfive
+ \scratchdimen\dimexpr#4\scaledpoint-\d_page_lines_distance\relax
+ \c_page_lines_location\plusone
\else
\scratchdimen\zeropoint
\fi
- \ifcase\linenumberalignment
- \linenumberlocation\zerocount % middle
+ \ifcase\c_page_lines_alignment
+ \c_page_lines_location\zerocount % middle
\or
- \linenumberlocation\plusone % left
+ \c_page_lines_location\plusone % left
\or
- \linenumberlocation\plustwo % right
+ \c_page_lines_location\plustwo % right
\fi
- \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \linenumberwidth
- {\ifcase\linenumberlocation
+ \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \d_page_lines_width
+ {\ifcase\c_page_lines_location
\hss % middle
\or
% left
@@ -461,16 +467,16 @@
\or
\doifoddpageelse\hss\relax % outer
\fi
- \ifconditional\faketextlinenumber
+ \ifconditional\c_page_lines_fake_number
% we need to reserve space
\else
- \dolinenumberattributes\c!style\c!color
- \linenumberparameter\c!command
- {\linenumberparameter\c!left
- \convertnumber{\linenumberparameter\c!conversion}{#3}%
- \linenumberparameter\c!right}%
+ \uselinenumberingstyleandcolor\c!style\c!color
+ \linenumberingparameter\c!command
+ {\linenumberingparameter\c!left
+ \convertnumber{\linenumberingparameter\c!conversion}{#3}%
+ \linenumberingparameter\c!right}%
\fi
- \ifcase\linenumberlocation
+ \ifcase\c_page_lines_location
\hss % middle
\or
\hss % left
@@ -481,53 +487,60 @@
\or
\doifoddpageelse\relax\hss % outer
\fi}%
- \ifcase#2\relax\or\or\hskip\linenumberdistance\fi\relax
- \hskip-\scratchdimen
+ \ifcase#2\relax
+ \hskip-\scratchdimen
+ \or
+ \hskip-\scratchdimen
+ \or
+ \hskip\dimexpr\d_page_lines_distance-\scratchdimen\relax
+ \fi
+ \relax
\the\everylinenumber
\endgroup}
-% left right inner outer
-
-% align: \alignedline\@@rnalign\v!right{\box0\hskip\@@rndistance}
-
% referencing
-\unexpanded\def\someline [#1]{\mkstartlinereference{#1}\mkstoplinereference{#1}} % was just a def
-\unexpanded\def\startline[#1]{\mkstartlinereference{#1}}
-\unexpanded\def\stopline [#1]{\mkstoplinereference {#1}}
+\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
+\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}}
+\unexpanded\def\stopline [#1]{\page_lines_reference_stop {#1}}
-\def\mkshowstartlinereference#1%
+\def\page_lines_reference_show_start#1%
{\ifconditional\tracelinenumbering
\setbox\scratchbox\hbox{\llap
- {\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
- \smashbox\scratchbox\box\scratchbox
+ {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox
+ \box\scratchbox
\fi}
-\def\mkshowstoplinereference#1%
+\def\page_lines_reference_show_stop#1%
{\ifconditional\tracelinenumbering
\setbox\scratchbox\hbox{\rlap
- {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht}}%
- \smashbox\scratchbox\box\scratchbox
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
+ \smashbox\scratchbox
+ \box\scratchbox
\fi}
-\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{\mkshowstartlinereference{#1}}\ignorespaces}
-\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{\mkshowstoplinereference{#1}}}
+\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}\ignorespaces}
+\def\page_lines_reference_stop #1{\removeunwantedspaces\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop{#1}}}
% eventually we will do this in lua
-\def\currentreferencelinenumber{\ctxlua{structures.references.filter("linenumber")}}
+\def\currentreferencelinenumber{\ctxcommand{filterreference("linenumber")}}
+
+\let\m_page_lines_from\empty
+\let\m_page_lines_to \empty
-\def\doifelsesamelinereference#1#2#3%
+\unexpanded\def\doifelsesamelinereference#1#2#3%
{\doifreferencefoundelse{lr:b:#1}
- {\edef\fline{\currentreferencelinenumber}%
+ {\edef\m_page_lines_from{\currentreferencelinenumber}%
\doifreferencefoundelse{lr:e:#1}
- {\edef\tline{\currentreferencelinenumber}%
- %[\fline,\tline]
- \ifx\fline\tline#2\else#3\fi}
+ {\edef\m_page_lines_to{\currentreferencelinenumber}%
+ %[\m_page_lines_from,\m_page_lines_to]
+ \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
{#2}}
{#2}}
-\def\inline#1[#2]%
+\unexpanded\def\inline#1[#2]%
{\doifelsenothing{#1}
{\doifelsesamelinereference{#2}
{\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
@@ -536,9 +549,9 @@
{\in{#1}[lr:b:#2]}
{\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
-\def\inlinerange[#1]%
+\unexpanded\def\inlinerange[#1]%
{\doifelsesamelinereference{#1}
{\in[lr:b:#1]}
- {\in[lr:b:#1]--\in[lr:e:#1]}}
+ {\in[lr:b:#1]\endash\in[lr:e:#1]}}
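+
+% A rough sketch of how these referencing macros can be used (illustration
+% only; the reference name "firstlines" is made up):
+%
+% \startlinenumbering
+%     \startline[firstlines]\input zapf \stopline[firstlines]
+%     \input ward
+% \stoplinenumbering
+%
+% ... as discussed in \inline{lines}[firstlines],
+% or more compactly in \inlinerange[firstlines] ...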
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-mak.mkvi b/Master/texmf-dist/tex/context/base/page-mak.mkvi
index f37c4f6135a..71af520a196 100644
--- a/Master/texmf-dist/tex/context/base/page-mak.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-mak.mkvi
@@ -88,7 +88,10 @@
\newbox \b_page_makeup
\newtoks\t_page_makeup_every_setup
-\def\page_makeup_start_yes[#name][#settings]%
+\def\page_makeup_start_yes[#name]% [#settings]%
+ {\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
+
+\def\page_makeup_start_indeed[#name][#settings]%
{\doifelsenothing{\namedmakeupparameter{#name}\c!page}
{\page}% new, so best not to have a dangling mess here like references (we could capture them and flush embedded)
{\page[\namedmakeupparameter{#name}\c!page]}%
@@ -104,8 +107,8 @@
\global\setbox\b_page_makeup\vbox to \makeupparameter\c!height \bgroup
\usemakeupstyleandcolor\c!style\c!color
\hsize\makeupparameter\c!width
- \setupalign[\makeupparameter\c!align]%
- \doprocesslocalsetups{\makeupparameter\c!setups}% lua(..),xml(...,..),tex(..)
+ \usealignparameter\makeupparameter
+ \usesetupsparameter\makeupparameter % lua(..),xml(...,..),tex(..)
\makeupparameter\c!top
\let\stopmakeup\page_makeup_stop_yes}
@@ -113,22 +116,26 @@
{\endgraf
\makeupparameter\c!bottom
\egroup
- \pushpagestate % new
- \makeupparameter\c!before
- \relax % to be sure we don't enter the \if
- \ifdim\ht\b_page_makeup>\vsize
- \ht\b_page_makeup\vsize % is already set to \textheight (maybe set dp to 0)
- \fi
- \setuppagenumber[\c!state=\makeupparameter\c!pagestate]%
- \box\b_page_makeup
- \the\t_page_makeup_every_setup
- \page
- \makeupparameter\c!after
- \relax % to be sure we don't enter the \if
+ \strc_pagenumbers_page_state_push % new
+ \makeupparameter\c!before\relax
+ \begingroup
+ \ifdim\ht\b_page_makeup>\vsize
+ \ht\b_page_makeup\vsize % is already set to \textheight (maybe set dp to 0)
+ \fi
+ \setuppagenumber[\c!state=\makeupparameter\c!pagestate]%
+ \doif{\makeupparameter\c!location}\v!top{\topskip\zeropoint}%
+ \edef\p_reference{\makeupparameter\c!reference}%
+ \dontleavehmode
+ \usereferenceparameter\makeupparameter
+ \box\b_page_makeup % could be whole box being destination
+ \the\t_page_makeup_every_setup
+ \page
+ \endgroup
+ \makeupparameter\c!after\relax
\ifdoublesided \ifodd\realpageno \else
\getvalue{\??makeupdoublesided\makeupparameter\c!doublesided}%
\fi \fi
- \poppagestate % new
+ \strc_pagenumbers_page_state_pop % new
\egroup
\stoplayout} % includes \page
@@ -166,6 +173,7 @@
%\c!align=,
%\c!before=,
%\c!after=,
+ %\c!location=,
\c!page=\v!right,
\c!doublesided=\v!empty,
\c!top=\vss,
diff --git a/Master/texmf-dist/tex/context/base/page-mbk.mkvi b/Master/texmf-dist/tex/context/base/page-mbk.mkvi
index eb8061bf319..9e3e57193bd 100644
--- a/Master/texmf-dist/tex/context/base/page-mbk.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-mbk.mkvi
@@ -220,9 +220,9 @@
\box\floatbox
\filbreak}%
\ifdim\ht\b_page_margin_blocks>\textheight
- % \dosavefloatinfo % no saving done anyway
+ % \page_floats_report_saved % no saving done anyway
\else
- \doinsertfloatinfo
+ \page_floats_report_total
\fi}
\def\page_margin_blocks_process_float_nop
diff --git a/Master/texmf-dist/tex/context/base/page-mix.lua b/Master/texmf-dist/tex/context/base/page-mix.lua
new file mode 100644
index 00000000000..c2cd74b99cc
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-mix.lua
@@ -0,0 +1,660 @@
+if not modules then modules = { } end modules ['page-mix'] = {
+ version = 1.001,
+ comment = "companion to page-mix.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- inserts.getname(name)
+
+-- local node, tex = node, tex
+-- local nodes, interfaces, utilities = nodes, interfaces, utilities
+-- local trackers, logs, storage = trackers, logs, storage
+-- local number, table = number, table
+
+local concat = table.concat
+
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+local nodepool = nodes.pool
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local hpack = node.hpack
+local vpack = node.vpack
+local freenode = node.free
+
+local texbox = tex.box
+local texskip = tex.skip
+local texdimen = tex.dimen
+local points = number.points
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+
+local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
+local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
+
+local report_state = logs.reporter("mixed columns")
+
+pagebuilders = pagebuilders or { }
+pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
+local mixedcolumns = pagebuilders.mixedcolumns
+
+local forcedbreak = -123
+
+-- initializesplitter(specification)
+-- cleanupsplitter()
+
+-- Inserts complicate matters a lot. In order to deal with them well, we need to
+-- distinguish several cases.
+--
+-- (1) full page columns: firstcolumn, columns, lastcolumn, page
+-- (2) mid page columns : firstcolumn, columns, lastcolumn, page
+--
+-- We need to collect them accordingly.
+
+local function collectinserts(result,nxt,nxtid)
+ local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
+ while nxt do
+ if nxtid == insert_code then
+ inserttotal = inserttotal + nxt.height + nxt.depth
+ local s = nxt.subtype
+ local c = inserts[s]
+ if not c then
+ c = { }
+ inserts[s] = c
+ local width = texskip[s].width
+ if not result.inserts[s] then
+ currentskips = currentskips + width
+ end
+ nextskips = nextskips + width
+ end
+ c[#c+1] = nxt
+ if trace_detail then
+ report_state("insert of class %s found",s)
+ end
+ elseif nxtid == mark_code then
+ if trace_detail then
+ report_state("mark found")
+ end
+ else
+ break
+ end
+ nxt = nxt.next
+ if nxt then
+ nxtid = nxt.id
+ else
+ break
+ end
+ end
+ return nxt, inserts, currentskips, nextskips, inserttotal
+end
+
+local function appendinserts(ri,inserts)
+ for class, collected in next, inserts do
+ local ric = ri[class]
+ if not ric then
+ -- assign to collected
+ ri[class] = collected
+ else
+ -- append to collected
+ for j=1,#collected do
+ ric[#ric+1] = collected[j]
+ end
+ end
+ end
+end
+
+local function discardtopglue(current,discarded)
+ while current do
+ local id = current.id
+ if id == glue_code then
+ discarded[#discarded+1] = current
+ current = current.next
+ elseif id == penalty_code then
+ if current.penalty == forcedbreak then
+ discarded[#discarded+1] = current
+ current = current.next
+ while current do
+ local id = current.id
+ if id == glue_code then
+ discarded[#discarded+1] = current
+ current = current.next
+ else
+ break
+ end
+ end
+ else
+ discarded[#discarded+1] = current
+ current = current.next
+ end
+ else
+ break
+ end
+ end
+ return current
+end
+
+local function stripbottomglue(results,discarded)
+ local height = 0
+ for i=1,#results do
+ local r = results[i]
+ local t = r.tail
+ while t and t ~= r.head do
+ local prev = t.prev
+ if not prev then
+ break
+ elseif t.id == penalty_code then
+ if t.penalty == forcedbreak then
+ break
+ else
+ discarded[#discarded+1] = t
+ r.tail = prev
+ t = prev
+ end
+ elseif t.id == glue_code then
+ discarded[#discarded+1] = t
+ r.height = r.height - t.spec.width
+ r.tail = prev
+ t = prev
+ else
+ break
+ end
+ end
+ if r.height > height then
+ height = r.height
+ end
+ end
+ return height
+end
+
+local function setsplit(specification) -- a rather large function
+ local box = specification.box
+ if not box then
+ report_state("fatal error, no box")
+ return
+ end
+ local list = texbox[box]
+ if not list then
+ report_state("fatal error, no list")
+ return
+ end
+ local head = list.head or specification.originalhead
+ if not head then
+ report_state("fatal error, no head")
+ return
+ end
+ local discarded = { }
+ local originalhead = head
+ local originalwidth = specification.originalwidth or list.width
+ local originalheight = specification.originalheight or list.height
+ local current = head
+ local height = 0
+ local depth = 0
+ local skip = 0
+ local options = settings_to_hash(specification.option or "")
+ local stripbottom = specification.alternative == v_local
+ local cycle = specification.cycle or 1
+ local nofcolumns = specification.nofcolumns or 1
+ if nofcolumns == 0 then
+ nofcolumns = 1
+ end
+ local preheight = specification.preheight or 0
+ local extra = specification.extra or 0
+ local maxheight = specification.maxheight
+ local optimal = originalheight/nofcolumns
+ if specification.balance ~= v_yes then
+ optimal = maxheight
+ end
+ local target = optimal + extra
+ local overflow = target > maxheight - preheight
+ local threshold = specification.threshold or 0
+ if overflow then
+ target = maxheight - preheight
+ end
+ if trace_state then
+ report_state("cycle %s, maxheight %p, preheight %p, target %p, overflow %a, extra %p",
+ cycle, maxheight, preheight , target, overflow, extra)
+ end
+ local results = { }
+ for i=1,nofcolumns do
+ results[i] = {
+ head = false,
+ tail = false,
+ height = 0,
+ depth = 0,
+ inserts = { },
+ delta = 0,
+ }
+ end
+ local column = 1
+ local result = results[column]
+ local lasthead = nil
+ local rest = nil
+ local function gotonext()
+ if head == lasthead then
+ if trace_state then
+ report_state("empty column %s, needs more work",column)
+ end
+ rest = current
+ return false
+ else
+ lasthead = head
+ result.head = head
+ if current == head then
+ result.tail = head
+ else
+ result.tail = current.prev
+ end
+ result.height = height
+ result.depth = depth
+ end
+ head = current
+ height = 0
+ depth = 0
+ skip = 0
+ if column == nofcolumns then
+ column = 0 -- nicer in trace
+ rest = head
+ -- lasthead = head
+ return false
+ else
+ column = column + 1
+ result = results[column]
+ current = discardtopglue(current,discarded)
+ head = current
+ -- lasthead = head
+ return true
+ end
+ end
+ local function checked(advance)
+ local total = skip + height + depth + advance
+ local delta = total - target
+ if trace_detail then
+ local currentcolumn = column
+ local state
+ if delta > threshold then
+ result.delta = delta
+ if gotonext() then
+ state = "next"
+ else
+ state = "quit"
+ end
+ else
+ state = "same"
+ end
+ if trace_detail then
+ report_state("check > column %s, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
+ currentcolumn,advance,total,target,state,height,depth,skip)
+ end
+ return state
+ else
+ if delta > threshold then
+ result.delta = delta
+ if gotonext() then
+ return "next"
+ else
+ return "quit"
+ end
+ else
+ return "same"
+ end
+ end
+ end
+ current = discardtopglue(current,discarded)
+ head = current
+ while current do
+ local id = current.id
+ local nxt = current.next
+ if id == hlist_code or id == vlist_code then
+ local nxtid = nxt and nxt.id
+ local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
+ local advance = current.height -- + current.depth
+ if nxt and (nxtid == insert_code or nxtid == mark_code) then
+                nxt, inserts, currentskips, nextskips, inserttotal = collectinserts(result,nxt,nxtid) -- assign to the locals declared above
+ end
+ local state = checked(advance+inserttotal+currentskips)
+ if trace_state then
+ report_state('line > column %s, advance %p, insert %p, height %p, state %a',
+ column,advance,inserttotal,height,state)
+ end
+ if state == "quit" then
+ break
+ else
+ height = height + depth + skip + advance + inserttotal
+ if state == "next" then
+ height = height + nextskips
+ else
+ height = height + currentskips
+ end
+ end
+ depth = current.depth
+ skip = 0
+ if inserts then
+ appendinserts(result.inserts,inserts)
+ end
+ elseif id == glue_code then
+ local advance = current.spec.width
+ if advance ~= 0 then
+ local state = checked(advance)
+ if trace_state then
+ report_state('glue > column %s, advance %p, height %p, state %a',
+ column,advance,height,state)
+ end
+ if state == "quit" then
+ break
+ end
+ height = height + depth + skip
+ depth = 0
+ skip = height > 0 and advance or 0
+ end
+ elseif id == kern_code then
+ local advance = current.kern
+ if advance ~= 0 then
+ local state = checked(advance)
+ if trace_state then
+ report_state('kern > column %s, advance %p, height %p, state %a',
+ column,advance,height,state)
+ end
+ if state == "quit" then
+ break
+ end
+ height = height + depth + skip + advance
+ depth = 0
+ skip = 0
+ end
+ elseif id == penalty_code then
+ local penalty = current.penalty
+ if penalty == 0 then
+ -- don't bother
+ elseif penalty == forcedbreak then
+ if gotonext() then
+ if trace_state then
+ report_state("cycle: %s, forced column break (same page)",cycle)
+ end
+ else
+ if trace_state then
+ report_state("cycle: %s, forced column break (next page)",cycle)
+ end
+ break
+ end
+ else
+ -- todo: nobreak etc ... we might need to backtrack so we need to remember
+ -- the last acceptable break
+ -- club and widow and such i.e. resulting penalties (if we care)
+ end
+ end
+ nxt = current.next -- can have changed
+ if nxt then
+ current = nxt
+ elseif head == lasthead then
+ -- to be checked but break needed as otherwise we have a loop
+ if trace_state then
+ report_state("quit as head is lasthead")
+ end
+ break
+ else
+ local r = results[column]
+ r.head = head
+ r.tail = current
+ r.height = height
+ r.depth = depth
+ break
+ end
+ end
+ if not current then
+ if trace_state then
+ report_state("nilling rest")
+ end
+ rest = nil
+ elseif rest == lasthead then
+ if trace_state then
+ report_state("nilling rest as rest is lasthead")
+ end
+ rest = nil
+ end
+
+ if stripbottom then
+ local height = stripbottomglue(results,discarded)
+ if height > 0 then
+ target = height
+ end
+ end
+
+ specification.results = results
+ specification.height = target
+ specification.originalheight = originalheight
+ specification.originalwidth = originalwidth
+ specification.originalhead = originalhead
+ specification.targetheight = target or 0
+ specification.rest = rest
+ specification.overflow = overflow
+ specification.discarded = discarded
+
+ texbox[specification.box].head = nil
+
+ return specification
+end
+
+function mixedcolumns.finalize(result)
+ if result then
+ local results = result.results
+ for i=1,result.nofcolumns do
+ local r = results[i]
+ local h = r.head
+ if h then
+ h.prev = nil
+ local t = r.tail
+ if t then
+ t.next = nil
+ else
+ h.next = nil
+ r.tail = h
+ end
+ for c, list in next, r.inserts do
+ local t = { }
+ for i=1,#list do
+ local l = list[i]
+ local h = new_hlist()
+ t[i] = h
+ h.head = l.head
+ h.height = l.height
+ h.depth = l.depth
+ l.head = nil
+ end
+ t[1].prev = nil -- needs checking
+ t[#t].next = nil -- needs checking
+ r.inserts[c] = t
+ end
+ end
+ end
+ end
+end
+
+local splitruns = 0
+
+local function report_deltas(result,str)
+ local t = { }
+ for i=1,result.nofcolumns do
+ t[#t+1] = points(result.results[i].delta or 0)
+ end
+ report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
+end
+
+function mixedcolumns.setsplit(specification)
+ splitruns = splitruns + 1
+ if trace_state then
+ report_state("split run %s",splitruns)
+ end
+ local result = setsplit(specification)
+ if result then
+ if result.overflow then
+ if trace_state then
+ report_deltas(result,"overflow")
+ end
+ -- we might have some rest
+ elseif result.rest and specification.balance == v_yes then
+ local step = specification.step or 65536*2
+ local cycle = 1
+ local cycles = specification.cycles or 100
+ while result.rest and cycle <= cycles do
+ specification.extra = cycle * step
+ result = setsplit(specification) or result
+ if trace_state then
+ report_state("cycle: %s.%s, original height %p, total height %p",
+ splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight)
+ end
+ cycle = cycle + 1
+ specification.cycle = cycle
+ end
+ if cycle > cycles then
+ report_deltas(result,"too many balancing cycles")
+ elseif trace_state then
+ report_deltas(result,"balanced")
+ end
+ elseif trace_state then
+ report_deltas(result,"done")
+ end
+ return result
+ elseif trace_state then
+ report_state("no result")
+ end
+end
+
+local topskip_code = gluecodes.topskip
+local baselineskip_code = gluecodes.baselineskip
+
+function mixedcolumns.getsplit(result,n)
+ if not result then
+ report_state("flush, column %s, no result",n)
+ return
+ end
+ local r = result.results[n]
+ if not r then
+ report_state("flush, column %s, empty",n)
+ end
+ local h = r.head
+ if not h then
+ return new_glue(result.originalwidth)
+ end
+
+ if trace_state then
+ local id = h.id
+ if id == hlist_code then
+ report_state("flush, column %s, top line: %s",n,nodes.toutf(h.list))
+ else
+ report_state("flush, column %s, head node: %s",n,nodecodes[id])
+ end
+ end
+
+ h.prev = nil -- move up
+ local strutht = result.strutht
+ local strutdp = result.strutdp
+ local lineheight = strutht + strutdp
+
+ local v = new_vlist()
+ v.head = h
+
+ -- local v = vpack(h,"exactly",height)
+
+ v.width = result.originalwidth
+ if result.alternative == v_global then -- option
+ result.height = result.maxheight
+ end
+ v.height = lineheight * math.ceil(result.height/lineheight) - strutdp
+ v.depth = strutdp
+
+ for c, list in next, r.inserts do
+ -- tex.setbox("global",c,vpack(nodes.concat(list)))
+ -- tex.setbox(c,vpack(nodes.concat(list)))
+ texbox[c] = vpack(nodes.concat(list))
+ r.inserts[c] = nil
+ end
+ return v
+end
+
+function mixedcolumns.getrest(result)
+ local rest = result and result.rest
+ result.rest = nil -- to be sure
+ return rest
+end
+
+function mixedcolumns.getlist(result)
+ local originalhead = result and result.originalhead
+ result.originalhead = nil -- to be sure
+ return originalhead
+end
+
+function mixedcolumns.cleanup(result)
+ local discarded = result.discarded
+ for i=1,#discarded do
+ freenode(discarded[i])
+ end
+ result.discarded = { }
+end
+
+-- interface --
+
+local result
+
+function commands.mixsetsplit(specification)
+ if result then
+ for k, v in next, specification do
+ result[k] = v
+ end
+ result = mixedcolumns.setsplit(result)
+ else
+ result = mixedcolumns.setsplit(specification)
+ end
+end
+
+function commands.mixgetsplit(n)
+ if result then
+ context(mixedcolumns.getsplit(result,n))
+ end
+end
+
+function commands.mixfinalize()
+ if result then
+ mixedcolumns.finalize(result)
+ end
+end
+
+function commands.mixflushrest()
+ if result then
+ context(mixedcolumns.getrest(result))
+ end
+end
+
+function commands.mixflushlist()
+ if result then
+ context(mixedcolumns.getlist(result))
+ end
+end
+
+function commands.mixstate()
+ context(result and result.rest and 1 or 0)
+end
+
+function commands.mixcleanup()
+ if result then
+ mixedcolumns.cleanup(result)
+ result = nil
+ end
+end
diff --git a/Master/texmf-dist/tex/context/base/page-mix.mkiv b/Master/texmf-dist/tex/context/base/page-mix.mkiv
new file mode 100644
index 00000000000..a5c343fc7ee
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-mix.mkiv
@@ -0,0 +1,801 @@
+%D \module
+%D [ file=page-mix,
+%D version=2012.07.12,
+%D title=\CONTEXT\ Page Macros,
+%D subtitle=Mixed Columns,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Page Macros / Mixed Columns}
+
+%D This is a very experimental module. Eventually it will replace the current
+%D multi column mechanism (that then will be an instance). The \LUA\ part of
+%D the interface will quite probably change so don't use that one directly
+%D (yet).
+
+% todo:
+%
+% consult note class
+% notes per page
+% notes in each column
+% notes in last column
+% notes local/global
+% top and bottom inserts
+% wide floats
+% move floats
+
+% luatex buglet:
+%
+% \ctxlua{tex.setbox("global",0,node.hpack(nodes.pool.glyph("a",font.current())))}\box0
+
+\registerctxluafile{page-mix}{1.001}
+
+\unprotect
+
+%D The mixed output routine replaces the traditional multi column handler that
+%D started out in \MKII. One of the complications of such a routine is that it
+%D needs to align nicely when mixed into a single column layout. Instead of using
+%D all kinds of shift juggling in this mechanism we simply switch to grid mode
+%D locally. After all, columns don't look nice when not on a grid. As the grid
+%D snapper in \MKIV\ is more advanced, not that much extra code is needed.
+
+%D We use the command handler but the parent settings are not to be changed.
+%D Instead we could have used a dedicated root setup, but it's not worth the
+%D trouble.
+
+\installcorenamespace{mixedcolumns}
+
+\def\s!mixedcolumn{mixedcolumn}
+
+\installframedcommandhandler \??mixedcolumns {mixedcolumns} \??mixedcolumns
+
+\setupmixedcolumns
+ [\c!distance=1.5\bodyfontsize,
+ \c!n=\plustwo,
+ %\c!rule=\v!none,
+ \c!frame=\v!off,
+ \c!strut=\v!no,
+ \c!offset=\v!overlay,
+ \c!alternative=\v!local,
+ \c!maxheight=\textheight,
+ \c!maxwidth=\makeupwidth,
+ \c!grid=\v!tolerant,
+ \c!step=.25\lineheight, % needs some experimenting
+ \c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS
+
+\let\startmixedcolumns\relax % defined later
+\let\stopmixedcolumns \relax % defined later
+
+\appendtoks
+ \setuevalue{\e!start\currentmixedcolumns}{\startmixedcolumns[\currentmixedcolumns]}%
+ \setuevalue{\e!stop \currentmixedcolumns}{\stopmixedcolumns}%
+\to \everydefinemixedcolumns
+
+%D In order to avoid a mixup we use quite some local registers.
+
+\newdimen \d_page_mix_column_width
+\newdimen \d_page_mix_max_height
+\newdimen \d_page_mix_max_width
+\newdimen \d_page_mix_distance
+\newcount \c_page_mix_n_of_columns
+\newdimen \d_page_mix_threshold
+\newdimen \d_page_mix_leftskip
+\newdimen \d_page_mix_rightskip
+
+\newdimen \d_page_mix_balance_step
+\setnewconstant\c_page_mix_balance_cycles 500
+
+\setnewconstant\c_page_mix_break_forced -123
+
+\newbox \b_page_mix_preceding
+\newdimen \d_page_mix_preceding_height
+
+\newbox \b_page_mix_collected
+
+\newconstant \c_page_mix_routine
+
+\setnewconstant\c_page_mix_routine_regular \zerocount
+\setnewconstant\c_page_mix_routine_intercept\plusone
+\setnewconstant\c_page_mix_routine_continue \plustwo
+\setnewconstant\c_page_mix_routine_balance \plusthree
+\setnewconstant\c_page_mix_routine_error \plusfour
+
+%D The main environment is called as follows:
+%D
+%D \starttyping
+%D \startmixedcolumns[instance][settings]
+%D \startmixedcolumns[instance]
+%D \startmixedcolumns[settings]
+%D \stoptyping
+%D
+%D However, best is not to use this one directly but define an instance and
+%D use that one.
+
+% % For the moment only on my machine:
+%
+% \definemixedcolumns
+% [\v!columns]
+%
+% \unexpanded\def\setupcolumns
+% {\setupmixedcolumns[\v!columns]}
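+
+% A rough sketch of that recommended pattern (the instance name "twocolumns"
+% and its settings are made up):
+%
+% \definemixedcolumns
+%   [twocolumns]
+%   [n=2,
+%    balance=yes]
+%
+% \starttwocolumns
+%     \input zapf
+% \stoptwocolumns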
+
+%D In itemizations we also need columns, so let's define a special instance
+%D for them. These need to work well in situations like this:
+%D
+%D \starttyping
+%D \input zapf
+%D
+%D \startnarrower
+%D \startitemize[columns,two,packed][before=,after=]
+%D \dorecurse{10}{\startitem item #1 \stopitem}
+%D \stopitemize
+%D \stopnarrower
+%D
+%D \input zapf
+%D
+%D \startnarrower
+%D \startitemize[columns,two][before=,after=]
+%D \dorecurse{10}{\startitem item #1 \stopitem}
+%D \stopitemize
+%D \stopnarrower
+%D
+%D \input zapf
+%D
+%D \startnarrower
+%D \startitemize[columns,two]
+%D \dorecurse{10}{\startitem item #1 \stopitem}
+%D \stopitemize
+%D \stopnarrower
+%D
+%D \input zapf
+%D \stoptyping
+
+\ifdefined\s!itemgroupcolumns \else \def\s!itemgroupcolumns{itemgroupcolumns} \fi
+
+\definemixedcolumns
+ [\s!itemgroupcolumns]
+ [\c!n=\itemgroupparameter\c!n,
+ \c!rule=\v!off,
+ \c!balance=\v!yes]
+
+\unexpanded\def\strc_itemgroups_start_columns
+ {\startmixedcolumns[\s!itemgroupcolumns]} % we could have a fast one
+
+\unexpanded\def\strc_itemgroups_stop_columns
+ {\stopmixedcolumns}
+
+%D The mixed output routine can be in different states. First we need to intercept
+%D the already present content. This permits mixed single and multi column usage.
+%D Then we have the continuous routine, one that intercepts pages in sequence.
+%D Finally, when we finish the mixed columns mode, we can (optionally) balance the
+%D last page.
+
+\unexpanded\def\page_mix_command_routine
+ {\ifcase\c_page_mix_routine
+ \page_one_command_routine
+ \or
+ \page_mix_routine_intercept
+ \or
+ \page_mix_routine_continue
+ \or
+ \page_mix_routine_balance
+ \or
+ \page_mix_routine_error
+ \fi}
+
+%D The interceptor is quite simple, at least for the moment.
+
+% \def\page_mix_routine_intercept
+% {\global\setbox\b_page_mix_preceding\vbox
+% {\page_otr_command_flush_top_insertions
+% \unvbox\normalpagebox}}
+
+\def\page_mix_routine_intercept
+ {\ifdim\pagetotal>\pagegoal
+ % testcase: preceding-001 ... if we don't do this, text can disappear as
+ % preceding is overwritten ... needs to be figured out some day
+ \page_one_command_routine
+ \fi
+ \global\setbox\b_page_mix_preceding\vbox
+ {\page_otr_command_flush_top_insertions
+ \ifdim\ht\b_page_mix_preceding=\zeropoint \else
+ \writestatus\m!columns{preceding error}%
+ \unvbox\b_page_mix_preceding
+ \fi
+ \unvbox\normalpagebox}}
+
+%D The error routine is there but unlikely to be called. It is a left-over from
+%D the traditional routine that might come in handy some day.
+
+\def\page_mix_routine_error
+ {\showmessage\m!columns3\empty
+ \page_otr_construct_and_shipout\unvbox\normalpagebox}
+
+%D Some settings (and actions) depend on the current output routine and setting the
+%D hsize and vsize is among them. The calculation of the hsize is done elsewhere.
+
+\unexpanded\def\page_mix_command_set_hsize
+ {\hsize\d_page_mix_column_width
+ \textwidth\d_page_mul_used_width} % needs thinking ... grouping etc
+
+%D When setting the vsize we make sure that we collect a few more lines than needed
+%D so that we have enough to split over the columns. Collecting too much is somewhat
+%D tricky as the surplus will spill over to the next page.
+
+\unexpanded\def\page_mix_command_set_vsize
+ {\vsize\dimexpr\c_page_mix_n_of_columns\textheight+\c_page_mix_n_of_columns\lineheight\relax
+ \pagegoal\vsize}
+
+%D As we use \LUA\ there is the usual amount of tracing at that end. At the tex end
+%D we only visualize boxes.
+
+\let\page_mix_hbox\hbox
+\let\page_mix_vbox\vbox
+
+\installtextracker
+ {mixedcolumns.boxes}
+ {\let\page_mix_hbox\ruledhbox
+ \let\page_mix_vbox\ruledvbox}
+ {\let\page_mix_hbox\hbox
+ \let\page_mix_vbox\vbox}
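+
+% The boxes can then be visualized with the generic tracker interface, for
+% instance (a sketch, assuming the usual \enabletrackers command):
+%
+% \enabletrackers[mixedcolumns.boxes]
+%
+% while the \LUA\ end reports via the trackers registered in page-mix.lua:
+%
+% \enabletrackers[mixedcolumns.trace,mixedcolumns.detail]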
+
+%D We provide a few column break options. Interestingly, while forcing a new
+%D column was a pain in the traditional mechanism, here it works quite well.
+
+\installcolumnbreakmethod \s!mixedcolumn \v!preference
+ {\goodbreak}
+
+\installcolumnbreakmethod \s!mixedcolumn \v!yes
+ {\penalty\c_page_mix_break_forced\relax}
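+
+% A forced break in a mixed columns environment then looks as follows (a
+% sketch, assuming the generic \column command dispatches to these methods):
+%
+% \startmixedcolumns[n=2]
+%     \input zapf
+%     \column[yes] % force a jump to the next column
+%     \input ward
+% \stopmixedcolumns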
+
+%D As we operate in grid snapping mode, we use a dedicated macro to enable this
+%D mechanism.
+
+\def\page_mix_enable_grid_snapping
+ {\edef\p_grid{\mixedcolumnsparameter\c!grid}%
+ \ifx\p_grid\empty
+ % maybe listen to main grid setting
+ \else
+ \gridsnappingtrue
+ \setsystemmode\v!grid
+ \spac_grids_snap_value_set\p_grid
+ \fi}
+
+%D Between columns there is normally just spacing unless one enforces a rule.
+%D
+%D \starttyping
+%D \input zapf
+%D
+%D \startnarrower
+%D \startmixedcolumns[n=2,background=color,backgroundcolor=red,rulethickness=1mm,rulecolor=green,separator=rule]
+%D \input zapf
+%D \stopmixedcolumns
+%D \stopnarrower
+%D
+%D \input zapf
+%D \stoptyping
+
+\installcorenamespace{mixedcolumnsseparator}
+
+\setvalue{\??mixedcolumnsseparator\v!rule}%
+ {\starttextproperties
+ \usemixedcolumnscolorparameter\c!rulecolor
+ \vrule\s!width\mixedcolumnsparameter\c!rulethickness
+ \stoptextproperties}
+
+\unexpanded\def\page_mix_command_inject_separator
+ {\bgroup
+ \hss
+ \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname
+ \hss
+ \egroup}
+
+%D We've now arrived at the real code. The start command mostly sets up the
+%D environment and variables that are used in the splitter. One of the last
+%D things happening at the start is switching over to the mixed continuous
+%D routine.
+
+\installcorenamespace{mixedcolumnsbefore}
+\installcorenamespace{mixedcolumnsstart}
+\installcorenamespace{mixedcolumnsstop}
+\installcorenamespace{mixedcolumnsafter}
+
+%D For practical reasons there is always a first argument needed that
+%D indicates the class.
+%D
+%D \starttyping
+%D \startmixedcolumns[n=3,alternative=global]
+%D \dorecurse{200}{Zomaar wat #1 met een footnote\footnote{note #1}. }
+%D \stopmixedcolumns
+%D \stoptyping
+
+\let\currentmixedcolumnsmethod\empty
+
+\unexpanded\def\startmixedcolumns
+ {\dodoubleempty\page_mix_start_columns}
+
+\unexpanded\def\page_mix_start_columns
+ {\pushmacro\currentmixedcolumns
+ \pushmacro\currentmixedcolumnsmethod
+ \ifsecondargument
+ \singleexpandafter\page_mix_start_columns_a
+ \else\iffirstargument
+ \doubleexpandafter\page_mix_start_columns_b
+ \else
+ \doubleexpandafter\page_mix_start_columns_c
+ \fi\fi}
+
+\def\page_mix_start_columns_a[#1][#2]%
+ {\edef\currentmixedcolumns{#1}%
+ \edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
+ \mixedcolumnsparameter\c!before\relax
+ \csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
+ \begingroup
+ \setupcurrentmixedcolumns[#2]%
+ \page_mix_initialize_columns
+ \csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname}
+
+\def\page_mix_start_columns_b[#1][#2]%
+ {\doifassignmentelse{#1}%
+ {\let\currentmixedcolumns\empty
+ \page_mix_error_b}
+ {\edef\currentmixedcolumns{#1}%
+ \firstargumentfalse}%
+ \edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
+ \mixedcolumnsparameter\c!before\relax % so it doesn't listen to local settings!
+ \csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
+ \begingroup
+ \iffirstargument
+ \setupcurrentmixedcolumns[#1]%
+ \fi
+ \page_mix_initialize_columns
+ \csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname} % no \relax
+
+\def\page_mix_error_b
+ {\writestatus\m!columns{best use an instance of mixed columns}}
+
+\def\page_mix_start_columns_c[#1][#2]%
+ {\let\currentmixedcolumns\empty
+ \edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
+ \mixedcolumnsparameter\c!before\relax
+ \csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
+ \begingroup
+ \page_mix_initialize_columns
+ \csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname}
+
+\unexpanded\def\page_mix_fast_columns_start#1%
+ {\pushmacro\currentmixedcolumns
+ \pushmacro\currentmixedcolumnsmethod
+ \edef\currentmixedcolumns{#1}%
+ \edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
+ \mixedcolumnsparameter\c!before\relax % so it doesn't listen to local settings!
+ \csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
+ \begingroup
+ \page_mix_initialize_columns
+ \csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname} % no \relax
+
+%D When we stop, we switch over to the balancing routine. After we're done we
+%D make sure the sizes are set, a somewhat redundant action when we
+%D already have flushed but better be safe.
+
+\unexpanded\def\stopmixedcolumns
+ {\csname\??mixedcolumnsstop \currentmixedcolumnsmethod\endcsname % no \relax
+ \endgroup
+ \csname\??mixedcolumnsafter\currentmixedcolumnsmethod\endcsname\relax
+ \mixedcolumnsparameter\c!after\relax
+ \popmacro\currentmixedcolumnsmethod
+ \popmacro\currentmixedcolumns}
+
+% \unexpanded\def\stopmixedcolumns
+% {\csname\??mixedcolumnsstop \currentmixedcolumnsmethod\endcsname % no \relax
+% \endgroup
+% \csname\??mixedcolumnsafter\currentmixedcolumnsmethod\endcsname\relax
+% \mixedcolumnsparameter\c!after\relax
+% \ifx\currentmixedcolumnsmethod\s!otr
+% \popmacro\currentmixedcolumnsmethod
+% \popmacro\currentmixedcolumns
+% \synchronizeoutput % brrr, otherwise sometimes issues in itemize
+% \else
+% \popmacro\currentmixedcolumnsmethod
+% \popmacro\currentmixedcolumns
+% \fi
+% }
+
+\let\page_mix_fast_columns_stop\stopmixedcolumns
+
+%D This is how the fast one is used:
+
+\unexpanded\def\strc_itemgroups_start_columns
+ {\page_mix_fast_columns_start\s!itemgroupcolumns}
+
+\let\strc_itemgroups_stop_columns\page_mix_fast_columns_stop
+
+\setupmixedcolumns
+ [\s!itemgroupcolumns]
+ [\c!grid=\itemgroupparameter\c!grid]
+
+\setupitemgroups
+ [\c!grid=\v!tolerant]
+
+%D The common initialization:
+
+\def\page_mix_initialize_columns
+ {\page_mix_enable_grid_snapping
+ %
+ \d_page_mix_distance \mixedcolumnsparameter\c!distance
+ \c_page_mix_n_of_columns\mixedcolumnsparameter\c!n
+ \d_page_mix_max_height \mixedcolumnsparameter\c!maxheight
+ \d_page_mix_max_width \mixedcolumnsparameter\c!maxwidth
+ \d_page_mix_balance_step\mixedcolumnsparameter\c!step
+ %
+ \d_page_mix_max_width\dimexpr\d_page_mix_max_width-\leftskip-\rightskip\relax
+ \d_page_mix_leftskip \leftskip
+ \d_page_mix_rightskip\rightskip
+ \leftskip \zeropoint
+ \rightskip\zeropoint
+ %
+ \d_page_mix_threshold\zeropoint
+ %
+ \d_page_mix_column_width\dimexpr(\d_page_mix_max_width-\d_page_mix_distance*\numexpr(\c_page_mix_n_of_columns-\plusone)\relax)/\c_page_mix_n_of_columns\relax
+ %
+ \usemixedcolumnscolorparameter\c!color
+ %
+ \nofcolumns\c_page_mix_n_of_columns} % public
+
+%D The otr method related hooks are defined next:
+
+% \setvalue{\??mixedcolumnsbefore\s!otr}%
+% {\par
+% \ifdim\pagetotal=\zeropoint \else
+% \verticalstrut % probably no longer needed
+% \vskip-\struttotal % probably no longer needed
+% \fi}
+
+\newcount\c_page_mix_otr_nesting
+
+\setvalue{\??mixedcolumnsbefore\s!otr}%
+ {\par
+ \global\advance\c_page_mix_otr_nesting\plusone
+ \ifcase\c_page_mix_otr_nesting\or
+ \ifdim\pagetotal=\zeropoint \else
+ \obeydepth % we could handle this in pre material
+ \fi
+ \fi}
+
+\setvalue{\??mixedcolumnsstart\s!otr}%
+ {\ifcase\c_page_mix_otr_nesting\or
+ \setupoutputroutine[\s!mixedcolumn]%
+ \c_page_mix_routine\c_page_mix_routine_intercept
+ \page_otr_trigger_output_routine
+ %
+ \holdinginserts\maxdimen
+ %
+ \global\d_page_mix_preceding_height\ht\b_page_mix_preceding
+ \c_page_mix_routine\c_page_mix_routine_continue
+ %
+ \page_mix_command_set_vsize
+ \page_mix_command_set_hsize
+ \fi}
+
+\setvalue{\??mixedcolumnsstop\s!otr}%
+ {\par
+ \ifcase\c_page_mix_otr_nesting\or
+ \c_page_mix_routine\c_page_mix_routine_balance
+ \page_otr_trigger_output_routine
+ \fi}
+
+\setvalue{\??mixedcolumnsafter\s!otr}%
+ {\ifcase\c_page_mix_otr_nesting\or
+ \prevdepth\strutdp
+ \page_otr_command_set_vsize
+ \page_otr_command_set_hsize
+ \fi
+ \global\advance\c_page_mix_otr_nesting\minusone}
+
+%D The splitting and therefore balancing is done at the \LUA\ end. This gives
+%D more readable code and also makes it easier to deal with insertions like
+%D footnotes. Eventually we will have multiple strategies available.
+
+\unexpanded\def\page_mix_routine_construct#1%
+ {\ctxcommand{mixsetsplit {
+ box = \number\b_page_mix_collected,
+ nofcolumns = \number\c_page_mix_n_of_columns,
+ maxheight = \number\d_page_mix_max_height,
+ step = \number\d_page_mix_balance_step,
+ cycles = \number\c_page_mix_balance_cycles,
+ preheight = \number\d_page_mix_preceding_height,
+ prebox = \number\b_page_mix_preceding,
+ strutht = \number\strutht,
+ strutdp = \number\strutdp,
+ threshold = \number\d_page_mix_threshold,
+ balance = "#1",
+ alternative = "\mixedcolumnsparameter\c!alternative",
+ }}%
+ \deadcycles\zerocount}
+
+\unexpanded\def\page_mix_routine_package
+ {\ctxcommand{mixfinalize()}%
+ \setbox\b_page_mix_collected\vbox \bgroup
+ \ifvoid\b_page_mix_preceding \else
+ \box\b_page_mix_preceding
+ \global\d_page_mix_preceding_height\zeropoint
+ \nointerlineskip
+ \fi
+ \hskip\d_page_mix_leftskip
+ \page_mix_hbox to \d_page_mix_max_width \bgroup
+ \letmixedcolumnsparameter\c!strut\v!no
+ % maybe use \c_page_mix_used_of_columns
+ \dorecurse\c_page_mix_n_of_columns {%
+ \inheritedmixedcolumnsframed{\page_mix_command_package_column}%
+ \ifnum\recurselevel<\c_page_mix_n_of_columns
+ \page_mix_command_inject_separator
+ \fi
+ }%
+ \egroup
+ \egroup}
+
+\unexpanded\def\page_mix_command_package_column
+ {\page_mix_hbox to \d_page_mix_column_width \bgroup
+ % maybe intercept empty
+ \ctxcommand{mixgetsplit(\recurselevel)}%
+ \hskip-\d_page_mix_column_width
+ \page_mix_hbox to \d_page_mix_column_width \bgroup
+ \placenoteinserts
+ \hss
+ \egroup
+ \egroup}
+
+\unexpanded\def\page_mix_routine_continue
+ {\bgroup
+ \forgetall
+ \dontcomplain
+ \setbox\b_page_mix_collected\vbox{\unvbox\normalpagebox}% brrr we need to make a tight box (combine this in lua)
+ \page_mix_routine_construct\v!no
+ \page_mix_routine_package
+ \page_otr_construct_and_shipout\box\b_page_mix_collected
+ \ctxcommand{mixflushrest()}%
+ \ctxcommand{mixcleanup()}%
+ \egroup}
+
+\unexpanded\def\page_mix_routine_balance
+ {\bgroup
+ \forgetall
+ \dontcomplain
+ \setbox\b_page_mix_collected\vbox{\unvbox\normalpagebox}% brrr we need to make a tight box (combine this in lua)
+ \doloop
+ {%writestatus\m!columns{construct continue (\the\htdp\b_page_mix_collected)}%
+ \page_mix_routine_construct\v!no
+ \ifcase\ctxcommand{mixstate()}\relax
+ % 0 = okay, we can balance
+ \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushlist()}}% we could avoid this
+ %writestatus\m!columns{construct balance}%
+ \page_mix_routine_construct\v!yes
+ \page_mix_routine_package
+ \c_page_mix_routine\c_page_mix_routine_regular
+ % \setupoutputroutine[\s!singlecolumn]%
+ \page_otr_command_set_vsize
+ \page_otr_command_set_hsize
+ \par
+ %writestatus\m!columns{flush balance}%
+ \box\b_page_mix_collected
+ \vskip\zeropoint % triggers recalculation of page stuff (weird that this is needed but it *is* needed, see mixed-001.tex)
+ \par
+ \nointerlineskip
+ \prevdepth\strutdp
+ \ctxcommand{mixflushrest()}% rubbish
+ \ctxcommand{mixcleanup()}% rubbish
+ \exitloop
+ \or
+ % 1 = we have stuff left, so flush and rebalance
+ %writestatus\m!columns{flush continue}%
+ \page_mix_routine_package
+ \page_otr_construct_and_shipout\box\b_page_mix_collected
+ \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushrest()}}% we could avoid this
+ \ctxcommand{mixcleanup()}%
+ \ifdim\ht\b_page_mix_collected=\zeropoint
+ \exitloop
+ \fi
+ \fi}%
+ \egroup}
+
+%D We also implement a variant compatible with the so called simple columns
+%D mechanism:
+%D
+%D \starttyping
+%D \startboxedcolumns
+%D \input zapf
+%D \stopboxedcolumns
+%D \stoptyping
+%D
+%D This is a rather minimalistic variant.
+
+% Maybe we also need a variant with obeydepth before and prevdepth after so
+% that we get a nice spacing.
+
+\definemixedcolumns
+ [boxedcolumns]
+ [\c!balance=\v!yes,
+ \c!n=2,
+ \c!method=\s!box,
+ \c!strut=\v!yes,
+ \c!maxwidth=\availablehsize]
+
+%D Boxed columns can be used nested:
+%D
+%D \starttyping
+%D \setupmixedcolumns
+%D [boxedcolumns]
+%D [n=2,
+%D background=color,
+%D backgroundcolor=darkred,
+%D color=white,
+%D backgroundoffset=1mm]
+%D
+%D \definemixedcolumns
+%D [nestedboxedcolumns]
+%D [boxedcolumns]
+%D [n=2,
+%D background=color,
+%D backgroundcolor=white,
+%D color=darkred,
+%D strut=yes,
+%D backgroundoffset=0mm]
+%D
+%D \startboxedcolumns
+%D \input zapf \par \input ward \par \obeydepth
+%D \startnestedboxedcolumns
+%D \input zapf
+%D \stopnestedboxedcolumns
+%D \par \input zapf \par \obeydepth
+%D \startnestedboxedcolumns
+%D \input zapf
+%D \stopnestedboxedcolumns
+%D \par \input zapf
+%D \stopboxedcolumns
+%D \stoptyping
+
+%D Next we define the hooks:
+
+\letvalue{\??mixedcolumnsbefore\s!box}\donothing
+\letvalue{\??mixedcolumnsafter \s!box}\donothing
+
+\setvalue{\??mixedcolumnsstart\s!box}%
+ {\edef\p_page_mix_strut{\mixedcolumnsparameter\c!strut}%
+ \setbox\b_page_mix_collected\vbox\bgroup
+ \let\currentoutputroutine\s!mixedcolumn % makes \column work
+ \forgetall
+ \page_mix_command_set_hsize
+ \ifx\p_page_mix_strut\v!yes
+ \begstrut
+ \ignorespaces
+ \fi}
+
+\setvalue{\??mixedcolumnsstop\s!box}%
+ {\ifx\p_page_mix_strut\v!yes
+ \removeunwantedspaces
+ \endstrut
+ \fi
+ \egroup
+ \page_mix_box_balance}
+
+%D The related balancer is only a few lines:
+
+\unexpanded\def\page_mix_box_balance
+ {\bgroup
+ \dontcomplain
+ \page_mix_routine_construct\v!yes
+ \page_mix_routine_package
+ \dontleavehmode\box\b_page_mix_collected
+ \ctxcommand{mixflushrest()}%
+ \ctxcommand{mixcleanup()}%
+ \egroup}
+
+%D As usual, floats complicate matters and this is where experimental code
+%D starts.
+
+\let\page_mix_command_package_contents\page_one_command_package_contents
+\let\page_mix_command_flush_float_box \page_one_command_flush_float_box
+
+\unexpanded\def\page_mix_command_check_if_float_fits
+ {\ifpostponecolumnfloats
+ \global\setfalse\c_page_floats_room
+ \else\ifconditional\c_page_floats_not_permitted
+ \global\setfalse\c_page_floats_room
+ \else
+% \bgroup
+% \getcolumnstatus{\count255}{\dimen0}{\dimen2}%
+% \page_floats_get_info\s!text
+% \setbox\scratchbox\vbox % tricky with objects ?
+% {\blank[\rootfloatparameter\c!spacebefore]
+% \snaptogrid\vbox{\vskip\floatheight}}% copy?
+% \advance\dimen0\dimexpr\ht\scratchbox+2\openlineheight+.5\lineheight\relax\relax % needed because goal a bit higher
+% \ifdim\dimen0>\dimen2
+% \global\setfalse\c_page_floats_room
+% \else
+ \global\settrue\c_page_floats_room
+ \fi
+ \ifdim\floatwidth>\hsize
+ \showmessage\m!columns{11}\empty
+ \global\setfalse\c_page_floats_room
+ \fi}
+
+\unexpanded\def\page_mix_command_flush_floats
+ {\page_one_command_flush_floats}
+
+\unexpanded\def\page_mix_command_flush_saved_floats
+ {\page_one_command_flush_saved_floats}
+
+% \unexpanded\def\page_mix_command_flush_top_insertions
+% {\page_one_command_flush_top_insertions}
+
+% \unexpanded\def\page_mix_place_float_top
+% {\showmessage\m!columns4\empty\page_one_place_float_here}
+
+% \unexpanded\def\page_mix_place_float_bottom
+% {\showmessage\m!columns5\empty\page_one_place_float_here}
+
+\unexpanded\def\page_mix_place_float_here
+ {\page_one_place_float_here}
+
+\unexpanded\def\page_mix_place_float_force
+ {\page_one_place_float_force}
+
+\unexpanded\def\page_mix_command_side_float_output % hm
+ {\page_otr_construct_and_shipout\unvbox\normalpagebox}
+
+\unexpanded\def\page_mix_command_synchronize_side_floats % hm
+ {\page_sides_forget_floats}
+
+\unexpanded\def\page_mix_command_flush_side_floats
+ {\page_sides_forget_floats}
+
+\unexpanded\def\page_mix_command_next_page
+ {\page_otr_eject_page}
+
+\unexpanded\def\page_mix_command_next_page_and_inserts
+ {\page_otr_eject_page_and_flush_inserts}
+
+%D We need to hook some handlers into the output routine and we define
+%D a dedicated one:
+
+\defineoutputroutine
+ [\s!mixedcolumn]
+ [\s!page_otr_command_routine =\page_mix_command_routine,
+ \s!page_otr_command_package_contents =\page_mix_command_package_contents,
+ \s!page_otr_command_set_vsize =\page_mix_command_set_vsize,
+ \s!page_otr_command_set_hsize =\page_mix_command_set_hsize,
+ \s!page_otr_command_next_page =\page_mix_command_next_page,
+ \s!page_otr_command_next_page_and_inserts =\page_mix_command_next_page_and_inserts,
+ % \s!page_otr_command_synchronize_hsize =\page_mix_command_synchronize_hsize,
+ % \s!page_otr_command_set_top_insertions =\page_mix_command_set_top_insertions,
+ % \s!page_otr_command_set_bottom_insertions =\page_mix_command_set_bottom_insertions,
+ % \s!page_otr_command_flush_top_insertions =\page_mix_command_flush_top_insertions,
+ % \s!page_otr_command_flush_bottom_insertions=\page_mix_command_flush_bottom_insertions,
+ % \s!page_otr_command_set_float_hsize =\page_mix_command_set_float_hsize,
+ \s!page_otr_command_check_if_float_fits =\page_mix_command_check_if_float_fits,
+ \s!page_otr_command_flush_float_box =\page_mix_command_flush_float_box,
+ \s!page_otr_command_synchronize_side_floats=\page_mix_command_synchronize_side_floats,
+ \s!page_otr_command_side_float_output =\page_mix_command_side_float_output,
+ \s!page_otr_command_flush_floats =\page_mix_command_flush_floats,
+ \s!page_otr_command_flush_side_floats =\page_mix_command_flush_side_floats,
+ \s!page_otr_command_flush_saved_floats =\page_mix_command_flush_saved_floats
+ % \s!page_otr_command_flush_margin_blocks =\page_mix_command_flush_margin_blocks, % not used
+ ]
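+
+%D A minimal sketch of how such a dedicated routine is put to work (the real
+%D start command does more than this, so take it as an illustration only):
+%D
+%D \starttyping
+%D \setupoutputroutine[\s!mixedcolumn]
+%D \page_otr_command_set_vsize
+%D \page_otr_command_set_hsize
+%D \stoptyping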
+
+%D Only a few float placement options are supported:
+
+\installfloatmethod \s!mixedcolumn \v!here \page_mix_place_float_here
+\installfloatmethod \s!mixedcolumn \v!force \page_mix_place_float_force
+\installfloatmethod \s!mixedcolumn \v!top \page_mix_place_float_top
+\installfloatmethod \s!mixedcolumn \v!bottom \page_mix_place_float_bottom
+
+\installfloatmethod \s!mixedcolumn \v!local \somelocalfloat
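+
+%D So, at the user end something like the following is supposed to work. This
+%D is only a sketch: it assumes the generic \type {\startmixedcolumns}
+%D environment set up earlier in this module, and \type {\placefigure} and
+%D \type {\blackrule} are just the regular commands:
+%D
+%D \starttyping
+%D \startmixedcolumns
+%D   \input zapf
+%D   \placefigure[here]{none}{\blackrule[width=.8\hsize,height=3cm]}
+%D   \input zapf
+%D \stopmixedcolumns
+%D \stoptyping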
+
+%D It ends here.
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-mul.mkiv b/Master/texmf-dist/tex/context/base/page-mul.mkiv
index 3792f06ee25..9e0861af93c 100644
--- a/Master/texmf-dist/tex/context/base/page-mul.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-mul.mkiv
@@ -11,35 +11,21 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% SEE PAGE-MUL-NEW.MKIV
+% todo: basecolumns as parent for columns and itemize
-% BEWARE: TEMPORARY ISSUES WITH VSIZE AND HSIZE BUT WILL BE REDONE
+% !!! there are some issues with hsize and vsize as well as flushing
+% !!! floats but this module will be redone anyway
\writestatus{loading}{ConTeXt Page Macros / Simple Multi Column}
-%D This module is mostly a copy from the original multi column
-%D routine as implemented in \type {core-mul}. When the main
-%D OTR macro's were isolated in modules and column sets were
-%D introduced, this module became part of the OTR modules. As
-%D a result this module is no longer generic. It also needs
-%D an overhaul.
+%D This module is mostly a copy of the original multi column routine as
+%D implemented in \type {core-mul}. When the main OTR macros were
+%D isolated in modules and column sets were introduced, this module became
+%D part of the OTR modules. As a result this module is no longer generic.
\unprotect
-\unexpanded\def\page_mul_command_side_float_output
- {\page_otr_construct_and_shipout\unvbox\normalpagebox}
-
-\unexpanded\def\page_mul_command_flush_side_floats
- {\page_sides_forget_floats}
-
-\unexpanded\def\page_mul_command_synchronize_side_floats
- {\page_sides_forget_floats}
-
-\unexpanded\def\page_mul_command_next_page
- {\page_otr_eject_page}
-
-\unexpanded\def\page_mul_command_next_page_and_inserts
- {\page_otr_eject_page_and_flush_inserts}
+\definesystemvariable {ks} % KolomSpan
% check \count<insert> multiplications
@@ -47,7 +33,7 @@
%D implementation was based on Donald Knuth's implementation, which was adapted by
%D Craig Platt to support balancing of the last page. I gradually adapted Platt's
%D version to our needs but under certain circumstances things still went wrong. I
-%D considered all calls to Platt's \type{\balancingerror} as undesirable.
+%D considered all calls to Platt's \type{\page_mul_routine_error} as undesirable.
%D
%D This completely new implementation can handle enough situations for everyday
%D documents, but is still far from perfect. While at the moment the routine doesn't
@@ -56,7 +42,7 @@
%D \startitemize[packed]
%D \item an unlimited number of columns
%D \item ragged or not ragged bottoms
-%D \item optional balancing without \type{\balancingerrors}
+%D \item optional balancing without \type{\page_mul_routine_errors}
%D \item different \type{\baselineskips}, \type{\spacing}, \type {\topskip} and
%D \type {\maxdepth}
%D \item left- and right indentation, e.g. within lists
@@ -70,83 +56,148 @@
%D special case of multi||columns is possible but at the cost of worse float
%D handling, worse page breaking, worse etc. Complicated multi||column page handling
%D should be done in \cap{DTP}||systems anyway.
-%D
-%D There are three commands provided for entering and leaving multi||column mode and
-%D for going to the next column:
-%D
-%D \interface \type{\beginmulticolumns} \\ \\
-%D \interface \type{\endmulticolumns} \\ \\
-%D \interface \type{\ejectcolumn} \\ \\
-\def\finalcolumntextwidth {\makeupwidth}
-\def\finalcolumntextheight {\textheight}
-\def\columntextwidth {\makeupwidth}
-\def\columntextheight {\textheight}
-\def\usercolumnwidth {\textwidth}
-\def\columntextoffset {\!!zeropoint}
+\installcorenamespace {columns}
+
+\installcommandhandler \??columns {columns} \??columns
+
+%D Going to a new column is done by means of a \type {\ejectcolumn}. The
+%D following definition does not always work.
+
+ \unexpanded\def\ejectcolumn % not good enough
+ {\goodbreak
+ \showmessage\m!columns2\empty}
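+
+%D A typical use, with the proviso mentioned above that the break is not
+%D always honored (sketch):
+%D
+%D \starttyping
+%D \startcolumns[n=2]
+%D   \input zapf
+%D   \ejectcolumn
+%D   \input ward
+%D \stopcolumns
+%D \stoptyping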
-\def\fixedcolumnheight {\textheight}
-\def\betweencolumns {\hskip\bodyfontsize}
+%D The output routines plug into a more generic mechanism as can be seen at the
+%D end of this file. So, occasionally we need to define some plugin code.
-\let\setcolumnfloats \relax % in CONTEXT used for floats
-\let\flushcolumnfloats \relax % in CONTEXT used for floats
-\let\flushcolumnfloat \relax % in CONTEXT used for floats
-\let\finishcolumnbox \relax % in CONTEXT used for backgrounds
+\unexpanded\def\page_mul_place_float_top
+ {\showmessage\m!columns4\empty\page_one_place_float_here}
-%D Both macros are redefined in \CONTEXT\ when backgrounds are applied to
-%D columns. The final values are used when flushing the columns.
+\unexpanded\def\page_mul_place_float_bottom
+ {\showmessage\m!columns5\empty\page_one_place_float_here}
-%D It's more convenient to use \type {\columnwidth} instead of messing around
-%D with boxes each time.
+\unexpanded\def\page_mul_place_float_here
+ {\page_one_place_float_here}
-\newdimen\columnwidth
-\newdimen\gutterwidth
+\unexpanded\def\page_mul_place_float_force
+ {\page_one_place_float_force}
-\def\determinecolumnwidth
+\unexpanded\def\page_mul_command_side_float_output
+ {\page_otr_construct_and_shipout\unvbox\normalpagebox}
+
+\unexpanded\def\page_mul_command_flush_side_floats
+ {\page_sides_forget_floats}
+
+\unexpanded\def\page_mul_command_synchronize_side_floats
+ {\page_sides_forget_floats}
+
+\unexpanded\def\page_mul_command_next_page
+ {\page_otr_eject_page}
+
+\unexpanded\def\page_mul_command_next_page_and_inserts
+ {\page_otr_eject_page_and_flush_inserts}
+
+\let\page_mul_initialize_floats\relax
+\let\page_mul_flush_floats \relax
+\let\page_mul_flush_float \relax
+
+%D A hook:
+
+ \let\finishcolumnbox\relax
+
+%D This will change to a local one:
+
+\ifdefined\nofcolumns \else \newcount\nofcolumns \fi
+\ifdefined\mofcolumns \else \newcount\mofcolumns \fi
+
+\appendtoks
+ \nofcolumns\columnsparameter\c!n\relax
+\to \everysetupcolumns
+
+%D Columns are separated by spacing or rules or whatever suits.
+
+\installcorenamespace{columnseparators}
+
+\setvalue{\??columnseparators\v!on }{\let\page_mul_between_columns\page_mul_between_columns_rule}
+\setvalue{\??columnseparators\v!off }{\let\page_mul_between_columns\page_mul_between_columns_space}
+\setvalue{\??columnseparators }{\let\page_mul_between_columns\page_mul_between_columns_space}
+\setvalue{\??columnseparators\s!unknown}{\let\page_mul_between_columns\p_page_mul_rule}
+
+\def\page_mul_between_columns_rule
{\bgroup
- \setbox\scratchbox\hbox
- {\page_mul_command_set_hsize
- \global\columnwidth\usercolumnwidth
- \global\gutterwidth\intercolumnwidth}%
+ \starttextproperties
+ \scratchdistance\dimexpr\columnsparameter\c!distance/2\relax
+ \hskip\scratchdistance\relax
+ \vrule
+ \s!width\linewidth
+ \ifnum\bottomraggednessmode=\plustwo % baselinebottom
+ \s!depth\strutdepth
+ \fi
+ \hskip\scratchdistance\relax
+ \stoptextproperties
\egroup}
-%D Going to a new columns is done by means of a \type {\ejectcolumn}. The
-%D following definition does not always work.
+\def\page_mul_between_columns_space
+ {\hskip\columnsparameter\c!distance\relax}
-\def\ejectcolumn
- {\goodbreak
- \showmessage\m!columns2\empty}
+\let\page_mul_between_columns\page_mul_between_columns_space
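+
+%D The \type {distance} key feeds the spacing variant; as a sketch (the
+%D \type {\setupcolumns} command comes from the command handler installed
+%D above):
+%D
+%D \starttyping
+%D \setupcolumns[n=3,distance=1em]
+%D \startcolumns
+%D   \input zapf
+%D \stopcolumns
+%D \stoptyping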
-\def\balancingerror
- {\showmessage\m!columns3\empty
- \page_otr_construct_and_shipout\unvbox\normalpagebox}
+%D We declare some registers:
+
+\newdimen \d_page_mul_available_width
+\newdimen \d_page_mul_distance
+\newdimen \d_page_mul_leftskip
+\newdimen \d_page_mul_rightskip
+\newdimen \d_page_mul_offset
+\newdimen \d_page_mul_forced_height
+\newdimen \d_page_mul_used_width
+\newdimen \d_page_mul_temp
-\def\page_mul_place_float_top {\showmessage\m!columns4\empty\page_one_place_float_here}
-\def\page_mul_place_float_bottom{\showmessage\m!columns5\empty\page_one_place_float_here}
-\def\page_mul_place_float_here {\page_one_place_float_here}
-\def\page_mul_place_float_force {\page_one_place_float_force}
+\newcount \c_page_mul_balance_minimum
+\newcount \c_page_mul_n_of_lines
-%D The local column width is available in the dimension register \type
-%D {\localcolumnwidth}, which is calculated as:
+\newbox \b_page_mul_preceding
+\newdimen \d_page_mul_preceding_height
+\newdimen \d_page_mul_preceding_depth
+\newconditional\c_page_mul_preceding_present
+
+\newbox \b_page_mul_preceding_rest_of_page
+
+\newconditional\c_page_mul_reverse
+
+\newconditional\c_page_mul_trace
+
+%D The next registers report the final column height and line count .. todo
+
+ \newdimen\finalcolumnheights
+ \newcount\finalcolumnlines
+
+ \newdimen\savedpagetotal % brrr
+
+ \newif\ifstretchcolumns \stretchcolumnsfalse
+ \newif\ifheightencolumns \heightencolumnsfalse
+ \newif\ifinheritcolumns \inheritcolumnsfalse
+ \newif\ifbalancecolumns %\balancecolumnstrue
+
+%D An important one:
\unexpanded\def\page_mul_command_set_hsize % beware, this one is available for use in macros
- {\setbox\scratchbox\hbox{\parindent\zeropoint\betweencolumns}%
- \intercolumnwidth\wd\scratchbox
- \localcolumnwidth\columntextwidth
- \advance\localcolumnwidth -\leftskip
- \advance\localcolumnwidth -\rightskip
- % new
- \advance\localcolumnwidth -\colleftskip
- \advance\localcolumnwidth -\colrightskip
- %
- \advance\localcolumnwidth -\nofcolumns\intercolumnwidth
- \advance\localcolumnwidth \intercolumnwidth
- \divide \localcolumnwidth \nofcolumns
- \scratchdimen\columntextoffset
- \multiply\scratchdimen \plustwo
- \advance\localcolumnwidth -\scratchdimen
- \usercolumnwidth\localcolumnwidth
- \hsize\localcolumnwidth} % we don't do it \global
+ {\setbox\scratchbox\hbox{\page_mul_between_columns}%
+ \d_page_mul_distance\wd\scratchbox
+ \d_page_mul_available_width\dimexpr
+ \makeupwidth
+ -\d_page_mul_leftskip
+ -\d_page_mul_rightskip
+ -\nofcolumns\d_page_mul_distance
+ +\d_page_mul_distance
+ \relax
+ \d_page_mul_used_width\dimexpr
+ \d_page_mul_available_width/\nofcolumns
+ -\d_page_mul_offset*\plustwo
+ \relax
+ \textwidth\d_page_mul_used_width % needs thinking ... grouping etc
+ \hsize\d_page_mul_used_width}
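+
+%D As a worked sketch (assuming zero left- and rightskips and a zero offset):
+%D with a \type {\makeupwidth} of 40em, 3 columns and a distance of 1em the
+%D available width becomes 40em - 3em + 1em = 38em, so each column ends up a
+%D bit under 12.7em wide.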
%D Torture test:
%D
@@ -217,122 +268,79 @@
%D in||between material, these dimensions are influenced by bodyfont switches inside
%D multi||column mode.
-\newdimen\mcscratchdimen
-\newcount\nofcolumnlines
+\setnewconstant\multicolumnlinemethod\zerocount % 0=normal 1=raw
-\setnewconstant\multicolumnlinemethod\plusone % 0: overshoot (old default), 1: tight
+\def\multicolumnovershootratio{.5} % {\ifgridsnapping0\else.5\fi}
-\def\getmulticolumnlines
- {\mcscratchdimen-\columntextoffset
- \multiply\mcscratchdimen \plustwo
- \advance\mcscratchdimen \columntextheight
- \ifdim\precolumnboxheight>\zeropoint
- \advance\mcscratchdimen -\precolumnboxheight
- \fi
- \settotalinsertionheight
- \advance\mcscratchdimen -\totalinsertionheight
- \ifcase\multicolumnlinemethod \getnoflines\mcscratchdimen
- \or \getrawnoflines\mcscratchdimen
- \else \getrawnoflines\mcscratchdimen
+\unexpanded\def\page_mul_set_n_of_lines
+ {\settotalinsertionheight
+ \d_page_mul_temp\dimexpr
+ -\d_page_mul_offset*\plustwo
+ +\textheight
+ \ifdim\d_page_mul_preceding_height>\zeropoint -\d_page_mul_preceding_height \fi
+ -\totalinsertionheight
+ \relax
+ \ifcase\multicolumnlinemethod
+ \getnoflines \d_page_mul_temp \or
+ \getrawnoflines\d_page_mul_temp \else
+ \getrawnoflines\d_page_mul_temp
\fi
% added 30/7/2004
\ifnum\layoutlines>\zerocount \ifnum\noflines>\layoutlines
\noflines\layoutlines
\fi \fi
- \nofcolumnlines\noflines}
-
-\def\multicolumnovershootratio{.5} % {\ifgridsnapping0\else.5\fi}
+ \c_page_mul_n_of_lines\noflines}
\unexpanded\def\page_mul_command_set_vsize
{\page_one_command_set_vsize % indeed?
- \getmulticolumnlines
- \mcscratchdimen\nofcolumnlines\openlineheight
- \advance\mcscratchdimen \multicolumnovershootratio\openlineheight % collect enough data
- \global\vsize\nofcolumns\mcscratchdimen
- \global\pagegoal\vsize} % let's do it only here
+ \page_mul_set_n_of_lines
+ \d_page_mul_temp\nofcolumns\dimexpr
+ \c_page_mul_n_of_lines\openlineheight
+ +\multicolumnovershootratio\openlineheight % collect enough data
+ \relax
+ \global\vsize\d_page_mul_temp
+ \pagegoal \d_page_mul_temp} % let's do it only here
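+
+%D Again a worked sketch: with 2 columns, 40 lines per column and the default
+%D overshoot ratio of .5, the \type {\vsize} becomes 2 times 40.5 lines, so
+%D the main vertical list collects enough material to fill both columns before
+%D this routine kicks in.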
%D It really starts here. After some checks and initializations we change the output
%D routine to continuous multi||column mode. This mode handles columns that fill the
%D current and next full pages. The method used is (more or less) multiplying \type
%D {\vsize} and dividing \type {\hsize} by \type {\nofcolumns}. More on this can be
%D found in the \TeX book. We save the top of the current page in box
-%D \type{\precolumnbox}.
+%D \type {\b_page_mul_preceding}.
%D
%D We manipulate \type {\topskip} a bit, just to be sure that it has no
%D flexibility. This has to be done every time a font switch takes place, because
-%D \type{\topskip} can depend on this.
+%D \type {\topskip} can depend on this.
\newconstant\c_page_mul_routine
-\setnewconstant\c_page_mul_routine_regular \zerocount
-\setnewconstant\c_page_mul_routine_intercept \plusone
-\setnewconstant\c_page_mul_routine_continuous \plustwo
-\setnewconstant\c_page_mul_routine_balanced \plusthree
-\setnewconstant\c_page_mul_routine_error \plusfour
+\setnewconstant\c_page_mul_routine_regular \zerocount
+\setnewconstant\c_page_mul_routine_intercept \plusone
+\setnewconstant\c_page_mul_routine_continue \plustwo
+\setnewconstant\c_page_mul_routine_balance \plusthree
+\setnewconstant\c_page_mul_routine_error \plusfour
\unexpanded\def\page_mul_command_routine
{\ifcase\c_page_mul_routine
\page_one_command_routine
\or
- \interceptmulticolumnsout
+ \page_mul_routine_intercept
\or
- \continuousmulticolumnsout
+ \page_mul_routine_continue
\or
- \balancedmulticolumnsout
+ \page_mul_routine_balance
\or
- \balancingerror
+ \page_mul_routine_error
\fi}
-\def\interceptmulticolumnsout
- {\global\setbox\precolumnbox\vbox
- {\page_otr_command_flush_top_insertions\unvbox\normalpagebox}}
+\def\page_mul_routine_intercept
+ {\global\setbox\b_page_mul_preceding\vbox
+ {\page_otr_command_flush_top_insertions
+ \unvbox\normalpagebox}}
-\def\beginmulticolumns
- {\par
- \flushnotes
- \xdef\precolumndepth{\the\prevdepth}%
- \begingroup
- % new
- \leftskip1\leftskip
- \rightskip1\rightskip
- \edef\colleftskip {\the\leftskip}%
- \edef\colrightskip{\the\rightskip}%
- \leftskip\zeropoint
- \rightskip\zeropoint
- %
- %\setcolumntextwidth\relax
- %\setcolumntextheight\relax
- \widowpenalty\zerocount % is gewoon beter
- \clubpenalty \zerocount % zeker bij grids
- \dopushcolumnfloats
- \dimen0\dimexpr\pagetotal+\parskip+\openlineheight\relax
- \ifdim\dimen0<\pagegoal
- \allowbreak
- \else
- \break % Sometimes fails
- \fi
- \appendtoks\topskip1\topskip\to\everybodyfont
- \the\everybodyfont % ugly here
- \saveinterlinespace % ugly here
- \initializecolumns\nofcolumns
- \hangafter\zerocount
- \hangindent\zeropoint
- \everypar\emptytoks
- \ifdim\pagetotal=\zeropoint \else
- \verticalstrut
- \vskip-\struttotal
- \fi
- \global\savedpagetotal\pagetotal
- \setupoutputroutine[\s!multicolumn]%
- \c_page_mul_routine\c_page_mul_routine_intercept
- \page_otr_trigger_output_routine % no \holdinginserts=1, can make footnote disappear !
- \global\precolumnboxheight\ht\precolumnbox
- \c_page_mul_routine\c_page_mul_routine_continuous
- \setcolumnfloats
- \dohandleallcolumns{\global\setbox\currenttopcolumnbox\emptybox}%
- \checkbegincolumnfootnotes
- \page_otr_command_set_hsize
- \page_otr_command_set_vsize}
+\def\page_mul_routine_error
+ {\showmessage\m!columns3\empty
+ \page_otr_construct_and_shipout\unvbox\normalpagebox}
%D When we leave the multi||column mode, we have to process the not yet shipped out
%D part of the columns. When we don't balance, we simply force a continuous output,
@@ -343,160 +351,150 @@
%D macros. It takes some reasoning to find out what happens and maybe I'm making
%D some mistake, but it works.
%D
-%D Voiding box \type {\precolumnbox} is sometimes necessary, e.g. when there is no
+%D Voiding box \type {\b_page_mul_preceding} is sometimes necessary, e.g. when there is no
%D text given between \type {\begin..} and \type {\end..}. The \type {\par} is
%D needed!
-\setnewconstant\multicolumnendsyncmethod\plusone % 1: old sync 2: new sync (cont-loc/project) / may fail ! ! ! !
-
-\def\endmulticolumns
- {%\par
- \ifnum\multicolumnendsyncmethod=\plustwo
- \synchronizeoutput
- \else
- % don't combine these
- \vskip\lineheight
- \vskip-\lineheight % take footnotes into account
- \fi
- \doflushcolumnfloat % added recently
- %\doflushcolumnfloats % no, since it results in wrong top floats
- \flushnotes % before start of columns
- \par
- \ifbalancecolumns
- \ifnum\multicolumnendsyncmethod=\plusone
- \c_page_mul_routine\c_page_mul_routine_continuous
- \goodbreak
- \fi
- \c_page_mul_routine\c_page_mul_routine_balanced
- \else
- \goodbreak
- \fi
- % still the multi column routine
- \page_otr_trigger_output_routine % the prevdepth is important, try e.g. toclist in
- \prevdepth\zeropoint % columns before some noncolumned text text
- %
- \c_page_mul_routine\c_page_mul_routine_regular
-% \page_otf_set_engine_output_routine{\singlecolumnout}%
-% \page_otf_set_engine_output_routine{\page_otr_triggered_output_routine}%
- %
- \ifvoid\precolumnbox\else
- \unvbox\precolumnbox
- \fi
- \global\precolumnboxheight\zeropoint
- \endgroup % here
- \nofcolumns\plusone
- \page_otr_command_set_vsize % the outer vsize (NEEDS CHECKING)
- \synchronizeoutput % new may 2004 / we need to: \pagegoal\vsize
- \checkendcolumnfootnotes
- \dosomebreak\allowbreak
- \dopopcolumnfloats}
-
%D Because some initializations happen three times, we defined a macro for
%D them. Erasing \type{\everypar} is needed because we don't want anything
%D to interfere.
-\def\setmulticolumnsout
- {\everypar\emptytoks
+\unexpanded\def\page_mul_initialize_variables
+ {\reseteverypar
\dontcomplain
\settopskip
\setmaxdepth
- \topskip1\topskip
- \splittopskip\topskip
- \splitmaxdepth\maxdepth
- \boxmaxdepth\maxdepth % dangerous
- \emergencystretch\zeropoint\relax} % sometimes needed !
+ \topskip 1\topskip
+ \splittopskip \topskip
+ \splitmaxdepth \maxdepth
+ \boxmaxdepth \maxdepth % dangerous
+ \emergencystretch\zeropoint
+ \relax} % sometimes needed !
%D Flushing the page comes to pasting the columns together and appending the result
-%D to box \type {\precolumnbox}, if not void. I've seen a lot of implementations in
+%D to box \type {\b_page_mul_preceding}, if not void. I've seen a lot of implementations in
%D which some skip was put between normal text and multi||column text. When we don't
%D want this, the baselines can be messed up. I hope the seemingly complicated
%D calculation of a correction \type {\kern} is adequate to overcome this. Although
%D not watertight, spacing is taken into account and even multiple mode changes on
%D one page go well. But cross your fingers and don't blame me.
%D
-%D One of the complications of flushing out the boxes is that \type {\precolumnbox}
+%D One of the complications of flushing out the boxes is that \type {\b_page_mul_preceding}
%D needs to be \type {\unvbox}'ed, otherwise there is too little flexibility in the
%D page when using \type {\raggedbottom}. It took a lot of time before these kinds of
%D problems were overcome. Using \type {\unvbox} at the wrong moment can generate
-%D \type {\balancingerror}'s.
+%D \type {\page_mul_routine_error}'s.
%D
%D One can use the macros \type {\maxcolumnheight} and \type {\maxcolumndepth} when
%D generating material between columns as well as postprocessing column lines.
-\let\maxcolumnheight=\zeropoint
-\let\maxcolumndepth =\zeropoint
+\newdimen\maxcolumnheight
+\newdimen\maxcolumndepth
\newbox\columnpagebox
-\def\setmaxcolumndimensions
- {\let\maxcolumnheight\!!zeropoint
- \let\maxcolumndepth \!!zeropoint
- \dohandleallcolumns
- {\ifdim\ht\currentcolumnbox>\maxcolumnheight
- \edef\maxcolumnheight{\the\ht\currentcolumnbox}%
- \fi
- \ifdim\dp\currentcolumnbox>\maxcolumndepth
- \edef\maxcolumndepth{\the\dp\currentcolumnbox}%
- \fi}}
+\def\page_mul_calculate_column_result_dimensions
+ {\maxcolumnheight\zeropoint
+ \maxcolumndepth \zeropoint
+ \dohandleallcolumnscs\page_mul_calculate_column_result_dimensions_step}
+
+\def\page_mul_calculate_column_result_dimensions_step
+ {\ifdim\ht\currentcolumnbox>\maxcolumnheight
+ \maxcolumnheight\ht\currentcolumnbox
+ \fi
+ \ifdim\dp\currentcolumnbox>\maxcolumndepth
+ \maxcolumndepth\dp\currentcolumnbox
+ \fi}
\setnewconstant\multicolumntopflushmethod\plusone % 0: no correction, 1: correction when topstuff, 2: correction, 3: correction++
\setnewconstant\multicolumntopalignmethod\plustwo % 0: nothing, 1: force grid, 2: follow grid
-\def\flushprecolumnboxnogrid
- {\unvbox\precolumnbox}
-
-\def\flushprecolumnboxongrid
- {\scratchdimen\savedpagetotal
- \advance\scratchdimen -\ht\precolumnbox
- \advance\scratchdimen -\dp\precolumnbox
- \advance\scratchdimen -\topskip
- \box\precolumnbox
+\def\page_mul_flush_preceding_normal
+ {\unvbox\b_page_mul_preceding}
+
+\def\page_mul_flush_preceding_ongrid
+ {\scratchdimen\dimexpr
+ \savedpagetotal
+ -\d_page_mul_preceding_height
+ -\d_page_mul_preceding_depth
+ -\topskip
+ \relax
+ \box\b_page_mul_preceding
\kern\scratchdimen}
-\newconditional\someprecolumncontent
+\def\page_mul_flush_packaged_columns_continued
+ {\bgroup
+ \page_mul_flush_packaged_columns_indeed
+ \box\columnpagebox
+ \egroup}
-\def\flushcolumnedpage#1%
+\def\page_mul_flush_packaged_columns_balanced
{\bgroup
- \ifvoid\precolumnbox
- \setfalse\someprecolumncontent % will be set elsewhere
+ \page_mul_flush_packaged_columns_indeed
+ % messy correction, we need to rewrite this module (newcolumns)
+ \setbox\columnpagebox\vbox
+ {\offinterlineskip
+ \scratchdimen\ht\columnpagebox
+ \advance\scratchdimen\dp\columnpagebox % we probably lost that one already
+ \box\columnpagebox
+ \vskip-\scratchdimen}%
+ \scratchdimen\noflines\openlineheight
+ \advance\scratchdimen-\openstrutdepth
+ \ifgridsnapping
+ % quick hack (at least it works with itemize)
\else
- \settrue\someprecolumncontent
- \mkprocessboxcontents\precolumnbox
+ \advance\scratchdimen-\openlineheight
+ \advance\scratchdimen\topskip
+ \fi
+ \ht\columnpagebox\scratchdimen
+ \dp\columnpagebox\openstrutdepth
+ % end of mess
+ \box\columnpagebox
+ \egroup}
+
+\def\page_mul_flush_packaged_columns_indeed
+ {\ifvoid\b_page_mul_preceding
+ \setfalse\c_page_mul_preceding_present % will be set elsewhere
+ \else
+ \settrue\c_page_mul_preceding_present
+ \page_postprocessors_box\b_page_mul_preceding
\fi
\forgetall
- \setmulticolumnsout
- \setmaxcolumndimensions
+ \page_mul_initialize_variables
+ \page_mul_calculate_column_result_dimensions
+ \page_mul_postprocess_linenumbers
\dohandleallcolumns
- {\mkprocesscolumncontents\currentcolumnbox}%
+ {\page_postprocessors_column\currentcolumnbox}%
\dohandleallcolumns
{\page_marks_synchronize_column\plusone\nofcolumns\mofcolumns\currentcolumnbox}%
- \postprocesscolumns
+ \page_mul_postprocess_lines
+ \page_mul_postprocess_columns
\dohandleallcolumns
- {\global\setbox\currentcolumnbox\hbox to \localcolumnwidth
+ {\global\setbox\currentcolumnbox\hbox to \d_page_mul_used_width
{\box\currentcolumnbox}%
- \wd\currentcolumnbox\localcolumnwidth
+ \wd\currentcolumnbox\d_page_mul_used_width
\ifheightencolumns
- \ht\currentcolumnbox\fixedcolumnheight
+ \ht\currentcolumnbox\d_page_mul_forced_height
\fi}%
- \setmaxcolumndimensions
+ \page_mul_calculate_column_result_dimensions
\overlaycolumnfootnotes
\setbox\columnpagebox\vbox
- {\hbox to \finalcolumntextwidth
- {\hskip\colleftskip\relax % new, \relax needed
- \ifreversecolumns
- \@EA\dohandlerevcolumns
+ {\hbox to \makeupwidth
+ {\hskip\d_page_mul_leftskip\relax % new, \relax needed
+ \ifconditional\c_page_mul_reverse
+ \expandafter\dohandlerevcolumns
\else
- \@EA\dohandleallcolumns
+ \expandafter\dohandleallcolumns
\fi
{\finishcolumnbox
{\setbox\scratchbox\hbox
{\ifx\finishcolumnbox\relax\else\strut\fi
- \strut\box\currentcolumnbox}%
+ \box\currentcolumnbox}% hm, why strut
\anch_mark_column_box\scratchbox
\box\scratchbox}%
\hfil}%
\unskip
- \hskip\colrightskip}}% new
+ \hskip\d_page_mul_rightskip}}% new
\scratchdimen\zeropoint
\dohandleallcolumns
{\ifdim-\ht\currenttopcolumnbox<\scratchdimen
@@ -504,31 +502,31 @@
\fi
\global\setbox\currenttopcolumnbox\emptybox}%
\advance\scratchdimen \ht\columnpagebox
- \setbox\scratchbox\hbox to \columntextwidth
+ \setbox\scratchbox\hbox to \makeupwidth
{\vrule
- \!!width\zeropoint
- \!!height\scratchdimen
- \!!depth\dp\columnpagebox
- \dostepwiserecurse2\nofcolumns1{\hfil\betweencolumns}\hfil}%
+ \s!width\zeropoint
+ \s!height\scratchdimen
+ \s!depth\dp\columnpagebox
+ \dostepwiserecurse\plustwo\nofcolumns\plusone{\hfil\page_mul_between_columns}\hfil}%
\setbox\columnpagebox\hbox
{\box\columnpagebox
- \hskip-\columntextwidth
+ \hskip-\makeupwidth
\box\scratchbox}%
- \postprocesscolumnpagebox % new, acts upon \box\columnpagebox
- \ifconditional\someprecolumncontent
- \settrue\someprecolumncontent
+ \page_mul_postprocess_page
+ \ifconditional\c_page_mul_preceding_present
+ \settrue\c_page_mul_preceding_present
% next some incredible crappy code
\ifcase\multicolumntopalignmethod
- \flushprecolumnboxnogrid % not on grid
+ \page_mul_flush_preceding_normal % not on grid
\or
- \flushprecolumnboxongrid % force on grid
- \else\ifgridsnapping % somehow this junk fails in pascal
- \flushprecolumnboxongrid % obey grid settings, force on grid
+ \page_mul_flush_preceding_ongrid % force on grid
+ \else\ifgridsnapping
+ \page_mul_flush_preceding_ongrid % obey grid settings, force on grid
\else
- \flushprecolumnboxnogrid % ignore grid settings, not on grid
+ \page_mul_flush_preceding_normal % ignore grid settings, not on grid
\fi \fi
\fi
- \global\precolumnboxheight\zeropoint
+ \global\d_page_mul_preceding_height\zeropoint
\page_otr_command_set_vsize
\dosomebreak\nobreak % hm, only needed when topstuff
\ifgridsnapping
@@ -538,7 +536,7 @@
% when we run into it again
\or
% \input tufte \startcolumns \showbaselines \input tufte \stopcolumns \input tufte
- \ifconditional\someprecolumncontent
+ \ifconditional\c_page_mul_preceding_present
% \scratchdimen\topskip
% \advance\scratchdimen -\openstrutheight
% \nointerlineskip
@@ -571,29 +569,7 @@
\dp\columnpagebox\zeropoint
\global\finalcolumnheights\ht\columnpagebox
\getnoflines\finalcolumnheights
- \global\finalcolumnlines\noflines
- \ifcase#1\else
- % messy correction, we need to rewrite this module (newcolumns)
- \setbox\columnpagebox\vbox
- {\offinterlineskip
- \scratchdimen\ht\columnpagebox
- \advance\scratchdimen\dp\columnpagebox % we probably lost that one already
- \box\columnpagebox
- \vskip-\scratchdimen}%
- \scratchdimen\noflines\openlineheight
- \advance\scratchdimen-\openstrutdepth
- \ifgridsnapping
- % quick hack (at least it works with itemize)
- \else
- \advance\scratchdimen-\openlineheight
- \advance\scratchdimen\topskip
- \fi
- \ht\columnpagebox\scratchdimen
- \dp\columnpagebox\openstrutdepth
- % end of mess
- \fi
- \box\columnpagebox
- \egroup}
+ \global\finalcolumnlines\noflines}
%D In case one didn't notice, finally \type{\finishcolumnbox} is applied to
%D all boxes. One can use these hooks for special purposes.
@@ -637,51 +613,55 @@
%D
%D {\getbuffer}
%D
-%D This feature is implemented using the reshape macros
-%D presented in \type{supp-box}.
-
-\def\postprocesscolumns
- {\ifx\postprocesscolumnline\undefined \else
- \dohandleallcolumns
- {\global\setbox\currentcolumnbox\vtop
- {\beginofshapebox
- \unvbox\currentcolumnbox
- \unskip\unskip
- \endofshapebox
- \reshapebox
- {\dimen0\ht\shapebox
- \dimen2\dp\shapebox
- \setbox\shapebox\hbox to \hsize
- {\postprocesscolumnline\shapebox}%
- \ht\shapebox\dimen0
- \dp\shapebox\dimen2
- \box\shapebox}%
- \flushshapebox
- \everypar\emptytoks
- \parskip\zeropoint % = \forgetall
- \verticalstrut
- \vskip-\struttotal
- \vfil}}%
- \fi
- \ifx\postprocesscolumnbox\undefined \else
- \dohandleallcolumns
- {\global\setbox\currentcolumnbox\hbox
- {\postprocesscolumnbox\currentcolumnbox}}
- \fi}
-
-%D We default to doing nothing!
-
-\let\postprocesscolumnline =\undefined
-\let\postprocesscolumnbox =\undefined
-\let\postprocesscolumnpagebox=\relax
-
-%D \macros
-%D {reversecolumnstrue}
-%D
-%D We can force the macro that takes care of combining the columns, to
-%D flush them in the revere order. Of course, by default we don't reverse.
-
-\newif\ifreversecolumns
+%D This feature is implemented using the reshape macros presented
+%D in \type{supp-box}.
+
+\ifdefined\page_postprocessors_column\else\let\page_postprocessors_column\relax\fi % operates on passed box
+\ifdefined\postprocesscolumnline \else\let\postprocesscolumnline \relax\fi % operates on passed box
+\ifdefined\postprocesscolumnbox \else\let\postprocesscolumnbox \relax\fi % operates on passed box
+\ifdefined\postprocesscolumnpagebox \else\let\postprocesscolumnpagebox \relax\fi % operates on passed box
+
+\def\page_mul_postprocess_linenumbers{\ifx\page_postprocessors_column\relax\else\page_mul_postprocess_linenumbers_indeed\fi}
+\def\page_mul_postprocess_lines {\ifx\postprocesscolumnline \relax\else\page_mul_postprocess_lines_indeed \fi}
+\def\page_mul_postprocess_columns {\ifx\postprocesscolumnbox \relax\else\page_mul_postprocess_columns_indeed \fi}
+\def\page_mul_postprocess_page {\ifx\postprocesscolumnpagebox \relax\else\page_mul_postprocess_page_indeed \fi}
+
+\def\page_mul_postprocess_linenumbers_indeed
+ {\dohandleallcolumns{\page_postprocessors_column\currentcolumnbox}}
+
+\def\page_mul_postprocess_lines_indeed
+ {\dohandleallcolumnscs\page_mul_postprocess_lines_step}
+
+\def\page_mul_postprocess_lines_step % TODO: use lua solution instead
+ {\global\setbox\currentcolumnbox\vtop
+ {\beginofshapebox
+ \unvbox\currentcolumnbox
+ \unskip\unskip
+ \endofshapebox
+ \reshapebox
+ {\scratchheight\ht\shapebox
+ \scratchdepth \dp\shapebox
+ \setbox\shapebox\hbox to \hsize
+ {\postprocesscolumnline\shapebox}%
+ \ht\shapebox\scratchheight
+ \dp\shapebox\scratchdepth
+ \box\shapebox}%
+ \flushshapebox
+ \reseteverypar
+ \parskip\zeropoint % = \forgetall
+ \verticalstrut
+ \vskip-\struttotal
+ \vfil}}
+
+\def\page_mul_postprocess_columns_indeed
+ {\dohandleallcolumnscs\page_mul_postprocess_columns_step}
+
+\def\page_mul_postprocess_columns_step
+ {\global\setbox\currentcolumnbox\hbox
+ {\postprocesscolumnbox\currentcolumnbox}}
+
+\def\page_mul_postprocess_page_indeed
+ {\postprocesscolumnpagebox\columnpagebox}
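+
+%D A minimal, purely illustrative definition of one such hook: it receives a
+%D box register and has to typeset it again (here each line is simply flushed
+%D flush left):
+%D
+%D \starttyping
+%D \def\postprocesscolumnline#1{\box#1\hss}
+%D \stoptyping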
%D Here comes the simple splitting routine. It's a bit longer than expected because
%D of ragging bottoms or not. This part can be a bit shorter but I suppose that I
@@ -691,46 +671,48 @@
%D First we present some auxiliary routines. Any material, like for instance floats,
%D that is already present in the boxes is preserved.
-\def\splitcolumn#1from \box#2to \dimen#3 top \box#4%
+\newdimen\d_page_mul_split_height_used
+\newdimen\d_page_mul_split_height_max
+
+\def\page_mul_split_column#1#2#3#4% copy or box
{\bgroup
\ifdim\ht#4>\zeropoint
- \dimen0\dimen#3\relax
- \dimen2\dimen0
- \advance\dimen0 -\ht#4%
- \columnfootnotecorrection{#1}{\dimen0}%
- \setbox0\vsplit#2 to \dimen0
- \global\setbox#1\vbox to \dimen2
+ \d_page_mul_split_height_used#3\relax
+ \d_page_mul_split_height_max\d_page_mul_split_height_used
+ \advance\d_page_mul_split_height_used -\ht#4%
+ \columnfootnotecorrection{#1}\d_page_mul_split_height_used
+ \setbox\scratchbox\vsplit#2 to \d_page_mul_split_height_used
+ \global\setbox#1\vbox to \d_page_mul_split_height_max
{\ifgridsnapping
- \dimen0-\openstrutheight
- \advance\dimen0 \topskip
- \vskip\dimen0\copy#4\vskip-\dimen0
+ \scratchdimen\dimexpr\topskip-\openstrutheight\relax
+ \vskip\scratchdimen
+ \copy#4%
+ \vskip-\scratchdimen
\else
\unvcopy#4%
\fi
- \fuzzysnappedbox\unvbox0
+ \fuzzysnappedbox\unvbox\scratchbox
\fakecolumnfootnotes{#1}}%
+ \else\ifcase\c_strc_notes_page_location
+ \global\setbox#1\vsplit#2 to #3%
+ \global\setbox#1\vbox
+ {\fuzzysnappedbox\unvbox{#1}}% % or \box ?
\else
- \ifcase\c_strc_notes_page_location
- \global\setbox#1\vsplit#2 to \dimen#3%
- \global\setbox#1\vbox
- {\fuzzysnappedbox\unvbox{#1}}% % or \box ?
- \else
- \columnfootnotecorrection{#1}{\dimen#3}%
- \setbox0\vsplit#2 to \dimen#3%
- \global\setbox#1\vbox to \dimen#3%
- {\fuzzysnappedbox\unvbox0
- \fakecolumnfootnotes{#1}}%
- \fi
- \fi
+ \columnfootnotecorrection{#1}{#3}%
+ \setbox\scratchbox\vsplit#2 to #3%
+ \global\setbox#1\vbox to #3%
+ {\fuzzysnappedbox\unvbox\scratchbox
+ \fakecolumnfootnotes{#1}}%
+ \fi \fi
\egroup}
-\def\splitcurrentcolumn from \box#1to \dimen#2%
- {\splitcolumn\currentcolumnbox from \box#1 to \dimen#2 top \box\currenttopcolumnbox}
+\def\page_mul_split_current_column#1#2%
+ {\page_mul_split_column\currentcolumnbox{#1}{#2}\currenttopcolumnbox}
-\def\splitfirstcolumn from \box#1to \dimen#2%
- {\splitcolumn\firstcolumnbox from \box#1 to \dimen#2 top \box\firsttopcolumnbox}
+\def\page_mul_split_first_column#1#2%
+ {\page_mul_split_column\firstcolumnbox{#1}{#2}\firsttopcolumnbox}
-\def\splitlastcolumn from \box#1to \dimen#2%
+\def\page_mul_split_last_column#1#2%
{\global\setbox\lastcolumnbox\vbox
{\unvcopy\lasttopcolumnbox
\fuzzysnappedbox\unvbox{#1}%
@@ -738,128 +720,130 @@
%D NEW: still to be documented.
-\def\fakecolumnfootnotes#1%
- {\relax
- \ifcase\c_strc_notes_page_location\else
- \ifnum#1=\lastcolumnbox
- \fakenotes
- \fi
- \fi}
-
-\def\columnfootnotecorrection#1#2%
- {\relax
- \ifcase\c_strc_notes_page_location
- % page notes
- \or
- \ifnum#1=\firstcolumnbox\relax
- \calculatetotalclevernoteheight
- \advance#2 -\totalnoteheight
- \fi
- \else
- \ifnum#1=\lastcolumnbox\relax
- \calculatetotalclevernoteheight
- \advance#2 -\totalnoteheight
- \fi
- \fi}
-
-\def\overlaycolumnfootnotes
- {\relax
- \ifcase\c_strc_notes_page_location
- % page
- \or
- \checknotepresence \ifnotespresent \page_mul_notes_flush_first_column \fi
- \or
- \checknotepresence \ifnotespresent \page_mul_notes_flush_last_column \fi
- \fi}
-
-\newbox\b_page_mul_notes
-
-\def\page_mul_notes_flush_first_column
- {\begingroup
- \setbox\b_page_mul_notes\vbox{\placenoteinserts}%
- \ifzeropt\ht\b_page_mul_notes
- % can't happen as we already checked
- \else
- \getmulticolumnlines
- \advance\nofcolumnlines \minustwo
- \scratchdimen\dimexpr\nofcolumnlines\lineheight+\topskip\relax
- \setbox\b_page_mul_notes\hbox{\lower\scratchdimen\box\b_page_mul_notes}%
- \ht\b_page_mul_notes\openstrutheight
- \dp\b_page_mul_notes\openstrutdepth
- \wd\b_page_mul_notes\zeropoint
- \scratchdimen\ht\firstcolumnbox
- \global\setbox\firstcolumnbox\vbox to \scratchdimen
- {\box\firstcolumnbox
- \vskip-\scratchdimen
- \box\b_page_mul_notes}%
- \fi
- \endgroup}
+ \def\fakecolumnfootnotes#1%
+ {\relax
+ \ifcase\c_strc_notes_page_location\else
+ \ifnum#1=\lastcolumnbox
+ \fakenotes
+ \fi
+ \fi}
+
+ \def\columnfootnotecorrection#1#2%
+ {\relax
+ \ifcase\c_strc_notes_page_location
+ % page notes
+ \or
+ \ifnum#1=\firstcolumnbox\relax
+ \calculatetotalclevernoteheight
+ \advance#2 -\totalnoteheight
+ \fi
+ \else
+ \ifnum#1=\lastcolumnbox\relax
+ \calculatetotalclevernoteheight
+ \advance#2 -\totalnoteheight
+ \fi
+ \fi}
+
+ \def\overlaycolumnfootnotes
+ {\relax
+ \ifcase\c_strc_notes_page_location
+ % page
+ \or
+ \checknotepresence \ifnotespresent \page_mul_notes_flush_first_column \fi
+ \or
+ \checknotepresence \ifnotespresent \page_mul_notes_flush_last_column \fi
+ \fi}
+
+ \newbox\b_page_mul_notes
+
+ \def\page_mul_notes_flush_first_column
+ {\begingroup
+ \setbox\b_page_mul_notes\vbox{\placenoteinserts}%
+ \ifzeropt\ht\b_page_mul_notes
+ % can't happen as we already checked
+ \else
+ \page_mul_set_n_of_lines
+ \advance\c_page_mul_n_of_lines \minustwo
+ \scratchdimen\dimexpr\c_page_mul_n_of_lines\lineheight+\topskip\relax
+ \setbox\b_page_mul_notes\hbox{\lower\scratchdimen\box\b_page_mul_notes}%
+ \ht\b_page_mul_notes\openstrutheight
+ \dp\b_page_mul_notes\openstrutdepth
+ \wd\b_page_mul_notes\zeropoint
+ \scratchdimen\ht\firstcolumnbox
+ \global\setbox\firstcolumnbox\vbox to \scratchdimen
+ {\box\firstcolumnbox
+ \vskip-\scratchdimen
+ \box\b_page_mul_notes}%
+ \fi
+ \endgroup}
-\def\page_mul_notes_flush_last_column
- {\begingroup
- \setbox\b_page_mul_notes\vbox{\placenoteinserts}%
- \ifzeropt\ht\b_page_mul_notes
- % can't happen as we already checked
- \else
- % maybe here also \getmulticolumnlines
- \scratchdimen\dimexpr\ht\firstcolumnbox-\openstrutdepth\relax % \strutdp
- \getnoflines\scratchdimen
- \advance\noflines \minustwo
- \scratchdimen\dimexpr\noflines\lineheight+\topskip\relax
- \setbox\b_page_mul_notes\hbox{\lower\scratchdimen\box\b_page_mul_notes}%
- \ht\b_page_mul_notes\openstrutheight
- \dp\b_page_mul_notes\openstrutdepth
- \wd\b_page_mul_notes\zeropoint
- \scratchdimen\ht\lastcolumnbox
- \global\setbox\lastcolumnbox\vbox to \scratchdimen
- {\box\lastcolumnbox
- \vskip-\scratchdimen
- \box\b_page_mul_notes}%
- \fi
- \endgroup}
+ \def\page_mul_notes_flush_last_column
+ {\begingroup
+ \setbox\b_page_mul_notes\vbox{\placenoteinserts}%
+ \ifzeropt\ht\b_page_mul_notes
+ % can't happen as we already checked
+ \else
+ % maybe here also \page_mul_set_n_of_lines
+ \scratchdimen\dimexpr\ht\firstcolumnbox-\openstrutdepth\relax % \strutdp
+ \getnoflines\scratchdimen
+ \advance\noflines \minustwo
+ \scratchdimen\dimexpr\noflines\lineheight+\topskip\relax
+ \setbox\b_page_mul_notes\hbox{\lower\scratchdimen\box\b_page_mul_notes}%
+ \ht\b_page_mul_notes\openstrutheight
+ \dp\b_page_mul_notes\openstrutdepth
+ \wd\b_page_mul_notes\zeropoint
+ \scratchdimen\ht\lastcolumnbox
+ \global\setbox\lastcolumnbox\vbox to \scratchdimen
+ {\box\lastcolumnbox
+ \vskip-\scratchdimen
+ \box\b_page_mul_notes}%
+ \fi
+ \endgroup}
%D Here comes the routine that splits the long box in columns. The macro \type
-%D {\flushcolumnfloats} can be used to flush either floats that were present before
+%D {\page_mul_flush_floats} can be used to flush either floats that were present before
%D the multi||column mode was entered, or floats that migrate to next columns.
%D Flushing floats is a delicate process.
-\def\continuousmulticolumnsout
+\def\page_mul_routine_continue
{\bgroup
\forgetall
- \setmulticolumnsout
-% \dimen0=\columntextheight
-% \advance\dimen0 -\precolumnboxheight
-% \settotalinsertionheight
-% \advance\dimen0 -\totalinsertionheight
-% \ifgridsnapping % evt altijd, nog testen
-% \getnoflines{\dimen0}
-% \dimen0=\noflines\openlineheight
-% \fi
- \getmulticolumnlines
- \dimen0=\nofcolumnlines\openlineheight
+ \page_mul_initialize_variables
+ % \dimen0=\makeupheight
+ % \advance\dimen0 -\d_page_mul_preceding_height
+ % \settotalinsertionheight
+ % \advance\dimen0 -\totalinsertionheight
+ % \ifgridsnapping % possibly always, still to be tested
+ % \getnoflines{\dimen0}
+ % \dimen0=\noflines\openlineheight
+ % \fi
+ \page_mul_set_n_of_lines
+ \d_page_mul_balance_target\c_page_mul_n_of_lines\openlineheight
+ \ifconditional\c_page_mul_trace
+ \writestatus\m!columns{continue: lines=\the\c_page_mul_n_of_lines, target=\the\d_page_mul_balance_target, textheight=\the\textheight}%
+ \fi
\dohandleallcolumns
- {\splitcurrentcolumn from \box\normalpagebox to \dimen0}%
- \setbox\restofpage\vbox{\unvbox\normalpagebox}%
+ {\page_mul_split_current_column\normalpagebox\d_page_mul_balance_target}%
+ \setbox\b_page_mul_preceding_rest_of_page\vbox{\unvbox\normalpagebox}%
\ifinheritcolumns
\ifcase\bottomraggednessmode
% 0 = ragged
\dohandleallcolumns
{\global\setbox\currentcolumnbox\vbox to \ht\firstcolumnbox
- {\dimen0\dp\currentcolumnbox
+ {\scratchdepth\dp\currentcolumnbox
\unvbox\currentcolumnbox
- \vskip-\dimen0
- \vskip\openstrutdepth % \strutdp
+ \vskip\dimexpr\openstrutdepth-\scratchdepth\relax
\prevdepth\openstrutdepth % \strutdp
\vfill}}%
\strc_notes_check_if_bottom_present
- \ifconditional\c_notes_bottom_present \else
- \dimen0\ht\firstcolumnbox
- \fi
+ %\ifconditional\c_notes_bottom_present \else
+ % \dimen0\ht\firstcolumnbox % ??
+ %\fi
\or
% 1 = normal
- \advance\dimen0 \maxdepth
+ \advance\d_page_mul_balance_target\maxdepth
\dohandleallcolumns
- {\global\setbox\currentcolumnbox\vbox to \dimen0
+ {\global\setbox\currentcolumnbox\vbox to \d_page_mul_balance_target
{\unvbox\currentcolumnbox}}%
\or
% 2 = baseline
@@ -867,7 +851,7 @@
\fi
\else
\dohandleallcolumns
- {\global\setbox\currentcolumnbox\vbox to \dimen0
+ {\global\setbox\currentcolumnbox\vbox to \d_page_mul_balance_target
{\ifstretchcolumns
\unvbox\currentcolumnbox
\else
@@ -875,14 +859,14 @@
\vfill
\fi}}%
\dohandleallcolumns
- {\ht\currentcolumnbox\dimen0}% redundant
+ {\ht\currentcolumnbox\d_page_mul_balance_target}% redundant
\fi
- \setbox\precolumnbox\vbox{\flushcolumnedpage\zerocount}%
- \page_otr_construct_and_shipout\box\precolumnbox
+ \setbox\b_page_mul_preceding\vbox{\page_mul_flush_packaged_columns_continued}%
+ \page_otr_construct_and_shipout\box\b_page_mul_preceding
\page_otr_command_set_hsize
\page_otr_command_set_vsize
- \flushcolumnfloats
- \unvbox\restofpage
+ \page_mul_flush_floats
+ \unvbox\b_page_mul_preceding_rest_of_page
% \penalty\outputpenalty % goes horribly wrong in itemizations
\egroup}
@@ -892,161 +876,184 @@
%D balance columns with few lines. The use of \type {\box2} and \type {\box4}
%D guarantees a more robust check when skips are used.
-\def\multicolumnsbalancemax{250} % 100 is too small when floats are involved
-
-\def\balancedmulticolumnsout
+\newbox \b_page_mul_balance_content
+\newbox \b_page_mul_balance_first_column
+\newbox \b_page_mul_balance_column
+\newconstant \c_page_mul_balance_tries_max
+\newcount \c_page_mul_balance_tries
+\newdimen \d_page_mul_balance_target
+\newdimen \d_page_mul_balance_target_less
+\newdimen \d_page_mul_balance_natural_height
+\newdimen \d_page_mul_balance_regular_height
+\newdimen \d_page_mul_balance_step
+\newdimen \d_page_mul_balance_fuzzyness
+\newdimen \d_page_mul_balance_threshold
+\newconditional\c_page_mul_balance_possible
+
+\c_page_mul_balance_tries_max 250 % 100 is too small when floats are involved
+
+\def\page_mul_routine_balance
{\bgroup
- \setmulticolumnsout
+ \page_mul_initialize_variables
\widowpenalty\zerocount
- \setbox0\vbox{\unvbox\normalpagebox}%
- \ifdim\ht0>\openlineheight % at least one line
- \ifnum\minbalancetoplines<2 % balance anyway
- \donetrue
+ \setbox\b_page_mul_balance_content\vbox{\unvbox\normalpagebox}%
+ \ifdim\ht\b_page_mul_balance_content>\openlineheight % at least one line
+ \ifnum\c_page_mul_balance_minimum<\plustwo % balance anyway
+ \settrue\c_page_mul_balance_possible
\else % check criterium to available lines
- \getnoflines{\ht0}%
+ \getnoflines{\ht\b_page_mul_balance_content}%
\divide\noflines \nofcolumns \relax
- \ifnum\noflines<\minbalancetoplines \relax
- % let's play safe
- \ifdim\dimexpr\ht0+\ht\firsttopcolumnbox+\openlineheight\relax>\columntextheight
- \donetrue % column exceeding text height
+ \ifnum\noflines<\c_page_mul_balance_minimum \relax
+ \ifdim\dimexpr\ht\b_page_mul_balance_content+\ht\firsttopcolumnbox+\openlineheight\relax>\makeupheight
+ \settrue\c_page_mul_balance_possible % column exceeding text height
\else
- \donefalse % it seems to fit
+ \setfalse\c_page_mul_balance_possible % it seems to fit
\fi
- \else % balance indeed
- \donetrue
+ \else
+ \settrue\c_page_mul_balance_possible % balance indeed
\fi
\fi
- \else % balancing does not make sense
- \donefalse
+ \else
+ \setfalse\c_page_mul_balance_possible % balancing does not make sense
\fi
- \ifdone % start balancing, was: \ifdim\ht0>\openlineheight
- \dimen0\ht0
- \advance\dimen0 \topskip
- \advance\dimen0 -\baselineskip
- \dohandleallcolumns
- {\advance\dimen0 \ht\currenttopcolumnbox}%
- \divide\dimen0 \nofcolumns
- \vbadness\plustenthousand
- \count255=\zerocount
- \bgroup
- \ifgridsnapping
- \dimen2\lineheight
- \else
- \dimen2=\onepoint % RUBISH
- \dimen2=\spacingfactor\dimen2
- \fi
- \doloop
- {\advance\count255 \plusone
- \global\setbox\restofpage\copy0\relax
- \splitfirstcolumn from \box\restofpage to \dimen0
- \dohandlemidcolumns
- {\splitcurrentcolumn from \box\restofpage to \dimen0}%
- \splitlastcolumn from \box\restofpage to \dimen0
- \setbox2\vbox{\unvcopy\firstcolumnbox}%
- \dimen4\zeropoint
- \dohandleallcolumns
- {\setbox4\vbox
- {\unvcopy\currentcolumnbox
- %rather new, test this on pdftex-z.tex
- \unpenalty\unskip\unpenalty\unskip}% maybe better in main splitter
- %\writestatus{balance}{\the\currentcolumnbox: \the\ht4}%
-% \dimen6\ht4 \ifdim\dimen6>\dimen4 \dimen4=\dimen6 \fi}%
- \ifdim\ht4>\dimen4 \dimen4=\ht4 \fi}%
- \advance\dimen4 -.0005pt % get rid of accurracy problem, pretty new
- \ifnum\count255>\multicolumnsbalancemax\relax
- \exitloop
- \else\ifdim\dimen4>\ht2
- \advance\dimen0 \dimen2\relax
- \else
- \exitloop
- \fi\fi}%
- \dohandleallcolumns
- {\global\setbox\currentcolumnbox\vbox{\unvcopy\currentcolumnbox}}% NIEUW
- \ifnum\count255>\multicolumnsbalancemax\relax
- \showmessage\m!columns7\empty
- \else
- \showmessage\m!columns8{\the\count255\space}%
- \fi
- \egroup
+ \ifconditional\c_page_mul_balance_possible % start balancing, was: \ifdim\ht\b_page_mul_balance_content>\openlineheight
+ \page_mul_balance_try_one
\ifinheritcolumns
- % We cannot assume that the first column is the tallest, if
- % only because we may have an aborted balance (one line in the
- % first column and a graphic in the second one).
- %
- % \dimen0\ht\firstcolumnbox
- % \dimen2\ht\firstcolumnbox
- %
- \dimen0=\zeropoint
- \dohandleallcolumns
- {\ifdim\ht\currentcolumnbox>\dimen0
- \dimen0=\ht\currentcolumnbox
- \fi}%
- \dimen2\dimen0
- % so far
- \advance\dimen2 -\openlineheight
- \dohandleallcolumns
- {\dimen4\ht\currentcolumnbox
- \dimen6=10\openlineheight % funny value
- \global\setbox\currentcolumnbox\vbox to \dimen0
- {\unvbox\currentcolumnbox
- \ifdim\dimen4>\dimen6
- \ifdim\dimen4<\dimen0
- \ifdim\dimen4>\dimen2
- \vskip\zeropoint % !!
- \else
- \vskip\openlineheight
- \vfill
- \fi
- \else
- \vskip\zeropoint
- \fi
- \else
- \vskip\openlineheight
- \vfill
- \fi}}%
+ \page_mul_balance_try_two
\else
- \bgroup
- \ifstretchcolumns
- \dimen0\ht\firstcolumnbox
- \dimen2=\bottomtolerance\ht\firstcolumnbox
- \setbox0\vbox{\unvcopy\lastcolumnbox}%
- \advance\dimen0 -\ht0\relax
- \advance\dimen0 -\dp0\relax
- \ifdim\dimen0>\openlineheight\relax
- \ifdim\dimen0>\dimen2\relax
- % \stretchcolumnsfalse % beter goed slecht dan slecht goed
- \showmessage\m!columns9\empty
- \fi
- \fi
- \fi
- \dohandleallcolumns
- {\global\setbox\currentcolumnbox\vbox to \ht\firstcolumnbox
- {\ifstretchcolumns
- \unvbox\currentcolumnbox
- \else
- \box\currentcolumnbox
- \vfill
- \fi}}%
- \egroup
+ \page_mul_balance_try_three
\fi
\else
% a one liner is not properly handled here, so best rewrite the text then
\showmessage\m!columns{10}\empty
- \global\setbox\firstcolumnbox\vbox{\unvbox0}%
+ \global\setbox\firstcolumnbox\vbox{\unvbox\b_page_mul_balance_content}%
\fi
\c_page_mul_routine\c_page_mul_routine_error
\baselinebottom % forces depth in separation rule
- \flushcolumnedpage\plusone
- \multicolumnseject
+ \page_mul_flush_packaged_columns_balanced
+ \page_mul_eject_page
\egroup}
-\def\multicolumnseject
+\def\page_mul_eject_page
{%\ifdim\pagetotal>\textheight
% \page_otr_trigger_output_routine % new, but wrong as fails on mixed-001.tex (wrong pagetotal at this point)
%\else
\allowbreak
}%\fi}
+\def\page_mul_balance_try_one
+ {\d_page_mul_balance_target\dimexpr\ht\b_page_mul_balance_content+\topskip-\baselineskip\relax
+ \dohandleallcolumns
+ {\advance\d_page_mul_balance_target \ht\currenttopcolumnbox}%
+ \divide\d_page_mul_balance_target \nofcolumns
+ \vbadness\plustenthousand
+ \c_page_mul_balance_tries\zerocount
+ \bgroup
+ \ifgridsnapping
+ \d_page_mul_balance_step\lineheight
+ \else
+ \d_page_mul_balance_step\spacingfactor\onepoint % rubbish
+ \fi
+ \doloop\page_mul_balance_try_one_attempt
+ \dohandleallcolumns
+ {\global\setbox\currentcolumnbox\vbox{\unvcopy\currentcolumnbox}}%
+ \ifnum\c_page_mul_balance_tries>\c_page_mul_balance_tries_max\relax
+ \showmessage\m!columns7\empty
+ \else
+ \showmessage\m!columns8{\the\c_page_mul_balance_tries\space}%
+ \fi
+ \egroup}
+
+\def\page_mul_balance_try_one_attempt
+ {\advance\c_page_mul_balance_tries \plusone
+ \global\setbox\b_page_mul_preceding_rest_of_page\copy\b_page_mul_balance_content\relax
+ \page_mul_split_first_column\b_page_mul_preceding_rest_of_page\d_page_mul_balance_target
+ \dohandlemidcolumns
+ {\page_mul_split_current_column\b_page_mul_preceding_rest_of_page\d_page_mul_balance_target}%
+ \page_mul_split_last_column\b_page_mul_preceding_rest_of_page\d_page_mul_balance_target
+ \setbox\b_page_mul_balance_first_column\vbox{\unvcopy\firstcolumnbox}%
+ \d_page_mul_balance_natural_height\zeropoint
+ \dohandleallcolumns\page_mul_balance_try_one_attempt_step
+ \advance\d_page_mul_balance_natural_height -.0005pt % (33sp) get rid of accuracy problem, pretty new
+ \ifnum\c_page_mul_balance_tries>\c_page_mul_balance_tries_max\relax
+ \exitloop
+ \else\ifdim\d_page_mul_balance_natural_height>\ht\b_page_mul_balance_first_column
+ \advance\d_page_mul_balance_target \d_page_mul_balance_step\relax
+ \else
+ \exitloop
+ \fi\fi}
+
+\def\page_mul_balance_try_one_attempt_step
+ {\setbox\b_page_mul_balance_column\vbox
+ {\unvcopy\currentcolumnbox
+ \unpenalty
+ \unskip
+ \unpenalty
+ \unskip}% maybe better in main splitter
+ \ifdim\ht\b_page_mul_balance_column>\d_page_mul_balance_natural_height
+ \d_page_mul_balance_natural_height\ht\b_page_mul_balance_column
+ \fi}
+
+% We cannot assume that the first column is the tallest, if only because we may
+% have an aborted balance (one line in the first column and a graphic in the
+% second one).
+
+\def\page_mul_balance_try_two
+ {\d_page_mul_balance_target\zeropoint
+ \dohandleallcolumns
+ {\ifdim\ht\currentcolumnbox>\d_page_mul_balance_target
+ \d_page_mul_balance_target\ht\currentcolumnbox
+ \fi}%
+ \d_page_mul_balance_target_less\dimexpr\d_page_mul_balance_target-\openlineheight\relax
+ \dohandleallcolumnscs\page_mul_balance_try_two_step}
+
+\def\page_mul_balance_try_two_step
+ {\d_page_mul_balance_regular_height\ht\currentcolumnbox
+ \d_page_mul_balance_threshold\plusten\openlineheight % funny value
+ \global\setbox\currentcolumnbox\vbox to \d_page_mul_balance_target
+ {\unvbox\currentcolumnbox
+ \ifdim\d_page_mul_balance_regular_height>\d_page_mul_balance_threshold
+ \ifdim\d_page_mul_balance_regular_height<\d_page_mul_balance_target
+ \ifdim\d_page_mul_balance_regular_height>\d_page_mul_balance_target_less
+ \vskip\zeropoint % !!
+ \else
+ \vskip\openlineheight
+ \vfill
+ \fi
+ \else
+ \vskip\zeropoint
+ \fi
+ \else
+ \vskip\openlineheight
+ \vfill
+ \fi}}
+
+\def\page_mul_balance_try_three
+ {\bgroup
+ \ifstretchcolumns
+ \d_page_mul_balance_target\ht\firstcolumnbox
+ \d_page_mul_balance_fuzzyness\bottomtolerance\ht\firstcolumnbox
+ \setbox\b_page_mul_balance_content\vbox{\unvcopy\lastcolumnbox}%
+ \advance\d_page_mul_balance_target-\htdp\b_page_mul_balance_content\relax
+ \ifdim\d_page_mul_balance_target>\openlineheight\relax
+ \ifdim\d_page_mul_balance_target>\d_page_mul_balance_fuzzyness\relax
+ % \stretchcolumnsfalse % better good bad than bad good
+ \showmessage\m!columns9\empty
+ \fi
+ \fi
+ \fi
+ \dohandleallcolumnscs\page_mul_balance_try_three_step
+ \egroup}
+
+\def\page_mul_balance_try_three_step
+ {\global\setbox\currentcolumnbox\vbox to \ht\firstcolumnbox
+ {\ifstretchcolumns
+ \unvbox\currentcolumnbox
+ \else
+ \box\currentcolumnbox
+ \vfill
+ \fi}}
+
%D The multicolumn mechanism is incorporated in a \CONTEXT\ interface,
%D which acts like:
%D
@@ -1084,23 +1091,26 @@
%D
%D When all those floats are flushed, we switch to the local flushing routine.
-\def\dopushcolumnfloats
- {\ifsomefloatwaiting
- \showmessage\m!columns6{\the\savednoffloats}%
- \global\setbox\savedfloatlist\box\floatlist
- \xdef\dopopcolumnfloats
- {\global\savednoffloats\the\savednoffloats
- \global\setbox\floatlist\box\savedfloatlist
- \global\noexpand\somefloatwaitingtrue}%
- \global\savednoffloats\zerocount
- \global\somefloatwaitingfalse
- \else
- \global\let\dopopcolumnfloats\relax
- \fi}
-
-\let\dopopcolumnfloats\relax
+% \newbox \floatlist
+% \newbox \savedfloatlist
+%
+% \def\page_floats_column_push_saved
+% {\ifconditional\c_page_floats_some_waiting
+% \showmessage\m!columns6{\the\savednoffloats}%
+% \global\setbox\savedfloatlist\box\floatlist
+% \xdef\page_floats_column_pop_saved
+% {\global\savednoffloats\the\savednoffloats
+% \global\setbox\floatlist\box\savedfloatlist
+% \global\noexpand\settrue\c_page_floats_some_waiting}%
+% \global\savednoffloats\zerocount
+% \global\setfalse\c_page_floats_some_waiting
+% \else
+% \global\let\page_floats_column_pop_saved\relax
+% \fi}
+%
+% \let\page_floats_column_pop_saved\relax
-% \def\setcolumnfloats % messy as it adapts everypar
+% \def\page_mul_initialize_floats % messy as it adapts everypar
% {\xdef\globalsavednoffloats{\the\savednoffloats}%
% \ifnum\globalsavednoffloats>\zerocount
% \setglobalcolumnfloats % hm, we always push so this never happens
@@ -1108,66 +1118,66 @@
% \setlocalcolumnfloats
% \fi}
-\def\setcolumnfloats % messy as it adapts everypar, we need to adapt this
- {\setlocalcolumnfloats}
-
-\newconditional\onlylocalcolumnfloats % temp hack as we will redo floats (grid snapping is also messy now)
+ \def\page_mul_initialize_floats % messy as it adapts everypar, we need to adapt this
+ {\setlocalcolumnfloats}
-\unexpanded\def\page_mul_command_flush_floats
- {\ifconditional\onlylocalcolumnfloats
- \doflushcolumnfloats
- \else
- \page_one_command_flush_floats
- \fi}
+ \newconditional\onlylocalcolumnfloats % temp hack as we will redo floats (grid snapping is also messy now)
+ \newtoks \everylocalcolumnfloatspar
-\unexpanded\def\page_mul_command_check_if_float_fits
- {\ifconditional\onlylocalcolumnfloats
- \docolumnroomfloat
- \fi}
-
-\unexpanded\def\page_mul_command_flush_saved_floats
- {\ifconditional\onlylocalcolumnfloats\relax
- \else
- \page_one_command_flush_saved_floats
- \fi}
-
-\unexpanded\def\page_mul_command_flush_top_insertions
- {\ifconditional\onlylocalcolumnfloats\relax
- \else
- \page_one_command_flush_top_insertions
- \fi}
-
-\newtoks\everylocalcolumnfloatspar
-
-\everylocalcolumnfloatspar
- {\flushnotes
- \flushcolumnfloat
- % \flushmargincontents
- \checkindentation}
+ \unexpanded\def\page_mul_command_flush_floats
+ {\ifconditional\onlylocalcolumnfloats
+ \doflushcolumnfloats
+ \else
+ \page_one_command_flush_floats
+ \fi}
-\def\setlocalcolumnfloats
- {\settrue\onlylocalcolumnfloats
- \everypar\everylocalcolumnfloatspar
- \let\flushcolumnfloat\doflushcolumnfloat
- \let\flushcolumnfloats\doflushcolumnfloats}
+ \unexpanded\def\page_mul_command_check_if_float_fits
+ {\ifconditional\onlylocalcolumnfloats
+ \docolumnroomfloat
+ \fi}
-\def\setglobalcolumnfloats
- {\setfalse\onlylocalcolumnfloats
- \everypar\emptytoks
- \let\flushcolumnfloat\relax
- \let\flushcolumnfloats\noflushcolumnfloats}
+ \unexpanded\def\page_mul_command_flush_saved_floats
+ {\ifconditional\onlylocalcolumnfloats\relax
+ \else
+ \page_one_command_flush_saved_floats
+ \fi}
-\def\noflushcolumnfloats
- {\bgroup
- \xdef\localsavednoffloats{\the\savednoffloats}%
- \global\savednoffloats\globalsavednoffloats
- \page_otr_command_flush_top_insertions
- \xdef\globalsavenoffloats{\the\savednoffloats}%
- \ifnum\globalsavednoffloats=\zerocount
- \setlocalcolumnfloats
- \fi
- \global\savednoffloats\localsavednoffloats
- \egroup}
+ \unexpanded\def\page_mul_command_flush_top_insertions
+ {\ifconditional\onlylocalcolumnfloats\relax
+ \else
+ \page_one_command_flush_top_insertions
+ \fi}
+
+ \appendtoks
+ \flushnotes
+ \page_mul_flush_float
+ %\flushmargincontents
+ \checkindentation
+ \to \everylocalcolumnfloatspar
+
+ \def\setlocalcolumnfloats
+ {\settrue\onlylocalcolumnfloats
+ \everypar\everylocalcolumnfloatspar
+ \let\page_mul_flush_float\doflushcolumnfloat
+ \let\page_mul_flush_floats\doflushcolumnfloats}
+
+ \def\setglobalcolumnfloats
+ {\setfalse\onlylocalcolumnfloats
+ \reseteverypar
+ \let\page_mul_flush_float\relax
+ \let\page_mul_flush_floats\noflushcolumnfloats}
+
+ \def\noflushcolumnfloats
+ {\bgroup
+ \xdef\localsavednoffloats{\the\savednoffloats}%
+ \global\savednoffloats\globalsavednoffloats
+ \page_otr_command_flush_top_insertions
+ \xdef\globalsavenoffloats{\the\savednoffloats}%
+ \ifnum\globalsavednoffloats=\zerocount
+ \setlocalcolumnfloats
+ \fi
+ \global\savednoffloats\localsavednoffloats
+ \egroup}
%D We need to calculate the amount of free space in a column. When there is not
%D enough room, we migrate the float to the next column. These macros are
@@ -1176,124 +1186,124 @@
%D floats have to be taken into account when we calculate the available space. It's
%D a pity that such things are not an integral part of \TEX.
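Paraphrasing \getcolumnstatus below (a reading of the code, not a quote from
it): a float still fits in column number k as long as

  \pagetotal + sum(\ht + \dp of the top column boxes placed so far)
    <= k * (\makeupheight - \d_page_mul_preceding_height)

where the old code uses \columntextheight and \precolumnboxheight for the same
quantities; otherwise the loop advances to the next column.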
-\def\getcolumnstatus\column#1\total#2\goal#3\\%
- {\dimen0=\ifdim\pagegoal<\maxdimen \pagetotal \else \zeropoint \fi
- \dimen2=\zeropoint
- \count255=\zerocount
- \dimen8=\columntextheight
- \advance\dimen8 -\precolumnboxheight
- \def\dogetcolumnstatus
- {\advance\count255 \plusone
- \advance\dimen2 \ht\currenttopcolumnbox
- \advance\dimen2 \dp\currenttopcolumnbox
- \dimen4\dimen2
- \advance\dimen4 \dimen0
- \dimen6=\count255\dimen8
- \ifdim\dimen4>\dimen6
- \else
- \let\dogetcolumnstatus\relax
- \fi}%
- \dohandleallcolumns{\dogetcolumnstatus}%
- \ifnum\count255=0 \count255=1 \fi
- #1=\count255
- #2=\dimen4
- #3=\dimen6 }
-
-\def\getinsertionheight
- {\ifdim\pagegoal<\maxdimen
- \bgroup
- \dimen0=\columntextheight
- \advance\dimen0 -\pagegoal
- \xdef\insertionheight{\the\dimen0}%
- \egroup
- \else
- \global\let\insertionheight\zeropoint
- \fi}
-
-\def\docolumnroomfloat
- {\ifpostponecolumnfloats
- \global\roomforfloatfalse
- \else\ifnofloatpermitted
- \global\roomforfloatfalse
- \else
- \bgroup
- \getcolumnstatus\column\count255\total\dimen0\goal\dimen2\\%
- \dofloatsgetinfo\s!text
-     \setbox\scratchbox\vbox % tricky with objects ?
- {\blank[\rootfloatparameter\c!spacebefore]
- \snaptogrid\vbox{\vskip\floatheight}}% copy?
- \advance\dimen0\dimexpr\ht\scratchbox+2\openlineheight+.5\lineheight\relax\relax % needed because goal a bit higher
- \ifdim\dimen0>\dimen2
- \global\roomforfloatfalse
- \else
- \global\roomforfloattrue
- \fi
- \ifdim\floatwidth>\hsize
- \showmessage\m!columns{11}\empty
- \global\roomforfloatfalse
- \fi
- \egroup
- \fi\fi}
+ \def\getcolumnstatus#1#2#3%
+ {\dimen0=\ifdim\pagegoal<\maxdimen \pagetotal \else \zeropoint \fi
+ \dimen2=\zeropoint
+ \count255=\zerocount
+ \dimen8=\makeupheight
+ \advance\dimen8 -\d_page_mul_preceding_height
+ \def\dogetcolumnstatus
+ {\advance\count255 \plusone
+ \advance\dimen2 \ht\currenttopcolumnbox
+ \advance\dimen2 \dp\currenttopcolumnbox
+ \dimen4\dimen2
+ \advance\dimen4 \dimen0
+ \dimen6=\count255\dimen8
+ \ifdim\dimen4>\dimen6
+ \else
+ \let\dogetcolumnstatus\relax
+ \fi}%
+ \dohandleallcolumns{\dogetcolumnstatus}%
+ \ifnum\count255=0 \count255=1 \fi
+ #1=\count255
+ #2=\dimen4
+ #3=\dimen6 }
+
+ \def\getinsertionheight
+ {\ifdim\pagegoal<\maxdimen
+ \bgroup
+ \dimen0=\makeupheight
+ \advance\dimen0 -\pagegoal
+ \xdef\insertionheight{\the\dimen0}%
+ \egroup
+ \else
+ \global\let\insertionheight\zeropoint
+ \fi}
+
+ \def\docolumnroomfloat
+ {\ifpostponecolumnfloats
+ \global\setfalse\c_page_floats_room
+ \else\ifconditional\c_page_floats_not_permitted
+ \global\setfalse\c_page_floats_room
+ \else
+ \bgroup
+ \getcolumnstatus{\count255}{\dimen0}{\dimen2}%
+ \page_floats_get_info\s!text
+      \setbox\scratchbox\vbox % tricky with objects ?
+ {\blank[\rootfloatparameter\c!spacebefore]
+ \snaptogrid\vbox{\vskip\floatheight}}% copy?
+ \advance\dimen0\dimexpr\ht\scratchbox+2\openlineheight+.5\lineheight\relax\relax % needed because goal a bit higher
+ \ifdim\dimen0>\dimen2
+ \global\setfalse\c_page_floats_room
+ \else
+ \global\settrue\c_page_floats_room
+ \fi
+ \ifdim\floatwidth>\hsize
+ \showmessage\m!columns{11}\empty
+ \global\setfalse\c_page_floats_room
+ \fi
+ \egroup
+ \fi\fi}
%D Flushing one float is done as soon as possible, i.e. \type {\everypar}.
%D This means that (at the moment) sidefloats are not supported (overruled)!
-\newif\ifflushingcolumnfloats \flushingcolumnfloatstrue
-
-\def\doflushcolumnfloat
- {\ifpostponecolumnfloats\else\ifflushingcolumnfloats\ifsomefloatwaiting
- \doflushcolumnfloatindeed
- \fi\fi\fi}
-
-\def\doflushcolumnfloatindeed
- {\bgroup
- \forgetall
- \let\doflushcolumnfloat\relax
- \getcolumnstatus\column\mofcolumns\total\dimen0\goal\dimen2\\%
- \ifdim\dimen0>\zeropoint
- \dofloatsgetinfo\s!text
- \ifdim\floatwidth>\hsize
- % dropped ?
- \else
- \setbox2\vbox
- {\blank[\rootfloatparameter\c!spacebefore]
- \snaptogrid\vbox{\vskip\floatheight}}%
- \advance\dimen0 \ht2
- \ifdim\dimen0>\dimen2
- \ifnum\mofcolumns<\nofcolumns
- \advance\mofcolumns \plusone
- \ifdim\ht\currenttopcolumnbox=\zeropoint
- \dofloatsflush\s!text{1}%
- \global\setbox\currenttopcolumnbox\vbox
- {\snaptogrid\vbox{\box\floatbox}
-          \whitespace % needed ?
- \blank[\rootfloatparameter\c!spaceafter]}%
- \dimen4=\htdp\currenttopcolumnbox
- \global\advance\vsize -\dimen4
- \advance\dimen4 -\pagegoal
- \global\pagegoal-\dimen4
- \showmessage\m!columns{12}a%
+ \newif\ifflushingcolumnfloats \flushingcolumnfloatstrue
+
+ \def\doflushcolumnfloat
+ {\ifpostponecolumnfloats\else\ifflushingcolumnfloats\ifconditional\c_page_floats_some_waiting
+ \doflushcolumnfloatindeed
+ \fi\fi\fi}
+
+ \def\doflushcolumnfloatindeed
+ {\bgroup
+ \forgetall
+ \let\doflushcolumnfloat\relax
+ \getcolumnstatus{\mofcolumns}{\dimen0}{\dimen2}%
+ \ifdim\dimen0>\zeropoint
+ \page_floats_get_info\s!text
+ \ifdim\floatwidth>\hsize
+ % dropped ?
+ \else
+ \setbox2\vbox
+ {\blank[\rootfloatparameter\c!spacebefore]
+ \snaptogrid\vbox{\vskip\floatheight}}%
+ \advance\dimen0 \ht2
+ \ifdim\dimen0>\dimen2
+ \ifnum\mofcolumns<\nofcolumns
+ \advance\mofcolumns \plusone
+ \ifdim\ht\currenttopcolumnbox=\zeropoint
+ \page_floats_flush\s!text\plusone
+ \global\setbox\currenttopcolumnbox\vbox
+ {\snaptogrid\vbox{\box\floatbox}
+            \whitespace % needed ?
+ \blank[\rootfloatparameter\c!spaceafter]}%
+ \dimen4=\htdp\currenttopcolumnbox
+ \global\advance\vsize -\dimen4
+ \advance\dimen4 -\pagegoal
+ \pagegoal-\dimen4
+ \showmessage\m!columns{12}a%
+ \else
+ \showmessage\m!columns{12}b%
+ \fi
+ \else
+ \showmessage\m!columns{12}c%
+ \fi
\else
- \showmessage\m!columns{12}b%
+        \ifhmode{\setbox0\lastbox}\fi% where did that sneak in
+ \par
+        \ifdim\prevdepth<\zeropoint \else % otherwise whitespace at the top of the column
+ \nobreak
+ \blank[\rootfloatparameter\c!spacebefore]
+ \nobreak
+ \fi
+ \page_floats_flush\s!text\plusone
+ \page_otr_command_flush_float_box
+ \blank[\rootfloatparameter\c!spaceafter]
\fi
- \else
- \showmessage\m!columns{12}c%
\fi
- \else
-    \ifhmode{\setbox0\lastbox}\fi% where did that sneak in
- \par
-    \ifdim\prevdepth<\zeropoint \else % otherwise whitespace at the top of the column
- \nobreak
- \blank[\rootfloatparameter\c!spacebefore]
- \nobreak
- \fi
- \dofloatsflush\s!text{1}%
- \page_otr_command_flush_float_box
- \blank[\rootfloatparameter\c!spaceafter]
\fi
- \fi
- \fi
- \egroup}
+ \egroup}
%D This one looks complicated. Up to \type{\nofcolumns} floats are placed,
%D taking the width of a float into account. This routine can be improved
@@ -1308,96 +1318,96 @@
%D When handling lots of (small) floats, spacing can get worse because of
%D evening out the columns.
-\def\doflushcolumnfloats
- {\ifpostponecolumnfloats\else
- \bgroup
- \forgetall
- \ifsomefloatwaiting
- \dimen8\zeropoint
- \dimen4\zeropoint
- \count0\zerocount % count0 can be used local
- \count2\nofcolumns % count2 can be used local
- \dohandleallcolumns
- {\ifnum\count0>\zerocount % the wide one's reserved space
- \global\setbox\currenttopcolumnbox\vbox
- {\snaptogrid\vbox
- {\copy\currenttopcolumnbox
- \hbox{\vphantom{\vskip\floatheight}}}% known from previous
-         \whitespace % needed ?
- \blank[\rootfloatparameter\c!spaceafter]}%
- \else
- \dofloatsgetinfo\s!text
- \ifdim\floatwidth>\hsize
- \dimen0\dimexpr\floatwidth+\intercolumnwidth+.5pt\relax
- \dimen2\dimexpr\hsize +\intercolumnwidth+.5pt\relax
- \divide\dimen0 \dimen2
- \count0\dimen0
- \advance\count0 \plusone
- \ifnum\count0>\count2
- \count0\zerocount
+ \def\doflushcolumnfloats
+ {\ifpostponecolumnfloats\else
+ \bgroup
+ \forgetall
+ \ifconditional\c_page_floats_some_waiting
+ \dimen8\zeropoint
+ \dimen4\zeropoint
+ \count0\zerocount % count0 can be used local
+ \count2\nofcolumns % count2 can be used local
+ \dohandleallcolumns
+ {\ifnum\count0>\zerocount % the wide one's reserved space
+ \global\setbox\currenttopcolumnbox\vbox
+ {\snaptogrid\vbox
+ {\copy\currenttopcolumnbox
+ \hbox{\vphantom{\vskip\floatheight}}}% known from previous
+            \whitespace % needed ?
+ \blank[\rootfloatparameter\c!spaceafter]}%
\else
- \dimen0\dimexpr\count0\hsize+\count0\intercolumnwidth-\intercolumnwidth\relax
- \dofloatsflush\s!text{1}%
- \ifdim\floatwidth>\finalcolumntextwidth % better somewhere else too
- \global\setbox\floatbox\hbox to \finalcolumntextwidth{\hss\box\floatbox\hss}%
- \fi % otherwise the graphic may disappear
- \global\setbox\floatbox\hbox to \dimen0
- {\processaction[\rootfloatparameter\c!location] % how easy to forget
- [ \v!left=>\box\floatbox\hss,
- \v!right=>\hss\box\floatbox,
- \s!default=>\hss\box\floatbox\hss,
- \s!unknown=>\hss\box\floatbox\hss]}%
+ \page_floats_get_info\s!text
+ \ifdim\floatwidth>\hsize
+ \dimen0\dimexpr\floatwidth+\d_page_mul_distance+.5pt\relax
+ \dimen2\dimexpr\hsize +\d_page_mul_distance+.5pt\relax
+ \divide\dimen0 \dimen2
+ \count0\dimen0
+ \advance\count0 \plusone
+ \ifnum\count0>\count2
+ \count0\zerocount
+ \else
+ \dimen0\dimexpr\count0\hsize+\count0\d_page_mul_distance-\d_page_mul_distance\relax
+ \page_floats_flush\s!text\plusone
+ \ifdim\floatwidth>\makeupwidth % better somewhere else too
+ \global\setbox\floatbox\hbox to \makeupwidth{\hss\box\floatbox\hss}%
+ \fi % otherwise the graphic may disappear
+ \global\setbox\floatbox\hbox to \dimen0
+ {\processaction[\rootfloatparameter\c!location] % how easy to forget
+ [ \v!left=>\box\floatbox\hss,
+ \v!right=>\hss\box\floatbox,
+ \s!default=>\hss\box\floatbox\hss,
+ \s!unknown=>\hss\box\floatbox\hss]}%
+ \fi
+ \showmessage\m!columns{13}\empty
+ \else
+ \page_floats_flush\s!text\plusone
+ \ifdim\floatwidth>\makeupwidth % better somewhere else too
+ \global\setbox\floatbox\hbox to \makeupwidth{\hss\box\floatbox\hss}%
+ \fi % otherwise the graphic may disappear
+ % \showmessage\m!columns{13}\empty
+ \fi
+ \ifdim\ht\floatbox>\zeropoint\relax
+ \global\setbox\currenttopcolumnbox\vbox
+ {\snaptogrid\vbox
+ {\box\currenttopcolumnbox % was copy
+ \box\floatbox}
+              \whitespace % needed ?
+ \blank[\rootfloatparameter\c!spaceafter]}%
+ \fi
+ \dimen6\htdp\currenttopcolumnbox
\fi
- \showmessage\m!columns{13}\empty
- \else
- \dofloatsflush\s!text{1}%
- \ifdim\floatwidth>\finalcolumntextwidth % better somewhere else too
- \global\setbox\floatbox\hbox to \finalcolumntextwidth{\hss\box\floatbox\hss}%
- \fi % otherwise the graphic may disappear
- % \showmessage\m!columns{13}\empty
- \fi
- \ifdim\ht\floatbox>\zeropoint\relax
- \global\setbox\currenttopcolumnbox\vbox
- {\snaptogrid\vbox
- {\box\currenttopcolumnbox % was copy
- \box\floatbox}
-      \whitespace % needed ?
- \blank[\rootfloatparameter\c!spaceafter]}%
- \fi
- \dimen6\htdp\currenttopcolumnbox
- \fi
- \ifdim\dimen4<\ht\currenttopcolumnbox
- \dimen4\ht\currenttopcolumnbox
- \fi
- \advance\dimen8 \dimen6
- \advance\count2 \minusone
- \advance\count0 \minusone }%
- \page_otr_command_set_vsize
- \global\advance\vsize -\dimen8
- \global\pagegoal\vsize
- \else
- % \page_mul_command_flush_floats % does not snap!
- \fi
- \egroup
- \fi}
+ \ifdim\dimen4<\ht\currenttopcolumnbox
+ \dimen4\ht\currenttopcolumnbox
+ \fi
+ \advance\dimen8 \dimen6
+ \advance\count2 \minusone
+ \advance\count0 \minusone }%
+ \page_otr_command_set_vsize
+ \global\advance\vsize -\dimen8
+ \pagegoal\vsize
+ \else
+ % \page_mul_command_flush_floats % does not snap!
+ \fi
+ \egroup
+ \fi}
%D The next macro can be used to flush floats in the current stream. No
%D width checking is (yet) done.
-\def\insertcolumnfloats
- {\doloop
- {\ifsomefloatwaiting
- \bgroup
- \forgetall
- % no check for width
- \dogetfloat
- \blank[\rootfloatparameter\c!spacebefore]
- \snaptogrid\vbox{\copy\floatbox}
- \blank[\rootfloatparameter\c!spaceafter]
- \egroup
- \else
- \exitloop
- \fi}}
+ \def\insertcolumnfloats
+ {\doloop
+ {\ifconditional\c_page_floats_some_waiting
+ \bgroup
+ \forgetall
+ % no check for width
+ \page_floats_get
+ \blank[\rootfloatparameter\c!spacebefore]
+ \snaptogrid\vbox{\copy\floatbox}
+ \blank[\rootfloatparameter\c!spaceafter]
+ \egroup
+ \else
+ \exitloop
+ \fi}}
%D These were the multi||column routines. They can and need to be improved
%D but at the moment their behaviour is acceptable.
@@ -1417,57 +1427,21 @@
% 3 \input tufte \par \placefigure{}{\framed[width=\hsize,height=3cm]{3}}
% \stopcolumns
-\unexpanded\def\setupcolumns
- {\dosingleempty\dosetupcolumns}
-
-\def\dosetupcolumns[#1]%
- {\getparameters[\??kl][#1]%
- \nofcolumns\@@kln\relax
- \processaction
- [\@@klrule]
- [ \v!on=>\let\betweencolumns\linebetweencolumns,
- \v!off=>\let\betweencolumns\spacebetweencolumns,
- \s!default=>\let\betweencolumns\spacebetweencolumns,
- \s!unknown=>\let\betweencolumns\@@klrule]}
-
-\def\linebetweencolumns
- {\bgroup
- \starttextproperties
- \ifdim\@@kldistance>\zeropoint
- \dimen0=\@@kldistance
- \else
- \dimen0=\linewidth
- \fi
- \advance\dimen0 -\linewidth
- \hskip.5\dimen0
- \vrule
- \!!width\linewidth
- \ifnum\bottomraggednessmode=\plustwo % baselinebottom
- \!!depth\strutdepth
- \fi
- \hskip.5\dimen0\relax
- \stoptextproperties
- \egroup}
-
-\def\spacebetweencolumns
- {\hskip\@@kldistance}
-
-\presetlocalframed[\??kl]
-
-\def\backgroundfinishcolumnbox
- {\doifinsetelse\@@kloffset{\v!none,\v!overlay}
- {\let\@@kloffset\!!zeropoint}
- {\scratchdimen\@@kloffset
- \advance\scratchdimen -\@@klrulethickness
- \edef\@@kloffset{\the\scratchdimen}}%
- \localframed
- [\??kl]
- [\c!strut=\v!no,
- \c!width=\v!fit,
- \c!height=\v!fit,
- \c!align=]}
-
-\definecomplexorsimpleempty\startcolumns
+ % \def\backgroundfinishcolumnbox
+ % {\doifinsetelse\@@kloffset{\v!none,\v!overlay}
+ % {\let\@@kloffset\!!zeropoint}
+ % {\scratchdimen\@@kloffset
+ % \advance\scratchdimen -\@@klrulethickness
+ % \edef\@@kloffset{\the\scratchdimen}}%
+ % \localframed
+ % [\??kl]
+ % [\c!strut=\v!no,
+ % \c!width=\v!fit,
+ % \c!height=\v!fit,
+ % \c!align=]}
+
+ \def\backgroundfinishcolumnbox
+ {}
% to be reconsidered ... (in any case they need to be unexpandable since 2011.12.30)
@@ -1475,133 +1449,201 @@
\unexpanded\def\page_columns_align_option_no {\stretchcolumnsfalse\inheritcolumnsfalse}% todo: new key
\unexpanded\def\page_columns_align_option_text{\stretchcolumnsfalse\inheritcolumnstrue }%
-\def\complexstartcolumns[#1]% %% \startcolumns
+\newtoks\t_page_mul_initialize
+
+\unexpanded\def\startcolumns
+ {\dosingleempty\page_mul_start}
+
+\def\page_mul_start[#1]% %% \startcolumns
{\bgroup
- \let\stopcolumns\egroup
\ifinsidecolumns
+ \page_mul_start_nop
\else
- \setupcolumns[#1]%
- \ifnum\@@kln>1\relax
- \whitespace
- \begingroup
- \doif\@@kloption\v!background
- {\let\finishcolumnbox\backgroundfinishcolumnbox
- \let\columntextoffset\@@kloffset}%
- \ifx\@@klcommand\empty\else
- \let\postprocesscolumnline\@@klcommand
- \fi
- \doifelsenothing\@@klheight
- \heightencolumnsfalse
- \heightencolumnstrue
- \doifelse\@@kldirection\v!right
- \reversecolumnsfalse
- \reversecolumnstrue
- \doifelse\@@klbalance\v!yes
- \balancecolumnstrue
- \balancecolumnsfalse
- \installalign\v!yes {\page_columns_align_option_yes }%
- \installalign\v!no {\page_columns_align_option_no }%
- \installalign\v!text{\page_columns_align_option_text}%
- \stretchcolumnsfalse
- \inheritcolumnstrue
- \doifsomething\@@klalign{\expanded{\setupalign[\@@klalign]}}%
- \nofcolumns\@@kln
- \edef\fixedcolumnheight{\@@klheight}%
- \edef\minbalancetoplines{\@@klntop}%
- \setuptolerance[\@@kltolerance]% %% \startcolumns
- \setupblank[\@@klblank]%
- \ifdim\s_spac_whitespace_parskip>\zeropoint\relax
- \setupwhitespace[\@@klblank]%
- \fi
- \unexpanded\def\stopcolumns
- {\endmulticolumns
- \global\insidecolumnsfalse
- \endgroup
- \egroup}%
- \global\insidecolumnstrue
- \beginmulticolumns
+ \iffirstargument
+ \setupcolumns[#1]%
+ \fi
+ \nofcolumns\columnsparameter\c!n\relax
+ \ifnum\nofcolumns>\plusone
+ \page_mul_start_yes
+ \else
+ \page_mul_start_nop
\fi
\fi}
-\installcolumnbreakmethod \s!multicolumn \v!preference
- {\goodbreak}
-
-\installcolumnbreakmethod \s!multicolumn \v!yes
- {\par % todo: since
- {\testrulewidth\zeropoint\ruledvskip\textheight} % we misuse a
- \penalty-200 % side effect
- \vskip-\textheight
- }% bugged : \prevdepth-\thousandpoint} % signals top of column to \blank
+\unexpanded\def\page_mul_start_nop
+ {\let\stopcolumns\page_mul_stop_nop}
-%D New: only at start of columns; may change ! Rather interwoven and therefore
-%D to be integrated when the multi column modules are merged.
-
-\unexpanded\def\setupcolumnspan[#1]%
- {\getparameters[\??ks][#1]}
-
-\presetlocalframed
- [\??ks]
+\unexpanded\def\page_mul_stop_nop
+ {\egroup}
-\setupcolumnspan
- [\c!n=2,
- \c!offset=\v!overlay,
- \c!frame=\v!off]
+\unexpanded\def\page_mul_start_yes
+ {\whitespace
+ \begingroup
+ \let\stopcolumns\page_mul_stop_indeed
+ \global\insidecolumnstrue
+ \the\t_page_mul_initialize
+ %
+ \flushnotes
+ \begingroup
+ %
+ \d_page_mul_leftskip\leftskip
+ \d_page_mul_rightskip\rightskip
+ \leftskip\zeropoint
+ \rightskip\zeropoint
+ %
+ \widowpenalty\zerocount % will become option
+ \clubpenalty \zerocount % will become option
+ %
+ \page_floats_column_push_saved
+ %
+ \ifdim\dimexpr\pagetotal+\parskip+\openlineheight\relax<\pagegoal
+ \allowbreak
+ \else
+ \break % sometimes fails
+ \fi
+ \appendtoks
+ \topskip1\topskip % best a switch
+ \to \everybodyfont
+ \the\everybodyfont % ugly here
+ \saveinterlinespace % ugly here
+ %
+ \initializecolumns\nofcolumns
+ %
+ \hangafter\zerocount
+ \hangindent\zeropoint
+ \reseteverypar
+ \ifdim\pagetotal=\zeropoint \else
+ \verticalstrut
+ \vskip-\struttotal
+ \fi
+ \global\savedpagetotal\pagetotal
+ \setupoutputroutine[\s!multicolumn]%
+ \c_page_mul_routine\c_page_mul_routine_intercept
+ \page_otr_trigger_output_routine % no \holdinginserts=1, can make footnote disappear !
+ \global\d_page_mul_preceding_height\ht\b_page_mul_preceding
+ \c_page_mul_routine\c_page_mul_routine_continue
+ \page_mul_initialize_floats
+ \dohandleallcolumns{\global\setbox\currenttopcolumnbox\emptybox}%
+ \checkbegincolumnfootnotes
+ \page_otr_command_set_hsize
+ \page_otr_command_set_vsize}
-\newbox\b_page_columns_span \let\postprocesscolumnspanbox\gobbleoneargument
+\setnewconstant\multicolumnendsyncmethod\plusone % 1: old sync 2: new sync (cont-loc/project) / may fail ! ! ! !
-\def\dostartcolumnspan[#1]%
- {\bgroup
- \setupcolumnspan[#1]%
- \forgetall
- \ifinsidecolumns
- \advance\hsize \intercolumnwidth
- \hsize\@@ksn\hsize
- \advance\hsize -\intercolumnwidth
+\unexpanded\def\page_mul_stop_indeed
+ {\relax
+ \ifnum\multicolumnendsyncmethod=\plustwo
+ \synchronizeoutput
+ \else
+ % don't collapse these
+ \vskip \lineheight
+ \vskip-\lineheight % take footnotes into account
\fi
- \dowithnextboxcs\dofinishcolumnsetspan\vbox\bgroup
- %\topskipcorrection % becomes an option !
- \EveryPar{\begstrut\EveryPar{}}} % also !
-
-\def\dofinishcolumnsetspan
- {\setbox\b_page_columns_span\flushnextbox
- \ifinsidecolumns\wd\b_page_columns_span\hsize\fi
- \postprocesscolumnspanbox\b_page_columns_span
- \scratchdimen\ht\b_page_columns_span
- \setbox\b_page_columns_span\hbox % depth to be checked, probably option!
- {\localframed[\??ks][\c!offset=\v!overlay]{\box\b_page_columns_span}}%
- \ht\b_page_columns_span\scratchdimen
- \dp\b_page_columns_span\strutdp
- \wd\b_page_columns_span\hsize
- \ifinsidecolumns
- \ifnum\@@ksn>1
- \page_otr_command_set_vsize
- \dohandleallcolumns
- {\ifnum\currentcolumn>\@@ksn\else
- \global\setbox\currenttopcolumnbox=\vbox
- {\ifnum\currentcolumn=1
- \snaptogrid\vbox{\copy\b_page_columns_span}
- \else
- \snaptogrid\vbox{\vphantom{\copy\b_page_columns_span}}
- \fi}%
- \wd\currenttopcolumnbox\hsize
- \global\advance\vsize -\ht\currenttopcolumnbox
- \fi}
- \global\pagegoal\vsize
- \else
- \snaptogrid\vbox{\box\b_page_columns_span}
+ \doflushcolumnfloat % added recently
+ %\doflushcolumnfloats % no, since it results in wrong top floats
+ \flushnotes % before start of columns
+ \par
+ \ifbalancecolumns
+ \ifnum\multicolumnendsyncmethod=\plusone
+ \c_page_mul_routine\c_page_mul_routine_continue
+ \goodbreak
\fi
+ \c_page_mul_routine\c_page_mul_routine_balance
\else
- \snaptogrid\vbox{\box\b_page_columns_span}
+ \goodbreak
\fi
- \endgraf
- \ifvmode\prevdepth\strutdp\fi
- \egroup}
+ % still the multi column routine
+ \page_otr_trigger_output_routine % the prevdepth is important, try e.g. toclist in
+   \prevdepth\zeropoint             % columns before some noncolumned text
+ %
+ \c_page_mul_routine\c_page_mul_routine_regular
+ %
+ \ifvoid\b_page_mul_preceding\else
+ \unvbox\b_page_mul_preceding
+ \fi
+ \global\d_page_mul_preceding_height\zeropoint
+ \endgroup % here
+ \nofcolumns\plusone
+ \page_otr_command_set_vsize
+ \checkendcolumnfootnotes
+ \dosomebreak\allowbreak
+ \page_floats_column_pop_saved
+ %
+ \global\insidecolumnsfalse
+ \endgroup
+ \egroup}%
+
+\appendtoks
+ \edef\p_option{\columnsparameter\c!option}%
+ \ifx\p_option\v!background
+ \let\finishcolumnbox\backgroundfinishcolumnbox
+ \fi
+ \d_page_mul_offset\columnsparameter\c!offset\relax
+ \edef\p_command{\columnsparameter\c!command}%
+ \ifx\p_command\empty \else
+ \let\postprocesscolumnline\p_command
+ \fi
+ \edef\p_height{\columnsparameter\c!height}%
+ \ifx\p_height\empty
+ \d_page_mul_forced_height\textheight
+ \heightencolumnsfalse
+ \else
+ \d_page_mul_forced_height\p_height\relax
+ \heightencolumnstrue
+ \fi
+ \edef\p_direction{\columnsparameter\c!direction}%
+ \ifx\p_direction\v!right
+ \setfalse\c_page_mul_reverse
+ \else
+ \settrue\c_page_mul_reverse
+ \fi
+ \edef\p_balance{\columnsparameter\c!balance}%
+ \ifx\p_balance\v!yes
+ \balancecolumnstrue
+ \else
+ \balancecolumnsfalse
+ \fi
+ \installalign\v!yes {\page_columns_align_option_yes }%
+ \installalign\v!no {\page_columns_align_option_no }%
+ \installalign\v!text{\page_columns_align_option_text}%
+ \stretchcolumnsfalse
+ \inheritcolumnstrue
+ \edef\p_align{\columnsparameter\c!align}%
+ \ifx\p_align\empty \else
+ \setupalign[\p_align]%
+ \fi
+ \edef\p_tolerance{\columnsparameter\c!tolerance}%
+ \ifx\p_tolerance\empty \else
+ \setuptolerance[\p_tolerance]%
+ \fi
+ \edef\p_blank{\columnsparameter\c!blank}%
+ \ifx\p_blank\empty \else
+ \setupblank[\p_blank]%
+ \fi
+ \ifdim\s_spac_whitespace_parskip>\zeropoint\relax
+ \setupwhitespace[\p_blank]%
+ \fi
+ \c_page_mul_balance_minimum\columnsparameter\c!ntop\relax
+ \edef\p_page_mul_rule{\columnsparameter\c!rule}%
+ \expandnamespacemacro\??columnseparators\p_page_mul_rule\s!unknown
+\to \t_page_mul_initialize
+
+%D Column breaks
+
+\installcolumnbreakmethod \s!multicolumn \v!preference
+ {\goodbreak}
-\unexpanded\def\startcolumnspan
- {\dosingleempty\dostartcolumnspan}
+% \installcolumnbreakmethod \s!multicolumn \v!yes
+% {\vskip\textheight
+% \penalty-200 % we can mark and intercept this
+% \vskip-\textheight}
-\unexpanded\def\stopcolumnspan
- {\egroup}
+\installcolumnbreakmethod \s!multicolumn \v!yes
+ {\vskip .5\pagegoal
+ \penalty-200 % we can mark and intercept this
+ \vskip-.5\pagegoal}
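A sketch of how such a break method is reached from the user level; \column
itself is defined elsewhere, and whether it accepts the method name as an
optional argument is an assumption, not something this hunk shows:

  \startcolumns[n=2]
    \input ward
    \column % dispatches to one of the installed multicolumn break methods
    \input tufte
  \stopcolumns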
+
+%D Next we initialize the lot:
\setupcolumns
[\c!n=2,
@@ -1619,19 +1661,85 @@
\c!rulethickness=\linewidth,
\c!offset=.5\bodyfontsize]
-%D Undocumented and still under development.
+%D New: only at start of columns; may change ! Rather interwoven and therefore
+%D to be integrated when the multi column modules are merged.
+
+ \unexpanded\def\setupcolumnspan[#1]%
+ {\getparameters[\??ks][#1]}
+
+ \presetlocalframed
+ [\??ks]
+
+ \setupcolumnspan
+ [\c!n=2,
+ \c!offset=\v!overlay,
+ \c!frame=\v!off]
+
+ \newbox\b_page_columns_span \let\page_mul_postprocess_spanbox\gobbleoneargument
+
+ \unexpanded\def\startcolumnspan
+ {\dosingleempty\dostartcolumnspan}
+
+ \unexpanded\def\stopcolumnspan
+ {\egroup}
+
+ \def\dostartcolumnspan[#1]%
+ {\bgroup
+ \setupcolumnspan[#1]%
+ \forgetall
+ \ifinsidecolumns
+ \advance\hsize \d_page_mul_distance
+ \hsize\@@ksn\hsize
+ \advance\hsize -\d_page_mul_distance
+ \fi
+ \dowithnextboxcs\dofinishcolumnsetspan\vbox\bgroup
+ %\topskipcorrection % becomes an option !
+ \EveryPar{\begstrut\EveryPar{}}} % also !
+
+ \def\dofinishcolumnsetspan
+ {\setbox\b_page_columns_span\flushnextbox
+ \ifinsidecolumns\wd\b_page_columns_span\hsize\fi
+ \page_mul_postprocess_spanbox\b_page_columns_span
+ \scratchdimen\ht\b_page_columns_span
+ \setbox\b_page_columns_span\hbox % depth to be checked, probably option!
+ {\localframed[\??ks][\c!offset=\v!overlay]{\box\b_page_columns_span}}%
+ \ht\b_page_columns_span\scratchdimen
+ \dp\b_page_columns_span\strutdp
+ \wd\b_page_columns_span\hsize
+ \ifinsidecolumns
+ \ifnum\@@ksn>1
+ \page_otr_command_set_vsize
+ \dohandleallcolumns
+ {\ifnum\currentcolumn>\@@ksn\else
+ \global\setbox\currenttopcolumnbox=\vbox
+ {\ifnum\currentcolumn=1
+ \snaptogrid\vbox{\copy\b_page_columns_span}
+ \else
+ \snaptogrid\vbox{\vphantom{\copy\b_page_columns_span}}
+ \fi}%
+ \wd\currenttopcolumnbox\hsize
+ \global\advance\vsize -\ht\currenttopcolumnbox
+ \fi}
+ \pagegoal\vsize
+ \else
+ \snaptogrid\vbox{\box\b_page_columns_span}
+ \fi
+ \else
+ \snaptogrid\vbox{\box\b_page_columns_span}
+ \fi
+ \endgraf
+ \ifvmode\prevdepth\strutdp\fi
+ \egroup}
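A usage sketch for the span mechanism above; as the comment earlier notes it is
meant for the start of the columns, and the snippet is an assumption about the
calling side rather than a quote from this file:

  \startcolumns[n=3]
    \startcolumnspan[n=2]
      \framed[width=\hsize]{this box spans two of the three columns}
    \stopcolumnspan
    \input tufte
  \stopcolumns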
+
+%D Undocumented and still under development.\ifdefined\startsimplecolumns \else
\unexpanded\def\startsimplecolumns
- {\dosingleempty\dostartsimplecolumns}
+ {\dosingleempty\page_mul_simple_start}
-\def\dostartsimplecolumns[#1]%
+\def\page_mul_simple_start[#1]%
{\bgroup
+ \setsimplecolumnshsize[#1]%
\nopenalties
- \getparameters[\??kl]
- [\c!width=\hsize,\c!distance=1.5\bodyfontsize,%
- \c!n=2,\c!lines=0,#1]%
- \let\rigidcolumnlines\@@kllines
- \setrigidcolumnhsize\@@klwidth\@@kldistance\@@kln
\setbox\scratchbox\vbox\bgroup
\forgetall} % \blank[\v!disable]
@@ -1642,35 +1750,44 @@
\egroup}
\unexpanded\def\setsimplecolumnshsize[#1]%
- {\getparameters[\??kl][\c!width=\hsize,\c!distance=1.5\bodyfontsize,\c!n=2,\c!lines=0,#1]%
- \let\rigidcolumnlines\@@kllines
- \setrigidcolumnhsize\@@klwidth\@@kldistance\@@kln}
+ {\getdummyparameters
+ [\c!width=\hsize,
+ \c!distance=1.5\bodyfontsize,
+ \c!n=2,
+ \c!lines=0,
+ #1]%
+ \edef\rigidcolumnlines
+ {\directdummyparameter\c!lines}%
+ \setrigidcolumnhsize
+ {\directdummyparameter\c!width}%
+ {\directdummyparameter\c!distance}%
+ {\directdummyparameter\c!n}}
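A sketch of the user level for these simple columns; the keys follow the
width, distance, n and lines defaults set above, but the snippet itself is an
assumption rather than a quote from this file:

  \startsimplecolumns[n=2,distance=2em]
    \input tufte
  \stopsimplecolumns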
\let\page_mul_command_package_contents\page_one_command_package_contents
\let\page_mul_command_flush_float_box \page_one_command_flush_float_box
\defineoutputroutine
[\s!multicolumn]
- [\s!page_otr_command_routine =\page_mul_command_routine,
- \s!page_otr_command_package_contents =\page_mul_command_package_contents,
- \s!page_otr_command_set_vsize =\page_mul_command_set_vsize,
- \s!page_otr_command_set_hsize =\page_mul_command_set_hsize,
- \s!page_otr_command_next_page =\page_mul_command_next_page,
- \s!page_otr_command_next_page_and_inserts =\page_mul_command_next_page_and_inserts,
- % \s!page_otr_command_synchronize_hsize =\page_mul_command_synchronize_hsize,
- % \s!page_otr_command_set_top_insertions =\page_mul_command_set_top_insertions,
- % \s!page_otr_command_set_bottom_insertions =\page_mul_command_set_bottom_insertions,
- \s!page_otr_command_flush_top_insertions =\page_mul_command_flush_top_insertions,
- % \s!page_otr_command_flush_bottom_insertions =\page_mul_command_flush_bottom_insertions,
- % \s!page_otr_command_set_float_hsize =\page_mul_command_set_float_hsize,
- \s!page_otr_command_check_if_float_fits =\page_mul_command_check_if_float_fits,
- \s!page_otr_command_flush_float_box =\page_mul_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats =\page_mul_command_synchronize_side_floats,
- \s!page_otr_command_side_float_output =\page_mul_command_side_float_output,
- \s!page_otr_command_flush_floats =\page_mul_command_flush_floats,
- \s!page_otr_command_flush_side_floats =\page_mul_command_flush_side_floats,
- \s!page_otr_command_flush_saved_floats =\page_mul_command_flush_saved_floats
- % \s!page_otr_command_flush_margin_blocks =\page_mul_command_flush_margin_blocks, % not used
+ [\s!page_otr_command_routine =\page_mul_command_routine,
+ \s!page_otr_command_package_contents =\page_mul_command_package_contents,
+ \s!page_otr_command_set_vsize =\page_mul_command_set_vsize,
+ \s!page_otr_command_set_hsize =\page_mul_command_set_hsize,
+ \s!page_otr_command_next_page =\page_mul_command_next_page,
+ \s!page_otr_command_next_page_and_inserts =\page_mul_command_next_page_and_inserts,
+ % \s!page_otr_command_synchronize_hsize =\page_mul_command_synchronize_hsize,
+ % \s!page_otr_command_set_top_insertions =\page_mul_command_set_top_insertions,
+ % \s!page_otr_command_set_bottom_insertions =\page_mul_command_set_bottom_insertions,
+ \s!page_otr_command_flush_top_insertions =\page_mul_command_flush_top_insertions,
+ % \s!page_otr_command_flush_bottom_insertions=\page_mul_command_flush_bottom_insertions,
+ % \s!page_otr_command_set_float_hsize =\page_mul_command_set_float_hsize,
+ \s!page_otr_command_check_if_float_fits =\page_mul_command_check_if_float_fits,
+ \s!page_otr_command_flush_float_box =\page_mul_command_flush_float_box,
+ \s!page_otr_command_synchronize_side_floats=\page_mul_command_synchronize_side_floats,
+ \s!page_otr_command_side_float_output =\page_mul_command_side_float_output,
+ \s!page_otr_command_flush_floats =\page_mul_command_flush_floats,
+ \s!page_otr_command_flush_side_floats =\page_mul_command_flush_side_floats,
+ \s!page_otr_command_flush_saved_floats =\page_mul_command_flush_saved_floats
+ % \s!page_otr_command_flush_margin_blocks =\page_mul_command_flush_margin_blocks, % not used
]
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-not.mkiv b/Master/texmf-dist/tex/context/base/page-not.mkiv
index 06e2f387589..d7602bd2658 100644
--- a/Master/texmf-dist/tex/context/base/page-not.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-not.mkiv
@@ -13,12 +13,12 @@
\writestatus{loading}{ConTeXt Page Macros / Footnotes}
-%D Terrible hacks: we need to share save/restore
+%D This is an old mechanism that will be replaced as soon as the multicolumn
+%D code is redone.
-%D We've moved some footnote handling to a separate page
-%D module. The macros below are used in the single and multi
-%D column page handlers and permit mixed usage of column and
-%D page notes.
+%D We've moved some footnote handling to a separate page module. The macros below
+%D are used in the single and multi column page handlers and permit mixed usage of
+%D column and page notes.
\unprotect
@@ -41,11 +41,13 @@
\newdimen\totalinsertionheight
-\def\settotalinsertionheight
+\unexpanded\def\settotalinsertionheight
{\calculatetotalnoteheight
- \totalinsertionheight\totalnoteheight
- \addinsertionheight\topins\to\totalinsertionheight
- \addinsertionheight\botins\to\totalinsertionheight}
+ \totalinsertionheight\dimexpr
+ \totalnoteheight
+ +\page_insert_insertion_height\s!topfloat
+ +\page_insert_insertion_height\s!bottomfloat
+ \relax}
% hm
diff --git a/Master/texmf-dist/tex/context/base/page-one.mkiv b/Master/texmf-dist/tex/context/base/page-one.mkiv
index 89fd7d4a8a3..6261938b6e8 100644
--- a/Master/texmf-dist/tex/context/base/page-one.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-one.mkiv
@@ -20,7 +20,7 @@
% OTRONE: basic single column
-\newdimen\d_page_one_saved_vsize
+\newconstant \c_page_one_float_method
\unexpanded\def\page_one_command_next_page
{\page_otr_eject_page}
@@ -34,6 +34,34 @@
\unexpanded\def\page_one_command_set_float_hsize
{\global\hsize\textwidth}
+% \newdimen\d_page_one_saved_vsize
+%
+% \unexpanded\def\page_one_command_set_vsize
+% {\ifgridsnapping
+% \ifcase\layoutlines
+% \getrawnoflines\textheight
+% \else
+% \noflines\layoutlines
+% \fi
+% \global\vsize\noflines\openlineheight
+% \else
+% \global\vsize\textheight
+% \fi
+% \ifdim\pagegoal<\maxdimen
+% \ifdim\d_page_one_saved_vsize=\vsize
+% % let's assume that the layout didn't change
+% \else
+% \global\d_page_one_saved_vsize\vsize
+% \pagegoal\dimexpr\vsize-\d_page_floats_inserted_top-\d_page_floats_inserted_bottom\relax
+% % \bgroup
+% % \global\d_page_one_saved_vsize\vsize
+% % \advance\vsize-\d_page_floats_inserted_top
+% % \advance\vsize-\d_page_floats_inserted_bottom
+% % \pagegoal\vsize
+% % \egroup
+% \fi
+% \fi}
+
\unexpanded\def\page_one_command_set_vsize
{\ifgridsnapping
\ifcase\layoutlines
@@ -45,20 +73,10 @@
\else
\global\vsize\textheight
\fi
- \ifdim\pagegoal<\maxdimen
- \ifdim\d_page_one_saved_vsize=\vsize
- % let's assume that the layout didn't change
- \else
- \global\d_page_one_saved_vsize\vsize
- \global\pagegoal\dimexpr\vsize-\topinserted-\botinserted\relax
-% \bgroup
-% \global\d_page_one_saved_vsize\vsize
-% \advance\vsize-\topinserted
-% \advance\vsize-\botinserted
-% \global\pagegoal\vsize
-% \egroup
- \fi
- \fi}
+ % alternatively we could set it in builders.buildpage_filter
+ % \ifdim\pagegoal<\maxdimen .. \fi
+ \global\pagegoal\dimexpr\vsize-\d_page_floats_inserted_top-\d_page_floats_inserted_bottom\relax}
+
% 1 = partial page, 2 = whole page, 3 = partial page
@@ -186,31 +204,32 @@
\newskip \s_page_one_between_top_insert
\def\page_one_prepare_top_float
- {\ifdim\topinserted=\zeropoint
+ {\ifdim\d_page_floats_inserted_top=\zeropoint
\settrue\c_page_one_top_of_insert
\else
\setfalse\c_page_one_top_of_insert
\fi
- \s_page_one_between_top_insert\ifdim\floattopskip>\floatbottomskip\floattopskip\else\floatbottomskip\fi\relax
- \global\advance\topinserted\dimexpr\ht\floatbox+\dp\floatbox+\s_page_one_between_top_insert\relax}
+ \s_page_one_between_top_insert\ifdim\d_strc_floats_top>\d_strc_floats_bottom\d_strc_floats_top\else\d_strc_floats_bottom\fi\relax
+ \global\advance\d_page_floats_inserted_top\dimexpr\ht\floatbox+\dp\floatbox+\s_page_one_between_top_insert\relax}
\def\page_one_insert_top_float % maybe remember last beforeskip
- {\insert\topins
- {\forgetall
- \ifconditional\c_page_one_top_of_insert
- \ifconditional\c_page_one_correct_top_insert
- \topskipcorrection % [xx] new: see icare topbleed
- \kern-\lineskip
- \par
- \prevdepth\maxdimen
- \fi
- \fi
- \page_otr_command_flush_float_box
- \vskip\s_page_one_between_top_insert}}
+ {\insert\namedinsertionnumber\s!topfloat\bgroup
+ \forgetall
+ \ifconditional\c_page_one_top_of_insert
+ \ifconditional\c_page_one_correct_top_insert
+ \topskipcorrection % [xx] new: see icare topbleed
+ \kern-\lineskip
+ \par
+ \prevdepth\maxdimen
+ \fi
+ \fi
+ \page_otr_command_flush_float_box
+ \vskip\s_page_one_between_top_insert
+ \egroup}
\unexpanded\def\page_one_command_set_top_insertions
{\bgroup
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\noffloatinserts\zerocount
\let\totaltopinserted\!!zeropoint
\page_one_command_set_top_insertions_indeed
@@ -228,25 +247,25 @@
\egroup}
\def\page_one_command_set_top_insertions_indeed
- {\ifnum\noffloatinserts<\noftopfloats
- \dogetfloat
+ {\ifnum\noffloatinserts<\c_page_floats_n_of_top
+ \page_floats_get
\page_one_prepare_top_float
- \ifdim\topinserted<\textheight\relax
- \xdef\totaltopinserted{\the\topinserted}%
+ \ifdim\d_page_floats_inserted_top<\textheight\relax
+ \xdef\totaltopinserted{\the\d_page_floats_inserted_top}%
\page_one_insert_top_float
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\advance\noffloatinserts \plusone
\else
- \noffloatinserts\noftopfloats\relax
+ \noffloatinserts\c_page_floats_n_of_top\relax
\fi
- \dofloatflushedinfo
+ \page_floats_report_flushed
\else
- \doresavefloat
- \noffloatinserts\noftopfloats\relax
+ \page_floats_resave\s!text
+ \noffloatinserts\c_page_floats_n_of_top\relax
\fi
\else
- \ifsomefloatwaiting
- \showmessage\m!floatblocks6{\the\noftopfloats}%
+ \ifconditional\c_page_floats_some_waiting
+ \showmessage\m!floatblocks6{\the\c_page_floats_n_of_top}%
\fi
\let\page_one_command_set_top_insertions_indeed\relax
\fi
@@ -254,101 +273,98 @@
\unexpanded\def\page_one_command_set_bottom_insertions
{\bgroup
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\noffloatinserts\zerocount
\page_one_command_set_bottom_insertions_indeed
\fi
\egroup}
\def\page_one_command_set_bottom_insertions_indeed
- {\ifnum\noffloatinserts<\nofbotfloats\relax
- \dogetfloat
- \global\advance\botinserted \ht\floatbox\relax
- \global\advance\botinserted \dp\floatbox\relax
- \global\advance\botinserted \floattopskip\relax
- \ifdim\botinserted<\pagegoal\relax
- \insert\botins
- {\forgetall
- \blank[\rootfloatparameter\c!spacebefore]%
- \page_otr_command_flush_float_box}%
- \ifsomefloatwaiting
+ {\ifnum\noffloatinserts<\c_page_floats_n_of_bottom\relax
+ \page_floats_get
+ \global\advance\d_page_floats_inserted_bottom\dimexpr\ht\floatbox+\dp\floatbox+\d_strc_floats_top\relax
+ \ifdim\d_page_floats_inserted_bottom<\pagegoal\relax
+ \insert\namedinsertionnumber\s!bottomfloat\bgroup
+ \forgetall
+ \blank[\rootfloatparameter\c!spacebefore]%
+ \page_otr_command_flush_float_box
+ \egroup
+ \ifconditional\c_page_floats_some_waiting
\advance\noffloatinserts \plusone
\else
- \noffloatinserts\nofbotfloats
+ \noffloatinserts\c_page_floats_n_of_bottom
\fi
- \dofloatflushedinfo
+ \page_floats_report_flushed
\else
- \doresavefloat
- \noffloatinserts\nofbotfloats\relax
+ \page_floats_resave\s!text
+ \noffloatinserts\c_page_floats_n_of_bottom\relax
\fi
-    \global\nofloatpermittedtrue % cf. topfloats s!
+    \global\settrue\c_page_floats_not_permitted % cf. topfloats s!
\else
- \ifsomefloatwaiting
- \showmessage\m!floatblocks7{\the\nofbotfloats}%
+ \ifconditional\c_page_floats_some_waiting
+ \showmessage\m!floatblocks7{\the\c_page_floats_n_of_bottom}%
\fi
\let\page_one_command_set_bottom_insertions_indeed\relax
\fi
\page_one_command_set_bottom_insertions_indeed}
-\newconstant\topinserttopskipmode % 1 = no topskip
-
\unexpanded\def\page_one_command_flush_top_insertions
- {\ifvoid\topins\else
+ {\ifvoid\namedinsertionnumber\s!topfloat\else
\ifgridsnapping
- \box\topins
+ \box\namedinsertionnumber\s!topfloat
\vskip-\topskip
\vskip\strutheight % [xx] new: see icare topbleed
\else
- \ifcase\topinserttopskipmode
+ \ifcase\c_page_floats_insertions_topskip_mode
% 0: default, do nothing
\or
% 1: no topskip (crossed fingers)
\vskip-\topskip
\vskip\strutheight
\fi
- \unvbox\topins
+ \unvbox\namedinsertionnumber\s!topfloat
\fi
\fi
- \global\topinserted\zeropoint}
+ \global\d_page_floats_inserted_top\zeropoint}
\unexpanded\def\page_one_command_flush_bottom_insertions
- {\ifvoid\botins\else
+ {\ifvoid\namedinsertionnumber\s!bottomfloat\else
\ifgridsnapping
% \floatparameter\c!bottombefore
- \snaptogrid\hbox{\box\botins}%
+ \snaptogrid\hbox{\box\namedinsertionnumber\s!bottomfloat}%
% \floatparameter\c!bottomafter
\else
\floatparameter\c!bottombefore
- \unvbox\botins
+ \unvbox\namedinsertionnumber\s!bottomfloat
\floatparameter\c!bottomafter
\fi
\fi
- \global\botinserted\zeropoint
- \global\nofloatpermittedfalse}
+ \global\d_page_floats_inserted_bottom\zeropoint
+ \global\setfalse\c_page_floats_not_permitted}
\unexpanded\def\page_one_command_flush_floats
- {\global\flushingfloatstrue
- \ifsomefloatwaiting
+ {\global\settrue\c_page_floats_flushing
+ \ifconditional\c_page_floats_some_waiting
\par
% if kept, then option and definitely off in gridmode ! ! ! !
% \ifvmode \prevdepth\maxdimen \fi % prevents whitespace; problematic in icare tests
\page_one_command_flush_floats_indeed
\fi
\global\savednoffloats\zerocount
- \global\somefloatwaitingfalse
- \global\flushingfloatsfalse}
+ \global\setfalse\c_page_floats_some_waiting
+ \global\setfalse\c_page_floats_flushing}
\unexpanded\def\page_one_command_flush_float_box
- {\ifcenterfloatbox \ifdim\wd\floatbox<\hsize
+ {\ifconditional\c_page_floats_center_box \ifdim\wd\floatbox<\hsize
\global\setbox\floatbox\hbox to \hsize{\hss\box\floatbox\hss}%
\fi \fi
- \snaptogrid\hbox{\iftestfloatbox\ruledhbox\fi{\box\floatbox}}} % was copy
+ \snaptogrid\hbox{\box\floatbox}} % was copy
\def\page_one_command_flush_floats_indeed % much in common with OTRSET
- {\ifsomefloatwaiting
- \ifpackflushedfloats
- \centerfloatboxfalse % not needed as we do call directly
- \dofloatscollect\s!text{\hsize}{1em}%
+ {\ifconditional\c_page_floats_some_waiting
+ \ifconditional\c_page_floats_pack_flushed
+ \setfalse\c_page_floats_center_box % not needed as we do call directly
+ \page_floats_collect\s!text\hsize\emwidth
%% no longer (interferes with footnotes):
%%
%% \page_one_command_set_vsize % test 2011.06.24.001
@@ -357,9 +373,9 @@
{\hfil
\dorecurse\nofcollectedfloats
         {\ifcase\columndirection % still document wide
- \dofloatsflush\s!text{1}%
+ \page_floats_flush\s!text\plusone
\else
- \dofloatsflush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
+ \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
\fi
\ifdim\wd\floatbox>\makeupwidth % \hsize
\hbox to \makeupwidth{\hss\box\floatbox\hss}%
@@ -371,7 +387,7 @@
\fi}%
\hfil}%
\else
- \dogetfloat
+ \page_floats_get
\fi
% there is a chance that due to rounding errors, the float
% fits on a page where it was first rejected, in which case
@@ -394,8 +410,8 @@
\fi}
\unexpanded\def\page_one_command_check_if_float_fits
- {\ifnofloatpermitted
- \global\roomforfloatfalse
+ {\ifconditional\c_page_floats_not_permitted
+ \global\setfalse\c_page_floats_room
\else
% new per 31/5/2004, should be an option, only one column mode
\begingroup
@@ -405,10 +421,10 @@
\fi
% should be an option
\endgroup
- \dimen0\dimexpr\pagetotal+\floatheight+\floattopskip-\pageshrink\relax
+ \dimen0\dimexpr\pagetotal+\floatheight+\d_strc_floats_top-\pageshrink\relax
\dimen2\pagegoal
\relax % needed
- \ifcase\textfloatmethod
+ \ifcase\c_page_one_float_method
% method 0 : raw
\or
% method 1 : safe
@@ -419,19 +435,19 @@
\fi
\relax % really needed ! ! ! !
\ifdim\dimen0>\dimen2
- \global\roomforfloatfalse
+ \global\setfalse\c_page_floats_room
\else
- \global\roomforfloattrue
+ \global\settrue\c_page_floats_room
\fi
\fi}
\unexpanded\def\page_one_command_flush_saved_floats
- {\global\topinserted\zeropoint
- \global\botinserted\zeropoint
- \ifflushingfloats \else
+ {\global\d_page_floats_inserted_top\zeropoint
+ \global\d_page_floats_inserted_bottom\zeropoint
+ \ifconditional\c_page_floats_flushing \else
\page_one_command_set_top_insertions
\page_one_command_set_bottom_insertions
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\doif{\rootfloatparameter\c!cache}\v!no\page_one_command_flush_floats % could be _otr_
\else\ifconditional\c_page_margin_blocks_present
\page_one_command_flush_floats
@@ -443,7 +459,7 @@
\baselinecorrection
%\fi
\doplacefloatbox
- \doinsertfloatinfo
+ \page_floats_report_total
\dohandlenextfloatindent}
\def\page_one_place_float_force
@@ -469,10 +485,10 @@
%def\page_one_place_float_margin {\page_one_place_float_side_indeed\page_sides_process_float_margin\nonoindentation} % wil be overloaded
\def\page_one_place_float_margin {\page_margin_blocks_process_float}
-\def\page_one_place_float_page {\dofloatssavepagefloat \s!page \floatlocationmethod}
-\def\page_one_place_float_leftpage {\dofloatssavepagefloat \s!leftpage \floatlocationmethod}
-\def\page_one_place_float_rightpage {\dofloatssavepagefloat \s!rightpage\floatlocationmethod}
-\def\page_one_place_float_somewhere {\dofloatssavesomewherefloat\s!somewhere\floatlocationmethod}
+\def\page_one_place_float_page {\page_floats_save_page_float \s!page \floatlocationmethod}
+\def\page_one_place_float_leftpage {\page_floats_save_page_float \s!leftpage \floatlocationmethod}
+\def\page_one_place_float_rightpage {\page_floats_save_page_float \s!rightpage\floatlocationmethod}
+\def\page_one_place_float_somewhere {\page_floats_save_somewhere_float\s!somewhere\floatlocationmethod}
\def\page_one_place_float_here {\page_one_place_float_otherwise_here}
\def\page_one_place_float_auto {\page_one_place_float_otherwise}
@@ -488,38 +504,41 @@
{\doifinsetelse\v!always\floatlocationmethod
{\page[\v!preference]%
\page_otr_command_check_if_float_fits
- \ifroomforfloat
+ \ifconditional\c_page_floats_room
\page_one_place_float_here_indeed
\else
\showmessage\m!floatblocks9\empty
- \doreversesavefloat
+ \page_floats_resave\s!text
\fi}
- {\ifsomefloatwaiting
- \dosavefloat
+ {\ifconditional\c_page_floats_some_waiting
+ \page_floats_save\s!text
+ \nonoindentation
\else
\page[\v!preference]%
\page_otr_command_check_if_float_fits
- \ifroomforfloat
+ \ifconditional\c_page_floats_room
\page_one_place_float_here_indeed
\else
- \dosavefloat
+ \page_floats_save\s!text
+ \nonoindentation
\fi
\fi}}
\def\page_one_place_float_otherwise_else
{\doifinsetelse\v!always\floatlocationmethod
{\page_otr_command_check_if_float_fits
- \ifroomforfloat
+ \ifconditional\c_page_floats_room
\page_one_place_float_auto_top_bottom
\else
\showmessage\m!floatblocks9\empty
- \doreversesavefloat
+ \page_floats_resave\s!text
\fi}
{\page_otr_command_check_if_float_fits
- \ifroomforfloat
+ \ifconditional\c_page_floats_room
\page_one_place_float_auto_top_bottom
\else
- \dosavefloat
+ \page_floats_save\s!text
+ \nonoindentation
\fi}}
\def\floatautofactor{.5}
@@ -544,23 +563,23 @@
\def\page_one_place_float_top_indeed % maybe remember last beforeskip
{\page_one_prepare_top_float
\page_one_insert_top_float
- \doinsertfloatinfo}
+ \page_floats_report_total}
\def\page_one_place_float_bottom_indeed
- {\global\advance\botinserted\dimexpr\ht\floatbox+\dp\floatbox+\floattopskip\relax
- \insert\botins
- {\forgetall
- \blank[\rootfloatparameter\c!spacebefore]%
- \page_otr_command_flush_float_box}%
- %\global\nofloatpermittedtrue
- \doinsertfloatinfo}
+ {\global\advance\d_page_floats_inserted_bottom\dimexpr\ht\floatbox+\dp\floatbox+\d_strc_floats_top\relax
+ \insert\namedinsertionnumber\s!bottomfloat\bgroup
+ \forgetall
+ \blank[\rootfloatparameter\c!spacebefore]%
+ \page_otr_command_flush_float_box
+ \egroup
+ \page_floats_report_total}
\def\page_one_place_float_face % left, right, middle, high, middle, low
{%\checkwaitingfloats{#1}%
\startopposite
\page_otr_command_flush_float_box
\stopopposite
- }%\doinsertfloatinfo}
+ }%\page_floats_report_total}
\unexpanded\def\page_one_command_flush_side_floats
{\page_sides_flush_floats}
diff --git a/Master/texmf-dist/tex/context/base/page-otr.mkvi b/Master/texmf-dist/tex/context/base/page-otr.mkvi
index 19c308c32af..e5433c866fa 100644
--- a/Master/texmf-dist/tex/context/base/page-otr.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-otr.mkvi
@@ -258,8 +258,8 @@
\definesystemconstant{page_otr_command_flush_margin_blocks}
\definesystemconstant{singlecolumn}
-\definesystemconstant{multicolumn}
-\definesystemconstant{columnset}
+\definesystemconstant{multicolumn} % will move
+\definesystemconstant{columnset} % will move
\defineoutputroutinecommand
[\s!page_otr_command_routine,
diff --git a/Master/texmf-dist/tex/context/base/page-par.mkiv b/Master/texmf-dist/tex/context/base/page-par.mkiv
index 26e306ef50f..588065d6563 100644
--- a/Master/texmf-dist/tex/context/base/page-par.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-par.mkiv
@@ -15,49 +15,59 @@
\unprotect
-\newcount\internalparagraphnumber
-
-\unexpanded\def\setupparagraphnumbering
- {\dosingleempty\dosetupparagraphnumbering}
-
-\def\dosetupparagraphnumbering[#1]%
- {\getparameters
- [\??ph][#1]%
- \processaction
- [\@@phstate]
- [\v!start=>\let\showparagraphnumber\doshowparagraphnumberA,
- \v!stop=>\let\showparagraphnumber\relax,
- \v!line=>\let\showparagraphnumber\doshowparagraphnumberB,
- \v!reset=>\global\internalparagraphnumber\zerocount
- \let\showparagraphnumber\doshowparagraphnumberA]}
-
-\def\dodoshowparagraphnumber
- {\global\advance\internalparagraphnumber \plusone
- \inleftmargin % \tf normalizes em
- {\tf
- \dousestyleparameter\@phstyle
- \dousecolorparameter\@phcolor
- \the\internalparagraphnumber
- \kern\@@phdistance}}
-
-\def\doshowparagraphnumberA
- {%\ifprocessingverbatim
- % \iflinepar
- % % obsolete: \dodoshowparagraphnumber
- % \fi
- %\else
- \dodoshowparagraphnumber
- }%\fi}
-
-\def\doshowparagraphnumberB
+\installcorenamespace {paragraphnumbering}
+\installcorenamespace {paragraphnumberingvariants}
+
+\installsimplecommandhandler \??paragraphnumbering {paragraphnumbering} \??paragraphnumbering
+
+\definecounter[\v!paragraph]
+
+\let\showparagraphnumber\relax
+
+\appendtoks
+ \page_par_check_state
+\to \everysetupparagraphnumbering
+
+\unexpanded\def\page_par_check_state
+ {\rawprocesscommacommand[\paragraphnumberingparameter\c!state]\page_par_check_state_step}
+
+\def\page_par_check_state_step#1%
+ {\ifcsname\??paragraphnumberingvariants#1\endcsname
+ \csname\??paragraphnumberingvariants#1\endcsname
+ \fi}
+
+\setvalue{\??paragraphnumberingvariants\v!start}%
+ {\let\showparagraphnumber\page_par_show_number_normal}
+
+\setvalue{\??paragraphnumberingvariants\v!stop}%
+ {\let\showparagraphnumber\relax}
+
+\setvalue{\??paragraphnumberingvariants\v!line}%
+ {\let\showparagraphnumber\page_par_show_number_lines}
+
+\setvalue{\??paragraphnumberingvariants\v!reset}%
+ {\strc_counters_reset\v!paragraph
+ \let\showparagraphnumber\page_par_show_number_normal}
+
+\unexpanded\def\page_par_show_number_normal
+ {\strc_counters_increment\v!paragraph
+ \inleftmargin % todo: \c!location, only a few make sense
+ {\hfill % no complaints
+ \tf % \tf normalizes em
+ \useparagraphnumberingstyleandcolor\c!style\c!color
+ \convertedcounter[\v!paragraph]%
+ \kern\paragraphnumberingparameter\c!distance}}
+
+\unexpanded\def\page_par_show_number_lines
{\ifnumberinglines
- \doshowparagraphnumberA
+ \page_par_show_number_normal
\fi}
\setupparagraphnumbering
[\c!state=\v!stop,
- \c!style=,
- \c!color=,
- \c!distance=\ifcase\linenumberlocation2em\else\!!zeropoint\fi] % will change
+ %\c!location,
+ %\c!style=,
+ %\c!color=,
+ \c!distance=\ifcase\c_page_lines_location2\emwidth\else\zeropoint\fi] % will change
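A small usage sketch for the reworked mechanism; state, distance and style are
the keys handled above, while the values are merely examples:

  \setupparagraphnumbering[state=start,distance=1em,style=\ssx]
  \input tufte
  \setupparagraphnumbering[state=stop]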
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-plg.mkiv b/Master/texmf-dist/tex/context/base/page-plg.mkiv
index 004b9204d38..b59a05a9454 100644
--- a/Master/texmf-dist/tex/context/base/page-plg.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-plg.mkiv
@@ -11,10 +11,11 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% \ifx\pageareabox\undefined \else \endinput \fi
-
\writestatus{loading}{ConTeXt Page Macros / Extra Page Building}
+%D This still needs some work; especially the dimensions need to be
+%D checked and optimized. Beware: the method key has become alternative!
+
%D This feature has been present for a while but has never been
%D exploited: pluggable pagebuilders. The next example code
%D demonstrates the application of one such plug-in. This variant
@@ -31,6 +32,8 @@
%D \startpagelayout[leftpage]
%D \setupTABLE[offset=overlay]
%D \setupTABLE[c][1][width=\leftmarginwidth]
+%D \setupTABLE[c][2][width=\textwidth]
+%D \setupTABLE[c][3][width=\rightmarginwidth]
%D \bTABLE
%D \bTR
%D \bTD[nx=3,background=color,backgroundcolor=green]
@@ -43,7 +46,7 @@
%D \eTR
%D \bTR
%D \bTD[nx=3,offset=overlay]
-%D {\bTABLE[width=.5\hsize]
+%D {\bTABLE
%D \bTR
%D \bTD \pagearea[footer][text][left] \eTD
%D \bTD \pagearea[bottom][text][left] \eTD
@@ -56,7 +59,9 @@
%D
%D \startpagelayout[rightpage]
%D \setupTABLE[offset=overlay]
-%D \setupTABLE[c][1][width=\rightmarginwidth]
+%D \setupTABLE[c][1][width=\leftmarginwidth]
+%D \setupTABLE[c][2][width=\textwidth]
+%D \setupTABLE[c][3][width=\rightmarginwidth]
%D \bTABLE
%D \bTR
%D \bTD[nx=3] \pagearea[header][text][middle] \eTD
@@ -67,7 +72,7 @@
%D \eTR
%D \bTR
%D \bTD[nx=3,offset=overlay]
-%D {\bTABLE[width=.5\hsize]
+%D {\bTABLE
%D \bTR
%D \bTD \pagearea[bottom][text][right] \eTD
%D \bTD \pagearea[footer][text][right] \eTD
@@ -80,12 +85,9 @@
%D
%D \setupcolors[state=start]
%D
-%D \setupbackgrounds[text][background=color,backgroundcolor=blue]
-%D \setupbackgrounds[header][text][background=color,backgroundcolor=red]
-%D
%D \setuppagenumbering[alternative=doublesided,location=]
%D
-%D \setuplayout[method=makeup]
+%D \setuplayout[alternative=makeup]
%D
%D \definetextbackground
%D [test]
@@ -110,6 +112,8 @@
\unprotect
+\installcorenamespace{layoutmakeupalternative}
+
\def\page_boxes_construct_content_makeup#1#2#3% targetbox flusher box
{\setbox#1\hbox
{\vbox to \textheight
@@ -125,65 +129,77 @@
\dp#1\zeropoint
\hsize\paperwidth
\vsize\paperheight
- \setbox#1\vbox{\csname\??layoutmethod\doifbothsidesoverruled\v!page\v!rightpage\v!leftpage\endcsname}%
+ \setbox#1\vbox{\csname\??layoutmakeupalternative\doifbothsidesoverruled\v!page\v!rightpage\v!leftpage\endcsname}%
\wd#1\paperwidth
\ht#1\paperheight
\dp#1\zeropoint}
\installlayoutalternative\v!makeup{\page_boxes_construct_content_makeup}
-\newbox\pageareabox
+\newbox\b_page_area
\unexpanded\def\pagearea
{\dotripleempty\page_area}
-\def\page_area[#1][#2][#3]%
+\def\page_area
{\ifthirdargument
- \doifelse{#3}\v!left
- {\page_area_indeed{#1}{#2}\c!lefttext}
- {\doifelse{#3}\v!right
- {\page_area_indeed{#1}{#2}\c!righttext}
- {\page_area_indeed{#1}{#2}\c!middletext}}%
+ \expandafter\page_area_three
\else\ifsecondargument
- \doifbothsidesoverruled
- {\page_area_indeed{#1}{#2}\c!righttext}
- {\page_area_indeed{#1}{#2}\c!righttext}
- {\page_area_indeed{#1}{#2}\c!lefttext }%
+ \doubleexpandafter\page_area_two
\else
- \doif{#1}\v!text % copy due to trial runs in TABLE
- {\iftrialtypesetting
-% \copy\pagebox
- \fakebox\pagebox
- \else
-% \localpositioningfalse
- \page_backgrounds_add_to_text\pagebox
- \page_grids_add_to_box\pagebox
- \box\pagebox
- \fi}%
+ \doubleexpandafter\page_area_one
\fi\fi}
-\def\page_area_indeed#1#2#3%
- {\setbox\pageareabox\vbox{\getspecificlayouttext{#1}{#2}{#3}}%
- \ifsomebackgroundfound{#1#2}%
+\def\page_area_three[#1][#2][#3]%
+ {\edef\m_page_area_three{#3}%
+ \ifx\m_page_area_three\v!left
+ \let\m_page_area_three\c!lefttext
+ \else\ifx\m_page_area_three\v!right
+ \let\m_page_area_three\c!righttext
+ \else
+ \let\m_page_area_three\c!middletext
+ \fi\fi
+ \page_area_indeed{#1}{#2}\m_page_area_three}
+
+\def\page_area_two[#1][#2][#3]%
+ {\edef\m_page_area_three{\doifbothsidesoverruled\c!righttext\c!righttext\c!lefttext}%
+ \page_area_indeed{#1}{#2}\m_page_area_three}
+
+\def\page_area_one[#1][#2][#3]%
+ {\edef\m_page_area_one{#1}
+ \ifx\m_page_area_one\v!text % copy due to trial runs in TABLE
\iftrialtypesetting
-% \box\pageareabox
- \fakebox\pageareabox
+ \fakebox\pagebox
\else
- \localframed
- [\??ma#1#2]
- [\c!width=\wd\pageareabox,
- \c!height=\ht\pageareabox,
- \c!offset=\v!overlay]
- {\box\pageareabox}%
+ \page_backgrounds_add_to_text\pagebox
+ \page_grids_add_to_box\pagebox
+ \box\pagebox
\fi
- \else
- \box\pageareabox
\fi}
+\def\page_area_indeed#1#2#3%
+ {\setbox\b_page_area\vbox{\getspecificlayouttext{#1}{#2}{#3}}%
+ \iftrialtypesetting
+ \fakebox\b_page_area
+ \else\ifcsname\??layoutbackgrounds#1#2\endcsname
+ \page_area_indeed_yes{#1}{#2}%
+ \else
+ \box\b_page_area
+ \fi\fi}
+
+\def\page_area_indeed_yes#1#2%
+ {% todo: \localbackgroundframed{\??layoutbackgrounds#1#2}{#1:#2}
+ \localframed % todo:
+ [\??layoutbackgrounds#1#2]
+ [\c!width=\wd\b_page_area,
+ \c!height=\ht\b_page_area,
+ \c!offset=\v!overlay]
+ {\box\b_page_area}}
+
% to be done nicely (proper namespacing)
-\setvalue{\??layoutmethod\v!leftpage }{\csname\??layoutmethod\v!page\endcsname}
-\setvalue{\??layoutmethod\v!rightpage}{\csname\??layoutmethod\v!page\endcsname}
+\setvalue{\??layoutmakeupalternative\v!leftpage }{\csname\??layoutmakeupalternative\v!page\endcsname}
+\setvalue{\??layoutmakeupalternative\v!rightpage}{\csname\??layoutmakeupalternative\v!page\endcsname}
\unexpanded\def\startpagelayout
{\bgroup
@@ -194,6 +210,6 @@
\def\page_layouts_start_layout[#1]#2\stoppagelayout
{\egroup
- \setvalue{\??layoutmethod#1}{#2}}
+ \setvalue{\??layoutmakeupalternative#1}{#2}}
\protect \endinput
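
A minimal usage sketch of the makeup layout mechanism, recombining only
commands that occur in the %D documentation above (\setuplayout,
\startpagelayout, \setupTABLE, \pagearea); the empty margin cells and the
single body row are placeholders, so this is an illustration rather than the
module's own example:

  \setuplayout[alternative=makeup]

  \startpagelayout[rightpage]
    \setupTABLE[offset=overlay]
    \setupTABLE[c][1][width=\leftmarginwidth]
    \setupTABLE[c][2][width=\textwidth]
    \setupTABLE[c][3][width=\rightmarginwidth]
    \bTABLE
      \bTR \bTD[nx=3] \pagearea[header][text][middle] \eTD \eTR
      \bTR
        \bTD \eTD                   % left margin placeholder
        \bTD \pagearea[text] \eTD   % flushes the page body
        \bTD \eTD                   % right margin placeholder
      \eTR
      \bTR
        \bTD[nx=3,offset=overlay]
          {\bTABLE
             \bTR
               \bTD \pagearea[bottom][text][right] \eTD
               \bTD \pagearea[footer][text][right] \eTD
             \eTR
           \eTABLE}
        \eTD
      \eTR
    \eTABLE
  \stoppagelayout
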
diff --git a/Master/texmf-dist/tex/context/base/page-mis.lua b/Master/texmf-dist/tex/context/base/page-pst.lua
index 4f9d3c3e208..8586830cf24 100644
--- a/Master/texmf-dist/tex/context/base/page-mis.lua
+++ b/Master/texmf-dist/tex/context/base/page-pst.lua
@@ -1,6 +1,6 @@
-if not modules then modules = { } end modules ['page-mis'] = {
+if not modules then modules = { } end modules ['page-pst'] = {
version = 1.001,
- comment = "companion to page-mis.mkiv",
+ comment = "companion to page-pst.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['page-mis'] = {
-- todo: adapt message
+local format, validstring = string.format, string.valid
local sortedkeys = table.sortedkeys
local cache = { }
@@ -16,7 +17,7 @@ local function flush(page)
local c = cache[page]
if c then
for i=1,#c do
- context.viafile(c[i])
+ context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
end
cache[page] = nil
end
diff --git a/Master/texmf-dist/tex/context/base/page-mis.mkiv b/Master/texmf-dist/tex/context/base/page-pst.mkiv
index 6137bb462c9..7f8a39ca6b4 100644
--- a/Master/texmf-dist/tex/context/base/page-mis.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-pst.mkiv
@@ -2,7 +2,7 @@
%D [ file=page-mis,
%D version=2008.11.17, % was part of page-flt.tex / 2000.10.20
%D title=\CONTEXT\ Page Macros,
-%D subtitle=Misc Float Things,
+%D subtitle=Postponing,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -11,9 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\writestatus{loading}{ConTeXt Page Macros / Misc Float Things}
+\writestatus{loading}{ConTeXt Page Macros / Postponing}
-\registerctxluafile{page-mis}{1.001}
+\registerctxluafile{page-pst}{1.001}
\unprotect
@@ -21,8 +21,13 @@
\newevery\everytopofpage\relax
-\appendtoks \the\everytopofpage \to\everystarttext
-\appendtoks\global\everytopofpage\emptytoks\to\everystoptext
+\appendtoks
+ \the\everytopofpage
+\to \everystarttext
+
+\appendtoks
+ \global\everytopofpage\emptytoks
+\to \everystoptext
% \startpostponing [pagenumber] [+pageoffset]
%
@@ -44,8 +49,10 @@
%D is somewhat easier and we can also erase buffers more easily when
%D we keep a local cache, especially as we can flush per page.
-\newif \ifinpostponing % prevents nesting
-\newcount\c_page_postponed_blocks_next_page % set at the lua end
+%newif \ifinpostponing % prevents nesting
+
+\newcount \c_page_postponed_blocks_next_page % set at the lua end
+\newconditional\c_page_postponed_busy
\unexpanded\setvalue{\e!start\v!postponing}%
{\bgroup
@@ -65,11 +72,11 @@
\unexpanded\def\page_postponed_blocks_flush_indeed
{\begingroup
\setsystemmode\v!postponing
- \inpostponingtrue % for old times sake
- \global\pagetotal\zeropoint % here? still needed? (was after flush pagefloats)
+ \settrue\c_page_postponed_busy
+ \pagetotal\zeropoint % here? still needed? (was after flush pagefloats)
\the\everytopofpage\relax
%\flushrestfloats
- \doflushpagefloats
+ \page_floats_flush_page_floats
\setnormalcatcodes % postponing in verbatim
\restoreglobalbodyfont % otherwise problems inside split verbatim
\ctxcommand{flushpostponedblocks()}%
@@ -77,8 +84,8 @@
\page_otr_command_flush_floats % new but potentially dangerous, maybe we need a classification
\endgroup} % of blocks: with and without flush
-\def\page_postponed_blocks_flush
- {\ifinpostponing
+\unexpanded\def\page_postponed_blocks_flush
+ {\ifconditional\c_page_postponed_busy
% probably a nested flush
\else\ifnum\c_page_postponed_blocks_next_page=\zerocount
% nothing in cache
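
A minimal sketch of driving the postponing mechanism that the renamed
page-pst files implement; the bracket syntax follows the comment
"\startpostponing [pagenumber] [+pageoffset]" above, while the figure name
and the filler text are placeholders:

  \starttext
    \startpostponing[3] % flush this block when page 3 is typeset
      \placefigure
        {A figure that should only show up on page three.}
        {\externalfigure[dummy][width=\textwidth]}
    \stoppostponing
    \dorecurse{20}{\input tufte \par} % enough filler to reach page 3
  \stoptext
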
diff --git a/Master/texmf-dist/tex/context/base/page-run.mkiv b/Master/texmf-dist/tex/context/base/page-run.mkiv
index 62b1030ee11..dabf3725292 100644
--- a/Master/texmf-dist/tex/context/base/page-run.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-run.mkiv
@@ -242,16 +242,16 @@ end
[\v!page]
[\c!frame=\v!on,
\c!corner=\v!rectangular,
- \c!frameoffset=\!!zeropoint,
- \c!framedepth=\!!zeropoint,
+ \c!frameoffset=\zeropoint,
+ \c!framedepth=\zeropoint,
\c!framecolor=layout:page]
\setupbackgrounds
[#1][#2]
[\c!background=,
\c!frame=\v!on,
\c!corner=\v!rectangular,
- \c!frameoffset=\!!zeropoint,
- \c!framedepth=\!!zeropoint,
+ \c!frameoffset=\zeropoint,
+ \c!framedepth=\zeropoint,
\c!framecolor=]
\else\iffirstargument
\showframe
diff --git a/Master/texmf-dist/tex/context/base/page-sel.mkiv b/Master/texmf-dist/tex/context/base/page-sel.mkiv
deleted file mode 100644
index 1e706aa2220..00000000000
--- a/Master/texmf-dist/tex/context/base/page-sel.mkiv
+++ /dev/null
@@ -1,347 +0,0 @@
-%D \module
-%D [ file=page-sel, % moved from page-imp
-%D version=1998.01.15,
-%D title=\CONTEXT\ Page Macros,
-%D subtitle=Page Selection,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D This will become runtime loaded code.
-
-\writestatus{loading}{ConTeXt Page Macros / Page Selection}
-
-\unprotect
-
-%D One can (mis)use this mechanism, in close cooperation
-%D with \PDFTEX\ to arrange pages of already produced files.
-%D
-%D \starttyping
-%D \insertpages[file.pdf][1,3][n=30,width=18cm]
-%D \stoptyping
-%D
-%D The pages are inserted in the text area, and even pages
-%D are repositioned according to the width. In this example
-%D empty pages are added after page 1 and 3.
-%D
-%D Selecting pages can be accomplished by:
-%D
-%D \starttyping
-%D \filterpages[file.pdf][1,3,5][n=30,width=18cm]
-%D \stoptyping
-%D
-%D One may pass \type {odd} or \type {even} instead of a
-%D comma separated list. A third alternative is:
-%D
-%D \starttyping
-%D \copypages[file.pdf][n=30,scale=950]
-%D \stoptyping
-%D
-%D This macros inserts the page, according to the settings
-%D provided.
-
-\def\insertpages
- {\dotripleempty\doinsertpages}
-
-\def\doinsertpages[#1][#2][#3]%
- {\doifassignmentelse{#2}
- {\dodoinsertpages[#1][][#2]}
- {\dodoinsertpages[#1][#2][#3]}}
-
-\def\dodoinsertpages[#1][#2][#3]%
- {\bgroup
- \dontcomplain
- \getfiguredimensions[#1]%
- \getparameters[\??ip][\c!n=\noffigurepages,\c!width=\!!zeropoint,#3]%
- \doifinset0{#2}{\emptyhbox\page}%
- \dorecurse\@@ipn
- {\dofilterpage{#1}\recurselevel
- \doifinset\recurselevel{#2}{\emptyhbox\page}}%
- \egroup}
-
-\def\filterpages
- {\dotripleempty\dofilterpages}
-
-\def\dofilterpages[#1][#2][#3]% % \noffigurepages not yet supported
- {\bgroup
- \dontcomplain
- \getfiguredimensions[#1]%
- \getparameters[\??ip][\c!n=\noffigurepages,\c!width=\!!zeropoint,#3]%
- \doifelse{#2}\v!even
- {\dorecurse\@@ipn
- {\ifodd\recurselevel\relax\else\dofilterpage{#1}\recurselevel\fi}}
- {\doifelse{#2}\v!odd
- {\dorecurse\@@ipn
- {\ifodd\recurselevel\relax\dofilterpage{#1}\recurselevel\fi}}
- {\def\dodocommand##1{\ifnum##1>\@@ipn\else\dofilterpage{#1}{##1}\fi}%
- \def\docommand ##1{\dowithrange{##1}\dodocommand}%
- \processcommalist[#2]\docommand}}%
- \egroup}
-
-\def\dofilterpage#1#2%
- {\hbox to \textwidth
- {\ifdoublesided\ifdim\@@ipwidth>\zeropoint\relax\ifodd\realpageno\else
- \hfill
- \def\dowithfigure{\hskip-\@@ipwidth}%
- \fi\fi\fi
- \setbox0\hbox
- {\externalfigure[#1][\c!page=#2,\c!height=\textheight]}%
- \wd0\zeropoint
- \box0}
- \page}
-
-\def\copypages
- {\dodoubleempty\docopypages}
-
-\def\docopypages[#1][#2]%
- {\bgroup
- \getfiguredimensions[#1]%
- \getparameters[\??ip]
- [\c!n=\noffigurepages,
- \c!marking=\v!off,
- \c!scale=\!!thousand,
- \c!offset=\!!zeropoint,
- #2]%
- \dorecurse\@@ipn
- {\vbox to \textheight
- {\hsize\textwidth
- \scratchdimen\@@ipoffset
- \centeredbox
- {\doifelse\@@ipmarking\v!on\cuthbox\hbox
- {\ifdim\scratchdimen>\zeropoint\relax
- \advance\vsize -2\scratchdimen
- \advance\hsize -2\scratchdimen
- \externalfigure[#1][\c!page=\recurselevel,#2,\c!scale=,\c!factor=\v!max,\c!offset=\v!overlay]%
- \else
- \externalfigure[#1][\c!page=\recurselevel,#2,\c!offset=\v!overlay]%
- \fi}}}
- \page}
- \egroup}
-
-%D \macros
-%D {combinepages}
-%D
-%D Yet another way of postprocessing is handles by \type
-%D {\combinepages}. This macro builds a matrix of pages from a
-%D file, for example:
-%D
-%D \starttyping
-%D \setuppapersize
-%D [A4][A4] % or [A4,landscape][A4,landscape]
-%D
-%D \setuplayout
-%D [header=0pt,footer=1cm,
-%D backspace=1cm,topspace=1cm,
-%D width=middle,height=middle]
-%D
-%D \setupfootertexts
-%D [presentation---\currentdate\space---\space\pagenumber]
-%D
-%D \starttext
-%D \combinepages[slides][nx=2,ny=3,frame=on]
-%D \stoptext
-%D \stoptyping
-%D
-%D One can influence the way the pages are combined. (This
-%D will be explained some time.)
-
-\def\combinepages
- {\dodoubleempty\docombinepages}
-
-\def\docombinepages[#1][#2]% a=perpag b=free
- {\bgroup
- \dontcomplain
- \getfiguredimensions[#1]%
- \getparameters
- [\??ip]
- [\c!alternative=\v!a,
- \c!n=\noffigurepages,\c!nx=2,\c!ny=2,\c!start=1,\c!stop=\!!maxcard,
- \c!distance=\bodyfontsize,
- \c!bottom=\vfill,\c!top=\vss,
- \c!left=\hss,\c!right=\hss,
- \c!before=\page,\c!after=\page,\c!inbetween=\blank,
- \c!frame=,\c!background=,\c!backgroundcolor=,
- #2]%
- \def\@@ipname{#1}%
- \@@ipbefore
- \expandcheckedcsname{\??ip::\c!alternative:}\@@ipalternative\v!b
- \@@ipafter
- \egroup}
-
-\setvalue{\??ip::\c!alternative:\v!a}%
- {\global\combinedpagescounter\@@ipstart
- \doloop
- {\vbox to \textheight
- {\hsize\textwidth % ? ?
- \scratchdimen\@@ipdistance
- \!!widtha \dimexpr(\hsize-\@@ipnx\scratchdimen+\scratchdimen)/\@@ipnx\relax
- \!!heighta\dimexpr(\vsize-\@@ipny\scratchdimen+\scratchdimen)/\@@ipny\relax
- \dorecurse\@@ipny
- {\hbox to \hsize
- {\dorecurse\@@ipnx
- {\vbox to \!!heighta
- {\hsize\!!widtha
- \vsize\!!heighta
- \@@iptop
- \hbox to \hsize
- {\@@ipleft
- \ifnum\combinedpagescounter>\@@ipstop\relax
- \globallet\@@ipn\!!zerocount
- \else\ifnum\combinedpagescounter>\@@ipn \else
- \externalfigure[\@@ipname]
- [\c!object=\v!no,
- \c!page=\number\combinedpagescounter,
- \c!factor=\v!max,
- \c!background=\@@ipbackground,
- \c!backgroundcolor=\@@ipbackgroundcolor,
- \c!frame=\@@ipframe]%
- \fi\fi
- \@@ipright}
- \@@ipbottom}%
- \global\advance\combinedpagescounter\plusone
- \hfil}%
- \hfilneg}
- \vfil}%
- \vfilneg}%
- \page
- \ifnum\combinedpagescounter>\@@ipn \exitloop\fi}}
-
-\setvalue{\??ip::\c!alternative:\v!c}%
- {\global\combinedpagescounter\@@ipstart
- \doloop
- {\vbox to \textheight
- {\hsize\textwidth % ? ?
- \scratchdimen\@@ipdistance
- \!!widtha \dimexpr(\hsize-\@@ipnx\scratchdimen+\scratchdimen)/\@@ipnx\relax
- \!!heighta\dimexpr(\vsize-\@@ipny\scratchdimen+\scratchdimen)/\@@ipny\relax
- \hbox to \hsize
- {\dorecurse\@@ipnx
- {\@@ipleft
- \vbox to \textheight
- {\hsize\!!widtha
- {\dorecurse\@@ipny
- {\@@iptop
- \hbox to \hsize
- {\vbox to \!!heighta
- {\hsize\!!widtha
- \vsize\!!heighta
- \ifnum\combinedpagescounter>\@@ipstop\relax
- \globallet\@@ipn\!!zerocount
- \else\ifnum\combinedpagescounter>\@@ipn \else
- \externalfigure[\@@ipname]
- [\c!object=\v!no,
- \c!page=\number\combinedpagescounter,
- \c!factor=\v!max,
- \c!background=\@@ipbackground,
- \c!backgroundcolor=\@@ipbackgroundcolor,
- \c!frame=\@@ipframe]%
- \fi\fi}}
- \global\advance\combinedpagescounter\plusone
- \@@ipbottom}%
- \vfil}%
- \vfilneg}
- \hfil}%
- \hfilneg}}
- \page
- \ifnum\combinedpagescounter>\@@ipn \exitloop\fi}}
-
-\setvalue{\??ip::\c!alternative:\v!horizontal}{\getvalue{\??ip::\c!alternative:\v!a}}
-\setvalue{\??ip::\c!alternative:\v!vertical }{\getvalue{\??ip::\c!alternative:\v!c}}
-
-\setvalue{\??ip::\c!alternative:\v!b}%
- {\global\combinedpagescounter\@@ipstart
- \doloop
- {\startbaselinecorrection
- \scratchdimen\@@ipdistance
- \!!widtha\dimexpr(\hsize-\@@ipnx\scratchdimen+\scratchdimen)/\@@ipnx\relax
- \hbox to \hsize
- {\dorecurse\@@ipnx
- {\global\advance\combinedpagescounter\plusone
- \ifnum\combinedpagescounter>\@@ipn \else
- \normalexpanded{\noexpand\externalfigure[\@@ipname]
- [\c!page=\number\combinedpagescounter,
- \c!width=\the\!!widtha,% todo \freezedimenmacro
- \c!background=\@@ipbackground,
- \c!backgroundcolor=\@@ipbackgroundcolor,
- \c!frame=\@@ipframe]}%
- \hfill
- \fi}\hfillneg}%
- \stopbaselinecorrection
- \ifnum\combinedpagescounter<\@@ipn\relax
- \@@ipinbetween
- \else
- \exitloop
- \fi}}
-
-% This macro cuts a page into n parts that can be pasted
-% together.
-
-\def\slicepages
- {\dotripleempty\doslicepages}
-
-\def\doslicepages[#1][#2][#3]%
- {\ifthirdargument
- \dodoslicepages[#1][#2][#3]%
- \else
- \dodoslicepages[#1][#2][#2]%
- \fi}
-
-\newcounter\slicedpagenumber
-
-\def\dodoslicepages[#1][#2][#3]%
- {\bgroup
- \dontcomplain
- \globallet\slicedpagenumber\!!zerocount
- \getfiguredimensions[#1]
- \getparameters
- [\??ip]
- [\c!n=1,
- \c!offset=\!!zeropoint,
- \c!hoffset=\!!zeropoint,\c!voffset=\!!zeropoint,
- \c!width=\figurewidth,\c!height=\figureheight,#2]
- \ifnum\@@ipn>\zerocount
- \definepapersize
- [\s!dummy][\c!height=\@@ipheight,\c!width=\@@ipwidth]
- \setuppapersize
- [\s!dummy][\s!dummy]
- \setuplayout
- [\c!backspace=\!!zeropoint,\c!topspace=\!!zeropoint,
- \c!height=\v!middle,\c!width=\v!middle,
- \c!textdistance=\!!zeropoint,
- \c!header=\!!zeropoint,\c!footer=\!!zeropoint]
- \fi
- \dorecurse\noffigurepages
- {\global\let\slicedpagenumber\recurselevel
- \ifnum\@@ipn>\plusone
- \dorecurse\@@ipn
- {\let\xslice\recurselevel
- \dorecurse\@@ipn
- {\let\yslice\recurselevel
- \clip
- [\c!nx=\@@ipn,\c!ny=\@@ipn,\c!x=\xslice,\c!y=\yslice]
- {\scale
- [\c!scale=\@@ipn000]
- {\externalfigure[#1][\c!page=\slicedpagenumber]}}
- \page}}
- \else
- \ifodd\slicedpagenumber\relax
- \getparameters[\??ip][#2]
- \else
- \getparameters[\??ip][#3]
- \fi
- \hskip\@@ipoffset
- \clip
- [\c!hoffset=\@@iphoffset,\c!voffset=\@@ipvoffset,
- \c!height=\@@ipheight,\c!width=\@@ipwidth]
- {\externalfigure[#1][\c!page=\slicedpagenumber]}
- \page
- \fi}
- \egroup}
-
-% \starttext \slicepages[slice1.pdf][n=3] \stoptext
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-sel.mkvi b/Master/texmf-dist/tex/context/base/page-sel.mkvi
new file mode 100644
index 00000000000..cb9bcb509eb
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-sel.mkvi
@@ -0,0 +1,369 @@
+%D \module
+%D [ file=page-sel, % moved from page-imp
+%D version=1998.01.15,
+%D title=\CONTEXT\ Page Macros,
+%D subtitle=Page Selection,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This code relates to old texexec features, and one can wonder whether it
+%D needs to be in the core. So, this could become runtime-loaded code. Some of
+%D the alternatives need checking.
+
+\writestatus{loading}{ConTeXt Page Macros / Page Selection}
+
+\unprotect
+
+%D One can (mis)use this mechanism to (re)arrange pages of already produced
+%D files.
+%D
+%D \starttyping
+%D \insertpages[file.pdf][1,3][n=30,width=18cm]
+%D \stoptyping
+%D
+%D The pages are inserted in the text area, and even pages are repositioned
+%D according to the width. In this example empty pages are added after pages
+%D 1 and 3.
+%D
+%D Selecting pages can be accomplished by:
+%D
+%D \starttyping
+%D \filterpages[file.pdf][1,3,5][n=30,width=18cm]
+%D \stoptyping
+%D
+%D One may pass \type {odd} or \type {even} instead of a comma-separated list. A
+%D third alternative is:
+%D
+%D \starttyping
+%D \copypages[file.pdf][n=30,scale=950]
+%D \stoptyping
+%D
+%D This macros inserts the page, according to the settings provided.
+
+\installcorenamespace{withpages}
+
+\installsetuponlycommandhandler \??withpages {withpages}
+% \installdirectcommandhandler \??withpages {withpages}
+
+\newcount\c_page_selectors_n
+
+\unexpanded\def\insertpages
+ {\dotripleempty\page_selectors_insert}
+
+\def\page_selectors_insert[#filename][#emptylist][#settings]%
+ {\doifassignmentelse{#emptylist}
+ {\page_selectors_insert_indeed[#filename][][#emptylist]}
+ {\page_selectors_insert_indeed[#filename][#emptylist][#settings]}}
+
+\def\page_selectors_insert_indeed[#filename][#emptylist][#settings]%
+ {\bgroup
+ \dontcomplain
+ \getfiguredimensions[#filename]%
+ \setupcurrentwithpages[\c!width=\zeropoint,\c!n=\noffigurepages,#settings]%
+ \global\c_page_selectors_n\directwithpagesparameter\c!n\relax
+ \scratchwidth\directwithpagesparameter\c!width\relax
+ \doifinset0{#emptylist}
+ {\emptyhbox\page}%
+ \dorecurse\c_page_selectors_n
+ {\page_selectors_filter_a_page{#filename}\recurselevel
+ \doifinset\recurselevel{#emptylist}
+ {\emptyhbox\page}}%
+ \egroup}
+
+\unexpanded\def\filterpages
+ {\dotripleempty\page_selectors_filter}
+
+\def\page_selectors_filter[#filename][#selection][#settings]% % \noffigurepages not yet supported
+ {\bgroup
+ \dontcomplain
+ \getfiguredimensions[#filename]%
+ \setupcurrentwithpages[\c!width=\zeropoint,\c!n=\noffigurepages,#settings]%
+ \global\c_page_selectors_n\directwithpagesparameter\c!n\relax
+ \scratchwidth\directwithpagesparameter\c!width\relax
+ \edef\p_selection{#selection}%
+ \ifx\p_selection\v!even
+ \dorecurse\c_page_selectors_n
+ {\ifodd\recurselevel\else
+ \page_selectors_filter_a_page{#filename}\recurselevel
+ \fi}%
+ \else\ifx\p_selection\v!odd
+ \dorecurse\c_page_selectors_n
+ {\ifodd\recurselevel\relax
+ \page_selectors_filter_a_page{#filename}\recurselevel
+ \fi}%
+ \else
+ \def\page_selectors_filter_step_indeed#page%
+ {\ifnum#page>\c_page_selectors_n\else
+ \page_selectors_filter_a_page{#filename}{#page}%
+ \fi}%
+ \def\page_selectors_filter_step#step%
+ {\dowithrange{#step}\page_selectors_filter_step_indeed}%
+ \processcommacommand[\p_selection]\page_selectors_filter_step
+ \fi\fi
+ \egroup}
+
+\def\page_selectors_filter_a_page#filename#page%
+ {\hbox to \textwidth
+ {\ifdim\scratchwidth>\zeropoint
+ \rightorleftpageaction{\scratchwidth\zeropoint}{\hfill}%
+ \fi
+ \setbox\scratchbox\hbox
+ {\hskip-\scratchwidth
+ \externalfigure[#filename][\c!page=#page,\c!height=\textheight]\hss}%
+ \wd\scratchbox\zeropoint
+ \box\scratchbox}
+ \page}
+
+\unexpanded\def\copypages
+ {\dotripleempty\page_selectors_copy}
+
+\def\page_selectors_copy[#filename][#settings][#figuresettings]%
+ {\bgroup
+ \getfiguredimensions[#filename]%
+ \setupcurrentwithpages[\c!marking=\v!off,\c!offset=\zeropoint,\c!n=\noffigurepages,#settings]%
+ \global\c_page_selectors_n\directwithpagesparameter\c!n\relax
+ \scratchoffset\directwithpagesparameter\c!offset\relax
+ \dorecurse\c_page_selectors_n
+ {\vbox to \textheight
+ {\hsize\textwidth
+ \centeredbox
+ {\doifelse{\directwithpagesparameter\c!marking}\v!on\cuthbox\hbox % only place where cuthbox is used
+ {\ifdim\scratchoffset>\zeropoint\relax
+ \advance\vsize -2\scratchoffset
+ \advance\hsize -2\scratchoffset
+ \externalfigure[#filename][\c!page=\recurselevel,#figuresettings,\c!scale=,\c!factor=\v!max,\c!offset=\v!overlay]%
+ \else
+ \externalfigure[#filename][\c!page=\recurselevel,#figuresettings,\c!offset=\v!overlay]%
+ \fi}}}
+ \page}
+ \egroup}
+
+%D \macros
+%D {combinepages}
+%D
+%D Yet another way of postprocessing is handled by \type {\combinepages}. This macro
+%D builds a matrix of pages from a file, for example:
+%D
+%D \starttyping
+%D \setuppapersize
+%D [A4][A4] % or [A4,landscape][A4,landscape]
+%D
+%D \setuplayout
+%D [header=0pt,footer=1cm,
+%D backspace=1cm,topspace=1cm,
+%D width=middle,height=middle]
+%D
+%D \setupfootertexts
+%D [presentation---\currentdate\space---\space\pagenumber]
+%D
+%D \starttext
+%D \combinepages[slides][nx=2,ny=3,frame=on]
+%D \stoptext
+%D \stoptyping
+%D
+%D One can influence the way the pages are combined. (This will be explained at
+%D some point.)
+
+\installcorenamespace{combinepagesalternative}
+
+\unexpanded\def\combinepages
+ {\dodoubleempty\page_selectors_combine}
+
+\def\page_selectors_combine[#filename][#settings]% a=perpag b=free
+ {\bgroup
+ \dontcomplain
+ \getfiguredimensions[#filename]%
+ \setupcurrentwithpages
+ [\c!alternative=\v!a,
+ \c!n=\noffigurepages,\c!nx=\plustwo,\c!ny=\plustwo,
+ \c!start=\plusone,\c!stop=\maxcard,
+ \c!distance=\bodyfontsize,
+ \c!bottom=\vfill,\c!top=\vss,\c!left=\hss,\c!right=\hss,
+ \c!before=\page,\c!after=\page,\c!inbetween=\blank,
+ \c!frame=,\c!background=,\c!backgroundcolor=,
+ \c!name={#filename},
+ #settings]%
+ \global\c_page_selectors_n\directwithpagesparameter\c!n\relax
+ \directwithpagesparameter\c!before
+ \scratchnx\directwithpagesparameter\c!nx
+ \scratchny\directwithpagesparameter\c!ny
+ \scratchdistance\directwithpagesparameter\c!distance\relax
+ \scratchwidth\dimexpr(\textwidth-\scratchnx\scratchdistance+\scratchdistance)/\scratchnx\relax
+ \scratchheight\dimexpr(\textheight-\scratchny\scratchdistance+\scratchdistance)/\scratchny\relax
+ \expandnamespaceparameter\??combinepagesalternative\directwithpagesparameter\c!alternative\v!b
+ \directwithpagesparameter\c!after
+ \egroup}
+
+\setvalue{\??combinepagesalternative\v!a}%
+ {\global\combinedpagescounter\directwithpagesparameter\c!start\relax
+ \doloop
+ {\vbox to \textheight
+ {\dorecurse\scratchny
+ {\hbox to \textwidth
+ {\dorecurse\scratchnx
+ {\vbox to \scratchheight
+ {\hsize\scratchwidth
+ \vsize\scratchheight
+ \directwithpagesparameter\c!top
+ \hbox to \hsize
+ {\directwithpagesparameter\c!left
+ \ifnum\combinedpagescounter>\directwithpagesparameter\c!stop\relax
+ \global\c_page_selectors_n\zerocount
+ \else\ifnum\combinedpagescounter>\c_page_selectors_n \else
+ \externalfigure
+ [\directwithpagesparameter\c!name]
+ [\c!object=\v!no,
+ \c!page=\number\combinedpagescounter,
+ \c!factor=\v!max,
+ \c!background=\directwithpagesparameter\c!background,
+ \c!backgroundcolor=\directwithpagesparameter\c!backgroundcolor,
+ \c!frame=\directwithpagesparameter\c!frame]%
+ \fi\fi
+ \directwithpagesparameter\c!right}
+ \directwithpagesparameter\c!bottom}%
+ \global\advance\combinedpagescounter\plusone
+ \hfil}%
+ \hfilneg}
+ \vfil}%
+ \vfilneg}%
+ \page
+ \ifnum\combinedpagescounter>\c_page_selectors_n
+ \exitloop
+ \fi}}
+
+\setvalue{\??combinepagesalternative\v!c}%
+ {\global\combinedpagescounter\directwithpagesparameter\c!start\relax
+ \doloop
+ {\vbox to \textheight
+ {\hbox to \textwidth
+ {\dorecurse\scratchnx
+ {\directwithpagesparameter\c!left
+ \vbox to \textheight
+ {\hsize\scratchwidth
+ \dorecurse\scratchny
+ {\directwithpagesparameter\c!top
+ \hbox to \hsize
+ {\vbox to \scratchheight
+ {\hsize\scratchwidth
+ \vsize\scratchheight
+ \ifnum\combinedpagescounter>\directwithpagesparameter\c!stop\relax
+ \global\c_page_selectors_n\zerocount
+ \else\ifnum\combinedpagescounter>\c_page_selectors_n \else
+ \externalfigure
+ [\directwithpagesparameter\c!name]
+ [\c!object=\v!no,
+ \c!page=\number\combinedpagescounter,
+ \c!factor=\v!max,
+ \c!background=\directwithpagesparameter\c!background,
+ \c!backgroundcolor=\directwithpagesparameter\c!backgroundcolor,
+ \c!frame=\directwithpagesparameter\c!frame]%
+ \fi\fi}}
+ \global\advance\combinedpagescounter\plusone
+ \directwithpagesparameter\c!bottom}}%
+ \hfil}%
+ \hfilneg}}%
+ \page
+ \ifnum\combinedpagescounter>\c_page_selectors_n
+ \exitloop
+ \fi}}
+
+\setvalue{\??combinepagesalternative\v!horizontal}{\getvalue{\??combinepagesalternative\v!a}}
+\setvalue{\??combinepagesalternative\v!vertical }{\getvalue{\??combinepagesalternative\v!c}}
+
+\setvalue{\??combinepagesalternative\v!b}%
+ {\global\combinedpagescounter\directwithpagesparameter\c!start\relax
+ \doloop
+ {\startbaselinecorrection
+ \hbox to \textwidth
+ {\dorecurse\scratchnx
+ {\global\advance\combinedpagescounter\plusone
+ \ifnum\combinedpagescounter>\c_page_selectors_n \else
+ \normalexpanded{\externalfigure
+ [\directwithpagesparameter\c!name]
+ [\c!page=\number\combinedpagescounter,
+ \c!width=\the\scratchwidth,
+ \c!background=\directwithpagesparameter\c!background,
+ \c!backgroundcolor=\directwithpagesparameter\c!backgroundcolor,
+ \c!frame=\directwithpagesparameter\c!frame]}%
+ \hfill
+ \fi}%
+ \hfillneg}%
+ \stopbaselinecorrection
+ \ifnum\combinedpagescounter<\c_page_selectors_n\relax
+ \directwithpagesparameter\c!inbetween
+ \else
+ \exitloop
+ \fi}}
+
+% This macro cuts a page into n parts that can be pasted together.
+
+\unexpanded\def\slicepages
+ {\dotripleempty\page_selectors_slice}
+
+\def\page_selectors_slice[#filename][#oddsettings][#evensettings]%
+ {\ifthirdargument
+ \page_selectors_slice_indeed[#filename][#oddsettings][#evensettings]%
+ \else
+ \page_selectors_slice_indeed[#filename][#oddsettings][#oddsettings]%
+ \fi}
+
+\let\slicedpagenumber\!!zerocount
+\let\slicedpagestepx \!!zerocount
+\let\slicedpagestepy \!!zerocount
+
+\def\page_selectors_slice_indeed[#filename][#oddsettings][#evensettings]%
+ {\bgroup
+ \dontcomplain
+ \global\let\slicedpagenumber\!!zerocount
+ \getfiguredimensions[#filename]%
+ \setupcurrentwithpages
+ [\c!offset=\zeropoint,\c!hoffset=\zeropoint,\c!voffset=\zeropoint,
+ \c!width=\figurewidth,\c!height=\figureheight,\c!n=\noffigurepages,#oddsettings]%
+ \global\c_page_selectors_n\directwithpagesparameter\c!n\relax
+ \ifnum\c_page_selectors_n>\zerocount
+ \definepapersize
+ [\s!dummy][\c!height=\directwithpagesparameter\c!height,\c!width=\directwithpagesparameter\c!width]%
+ \setuppapersize
+ [\s!dummy][\s!dummy]%
+ \setuplayout
+ [\c!backspace=\zeropoint,\c!topspace=\zeropoint,
+ \c!height=\v!middle,\c!width=\v!middle,
+ \c!textdistance=\zeropoint,
+ \c!header=\zeropoint,\c!footer=\zeropoint]%
+ \fi
+ \dorecurse\noffigurepages
+ {\global\let\slicedpagenumber\recurselevel
+ \ifnum\c_page_selectors_n>\plusone
+ \dorecurse\c_page_selectors_n
+ {\let\slicedpagestepx\recurselevel
+ \dorecurse\c_page_selectors_n
+ {\let\slicedpagestepy\recurselevel
+ \clip
+ [\c!nx=\c_page_selectors_n,\c!ny=\c_page_selectors_n,\c!x=\slicedpagestepx,\c!y=\slicedpagestepy]
+ {\scale
+ [\c!scale=\number\c_page_selectors_n000]
+ {\externalfigure[#filename][\c!page=\slicedpagenumber]}}%
+ \page}}%
+ \else
+ \ifodd\slicedpagenumber\relax
+ \setupcurrentwithpages[#oddsettings]%
+ \else
+ \setupcurrentwithpages[#evensettings]%
+ \fi
+ \hskip\directwithpagesparameter\c!offset\relax
+ \clip
+ [\c!hoffset=\directwithpagesparameter\c!hoffset,\c!voffset=\directwithpagesparameter\c!voffset,
+ \c!height=\directwithpagesparameter\c!height,\c!width=\directwithpagesparameter\c!width]
+ {\externalfigure[#filename][\c!page=\slicedpagenumber]}%
+ \page
+ \fi}
+ \egroup}
+
+% \starttext \slicepages[slice1.pdf][n=3] \stoptext
+
+\protect \endinput
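
A minimal sketch of calling the rewritten selection macros from a document;
file.pdf is a placeholder name and the keys mirror the %D examples in this
new page-sel.mkvi:

  \starttext
    % keep only the odd pages of the source file, at most 30 of them
    \filterpages[file.pdf][odd][n=30]
    % then typeset the same file as a 2 x 3 matrix of pages per sheet
    \combinepages[file.pdf][nx=2,ny=3,frame=on]
  \stoptext
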
diff --git a/Master/texmf-dist/tex/context/base/page-set.mkiv b/Master/texmf-dist/tex/context/base/page-set.mkiv
index 467f22493cb..b7c26035ee9 100644
--- a/Master/texmf-dist/tex/context/base/page-set.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-set.mkiv
@@ -11,9 +11,13 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% not yet mkiv! probably namespace issues with localframed (only one left)
+%
% getnoflines vs getrawnoflines
-% some day: cleanup and go etex
+% This is a rather old mechanism and it is best kept as it is. If it gets
+% replaced by a more modern solution, that will be an extra mechanism. So, we
+% only do some basic cleanup.
\writestatus{loading}{ConTeXt Page Macros / Column Sets}
@@ -44,13 +48,95 @@
\newif\ifenoughcolumncells
\newif\ifsomefreecolumncells
\newif\ifcolumnspread
-\newif\iftracecolumnset % \tracecolumnsettrue
+\newif\iftracecolumnset
+
+\newif\ifforcecolumnsetgrid \forcecolumnsetgridtrue
+\newif\ifcollectingsetcontent % never set
+\newif\ifcarryoverfootnotes %\carryoverfootnotestrue
+\newif\iflastcolumnfootnotes % never set \lastcolumnfootnotestrue
+\newif\ifintermediatefootnotes
+
+\newbox \b_page_set_preceding
+\newbox \b_page_set_trailing
+
+\newdimen \d_page_set_local_hsize
+\newconditional\c_page_set_width_set
+
+\installcorenamespace{columnsetgrid}
\def\columnmaxcells {75} % runtime
\def\columnmaxfreecells {0} % runtime
\def\columngaplimit {0} % {5}
-\def\@otr@{otr}
+\def\page_set_cell #1#2{\csname \??columnsetgrid:\number#1:\number#2\endcsname}
+\def\page_set_cell_get#1#2{\box\csname \??columnsetgrid:\number#1:\number#2\endcsname}
+\def\page_set_cell_set#1#2{\global\setbox\csname\??columnsetgrid:\number#1:\number#2\endcsname}
+
+\def\page_set_cell_doifelse#1#2%
+ {\relax
+ \ifvoid\csname\??columnsetgrid:\number#1:\number#2\endcsname
+ \expandafter\secondoftwoarguments
+ \else
+ \expandafter\firstoftwoarguments
+ \fi}
+
+\def\page_set_cell_erase_grid % maybe dedicated loops ... make another loop when max's have changed
+ {\bgroup
+ \increment\columnmaxcells\relax
+ \ifodd\realpageno \else
+ \columnspreadfalse
+ \fi
+ \ifcolumnspread
+ \page_set_cell_erase_grid_spread
+ \else
+ \page_set_cell_erase_grid_page
+ \fi
+ \page_set_cell_erase_grid_top
+ \global\columnfirstcell\zerocount
+ \global\columnlastcell \columnfirstcell
+ \global\columnfreecells\columnfirstcell
+ \egroup}
+
+\let\m_page_column_l\relax
+\let\m_page_column_r\relax
+
+\def\page_set_cell_erase_grid_spread
+ {\dorecurse\nofcolumns
+ {\let \m_page_column_l\recurselevel
+ \edef\m_page_column_r{\the\numexpr\recurselevel+\lofcolumns}%
+ \dostepwiserecurse \zerocount \columnmaxcells \plusone
+ {\ifcsname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname
+ \global\setbox\csname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname
+ \ifcsname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \box\csname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \else
+ \emptyhbox
+ \expandafter\newbox\csname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \fi
+ \else
+ \expandafter\newbox\csname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname
+ \ifcsname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \global\setbox\csname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname\box\csname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \else
+ \expandafter\newbox\csname\??columnsetgrid:\m_page_column_r:\recurselevel\endcsname
+ \fi
+ \fi}}}
+
+\def\page_set_cell_erase_grid_page
+ {\dorecurse \tofcolumns
+ {\let\m_page_column_l\recurselevel
+ \dostepwiserecurse \zerocount \columnmaxcells \plusone
+ {\ifcsname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname
+ \global\setbox\csname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname\emptybox
+ \else
+ \expandafter\newbox\csname\??columnsetgrid:\m_page_column_l:\recurselevel\endcsname
+ \fi}}}
+
+\def\page_set_cell_erase_grid_top
+ {\dorecurse\tofcolumns
+ {\global\setbox\csname\??columnsetgrid:\recurselevel:\columnmaxcells\endcsname\copy\placeholderboxa}}
+
+% % % %
\def\OTRSETmakeupwidth{\innermakeupwidth}
@@ -60,15 +146,6 @@
\unexpanded\def\page_set_command_synchronize_side_floats
{\page_sides_forget_floats}
-\def\OTRSETgridcell #1#2{\csname \@otr@:\number#1:\number#2\endcsname}
-\def\OTRSETgetgridcell#1#2{\box\csname \@otr@:\number#1:\number#2\endcsname}
-\def\OTRSETsetgridcell#1#2{\global\setbox\csname\@otr@:\number#1:\number#2\endcsname}
-
-\long\def\OTRSETdoifcellelse#1#2%
- {\relax\ifvoid\csname\@otr@:\number#1:\number#2\endcsname
- \@EA\secondoftwoarguments\else\@EA\firstoftwoarguments
- \fi}
-
% The following two macros are used to compensate for a switch in body fonts
% as in:
%
@@ -184,64 +261,16 @@
\relax % needed ! ! ! ! else lookahead over \fi and \@EA
\@EA\egroup\@EA\scratchdimen\the\!!dimena\relax}
-\def\columnerasegridboxes % maybe dedicated loops
- {\bgroup
- \increment\columnmaxcells\relax
- \ifodd\realpageno
- \else % we are on the other page
- \columnspreadfalse
- \fi
- \ifcolumnspread
- \dorecurse\nofcolumns
- {\let\!!stringa\recurselevel
- \scratchcounter\recurselevel \advance\scratchcounter\lofcolumns
- \edef\!!stringb{\the\scratchcounter}%
- \dostepwiserecurse \zerocount \columnmaxcells \plusone
- {\ifcsname\@otr@:\!!stringa:\recurselevel\endcsname
- \global\setbox\csname\@otr@:\!!stringa:\recurselevel\endcsname
- \ifcsname\@otr@:\!!stringb:\recurselevel\endcsname
- \box\csname\@otr@:\!!stringb:\recurselevel\endcsname
- %\global\setbox\csname\@otr@:\!!stringa:\recurselevel\endcsname\box\csname\@otr@:\!!stringb:\recurselevel\endcsname
- \else
- \emptyhbox
- %\global\setbox\csname\@otr@:\!!stringa:\recurselevel\endcsname\emptybox
- \expandafter\newbox\csname\@otr@:\!!stringb:\recurselevel\endcsname
- \fi
- \else
- \expandafter\newbox\csname\@otr@:\!!stringa:\recurselevel\endcsname
- \ifcsname\@otr@:\!!stringb:\recurselevel\endcsname
- \global\setbox\csname\@otr@:\!!stringa:\recurselevel\endcsname\box\csname\@otr@:\!!stringb:\recurselevel\endcsname
- \else
- \expandafter\newbox\csname\@otr@:\!!stringb:\recurselevel\endcsname
- \fi
- \fi}}%
- \else
- \dorecurse \tofcolumns
- {\let\!!stringa\recurselevel
- \dostepwiserecurse \zerocount \columnmaxcells \plusone
- {\ifcsname\@otr@:\!!stringa:\recurselevel\endcsname
- \global\setbox\csname\@otr@:\!!stringa:\recurselevel\endcsname\emptybox
- \else
- \expandafter\newbox\csname\@otr@:\!!stringa:\recurselevel\endcsname
- \fi}}%
- \fi
- \dorecurse\tofcolumns
- {\global\setbox\csname\@otr@:\recurselevel:\columnmaxcells\endcsname\copy\placeholderboxa}%
- \global\columnfirstcell\zerocount
- \global\columnlastcell\columnfirstcell
- \global\columnfreecells\columnfirstcell
- \egroup}
-
\def\doOTRSETsetgridcells#1#2#3#4#5#6% placeholder col row wid hei {data}
{\!!countd#2\advance\!!countd#4\advance\!!countd\minusone
\!!counte#3\advance\!!counte#5\advance\!!counte\minusone
\dostepwiserecurse{#2}\!!countd\plusone
{\!!countf\recurselevel
\dostepwiserecurse{#3}\!!counte\plusone
- {\OTRSETsetgridcell\!!countf\recurselevel#1}}%
+ {\page_set_cell_set\!!countf\recurselevel#1}}%
\dostepwiserecurse{#3}\!!counte\plusone
- {\wd\OTRSETgridcell{#2}\recurselevel\hsize}%
- \OTRSETsetgridcell{#2}\!!counte#6}
+ {\wd\page_set_cell{#2}\recurselevel\hsize}%
+ \page_set_cell_set{#2}\!!counte#6}
\def\OTRSETsetgridcells
{\doOTRSETsetgridcells{\copy\placeholderboxb}}
@@ -252,19 +281,17 @@
\unexpanded\def\setupcolumnsetlines{\doquintupleempty\dosetupcolumnsettrick[l]}
\unexpanded\def\setupcolumnsetstart{\doquintupleempty\dosetupcolumnsettrick[s]}
+\installcorenamespace{columnsettag} % temp hack
+
\def\dosetupcolumnsettrick[#1][#2][#3][#4][#5]% tag id page col value
- {% not needed, is already relative
- % \doifinstringelse{+}{#3}{\scratchcounter\realpageno}{\scratchcounter\zerocount}%
- % \advance\scratchcounter#3\relax % \relax needed
- % \setevalue{\??mc:#1:#2:\the\scratchcounter:\number#4}{\number#5}}
- \iffifthargument
- \setevalue{\??mc:#1:#2:\number#3:\number#4}{\number#5}%
+ {\iffifthargument
+ \setevalue{\??columnsettag:#1:#2:\number#3:\number#4}{\number#5}%
\else
- \setevalue{\??mc:#1:#2:\number#3:0}{\number#4}%
+ \setevalue{\??columnsettag:#1:#2:\number#3:0}{\number#4}%
\fi}
-\def\currentcolumnmaxcellstag #1{\??mc:l:\OTRSETidentifier:\columnsetpage:\number#1}
-\def\currentcolumnstartcelltag#1{\??mc:s:\OTRSETidentifier:\columnsetpage:\number#1}
+\def\currentcolumnmaxcellstag #1{\??columnsettag:l:\OTRSETidentifier:\columnsetpage:\number#1}
+\def\currentcolumnstartcelltag#1{\??columnsettag:s:\OTRSETidentifier:\columnsetpage:\number#1}
\def\doresetcolumnsetlines#1%
{\ifcsname\currentcolumnmaxcellstag{#1}\endcsname
@@ -312,7 +339,7 @@
{\ifnum\columnfirstcell>\columnmaxcells\relax
\exitloop
\else
- \OTRSETdoifcellelse{#1}\columnfirstcell
+ \page_set_cell_doifelse{#1}\columnfirstcell
{\global\advance\columnfirstcell\plusone}\exitloop
\fi}%
\global\columnlastcell\columnfirstcell
@@ -320,7 +347,7 @@
{\ifnum\columnlastcell>\columnmaxcells\relax
\exitloop
\else
- \OTRSETdoifcellelse{#1}\columnlastcell
+ \page_set_cell_doifelse{#1}\columnlastcell
{\global\advance\columnlastcell \minusone \exitloop}
{\global\advance\columnlastcell \plusone }%
\fi}%
@@ -351,7 +378,7 @@
\edef\columnmaxcells{\the\scratchcounter}%
\scratchcounter\zerocount
\dostepwiserecurse{#2}\columnmaxcells\plusone
- {\OTRSETdoifcellelse{#1}\recurselevel
+ {\page_set_cell_doifelse{#1}\recurselevel
{\ifnum\columnmaxfreecells<\scratchcounter
\edef\columnmaxfreecells{\the\scratchcounter}%
\let\columnfrmfreecells\recurselevel
@@ -362,7 +389,7 @@
\long\def\OTRSETrecurseRL#1%
{\dostepwiserecurse\nofcolumns\plusone\minusone
- {#1\hskip\OTRSETgetparameter\c!distance\recurselevel}}
+ {#1\hskip\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}}
\def\OTRSETmakegridbox
{\ifcase\columndirection
@@ -373,6 +400,28 @@
\def\OTRSETmakeupwidth{\makeupwidth} % temporary indirectness
+\def\page_set_make_background_box
+ {\begingroup
+ \mofcolumns\recurselevel % used to signal mp
+ \d_page_set_local_hsize\OTRSETlocalwidth\recurselevel
+ \scratchdistance\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance\relax
+ \edef\currentcolumnset{\currentcolumnset:\recurselevel}%
+ \letcolumnsetparameter\c!width \d_page_set_local_hsize
+ \letcolumnsetparameter\c!height\!!heighta
+ \letcolumnsetparameter\c!lines \empty
+ \letcolumnsetparameter\c!region\currentcolumnset
+ \setbox\scratchbox\hbox\inheritedcolumnsetframed{}% maybe \fastlocalframed
+ \wd\scratchbox\d_page_set_local_hsize
+ \ht\scratchbox\!!heighta
+ \ifcase\columndirection
+ \hskip\scratchdistance
+ \box\scratchbox
+ \else
+ \box\scratchbox
+ \hskip\scratchdistance
+ \fi
+ \endgroup}
+
\def\OTRSETdomakegridbox#1#2#3%
{\hbox\bgroup
\dontcomplain
@@ -380,28 +429,13 @@
\!!heighta \textheight
% test first !
\hbox to \OTRSETmakeupwidth
- {\dostepwiserecurse{#1}{#2}{#3}
- {\mofcolumns\recurselevel
- \localcolumnwidth\OTRSETlocalwidth\mofcolumns
- \setbox\scratchbox\hbox\localframed
- [\??mc\OTRSETidentifier\number\mofcolumns]%
- [\c!width=\localcolumnwidth,\c!height=\!!heighta,\c!lines=]%
- {}%
- \wd\scratchbox\localcolumnwidth
- \ht\scratchbox\!!heighta
- \ifcase\columndirection
- \hskip\OTRSETgetparameter\c!distance\recurselevel
- \box\scratchbox
- \else
- \box\scratchbox
- \hskip\OTRSETgetparameter\c!distance\recurselevel
- \fi}}%
+ {\dostepwiserecurse{#1}{#2}{#3}\page_set_make_background_box}%
\hskip-\OTRSETmakeupwidth
% main text
\hbox to \OTRSETmakeupwidth
{\dostepwiserecurse{#1}{#2}{#3}
{\mofcolumns\recurselevel
- \localcolumnwidth\OTRSETlocalwidth\mofcolumns
+ \d_page_set_local_hsize\OTRSETlocalwidth\mofcolumns
\offinterlineskip
\setbox\scratchbox\vbox to \!!heighta
{%\topskipcorrection % not needed
@@ -417,14 +451,14 @@
\vfill
\fi
\dorecurse\columnmaxcells
- {\setbox\scratchbox\hbox{\OTRSETgetgridcell\mofcolumns\recurselevel}%
+ {\setbox\scratchbox\hbox{\page_set_cell_get\mofcolumns\recurselevel}%
\ht\scratchbox\strutht
\dp\scratchbox\strutdp
\ifcase\columndirection
\box\scratchbox
\else
- \hbox to \localcolumnwidth
- {\hskip\localcolumnwidth\llap{\box\scratchbox}}%
+ \hbox to \d_page_set_local_hsize
+ {\hskip\d_page_set_local_hsize\llap{\box\scratchbox}}%
\fi
\par}%
\ifcase\OTRSETbalancemethod
@@ -439,12 +473,12 @@
\kern\zeropoint
\vss
\fi}%
- \wd\scratchbox\localcolumnwidth % \textwidth
+ \wd\scratchbox\d_page_set_local_hsize % \textwidth
\page_marks_synchronize_column{#1}{#2}\recurselevel\scratchbox
\ifcase\columndirection
- \hskip\OTRSETgetparameter\c!distance\recurselevel\box\scratchbox
+ \hskip\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance\box\scratchbox
\else
- \box\scratchbox\hskip\OTRSETgetparameter\c!distance\recurselevel
+ \box\scratchbox\hskip\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance
\fi}}%
\egroup}
@@ -459,7 +493,7 @@
\!!counta\columnmaxcells
\donetrue
\doloop
- {\dorecurse\nofcolumns{\OTRSETdoifcellelse\recurselevel\!!counta\donefalse\donothing}%
+ {\dorecurse\nofcolumns{\page_set_cell_doifelse\recurselevel\!!counta\donefalse\donothing}%
\ifdone
\ifnum\!!counta>\plusone\advance\!!counta\minusone\else\exitloop\fi
\else
@@ -483,22 +517,22 @@
{\placebottomnotes}%
\ifdim\ht\scratchbox>\zeropoint
\setbox\scratchbox\hbox
- {\hbox to \zeropoint{\OTRSETgetgridcell\nofcolumns\lastcolumnlastcell}%
+ {\hbox to \zeropoint{\page_set_cell_get\nofcolumns\lastcolumnlastcell}%
\box\scratchbox}%
\ht\scratchbox\strutht
\dp\scratchbox\strutdp
- \OTRSETsetgridcell\nofcolumns\lastcolumnlastcell\box\scratchbox
+ \page_set_cell_set\nofcolumns\lastcolumnlastcell\box\scratchbox
\fi
\global\lastcolumnlastcell\zerocount
\fi}
\def\OTRSETdoflush
- {\ifcollectingcontent
+ {\ifcollectingsetcontent
\global\mofcolumns\plusone
\else
\OTRSETdofinalflush
\OTRSETdofinaloutput
- \ifnum\columnsetpage>0
+ \ifnum\columnsetpage>\zerocount
\dorecurse\nofcolumns{\doresetcolumnsetlines\recurselevel}%
\fi
\doglobal\increment\columnsetpage
@@ -554,11 +588,10 @@
\newdimen \OTRSETtextsheight
\let \OTRSETidentifier=\empty
-\def\OTRSETgetparameter#1#2{\csname\??mc\OTRSETidentifier\number#2#1\endcsname}
-\def\OTRSETsetparameter#1#2{\setvalue{\??mc\OTRSETidentifier\number#2#1}}
-
\def\OTRSETskipstart
- {\scratchcounter\executeifdefined{\??mc\OTRSETidentifier\c!start}\zerocount
+ {\doifelsenothing{\columnsetparameter\c!start}
+ {\scratchcounter\zerocount}%
+ {\scratchcounter\columnsetparameter\c!start}%
\relax % needed !
\ifcase\scratchcounter\else
\advance\scratchcounter\plusone
@@ -569,7 +602,7 @@
\fi}
\unexpanded\def\page_set_command_set_vsize % snap per sectie (gap here?)
- {\ifcollectingcontent \else % can be assigndimen
+ {\ifcollectingsetcontent \else % can be assigndimen
\OTRSETsetcolumnmaxcells % layout can be changed
\OTRSETskipstart % not that well tested
\OTRSETcheckinsert % added
@@ -577,7 +610,7 @@
\ifsomefreecolumncells
\global\vsize\columnfreecells\lineheight
\ifinotr % else problems with floats, see extreme
- \global\pagegoal\vsize % niet nodig, tenzij binnen otr
+           \pagegoal\vsize % not needed, unless inside the otr
\fi
\synchronizeoutput % fails on example
% \allowbreak % hm
@@ -586,12 +619,12 @@
\fi}
\def\page_set_command_set_hsize % of course this does not migrate outside the otr
- {\localcolumnwidth\OTRSETlocalwidth\mofcolumns
- \textwidth\localcolumnwidth
- \hsize\localcolumnwidth}
+ {\d_page_set_local_hsize\OTRSETlocalwidth\mofcolumns
+ \textwidth\d_page_set_local_hsize
+ \hsize\d_page_set_local_hsize}
\unexpanded\def\page_set_command_synchronize_hsize
- {\ifcase0\getvalue{\??mc\??mc\c!width}\else % some width set
+ {\ifconditional\c_page_set_width_set
\bgroup
\scratchdimen\OTRSETlocalwidth\mofcolumns
\ifdim\scratchdimen=\textwidth
@@ -829,7 +862,7 @@
{\ifnum\columngaplimit>\zerocount
\donefalse
\dostepwiserecurse{#2}\columnmaxcells\plusone
- {\OTRSETdoifcellelse{#1}\recurselevel
+ {\page_set_cell_doifelse{#1}\recurselevel
{\ifdone
\!!countb\recurselevel \advance\!!countb -\!!counta\relax
\ifnum\!!countb>\plusone
@@ -837,7 +870,7 @@
\ifnum\!!countb<\columngaplimit\relax
\!!countb\recurselevel \advance\!!countb \minusone
\dostepwiserecurse\!!counta\!!countb\plusone
- {\OTRSETsetgridcell{#1}\recurselevel\copy\placeholderboxc}%
+ {\page_set_cell_set{#1}\recurselevel\copy\placeholderboxc}%
%\message{[gap]}%
\fi
\fi
@@ -951,7 +984,7 @@
\else
\advance\columnhcells \plusone
\advance\scratchcounter \plusone
- \advance\totalcolumnspace \OTRSETgetparameter\c!distance\scratchcounter
+ \advance\totalcolumnspace \namedcolumnsetparameter{\currentcolumnset:\number\scratchcounter}\c!distance
\fi
\else
\exitloop
@@ -973,9 +1006,9 @@
%\scratchcounter#2\advance\scratchcounter\minusone
%\ifnum\scratchcounter>0
% \dostepwiserecurse{#1}\!!countd\plusone
-% {\ifdim\wd\OTRSETgridcell\recurselevel\scratchcounter>\zeropoint
+% {\ifdim\wd\page_set_cell\recurselevel\scratchcounter>\zeropoint
% \let\columnspacetopoffset\plusone
-% \else\ifdim\dp\OTRSETgridcell\recurselevel\scratchcounter>\zeropoint
+% \else\ifdim\dp\page_set_cell\recurselevel\scratchcounter>\zeropoint
% \let\columnspacetopoffset\plusone
% \fi\fi}%
% \advance\!!counte \columnspacetopoffset \relax
@@ -986,9 +1019,9 @@
%\advance\scratchcounter \columnvcells \relax
%\ifnum\scratchcounter>\columnmaxcells\else
% \dostepwiserecurse{#1}\!!countd\plusone
-% {\ifdim\wd\OTRSETgridcell\recurselevel\scratchcounter>\zeropoint
+% {\ifdim\wd\page_set_cell\recurselevel\scratchcounter>\zeropoint
% \let\columnspacebotoffset\plusone
-% \else\ifdim\dp\OTRSETgridcell\recurselevel\scratchcounter>\zeropoint
+% \else\ifdim\dp\page_set_cell\recurselevel\scratchcounter>\zeropoint
% \let\columnspacebotoffset\plusone
% \fi\fi}%
% \advance\!!counte \columnspacebotoffset \relax
@@ -999,12 +1032,15 @@
\!!countf\recurselevel\relax
\dostepwiserecurse{#2}\!!counte\plusone % rows
{\ifenoughcolumncells
- \OTRSETdoifcellelse\!!countf\recurselevel
+ \page_set_cell_doifelse\!!countf\recurselevel
{\enoughcolumncellsfalse}{}%
\fi}%
\fi}%
\fi}
+\let\preferedcolumn\empty
+\let\preferedrow \empty
+
\def\OTRSETsetpreferedcolumnslot#1#2%
{\doifsomething{#1}{\edef\preferedcolumn{#1}}%
\doifsomething{#2}{\edef\preferedrow {#2}}}
@@ -1078,7 +1114,7 @@
\def\OTRSETstoreincolumnslotFXTB#1% fixed column
{\OTRSETcheckprefered
- \OTRSETdoifcellelse\pofcolumns\plusone
+ \page_set_cell_doifelse\pofcolumns\plusone
{\OTRSETprepareforcolumnslot2}{\OTRSETprepareforcolumnslot1}{#1}% % 1/2 dependent of place, todo
\OTRSETflushtextsofar
\OTRSETstoreincolumnslotindeed
@@ -1087,7 +1123,7 @@
\def\OTRSETstoreincolumnslotFXBT#1% fixed column
{\OTRSETcheckprefered
- \OTRSETdoifcellelse\pofcolumns\columnmaxcells
+ \page_set_cell_doifelse\pofcolumns\columnmaxcells
{\OTRSETprepareforcolumnslot2}{\OTRSETprepareforcolumnslot3}{#1}% % 3/2 dependent of place, todo
\OTRSETflushtextsofar
\OTRSETstoreincolumnslotindeed
@@ -1264,7 +1300,7 @@
\fi}
\unexpanded\def\page_set_command_check_if_float_fits
- {\global\ifnofloatpermitted\roomforfloatfalse\else\roomforfloattrue\fi}
+ {\global\ifconditional\c_page_floats_not_permitted\setfalse\c_page_floats_room\else\settrue\c_page_floats_room\fi}
\def\OTRSETunpreparebox#1%
{\ifhbox#1% spans and so
@@ -1279,25 +1315,25 @@
\def\OTRSETsavebox#1% clean up the skips
{\OTRSETunpreparebox{#1}%
- \dofloatssave\s!text}
+ \page_floats_save\s!text}
\def\OTRSETresavebox#1% clean up the skips
{\OTRSETunpreparebox{#1}%
- \dofloatsresave\s!text}
+ \page_floats_resave\s!text}
\unexpanded\def\page_set_command_flush_float_box
- {\iftestfloatbox\ruledhbox\fi{\box\floatbox}}
+ {\box\floatbox}
\unexpanded\def\page_set_command_flush_floats
{\bgroup
\def\OTRSETsavebox##1{\!!doneafalse}%
\doloop
- {\ifsomefloatwaiting
+ {\ifconditional\c_page_floats_some_waiting
\OTRSETskipstart
- \dofloatsgetinfo\s!text
+ \page_floats_get_info\s!text
\ifdim\floatwidth>\zeropoint
\!!doneatrue
- \dofloatsflush\s!text{1}%
+ \page_floats_flush\s!text\plusone
\dp\floatbox\zeropoint
\OTRSETstoreincolumnslot{TBLR}\floatbox
\if!!donea
@@ -1333,9 +1369,9 @@
\else
% only span if there is a next column with content
\dorecurse\columnmaxcells
- {\ifdim\ht\OTRSETgridcell\currenthcell\currentvcell>\zeropoint
+ {\ifdim\ht\page_set_cell\currenthcell\currentvcell>\zeropoint
\centergridcellonlyfalse
- \else\ifdim\dp\OTRSETgridcell\currenthcell\currentvcell>\zeropoint
+ \else\ifdim\dp\page_set_cell\currenthcell\currentvcell>\zeropoint
\centergridcellonlyfalse
\fi\fi}%
\fi
@@ -1343,10 +1379,10 @@
\currenthcell\recurselevel
\dorecurse\columnmaxcells
{\currentvcell\recurselevel\relax
- \ifdim\ht\OTRSETgridcell\currenthcell\currentvcell>\zeropoint
- \ifdim\dp\OTRSETgridcell\currenthcell\currentvcell=\zeropoint
+ \ifdim\ht\page_set_cell\currenthcell\currentvcell>\zeropoint
+ \ifdim\dp\page_set_cell\currenthcell\currentvcell=\zeropoint
\bgroup
- \setbox\scratchbox\OTRSETgetgridcell\currenthcell\currentvcell
+ \setbox\scratchbox\page_set_cell_get\currenthcell\currentvcell
\getnoflines{\ht\scratchbox}%
\!!counta\currentvcell
\advance\!!counta -\noflines
@@ -1363,9 +1399,9 @@
{\if!!doneb
\let\xrecurselevel\recurselevel
\dostepwiserecurse\!!counta\currentvcell\plusone
- {\ifdim\ht\OTRSETgridcell\xrecurselevel\recurselevel>\zeropoint
+ {\ifdim\ht\page_set_cell\xrecurselevel\recurselevel>\zeropoint
\!!donebfalse
- \else\ifdim\wd\OTRSETgridcell\xrecurselevel\recurselevel>\zeropoint
+ \else\ifdim\wd\page_set_cell\xrecurselevel\recurselevel>\zeropoint
\!!donebfalse
\fi\fi}%
\if!!doneb
@@ -1375,11 +1411,11 @@
\totalcolumnspace\OTRSETlocalwidth\currenthcell
\dostepwiserecurse\!!countc\!!countb\plusone
{\advance\totalcolumnspace \OTRSETlocalwidth\recurselevel
- \advance\totalcolumnspace \OTRSETgetparameter\c!distance\recurselevel}%
+ \advance\totalcolumnspace \namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}%
\ifdim\totalcolumnspace>\wd\scratchbox
\setbox\scratchbox\hbox to \totalcolumnspace{\hss\box\scratchbox\hss}%
\fi
- \OTRSETsetgridcell\currenthcell\currentvcell\box\scratchbox
+ \page_set_cell_set\currenthcell\currentvcell\box\scratchbox
\egroup
\fi
\fi}}%
@@ -1388,8 +1424,8 @@
\def\OTRSETinitializecolumns% once per page
{\columnspreadtrue % todo
\ifcolumnspread
- \global\rofcolumns\getvalue{\??mc\OTRSETidentifier\c!nright}%
- \global\lofcolumns\getvalue{\??mc\OTRSETidentifier\c!nleft}%
+ \global\rofcolumns\columnsetparameter\c!nright
+ \global\lofcolumns\columnsetparameter\c!nleft
\global\tofcolumns\rofcolumns \relax
\ifodd\realpageno\relax
\global\nofcolumns\rofcolumns
@@ -1398,67 +1434,70 @@
\global\nofcolumns\lofcolumns
\fi
\else
- \global\nofcolumns\getvalue{\??mc\OTRSETidentifier\c!n}%
+ \global\nofcolumns\columnsetparameter\c!n
\global\rofcolumns\nofcolumns
\global\lofcolumns\nofcolumns
\global\tofcolumns\nofcolumns
\fi
\OTRSETassignwidths
\global\mofcolumns\plusone
- \columnerasegridboxes}
+ \page_set_cell_erase_grid}
-% vanaf hier:
-
-\unexpanded\def\definecolumnset
- {\dodoubleargument\dodefinecolumnset}
-
-\def\dodefinecolumnset[#1][#2]%
- {\getparameters[\??mc#1]
- [\c!direction=\v!right,
- \c!balance=\v!no,
- \c!distance=1.5\bodyfontsize, % controleren
- \c!n=2,
- \c!nleft=\getvalue{\??mc#1\c!n},
- \c!nright=\getvalue{\??mc#1\c!n},
- \c!width=\v!fit,
- \c!lines=0,
- \c!start=0,
- #2]%
- \dorecurse{\getvalue{\??mc#1\c!nleft}} % todo
- {\dododefinecolumnset[#1][\recurselevel]}%
- \dorecurse{\getvalue{\??mc#1\c!nright}} % todo
- {\dododefinecolumnset[#1][\recurselevel]}%
- % redo framed settings
- \setupcolumnset[#1][1][\c!distance=\!!zeropoint]}
-
-\def\dododefinecolumnset[#1][#2]%
- {\presetlocalframed
- [\??mc#1#2]%
- \setupcolumnset
- [#1][#2]
- [\c!offset=\v!overlay,
- \c!frame=\v!off,
- \c!align=,
- \c!lines=0,% really needed since c!regels is now part of framed
- \c!width=\getvalue{\??mc#1\c!width},
- \c!distance=\getvalue{\??mc#1\c!distance}]}
+% this is a first step in upgrading
+
+\installcorenamespace{columnset}
+
+\installframedcommandhandler \??columnset {columnset} \??columnset
+
+\setupcolumnset % todo, use the rather basic backgroundframed
+ [\c!direction=\v!right,
+ \c!balance=\v!no,
+ \c!distance=1.5\bodyfontsize,
+ \c!n=2,
+ \c!nleft=\columnsetparameter\c!n,
+ \c!nright=\columnsetparameter\c!n,
+ \c!width=\v!fit,
+ \c!lines=0,
+ \c!start=0,
+ \c!frame=\v!off,
+ \c!offset=\v!overlay,
+ \c!frame=\v!off,
+ \c!align=,
+ \c!lines=0]
+
+\let\page_set_setup_saved\setupcolumnset
+
+\newconditional\c_page_set_defining
+
+\appendtoks
+ \ifconditional\c_page_set_defining \else
+ \settrue\c_page_set_defining
+ \dorecurse{\columnsetparameter\c!nleft}
+ {\normalexpanded{\definecolumnset[\currentcolumnset:\recurselevel][\currentcolumnset]}}%
+ \dorecurse{\columnsetparameter\c!nright}
+ {\normalexpanded{\definecolumnset[\currentcolumnset:\recurselevel][\currentcolumnset]}}%
+ \normalexpanded{\page_set_setup_saved[\currentcolumnset:1][\c!distance=\zeropoint]}%
+ \setfalse\c_page_set_defining
+ \fi
+\to \everydefinecolumnset
\unexpanded\def\setupcolumnset
- {\dotripleargument\dosetupcolumnset}
+ {\dotripleargument\page_set_setup}
-\def\dosetupcolumnset[#1][#2][#3]%
+\def\page_set_setup[#1][#2][#3]%
{\ifthirdargument
- \def\docommand##1%
+ \unexpanded\def\page_set_setup_step##1%
{\doifelse{##1}\v!each
- {\dorecurse{\getvalue{\??mc#1\c!n}}{\docommand\recurselevel}}
- {\getparameters[\??mc#1##1][#3]}}%
- \processcommalist[#2]\docommand
+ {\dorecurse{\namedcolumnsetparameter{#1}\c!n}{\page_set_setup_step\recurselevel}}
+ {\normalexpanded{\page_set_setup_saved[#1:\recurselevel]}[#3]}}%
+ \processcommalist[#2]\page_set_setup_step
\else
- \getparameters[\??mc#1][#2]%
+ \page_set_setup_saved[#1][#2]%
\fi}
-
-\definecolumnset[\s!default][\c!n=2] % fallback
-
+
+\definecolumnset
+ [\s!default]
+
\unexpanded\def\page_set_command_next_page
{\page_otr_fill_and_eject_page
\relax\ifnum\mofcolumns>\plusone
@@ -1470,7 +1509,6 @@
\let\page_set_command_next_page_and_inserts\page_set_command_next_page
-
\def\OTRSETgotocolumn
{\dosingleempty\doOTRSETgotocolumn}
@@ -1500,7 +1538,7 @@
\currenthcell\mofcolumns
\currentvcell#1\advance\currentvcell \minusone
\dorecurse\currentvcell
- {\OTRSETdoifcellelse\mofcolumns\recurselevel\donothing
+ {\page_set_cell_doifelse\mofcolumns\recurselevel\donothing
{\advance\scratchcounter\plusone}}
\getnoflines\pagetotal
\advance\scratchcounter-\noflines
@@ -1547,7 +1585,7 @@
% \currenthcell\mofcolumns
% \currentvcell#2\advance\currentvcell \minusone
% \dorecurse\currentvcell
-% {\OTRSETdoifcellelse\mofcolumns\recurselevel\donothing
+% {\page_set_cell_doifelse\mofcolumns\recurselevel\donothing
% {\advance\scratchcounter\plusone}}
% \getnoflines\pagetotal
% \advance\scratchcounter-\noflines
@@ -1578,7 +1616,7 @@
\def\dostartcolumnset[#1][#2]%
{\increment\columnsetlevel\relax
- \globallet\localcolumnmaxcells\!!zerocount
+ \global\let\localcolumnmaxcells\!!zerocount
\global\setfalse\OTRSETfinish
\ifnum\columnsetlevel=\plusone
\bgroup
@@ -1610,7 +1648,7 @@
% \stoptext
\def\OTRSETflushleftovers % new per 13/4/2006
- {\OTRSETdoifcellelse{1}{1}
+ {\page_set_cell_doifelse\plusone\plusone
{\bgroup
\OTRSETcentergridcells
\OTRSETbalancemethod\plusone
@@ -1643,7 +1681,7 @@
\fi
\fi
\global\setfalse\OTRSETfinish
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\page_otr_command_set_vsize
\pagebreak
\page_otr_command_set_vsize
@@ -1725,8 +1763,8 @@
{\doifsomething\OTRSETlist
{\getfromcommacommand[\OTRSETlist][1]%
\global\let\OTRSETidentifier\commalistelement
- \doifundefined{\??mc\OTRSETidentifier\c!n}
- {\globallet\OTRSETidentifier\s!default}%
+ \xdef\currentcolumnset{\commalistelement}%
+ \checkcolumnsetparent
\let\newcommalistelement\empty
\doglobal\replaceincommalist\OTRSETlist1%
\OTRSETrestart}}
@@ -1752,8 +1790,8 @@
\OTRSETsetcorrectnofcells\currentcolumnstartcell
\advance\scratchcounter \minusone
\dorecurse\scratchcounter
- {\OTRSETdoifcellelse\mofcolumns\recurselevel
- \donothing{\OTRSETsetgridcell\mofcolumns\recurselevel\copy\placeholderboxe}}%
+ {\page_set_cell_doifelse\mofcolumns\recurselevel
+ \donothing{\page_set_cell_set\mofcolumns\recurselevel\copy\placeholderboxe}}%
\egroup}}
\unexpanded\def\page_set_command_routine
@@ -1778,13 +1816,13 @@
% new: raw
\OTRSETsetcolumnmaxcells
% direction
- \doifelsevalue{\??mc\OTRSETidentifier\c!direction}\v!right
+ \doifelse{\columnsetparameter\c!direction}\v!right
{\columndirection\zerocount}
{\columndirection\plusone}%
% balancing
\OTRSETbalancemethod\zerocount
\processaction
- [\getvalue{\??mc\OTRSETidentifier\c!balance}]
+ [\columnsetparameter\c!balance]
[ \v!yes=>\OTRSETbalancemethod\plusone,
\v!top=>\OTRSETbalancemethod\plustwo,
\v!bottom=>\OTRSETbalancemethod\plusthree]}
@@ -1803,46 +1841,46 @@
\ifx\lastskipinotr\undefined \newskip\lastskipinotr \fi
\installoutputroutine\OTRSETflushpreposttext
- {\global\setbox\precolumnbox\vbox
+ {\global\setbox\b_page_set_preceding\vbox
{\unvbox\normalpagebox
\global\lastskipinotr\lastskip}%
\ifdim\lastskipinotr>\zeropoint
- \global\setbox\precolumnbox\hbox
- {\lower\strutdepth\box\precolumnbox}%
+ \global\setbox\b_page_set_preceding\hbox
+ {\lower\strutdepth\box\b_page_set_preceding}%
\fi
- \dp\precolumnbox\strutdepth
+ \dp\b_page_set_preceding\strutdepth
\ifcarryoverfootnotes \else
- \global\setbox\postcolumnbox\vbox{\placebottomnotes}%
+ \global\setbox\b_page_set_trailing\vbox{\placebottomnotes}%
\fi}
\let\precolumnlines \!!zerocount
\let\postcolumnlines\!!zerocount
\def\OTRSEThandlepreposttext
- {\ifdim\ht\precolumnbox>\zeropoint % new
- \getnoflines{\ht\precolumnbox}%
+ {\ifdim\ht\b_page_set_preceding>\zeropoint % new
+ \getnoflines{\ht\b_page_set_preceding}%
\edef\precolumnlines{\the\noflines}%
\doOTRSETsetgridcells
{\copy\placeholderboxe}
\plusone\plusone\nofcolumns\noflines
% normal version (single column set)
- % {\box\precolumnbox}%
+ % {\box\b_page_set_preceding}%
% compensated for bodyfont change
{\hbox
{\OTRSETsetcorrectcellht
- \raise\scratchdimen\box\precolumnbox}}%
+ \raise\scratchdimen\box\b_page_set_preceding}}%
\else
\let\precolumnlines\!!zerocount
\fi
- \ifdim\ht\postcolumnbox>\zeropoint % new, otherwise empty bottom line
- \getnoflines{\ht\postcolumnbox}%
+ \ifdim\ht\b_page_set_trailing>\zeropoint % new, otherwise empty bottom line
+ \getnoflines{\ht\b_page_set_trailing}%
\edef\postcolumnlines{\the\noflines}%
\advance\columnfreecells -\noflines
\advance\columnfreecells \plusone
\doOTRSETsetgridcells
{\copy\placeholderboxe}
\plusone\columnfreecells\nofcolumns\noflines
- {\box\postcolumnbox}%
+ {\box\b_page_set_trailing}%
\else
\let\postcolumnlines\!!zerocount
\fi}
@@ -1855,7 +1893,7 @@
\def\OTRSETcheckgrid
{\topskip1\topskip
- \ifforcecolumngrid
+ \ifforcecolumnsetgrid
\widowpenalty\zerocount
\clubpenalty\zerocount
\brokenpenalty\zerocount
@@ -1894,29 +1932,31 @@
% interface to footnotes
+\installcorenamespace{columnsetwidth}
+
\def\OTRSETassignwidths
{%\scratchdimen\makeupwidth
\freezetextwidth \scratchdimen\textwidth
%
\scratchcounter\zerocount
\dorecurse\nofcolumns
- {\doifelsevalue{\??mc\OTRSETidentifier\recurselevel\c!width}\v!fit
- {\advance\scratchcounter \plusone }
- {\advance\scratchdimen -\getvalue{\??mc\OTRSETidentifier\recurselevel\c!width}}%
- \advance\scratchdimen -\getvalue{\??mc\OTRSETidentifier\recurselevel\c!distance}}%
+ {\doifelse{\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!width}\v!fit
+ {\advance\scratchcounter \plusone}
+ {\advance\scratchdimen -\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!width}%
+ \advance\scratchdimen -\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}%
\ifcase\scratchcounter\else
\divide\scratchdimen \scratchcounter
\fi
- \setgvalue{\??mc\??mc\c!width}{0}%
+ \global\setfalse\c_page_set_width_set
\dorecurse\nofcolumns
- {\doifelsevalue{\??mc\OTRSETidentifier\recurselevel\c!width}\v!fit
+ {\doifelse{\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!width}\v!fit
{\dimen0=\scratchdimen}
- {\setgvalue{\??mc\??mc\c!width}{1}%
- \dimen0=\getvalue{\??mc\OTRSETidentifier\recurselevel\c!width}}%
- \setxvalue{\??mc\recurselevel\??mc\c!width}{\the\dimen0}}}
+ {\global\settrue\c_page_set_width_set
+ \dimen0=\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!width}%
+ \setxvalue{\??columnsetwidth\recurselevel}{\the\dimen0}}}
\def\OTRSETlocalwidth#1%
- {\getvalue{\??mc\number#1\??mc\c!width}}
+ {\getvalue{\??columnsetwidth\number#1}}
\newbox\placeholderboxa
\newbox\placeholderboxb
@@ -1930,7 +1970,7 @@
{\setbox\scratchbox\hbox to \hsize
{\iftracecolumnset
\hskip-.5ex%
- \startcolor[columnset:#2]\vrule\!!width1ex\!!height.5ex\!!depth.5ex\stopcolor
+ \startcolor[columnset:#2]\vrule\s!width\exheight\s!height.5\exheight\s!depth.5\exheight\stopcolor
\fi
\hss}%
\ifcase#1\relax
@@ -1984,14 +2024,14 @@
\def\page_set_place_float_bottom{\def\floatmethod{BOTS}\page_set_place_float_slot} % check
\def\OTRSETflushfloatbox % to be carried through further, and more info in the margin
- {\iftestfloatbox\ruledhbox\fi{\box\floatbox}}
+ {\box\floatbox}
\def\page_set_place_float_slot
{\setbox\floatbox\vbox{\page_otr_command_flush_float_box}%
\dp\floatbox\strutdp
\@EA\uppercasestring\floatmethod\to\floatmethod
\OTRSETstoreincolumnslot\floatmethod\floatbox
- \doinsertfloatinfo}
+ \page_floats_report_total}
% kind of new, looks much like OTRONE, but not entirely
@@ -1999,7 +2039,7 @@
\unexpanded\def\page_set_command_set_top_insertions
{\bgroup
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\noffloatinserts\zerocount
\let\totaltopinserted\!!zeropoint
\OTRSETdodosettopinserts
@@ -2020,9 +2060,9 @@
\egroup}
\def\OTRSETdodosettopinserts
- {\ifnum\noffloatinserts<\noftopfloats
- \dogetfloat
- \ifdim\topinserted=\zeropoint\relax
+ {\ifnum\noffloatinserts<\c_page_floats_n_of_top
+ \page_floats_get
+ \ifdim\d_page_floats_inserted_top=\zeropoint\relax
\settrue\c_page_set_top_of_insert
\else
\setfalse\c_page_set_top_of_insert
@@ -2038,25 +2078,28 @@
\fi
\page_otr_command_flush_float_box
\blank[\rootfloatparameter\c!spaceafter]}%
- \global\advance\topinserted \ht\scratchbox\relax
- \ifdim\topinserted>\vsize % was \textheight\relax
+ \global\advance\d_page_floats_inserted_top \ht\scratchbox\relax
+ \ifdim\d_page_floats_inserted_top>\vsize % was \textheight\relax
\OTRSETresavebox\floatbox
- \noffloatinserts\noftopfloats\relax
- \global\advance\topinserted -\ht\scratchbox
+ \noffloatinserts\c_page_floats_n_of_top\relax
+ \global\advance\d_page_floats_inserted_top -\ht\scratchbox
\let\OTRSETdodosettopinserts\relax % to be tested
\else
- \xdef\totaltopinserted{\the\topinserted}%
- \insert\topins{\forgetall\box\scratchbox}% interlineskip ?
- \ifsomefloatwaiting
+ \xdef\totaltopinserted{\the\d_page_floats_inserted_top}%
+ \insert\namedinsertionnumber\s!topfloat\bgroup
+ \forgetall
+ \box\scratchbox
+ \egroup
+ \ifconditional\c_page_floats_some_waiting
\advance\noffloatinserts \plusone
\else
- \noffloatinserts\noftopfloats\relax
+ \noffloatinserts\c_page_floats_n_of_top\relax
\fi
- \dofloatflushedinfo
+ \page_floats_report_flushed
\fi
\else
- \ifsomefloatwaiting
- \showmessage\m!floatblocks6{\the\noftopfloats}%
+ \ifconditional\c_page_floats_some_waiting
+ \showmessage\m!floatblocks6{\the\c_page_floats_n_of_top}%
\fi
\let\OTRSETdodosettopinserts\relax
\fi
@@ -2064,67 +2107,66 @@
\unexpanded\def\page_set_command_set_bottom_insertions
{\bgroup
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\noffloatinserts\zerocount
\OTRSETdodosetbotinserts
\fi
\egroup}
\def\OTRSETdodosetbotinserts
- {\ifnum\noffloatinserts<\nofbotfloats\relax
- \dogetfloat
- \global\advance\botinserted \ht\floatbox\relax
- \global\advance\botinserted \dp\floatbox\relax
- \global\advance\botinserted \floattopskip\relax
- \ifdim\botinserted<\pagegoal\relax
- \insert\botins
- {\forgetall
- \blank[\rootfloatparameter\c!spacebefore]%
- \page_otr_command_flush_float_box}%
- \ifsomefloatwaiting
+ {\ifnum\noffloatinserts<\c_page_floats_n_of_bottom\relax
+ \page_floats_get
+ \global\advance\d_page_floats_inserted_bottom\dimexpr\ht\floatbox+\dp\floatbox+\d_strc_floats_top\relax
+ \ifdim\d_page_floats_inserted_bottom<\pagegoal\relax
+ \insert\namedinsertionnumber\s!bottomfloat\bgroup
+ \forgetall
+ \blank[\rootfloatparameter\c!spacebefore]%
+ \page_otr_command_flush_float_box
+ \egroup
+ \ifconditional\c_page_floats_some_waiting
\advance\noffloatinserts \plusone
\else
- \noffloatinserts\nofbotfloats
+ \noffloatinserts\c_page_floats_n_of_bottom
\fi
- \dofloatflushedinfo
+ \page_floats_report_flushed
\else
\OTRSETresavebox\floatbox
- \noffloatinserts\nofbotfloats\relax
+ \noffloatinserts\c_page_floats_n_of_bottom\relax
\fi
- \global\nofloatpermittedtrue % vgl topfloats s!
+ \global\settrue\c_page_floats_not_permitted % vgl topfloats s!
\else
- \ifsomefloatwaiting
- \showmessage\m!floatblocks7{\the\nofbotfloats}%
+ \ifconditional\c_page_floats_some_waiting
+ \showmessage\m!floatblocks7{\the\c_page_floats_n_of_bottom}%
\fi
\let\OTRSETdodosetbotinserts\relax
\fi
\OTRSETdodosetbotinserts}
\unexpanded\def\page_set_command_flush_top_insertions
- {\ifvoid\topins\else
+ {\ifvoid\namedinsertionnumber\s!topfloat\else
\ifvoid\columntopbox\mofcolumns
- \columnsettopbox\mofcolumns\box\topins
+ \columnsettopbox\mofcolumns\box\namedinsertionnumber\s!topfloat
\else
\columnsettopbox\mofcolumns\vbox % temp, must be better
{\forgetall
\offinterlineskip
\box\columntopbox\mofcolumns
- \box\topins}
+ \box\namedinsertionnumber\s!topfloat}
\fi
\fi
- \global\topinserted\zeropoint\relax} % goes away
+ \global\d_page_floats_inserted_top\zeropoint\relax} % goes away
\unexpanded\def\page_set_command_flush_bottom_insertions
- {\ifvoid\botins \else
- \columnsetbotbox\mofcolumns\box\botins
+ {\ifvoid\namedinsertionnumber\s!bottomfloat \else
+ \columnsetbotbox\mofcolumns\box\namedinsertionnumber\s!bottomfloat
% \else
% \columnsetbotbox\mofcolumns\vbox % temp, must be better
% {\forgetall
% \offinterlineskip
-% \box\botins
+% \box\namedinsertionnumber\s!bottomfloat
% \box\columnbotbox\mofcolumns}
\fi
- \global\botinserted\zeropoint\relax} % goes away
+ \global\d_page_floats_inserted_bottom\zeropoint\relax} % goes away
% set instead of text
@@ -2135,6 +2177,8 @@
% links rechts => odd, even, n, named
+\definesystemvariable {mt} % MulTicolumn
+
\unexpanded\def\definecolumntextarea
{\dotripleempty\dodefinecolumntextarea}
@@ -2285,7 +2329,7 @@
[\??mt\currentcolumntestarea]
[\c!location=,% new (*)
\c!width=\!!widtha,\c!height=\!!heighta,\c!lines=]
- {\columntextareaparameter\empty}}%
+ {\columntextareaparameter\empty{}}}% messy
\!!counta\columntextareaparameter\c!x
\!!countb\columntextareaparameter\c!y
\advance\!!countb \columntextareaparameter\c!ny
@@ -2311,13 +2355,14 @@
\c!height=\!!heighta]%
{\copy\scratchbox}%
\fi}%
- \OTRSETsetgridcell\!!counta\!!countb\box0
+ \page_set_cell_set\!!counta\!!countb\box0
\ifcase\!!countc\else
\advance\!!counta \columntextareaparameter\c!nx
\advance\!!counta -\!!countc
\advance\!!widtha -\!!widthb
\setbox0\hbox
- {\hskip-\namedlayoutparameter\v!odd\c!backspace
+% {\hskip-\namedlayoutparameter\v!odd\c!backspace
+ {\hskip-\layoutparameter\c!backspace
\clip
[ %\c!topoffset=\columntextareaparameter\c!clipoffset,%
%\c!bottomoffset=\columntextareaparameter\c!clipoffset,%
@@ -2328,7 +2373,7 @@
\c!height=\!!heighta,%
\c!hoffset=\!!widthb]%
{\copy\scratchbox}}%
- \OTRSETsetgridcell\!!counta\!!countb\box0%
+ \page_set_cell_set\!!counta\!!countb\box0%
\fi}
\unexpanded\def\setupcolumntextareatext
@@ -2366,17 +2411,20 @@
\fi
\advance\!!counta \plusone
\dostepwiserecurse\!!counta\!!countb\plusone
- {\advance\!!widtha\OTRSETgetparameter\c!distance\recurselevel}%
+ {\advance\!!widtha\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}%
\!!widthb\!!widtha
\advance\!!widthb -\!!countc\textwidth
\ifodd\realpageno \else % tricky, assumes that we keep there
\ifcase\!!countc\else
% not yet ok for single/doublesided
- \advance\!!widtha \namedlayoutparameter\v!even\c!backspace
- \advance\!!widtha \namedlayoutparameter\v!odd \c!backspace
- \advance\!!widthb \namedlayoutparameter\v!even\c!backspace
+% \advance\!!widtha \namedlayoutparameter\v!even\c!backspace
+% \advance\!!widtha \namedlayoutparameter\v!odd \c!backspace
+% \advance\!!widthb \namedlayoutparameter\v!even\c!backspace
+ \advance\!!widtha \layoutparameter\c!backspace
+ \advance\!!widtha \layoutparameter\c!backspace
+ \advance\!!widthb \layoutparameter\c!backspace
\dorecurse\!!countc
- {\advance\!!widtha\OTRSETgetparameter\c!distance\recurselevel}%
+ {\advance\!!widtha\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}%
\fi
\fi}
@@ -2386,12 +2434,12 @@
{\!!counta#1\!!countb#2\docalculatecolumnsetspan
\edef\columnsetspanhsize{\the\!!widtha}}
-\unexpanded\def\page_set_command_set_float_hsize
+\unexpanded\def\page_set_command_set_float_hsize % this helper has to be moved to strc-flt
{\hsize % maybe checking optional
- \ifdim\tempfloatwidth>\makeupwidth
+ \ifdim\d_strc_float_temp_width>\makeupwidth
\makeupwidth
\else
- \tempfloatwidth
+ \d_strc_float_temp_width
\fi}
\unexpanded\def\definecolumnsetspan
@@ -2538,12 +2586,12 @@
% \stopcolumnset
\unexpanded\def\page_set_command_flush_saved_floats % rather similar to _one_ but this might change
- {\global\topinserted\zeropoint
- \global\botinserted\zeropoint
- \ifflushingfloats \else
+ {\global\d_page_floats_inserted_top\zeropoint
+ \global\d_page_floats_inserted_bottom\zeropoint
+ \ifconditional\c_page_floats_flushing \else
\page_set_command_set_top_insertions
\page_set_command_set_bottom_insertions
- \ifsomefloatwaiting
+ \ifconditional\c_page_floats_some_waiting
\doif{\rootfloatparameter\c!cache}\v!no\page_set_command_flush_floats % could be _otr_
\fi
\fi}
diff --git a/Master/texmf-dist/tex/context/base/page-sid.mkiv b/Master/texmf-dist/tex/context/base/page-sid.mkiv
index f4d42a41ce9..f7a2357bf34 100644
--- a/Master/texmf-dist/tex/context/base/page-sid.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-sid.mkiv
@@ -68,29 +68,8 @@
\newdimen \d_page_sides_bottomskip
\newdimen \d_page_sides_downshift
-\setnewconstant \c_page_sides_align \zerocount
\setnewconstant \c_page_sides_method \plusone % 0=raw 1=safe (.99) 2=tight (-1pt)
-
-% begin of public variables, this will change
-%
-% we need a setter and resetter .. or maybe a \placesidefloat[settings] to
-% keep variables separated
-
-\let\sidefloatshift \d_page_sides_shift
-\let\sidefloatextrashift\d_page_sides_extrashift
-\let\sidefloatleftshift \d_page_sided_leftshift
-\let\sidefloatrightshift\d_page_sided_rightshift
-\let\sidefloatleftskip \d_page_sides_leftskip
-\let\sidefloatrightskip \d_page_sides_rightskip
-\let\sidefloatmaximum \d_page_sides_maximum
-\let\sidefloattopskip \d_page_sides_topskip
-\let\sidefloatbottomskip\d_page_sides_bottomskip
-\let\sidefloatdownshift \d_page_sides_downshift
-
-\let\sidefloatsidelines \c_page_sides_n_of_lines
-
-\let\sidefloatalign \c_page_sides_align
-\let\sidefloatmethod \c_page_sides_method
+\setnewconstant \c_page_sides_align \zerocount
\def\page_sides_process_float_backspace {\global\c_page_sides_float_type\plusone \page_sides_handle_float}
\def\page_sides_process_float_leftedge {\global\c_page_sides_float_type\plustwo \page_sides_handle_float}
@@ -104,13 +83,12 @@
\let\logsidefloat \relax
-\newif\ifroomforfloat % shared (will change)
\newif\iftracesidefloats % public (might change)
% end of public variables
\def\page_sides_insert_info
- {\doinsertfloatinfo}
+ {\page_floats_report_total}
\def\page_sides_apply_horizontal_shift
{\ifdim\d_page_sides_maximum>\zeropoint
@@ -211,8 +189,8 @@
+\compensatedinnermakeupmargin
\relax
\fi
- \ifdim\d_page_sides_rightskip>\zeropoint \doglobal\advance\d_page_sides_rightskip\rightskip \fi
- \ifdim\d_page_sides_leftskip >\zeropoint \doglobal\advance\d_page_sides_leftskip \leftskip \fi}
+ \ifdim\d_page_sides_rightskip>\zeropoint \global\advance\d_page_sides_rightskip\rightskip \fi
+ \ifdim\d_page_sides_leftskip >\zeropoint \global\advance\d_page_sides_leftskip \leftskip \fi}
\def\page_sides_flush_floats
{\par
@@ -232,10 +210,7 @@
\doloop
{\strut
\iftracesidefloats
- \color[darkgray]%
- {\baselinerulefalse
- \boxrulewidth.5\points
- \ruledhbox{\strut\kern\d_page_sides_width}}%
+ \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
\fi
\par
\ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
@@ -330,7 +305,9 @@
\ifdim\wd\b_page_sides_bottom<\d_page_sides_hsize
\parskip\zeropoint
%\noindent
- \ifinner\else\vadjust{\penalty\minusone}\fi
+ \ifinner\else
+ \vadjust{\penalty\minusone}%
+ \fi
\ifconditional\c_page_sides_l_eq
\global\setfalse\c_page_sides_l_eq
\else
@@ -421,7 +398,7 @@
\else
\kern\d_page_sides_shift
\fi
- \vbox{#1\ifnum\c_page_sides_align=\plusfour \removedepth\fi}%
+ \vbox{#1\ifnum\c_page_sides_align=\plusfour \removedepth \fi}%
\ifnum\c_page_sides_float_type>\plusfour
\kern\d_page_sides_shift
\else
@@ -535,7 +512,7 @@
\def\page_sides_push_float_inline#1%
{\begingroup
- \everypar\emptytoks % needed !
+ \reseteverypar % needed !
\parskip\zeropoint % needed !
\page_sides_set_skips
\page_sides_insert_info
@@ -579,7 +556,7 @@
\else\ifnum\c_page_sides_float_type>\plusfive
\global\d_page_sides_width\zeropoint
\else
- \global\d_page_sides_width\dimexpr\wd\floatbox+\floatsideskip\relax
+ \global\d_page_sides_width\dimexpr\wd\floatbox+\d_strc_floats_margin\relax
\fi\fi
\ifdim\d_page_sides_width<\zeropoint
\global\d_page_sides_width\zeropoint
@@ -601,7 +578,7 @@
\fi
\relax % really needed ! ! ! !
\ifdim\dimen0>\dimen2
- \global\roomforfloatfalse
+ \global\setfalse\c_page_floats_room
\else
\ifdim\dimexpr\pagegoal-\d_page_sides_vsize\relax<\d_page_sides_bottomskip
\global\advance\d_page_sides_vsize \dimen0
@@ -612,15 +589,15 @@
\global\advance\d_page_sides_vsize \d_page_sides_bottomskip
\global\setfalse\c_page_sides_short
\fi
- \global\roomforfloattrue
+ \global\settrue\c_page_floats_room
\fi}
\def\page_sides_prepare_space
{\par
\whitespace
\begingroup
- \everypar\emptytoks
\forgetall
+ \reseteverypar
\verticalstrut
\vskip-\struttotal
\endgroup}
@@ -633,7 +610,7 @@
\page_sides_relocate_float{#1}%
\page_sides_apply_vertical_shift
\page_sides_analyse_space
- \ifroomforfloat \else
+ \ifconditional\c_page_floats_room \else
\page_otr_fill_and_eject_page
\page_sides_analyse_space
\page_sides_inject_dummy_lines
@@ -653,9 +630,6 @@
\let\page_sides_check_floats\page_sides_check_floats_indeed
-\def\iffirstsidefloatparagraph
- {\ifcase\c_page_sides_checks_done\or}
-
\unexpanded\def\page_sides_check_floats_set
{\scratchdimen\dimexpr\d_page_sides_progress+\strutht-\roundingeps\relax
\c_page_sides_n_of_hang\scratchdimen
@@ -704,7 +678,7 @@
\iftracesidefloats
\hskip-\d_page_sides_width % kern
\color[darkgray]%
- {\vrule\!!height.5\points\!!depth.5\points\!!width\d_page_sides_width
+ {\vrule\s!height.5\points\s!depth.5\points\s!width\d_page_sides_width
\llap{\showstruts\strut\kern.25\bodyfontsize}}%
\fi}
@@ -788,7 +762,7 @@
% some will become obsolete
\def\checksidefloat {\page_sides_check_floats}
-%def\flushsidefloats {\page_sides_flush_floats}
+\def\flushsidefloats {\page_sides_flush_floats}
\def\flushsidefloatsafterpar{\page_sides_flush_floats_after_par}
%def\forgetsidefloats {\page_sides_forget_floats}
%def\synchronizesidefloats {\page_sides_synchronize_floats}
diff --git a/Master/texmf-dist/tex/context/base/page-spr.mkiv b/Master/texmf-dist/tex/context/base/page-spr.mkiv
index f4768d4e95a..dc1e013d50d 100644
--- a/Master/texmf-dist/tex/context/base/page-spr.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-spr.mkiv
@@ -17,10 +17,9 @@
\unprotect
-\newbox\spreadbox
-\newif \ifinspread
-
-\setnewconstant\showspreadmode\plusone
+\newbox \b_page_spread_content
+\newconditional\c_page_spread_busy
+\newconditional\c_page_spread_once % when true only one flush (writes etc)
% beware, ugly overload, to be redone
@@ -35,52 +34,81 @@
\noindent % content can be < \hsize
\page_otr_command_package_contents#2#3}}%
\dp#1\zeropoint
- \setbox#1\hbox to \makeupwidth
- {\ifinspread
- \ifvoid\spreadbox
- \global\setbox\spreadbox\box#1%
- \copy\spreadbox\hss % left page
+ \ifconditional\c_page_spread_busy
+ \normalsettextpagecontent_spread{#1}%
+ \else
+ \normalsettextpagecontent_normal{#1}%
+ \fi}
+
+\def\normalsettextpagecontent_normal#1%
+ {\setbox#1\hbox to \makeupwidth
+ {\hss\box#1\hss}} % never change the \hss's
+
+\def\normalsettextpagecontent_spread#1%
+ {\setbox#1\hbox to \makeupwidth
+ {\ifvoid\b_page_spread_content
+ \ifconditional\c_page_spread_once
+ \box#1%
\else
- % prevent duplicate writes in normal run
- \ifarrangingpages \else \ifcase\showspreadmode
- \global\setbox\spreadbox\emptyhbox
- \wd\spreadbox\makeupwidth
- \ht\spreadbox\textheight
- \fi \fi
- \hss\box\spreadbox % right page
+ \global\setbox\b_page_spread_content\box#1%
+ \copy\b_page_spread_content
\fi
+ \hss % left page
\else
- \hss\box#1\hss % never change the \hss's
+ \hss % right page
+ \ifarrangingpages
+ % writes don't matter anyway
+ \else\ifconditional\c_page_spread_once
+ \global\setbox\b_page_spread_content\emptyhbox
+ \wd\b_page_spread_content\makeupwidth
+ \ht\b_page_spread_content\textheight
+ \else
+ % writes can interfere (todo: write stripper, but how about hyperlinks)
+ \fi \fi
+ \box\b_page_spread_content
\fi}}
-\def\doflushspread
- {\ifinspread \ifvoid\spreadbox\else
+\unexpanded\def\page_spread_flush
+ {\ifconditional\c_page_spread_busy \ifvoid\b_page_spread_content\else
% this page will be discarded later
\emptyhbox \page
\fi \fi }
\unexpanded\def\startspread
- {\bgroup
- \ifdoublesided
- \page[\v!left]%
- %\setsystemmode{spread}%
- \inspreadtrue
- \expanded{\setuplayout[\c!textwidth=\the\dimexpr2\textwidth+2\backspace\relax]}%
- \unexpanded\def\startspread{\bgroup\let\stopspread\egroup}%
- \let\stopspread\dostopspread
+ {\ifdoublesided
+ \ifconditional\c_page_spread_busy
+ \doubleexpandafter\page_spread_start_nop
+ \else
+ \doubleexpandafter\page_spread_start_yes
+ \fi
\else
- \let\stopspread\egroup
+ \expandafter\page_spread_start_nop
\fi}
\let\stopspread\relax
-\def\dostopspread
- {\kern\zeropoint\page
- \inspreadfalse
+\unexpanded\def\page_spread_start_yes
+ {\bgroup
+ \page[\v!left]%
+ %\setsystemmode{spread}%
+ \settrue\c_page_spread_busy
+ \expanded{\setuplayout[\c!textwidth=\the\dimexpr2\textwidth+2\backspace\relax]}%
+ \let\stopspread\page_spread_stop_yes}
+
+\unexpanded\def\page_spread_stop_yes
+ {\kern\zeropoint
+ \page
\setuplayout[\c!textwidth=\textwidth]
\page[\v!left]
\egroup}
+\unexpanded\def\page_spread_start_nop
+ {\bgroup
+ \let\stopspread\page_spread_stop_nop}
+
+\unexpanded\def\page_spread_stop_nop
+ {\egroup}
+
\protect \endinput
% texexec --arr --pdf test
diff --git a/Master/texmf-dist/tex/context/base/page-str.lua b/Master/texmf-dist/tex/context/base/page-str.lua
index 48edd4cfe87..f6314657f01 100644
--- a/Master/texmf-dist/tex/context/base/page-str.lua
+++ b/Master/texmf-dist/tex/context/base/page-str.lua
@@ -74,7 +74,7 @@ function streams.collect(head,where)
dana[1] = head
end
if trace_collecting then
- report_streams("appending snippet '%s' to slot %s",name,#dana)
+ report_streams("appending snippet %a to slot %s",name,#dana)
end
return nil, true
else
@@ -91,7 +91,7 @@ function streams.push(thename)
if dana then
dana[#dana+1] = false
if trace_collecting then
- report_streams("pushing snippet '%s'",thename)
+ report_streams("pushing snippet %a",thename)
end
end
end
@@ -105,7 +105,7 @@ function streams.flush(name,copy) -- problem: we need to migrate afterwards
-- nothing to flush
elseif copy then
if trace_flushing then
- report_streams("flushing copies of %s slots of '%s'",dn,name)
+ report_streams("flushing copies of %s slots of %a",dn,name)
end
for i=1,dn do
local di = dana[i]
@@ -118,7 +118,7 @@ function streams.flush(name,copy) -- problem: we need to migrate afterwards
end
else
if trace_flushing then
- report_streams("flushing %s slots of '%s'",dn,name)
+ report_streams("flushing %s slots of %a",dn,name)
end
for i=1,dn do
local di = dana[i]
@@ -137,7 +137,7 @@ function streams.synchronize(list) -- this is an experiment !
list = settings_to_array(list)
local max = 0
if trace_flushing then
- report_streams("synchronizing list: %s",concat(list," "))
+ report_streams("synchronizing list: % t",list)
end
for i=1,#list do
local dana = data[list[i]]
@@ -168,12 +168,12 @@ function streams.synchronize(list) -- this is an experiment !
end
dana[m] = vbox
if trace_flushing then
- report_streams("slot %s of '%s' is packed to height %s and depth %s",m,name,ht,dp)
+ report_streams("slot %s of %a is packed to height %p and depth %p",m,name,ht,dp)
end
end
end
if trace_flushing then
- report_streams("slot %s has max height %s and max depth %s",m,height,depth)
+ report_streams("slot %s has max height %p and max depth %p",m,height,depth)
end
local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
local struthtdp = strutht + strutdp
@@ -189,7 +189,7 @@ function streams.synchronize(list) -- this is an experiment !
-- actually we need to add glue and repack
vbox.height, vbox.depth = height, depth
if trace_flushing then
- report_streams("slot %s of '%s' with delta (%s,%s) is compensated",m,i,delta_height,delta_depth)
+ report_streams("slot %s of %a with delta (%p,%p) is compensated",m,i,delta_height,delta_depth)
end
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
@@ -210,7 +210,7 @@ function streams.synchronize(list) -- this is an experiment !
vbox.list = nil
free_node(vbox)
if trace_flushing then
- report_streams("slot %s:%s with delta (%s,%s) is compensated by %s lines",m,i,delta_height,delta_depth,n)
+ report_streams("slot %s:%s with delta (%p,%p) is compensated by %s lines",m,i,delta_height,delta_depth,n)
end
end
end
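
The reporter calls in the page-str.lua hunks above switch from plain %s directives to ConTeXt's extended ones: judging from the replacements themselves, %a prints a name quoted, %p prints a dimension in points, and "% t" joins a table with spaces. The stand-alone Lua fragment below is only a rough, hypothetical mimic of those directives, added to show what the rewritten messages are expected to produce; the report function and its formatting rules here are assumptions for illustration, not the actual ConTeXt formatter code.

-- illustrative only: a crude stand-in for the %a / %p / % t directives
local function report(fmt, ...)
    local args, n = { ... }, 0
    local line = fmt:gsub("%%( ?)([apts])", function(space, directive)
        n = n + 1
        local value = args[n]
        if directive == "a" then
            return ("'%s'"):format(tostring(value))  -- quoted name
        elseif directive == "p" then
            return ("%.5fpt"):format(value / 65536)  -- scaled points shown as points
        elseif directive == "t" then
            return table.concat(value, " ")          -- list joined with spaces
        else
            return tostring(value)                   -- plain %s
        end
    end)
    print(line)
end

report("appending snippet %a to slot %s", "mystream", 3)
report("slot %s of %a is packed to height %p and depth %p", 2, "mystream", 655360, 131072)
report("synchronizing list: % t", { "one", "two", "three" })
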
diff --git a/Master/texmf-dist/tex/context/base/page-str.mkiv b/Master/texmf-dist/tex/context/base/page-str.mkiv
index f14f2f716b1..200a7137774 100644
--- a/Master/texmf-dist/tex/context/base/page-str.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-str.mkiv
@@ -36,25 +36,25 @@
\unprotect
\let \currentoutputstream \empty
-\newif \ifinoutputstream
+\newif \ifinoutputstream % will become a conditional or mode
\newtoks \everyenableoutputstream
\appendtoks
\page_otr_command_flush_side_floats
\to \everyenableoutputstream
-\def\initializeoutputstreams
+\unexpanded\def\initializeoutputstreams
{\ctxlua{streams.initialize()}%
\glet\initializeoutputstreams\relax}
-\def\enableoutputstream[#1]% could be \startoutputsubstream
+\unexpanded\def\enableoutputstream[#1]% could be \startoutputsubstream
{\initializeoutputstreams
\the\everyenableoutputstream
\inoutputstreamtrue
\xdef\currentoutputstream{#1}%
\ctxlua{streams.enable("#1")}}
-\def\disableoutputstream
+\unexpanded\def\disableoutputstream
{\inoutputstreamfalse
\global\let\currentoutputstream\s!default
\ctxlua{streams.disable()}}
@@ -71,15 +71,13 @@
{\ctxlua{streams.stop()}%
\endgroup}
-\let\currentoutputsubstream\empty
-
-\def\startoutputsubstream[#1]%
- {\glet\savedcurrentoutputstream\currentoutputstream
+\unexpanded\def\startoutputsubstream[#1]% just push/pop instead
+ {\globalpushmacro\currentoutputstream
\enableoutputstream[#1]}
-\def\stopoutputsubstream
- {\glet\currentoutputstream\savedcurrentoutputstream
- \enableoutputstream[\savedcurrentoutputstream]}
+\unexpanded\def\stopoutputsubstream
+ {\globalpopmacro\currentoutputstream
+ \enableoutputstream[\currentoutputstream]}
\def\flushoutputstream [#1]{\ctxlua{streams.flush("#1")}}
\def\outputstreamcopy [#1]{\vbox{\ctxlua{streams.flush("#1",true)}}}
@@ -89,7 +87,8 @@
\def\synchronizestreams [#1]{\ctxlua{streams.synchronize("#1")}}
\def\dopushoutputstream [#1]{\ctxlua{streams.push("#1")}}
-\def\pushoutputstream {\dosingleempty\dopushoutputstream}
+\unexpanded\def\pushoutputstream
+ {\dosingleempty\dopushoutputstream}
% \unexpanded\def\defineoutputstream[#1]%
% {\doifundefined{otrs:#1}{\expandafter\newbox\csname otrs:#1\endcsname}}
@@ -103,13 +102,15 @@
% \directsetup{stream:\firstoutputstream:bottom}
% \directsetup{stream:\firstoutputstream:reset}
-%D Obsolete in \MKIV:
-
-\unexpanded\def\definemarknote {\dodoubleempty\dodefinemarknote}
-\def\dodefinemarknote[#1][#2]{}
-\def\setmarknote [#1]{\gobbleoneargument}
-\def\flushmarknotes [#1]{}
-\def\erasemarknotes [#1]{}
+% Obsolete in \MKIV:
+%
+% \unexpanded\def\definemarknote
+% {\dodoubleempty\dodefinemarknote}
+%
+% \def\dodefinemarknote[#1][#2]{}
+% \def\setmarknote [#1]{\gobbleoneargument}
+% \def\flushmarknotes [#1]{}
+% \def\erasemarknotes [#1]{}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-txt.mkvi b/Master/texmf-dist/tex/context/base/page-txt.mkvi
index c86597338ca..707af25e9bb 100644
--- a/Master/texmf-dist/tex/context/base/page-txt.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-txt.mkvi
@@ -430,6 +430,9 @@
\global\setfalse\resyncaftertextline
\fi}
+\def\getspecificlayouttext#vertical#horizontal#what%
+ {\csname\namedlayoutelementhash{#vertical:#horizontal}#what\endcsname}
+
% \settext[header][text][middle][xxx][yyy]
\def\settextcontent
@@ -474,8 +477,6 @@
%D macros. These are hooked into the general purpose token
%D list registers mentioned before.
-\def\ignoredlinebreak{\unskip\space\ignorespaces}
-
\def\page_layouts_place_text_line_indeed#vertical#height%
{\let\currentlayouttextline#vertical%
\ifdim#height>\zeropoint\relax % prevents pagenumbers when zero height
@@ -554,40 +555,40 @@
{\edef\currentlayoutelement{\currentlayouttextline:\v!edge}%
\page_layouts_place_element_indeed\leftedgewidth
{\hss\layoutelementparameter#parameter}%
- \hskip\leftedgedistance}
+ \kern\leftedgedistance}
\def\page_layouts_left_margin_element#parameter#extrastate%
{\edef\currentlayoutelement{\currentlayouttextline:\v!margin}%
\page_layouts_place_element_indeed\leftmarginwidth
{\hbox to \leftmarginwidth{\hss\layoutelementparameter#parameter}%
\ifnum#extrastate=\page_layouts_extra_at_margin_left
- \hskip-\leftmarginwidth
+ \kern-\leftmarginwidth
\hbox to \leftmarginwidth{\hss\layoutelementparameter\c!margintext}%
\fi}%
- \hskip\leftmargindistance}
+ \kern\leftmargindistance}
\def\page_layouts_text_body_element#left#middle#right#extrastate%
{\edef\currentlayoutelement{\currentlayouttextline:\v!text}%
\page_layouts_place_element_indeed\makeupwidth
{\hbox to \makeupwidth{\ifnum#extrastate=\page_layouts_extra_at_margin_left\page_layouts_place_extra_text_left\fi\layoutelementparameter#left\hss}%
- \hskip-\makeupwidth
+ \kern-\makeupwidth
\hbox to \makeupwidth{\hss\layoutelementparameter#middle\hss}%
- \hskip-\makeupwidth
+ \kern-\makeupwidth
\hbox to \makeupwidth{\hss\layoutelementparameter#right\ifnum#extrastate=\page_layouts_extra_at_margin_right\page_layouts_place_extra_text_right\fi}}}
\def\page_layouts_right_margin_element#parameter#extrastate%
{\edef\currentlayoutelement{\currentlayouttextline:\v!margin}%
- \hskip\rightmargindistance
+ \kern\rightmargindistance
\page_layouts_place_element_indeed\rightmarginwidth
{\hbox to \rightmarginwidth{\layoutelementparameter#parameter\hss}%
\ifnum#extrastate=\page_layouts_extra_at_margin_right
- \hskip-\rightmarginwidth
+ \kern-\rightmarginwidth
\hbox to \rightmarginwidth{\layoutelementparameter\c!margintext\hss}%
\fi}}
\def\page_layouts_right_edge_element#parameter%
{\edef\currentlayoutelement{\currentlayouttextline:\v!edge}%
- \hskip\rightedgedistance
+ \kern\rightedgedistance
\page_layouts_place_element_indeed\rightedgewidth
{\layoutelementparameter#parameter\hss}}
@@ -618,8 +619,8 @@
%D This code will move to \type {page-flt.tex}.
-\appendtoks \placerightmarginblock \hskip-\rightmarginwidth \to \rightmargintextcontent
-\appendtoks \placeleftmarginblock \hskip-\leftmarginwidth \to \leftmargintextcontent
+\appendtoks \placerightmarginblock \kern-\rightmarginwidth \to \rightmargintextcontent
+\appendtoks \placeleftmarginblock \kern-\leftmarginwidth \to \leftmargintextcontent
%D \macros
%D {definetext}
@@ -693,26 +694,26 @@
% associated. This is a rather messy test but better than the MkII
% way where we use states and keep settings.
-\let\m_page_layouts_page_number_location_ \relax
-\let\m_page_layouts_page_number_location__v\relax
-\let\m_page_layouts_page_number_location__h\relax
-\let\m_page_layouts_page_number_location__x\relax
+\let\m_page_layouts_page_number_location \relax
+\let\m_page_layouts_page_number_location_v\relax
+\let\m_page_layouts_page_number_location_h\relax
+\let\m_page_layouts_page_number_location_x\relax
-\def\page_layouts_place_page_number % also elsewhere .. beware, not \unexpanded else
- {\placelocationpagenumber} % test below fails
+\def\page_layouts_place_page_number % also elsewhere .. beware, not \unexpanded else
+ {\strc_pagenumbers_place_location} % test below fails
\def\page_layouts_reset_page_number_location
- {\ifx\m_page_layouts_page_number_location__v\relax\else
- \edef\currentlayoutelement{\m_page_layouts_page_number_location__v:\m_page_layouts_page_number_location__h}%
- \edef\page_layouts_previous_page_number_locator{\detokenizedlayoutelementparameter\m_page_layouts_page_number_location__x}%
+ {\ifx\m_page_layouts_page_number_location_v\relax\else
+ \edef\currentlayoutelement{\m_page_layouts_page_number_location_v:\m_page_layouts_page_number_location_h}%
+ \edef\page_layouts_previous_page_number_locator{\detokenizedlayoutelementparameter\m_page_layouts_page_number_location_x}%
\doif{\meaning\page_layouts_previous_page_number_locator}{\meaning\page_layouts_place_page_number}
- {\resetlayoutelementparameter\m_page_layouts_page_number_location__x}%
+ {\resetlayoutelementparameter\m_page_layouts_page_number_location_x}%
\fi}
\def\page_layouts_set_page_number_location
- {\edef\currentlayoutelement{\m_page_layouts_page_number_location__v:\m_page_layouts_page_number_location__h}%
- \letlayoutelementparameter\m_page_layouts_page_number_location__x\page_layouts_place_page_number
- \ifx\m_page_layouts_page_number_location__x\c!marginedgetext
+ {\edef\currentlayoutelement{\m_page_layouts_page_number_location_v:\m_page_layouts_page_number_location_h}%
+ \letlayoutelementparameter\m_page_layouts_page_number_location_x\page_layouts_place_page_number
+ \ifx\m_page_layouts_page_number_location_x\c!marginedgetext
\let\page_layouts_place_extra_text_left \page_layouts_place_page_number_left
\let\page_layouts_place_extra_text_right\page_layouts_place_page_number_right
\else
@@ -721,38 +722,39 @@
\fi}
\def\page_layouts_identify_page_number_location
- {\let\m_page_layouts_page_number_location__v\v!footer
- \let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!middletext
- \processallactionsinset[\@@nmlocation]
- [ \v!header=>\let\m_page_layouts_page_number_location__v\v!header,
- \v!footer=>\let\m_page_layouts_page_number_location__v\v!footer,
- \v!middle=>\let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!middletext,
- \v!left=>\let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!lefttext,
- \v!right=>\let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!righttext,
- \v!inleft=>\let\m_page_layouts_page_number_location__h\v!margin
- \let\m_page_layouts_page_number_location__x\c!lefttext,
- \v!inright=>\let\m_page_layouts_page_number_location__h\v!margin
- \let\m_page_layouts_page_number_location__x\c!righttext,
- \v!inmargin=>\let\m_page_layouts_page_number_location__h\v!margin
- \def\m_page_layouts_page_number_location__x{\ifdoublesided\c!margintext\else\c!righttext\fi},
- \v!margin=>\let\m_page_layouts_page_number_location__h\v!margin
- \def\m_page_layouts_page_number_location__x{\ifdoublesided\c!margintext\else\c!righttext\fi},
- \v!atmargin=>\let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!marginedgetext,
- \v!marginedge=>\let\m_page_layouts_page_number_location__h\v!text
- \let\m_page_layouts_page_number_location__x\c!marginedgetext]}
-
-\unexpanded\def\dosetpagenumberlocation
- {\ifx\@@nmlocation\m_page_layouts_page_number_location_
+ {\let\m_page_layouts_page_number_location_v\v!footer
+ \let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!middletext
+ \processallactionsinset[\directpagenumberingparameter\c!location]
+ [ \v!header=>\let\m_page_layouts_page_number_location_v\v!header,
+ \v!footer=>\let\m_page_layouts_page_number_location_v\v!footer,
+ \v!middle=>\let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!middletext,
+ \v!left=>\let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!lefttext,
+ \v!right=>\let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!righttext,
+ \v!inleft=>\let\m_page_layouts_page_number_location_h\v!margin
+ \let\m_page_layouts_page_number_location_x\c!lefttext,
+ \v!inright=>\let\m_page_layouts_page_number_location_h\v!margin
+ \let\m_page_layouts_page_number_location_x\c!righttext,
+ \v!inmargin=>\let\m_page_layouts_page_number_location_h\v!margin
+ \def\m_page_layouts_page_number_location_x{\ifdoublesided\c!margintext\else\c!righttext\fi},
+ \v!margin=>\let\m_page_layouts_page_number_location_h\v!margin
+ \def\m_page_layouts_page_number_location_x{\ifdoublesided\c!margintext\else\c!righttext\fi},
+ \v!atmargin=>\let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!marginedgetext,
+ \v!marginedge=>\let\m_page_layouts_page_number_location_h\v!text
+ \let\m_page_layouts_page_number_location_x\c!marginedgetext]}
+
+\unexpanded\def\strc_pagenumbers_set_location
+ {\edef\p_strc_pagenumbers_location{\directpagenumberingparameter\c!location}%
+ \ifx\p_strc_pagenumbers_location\m_page_layouts_page_number_location
% unchanged
\else
- \let\m_page_layouts_page_number_location_\@@nmlocation
+ \let\m_page_layouts_page_number_location\p_strc_pagenumbers_location
\page_layouts_reset_page_number_location
- \ifx\@@nmlocation\empty
+ \ifx\p_strc_pagenumbers_location\empty
% set otherwise
\else
\page_layouts_identify_page_number_location
@@ -764,9 +766,12 @@
{\begingroup
\setbox\scratchbox\hbox{\ignorespaces\layoutelementparameter\c!marginedgetext\removeunwantedspaces}%
\ifzeropt\wd\scratchbox\else
- \doifelsenothing\@@nmwidth
- {\box\scratchbox\tfskip}
- {\hbox to \@@nmwidth{\box\scratchbox\hss}}%
+ \edef\p_strc_pagenumbers_width{\directpagenumberingparameter\c!width}%
+ \ifx\p_strc_pagenumbers_width\empty
+ \box\scratchbox\tfskip
+ \else
+ \hbox to \p_strc_pagenumbers_width{\box\scratchbox\hss}%
+ \fi
\fi
\endgroup}
@@ -774,13 +779,16 @@
{\begingroup
\setbox\scratchbox\hbox{\ignorespaces\layoutelementparameter\c!marginedgetext\removeunwantedspaces}%
\ifzeropt\wd\scratchbox\else
- \doifelsenothing\@@nmwidth
- {\tfskip\box\scratchbox}
- {\hbox to \@@nmwidth{\hss\box\scratchbox}}%
+ \edef\p_strc_pagenumbers_width{\directpagenumberingparameter\c!width}%
+ \ifx\p_strc_pagenumbers_width\empty
+ \tfskip\box\scratchbox
+ \else
+ \hbox to \p_strc_pagenumbers_width{\hss\box\scratchbox}%
+ \fi
\fi
\endgroup}
-\dosetpagenumberlocation
+\strc_pagenumbers_set_location % initializes
% will go to page-box.mkiv
@@ -839,7 +847,7 @@
\kern\dimexpr\leftmarginwidth+\leftmargindistance\relax
\fi
\endgroup
- \mkprocesspagecontents{#2}%
+ \page_postprocessors_page{#2}%
\settextpagecontent\b_page_layouts_element{#1}{#2}%
\page_backgrounds_add_to_text\b_page_layouts_element
\page_grids_add_to_box\b_page_layouts_element
diff --git a/Master/texmf-dist/tex/context/base/phys-dim.lua b/Master/texmf-dist/tex/context/base/phys-dim.lua
index c47821b1c3e..45a99978dc3 100644
--- a/Master/texmf-dist/tex/context/base/phys-dim.lua
+++ b/Master/texmf-dist/tex/context/base/phys-dim.lua
@@ -82,8 +82,15 @@ local space = P(" ")
local lparent = P("(")
local rparent = P(")")
+local lbrace = P("{")
+local rbrace = P("}")
+
local digits = digit^1
+local powerdigits = plus * C(digits) / context.digitspowerplus
+ + minus * C(digits) / context.digitspowerminus
+ + C(digits) / context.digitspower
+
local ddigitspace = digitspace / "" / context.digitsspace
local ddigit = digits / context.digitsdigit
local dsemicomma = semicolon / "" / context.digitsseparatorspace
@@ -100,11 +107,8 @@ local dnegative = negative / "" / context.digitsnegative
local dhighspace = highspace / "" / context.digitshighspace
local dsomesign = plus / "" / context.digitsplus
+ minus / "" / context.digitsminus
-local dpower = power / "" * (
- plus * C(digits) / context.digitspowerplus
- + minus * C(digits) / context.digitspowerminus
- + C(digits) / context.digitspower
- )
+local dpower = power / "" * ( powerdigits + lbrace * powerdigits * rbrace )
+
local dpadding = padding / "" / context.digitszeropadding -- todo
local dleader = (dpositive + dnegative + dhighspace + dsomesign + dsignspace)^0
@@ -116,27 +120,51 @@ local dnumber = (ddigitspace + ddigit)^1
-- : ; for the moment not used, maybe for invisible fraction . , when no leading number
-local c_p = (ddigitspace^1 * dskipcomma)^0 -- ___,
- * (ddigitspace^0 * ddigit * dintercomma)^0 -- _00, 000,
- * ddigitspace^0 * ddigit^0 -- _00 000
+-- local c_p = (ddigitspace^1 * dskipcomma)^0 -- ___,
+-- * (ddigitspace^0 * ddigit * dintercomma)^0 -- _00, 000,
+-- * ddigitspace^0 * ddigit^0 -- _00 000
+-- * (
+-- dfinalperiod * ddigit -- .00
+-- + dskipperiod * dpadding^1 -- .==
+-- + dsemiperiod * ddigit -- :00
+-- + dsemiperiod * dpadding^1 -- :==
+-- )^0
+-- + ddigit -- 00
+--
+-- local p_c = (ddigitspace^1 * dskipperiod)^0 -- ___.
+-- * (ddigitspace^0 * ddigit * dinterperiod)^0 -- _00. 000.
+-- * ddigitspace^0 * ddigit^0 -- _00 000
+-- * (
+-- dfinalcomma * ddigit -- ,00
+-- + dskipcomma * dpadding^1 -- ,==
+-- + dsemicomma * ddigit -- :00
+-- + dsemicomma * dpadding^1 -- :==
+-- )^0
+-- + ddigit -- 00
+--
+-- fix by WS/SB (needs further testing)
+
+local c_p = (ddigitspace^1 * dskipcomma)^0 -- ___,
+ * (ddigitspace^0 * ddigit * dintercomma)^0 -- _00, 000,
+ * ddigitspace^0 * ddigit^0 -- _00 000
* (
- dfinalperiod * ddigit -- .00
- + dskipperiod * dpadding^1 -- .==
- + dsemiperiod * ddigit -- :00
- + dsemiperiod * dpadding^1 -- :==
+ dfinalperiod * ddigit * (dintercomma * ddigit)^0 -- .00
+ + dskipperiod * dpadding^1 -- .==
+ + dsemiperiod * ddigit * (dintercomma * ddigit)^0 -- :00
+ + dsemiperiod * dpadding^1 -- :==
)^0
- + ddigit -- 00
+ + ddigit -- 00
-local p_c = (ddigitspace^1 * dskipperiod)^0 -- ___.
- * (ddigitspace^0 * ddigit * dinterperiod)^0 -- _00. 000.
- * ddigitspace^0 * ddigit^0 -- _00 000
+local p_c = (ddigitspace^1 * dskipperiod)^0 -- ___.
+ * (ddigitspace^0 * ddigit * dinterperiod)^0 -- _00. 000.
+ * ddigitspace^0 * ddigit^0 -- _00 000
* (
- dfinalcomma * ddigit -- ,00
- + dskipcomma * dpadding^1 -- ,==
- + dsemicomma * ddigit -- :00
- + dsemicomma * dpadding^1 -- :==
+ dfinalcomma * ddigit * (dinterperiod * ddigit)^0 -- 00
+ + dskipcomma * dpadding^1 -- ,==
+ + dsemicomma * ddigit * (dinterperiod * ddigit)^0 -- :00
+ + dsemicomma * dpadding^1 -- :==
)^0
- + ddigit -- 00
+ + ddigit -- 00
local p_c_dparser = math_one + math_two + dleader * p_c * dtrailer * dfinal
local c_p_dparser = math_one + math_two + dleader * c_p * dtrailer * dfinal
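
The rewritten c_p / p_c parsers above (the "fix by WS/SB") extend the fractional part: after the decimal separator a digit run may now be followed by further separator-delimited digit groups instead of a single run. The sketch below, runnable with any Lua that has lpeg (for example texlua), restates that idea with simplified names; digits, group and decimal are assumptions for illustration only, not the phys-dim definitions.

local lpeg = require("lpeg")
local P, R, C = lpeg.P, lpeg.R, lpeg.C

local digits  = R("09")^1
local group   = P(",") -- group separator in the comma-period convention
local decimal = P(".") -- decimal separator

-- old behaviour: grouped integer part, a single digit run after the decimal
local oldnumber = digits * (group * digits)^0 * (decimal * digits)^-1
-- new behaviour: the fraction may also contain separator-delimited groups
local newnumber = digits * (group * digits)^0
                * (decimal * digits * (group * digits)^0)^-1

print(lpeg.match(C(oldnumber) * -1, "1,234.567,89")) -- nil: trailing ",89" is rejected
print(lpeg.match(C(newnumber) * -1, "1,234.567,89")) -- 1,234.567,89
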
@@ -409,6 +437,8 @@ local short_units = { -- I'm not sure about casing
-- a = "ampere",
A = "ampere",
+ min = "minute",
+
[utfchar(0x2103)] = "celsius",
[utfchar(0x2109)] = "fahrenheit",
}
@@ -482,7 +512,7 @@ local unitsNspace = context.unitsNspace
local labels = languages.data.labels
-labels.prefixes = {
+labels.prefixes = allocate {
yocto = { labels = { en = [[y]] } }, -- 10^{-24}
zepto = { labels = { en = [[z]] } }, -- 10^{-21}
atto = { labels = { en = [[a]] } }, -- 10^{-18}
@@ -515,7 +545,7 @@ labels.prefixes = {
root = { labels = { en = [[√]] } }, -- 0x221A
}
-labels.units = {
+labels.units = allocate {
meter = { labels = { en = [[m]] } },
gram = { labels = { en = [[g]] } }, -- strictly kg is the base unit
second = { labels = { en = [[s]] } },
@@ -597,14 +627,14 @@ labels.units = {
micron = { labels = { en = [[\textmu m]] } },
}
-labels.operators = {
+labels.operators = allocate {
times = { labels = { en = [[\unitsTIMES]] } },
solidus = { labels = { en = [[\unitsSOLIDUS]] } },
per = { labels = { en = [[\unitsSOLIDUS]] } },
outof = { labels = { en = [[\unitsOUTOF]] } },
}
-labels.suffixes = {
+labels.suffixes = allocate {
linear = { labels = { en = [[1]] } },
square = { labels = { en = [[2]] } },
cubic = { labels = { en = [[3]] } },
@@ -616,7 +646,7 @@ labels.suffixes = {
local function dimpus(p,u,s)
if trace_units then
- report_units("p: [%s], u: [%s], s: [%s]",p or "?",u or "?",s or "?")
+ report_units("prefix %a, unit %a, suffix %a",p,u,s)
end --
if p ~= "" then
if u ~= "" then
@@ -653,7 +683,7 @@ end
local function dimop(o)
if trace_units then
- report_units("o: [%s]",o or "?")
+ report_units("operator %a",o)
end
if o then
unitsO(o)
@@ -662,7 +692,7 @@ end
local function dimsym(s)
if trace_units then
- report_units("s: [%s]",s or "?")
+ report_units("symbol %a",s)
end
s = symbol_units[s] or s
if s then
@@ -672,7 +702,7 @@ end
local function dimpre(p)
if trace_units then
- report_units("p: [%s]",p or "?")
+ report_units("prefix [%a",p)
end
p = packaged_units[p] or p
if p then
@@ -787,6 +817,8 @@ local function update_parsers() -- todo: don't remap utf sequences
local stop = Cc(nil) / unitsNstop
local space = Cc(nil) / unitsNspace
+ -- todo: avoid \unitsNstart\unitsNstop (weird that it can happen .. now caught at the tex end)
+
local p_c_combinedparser = P { "start",
number = start * dleader * (p_c_dparser + number) * stop,
rule = V("number")^-1 * unitparser,
@@ -849,7 +881,7 @@ local t_shortcuts = {
suffixes = setmetatablenewindex(short_suffixes,trigger),
}
-physics.units.tables = {
+physics.units.tables = allocate {
units = t_units,
shortcuts = t_shortcuts,
}
diff --git a/Master/texmf-dist/tex/context/base/phys-dim.mkiv b/Master/texmf-dist/tex/context/base/phys-dim.mkiv
index 84c3c7a2a05..4efaa65a8a9 100644
--- a/Master/texmf-dist/tex/context/base/phys-dim.mkiv
+++ b/Master/texmf-dist/tex/context/base/phys-dim.mkiv
@@ -216,10 +216,15 @@
\unexpanded\def\digitsperiodsymbol {\csname\??digitsperiod\number\c_phys_digits_method\endcsname}
\unexpanded\def\digitsseparatorspace {\csname\??digitsspace \number\c_phys_digits_method\endcsname}
-\unexpanded\def\digitsfinalcomma {\digitscommasymbol } % more for tracing
+% \unexpanded\def\digitsfinalcomma {\digitscommasymbol } % more for tracing
+% \unexpanded\def\digitsfinalperiod {\digitsperiodsymbol} % more for tracing
+% \unexpanded\def\digitsintermediatecomma {\digitscommasymbol } % more for tracing
+% \unexpanded\def\digitsintermediateperiod {\digitsperiodsymbol} % more for tracing
+
+\unexpanded\def\digitsfinalcomma {\digitsperiodsymbol} % more for tracing
\unexpanded\def\digitsfinalperiod {\digitsperiodsymbol} % more for tracing
\unexpanded\def\digitsintermediatecomma {\digitscommasymbol } % more for tracing
-\unexpanded\def\digitsintermediateperiod {\digitsperiodsymbol} % more for tracing
+\unexpanded\def\digitsintermediateperiod {\digitscommasymbol } % more for tracing
%D The user macro:
@@ -403,6 +408,10 @@
\newtoks \everyunits % we keep the old \units command so we need a longer one
\appendtoks
+ \disablemathpunctuation
+\to \everyunits
+
+\appendtoks
\setuevalue\currentunit{\phys_units_direct{\currentunit}}
\to \everydefineunit
@@ -539,6 +548,16 @@
\fi
\c_phys_units_state\plusfive}
+% This is a hack: for some reason \unit{micro meter} like patterns give
+% \unitsNstart\unitsNstop so there is a buglet in the parser
+
+\let\unitsNstartindeed\unitsNstart
+
+\unexpanded\def\unitsNstart
+ {\doifnextcharelse\unitsNstop\gobbleoneargument\unitsNstartindeed}
+
+% End of hack.
+
\unexpanded\def\unitsNspace
{\space}
@@ -639,12 +658,10 @@
\definelabelclass [prefix] [2]
\definelabelclass [suffix] [2] % This is only a label because we want to show them in a table.
-\ctxlua{
- languages.labels.define("setupprefixtext","prefixes")%
- languages.labels.define("setupunittext","units")%
- languages.labels.define("setupoperatortext","operators")%
- languages.labels.define("setupsuffixtext","suffixes")%
-}
+\ctxcommand{definelabels("prefix", "prefixes" )}
+\ctxcommand{definelabels("unit", "units" )}
+\ctxcommand{definelabels("operator","operators")}
+\ctxcommand{definelabels("suffix", "suffixes" )}
%D You can define additional units:
%D
diff --git a/Master/texmf-dist/tex/context/base/ppchtex.mkii b/Master/texmf-dist/tex/context/base/ppchtex.mkii
index d1209cd97e5..07ca9789ac0 100644
--- a/Master/texmf-dist/tex/context/base/ppchtex.mkii
+++ b/Master/texmf-dist/tex/context/base/ppchtex.mkii
@@ -2130,7 +2130,8 @@
\def\RB{\chemicalrightbottom}%
\def\SL{\chemicalsmashedleft}%
\def\SM{\chemicalsmashedmiddle}%
- \def\SR{\chemicalsmashedright}}
+ \def\SR{\chemicalsmashedright}%
+}
% \reversechemical#1#2#3
%
diff --git a/Master/texmf-dist/tex/context/base/ppchtex.mkiv b/Master/texmf-dist/tex/context/base/ppchtex.mkiv
index a14578cebdb..0f42f91ce9b 100644
--- a/Master/texmf-dist/tex/context/base/ppchtex.mkiv
+++ b/Master/texmf-dist/tex/context/base/ppchtex.mkiv
@@ -474,9 +474,9 @@
\or
\resetchemicalcoordinates
\setbox2\hbox{\ignoreMPboxdepth\getMPdrawing}%
- \wd2=\!!zeropoint
- \ht2=\!!zeropoint
- \dp2=\!!zeropoint
+ \wd2\zeropoint
+ \ht2\zeropoint
+ \dp2\zeropoint
\put {\box2} at 0 0
\endpicture
\popMPdrawing
@@ -515,9 +515,9 @@
\ifMPdrawingdone
\resetchemicalcoordinates
\setbox2\hbox{\ignoreMPboxdepth\getMPdrawing}%
- \wd2=\!!zeropoint
- \ht2=\!!zeropoint
- \dp2=\!!zeropoint
+ \wd2\zeropoint
+ \ht2\zeropoint
+ \dp2\zeropoint
\put {\box2} at 0 0 %
\fi
\endpicture
@@ -576,7 +576,7 @@
\!!widtha=50.8mm
\divide\!!widtha by \@@chemicalresolution\relax
\plotsymbolspacing=\!!widtha
- \setplotsymbol({\vrule\!!height\!!widtha\!!width\!!widtha})%
+ \setplotsymbol({\vrule\s!height\!!widtha\s!width\!!widtha})%
\fi}
% Something for Dirk:
@@ -2149,8 +2149,8 @@
\ifdim\wd4>\dimen2 \dimen0=\wd4 \fi
\chemicaloutermolecule
{#1}
- {\ifdim\ht2>\!!zeropoint\box2\fi} % expands to \empty in test
- {\ifdim\ht4>\!!zeropoint\box4\fi}% % expands to \empty in test
+ {\ifdim\ht2>\zeropoint\box2\fi} % expands to \empty in test
+ {\ifdim\ht4>\zeropoint\box4\fi}% % expands to \empty in test
\egroup}
\def\chemicalsingleouterarrow
diff --git a/Master/texmf-dist/tex/context/base/prop-ini.mkiv b/Master/texmf-dist/tex/context/base/prop-ini.mkiv
index d8d2ea2f4ad..3b1c59b245c 100644
--- a/Master/texmf-dist/tex/context/base/prop-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/prop-ini.mkiv
@@ -37,7 +37,7 @@
\def\properties_define[#1][#2][#3]%
{\properties_obsolete_message
- \setevalue{\??propertytypes#1e}{#2}%
+ \setevalue{\??propertytypes#1}{#2}%
\getvalue{\??propertydefiners#2}[#1][#2][#3]}
\setvalue{\??propertystarters\v!layer }#1{\let\properties_stop\stopviewerlayer\startviewerlayer[#1]}
diff --git a/Master/texmf-dist/tex/context/base/regi-ini.lua b/Master/texmf-dist/tex/context/base/regi-ini.lua
index ec6f812ccb1..d5d278b1649 100644
--- a/Master/texmf-dist/tex/context/base/regi-ini.lua
+++ b/Master/texmf-dist/tex/context/base/regi-ini.lua
@@ -12,11 +12,15 @@ if not modules then modules = { } end modules ['regi-ini'] = {
runtime.</p>
--ldx]]--
-local utfchar, utfgsub = utf.char, utf.gsub
-local char, gsub, format = string.char, string.gsub, string.format
-local next = next
-local insert, remove = table.insert, table.remove
+local commands, context = commands, context
+local utfchar = utf.char
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local char, gsub, format, gmatch, byte, match = string.char, string.gsub, string.format, string.gmatch, string.byte, string.match
+local next = next
+local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
+local concat = table.concat
+local totable = string.totable
local allocate = utilities.storage.allocate
local sequencers = utilities.sequencers
@@ -107,10 +111,10 @@ local function loadregime(mapping,regime)
for eightbit, unicode in next, data do
vector[char(eightbit)] = utfchar(unicode)
end
- report_loading("vector '%s' is loaded",regime)
+ report_loading("vector %a is loaded",regime)
else
vector = false
- report_loading("vector '%s' is unknown",regime)
+ report_loading("vector %a is unknown",regime)
end
mapping[regime] = vector
return vector
@@ -125,8 +129,8 @@ local function loadreverse(t,k)
return t
end
-setmetatableindex(mapping, loadregime)
-setmetatableindex(backmapping, loadreverse)
+setmetatableindex(mapping, loadregime)
+setmetatableindex(backmapping,loadreverse)
local function translate(line,regime)
if line and #line > 0 then
@@ -138,17 +142,51 @@ local function translate(line,regime)
return line
end
+-- local remappers = { }
+--
+-- local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
+-- local t = backmapping[vector]
+-- local remapper = remappers[vector]
+-- if not remapper then
+-- remapper = utf.remapper(t)
+-- remappers[t] = remapper
+-- end
+-- local m = getmetatable(t)
+-- setmetatableindex(t, function(t,k)
+-- local v = default or "?"
+-- t[k] = v
+-- return v
+-- end)
+-- str = remapper(str)
+-- setmetatable(t,m)
+-- return str
+-- end
+--
+-- -- much faster (but only matters when we have > 10K calls
+
+local cache = { } -- if really needed we can copy vectors and hash defaults
+
+setmetatableindex(cache, function(t,k)
+ local v = { remappers = { } }
+ t[k] = v
+ return v
+end)
+
local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
- local t = backmapping[vector]
- local m = getmetatable(t)
- setmetatableindex(t, function(t,k)
- local v = default or "?"
- t[k] = v
- return v
- end)
- str = utfgsub(str,".",t)
- setmetatable(t,m)
- return str
+ local d = default or "?"
+ local c = cache[vector].remappers
+ local r = c[d]
+ if not r then
+ local t = fastcopy(backmapping[vector])
+ setmetatableindex(t, function(t,k)
+ local v = d
+ t[k] = v
+ return v
+ end)
+ r = utf.remapper(t)
+ c[d] = r
+ end
+ return r(str)
end
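
The toregime rewrite above replaces the per-call metatable juggling with a cache of remapping closures, one per regime vector and default character. Below is a minimal, generic sketch of that memoization pattern; getremapper, the byte-wise gsub and the toy mapping are simplifications assumed for illustration, whereas the real code builds its remappers with utf.remapper over a fastcopy of the backmapping vectors.

-- cache[vector] auto-creates a slot holding one remapper per default character
local cache = setmetatable({ }, { __index = function(t, k)
    local v = { remappers = { } }
    t[k] = v
    return v
end })

local function getremapper(vector, mapping, default)
    local remappers = cache[vector].remappers
    local remapper  = remappers[default]
    if not remapper then
        -- unknown characters fall back to the default, as in toregime
        local map = setmetatable({ }, { __index = function() return default end })
        for k, v in pairs(mapping) do map[k] = v end
        remapper = function(str)
            return (str:gsub(".", map)) -- byte-wise, good enough for this sketch
        end
        remappers[default] = remapper
    end
    return remapper
end

local demo = getremapper("demo", { a = "A", b = "B" }, "?")
print(demo("abc")) -- AB?
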
local function disable()
@@ -186,22 +224,25 @@ function regimes.process(str,filename,currentline,noflines,coding)
return str
end
-function regimes.push()
+local function push()
level = level + 1
if trace_translating then
- report_translating("pushing level: %s",level)
+ report_translating("pushing level %s",level)
end
end
-function regimes.pop()
+local function pop()
if level > 0 then
if trace_translating then
- report_translating("popping level: %s",level)
+ report_translating("popping level %s",level)
end
level = level - 1
end
end
+regimes.push = push
+regimes.pop = pop
+
sequencers.prependaction(textlineactions,"system","regimes.process")
sequencers.disableaction(textlineactions,"regimes.process")
@@ -210,6 +251,9 @@ sequencers.disableaction(textlineactions,"regimes.process")
commands.enableregime = enable
commands.disableregime = disable
+commands.pushregime = push
+commands.popregime = pop
+
function commands.currentregime()
context(currentregime)
end
@@ -219,7 +263,7 @@ local stack = { }
function commands.startregime(regime)
insert(stack,currentregime)
if trace_translating then
- report_translating("start: '%s'",regime)
+ report_translating("start using %a",regime)
end
enable(regime)
end
@@ -228,12 +272,101 @@ function commands.stopregime()
if #stack > 0 then
local regime = remove(stack)
if trace_translating then
- report_translating("stop: '%s'",regime)
+ report_translating("stop using %a",regime)
end
enable(regime)
end
end
+-- Next we provide some hacks. Unfortunately we run into crappy encoded
+-- (read : mixed) encoded xml files that have these ë ä ö ü sequences
+-- instead of ë ä ö ü
+
+local patterns = { }
+
+-- function regimes.cleanup(regime,str)
+-- local p = patterns[regime]
+-- if p == nil then
+-- regime = regime and synonyms[regime] or regime or currentregime
+-- local vector = regime ~= "utf" and mapping[regime]
+-- if vector then
+-- local list = { }
+-- for k, uchar in next, vector do
+-- local stream = totable(uchar)
+-- for i=1,#stream do
+-- stream[i] = vector[stream[i]]
+-- end
+-- list[concat(stream)] = uchar
+-- end
+-- p = lpeg.append(list,nil,true)
+-- p = Cs((p+1)^0)
+-- -- lpeg.print(p) -- size 1604
+-- else
+-- p = false
+-- end
+-- patterns[vector] = p
+-- end
+-- return p and lpegmatch(p,str) or str
+-- end
+--
+-- twice as fast and much less lpeg bytecode
+
+function regimes.cleanup(regime,str)
+ local p = patterns[regime]
+ if p == nil then
+ regime = regime and synonyms[regime] or regime or currentregime
+ local vector = regime ~= "utf" and mapping[regime]
+ if vector then
+ local utfchars = { }
+ local firsts = { }
+ for k, uchar in next, vector do
+ local stream = { }
+ local split = totable(uchar)
+ local nofsplits = #split
+ if nofsplits > 1 then
+ local first
+ for i=1,nofsplits do
+ local u = vector[split[i]]
+ if not first then
+ first = firsts[u]
+ if not first then
+ first = { }
+ firsts[u] = first
+ end
+ end
+ stream[i] = u
+ end
+ local nofstream = #stream
+ if nofstream > 1 then
+ first[#first+1] = concat(stream,2,nofstream)
+ utfchars[concat(stream)] = uchar
+ end
+ end
+ end
+ p = P(false)
+ for k, v in next, firsts do
+ local q = P(false)
+ for i=1,#v do
+ q = q + P(v[i])
+ end
+ p = p + P(k) * q
+ end
+ p = Cs(((p+1)/utfchars)^1)
+ -- lpeg.print(p) -- size: 1042
+ else
+ p = false
+ end
+ patterns[regime] = p
+ end
+ return p and lpegmatch(p,str) or str
+end
+
+-- local map = require("regi-cp1252")
+-- local old = [[test Ã« Ã¤ Ã¶ Ã¼ crap]]
+-- local new = correctencoding(map,old)
+--
+-- print(old,new)
+
-- obsolete:
--
-- function regimes.setsynonym(synonym,target)
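
The regimes.cleanup shown above repairs text in which UTF-8 was decoded as an eight-bit regime and re-encoded, so a character such as ë arrives as the two-character sequence Ã«. Its comment notes that grouping the replacement keys by their first character keeps the generated lpeg small; the fragment below is a much simplified, hypothetical rendering of that trick with a hard-coded replacement table, whereas the real code derives the table from the regime vectors.

-- hypothetical, much simplified version of the first-character grouping trick
local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

local replacements = { ["Ã«"] = "ë", ["Ã¤"] = "ä" } -- double encoded -> proper utf

local firsts = { }
for key in pairs(replacements) do
    local first, rest = key:sub(1, 2), key:sub(3) -- first utf-8 character is two bytes here
    local t = firsts[first]
    if not t then
        t = { }
        firsts[first] = t
    end
    t[#t + 1] = rest
end

local p = P(false)
for first, rests in pairs(firsts) do
    local q = P(false)
    for i = 1, #rests do
        q = q + P(rests[i])
    end
    p = p + P(first) * q -- one branch per distinct first character
end

local cleaner = Cs(((p / replacements) + 1)^0)

print(lpeg.match(cleaner, "test Ã« Ã¤ crap")) -- test ë ä crap
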
diff --git a/Master/texmf-dist/tex/context/base/s-abr-01.tex b/Master/texmf-dist/tex/context/base/s-abr-01.tex
index d6685bd71ad..026f2ea09d8 100644
--- a/Master/texmf-dist/tex/context/base/s-abr-01.tex
+++ b/Master/texmf-dist/tex/context/base/s-abr-01.tex
@@ -19,279 +19,298 @@
\protect
-\logo [MKI] {MkI} % joke
-\logo [MKII] {MkII}
-\logo [MKIII] {MkIII} % joke
-\logo [MKIV] {MkIV}
-\logo [MKVI] {MkVI}
-\logo [MPII] {MpII}
-\logo [MPIV] {MpIV}
+\logo [MKI] {MkI} % joke
+\logo [MKII] {MkII}
+\logo [MKIII] {MkIII} % joke
+\logo [MKIV] {MkIV}
+\logo [MKVI] {MkVI}
+\logo [MKIX] {MkIX}
+\logo [MKXI] {MkXI}
+\logo [MKIC] {MkIC}
+\logo [MKCI] {MkCI}
+\logo [MPII] {MpII}
+\logo [MPIV] {MpIV}
-%logo [FGA] {fga}
-%logo [FGBBS] {fgbbs}
-\logo [ACROBAT] {Acro\-bat}
-\logo [AFM] {afm}
-\logo [API] {api}
-\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
-\logo [ALGOL] {ALGOL}
-\logo [AMS] {ams}
-\logo [AMSLATEX] {\AmSLaTeX}
-\logo [AMSTEX] {\AmSTeX}
-\logo [ANSI] {ansi}
-\logo [ARABTEX] {Arab\TeX}
-\logo [ASCII] {ascii}
-\logo [ASCIITEX] {ascii\TeX}
-\logo [BACHOTEX] {Bacho\TeX}
-\logo [BIBTEX] {bib\TeX}
-\logo [BLUESKY] {BlueSky}
-\logo [BMP] {bmp}
-\logo [BSD] {bsd}
-\logo [CCODE] {C}
-\logo [CPLUSPLUS] {C\high{++}}
-\logo [CALCMATH] {CalcMath}
-\logo [CD] {cd}
-\logo [CPU] {cpu}
-\logo [CDROM] {cdrom}
-\logo [CID] {cid}
-\logo [CJK] {cjk}
-\logo [CMR] {cmr}
-\logo [CLD] {cld}
-\logo [CMYK] {cmyk}
-\logo [CODHOST] {CodHost}
-\logo [CONTEXT] {\ConTeXt}
-\logo [CSS] {css}
-\logo [CTAN] {ctan}
-\logo [CTXTOOLS] {ctxtools}
-\logo [CWEB] {cweb}
-\logo [DANTE] {Dante}
-\logo [DISTILLER] {distiller}
-\logo [DRATEX] {Dra\TeX}
-\logo [DSC] {dsc}
-\logo [DTD] {dtd}
-\logo [DTK] {dtk}
-\logo [DTP] {dtp}
-\logo [DVD] {dvd}
-\logo [DVI] {dvi}
-\logo [DVIPDFM] {dvipdfm}
-\logo [DVIPDFMX] {dvipdfmx}
-\logo [DVIPOS] {dvipos}
-\logo [DVIPS] {dvips}
-\logo [DVIPSONE] {dvipsone}
-\logo [DVISCR] {dviscr}
-\logo [DVIWINDO] {dviwindo}
-\logo [EBCDIC] {ebcdic}
-\logo [EC] {ec}
-\logo [EIFFEL] {Eiffel}
-\logo [EMACS] {emacs}
-\logo [EMTEX] {em\TeX}
-\logo [ENCODING] {enc}
-\logo [ENCTEX] {enc\TeX}
-\logo [EPS] {eps}
-\logo [ETEX] {\eTeX}
+%logo [FGA] {fga}
+%logo [FGBBS] {fgbbs}
+\logo [ACROBAT] {Acro\-bat}
+\logo [AFM] {afm}
+\logo [API] {api}
+\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
+\logo [ALGOL] {ALGOL}
+\logo [AMS] {ams}
+\logo [AMSLATEX] {\AmSLaTeX}
+\logo [AMSTEX] {\AmSTeX}
+\logo [ANSI] {ansi}
+\logo [ARABTEX] {Arab\TeX}
+\logo [ASCII] {ascii}
+\logo [ASCIITEX] {ascii\TeX}
+\logo [BACHOTEX] {Bacho\TeX}
+\logo [BIBTEX] {bib\TeX}
+\logo [BLUESKY] {BlueSky}
+\logo [BMP] {bmp}
+\logo [BSD] {bsd}
+\logo [CCODE] {C}
+\logo [CPLUSPLUS] {C\high{++}}
+\logo [CALCMATH] {CalcMath}
+\logo [CLD] {cld}
+\logo [CD] {cd}
+\logo [CPU] {cpu}
+\logo [CDROM] {cdrom}
+\logo [CID] {cid}
+\logo [CJK] {cjk}
+\logo [CMR] {cmr}
+\logo [CLD] {cld}
+\logo [CMYK] {cmyk}
+\logo [CODHOST] {CodHost}
+\logo [CONTEXT] {\ConTeXt}
+\logo [CSS] {css}
+\logo [CTAN] {ctan}
+\logo [CTXTOOLS] {ctxtools}
+\logo [CWEB] {cweb}
+\logo [CSTUG] {cstug}
+\logo [DANTE] {Dante}
+\logo [DISTILLER] {distiller}
+\logo [DRATEX] {Dra\TeX}
+\logo [DSC] {dsc}
+\logo [DTD] {dtd}
+\logo [DTK] {dtk}
+\logo [DTP] {dtp}
+\logo [DVD] {dvd}
+\logo [DVI] {dvi}
+\logo [DVIPDFM] {dvipdfm}
+\logo [DVIPDFMX] {dvipdfmx}
+\logo [DVIPOS] {dvipos}
+\logo [DVIPS] {dvips}
+\logo [DVIPSONE] {dvipsone}
+\logo [DVISCR] {dviscr}
+\logo [DVIWINDO] {dviwindo}
+\logo [EBCDIC] {ebcdic}
+\logo [EC] {ec}
+\logo [EIFFEL] {Eiffel}
+\logo [EMACS] {emacs}
+\logo [EMTEX] {em\TeX}
+\logo [ENCODING] {enc}
+\logo [ENCTEX] {enc\TeX}
+\logo [EPS] {eps}
+\logo [ETEX] {\eTeX}
\logo [EUROBACHOTEX] {EuroBacho\TeX}
-\logo [EUROMATH] {EuroMath}
-\logo [EUROTEX] {Euro\TeX}
-\logo [EXAMPLE] {eXaMpLe}
-\logo [EXAMPLED] {exampled}
-\logo [EXAMPLEQ] {exampleq}
-\logo [EXAMPLER] {exampler}
-\logo [EXAMPLET] {examplet}
-\logo [EXAMPLEX] {examplex}
-\logo [EXIMPLE] {eXiMpLe}
-\logo [FLAC] {flac}
-\logo [FAQ] {faq}
-\logo [FDF] {fdf}
-\logo [FONTFORGE] {FontForge}
-\logo [FOXET] {foXet}
-\logo [FPTEX] {fp\TeX}
-\logo [FREEBSD] {FreeBSD}
-\logo [FTP] {ftp}
-\logo [GHOSTSCRIPT]{Ghost\-script}
-\logo [GHOSTVIEW] {Ghost\-view}
-\logo [GIF] {gif}
-\logo [GNU] {gnu}
-\logo [GNUPLOT] {gnuplot}
-\logo [GS] {Ghost\-Script}
-\logo [GUST] {Gust}
-\logo [GWTEX] {gw\TeX}
-\logo [HSB] {hsb}
-\logo [HTML] {html}
-\logo [HTTP] {http}
-\logo [HZ] {hz}
-\logo [IBM] {ibm}
-\logo [IMAGEMAGICK]{ImageMagick}
-\logo [INITEX] {ini\TeX}
-\logo [INRSTEX] {inrs\TeX}
-\logo [IO] {io}
-\logo [IRCNET] {IRCnet}
-\logo [ISO] {iso}
-\logo [JAVA] {Java}
-\logo [JAVASCRIPT] {Java\-Script}
-\logo [JPEG] {jpeg}
-\logo [JPG] {jpg}
-\logo [KPATHSEA] {kpathsea}
-\logo [KPSE] {kpse}
-\logo [KPSEWHICH] {kpsewhich}
-\logo [MKTEXLSR] {mktexlsr}
-\logo [LAMSTEX] {\LamSTeX}
-\logo [LATEX] {\LaTeX}
-\logo [LATEXTE] {\LaTeX2e}
-\logo [LATEXTN] {\LaTeX2.09}
-\logo [LCD] {lcd}
-\logo [LINUX] {linux}
-\logo [LISP] {Lisp}
-\logo [LPEG] {lpeg}
-\logo [LUA] {Lua}
-\logo [LUAJIT] {LuaJIT}
-\logo [LUATEX] {Lua\TeX}
-\logo [LUATOOLS] {luatools}
-\logo [MACOSX] {MacOSX}
-\logo [MACROTEX] {Macro\TeX}
-\logo [MAKEMPY] {MakeMPY}
-\logo [MAPPING] {map}
-\logo [MAPS] {Maps}
-\logo [MATHML] {MathML}
-\logo [METAFONT] {\MetaFont}
-\logo [METAPOST] {\MetaPost}
-\logo [METATEX] {Meta\TeX}
-\logo [MIKTEX] {Mik\TeX}
-\logo [MLTEX] {ml\TeX}
-\logo [METATYPE] {MetaType1}
-\logo [MODULA] {Modula}
-\logo [MOV] {mov}
-\logo [MPS] {mps}
-\logo [MPTOPDF] {mptopdf}
-\logo [MPLIB] {mplib}
-\logo [MSDOS] {msdos}
-\logo [MICROSOFT] {Microsoft}
-\logo [MSWINDOWS] {MS~Windows}
-\logo [MTXRUN] {mtxrun}
-\logo [MTXTOOLS] {mtxtools}
-\logo [NETPBM] {NetPBM}
-\logo [NTG] {ntg}
-\logo [NTS] {nts}
-\logo [OFM] {ofm}
-\logo [OMEGA] {Omega}
-\logo [OPENMATH] {OpenMath}
-\logo [OPENTYPE] {OpenType}
-\logo [OPI] {opi}
-\logo [OTF] {otf}
-\logo [OTP] {otp}
-\logo [OVF] {ovf}
-\logo [PASCAL] {Pascal}
-\logo [PCTEX] {pc\TeX}
-\logo [PFA] {pfa}
-\logo [PFB] {pfb}
-\logo [PDF] {pdf}
-\logo [PDFETEX] {pdfe\TeX}
-\logo [PDFTEX] {pdf\TeX}
-\logo [PDFTOOLS] {pdftools}
-\logo [PDFTOPS] {pdftops}
-\logo [PERL] {Perl}
-\logo [PERLTK] {Perl/Tk}
-\logo [PICTEX] {\PiCTeX}
-\logo [PK] {pk}
-\logo [PLAIN] {Plain}
-\logo [PNG] {png}
-\logo [POSIX] {posix}
-\logo [POSTSCRIPT] {Post\-Script}
-\logo [PPCHTEX] {\PPCHTeX}
-\logo [PRAGMA] {Pragma ADE}
-\logo [PRESS] {press}
-\logo [PRIFIL] {prifil}
-\logo [PS] {Post\-Script}
-\logo [PSCHECK] {pscheck}
-\logo [PSTOEDIT] {pstoedit}
-\logo [PSTOPAGE] {pstopage}
-\logo [PSTOPDF] {pstopdf}
-\logo [PSTRICKS] {pstricks}
-\logo [RAM] {ram}
-\logo [READER] {Acro\-bat Reader}
-\logo [RELAXNG] {Relax\kern.125emNG}
-\logo [RGB] {rgb}
-\logo [RLXTOOLS] {rlxtools}
-\logo [RUBY] {Ruby}
-\logo [SCITE] {SciTE}
-\logo [SGML] {sgml}
-\logo [SI] {si}
-\logo [SQL] {sql}
-\logo [SVG] {svg}
-\logo [TABLE] {\TaBlE}
-\logo [TCPIP] {tcp/ip}
-\logo [TDS] {tds} % no sc te
-\logo [TETEX] {te\TeX} % no sc te
-\logo [TEX] {\TeX}
-\logo [TEXADRES] {\TeX adress}
-\logo [TEXBASE] {\TeX base}
-\logo [TEXEDIT] {\TeX edit}
-\logo [TEXEXEC] {\TeX exec}
-\logo [TEXFONT] {\TeX font}
-\logo [TEXFORM] {\TeX form}
-\logo [TEXLIVE] {\TeX Live}
-\logo [TEXLUA] {\TeX Lua}
-\logo [TEXMF] {texmf}
-\logo [TEXMFSTART] {texmfstart}
-\logo [TEXNL] {tex-nl}
-\logo [TEXSHOW] {\TeX show}
-\logo [TEXSPELL] {\TeX spell}
-\logo [TEXGYRE] {\TeX\ Gyre}
-\logo [TEXSYNC] {texsync}
-\logo [TEXTMATE] {TextMate}
-\logo [TEXTOOLS] {\TeX tools}
-\logo [TEXUTIL] {\TeX util}
-\logo [TEXWORK] {\TeX work}
-\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT}
-\logo [TFM] {tfm}
-\logo [TIF] {tif}
-\logo [TIFF] {tiff}
-\logo [TIFFINFO] {tiffinfo}
-\logo [TIFFTAGS] {tifftags}
-\logo [TMFTOOLS] {tmftools}
-\logo [TPIC] {tpic}
-\logo [TPM] {tpm}
-\logo [TRUETYPE] {TrueType}
-\logo [TTC] {ttc}
-\logo [TTF] {ttf}
-\logo [TUG] {tug}
-\logo [TUGBOAT] {Tug\-Boat}
-\logo [TUGNEWS] {Tug\-News}
-\logo [TYPEONE] {Type1}
-\logo [UCS] {ucs}
-\logo [UNICODE] {Uni\-code}
-\logo [UNIX] {Unix}
-\logo [URI] {uri}
-\logo [URL] {url}
-\logo [USA] {usa}
-\logo [USENET] {usenet}
-\logo [UTF] {utf}
-\logo [USB] {usb}
-\logo [VF] {vf}
-\logo [WDT] {wdt}
-\logo [WEB] {web}
-\logo [WEBC] {web2c}
-\logo [WIKI] {Wiki}
-\logo [WINDOWS] {Windows}
-\logo [WINNT] {WinNT}
-\logo [WINNX] {Win9x}
-\logo [WWW] {www}
-\logo [WTHREEC] {W3C}
-\logo [WYSIWYG] {wysiwyg}
-\logo [XDVI] {Xdvi}
-\logo [XETEX] {\XeTeX}
-\logo [XFDF] {xfdf}
-\logo [XHTML] {xhtml}
-\logo [XINDY] {Xindy}
-\logo [XML] {xml}
-\logo [XPATH] {xpath}
-\logo [XMLTOOLS] {xmltools}
-\logo [XPDFETEX] {xpdfe\TeX}
-\logo [XSL] {xsl}
-\logo [XSLFO] {xsl-fo}
-\logo [XSLT] {xslt}
-\logo [XSLTPROC] {xsltproc}
-\logo [XYPIC] {XYPIC} % wrong logo
-\logo [YandY] {y\&y}
-\logo [ZIP] {zip}
+\logo [EUROMATH] {EuroMath}
+\logo [EUROTEX] {Euro\TeX}
+\logo [EXAMPLE] {eXaMpLe}
+\logo [EXAMPLED] {exampled}
+\logo [EXAMPLEQ] {exampleq}
+\logo [EXAMPLER] {exampler}
+\logo [EXAMPLET] {examplet}
+\logo [EXAMPLEX] {examplex}
+\logo [EXIMPLE] {eXiMpLe}
+\logo [FLAC] {flac}
+\logo [FAQ] {faq}
+\logo [FDF] {fdf}
+\logo [FONTFORGE] {FontForge}
+\logo [FOXET] {foXet}
+\logo [FPTEX] {fp\TeX}
+\logo [FREEBSD] {FreeBSD}
+\logo [FTP] {ftp}
+\logo [GHOSTSCRIPT] {Ghost\-script}
+\logo [GHOSTVIEW] {Ghost\-view}
+\logo [GIF] {gif}
+\logo [GNU] {gnu}
+\logo [GNUPLOT] {gnuplot}
+\logo [GS] {Ghost\-Script}
+\logo [GUST] {Gust}
+\logo [GCC] {gcc}
+\logo [GWTEX] {gw\TeX}
+\logo [HSB] {hsb}
+\logo [HTML] {html}
+\logo [HTTP] {http}
+\logo [HZ] {hz}
+\logo [IBM] {ibm}
+\logo [IMAGEMAGICK] {ImageMagick}
+\logo [INITEX] {ini\TeX}
+\logo [INRSTEX] {inrs\TeX}
+\logo [IO] {io}
+\logo [IRCNET] {IRCnet}
+\logo [ISO] {iso}
+\logo [JAVA] {Java}
+\logo [JAVASCRIPT] {Java\-Script}
+\logo [JPEG] {jpeg}
+\logo [JPG] {jpg}
+\logo [JBIG] {jbig}
+\logo [KPATHSEA] {kpathsea}
+\logo [KPSE] {kpse}
+\logo [KVM] {kvm}
+\logo [KPSEWHICH] {kpsewhich}
+\logo [MKTEXLSR] {mktexlsr}
+\logo [MYSQL] {MySQL}
+\logo [LAMSTEX] {\LamSTeX}
+\logo [LATEX] {\LaTeX}
+\logo [LATEXTE] {\LaTeX2e}
+\logo [LATEXTN] {\LaTeX2.09}
+\logo [LCD] {lcd}
+\logo [LINUX] {linux}
+\logo [LISP] {Lisp}
+\logo [LPEG] {lpeg}
+\logo [LUA] {Lua}
+\logo [LUAJIT] {LuaJIT}
+\logo [LUAJITTEX] {Luajit\TeX}
+\logo [LUATEX] {Lua\TeX}
+\logo [LUATOOLS] {luatools}
+\logo [LMX] {lmx}
+\logo [MACOSX] {MacOSX}
+\logo [MACROTEX] {Macro\TeX}
+\logo [MAKEMPY] {MakeMPY}
+\logo [MAPPING] {map}
+\logo [MAPS] {Maps}
+\logo [MATHML] {MathML}
+\logo [METAFONT] {\MetaFont}
+\logo [METAPOST] {\MetaPost}
+\logo [METATEX] {Meta\TeX}
+\logo [MIKTEX] {Mik\TeX}
+\logo [MINGW] {MingW}
+\logo [MLTEX] {ml\TeX}
+\logo [METATYPE] {MetaType1}
+\logo [MODULA] {Modula}
+\logo [MOV] {mov}
+\logo [MPS] {mps}
+\logo [MPTOPDF] {mptopdf}
+\logo [MPLIB] {mplib}
+\logo [MSDOS] {msdos}
+\logo [MICROSOFT] {Microsoft}
+\logo [MSWINDOWS] {MS~Windows}
+\logo [MSWORD] {MS~Word}
+\logo [MTXRUN] {mtxrun}
+\logo [MTXTOOLS] {mtxtools}
+\logo [NETPBM] {NetPBM}
+\logo [NTG] {ntg}
+\logo [NTS] {nts}
+\logo [OFM] {ofm}
+\logo [OMEGA] {Omega}
+\logo [OPENMATH] {OpenMath}
+\logo [OPENTYPE] {OpenType}
+\logo [OPI] {opi}
+\logo [OTEX] {Oriental \TeX}
+\logo [OTF] {otf}
+\logo [OTP] {otp}
+\logo [OVF] {ovf}
+\logo [PASCAL] {Pascal}
+\logo [PCTEX] {pc\TeX}
+\logo [PFA] {pfa}
+\logo [PFB] {pfb}
+\logo [PDF] {pdf}
+\logo [PDFETEX] {pdfe\TeX}
+\logo [PDFTEX] {pdf\TeX}
+\logo [PDFTOOLS] {pdftools}
+\logo [PDFTOPS] {pdftops}
+\logo [PERL] {Perl}
+\logo [PERLTK] {Perl/Tk}
+\logo [PICTEX] {\PiCTeX}
+\logo [PK] {pk}
+\logo [PLAIN] {Plain}
+\logo [PNG] {png}
+\logo [POSIX] {posix}
+\logo [POSTSCRIPT] {Post\-Script}
+\logo [PPCHTEX] {\PPCHTeX}
+\logo [PRAGMA] {Pragma ADE}
+\logo [PRESS] {press}
+\logo [PRIFIL] {prifil}
+\logo [PS] {Post\-Script}
+\logo [PSCHECK] {pscheck}
+\logo [PSTOEDIT] {pstoedit}
+\logo [PSTOPAGE] {pstopage}
+\logo [PSTOPDF] {pstopdf}
+\logo [PSTRICKS] {pstricks}
+\logo [RAM] {ram}
+\logo [READER] {Acro\-bat Reader}
+\logo [RELAXNG] {Relax\kern.125emNG}
+\logo [RGB] {rgb}
+\logo [RLXTOOLS] {rlxtools}
+\logo [RUBY] {Ruby}
+\logo [SCITE] {SciTE}
+\logo [SGML] {sgml}
+\logo [SI] {si}
+\logo [SQL] {sql}
+\logo [SSD] {ssd}
+\logo [SVG] {svg}
+\logo [SWIG] {swig}
+\logo [SWIGLIB] {SwigLib}
+\logo [TABLE] {\TaBlE}
+\logo [TCPIP] {tcp/ip}
+\logo [TDS] {tds} % no sc te
+\logo [TETEX] {te\TeX} % no sc te
+\logo [TEX] {\TeX}
+\logo [TEXADRES] {\TeX adress}
+\logo [TEXBASE] {\TeX base}
+\logo [TEXEDIT] {\TeX edit}
+\logo [TEXEXEC] {\TeX exec}
+\logo [TEXFONT] {\TeX font}
+\logo [TEXFORM] {\TeX form}
+\logo [TEXLIVE] {\TeX Live}
+\logo [TEXLUA] {\TeX Lua}
+\logo [TEXMF] {texmf}
+\logo [TEXMFSTART] {texmfstart}
+\logo [TEXNL] {tex-nl}
+\logo [TEXSHOW] {\TeX show}
+\logo [TEXSPELL] {\TeX spell}
+\logo [TEXGYRE] {\TeX\ Gyre}
+\logo [TEXSYNC] {texsync}
+\logo [TEXTMATE] {TextMate}
+\logo [TEXTOOLS] {\TeX tools}
+\logo [TEXUTIL] {\TeX util}
+\logo [TEXWORK] {\TeX work}
+\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT}
+\logo [TFM] {tfm}
+\logo [TIF] {tif}
+\logo [TIFF] {tiff}
+\logo [TIFFINFO] {tiffinfo}
+\logo [TIFFTAGS] {tifftags}
+\logo [TMFTOOLS] {tmftools}
+\logo [TPIC] {tpic}
+\logo [TPM] {tpm}
+\logo [TRUETYPE] {TrueType}
+\logo [TTC] {ttc}
+\logo [TTF] {ttf}
+\logo [TUG] {tug}
+\logo [TUGBOAT] {Tug\-Boat}
+\logo [TUGNEWS] {Tug\-News}
+\logo [TYPEONE] {Type1}
+\logo [UCS] {ucs}
+\logo [UNICODE] {Uni\-code}
+\logo [UNIX] {Unix}
+\logo [URI] {uri}
+\logo [URL] {url}
+\logo [USA] {usa}
+\logo [USENET] {usenet}
+\logo [UTF] {utf}
+\logo [USB] {usb}
+\logo [VF] {vf}
+\logo [WDT] {wdt}
+\logo [WEB] {web}
+\logo [WEBC] {web2c}
+\logo [WIKI] {Wiki}
+\logo [WINDOWS] {Windows}
+\logo [WINNT] {WinNT}
+\logo [WINNX] {Win9x}
+\logo [WWW] {www}
+\logo [WTHREEC] {W3C}
+\logo [WYSIWYG] {wysiwyg}
+\logo [XDVI] {Xdvi}
+\logo [XETEX] {\XeTeX}
+\logo [XFDF] {xfdf}
+\logo [XHTML] {xhtml}
+\logo [XINDY] {Xindy}
+\logo [XML] {xml}
+\logo [XPATH] {xpath}
+\logo [XMLTOOLS] {xmltools}
+\logo [XPDFETEX] {xpdfe\TeX}
+\logo [XSL] {xsl}
+\logo [XSLFO] {xsl-fo}
+\logo [XSLT] {xslt}
+\logo [XSLTPROC] {xsltproc}
+\logo [XYPIC] {XYPIC} % wrong logo
+\logo [VMWARE] {VMWare}
+\logo [YandY] {y\&y}
+\logo [ZIP] {zip}
\def\METAFUN {\MetaFun}
diff --git a/Master/texmf-dist/tex/context/base/s-abr-04.tex b/Master/texmf-dist/tex/context/base/s-abr-04.tex
index ab3dc94cdc9..dcd93c6f151 100644
--- a/Master/texmf-dist/tex/context/base/s-abr-04.tex
+++ b/Master/texmf-dist/tex/context/base/s-abr-04.tex
@@ -46,6 +46,7 @@
\logo [CCODE] {c}
\logo [CALCMATH] {CalcMath}
\logo [CD] {cd}
+\logo [CLD] {cld}
\logo [CPU] {cpu}
\logo [CDROM] {cdrom}
\logo [CID] {cid}
@@ -122,6 +123,7 @@
\logo [JAVASCRIPT] {Java\-Script}
\logo [JPEG] {jpeg}
\logo [JPG] {jpg}
+\logo [JBIG] {jbig}
\logo [KPATHSEA] {kpathsea}
\logo [KPSE] {kpse}
\logo [KPSEWHICH] {kpsewhich}
@@ -136,6 +138,7 @@
\logo [LUA] {Lua}
\logo [LUAJIT] {LuaJIT}
\logo [LUATEX] {Lua\TeX}
+\logo [LUAJITTEX] {Luajit\TeX}
\logo [LUATOOLS] {luatools}
\logo [MACOSX] {MacOSX}
\logo [MACROTEX] {Macro\TeX}
diff --git a/Master/texmf-dist/tex/context/base/s-art-01.mkiv b/Master/texmf-dist/tex/context/base/s-art-01.mkiv
index 10b4de9ab1c..046c35cc1e4 100644
--- a/Master/texmf-dist/tex/context/base/s-art-01.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-art-01.mkiv
@@ -2,6 +2,8 @@
% \showframe
+\unprotect
+
\setuplayout
[\c!topspace=2cm,
\c!bottomspace=2.5cm,
@@ -18,7 +20,8 @@
\setuphead
[\v!chapter]
[\c!style=\bfc,
- \c!headerstate=\v!high]
+ \c!headerstate=\v!high,
+ \c!interaction=\v!all]
\setuphead
[\v!section]
@@ -33,4 +36,9 @@
[\c!style=\bf,
\c!after=]
+\setuplist
+ [\c!interaction=\v!all]
+
+\protect
+
\stopmodule
diff --git a/Master/texmf-dist/tex/context/base/s-def-01.mkiv b/Master/texmf-dist/tex/context/base/s-def-01.mkiv
index 1c8ee9469d2..49e585bd018 100644
--- a/Master/texmf-dist/tex/context/base/s-def-01.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-def-01.mkiv
@@ -4,7 +4,7 @@
\startsetups defaults:frontpart:pagenumbers:roman
\defineconversionset[\c!frontpart:\c!pagenumber][][romannumerals]
- \setupuserpagenumber[\c!way=\v!byblock]
+ \setupuserpagenumber[\c!way=\v!by\v!block]
\stopsetups
\protect
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-10.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-10.mkiv
index 0d4a608a485..0edb19120f9 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-10.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-10.mkiv
@@ -135,7 +135,7 @@ end
\page
\egroup}
-\doifnotmode{demo} {\endinput}
+\continueifinputfile{s-fnt-10.mkiv}
\starttext
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-20.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-20.mkiv
index a78f62555c6..5840430990f 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-20.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-20.mkiv
@@ -85,10 +85,10 @@
rlig=yes,liga=yes,dlig=yes,
mark=yes,mkmk=yes,kern=yes,curs=yes]
-\setvalue{otftracker-direction-arabtype}{-1}
-\setvalue{otftracker-direction-husayni}{-1}
-\setvalue{otftracker-direction-simplenaskhi}{-1}
-\setvalue{otftracker-direction-default}{0}
+\setvalue{otftracker-direction-arabtype}{r2l}
+\setvalue{otftracker-direction-husayni}{r2l}
+\setvalue{otftracker-direction-simplenaskhi}{r2l}
+\setvalue{otftracker-direction-default}{}
\setvariables
[otftracker]
@@ -126,16 +126,17 @@
\startsetups otftracker
\setupbodyfont[tt,10pt]
\starttext
- \chapter{\getvariable{otftracker}{title}}
- \doifsomething {\getvariable{otftracker}{figure}} {
- \startlinecorrection
- \externalfigure[\getvariable{otftracker}{figure}]
- \stoplinecorrection
- }
- \showotfcomposition
- {\getvariable{otftracker}{font}*\getvariable{otftracker}{features} at \getvariable{otftracker}{size}}
- {\getvariable{otftracker}{direction}}
- {\getvariable{otftracker}{sample}}
+ \normalexpanded{\startchapter[title={\getvariable{otftracker}{title}}]}
+ \doifsomething {\getvariable{otftracker}{figure}} {
+ \startlinecorrection
+ \externalfigure[\getvariable{otftracker}{figure}][maxwidth=\hsize,frame=on]
+ \stoplinecorrection
+ }
+ \showotfcomposition
+ {\getvariable{otftracker}{font}*\getvariable{otftracker}{features} at \getvariable{otftracker}{size}}
+ {\getvariable{otftracker}{direction}}
+ {\getvariable{otftracker}{sample}}
+ \stopchapter
\stoptext
\stopsetups
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-21.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-21.mkiv
index a488546794f..588c9801676 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-21.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-21.mkiv
@@ -32,7 +32,7 @@
\endgroup
\stopsetups
-\def\ShowOtfTrackerSample#1%
+\unexpanded\def\ShowOtfTrackerSample#1%
{\doiffile{#1}
{\blank
\startlinecorrection
@@ -44,3 +44,21 @@
\egroup
\stoplinecorrection
\blank}}
+
+\endinput
+
+% \usemodule[fnt-20]
+%
+% \starttext
+%
+% \setvariables
+% [otftracker]
+% [direction=-1,
+% sample=لا,
+% title=Test,
+% font=file:arabtype,
+% % font=file:husayni,
+% % font=file:scheherazaderegot,
+% features=arabic]
+%
+% \stoptext
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-28.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-28.mkiv
index 039cc6ca8ea..778a4c39000 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-28.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-28.mkiv
@@ -35,7 +35,7 @@
end
\stopluacode
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-fnt-28.mkiv}
\starttext
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-29.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-29.mkiv
index 0378550e86a..f9e0a165879 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-29.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-29.mkiv
@@ -48,7 +48,7 @@
\stopluacode
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-fnt-29.mkiv}
\setupbodyfont[dejavu,tt,9pt]
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-31.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-31.mkiv
index 171f991db97..24f7f2d7e50 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-31.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-31.mkiv
@@ -168,7 +168,7 @@ end
"cambria.ttc(Cambria Math)",
"xits-math.otf",
"stixmath-regular.otf",
- "lmmath-regular",
+ -- "lmmath-regular",
-- "LMMath10-Regular@lmroman10-math",
-- "pxmath@px-math",
-- "lucida-math.otf",
diff --git a/Master/texmf-dist/tex/context/base/s-fnt-32.mkiv b/Master/texmf-dist/tex/context/base/s-fnt-32.mkiv
index 5ffbb2ac5d8..20a939a9771 100644
--- a/Master/texmf-dist/tex/context/base/s-fnt-32.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fnt-32.mkiv
@@ -30,7 +30,7 @@
end
\stopluacode
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-fnt-32.mkiv}
\setupbodyfont[dejavu,10pt]
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-tables.lua b/Master/texmf-dist/tex/context/base/s-fonts-tables.lua
index dcf189f3206..3cf1286a7e3 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-tables.lua
+++ b/Master/texmf-dist/tex/context/base/s-fonts-tables.lua
@@ -121,7 +121,7 @@ local function typesettable(t,keys,synonyms,nesting,prefix)
elseif v == "basepoints" then
context("%sbp",tk)
elseif v == "scaledpoints" then
- context(number.points(tk))
+ context("%p",tk)
elseif v == "table" then
context("<table>")
else -- if v == "integerscale" then
diff --git a/Master/texmf-dist/tex/context/base/s-inf-01.mkvi b/Master/texmf-dist/tex/context/base/s-inf-01.mkvi
index 51d3cbac824..8263413ace3 100644
--- a/Master/texmf-dist/tex/context/base/s-inf-01.mkvi
+++ b/Master/texmf-dist/tex/context/base/s-inf-01.mkvi
@@ -22,6 +22,8 @@
%D context auto:s-inf-01 --basepath=t:/texmf/tex/context/base
%D \stoptyping
+% \enabletrackers[context.*]
+
\startluacode
local format, gsub, find, match = string.format, string.gsub, string.find, string.match
@@ -48,7 +50,8 @@
local path = document.arguments.basepath or file.dirname(resolvers.find_file("context.mkiv"),".")
local pattern = path .. "/*." .. suffix .. "$" -- avoid bla.tex~
local texfiles = dir.glob(pattern)
- for _, name in ipairs(texfiles) do
+ for i=1,#texfiles do
+ local name = texfiles[i]
local base = file.basename(name)
for p=1,#patterns do
local category = match(base,patterns[p])
@@ -75,13 +78,18 @@
end
end
local data = io.loaddata(name)
- if suffix == "lua" then
- data = gsub(data,"%-%-%[%[.-%]%]%-%-","")
- data = gsub(data,"%-%-.-[\n\r]","")
+ if data then
+ if suffix == "lua" then
+ data = gsub(data,"%-%-%[%[.-%]%]%-%-","")
+ data = gsub(data,"%-%-.-[\n\r]","")
+ else
+ data = gsub(data,"%%.-[\n\r]","")
+ end
+ data = gsub(data,"%s","")
else
- data = gsub(data,"%%.-[\n\r]","")
+ logs.report("error","unknown file %a",name)
+ data = ""
end
- data = gsub(data,"%s","")
sm[n+5] = sm[n+5] + #data
if done then
sm[n] = sm[n] + #data
@@ -98,12 +106,12 @@
if next(list) then
-- already loaded
else
- for k, v in ipairs(types) do
- collect(list,v,k)
+ for i=1,#types do
+ collect(list,types[i],i)
end
- for category, _ in pairs(list) do
+ for category in next, list do
pattern ="{"..category.."%-"
- for suffix, t in pairs(used) do
+ for suffix, t in next, used do
local data = io.loaddata(resolvers.find_file("context."..suffix))
if data and find(data,pattern) then
t[category] = true
@@ -124,21 +132,21 @@
context.NC()
context(category)
context.NC()
- for i, t in ipairs(types) do
+ for i=1,#types do
local n, m = 0, 0
- for k, v in pairs(list) do
+ for k, v in next, list do
local nn, mm = what[k][i], what[k][i+5]
n = n + nn
m = m + (mm or nn)
end
- context.Top(t,norm(max),n,m)
+ context.Top(types[i],norm(max),n,m)
context.NC()
end
context.NC()
context.NR()
context.HL()
for k, v in table.sortedpairs(what) do
- local c = (what == size and comp[k]) or nope
+ local c = what == size and comp[k] or nope
context.NC()
context("%s~%s~~%s~~%s",
(used.mkii[k] and "ii") or "~~",
@@ -147,8 +155,8 @@
k
)
context.NC()
- for i, t in ipairs(types) do
- context.Bar(t,v[i],c[i],norm(v[i]))
+ for i=1,#types do
+ context.Bar(types[i],v[i],c[i],norm(v[i]))
context.NC()
end
context.NR()
@@ -161,8 +169,8 @@
for k, v in table.sortedpairs(what) do
local c = (what == size and comp[k]) or nope
context.StartUp(k)
- for i, t in ipairs(types) do
- context.Up(t,norm(v[i]))
+ for i=1,#types do
+ context.Up(types[i],norm(v[i]))
end
context.StopUp()
end
@@ -226,7 +234,7 @@
\stopTEXpage
\stoptexdefinition
-% \doifnotmode{demo}{\endinput}
+% \continueifinputfile{s-inf-01.mkvi}
\starttext
\Show
diff --git a/Master/texmf-dist/tex/context/base/s-inf-03.mkiv b/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
index 1ff4cd0d094..822173d0083 100644
--- a/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
@@ -1,8 +1,14 @@
% \nopdfcompression
-\enablemode[ipad]
+% \starttext
+% There is a loop somewhere ... todo.
+% \stoptext
-\doifmodeelse {ipad} {
+\enablemode[tablet] % whatever that means
+
+\setupbodyfont[dejavu]
+
+\doifmodeelse {tablet} {
\setuppapersize
[S6,landscape]
@@ -10,16 +16,16 @@
\definefont
[TitlePageFont]
- [MonoBold at 18pt]
+ [MonoBold at 16pt]
\setupbodyfont
- [tt,10pt]
+ [tt,8pt]
} {
\definefont
[TitlePageFont]
- [MonoBold at 24pt]
+ [MonoBold at 20pt]
\setupbodyfont
[tt]
@@ -84,21 +90,21 @@ local extralua = libraries.extralua
local obsolete = libraries.obsolete
local find = string.find
-local color, goto = context.color, context.goto
+local color, gotolocation = context.color, context["goto"]
for k, v in table.sortedpairs(_G) do
if obsolete[k] or find(k,"_") or k == "arg" or k == "utf" then
--
elseif basiclua[k] then
- goto(function() color( { "darkred" }, k) end, { k } )
+ gotolocation(function() color( { "darkred" }, k) end, { k } )
elseif extralua[k] then
- goto(function() color( { "darkgreen" }, k) end, { k } )
+ gotolocation(function() color( { "darkgreen" }, k) end, { k } )
elseif basictex[k] then
- goto(function() color( { "darkblue" }, k) end, { k } )
+ gotolocation(function() color( { "darkblue" }, k) end, { k } )
elseif extratex[k] then
- goto(function() color( { "darkyellow" }, k) end, { k } )
+ gotolocation(function() color( { "darkyellow" }, k) end, { k } )
elseif type(v) == "table" then
- goto(function() color( { "white" }, k) end, { k } )
+ gotolocation(function() color( { "white" }, k) end, { k } )
end
context(" ")
end
@@ -150,16 +156,19 @@ local byte = string.byte
local upper = string.upper
local skipglobal = table.tohash {
- "_G", "context", "modules", "global", "arg", "utf", 1,
+ "_G", "_M", "_ENV", "",
+ "context", "modules", "global", "arg", "utf", 1,
"_ptbs_", "_pcol_", "_plib_", "_clib_", "_tlib_",
- "_M", "kpse",
+ "kpse",
}
local skipkeys = table.tohash {
"_pcol_", "_plib_", "_clib_", "_tlib_", "_bpnf_", "_ptbs_",
"_cldf_", "_cldn_",
"_clmb_", "_clme_", "_clmm_", "_clmn_", "_clma_", "_clmh_",
- "_G", "_M", "_VERSION", "_COPYRIGHT", "_DESCRIPTION", "_NAME", "_PACKAGE", "__unload",
+ "_G", "_M", "_ENV", "",
+ "_VERSION", "_COPYRIGHT", "_DESCRIPTION", "_NAME", "_PACKAGE", "__unload",
+
}
local sameglobal = {
@@ -223,15 +232,14 @@ end
local NC, NR = context.NC, context.NR
local overstrike, rlap, bf = context.overstrike, context.rlap, context.bf
-local color, goto = context.color, context.goto
+local color, gotolocation = context.color, context["goto"]
local function cleanup(s)
return "\\char" ..byte(s) .. " "
end
local function handler(k,t,depth)
- k = gsub(k,"([~#$%%^&{}\\\|])",cleanup)
--- NC() rlap("\\quad\\tx " .. upper(sub(t,1,1)) .. " ".. k) NC() NC() NR()
+ k = gsub(k,"([~#$%%^&{}\\|])",cleanup)
NC() rlap("\\quad\\tx\\kern" .. (depth or 0).. "em" .. upper(sub(t,1,1)) .. " ".. k) NC() NC() NR()
end
@@ -260,67 +268,65 @@ local function show(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mar
for i=1,#keys do
local k = keys[i]
local v = t[k]
- if k ~= "obsolete" and not skipkeys[k] and (not obsolete or not obsolete[k]) then
+ if k and k ~= "obsolete" and not skipkeys[k] and (not obsolete or not obsolete[k]) then
local inlib = lib and lib[k]
local inglo = glo and glo[k]
- if k then
- local t = type(v)
- local kstr, tstr = k, t
- local obs = t_obsolete and t_obsolete[k]
- if obs then
- tstr = function() overstrike(t) end
- kstr = function() overstrike(k) end
- end
- local marked = marked(v)
- if marked then
- tstr = "data table"
- end
- if t == "table" then
- local m = getmetatable(v)
- if m and m.__call then
- tstr = "function"
- end
- end
- if not mark then
- --
- elseif inlib and tostring(inlib) ~= tostring(v) then
- tstr = "overloaded ".. tstr
- elseif inglo and tostring(inglo) ~= tostring(v) then
- tstr = "overloaded ".. tstr
+ local t = type(v)
+ local kstr, tstr = k, t
+ local obs = t_obsolete and t_obsolete[k]
+ if obs then
+ tstr = function() overstrike(t) end
+ kstr = function() overstrike(k) end
+ end
+ local marked = marked(v)
+ if marked then
+ tstr = "data table"
+ end
+ if t == "table" then
+ local m = getmetatable(v)
+ if m and m.__call then
+ tstr = "function"
end
- NC() bf()
- if inlib then
- if not mark and t == "table" then
- goto(function() color( { libcolor }, kstr) end, { k } )
- else
- color( { libcolor }, kstr)
- end
- elseif inglo then
- if not mark and t == "table" then
- goto(function() color( { glocolor }, kstr) end, { k } )
- else
- color( { glocolor }, kstr)
- end
+ end
+ if not mark then
+ --
+ elseif inlib and tostring(inlib) ~= tostring(v) then
+ tstr = "overloaded ".. tstr
+ elseif inglo and tostring(inglo) ~= tostring(v) then
+ tstr = "overloaded ".. tstr
+ end
+ NC() bf()
+ if inlib then
+ if not mark and t == "table" then
+ gotolocation(function() color( { libcolor }, kstr) end, { k } )
else
- if not mark and t == "table" then
- goto(k, { kstr } )
- else
- context(kstr)
- end
+ color( { libcolor }, kstr)
end
- NC()
- if inlib then
- color( { libcolor }, tstr)
- elseif inglo then
- color( { glocolor }, tstr)
+ elseif inglo then
+ if not mark and t == "table" then
+ gotolocation(function() color( { glocolor }, kstr) end, { k } )
else
- context(tstr)
+ color( { glocolor }, kstr)
end
- NC() NR()
- if mark and t == "table" and title ~= "libraries" and title ~= "package" and not marked then
- childtables(false,v,handler) -- (k,v,handler)
+ else
+ if not mark and t == "table" then
+ gotolocation(k, { kstr } )
+ else
+ context(kstr)
end
end
+ NC()
+ if inlib then
+ color( { libcolor }, tstr)
+ elseif inglo then
+ color( { glocolor }, tstr)
+ else
+ context(tstr)
+ end
+ NC() NR()
+ if mark and t == "table" and title ~= "libraries" and title ~= "package" and not marked then
+ childtables(false,v,handler) -- (k,v,handler)
+ end
end
end
context.stoptabulate()
diff --git a/Master/texmf-dist/tex/context/base/s-lan-04.mkiv b/Master/texmf-dist/tex/context/base/s-lan-04.mkiv
index 80283bfd47f..c04be94667d 100644
--- a/Master/texmf-dist/tex/context/base/s-lan-04.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-lan-04.mkiv
@@ -138,7 +138,7 @@
\definecolor[hyphenation:de] [g=.8]
\definecolor[hyphenation:nl] [b=.8]
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-lan-04.mkiv}
\starttext
diff --git a/Master/texmf-dist/tex/context/base/s-lan-06.mkiv b/Master/texmf-dist/tex/context/base/s-lan-06.mkiv
new file mode 100644
index 00000000000..43d59a2d131
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/s-lan-06.mkiv
@@ -0,0 +1,53 @@
+%D \module
+%D [ file=s-lan-06,
+%D version=2013.03.22,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Language Environment 6,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startluacode
+
+languages.tracers = languages.tracers or { }
+
+function languages.tracers.showfrequencies(language)
+ local t = languages.frequencies.getdata(language or "en")
+ context.starttabulate { "|lT|cw(2em)|r|" }
+ context.NC()
+ context.formatted.rlap("%s: %p",t.language,languages.frequencies.averagecharwidth(t.language))
+ context.NC()
+ context.NC()
+ context.NR()
+ context.HL()
+ for k, v in table.sortedhash(t.frequencies) do
+ context.NC()
+ context("%U",k)
+ context.NC()
+ context("%c",k)
+ context.NC()
+ context("%0.3f",v)
+ context.NC()
+ context.NR()
+ end
+ context.stoptabulate()
+end
+
+\stopluacode
+
+\unexpanded\def\ShowLanguageFrequencies#1%
+ {\ctxlua{languages.tracers.showfrequencies("#1")}}
+
+\continueifinputfile{s-lan-06.mkiv}
+
+\starttext
+
+ \hsize65\averagecharwidth \input ward \par
+
+ \ShowLanguageFrequencies\currentmainlanguage
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/s-mat-10.mkiv b/Master/texmf-dist/tex/context/base/s-mat-10.mkiv
index 384371c186f..3eaaf688bab 100644
--- a/Master/texmf-dist/tex/context/base/s-mat-10.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-mat-10.mkiv
@@ -13,10 +13,10 @@
%D This base module will be cleaned up and extended.
-\def\enableshowmathfontvirtual
+\unexpanded\def\enableshowmathfontvirtual
{\ctxlua{fonts.constructors.autocleanup=false}}
-\def\showmathfontcharacters
+\unexpanded\def\showmathfontcharacters
{\dodoubleempty\doshowmathfontcharacters}
\def\doshowmathfontcharacters[#1][#2]%
@@ -62,7 +62,7 @@
\startluacode
local concat = table.concat
-local format, lower = string.format, string.lower
+local lower = string.lower
local utfchar = utf.char
local round = math.round
@@ -76,6 +76,9 @@ local fillinthegaps = true
local upperlimit = 0x0007F
local upperlimit = 0xF0000
+local f_unicode = string.formatters["%U"]
+local f_slot = string.formatters["%s/%0X"]
+
function document.showmathfont(id,slot)
local tfmdata = fontdata[id]
local characters = tfmdata.characters
@@ -118,15 +121,15 @@ function document.showmathfont(id,slot)
local v_variants = char.vert_variants
local h_variants = char.horiz_variants
local commands = char.commands
- local slookups = desc.slookups
- local mlookups = desc.mlookups
+ local slookups = desc and desc.slookups
+ local mlookups = desc and desc.mlookups
local mathclass = info.mathclass
local mathspec = info.mathspec
local mathsymbol = info.mathsymbol
local description = info.description or no_description
context.startmathfontlistentry()
- context.mathfontlistreference(format("U+%05X",unicode))
- context.mathfontlistentryhexdectit(format("U+%05X",code),code,lower(description))
+ context.mathfontlistreference(f_unicode(unicode))
+ context.mathfontlistentryhexdectit(f_unicode(code),code,lower(description))
context.mathfontlistentrywdhtdpic(round(char.width or 0),round(char.height or 0),round(char.depth or 0),round(char.italic or 0))
if virtual and commands then
local t = { }
@@ -134,7 +137,7 @@ function document.showmathfont(id,slot)
local ci = commands[i]
if ci[1] == "slot" then
local fnt, idx = ci[2], ci[3]
- t[#t+1] = format("%s/%0X",names[fnt] or fnt,idx)
+ t[#t+1] = f_slot(names[fnt] or fnt,idx)
end
end
if #t > 0 then
@@ -155,7 +158,7 @@ function document.showmathfont(id,slot)
context.stopmathfontlistentryclassspec()
end
if mathsymbol then
- context.mathfontlistentrysymbol(format("U+%05X",mathsymbol),mathsymbol)
+ context.mathfontlistentrysymbol(f_unicode(mathsymbol),mathsymbol)
end
if next_sizes then
local n, done = 0, { }
@@ -167,7 +170,7 @@ function document.showmathfont(id,slot)
break
else
done[next_sizes] = true
- context.mathfontlistnextentry(n,format("U+%05X",next_sizes),next_sizes)
+ context.mathfontlistnextentry(n,f_unicode(next_sizes),next_sizes)
next_sizes = characters[next_sizes]
v_variants = next_sizes.vert_variants or v_variants
h_variants = next_sizes.horiz_variants or h_variants
@@ -185,14 +188,14 @@ function document.showmathfont(id,slot)
context.startmathfontlisthvariants()
for i=1,#h_variants do -- we might go top-down in the original
local vi = h_variants[i]
- context.mathfontlisthvariantsentry(i,format("U+%05X",vi.glyph),vi.glyph)
+ context.mathfontlisthvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
end
context.stopmathfontlisthvariants()
elseif v_variants then
context.startmathfontlistvvariants()
for i=1,#v_variants do
local vi = v_variants[#v_variants-i+1]
- context.mathfontlistvvariantsentry(i,format("U+%05X",vi.glyph),vi.glyph)
+ context.mathfontlistvvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
end
context.stopmathfontlistvvariants()
end
@@ -230,7 +233,7 @@ function document.showmathfont(id,slot)
local i = 0
for variant, lookuptype in table.sortedpairs(variants) do
i = i + 1
- context.mathfontlookupvariant(i,format("U+%05X",variant),variant,lookuptype)
+ context.mathfontlookupvariant(i,f_unicode(variant),variant,lookuptype)
end
context.stopmathfontlookupvariants()
end
diff --git a/Master/texmf-dist/tex/context/base/s-mat-11.mkiv b/Master/texmf-dist/tex/context/base/s-mat-11.mkiv
index 88bfad8ef2a..862872499dc 100644
--- a/Master/texmf-dist/tex/context/base/s-mat-11.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-mat-11.mkiv
@@ -1,17 +1,5 @@
\usemodule[s][mat-10]
-% layout
-
-\setuplayout
- [width=middle,
- height=middle,
- topspace=15mm,
- backspace=15mm,
- bottomspace=15mm,
- header=1cm,
- headerdistance=0.5cm,
- footer=0pt]
-
% rendering
\def\startmathfontlist
@@ -91,6 +79,16 @@
\continueifinputfile{s-mat-11.mkiv}
+\setuplayout
+ [width=middle,
+ height=middle,
+ topspace=15mm,
+ backspace=15mm,
+ bottomspace=15mm,
+ header=1cm,
+ headerdistance=0.5cm,
+ footer=0pt]
+
\starttext
% \setupbodyfont[cambria, 12pt] \showmathfontcharacters
% \setupbodyfont[lmvirtual,12pt] \showmathfontcharacters
diff --git a/Master/texmf-dist/tex/context/base/s-mat-20.mkiv b/Master/texmf-dist/tex/context/base/s-mat-20.mkiv
new file mode 100644
index 00000000000..544612fec43
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/s-mat-20.mkiv
@@ -0,0 +1,161 @@
+%D \module
+%D [ file=s-mat-20.mkiv,
+%D version=2012.12.05,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Math Tracing Macros,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startluacode
+
+local tables = utilities.tables.definedtable("math","tracing","spacing","tables")
+
+tables.styleaxis = {
+ "ord", "op", "bin", "rel", "open", "close", "punct", "inner",
+}
+
+tables.parameters = {
+ "quad", "axis", "operatorsize",
+ "overbarkern", "overbarrule", "overbarvgap",
+ "underbarkern", "underbarrule", "underbarvgap",
+ "radicalkern", "radicalrule", "radicalvgap",
+ "radicaldegreebefore", "radicaldegreeafter", "radicaldegreeraise",
+ "stackvgap", "stacknumup", "stackdenomdown",
+ "fractionrule", "fractionnumvgap", "fractionnumup",
+ "fractiondenomvgap", "fractiondenomdown", "fractiondelsize",
+ "limitabovevgap", "limitabovebgap", "limitabovekern",
+ "limitbelowvgap", "limitbelowbgap", "limitbelowkern",
+ "underdelimitervgap", "underdelimiterbgap",
+ "overdelimitervgap", "overdelimiterbgap",
+ "subshiftdrop", "supshiftdrop", "subshiftdown",
+ "subsupshiftdown", "subtopmax", "supshiftup",
+ "supbottommin", "supsubbottommax", "subsupvgap",
+ "spaceafterscript", "connectoroverlapmin",
+}
+
+tables.styles = {
+ "display",
+ "text",
+ "script",
+ "scriptscript",
+}
+
+function tables.stripmu(str)
+ str = string.gsub(str,"mu","")
+ str = string.gsub(str," ","")
+ str = string.gsub(str,"plus","+")
+ str = string.gsub(str,"minus","-")
+ return str
+end
+
+function tables.strippt(old)
+ local new = string.gsub(old,"pt","")
+ if new ~= old then
+ new = string.format("%0.4f",tonumber(new))
+ end
+ return new
+end
+
+function tables.showspacing()
+
+ local styles = tables.styles
+ local styleaxis = tables.styleaxis
+
+ context.starttabulate { "|Tl|Tl|" .. string.rep("Tc|",(#styles*2)) }
+ context.HL()
+ context.NC()
+ context.NC()
+ context.NC()
+ for i=1,#styles do
+ context.bold(styles[i])
+ context.NC()
+ context.bold("(cramped)")
+ context.NC()
+ end
+ context.NR()
+ context.HL()
+ for i=1,#styleaxis do
+ -- print(key,tex.getmath(key,"text"))
+ local one = styleaxis[i]
+ for j=1,#styleaxis do
+ local two = styleaxis[j]
+ context.NC()
+ if j == 1 then
+ context.bold(one)
+ end
+ context.NC()
+ context.bold(two)
+ context.NC()
+ for i=1,#styles do
+ context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\%sstyle'))}",one,two,styles[i])
+ context.NC()
+ context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\cramped%sstyle'))}",one,two,styles[i])
+ context.NC()
+ end
+ context.NR()
+ end
+ end
+ context.stoptabulate()
+end
+
+function tables.showparameters()
+
+ local styles = tables.styles
+ local parameters = tables.parameters
+
+ context.starttabulate { "|l|" .. string.rep("Tc|",(#styles*2)) }
+ context.HL()
+ context.NC()
+ context.NC()
+ for i=1,#styles do
+ context.bold(styles[i])
+ context.NC()
+ context.bold("(cramped)")
+ context.NC()
+ end
+ context.NR()
+ context.HL()
+ for i=1,#parameters do
+ local parameter = parameters[i]
+ -- print(parameter,tex.getmath(parameter,"text"))
+ context.NC()
+ context.type(parameter)
+ context.NC()
+ for i=1,#styles do
+ context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\%sstyle'))}",parameter,styles[i])
+ context.NC()
+ context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\cramped%sstyle'))}",parameter,styles[i])
+ context.NC()
+ end
+ context.NR()
+ end
+ context.stoptabulate()
+
+end
+
+\stopluacode
+
+\continueifinputfile{s-mat-20.mkiv}
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ backspace=1cm,
+ topspace=1cm,
+ footer=0pt,
+ header=0pt]
+
+\setupbodyfont
+ [dejavu,8pt]
+
+\starttext
+
+ \ctxlua{math.tracing.spacing.tables.showspacing()}
+ % \ctxlua{math.tracing.spacing.tables.showparameters()}
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/s-mod-00.mkiv b/Master/texmf-dist/tex/context/base/s-mod-00.mkiv
index 94f6edcb6f0..7af56dc2de9 100644
--- a/Master/texmf-dist/tex/context/base/s-mod-00.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-mod-00.mkiv
@@ -13,6 +13,12 @@
\unprotect
-% gone
+\startmode[nocode]
+
+ % \definieerbuffer[definition] % ignore
+
+ \def\startdefinition#1\stopdefinition{}
+
+\stopmode
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/s-mod-01.mkiv b/Master/texmf-dist/tex/context/base/s-mod-01.mkiv
index ae119a7eed8..6946bef691c 100644
--- a/Master/texmf-dist/tex/context/base/s-mod-01.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-mod-01.mkiv
@@ -25,6 +25,9 @@
%D him when you run into problems. Bugs in this style can be sent to
%D Hans.
+\usemodule
+ [abr-00]
+
\setvariables
[document]
[ file=\jobname,
diff --git a/Master/texmf-dist/tex/context/base/s-mod-02.mkiv b/Master/texmf-dist/tex/context/base/s-mod-02.mkiv
index 032ace8a645..37e3d2f143e 100644
--- a/Master/texmf-dist/tex/context/base/s-mod-02.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-mod-02.mkiv
@@ -13,6 +13,12 @@
\unprotect
-% gone
+\startmode[nocode]
+
+ % \definieerbuffer[definition] % ignore
+
+ \def\startdefinition#1\stopdefinition{}
+
+\stopmode
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/s-mod.ctx b/Master/texmf-dist/tex/context/base/s-mod.ctx
index 5059c77d442..09ecf714cc4 100644
--- a/Master/texmf-dist/tex/context/base/s-mod.ctx
+++ b/Master/texmf-dist/tex/context/base/s-mod.ctx
@@ -11,7 +11,6 @@
</ctx:files>
</ctx:preprocess>
<ctx:flags>
- <!-- ctx:flag>purge</ctx:flag -->
<ctx:flag>global</ctx:flag>
<ctx:flag>prep</ctx:flag>
<ctx:flag>purge</ctx:flag>
diff --git a/Master/texmf-dist/tex/context/base/s-pre-05.tex b/Master/texmf-dist/tex/context/base/s-pre-05.tex
index 8081127879d..ccffd1f4d72 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-05.tex
+++ b/Master/texmf-dist/tex/context/base/s-pre-05.tex
@@ -16,7 +16,7 @@
 %D As all styles so far, this one has the same structuring
%D commands.
-\startmode[asintended] \setupbodyfont[lbr] \stopmode
+\startmode[asintended] \setupbodyfont[lucidaot] \stopmode
\setupbodyfont[14.4pt]
@@ -115,7 +115,7 @@
\defineoverlay [TitleGraphic] [\useMPgraphic{TitleGraphic}]
\defineoverlay [NextPage] [\overlaybutton{forward}]
-\def\StartTitlePage%
+\unexpanded\def\StartTitlePage
{\setupbackgrounds[page][background={color,TitleGraphic,NextPage}]
\setupbackgrounds[text][text][background=]
\setupinteraction[menu=off]
@@ -128,7 +128,7 @@
\vfil
\let\\=\vfil}
-\def\StopTitlePage%
+\unexpanded\def\StopTitlePage
{\vfil\vfil\vfil
\stopstandardmakeup
\setuplayout[width=430pt,rightedge=110pt]
@@ -137,7 +137,7 @@
\setupbackgrounds[page][background=color]
\setupbackgrounds[text][text][background={HashFrameA,NextPage}]}
-\def\TitlePage#1%
+\unexpanded\def\TitlePage#1%
{\StartTitlePage#1\StopTitlePage}
%D \macros
@@ -146,8 +146,8 @@
%D Since the lists are in the menu, we don't honor list
%D placement macros.
-\def\Topics#1{}
-\def\Subjects{}
+\unexpanded\def\Topics#1{}
+\unexpanded\def\Subjects{}
%D \macros
%D {Topic, Nopic, Subject}
@@ -217,7 +217,7 @@
endfor ;
\stopuseMPgraphic
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-pre-05.tex}
%D The (rather silly) demo section.
diff --git a/Master/texmf-dist/tex/context/base/s-pre-60.mkiv b/Master/texmf-dist/tex/context/base/s-pre-60.mkiv
index b39a0631169..70f8978b02c 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-60.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-pre-60.mkiv
@@ -13,6 +13,8 @@
% use lua instead of global mess
+\unprotect
+
\startmode[paper,print]
\let\StartSteps\relax
\let\StopSteps \relax
@@ -23,22 +25,31 @@
\endinput
\stopmode
-\newcounter\StepCounter
-\newcounter\StepMaximum
+\newcount\c_module_pre_steps_current
+\newcount\c_module_pre_steps_maximum
+\newcount\c_module_pre_steps_nesting_step
+\newcount\c_module_pre_steps_nesting_steps
+\newcount\c_module_pre_steps_nesting_busy
+
+\def\StepCounter {\the\c_module_pre_steps_current}
+\def\StepMaximum {\the\c_module_pre_steps_maximum}
+\def\StepLayer {step:\the\c_module_pre_steps_current}
+\def\NextStepLayer {step:\the\numexpr\c_module_pre_steps_current+\plusone\relax}
+\def\FirstStepLayer{step:1}
\useJSscripts[stp]
\startsetups[set-stepper]
- \ifnum\getvariable{stepper}{nofsteps}>\StepMaximum
+ \ifnum\getvariable{stepper}{nofsteps}>\c_module_pre_steps_maximum
- \dostepwiserecurse {\numexpr\StepMaximum+1\relax} {\getvariable{stepper}{nofsteps}} {1} {
+ \dostepwiserecurse {\numexpr\c_module_pre_steps_maximum+\plusone\relax} {\getvariable{stepper}{nofsteps}} {1} {
\doifnotmode{nosteps,nostep} {
\expanded{\defineviewerlayer[step:\recurselevel][state=stop,scope=global]}
}
}
- \xdef\StepMaximum{\getvariable{stepper}{nofsteps}}
+ \global\c_module_pre_steps_maximum\getvariable{stepper}{nofsteps}\relax
\fi
@@ -58,25 +69,100 @@
% todo: roll back blank
-\def\ResetStep {\doglobal\newcounter\StepCounter}
-\def\NextStep {\doglobal\increment \StepCounter}
-\def\PrevStep {\doglobal\decrement \StepCounter}
+\unexpanded\def\ResetStep
+ {\iftrialtypesetting\else
+ \global\c_module_pre_steps_current\zerocount
+ \fi}
+
+\unexpanded\def\NextStep
+ {\iftrialtypesetting\else
+ \global\advance\c_module_pre_steps_current\plusone
+ \fi}
+
+\unexpanded\def\PrevStep
+ {\iftrialtypesetting\else
+ \global\advance\c_module_pre_steps_current\minusone
+ \fi}
+
+\unexpanded\def\FlushStep
+ {\iftrialtypesetting\else
+ \StopStep
+ \NextStep
+ \StartStep
+ \fi}
+
+\unexpanded\def\StartStep
+ {\iftrialtypesetting\else
+ \global\advance\c_module_pre_steps_nesting_step\plusone
+ \ifcase\c_module_pre_steps_nesting_step\or
+ \startviewerlayer[\StepLayer]%
+ \fi
+ \fi
+ \ignorespaces}
+
+\unexpanded\def\StopStep
+ {\removeunwantedspaces
+ \iftrialtypesetting\else
+ \ifcase\c_module_pre_steps_nesting_step\or
+ \stopviewerlayer
+ \fi
+ \global\advance\c_module_pre_steps_nesting_step\minusone
+ \fi}
+
+\unexpanded\def\StartSteps
+ {\iftrialtypesetting\else
+ \global\advance\c_module_pre_steps_nesting_steps\plusone
+ \ifcase\c_module_pre_steps_nesting_steps\or
+ \ResetStep
+ \NextStep
+ \StartStep
+ \fi
+ \fi}
+
+\unexpanded\def\StopSteps
+ {\iftrialtypesetting\else
+ \ifcase\c_module_pre_steps_nesting_steps\or
+ \StopStep
+ \PrevStep
+ \fi
+ \global\advance\c_module_pre_steps_nesting_steps\minusone
+ \fi}
+
+\unexpanded\def\StartBusy
+ {\iftrialtypesetting\else
+ \global\advance\c_module_pre_steps_nesting_busy\plusone
+ \ifcase\c_module_pre_steps_nesting_busy\or
+ \startviewerlayer[step:busy]
+ \fi
+ \fi
+ \ignorespaces}
+
+\unexpanded\def\StopBusy
+ {\removeunwantedspaces
+ \iftrialtypesetting\else
+ \ifcase\c_module_pre_steps_nesting_busy\or
+ \stopviewerlayer
+ \fi
+ \global\advance\c_module_pre_steps_nesting_busy\minusone
+ \fi}
-\def\StepLayer {step:\StepCounter}
-\def\NextStepLayer {step:\the\numexpr\StepCounter+1\relax}
-\def\FirstStepLayer{step:1}
+%D Handy:
+
+\unexpanded\def\StartLocalSteps
+ {\ResetStep}
-\def\StartStep {\startviewerlayer[\StepLayer]\ignorespaces}
-\def\StopStep {\removeunwantedspaces\stopviewerlayer}
-\def\StartSteps {\iftrialtypesetting\else\ResetStep\NextStep\StartStep\fi}
-\def\StopSteps {\iftrialtypesetting\else\StopStep \PrevStep \fi}
-\def\FlushStep {\iftrialtypesetting\else\StopStep \NextStep\StartStep\fi}
+\unexpanded\def\StopLocalSteps
+ {}
-\def\StartBusy {\startviewerlayer[step:busy]\ignorespaces}
-\def\StopBusy {\removeunwantedspaces\stopviewerlayer}
+\unexpanded\def\StartLocalStep
+ {\NextStep
+ \StartStep}
+
+\unexpanded\def\StopLocalStep
+ {\StopStep}
\appendtoks
- \ResetStep
+ \ResetStep
\to \everyaftershipout
\setupinteraction
@@ -112,14 +198,6 @@
[symbol={attach-normal,attach-down},
textlayer=\StepLayer]
-%D Handy:
-
-\def\StartLocalSteps{\ResetStep}
-\def\StopLocalSteps {}
-
-\def\StartLocalStep {\NextStep\StartStep}
-\def\StopLocalStep {\StopStep}
-
%D used as (given some definitions):
%D
%D \starttyping
@@ -131,4 +209,4 @@
%D \StopLocalSteps
%D \stoptyping
-\endinput
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/s-pre-61.tex b/Master/texmf-dist/tex/context/base/s-pre-61.tex
index 21e461320a4..48b9d09a02a 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-61.tex
+++ b/Master/texmf-dist/tex/context/base/s-pre-61.tex
@@ -16,7 +16,7 @@
\usemodule[pre-60]
\doifmodeelse {mkiv} {
- \usetypescriptfile[type-hgz]
+ \usetypescriptfile[ghz]
\definetypeface[mainface][ss][sans][optima-nova][default]
} {
\usetypescriptfile[type-ghz]
@@ -156,6 +156,7 @@
\NormalizeFontHeight \SubTitleFont {\setstrut\strut\quad} {1.5\headerheight} {SansBold}
\NormalizeFontHeight \SubSubTitleFont {\setstrut\strut\quad} {1.0\headerheight} {SansBold}
\to \everystarttext
+
\appendtoks
\NormalizeFontHeight \HeadFont {\setstrut\strut\quad} {1.0\headerheight} {SansBold}
\NormalizeFontHeight \TitleFont {\setstrut\strut\quad} {2.0\headerheight} {SansBold}
diff --git a/Master/texmf-dist/tex/context/base/s-pre-62.tex b/Master/texmf-dist/tex/context/base/s-pre-62.tex
index cc9762e12c8..a1a405c236b 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-62.tex
+++ b/Master/texmf-dist/tex/context/base/s-pre-62.tex
@@ -220,5 +220,5 @@
\long\def\StartTitlePage #1\StopTitlePage {\TitlePage {#1}}
\long\def\StartSubTitlePage#1\StopSubTitlePage{\SubTitlePage{#1}}
-\doifnotmode{demo} {\endinput}
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/s-pre-63.tex b/Master/texmf-dist/tex/context/base/s-pre-63.tex
index 0535fc1d06c..974c67a5f90 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-63.tex
+++ b/Master/texmf-dist/tex/context/base/s-pre-63.tex
@@ -67,6 +67,7 @@ this module is under reconstruction
\setupbodyfont[8pt] \definefont[BigFont][Normal sa 4]
\starttext
+
\StartPage
\StartPair \StartLeft ZAPF \StopLeft \StartRight \input zapf \StopRight \StopPair
\StartPair \StartLeft DAVIS \StopLeft \StartRight \input davis \StopRight \StopPair
diff --git a/Master/texmf-dist/tex/context/base/s-pre-67.tex b/Master/texmf-dist/tex/context/base/s-pre-67.tex
index 88043aa24a2..84ea1a66c93 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-67.tex
+++ b/Master/texmf-dist/tex/context/base/s-pre-67.tex
@@ -119,7 +119,7 @@
\setupwhitespace
[big]
-\def\MyCommand#1#2%
+\unexpanded\def\MyCommand#1#2%
{\setlayer[title][preset=rightbottom,x=.75cm,y=.5cm]{#2}}
\def\titlepage#1#2%
diff --git a/Master/texmf-dist/tex/context/base/s-pre-69.mkiv b/Master/texmf-dist/tex/context/base/s-pre-69.mkiv
index c87bcd53745..cc6af9ad38f 100644
--- a/Master/texmf-dist/tex/context/base/s-pre-69.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-pre-69.mkiv
@@ -311,7 +311,7 @@
\setelementexporttag[paragraphs][nature][display]
\setelementexporttag[paragraph] [nature][mixed]
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{s-pre-69.mkiv}
% finetuning: \StartText{\TEX\ and Reality\vskip2exClashing Mindsets?\vskip1ex}{Bacho\TEX, May 1, 2010}
diff --git a/Master/texmf-dist/tex/context/base/scrn-bar.mkvi b/Master/texmf-dist/tex/context/base/scrn-bar.mkvi
index dabc8f77dc6..1dadc26f38f 100644
--- a/Master/texmf-dist/tex/context/base/scrn-bar.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-bar.mkvi
@@ -280,7 +280,7 @@
\unskip}
\def\scrn_bar_alternative_d
- {\ifnum\nofsubpages>\plusone \doif{\counterparameter\s!subpage\c!state}\v!start{%
+ {\ifnum\nofsubpages>\plusone \doif{\namedcounterparameter\s!subpage\c!state}\v!start{%
\d_scrn_bar_width \interactionbarparameter\c!width
\d_scrn_bar_distance\interactionbarparameter\c!distance
\d_scrn_bar_height \interactionbarparameter\c!height
@@ -290,7 +290,7 @@
}\fi}
\def\scrn_bar_alternative_e
- {\ifnum\nofsubpages>\plusone \doif{\counterparameter\s!subpage\c!state}\v!start{%
+ {\ifnum\nofsubpages>\plusone \doif{\namedcounterparameter\s!subpage\c!state}\v!start{%
\d_scrn_bar_width \interactionbarparameter\c!width
\d_scrn_bar_distance\interactionbarparameter\c!distance
\d_scrn_bar_height \interactionbarparameter\c!height
@@ -305,7 +305,7 @@
}\fi}
\def\scrn_bar_alternative_f
- {\ifnum\nofsubpages>\plusone \doif{\counterparameter\s!subpage\c!state}\v!start{%
+ {\ifnum\nofsubpages>\plusone \doif{\namedcounterparameter\s!subpage\c!state}\v!start{%
\d_scrn_bar_width \interactionbarparameter\c!width
\d_scrn_bar_distance\interactionbarparameter\c!distance
\d_scrn_bar_height \interactionbarparameter\c!height
diff --git a/Master/texmf-dist/tex/context/base/scrn-but.lua b/Master/texmf-dist/tex/context/base/scrn-but.lua
index b5b0a8ae42a..e49372ce9fd 100644
--- a/Master/texmf-dist/tex/context/base/scrn-but.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-but.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['scrn-but'] = {
license = "see context related readme files"
}
-local format = string.format
+local f_two_colon = string.formatters["%s:%s"]
function commands.registerbuttons(tag,register,language)
local data = sorters.definitions[language]
@@ -14,6 +14,6 @@ function commands.registerbuttons(tag,register,language)
local tag = tag == "" and { "" } or { tag }
for i=1,#orders do
local order = orders[i]
- context.menubutton(tag,format("%s:%s",register,order),order)
+ context.menubutton(tag,f_two_colon(register,order),order)
end
end
diff --git a/Master/texmf-dist/tex/context/base/scrn-but.mkvi b/Master/texmf-dist/tex/context/base/scrn-but.mkvi
index b20d34c09ac..fd2da9e0841 100644
--- a/Master/texmf-dist/tex/context/base/scrn-but.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-but.mkvi
@@ -934,32 +934,16 @@
%D Lists:
-\ifdefined\definelistalternative
-
- \definelistalternative [\v!left ] [\c!renderingsetup=strc:lists:rendering:menu]
- \definelistalternative [\v!right ] [\c!renderingsetup=strc:lists:rendering:menu]
- \definelistalternative [\v!top ] [\c!renderingsetup=strc:lists:rendering:menu]
- \definelistalternative [\v!bottom] [\c!renderingsetup=strc:lists:rendering:menu]
-
- \startsetups[strc:lists:rendering:menu]
- \startbut[internal(\currentlistentryinternal)]
- \limitatetext\currentlistentrytext{\currentlistparameter\c!maxwidth}\unknown
- \stopbut
- \stopsetups
-
-\else % will go
-
- \setvalue{\??listelement\v!left }{\def\dosomelistelement{\scrn_menu_list_element\v!left }}
- \setvalue{\??listelement\v!right }{\def\dosomelistelement{\scrn_menu_list_element\v!right }}
- \setvalue{\??listelement\v!top }{\def\dosomelistelement{\scrn_menu_list_element\v!top }}
- \setvalue{\??listelement\v!bottom}{\def\dosomelistelement{\scrn_menu_list_element\v!bottom}}
-
- \def\scrn_menu_list_element#1#2#3#4#5#6#7%
- {\startbut[internal(#3)]
- \limitatetext{#5}{\namedlistparameter{#2}\c!maxwidth}\unknown%
- \stopbut}
-
-\fi
+\definelistalternative [\v!left ] [\c!renderingsetup=strc:lists:rendering:menu]
+\definelistalternative [\v!right ] [\c!renderingsetup=strc:lists:rendering:menu]
+\definelistalternative [\v!top ] [\c!renderingsetup=strc:lists:rendering:menu]
+\definelistalternative [\v!bottom] [\c!renderingsetup=strc:lists:rendering:menu]
+
+\startsetups[strc:lists:rendering:menu]
+ \startbut[internal(\currentlistentrylocation)]
+ \limitatetext\currentlistentrytitle{\listparameter\c!maxwidth}\unknown
+ \stopbut
+\stopsetups
%D Sometimes handy:
diff --git a/Master/texmf-dist/tex/context/base/scrn-fld.lua b/Master/texmf-dist/tex/context/base/scrn-fld.lua
index 275fb4a9b39..9836cbebea3 100644
--- a/Master/texmf-dist/tex/context/base/scrn-fld.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-fld.lua
@@ -73,13 +73,13 @@ function commands.setformsmethod(method)
end
function commands.doiffieldcategoryelse(name)
- commands.testcase(codeinjections.validfieldcategory(name))
+ commands.doifelse(codeinjections.validfieldcategory(name))
end
function commands.doiffieldsetelse(tag)
- commands.testcase(codeinjections.validfieldset(name))
+    commands.doifelse(codeinjections.validfieldset(tag))
end
function commands.doiffieldelse(name)
- commands.testcase(codeinjections.validfield(name))
+ commands.doifelse(codeinjections.validfield(name))
end
diff --git a/Master/texmf-dist/tex/context/base/scrn-fld.mkvi b/Master/texmf-dist/tex/context/base/scrn-fld.mkvi
index 87209909961..cc84b9c71f8 100644
--- a/Master/texmf-dist/tex/context/base/scrn-fld.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-fld.mkvi
@@ -231,7 +231,6 @@
depth = \number\dimexpr\fieldbodyparameter\c!depth \relax,
align = "\fieldbodyparameter\c!align",
length = "\fieldbodyparameter\c!n",
- style = "\fieldbodyparameter\c!style",
fontstyle = "\fontstyle",
fontalternative = "\fontalternative",
fontsize = "\fontbody",
@@ -241,7 +240,6 @@
\ifx\currentfieldbackgroundcolor\empty \else
backgroundcolor = "\currentfieldbackgroundcolor",
backgroundcolorvalue = "\currentfieldbackgroundcolorvalue",
-
\fi
\ifx\currentfieldframecolor\empty \else
framecolor = "\currentfieldframecolor",
@@ -403,7 +401,7 @@
\setupfieldcontentframed
[\c!align=\v!flushleft,
\c!strut=\v!no,
- \s!parent=\??ol] % needs checking
+ \s!parent=\??regularframed] % needs checking
\setupfieldcontentframed % independent
[\c!alternative=\v!normal,
@@ -419,13 +417,13 @@
[\c!style=,
\c!color=,
\c!align=\v!flushleft,
- \s!parent=\??ol] % needs checking
+ \s!parent=\??regularframed] % needs checking
\setupfieldtotalframed
[%\c!alternative={\v!label,\v!frame,\v!horizontal},
\c!strut=\v!no,
\c!align=,
- \s!parent=\??ol] % needs checking
+ \s!parent=\??regularframed] % needs checking
% \setupcurrent
@@ -616,9 +614,9 @@
\v!horizontal=>\settrue\fieldishorizontal,
\v!vertical=>\settrue\fieldisvertical]%
\ifconditional\fieldisvertical
- \setupfieldtotalframed[\c!distance=\zeropoint,\c!inbetween=\vskip\@@localoffset,\c!align=\v!right,\c!width=20em]%
+ \setupfieldtotalframed[\c!distance=\zeropoint,\c!inbetween=\vskip\d_framed_local_offset,\c!align=\v!right,\c!width=20em]%
\else\ifconditional\fieldishorizontal
- \setupfieldtotalframed[\c!distance=\@@localoffset,\c!inbetween=,\c!align=\c!left,\c!height=10ex]%
+ \setupfieldtotalframed[\c!distance=\d_framed_local_offset,\c!inbetween=,\c!align=\c!left,\c!height=10ex]%
\else
\setupfieldtotalframed[\c!distance=\zeropoint,\c!inbetween=,\c!align=\c!left]%
\fi\fi
diff --git a/Master/texmf-dist/tex/context/base/scrn-hlp.lua b/Master/texmf-dist/tex/context/base/scrn-hlp.lua
index 81d68840b15..5f8368c6dc3 100644
--- a/Master/texmf-dist/tex/context/base/scrn-hlp.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-hlp.lua
@@ -13,7 +13,6 @@ interactions.help = help
local a_help = attributes.private("help")
-local has_attribute = node.has_attribute
local copy_nodelist = node.copy_list
local hpack_nodelist = node.hpack
@@ -62,7 +61,7 @@ local function collect(head,used)
while head do
local id = head.id
if id == hlist_code then
- local a = has_attribute(head,a_help)
+ local a = head[a_help]
if a then
if not used then
used = { a }
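This file, like the scrp-*.lua and spac-*.lua hunks further down, drops the node.has_attribute accessor in favour of indexing the node directly with the attribute number, head[a_help]. A standalone mock of the two access styles, with a plain table standing in for LuaTeX node userdata, just to show they are interchangeable:

    -- Mock only: real nodes are LuaTeX userdata, not Lua tables.
    local has_attribute = function(n, a) return n[a] end   -- old style
    local a_help = 42                                      -- attribute numbers are plain integers
    local head   = { [a_help] = 7 }                        -- stand-in for a node
    assert(has_attribute(head, a_help) == head[a_help])    -- new style: head[a_help]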
diff --git a/Master/texmf-dist/tex/context/base/scrn-ini.lua b/Master/texmf-dist/tex/context/base/scrn-ini.lua
index 2836362dfd4..4831408f983 100644
--- a/Master/texmf-dist/tex/context/base/scrn-ini.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-ini.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['scrn-int'] = {
+if not modules then modules = { } end modules ['scrn-ini'] = {
version = 1.001,
comment = "companion to scrn-int.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['scrn-int'] = {
license = "see context related readme files"
}
+local next = next
+
interactions = { }
interactions.general = interactions.general or { }
local general = interactions.general
diff --git a/Master/texmf-dist/tex/context/base/scrn-ini.mkvi b/Master/texmf-dist/tex/context/base/scrn-ini.mkvi
index daf0a1b911c..88b52868667 100644
--- a/Master/texmf-dist/tex/context/base/scrn-ini.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-ini.mkvi
@@ -32,14 +32,19 @@
\installswitchcommandhandler \??interaction {interaction} \??interaction
\appendtoks
- \ifx\currentinteraction\previousinteraction
- \doifelse{\interactionparameter\c!state}\v!start
- {\locationtrue \setsystemmode \v!interaction}%
- {\locationfalse \resetsystemmode\v!interaction}%
- \fi
+ \ifx\previousinteraction\empty
+ \checkinteractionstate
+ \else\ifx\currentinteraction\previousinteraction
+ \checkinteractionstate
+ \fi\fi
\to \everysetupinteraction
-\def\doifelselocation
+\unexpanded\def\checkinteractionstate
+ {\doifelse{\interactionparameter\c!state}\v!start
+ {\locationtrue \setsystemmode \v!interaction}%
+ {\locationfalse \resetsystemmode\v!interaction}}
+
+\def\doifelselocation % expandable
{\iflocation
\expandafter\firstoftwoarguments
\else
@@ -144,16 +149,16 @@
\useinteractionstyleandcolor\c!style\c!color
\fi}
-\unexpanded\def\setlocationcolorspec#1#2% \resolver
+\unexpanded\def\setlocationcolorspecspecified#1#2% \resolver
{\ifnum#1=\plusone
\edef\askedcontrastcolor{#2\c!contrastcolor}%
\ifx\askedcontrastcolor\empty
- \doactivatecolor{#2\c!color}%
+ \colo_helpers_activate{#2\c!color}%
\else
- \doactivatecolor\askedcontrastcolor
+ \colo_helpers_activate\askedcontrastcolor
\fi
\else
- \doactivatecolor{#2\c!color}%
+ \colo_helpers_activate{#2\c!color}%
\fi}
\unexpanded\def\setlocationcolor {\setlocationcolorspecified \referencepagestate}
@@ -184,10 +189,12 @@
\scrn_identity_synchronize
\to \everysetupinteraction
+% this comes before starttext
+
\appendtoks % not interfaced i.e. english
- \doifvariable{document}{metadata:author}{\setupinteraction[author=\documentvariable{metadata:author}]}%
- \doifvariable{document}{metadata:title}{\setupinteraction[title=\documentvariable{metadata:title}]}%
- \doifvariable{document}{metadata:subject}{\setupinteraction[subject=\documentvariable{metadata:subject}]}%
+ \doifvariable{document}{metadata:author} {\setupinteraction [\c!author=\documentvariable{metadata:author}]}%
+ \doifvariable{document}{metadata:title} {\setupinteraction [\c!title=\documentvariable{metadata:title}]}%
+ \doifvariable{document}{metadata:subject}{\setupinteraction[\c!subject=\documentvariable{metadata:subject}]}%
\to \everysetupdocument
\setupinteraction
diff --git a/Master/texmf-dist/tex/context/base/scrn-pag.mkvi b/Master/texmf-dist/tex/context/base/scrn-pag.mkvi
index 66f197ffdc2..5bbdadda885 100644
--- a/Master/texmf-dist/tex/context/base/scrn-pag.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-pag.mkvi
@@ -150,12 +150,6 @@
{\global\setfalse\c_scrn_canvas_tight_page}%
\to \everysetupinteractionscreen
-\appendtoks
- \doifelse\@@pcstate\v!start
- \scrn_canvas_synchronize_simple
- \scrn_canvas_synchronize_complex
-\to \everyshipout
-
\setupinteractionscreen
[\c!width=\printpaperwidth,
\c!height=\printpaperheight,
diff --git a/Master/texmf-dist/tex/context/base/scrn-ref.lua b/Master/texmf-dist/tex/context/base/scrn-ref.lua
index 0b1dff2c12e..df71b6a97fe 100644
--- a/Master/texmf-dist/tex/context/base/scrn-ref.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-ref.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['scrn-int'] = {
+if not modules then modules = { } end modules ['scrn-ref'] = {
version = 1.001,
comment = "companion to scrn-int.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
diff --git a/Master/texmf-dist/tex/context/base/scrn-wid.lua b/Master/texmf-dist/tex/context/base/scrn-wid.lua
index 6fdc4fca0d6..4ad46761ee8 100644
--- a/Master/texmf-dist/tex/context/base/scrn-wid.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-wid.lua
@@ -75,7 +75,7 @@ function attachments.register(specification) -- beware of tag/registered mixup(t
checkbuffer(specification)
attachments[registered] = specification
if trace_attachments then
- report_attachments("registering '%s'",registered)
+ report_attachments("registering %a",registered)
end
return specification
end
@@ -85,7 +85,7 @@ function attachments.insert(specification)
local r = attachments[registered]
if r then
if trace_attachments then
- report_attachments("including registered '%s'",registered)
+ report_attachments("including registered %a",registered)
end
for k, v in next, r do
local s = specification[k]
@@ -94,7 +94,7 @@ function attachments.insert(specification)
end
end
elseif trace_attachments then
- report_attachments("including unregistered '%s'",registered)
+ report_attachments("including unregistered %a",registered)
end
checkbuffer(specification)
return nodeinjections.attachfile(specification)
@@ -167,11 +167,19 @@ function renderings.rendering(label)
end
end
-function renderings.var(label,key)
+local function var(label,key)
local rn = renderings[label]
- context(rn and rn[key] or "")
+ return rn and rn[key] or ""
end
+renderings.var = var
+
+function commands.renderingvar(label,key)
+ context(var(label,key))
+end
+
+commands.registerrendering = renderings.register
+
-- Rendering:
function commands.insertrenderingwindow(specification)
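The rendering hunk above turns renderings.var from a function that printed its result into one that returns it, and adds a thin commands.renderingvar wrapper that does the printing. A self-contained sketch of that split, with a stub standing in for ConTeXt's context() printer:

    local renderings = { demo = { type = "external", option = "" } }

    local function var(label, key)              -- returns, does not print
      local rn = renderings[label]
      return rn and rn[key] or ""
    end
    renderings.var = var

    local function context(s) io.write(tostring(s), "\n") end  -- stub printer

    local commands = { }
    function commands.renderingvar(label, key)  -- command bridge does the printing
      context(var(label, key))
    end

    commands.renderingvar("demo", "type")       -- prints: external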
diff --git a/Master/texmf-dist/tex/context/base/scrn-wid.mkvi b/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
index 2b75f83cd75..ae5f7c5567b 100644
--- a/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
@@ -66,6 +66,7 @@
\installcorenamespace{attachment}
\installcorenamespace{attachmentlocation}
+\installcorenamespace{attachmentmethod}
\installcommandhandler \??attachment {attachment} \??attachment
@@ -171,9 +172,9 @@
\def\scrn_attachment_inject[#registered][#settings]%
{\edef\currentattachmentregistered{#registered}%
\setupattachment[\currentattachment][#settings]%
- \expandcheckedcsname{scrn_attachment_method_}{\attachmentparameter\c!method}\v!normal}
+ \expandnamespaceparameter\??attachmentmethod\attachmentparameter\c!method\v!normal}
-\setvalue{scrn_attachment_method_\v!normal}%
+\setvalue{\??attachmentmethod\v!normal}%
{\edef\currentattachmentsymbol{\attachmentparameter\c!symbol}%
\edef\currentattachmentwidth {\attachmentparameter\c!width }%
\edef\currentattachmentheight{\attachmentparameter\c!height}%
@@ -216,7 +217,7 @@
\dp\b_scrn_attachment_link\currentattachmentdepth
\box\b_scrn_attachment_link}
-\setvalue{scrn_attachment_method_\v!hidden}%
+\setvalue{\??attachmentmethod\v!hidden}%
{\ctxcommand{insertattachment{
tag = "\currentattachment",
registered = "\currentattachmentregistered",
@@ -235,6 +236,7 @@
\setvalue{\??attachmentlocation\v!rightmargin}{\inrightmargin}
\setvalue{\??attachmentlocation\v!high }{\high}
\setvalue{\??attachmentlocation\v!none }{\scrn_attachment_collect}
+%setvalue{\??attachmentlocation\v!text }{\gobblenextargument} % gobbles the box
\def\scrn_attachment_collect#content%
{\global\setbox\b_scrn_attachment_collect\hbox\bgroup
@@ -421,7 +423,7 @@
\doifassignmentelse{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
- \def\scrn_comment_stop{\scrn_comment_inject\egroup}%
+ \unexpanded\def\scrn_comment_stop{\scrn_comment_inject\egroup}%
\grabbufferdatadirect\v!comment{\e!start\currentcomment}{\e!stop\currentcomment}}
\def\scrn_comment_start_ignore
@@ -432,8 +434,8 @@
\installcorenamespace{commentmethods}
-\def\scrn_comment_inject
- {\expandcheckedcsname\??commentmethods{\commentparameter\c!method}\v!normal}
+\unexpanded\def\scrn_comment_inject
+ {\expandnamespaceparameter\??commentmethods\commentparameter\c!method\v!normal}
%D Beware: comments symbols don't scale in acrobat (cf. spec but somewhat
%D weird, esp because for instance attachment symbols do scale).
@@ -476,35 +478,34 @@
\wd\b_scrn_comment_link\currentcommentwidth
\ht\b_scrn_comment_link\currentcommentheight
\dp\b_scrn_comment_link\currentcommentdepth
- \scrn_comment_place}
+ \expandnamespaceparameter\??commentlocation\commentparameter\c!location\s!unknown}
\setvalue{\??commentmethods\v!hidden}%
{}
% todo: dedicated margin classes
-\def\scrn_comment_place
- {\executeifdefined
- {\??commentlocation\commentparameter\c!location}\hbox
- {\hbox{\box\b_scrn_comment_link}}}
-
-\setvalue{\??commentlocation\v!inmargin }{\inmargin }
-\setvalue{\??commentlocation\v!leftedge }{\inleftedge }
-\setvalue{\??commentlocation\v!rightedge }{\inrightedge }
-\setvalue{\??commentlocation\v!leftmargin }{\inleftmargin }
-\setvalue{\??commentlocation\v!rightmargin}{\inrightmargin}
-\setvalue{\??commentlocation\v!high }{\high}
-\setvalue{\??commentlocation\v!none }{\scrn_comment_collect}
-
-\def\scrn_comment_collect#content%
+\setvalue{\??commentlocation\v!inmargin }{\inmargin {\box\b_scrn_comment_link}}
+\setvalue{\??commentlocation\v!leftedge }{\inleftedge {\box\b_scrn_comment_link}}
+\setvalue{\??commentlocation\v!rightedge }{\inrightedge {\box\b_scrn_comment_link}}
+\setvalue{\??commentlocation\v!leftmargin }{\inleftmargin {\box\b_scrn_comment_link}}
+\setvalue{\??commentlocation\v!rightmargin}{\inrightmargin{\box\b_scrn_comment_link}}
+\setvalue{\??commentlocation\v!high }{\high {\box\b_scrn_comment_link}}
+
+\setvalue{\??commentlocation\v!none}%
{\global\setbox\b_scrn_comment_collect\hbox\bgroup
\ifvoid\b_scrn_comment_collect\else
\box\b_scrn_comment_collect
\hskip\commentparameter\c!distance
\fi
- #content%
+ \box\b_scrn_comment_link
\egroup}
+\setvalue{\??commentlocation\s!unknown}%
+ {\ifvoid\b_scrn_comment_collect\else
+ \box\b_scrn_comment_collect
+ \fi}
+
\unexpanded\def\placecomments
{\ifvoid\b_scrn_comment_collect\else
\box\b_scrn_comment_collect
@@ -529,6 +530,13 @@
%D Todo: like external figures, also search on path,
%D although, they need to be present ar viewing time, so ...
+\installcorenamespace{externalsoundtracks}
+
+\installsetuponlycommandhandler \??externalsoundtracks {externalsoundtracks}
+
+\setupexternalsoundtracks
+ [\c!option=]
+
\unexpanded\def\useexternalsoundtrack
{\dodoubleargument\scrn_soundtrack_indeed}
@@ -541,17 +549,11 @@
\def\checksoundtrack#tag% yet untested in mkiv (also move management to lua)
{\iflocation
\ctxcommand{insertsoundclip{
- tag = "#tag",
- repeat = "\@@sdoption", % not entirely ok but works
+ tag = "#tag",
+ ["repeat"] = "\directexternalsoundtrackparameter\c!option", % todo: pass option as-is
}}%
\fi}
-\unexpanded\def\setupexternalsoundtracks
- {\dodoubleargument\getparameters[\??sd]}
-
-\setupexternalsoundtracks
- [\c!option=]
-
%D Renderings (not yet tested in mkvi):
% Todo: multiple instances and inheritance .. will be done when
@@ -568,7 +570,7 @@
\def\setinternalrendering{\dodoubleempty \scrn_rendering_set}
\def\scrn_rendering_use[#tag][#mime][#file][#option]%
- {\ctxlua{interactions.renderings.register {
+ {\ctxcommand{registerrendering{
type = "external",
label = "#tag",
mime = "#mime",
@@ -579,7 +581,7 @@
\def\scrn_rendering_set[#tag][#option]% {content}
{\bgroup
\dowithnextbox
- {\ctxlua{interactions.renderings.register {
+ {\ctxcommand{registerrendering{
type = "internal",
label = "#tag",
mime = "IRO", % brrr
@@ -591,28 +593,24 @@
\egroup}%
\hbox}
-\def\renderingtype #tag{\ctxlua{interactions.renderings.var("#tag","type")}}
-\def\renderingoption#tag{\ctxlua{interactions.renderings.var("#tag","option")}}
+\def\renderingtype #tag{\ctxcommand{renderingvar("#tag","type")}}
+\def\renderingoption#tag{\ctxcommand{renderingvar("#tag","option")}}
-\def\renderingwidth {8cm} % will become private
-\def\renderingheight{6cm} % will become private
+\newdimen\d_scrn_rendering_width \d_scrn_rendering_width 8cm
+\newdimen\d_scrn_rendering_height \d_scrn_rendering_height 6cm
-\unexpanded\def\definerenderingwindow
- {\dodoubleempty\scrn_rendering_define_window}
+\let\m_scrn_rendering_page\!!zerocount
-\def\scrn_rendering_define_window[#tag][#settings]%
- {\presetlocalframed[\??rw#tag]%
- \getparameters
- [\??rw#tag]%
- [\c!openpageaction=,\c!closepageaction=,%
- \c!width=\renderingwidth,\c!height=\renderingheight,%
- #settings]}
+\installcorenamespace{renderingwindow}
-\unexpanded\def\setuprenderingwindow
- {\dodoubleargument\scrn_rendering_setup_window}
+\installframedcommandhandler \??renderingwindow {renderingwindow} \??renderingwindow
-\def\scrn_rendering_setup_window[#tag]%
- {\getparameters[\??rw#tag]}
+\appendtoks
+ \letrenderingwindowparameter\c!openpageaction \empty
+ \letrenderingwindowparameter\c!closepageaction\empty
+ \setrenderingwindowparameter\c!width {\d_scrn_rendering_width }%
+ \setrenderingwindowparameter\c!height {\d_scrn_rendering_height}%
+\to \everypresetrenderingwindow
\unexpanded\def\placerenderingwindow
{\dodoubleempty\scrn_rendering_place_window}
@@ -622,31 +620,32 @@
\edef\currentrendering{\ifsecondargument#rendering\else#window\fi}%
\doifelse{\renderingtype\currentrendering}{internal} % an object
{\getobjectdimensions{IRO}\currentrendering
- \edef\renderingheight{\the\dimexpr\objectheight+\objectdepth\relax}%
- \edef\renderingwidth{\objectwidth}%
- \dogetobjectreferencepage{IRO}\currentrendering\renderingpage}%
- {\def\renderingheight{\vsize}%
- \def\renderingwidth{\hsize}%
- \def\renderingpage{\realpageno}}%
+ \d_scrn_rendering_height\dimexpr\objectheight+\objectdepth\relax
+ \d_scrn_rendering_width\objectwidth\relax
+ \dogetobjectreferencepage{IRO}\currentrendering\m_scrn_rendering_page}%
+ {\d_scrn_rendering_height\vsize
+ \d_scrn_rendering_width\hsize
+ \let\m_scrn_rendering_page\realpageno}%
% create fall back if needed
- \ifcsname\??rw#window\c!width\endcsname
- \def\currentrenderingwindow{#window}%
- \else
+ \edef\currentrenderingwindow{\namedrenderingwindowparameter{#window}\c!width}% stupid test, we need a proper one here
+ \ifx\currentrenderingwindow\empty
\let\currentrenderingwindow\s!default
- \definerenderingwindow[\currentrenderingwindow]%
+ \definerenderingwindow[\currentrenderingwindow]% why not global
+ \else
+ \edef\currentrenderingwindow{#window}%
\fi
-% todo
-% \handlereferenceactions{\getvalue{\??rw\currentrenderingwindow\c!openpageaction }}\dosetuprenderingopenpageaction
-% \handlereferenceactions{\getvalue{\??rw\currentrenderingwindow\c!closepageaction}}\dosetuprenderingclosepageaction
- \localframed
- [\??rw\currentrenderingwindow][\c!offset=\v!overlay]%
+ % todo:
+ % \handlereferenceactions{\renderingwindowparameter\c!openpageaction }\dosetuprenderingopenpageaction
+ % \handlereferenceactions{\renderingwindowparameter\c!closepageaction}\dosetuprenderingclosepageaction
+ \letrenderingwindowparameter\c!offset\v!overlay
+ \inheritedrenderingwindowframed
{\vfill
\ctxcommand{insertrenderingwindow {
label = "\currentrendering",
- width = \number\dimexpr\renderingwidth\relax,
- height = \number\dimexpr\renderingheight\relax,
+ width = \number\d_scrn_rendering_width,
+ height = \number\d_scrn_rendering_height,
option = "\renderingoption\currentrendering",
- page = \number\renderingpage,
+ page = \number\m_scrn_rendering_page,
}}\hfill}%
\egroup}
@@ -662,6 +661,8 @@
% %D \dorecurse{10}{\linkedlistelement[demo]{link \recurselevel} \page}
% %D \stoptyping
%
+% \definesystemvariable {lk} % LinK
+%
% \installcommandhandler\??lk{linkedlist}\??lk
%
% \let\setupbutton\setuplinkedlists\setuplinkedlist
diff --git a/Master/texmf-dist/tex/context/base/scrp-cjk.lua b/Master/texmf-dist/tex/context/base/scrp-cjk.lua
index 3463a8eb51a..5885c98671f 100644
--- a/Master/texmf-dist/tex/context/base/scrp-cjk.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-cjk.lua
@@ -6,9 +6,11 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
license = "see context related readme files"
}
+-- We can speed this up by preallocating nodes and copying them but the
+-- gain is not that large.
+
local utfchar = utf.char
-local has_attribute = node.has_attribute
local insert_node_after = node.insert_after
local insert_node_before = node.insert_before
local remove_node = nodes.remove
@@ -80,14 +82,14 @@ local function trace_detail(current,what)
local c_ch = current.char
if p_id == glyph_code then
local p_ch = p_id and prev.char
- report_details("[U+%05X %s %s] [%s] [U+%05X %s %s]",p_ch,utfchar(p_ch),hash[p_ch] or "unknown",what,c_ch,utfchar(c_ch),hash[c_ch] or "unknown")
+ report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
- report_details("[%s] [U+%05X %s %s]",what,c_ch,utfchar(c_ch),hash[c_ch] or "unknown")
+ report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
local p_ch = p_id and prev.char
- report_details("[U+%05X %s %s] [%s]",p_ch,utfchar(p_ch),hash[p_ch] or "unknown",what)
+ report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
end
@@ -408,7 +410,7 @@ local function process(head,first,last)
while true do
local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = has_attribute(first,a_prestat)
+ local a = first[a_prestat]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
@@ -417,7 +419,7 @@ local function process(head,first,last)
local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[has_attribute(first,a_preproc)])
+ set_parameters(font,numbertodataset[first[a_preproc]])
end
action(head,first)
end
@@ -428,7 +430,7 @@ local function process(head,first,last)
if p and n then
local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = has_attribute(p,a_prestat), has_attribute(n,a_prestat)
+ local pa, na = p[a_prestat], n[a_prestat]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -629,7 +631,7 @@ local function process(head,first,last)
while true do
local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = has_attribute(first,a_prestat)
+ local a = first[a_prestat]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
@@ -638,7 +640,7 @@ local function process(head,first,last)
local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[has_attribute(first,a_preproc)])
+ set_parameters(font,numbertodataset[first[a_preproc]])
end
action(head,first)
end
@@ -649,7 +651,7 @@ local function process(head,first,last)
if p and n then
local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = has_attribute(p,a_prestat), has_attribute(n,a_prestat)
+ local pa, na = p[a_prestat], n[a_prestat]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -851,7 +853,7 @@ local function process(head,first,last)
while true do
local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = has_attribute(first,a_prestat)
+ local a = first[a_prestat]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
@@ -860,7 +862,7 @@ local function process(head,first,last)
local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[has_attribute(first,a_preproc)])
+ set_parameters(font,numbertodataset[first[a_preproc]])
end
action(head,first)
end
@@ -871,7 +873,7 @@ local function process(head,first,last)
if p and n then
local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = has_attribute(p,a_prestat), has_attribute(n,a_prestat)
+ local pa, na = p[a_prestat], n[a_prestat]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
diff --git a/Master/texmf-dist/tex/context/base/scrp-eth.lua b/Master/texmf-dist/tex/context/base/scrp-eth.lua
index 75fad24811c..e1b8df43232 100644
--- a/Master/texmf-dist/tex/context/base/scrp-eth.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-eth.lua
@@ -9,7 +9,6 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
-- do it this way; so space settings like with cjk yet
-local has_attribute = node.has_attribute
local insert_node_before = node.insert_before
local nodepool = nodes.pool
@@ -38,7 +37,7 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[has_attribute(current,a_preproc)]
+ local data = numbertodataset[current[a_preproc]]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
@@ -107,7 +106,7 @@ local function process(head,first,last)
while current do
local id = current.id
if id == glyph_code then
- local prestat = has_attribute(current,a_prestat)
+ local prestat = current[a_prestat]
local category = numbertocategory[prestat]
if injector then
local action = injector[category]
diff --git a/Master/texmf-dist/tex/context/base/scrp-ini.lua b/Master/texmf-dist/tex/context/base/scrp-ini.lua
index e55dc0780ea..c375e2ec3d7 100644
--- a/Master/texmf-dist/tex/context/base/scrp-ini.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-ini.lua
@@ -16,173 +16,182 @@ local trace_injections = false trackers.register("scripts.injections", function
local report_preprocessing = logs.reporter("scripts","preprocessing")
-local allocate = utilities.storage.allocate
-
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local first_glyph = node.first_glyph or node.first_character
-local traverse_id = node.traverse_id
-
-local texsetattribute = tex.setattribute
-
-local nodecodes = nodes.nodecodes
-local unsetvalue = attributes.unsetvalue
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-
-local a_preproc = attributes.private('preproc')
-local a_prestat = attributes.private('prestat')
-
-local fontdata = fonts.hashes.identifiers
-
-local setnodecolor = nodes.tracers.colors.set
-
-scripts = scripts or { }
-local scripts = scripts
-
-scripts.hash = scripts.hash or { }
-local hash = scripts.hash
-
-local handlers = allocate()
-scripts.handlers = handlers
-
-storage.register("scripts/hash", hash, "scripts.hash")
-
-if not next(hash) then -- this might move to char-def
-
- hash = { -- no local
- --
- -- half width opening parenthesis
- [0x0028] = "half_width_open",
- [0x005B] = "half_width_open",
- [0x007B] = "half_width_open",
- [0x2018] = "half_width_open", -- ‘
- [0x201C] = "half_width_open", -- “
- --
- -- full width opening parenthesis
- --
- [0x3008] = "full_width_open", -- 〈 Left book quote
- [0x300A] = "full_width_open", -- 《 Left double book quote
- [0x300C] = "full_width_open", -- 「 left quote
- [0x300E] = "full_width_open", -- 『 left double quote
- [0x3010] = "full_width_open", -- 【 left double book quote
- [0x3014] = "full_width_open", -- 〔 left book quote
- [0x3016] = "full_width_open", --〖 left double book quote
- [0x3018] = "full_width_open", -- left tortoise bracket
- [0x301A] = "full_width_open", -- left square bracket
- [0x301D] = "full_width_open", -- reverse double prime qm
- [0xFF08] = "full_width_open", -- ( left parenthesis
- [0xFF3B] = "full_width_open", -- [ left square brackets
- [0xFF5B] = "full_width_open", -- { left curve bracket
- --
- -- half width closing parenthesis
- [0x0029] = "half_width_close",
- [0x005D] = "half_width_close",
- [0x007D] = "half_width_close",
- [0x2019] = "half_width_close", -- ’ right quote, right
- [0x201D] = "half_width_close", -- ” right double quote
- --
- -- full width closing parenthesis
- --
- [0x3009] = "full_width_close", -- 〉 book quote
- [0x300B] = "full_width_close", -- 》 double book quote
- [0x300D] = "full_width_close", -- 」 right quote, right
- [0x300F] = "full_width_close", -- 』 right double quote
- [0x3011] = "full_width_close", -- 】 right double book quote
- [0x3015] = "full_width_close", -- 〕 right book quote
- [0x3017] = "full_width_close", -- 〗 right double book quote
- [0x3019] = "full_width_close", -- right tortoise bracket
- [0x301B] = "full_width_close", -- right square bracket
- [0x301E] = "full_width_close", -- double prime qm
- [0x301F] = "full_width_close", -- low double prime qm
- [0xFF09] = "full_width_close", -- ) right parenthesis
- [0xFF3D] = "full_width_close", -- ] right square brackets
- [0xFF5D] = "full_width_close", -- } right curve brackets
-
- [0xFF62] = "half_width_open", -- left corner bracket
- [0xFF63] = "half_width_close", -- right corner bracket
- --
- -- vertical opening vertical
- --
- -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47,
- --
- -- vertical closing
- --
- -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48,
- --
- -- half width opening punctuation
- --
- -- <empty>
- --
- -- full width opening punctuation
- --
- -- 0x2236, -- ∶
- -- 0xFF0C, -- ,
- --
- -- half width closing punctuation_hw
- --
- [0x0021] = "half_width_close", -- !
- [0x002C] = "half_width_close", -- ,
- [0x002E] = "half_width_close", -- .
- [0x003A] = "half_width_close", -- :
- [0x003B] = "half_width_close", -- ;
- [0x003F] = "half_width_close", -- ?
- [0xFF61] = "half_width_close", -- hw full stop
- --
- -- full width closing punctuation
- [0x3001] = "full_width_close", -- 、
- [0x3002] = "full_width_close", -- 。
- [0xFF0C] = "full_width_close", -- ,
- [0xFF0E] = "full_width_close", -- .
- -- depends on font
- [0xFF01] = "full_width_close", -- !
- [0xFF1F] = "full_width_close", -- ?
- --
- [0xFF1A] = "full_width_punct", -- :
- [0xFF1B] = "full_width_punct", -- ;
- -- non starter
- --
- [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter",
- [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter",
- [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter",
- [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter",
- [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter",
- [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter",
- [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter",
- [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter",
- [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter",
- [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter",
- --
- -- hyphenation
- --
- [0x2026] = "hyphen", -- … ellipsis
- [0x2014] = "hyphen", -- — hyphen
- }
-
- for i=0x03040,0x030FF do if not hash[i] then hash[i] = "katakana" end end -- had tag 'chinese'
- for i=0x031F0,0x031FF do if not hash[i] then hash[i] = "katakana" end end -- had tag 'chinese'
- for i=0x032D0,0x032FE do if not hash[i] then hash[i] = "katakana" end end -- had tag 'chinese'
- for i=0x03400,0x04DFF do if not hash[i] then hash[i] = "chinese" end end
- for i=0x04E00,0x09FFF do if not hash[i] then hash[i] = "chinese" end end
- for i=0x0F900,0x0FAFF do if not hash[i] then hash[i] = "chinese" end end
- for i=0x0FF00,0x0FFEF do if not hash[i] then hash[i] = "katakana" end end -- had tag 'chinese'
- for i=0x20000,0x2A6DF do if not hash[i] then hash[i] = "chinese" end end
- for i=0x2F800,0x2FA1F do if not hash[i] then hash[i] = "chinese" end end
- for i=0x0AC00,0x0D7A3 do if not hash[i] then hash[i] = "korean" end end
- for i=0x01100,0x0115F do if not hash[i] then hash[i] = "jamo_initial" end end
- for i=0x01160,0x011A7 do if not hash[i] then hash[i] = "jamo_medial" end end
- for i=0x011A8,0x011FF do if not hash[i] then hash[i] = "jamo_final" end end
-
- for i=0x01200,0x0139F do hash[i] = "ethiopic_syllable" end
-
- hash[0x01361] = "ethiopic_word"
- hash[0x01362] = "ethiopic_sentence"
-
- scripts.hash = hash
+local utfchar = utf.char
+
+local first_glyph = node.first_glyph or node.first_character
+local traverse_id = node.traverse_id
+
+local texsetattribute = tex.setattribute
+
+local nodecodes = nodes.nodecodes
+local unsetvalue = attributes.unsetvalue
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+
+local a_preproc = attributes.private('preproc')
+local a_prestat = attributes.private('prestat')
+
+local fontdata = fonts.hashes.identifiers
+local allocate = utilities.storage.allocate
+local setnodecolor = nodes.tracers.colors.set
+local setmetatableindex = table.setmetatableindex
+
+scripts = scripts or { }
+local scripts = scripts
+
+scripts.hash = scripts.hash or { }
+local hash = scripts.hash
+
+local handlers = allocate()
+scripts.handlers = handlers
+
+local hash = { -- we could put these presets in char-def.lua
+ --
+ -- half width opening parenthesis
+ --
+ [0x0028] = "half_width_open",
+ [0x005B] = "half_width_open",
+ [0x007B] = "half_width_open",
+ [0x2018] = "half_width_open", -- ‘
+ [0x201C] = "half_width_open", -- “
+ --
+ -- full width opening parenthesis
+ --
+ [0x3008] = "full_width_open", -- 〈 Left book quote
+ [0x300A] = "full_width_open", -- 《 Left double book quote
+ [0x300C] = "full_width_open", -- 「 left quote
+ [0x300E] = "full_width_open", -- 『 left double quote
+ [0x3010] = "full_width_open", -- 【 left double book quote
+ [0x3014] = "full_width_open", -- 〔 left book quote
+ [0x3016] = "full_width_open", --〖 left double book quote
+ [0x3018] = "full_width_open", -- left tortoise bracket
+ [0x301A] = "full_width_open", -- left square bracket
+ [0x301D] = "full_width_open", -- reverse double prime qm
+ [0xFF08] = "full_width_open", -- ( left parenthesis
+ [0xFF3B] = "full_width_open", -- [ left square brackets
+ [0xFF5B] = "full_width_open", -- { left curve bracket
+ --
+ -- half width closing parenthesis
+ --
+ [0x0029] = "half_width_close",
+ [0x005D] = "half_width_close",
+ [0x007D] = "half_width_close",
+ [0x2019] = "half_width_close", -- ’ right quote, right
+ [0x201D] = "half_width_close", -- ” right double quote
+ --
+ -- full width closing parenthesis
+ --
+ [0x3009] = "full_width_close", -- 〉 book quote
+ [0x300B] = "full_width_close", -- 》 double book quote
+ [0x300D] = "full_width_close", -- 」 right quote, right
+ [0x300F] = "full_width_close", -- 』 right double quote
+ [0x3011] = "full_width_close", -- 】 right double book quote
+ [0x3015] = "full_width_close", -- 〕 right book quote
+ [0x3017] = "full_width_close", -- 〗 right double book quote
+ [0x3019] = "full_width_close", -- right tortoise bracket
+ [0x301B] = "full_width_close", -- right square bracket
+ [0x301E] = "full_width_close", -- double prime qm
+ [0x301F] = "full_width_close", -- low double prime qm
+ [0xFF09] = "full_width_close", -- ) right parenthesis
+ [0xFF3D] = "full_width_close", -- ] right square brackets
+ [0xFF5D] = "full_width_close", -- } right curve brackets
+ --
+ [0xFF62] = "half_width_open", -- left corner bracket
+ [0xFF63] = "half_width_close", -- right corner bracket
+ --
+ -- vertical opening vertical
+ --
+ -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47,
+ --
+ -- vertical closing
+ --
+ -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48,
+ --
+ -- half width opening punctuation
+ --
+ -- <empty>
+ --
+ -- full width opening punctuation
+ --
+ -- 0x2236, -- ∶
+ -- 0xFF0C, -- ,
+ --
+ -- half width closing punctuation_hw
+ --
+ [0x0021] = "half_width_close", -- !
+ [0x002C] = "half_width_close", -- ,
+ [0x002E] = "half_width_close", -- .
+ [0x003A] = "half_width_close", -- :
+ [0x003B] = "half_width_close", -- ;
+ [0x003F] = "half_width_close", -- ?
+ [0xFF61] = "half_width_close", -- hw full stop
+ --
+ -- full width closing punctuation
+ --
+ [0x3001] = "full_width_close", -- 、
+ [0x3002] = "full_width_close", -- 。
+ [0xFF0C] = "full_width_close", -- ,
+ [0xFF0E] = "full_width_close", -- ．
+ --
+ -- depends on font
+ --
+ [0xFF01] = "full_width_close", -- !
+ [0xFF1F] = "full_width_close", -- ?
+ --
+ [0xFF1A] = "full_width_punct", -- :
+ [0xFF1B] = "full_width_punct", -- ;
+ --
+ -- non starter
+ --
+ [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter",
+ [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter",
+ [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter",
+ [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter",
+ [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter",
+ [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter",
+ [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter",
+ [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter",
+ [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter",
+ [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter",
+ --
+ -- hyphenation
+ --
+ [0x2026] = "hyphen", -- … ellipsis
+ [0x2014] = "hyphen", -- — hyphen
+ --
+ [0x1361] = "ethiopic_word",
+ [0x1362] = "ethiopic_sentence",
+ --
+}
+local function provide(t,k)
+ local v
+ if not tonumber(k) then v = false
+ elseif (k >= 0x03040 and k <= 0x030FF)
+ or (k >= 0x031F0 and k <= 0x031FF)
+ or (k >= 0x032D0 and k <= 0x032FE)
+ or (k >= 0x0FF00 and k <= 0x0FFEF) then v = "katakana"
+ elseif (k >= 0x03400 and k <= 0x04DFF)
+ or (k >= 0x04E00 and k <= 0x09FFF)
+ or (k >= 0x0F900 and k <= 0x0FAFF)
+ or (k >= 0x20000 and k <= 0x2A6DF)
+ or (k >= 0x2F800 and k <= 0x2FA1F) then v = "chinese"
+ elseif (k >= 0x0AC00 and k <= 0x0D7A3) then v = "korean"
+ elseif (k >= 0x01100 and k <= 0x0115F) then v = "jamo_initial"
+ elseif (k >= 0x01160 and k <= 0x011A7) then v = "jamo_medial"
+ elseif (k >= 0x011A8 and k <= 0x011FF) then v = "jamo_final"
+ elseif (k >= 0x01200 and k <= 0x0139F) then v = "ethiopic_syllable"
+ else v = false
+ end
+ t[k] = v
+ return v
end
+setmetatableindex(hash,provide)
+
+scripts.hash = hash
+
local numbertodataset = allocate()
local numbertohandler = allocate()
@@ -208,11 +217,11 @@ function scripts.installmethod(handler)
local attributes = { }
local datasets = handler.datasets
if not datasets or not datasets.default then
- report_preprocessing("missing (default) dataset in script '%s'",name)
+ report_preprocessing("missing (default) dataset in script %a",name)
datasets.default = { } -- slower but an error anyway
end
for k, v in next, datasets do
- table.setmetatableindex(v,defaults)
+ setmetatableindex(v,defaults)
end
setmetatable(attributes, {
__index = function(t,k)
@@ -248,7 +257,7 @@ function scripts.installdataset(specification) -- global overload
if p then
defaultset = p
else
- report_preprocessing("dataset, unknown parent '%s' for method '%s'",parent,method)
+ report_preprocessing("dataset, unknown parent %a for method %a",parent,method)
end
end
setmetatable(dataset,defaultset)
@@ -261,13 +270,13 @@ function scripts.installdataset(specification) -- global overload
datasets[name] = dataset
end
else
- report_preprocessing("dataset, no default for method '%s'",method)
+ report_preprocessing("dataset, no default for method %a",method)
end
else
- report_preprocessing("dataset, no datasets for method '%s'",method)
+ report_preprocessing("dataset, no datasets for method %a",method)
end
else
- report_preprocessing("dataset, no method '%s'",method)
+ report_preprocessing("dataset, no method %a",method)
end
else
report_preprocessing("dataset, invalid specification") -- maybe report table
@@ -336,7 +345,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[has_attribute(n,a_prestat)]
+ local kind = numbertocategory[n[a_prestat]]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -368,7 +377,7 @@ end
-- we can have a fonts.hashes.originals
function scripts.preprocess(head)
- local start = first_glyph(head)
+ local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
return head, false
else
@@ -377,7 +386,7 @@ function scripts.preprocess(head)
while start do
local id = start.id
if id == glyph_code then
- local a = has_attribute(start,a_preproc)
+ local a = start[a_preproc]
if a then
if a ~= last_a then
if first then
@@ -415,15 +424,15 @@ function scripts.preprocess(head)
end
local h = hash[c]
if h then
- set_attribute(start,a_prestat,categorytonumber[h])
+ start[a_prestat] = categorytonumber[h]
if not first then
first, last = start, start
else
last = start
end
- -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing
+ -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing
ok = true
- -- end
+ -- end
elseif first then
if ok then
if trace_analyzing then
@@ -492,3 +501,90 @@ function scripts.preprocess(head)
return head, done
end
end
+
+-- new plugin:
+
+local registercontext = fonts.specifiers.registercontext
+local mergecontext = fonts.specifiers.mergecontext
+
+local otfscripts = characters.otfscripts
+
+local report_scripts = logs.reporter("scripts","auto feature")
+local trace_scripts = false trackers.register("scripts.autofeature",function(v) trace_scripts = v end)
+
+local autofontfeature = scripts.autofontfeature or { }
+scripts.autofontfeature = autofontfeature
+
+local cache_yes = { }
+local cache_nop = { }
+
+setmetatableindex(cache_yes,function(t,k) local v = { } t[k] = v return v end)
+setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
+
+-- beware: we need to tag a done (otherwise too many extra instances ... but how
+-- often unpack? wait till we have a bitmap
+--
+-- we can consider merging this in handlers.characters(head) at some point as there
+-- already check for the dynamic attribute so it saves a pass, however, then we also
+-- need to check for a_preproc there which nils the benefit
+--
+-- we can consider cheating: set all glyphs in a word as the first one but it's not
+-- playing nice
+
+function autofontfeature.handler(head)
+ for n in traverse_id(glyph_code,head) do
+ -- if n[a_preproc] then
+ -- -- already tagged by script feature, maybe some day adapt
+ -- else
+ local char = n.char
+ local script = otfscripts[char]
+ if script then
+ local dynamic = n[0] or 0
+ local font = n.font
+ if dynamic > 0 then
+ local slot = cache_yes[font]
+ local attr = slot[script]
+ if not attr then
+ attr = mergecontext(dynamic,name,2)
+ slot[script] = attr
+ if trace_scripts then
+ report_scripts("script: %s, trigger %C, dynamic: %a, variant: %a",script,char,attr,"extended")
+ end
+ end
+ if attr ~= 0 then
+ n[0] = attr
+ -- maybe set preproc when associated
+ end
+ else
+ local slot = cache_nop[font]
+ local attr = slot[script]
+ if not attr then
+ attr = registercontext(font,script,2)
+ slot[script] = attr
+ if trace_scripts then
+ report_scripts("script: %s, trigger %C, dynamic: %s, variant: %a",script,char,attr,"normal")
+ end
+ end
+ if attr ~= 0 then
+ n[0] = attr
+ -- maybe set preproc when associated
+ end
+ end
+ end
+ -- end
+ end
+ return head
+end
+
+function autofontfeature.enable()
+ report_scripts("globally enabled")
+ nodes.tasks.enableaction("processors","scripts.autofontfeature.handler")
+end
+
+function autofontfeature.disable()
+ report_scripts("globally disabled")
+ nodes.tasks.disableaction("processors","scripts.autofontfeature.handler")
+end
+
+commands.enableautofontscript = autofontfeature.enable
+commands.disableautofontscript = autofontfeature.disable
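The largest change in scrp-ini.lua above replaces the loops that pre-filled the katakana/chinese/korean/jamo/ethiopic ranges with a provide() function installed as an __index metamethod, so a codepoint is classified on first lookup and the result is cached in the hash. A plain-Lua sketch of that pattern with a few of the same ranges (table.setmetatableindex is ConTeXt's helper; standard setmetatable is used here instead):

    local hash = { [0x2026] = "hyphen" }          -- explicit presets stay plain entries

    setmetatable(hash, { __index = function(t, k)
      local v = false
      if type(k) == "number" then
        if     k >= 0x3040 and k <= 0x30FF then v = "katakana"
        elseif k >= 0x4E00 and k <= 0x9FFF then v = "chinese"
        elseif k >= 0xAC00 and k <= 0xD7A3 then v = "korean"
        end
      end
      t[k] = v                                    -- cache: next lookup is a plain hit
      return v
    end })

    assert(hash[0x30A2] == "katakana")
    assert(hash[0x2026] == "hyphen")
    assert(hash[0x0041] == false)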
diff --git a/Master/texmf-dist/tex/context/base/scrp-ini.mkiv b/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
index 8182368d787..bf8b580c79a 100644
--- a/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
@@ -30,6 +30,8 @@
\installcommandhandler \??script {script} \??script
+\let\setupscripts\setupscript % be nice
+
% presets are global and are currently defined in lua
\appendtoks
@@ -64,4 +66,18 @@
\definescript[latin] [\c!method=] % resets the attribute (also currentscript)
+% a new trick (at some point we will predefine more scripts and consider a link with the above)
+
+\appendtoks
+ \ifx\currentscript\empty
+ \doifelse{\scriptparameter\s!features}\v!auto\enableautofontscript\disableautofontscript
+ \fi
+\to \everysetupscript
+
+\unexpanded\def\enableautofontscript {\ctxcommand{enableautofontscript ()}}
+\unexpanded\def\disableautofontscript{\ctxcommand{disableautofontscript()}}
+
+\definefontfeature[latn][script=latn]
+\definefontfeature[grek][script=grek]
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/sort-ini.lua b/Master/texmf-dist/tex/context/base/sort-ini.lua
index 8640ba825d8..479d1c48957 100644
--- a/Master/texmf-dist/tex/context/base/sort-ini.lua
+++ b/Master/texmf-dist/tex/context/base/sort-ini.lua
@@ -43,11 +43,8 @@ with any demand so nothing here is frozen.</p>
have language etc properties that then can be used.</p>
]]--
-
-local utf = unicode.utf8
local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat
-local utfbyte, utfchar = utf.byte, utf.char
-local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
+local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values
local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset
local allocate = utilities.storage.allocate
@@ -126,9 +123,9 @@ local data, language, method, digits
local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
local thefirstofsplit
-local mte = {
+local mte = { -- todo: assign to t
__index = function(t,k)
- if k ~= "" and utfbyte(k) < digitsoffset then
+ if k and k ~= "" and utfbyte(k) < digitsoffset then -- k check really needed (see s-lan-02)
local el
if k then
local l = lower[k] or lcchars[k]
@@ -150,8 +147,10 @@ local mte = {
end
el = el or k
end
- -- rawset(t,k,el) also make a copy?
+ -- rawset(t,k,el)
return el
+ else
+ -- rawset(t,k,k)
end
end
}
@@ -170,12 +169,12 @@ local function preparetables(data)
local n, nn
if k then
if trace_tests then
- report_sorters("simplifing character 0x%04X %s",utfbyte(k),k)
+ report_sorters("simplifing character %C",k)
end
local l = lower[k] or lcchars[k]
if l then
if trace_tests then
- report_sorters(" 1 lower: %s",l)
+ report_sorters(" 1 lower: %C",l)
end
local ml = rawget(t,l)
if ml then
@@ -186,7 +185,7 @@ local function preparetables(data)
n[nn] = ml[i] + (t.__delta or 0)
end
if trace_tests then
- report_sorters(" 2 order: %s",concat(n," "))
+ report_sorters(" 2 order: % t",n)
end
end
end
@@ -194,7 +193,7 @@ local function preparetables(data)
local s = shchars[k] -- maybe all components?
if s and s ~= k then
if trace_tests then
- report_sorters(" 3 shape: %s",s)
+ report_sorters(" 3 shape: %C",s)
end
n = { }
nn = 0
@@ -202,7 +201,7 @@ local function preparetables(data)
local ml = rawget(t,l)
if ml then
if trace_tests then
- report_sorters(" 4 keep: %s",l)
+ report_sorters(" 4 keep: %C",l)
end
if ml then
for i=1,#ml do
@@ -214,7 +213,7 @@ local function preparetables(data)
l = lower[l] or lcchars[l]
if l then
if trace_tests then
- report_sorters(" 5 lower: %s",l)
+ report_sorters(" 5 lower: %C",l)
end
local ml = rawget(t,l)
if ml then
@@ -226,32 +225,34 @@ local function preparetables(data)
end
end
end
- else -- we probably never enter this branch
- -- fschars returns a single char
---~ s = fschars[k]
---~ if s and s ~= k then
---~ if trace_tests then
---~ report_sorters(" 6 split: %s",s)
---~ end
---~ local ml = rawget(t,s)
---~ if ml then
---~ n = { }
---~ nn = 0
---~ for i=1,#ml do
---~ nn = nn + 1
---~ n[nn] = ml[i]
---~ end
---~ end
---~ end
+ else
+ -- -- we probably never enter this branch
+ -- -- fschars returns a single char
+ --
+ -- s = fschars[k]
+ -- if s and s ~= k then
+ -- if trace_tests then
+ -- report_sorters(" 6 split: %s",s)
+ -- end
+ -- local ml = rawget(t,s)
+ -- if ml then
+ -- n = { }
+ -- nn = 0
+ -- for i=1,#ml do
+ -- nn = nn + 1
+ -- n[nn] = ml[i]
+ -- end
+ -- end
+ -- end
local b = utfbyte(k)
n = decomposed[b] or { b }
if trace_tests then
- report_sorters(" 6 split: %s",utf.string(b))
+ report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
end
end
if n then
if trace_tests then
- report_sorters(" 7 order: %s",concat(n," "))
+ report_sorters(" 7 order: % t",n)
end
else
n = noorder
@@ -303,7 +304,7 @@ local function setlanguage(l,m,d,u)
method = (m ~= "" and m) or data.method or constants.defaultmethod
digits = (d ~= "" and d) or data.digits or constants.defaultdigits
if trace_tests then
- report_sorters("setting language '%s', method '%s', digits '%s'",language,method,digits)
+ report_sorters("setting language %a, method %a, digits %a",language,method,digits)
end
replacements = data.replacements
entries = data.entries
@@ -329,12 +330,12 @@ local function setlanguage(l,m,d,u)
nofsequence = nofsequence + 1
sequence[nofsequence] = s
else
- report_sorters("invalid sorter method '%s' in '%s'",s,method)
+ report_sorters("invalid sorter method %a in %a",s,method)
end
end
data.sequence = sequence
if trace_tests then
- report_sorters("using sort sequence: %s",concat(sequence," "))
+ report_sorters("using sort sequence: % t",sequence)
end
--
return data
@@ -352,94 +353,6 @@ end
-- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them
---~ local function basicsort(sort_a,sort_b) -- todo: local #
---~ if not sort_a or not sort_b then
---~ return 0
---~ elseif #sort_a > #sort_b then
---~ if #sort_b == 0 then
---~ return 1
---~ else
---~ for i=1,#sort_b do
---~ local ai, bi = sort_a[i], sort_b[i]
---~ if ai > bi then
---~ return 1
---~ elseif ai < bi then
---~ return -1
---~ end
---~ end
---~ return 1
---~ end
---~ elseif #sort_a < #sort_b then
---~ if #sort_a == 0 then
---~ return -1
---~ else
---~ for i=1,#sort_a do
---~ local ai, bi = sort_a[i], sort_b[i]
---~ if ai > bi then
---~ return 1
---~ elseif ai < bi then
---~ return -1
---~ end
---~ end
---~ return -1
---~ end
---~ elseif #sort_a == 0 then
---~ return 0
---~ else
---~ for i=1,#sort_a do
---~ local ai, bi = sort_a[i], sort_b[i]
---~ if ai > bi then
---~ return 1
---~ elseif ai < bi then
---~ return -1
---~ end
---~ end
---~ return 0
---~ end
---~ end
-
---~ function comparers.basic(a,b) -- trace ea and eb
---~ local ea, eb = a.split, b.split
---~ local na, nb = #ea, #eb
---~ if na == 0 and nb == 0 then
---~ -- simple variant (single word)
---~ local result = 0
---~ for j=1,#sequence do
---~ local m = sequence[j]
---~ result = basicsort(ea[m],eb[m])
---~ if result ~= 0 then
---~ return result
---~ end
---~ end
---~ return result
---~ else
---~ -- complex variant, used in register (multiple words)
---~ local result = 0
---~ for i=1,nb < na and nb or na do
---~ local eai, ebi = ea[i], eb[i]
---~ for j=1,#sequence do
---~ local m = sequence[j]
---~ result = basicsort(eai[m],ebi[m])
---~ if result ~= 0 then
---~ return result
---~ end
---~ end
---~ if result ~= 0 then
---~ return result
---~ end
---~ end
---~ if result ~= 0 then
---~ return result
---~ elseif na > nb then
---~ return 1
---~ elseif nb > na then
---~ return -1
---~ else
---~ return 0
---~ end
---~ end
---~ end
-
local function basicsort(sort_a,sort_b)
if sort_a and sort_b then
local na = #sort_a
@@ -527,9 +440,10 @@ local function numify(s)
return utfchar(s)
end
-function sorters.strip(str) -- todo: only letters and such utf.gsub("([^%w%d])","")
- if str then
- str = gsub(str,"\\[\"\'~^`]*","") -- \"e
+function sorters.strip(str) -- todo: only letters and such
+ if str and str ~= "" then
+ -- todo: make a decent lpeg
+ str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy
str = gsub(str,"\\%S*","") -- the rest
str = gsub(str,"%s","\001") -- can be option
str = gsub(str,"[%s%[%](){}%$\"\']*","")
@@ -617,12 +531,12 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
p_case[n] = l
end
char[n], byte[n] = sc, b
-local fs = fscodes[b] or b
+ local fs = fscodes[b] or b
local msc = m_mappings[sc]
if msc ~= noorder then
-if not msc then
- msc = m_mappings[fs]
-end
+ if not msc then
+ msc = m_mappings[fs]
+ end
for i=1,#msc do
nm = nm + 1
m_mapping[nm] = msc[i]
@@ -630,9 +544,9 @@ end
end
local zsc = z_mappings[sc]
if zsc ~= noorder then
-if not zsc then
- zsc = z_mappings[fs]
-end
+ if not zsc then
+ zsc = z_mappings[fs]
+ end
for i=1,#zsc do
nz = nz + 1
z_mapping[nz] = zsc[i]
@@ -640,9 +554,9 @@ end
end
local psc = p_mappings[sc]
if psc ~= noorder then
-if not psc then
- psc = p_mappings[fs]
-end
+ if not psc then
+ psc = p_mappings[fs]
+ end
for i=1,#psc do
np = np + 1
p_mapping[np] = psc[i]
@@ -650,24 +564,22 @@ end
end
end
end
-
- -- only those needed that are part of a sequence
-
---~ local b = byte[1]
---~ if b then
---~ -- we set them to the first split code (korean)
---~ local fs = fscodes[b] or b
---~ if #m_mapping == 0 then
---~ m_mapping = { m_mappings[fs][1] }
---~ end
---~ if #z_mapping == 0 then
---~ z_mapping = { z_mappings[fs][1] }
---~ end
---~ if #p_mapping == 0 then
---~ p_mapping = { p_mappings[fs][1] }
---~ end
---~ end
-
+ -- -- only those needed that are part of a sequence
+ --
+ -- local b = byte[1]
+ -- if b then
+ -- -- we set them to the first split code (korean)
+ -- local fs = fscodes[b] or b
+ -- if #m_mapping == 0 then
+ -- m_mapping = { m_mappings[fs][1] }
+ -- end
+ -- if #z_mapping == 0 then
+ -- z_mapping = { z_mappings[fs][1] }
+ -- end
+ -- if #p_mapping == 0 then
+ -- p_mapping = { p_mappings[fs][1] }
+ -- end
+ -- end
local t = {
ch = char,
uc = byte,
@@ -741,7 +653,7 @@ function sorters.sort(entries,cmp)
first = " "
else
s = first
- report_sorters(">> %s 0x%05X (%s 0x%05X)",first,utfbyte(first),letter,utfbyte(letter))
+ report_sorters(">> %C (%C)",first,letter)
end
report_sorters(" %s | %s",packch(entry),packuc(entry))
end
diff --git a/Master/texmf-dist/tex/context/base/sort-lan.lua b/Master/texmf-dist/tex/context/base/sort-lan.lua
index 6a0cb8eab07..d2fa276d79a 100644
--- a/Master/texmf-dist/tex/context/base/sort-lan.lua
+++ b/Master/texmf-dist/tex/context/base/sort-lan.lua
@@ -3,9 +3,13 @@ if not modules then modules = { } end modules ['sort-lan'] = {
comment = "companion to sort-lan.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
+-- todo: look into uts#10 (2012) ... some experiments ... something
+-- to finish in winter.
+
-- Many vectors were supplied by Wolfgang Schuster and Philipp
-- Gesang. However this is a quite adapted and reformatted variant
-- so it needs some checking. Other users provides tables and
@@ -86,8 +90,20 @@ definitions['Duden'] = {
replacements = { { "ß", 's' } },
}
-definitions['de'] = { parent = 'default' } -- new german
+-- definitions['de'] = { parent = 'default' } -- new german
+
+definitions['de'] = {
+ parent = 'default',
+ replacements = {
+ { "ä", 'ae' }, { "Ä", 'Ae' },
+ { "ö", 'oe' }, { "Ö", 'Oe' },
+ { "ü", 'ue' }, { "Ü", 'Ue' },
+ { "ß", 's' },
+ },
+}
+
definitions['deo'] = { parent = 'de' } -- old german
+
definitions['de-DE'] = { parent = 'de' } -- german - Germany
definitions['de-CH'] = { parent = 'de' } -- german - Swiss
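The new 'de' definition above no longer just inherits from 'default'; it rewrites umlauts and sharp s to their base pairs before comparison. The real sorter applies these pairs through its splitter machinery, so the sketch below only illustrates the intended mapping itself:

    local replacements = {
      { "ä", "ae" }, { "Ä", "Ae" },
      { "ö", "oe" }, { "Ö", "Oe" },
      { "ü", "ue" }, { "Ü", "Ue" },
      { "ß", "s"  },
    }

    local function apply(str)                  -- illustration only
      for i = 1, #replacements do
        local pair = replacements[i]
        str = str:gsub(pair[1], pair[2])
      end
      return str
    end

    assert(apply("Müller") == "Mueller")
    assert(apply("Straße") == "Strase")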
diff --git a/Master/texmf-dist/tex/context/base/spac-adj.lua b/Master/texmf-dist/tex/context/base/spac-adj.lua
index 4cba2f91eb2..c87a9d17f62 100644
--- a/Master/texmf-dist/tex/context/base/spac-adj.lua
+++ b/Master/texmf-dist/tex/context/base/spac-adj.lua
@@ -8,17 +8,16 @@ if not modules then modules = { } end modules ['spac-adj'] = {
-- sort of obsolete code
-local attribute = attributes.private('graphicvadjust')
+local a_vadjust = attributes.private('graphicvadjust')
-local nodecodes = nodes.nodecodes
+local nodecodes = nodes.nodecodes
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
-local remove_node = nodes.remove
-local hpack_node = node.hpack
-local vpack_node = node.vpack
-local has_attribute = node.has_attribute
+local remove_node = nodes.remove
+local hpack_node = node.hpack
+local vpack_node = node.vpack
function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionchain for mvl only
if groupcode == "" then -- mvl only
@@ -26,7 +25,7 @@ function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionc
while h do
local id = h.id
if id == hlist_code or id == vlist_code then
- local a = has_attribute(h,attribute)
+ local a = h[a_vadjust]
if a then
if p then
local n
diff --git a/Master/texmf-dist/tex/context/base/spac-ali.lua b/Master/texmf-dist/tex/context/base/spac-ali.lua
index effd26fe482..ceb278433f2 100644
--- a/Master/texmf-dist/tex/context/base/spac-ali.lua
+++ b/Master/texmf-dist/tex/context/base/spac-ali.lua
@@ -15,12 +15,12 @@ local prependaction = tasks.prependaction
local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
local slide_nodes = node.slide
local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
-local link_nodes = nodes.link
+local unsetvalue = attributes.unsetvalue
+
+local concat_nodes = nodes.concat
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -61,7 +61,7 @@ local function handler(head,leftpage,realpageno)
local id = current.id
if id == hlist_code then
if current.subtype == line_code then
- local a = has_attribute(current,a_realign)
+ local a = current[a_realign]
if not a or a == 0 then
-- skip
else
@@ -77,22 +77,22 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- current.list = hpack_nodes(link_nodes(current.list,new_stretch(3)),current.width,"exactly")
+ current.list = hpack_nodes(concat_nodes{current.list,new_stretch(3)},current.width,"exactly")
if trace_realign then
- report_realign("flush left: align %s, page %s, realpage %s",align,pageno,realpageno)
+ report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- current.list = hpack_nodes(link_nodes(new_stretch(3),current.list),current.width,"exactly")
+ current.list = hpack_nodes(concat_nodes{new_stretch(3),current.list},current.width,"exactly")
if trace_realign then
- report_realign("flush right: align %s, page %s, realpage %s",align,pageno,realpageno)
+ report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif trace_realign then
- report_realign("invalid: align %s, page %s, realpage %s",align,pageno,realpageno)
+ report_realign("invalid flushing, align %a, page %a, realpage %a",align,pageno,realpageno)
end
done = true
nofrealigned = nofrealigned + 1
end
- unset_attribute(current,a_realign)
+ current[a_realign] = unsetvalue
end
end
handler(current.list,leftpage,realpageno)
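
Both this file and spac-adj.lua above replace the node.has_attribute / node.set_attribute / node.unset_attribute helpers with direct indexing of the node by its attribute number, clearing by assigning attributes.unsetvalue. A small sketch of the two idioms, assuming a LuaTeX run (node userdata and attribute indexing are LuaTeX features; the attribute number below is purely illustrative):

local a_demo     = 127                                  -- hypothetical attribute number
local unsetvalue = attributes and attributes.unsetvalue or -0x7FFFFFFF

local n = node.new("glyph")

-- old idiom, through helper functions:
node.set_attribute(n, a_demo, 1)
local a = node.has_attribute(n, a_demo)                 -- 1

-- new idiom, through direct indexing as in the hunks above:
n[a_demo] = 1
local b = n[a_demo]                                     -- 1
n[a_demo] = unsetvalue                                  -- clears the attribute again

node.free(n)
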
diff --git a/Master/texmf-dist/tex/context/base/spac-ali.mkiv b/Master/texmf-dist/tex/context/base/spac-ali.mkiv
index 57806b6f240..0f9b211866d 100644
--- a/Master/texmf-dist/tex/context/base/spac-ali.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-ali.mkiv
@@ -151,37 +151,15 @@
% Tolerance and hyphenation
-\newcount\hyphenminoffset
+\ifdefined\lesshyphens \else \let\lesshyphens\relax \fi
+\ifdefined\morehyphens \else \let\morehyphens\relax \fi
+\ifdefined\nohyphens \else \let\nohyphens \relax \fi
+\ifdefined\dohyphens \else \let\dohyphens \relax \fi
-\ifx\sethyphenationvariables\undefined \let\sethyphenationvariables\relax \fi
-
-\unexpanded\def\lesshyphens
- {\advance\hyphenminoffset\plusone
- \sethyphenationvariables}
-
-\unexpanded\def\morehyphens
- {\ifcase\hyphenminoffset \else
- \advance\hyphenminoffset\minusone
- \fi
- \sethyphenationvariables}
-
-
-\unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but supress application
- {\ifx\dohyphens\relax
- \edef\dohyphens
- {\hyphenpenalty \the\hyphenpenalty
- \exhyphenpenalty\the\exhyphenpenalty
- \relax}%
- \fi
- \hyphenpenalty\plustenthousand
- \exhyphenpenalty\plustenthousand}
-
-\let\dohyphens\relax
-
-\newconstant\c_spac_tolerance_default \c_spac_tolerance_default = 1500 % shouldn't that be 200
-\newconstant\c_spac_tolerance_minimum \c_spac_tolerance_minimum = 1500
-\newconstant\c_spac_tolerance_normal \c_spac_tolerance_normal = 3000
-\newconstant\c_spac_tolerance_extreme \c_spac_tolerance_extreme = 4500
+\newconstant\c_spac_tolerance_default \c_spac_tolerance_default 1500 % shouldn't that be 200
+\newconstant\c_spac_tolerance_minimum \c_spac_tolerance_minimum 1500
+\newconstant\c_spac_tolerance_normal \c_spac_tolerance_normal 3000
+\newconstant\c_spac_tolerance_extreme \c_spac_tolerance_extreme 4500
\def\spac_align_set_raggedness_left {\plustwo\bodyfontsize}
\def\spac_align_set_raggedness_right {\plustwo\bodyfontsize}
@@ -262,13 +240,13 @@
\def\v_spac_align_fill_amount_double {\plustwo fil}
\def\v_spac_align_fill_amount_space {\plustwo fil} % can be added to xspace if we have a key
\def\v_spac_align_fill_amount_half {.5fil}
-\let\v_spac_align_space_amount \interwordspace
+\let\v_spac_align_space_amount \interwordspace
\def\v_spac_align_space_amount_x {.5\emwidth}
\newskip\s_zero_plus_one_fil \s_zero_plus_one_fil = 0pt plus 1fil
\newskip\s_zero_plus_zero \s_zero_plus_zero = 0pt plus 0pt
-% \!!plus ... slower than inline
+% \s!plus ... slower than inline
\unexpanded\def\spac_align_set_horizontal_none % should also relax \updateraggedskips
{\raggedstatus\zerocount
@@ -285,8 +263,8 @@
{\setraggedness\spac_align_set_raggedness_left
\raggedstatus\plusone
\attribute\alignstateattribute\plusone
- \leftskip \plusone\leftskip \!!plus\spac_align_set_raggedness_left
- \rightskip \plusone\rightskip\!!plus\zeropoint
+ \leftskip \plusone\leftskip \s!plus\spac_align_set_raggedness_left
+ \rightskip \plusone\rightskip\s!plus\zeropoint
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -297,8 +275,8 @@
{\setraggedness\spac_align_set_raggedness_middle
\raggedstatus\plustwo
\attribute\alignstateattribute\plustwo
- \leftskip \plusone\leftskip \!!plus\spac_align_set_raggedness_middle
- \rightskip \plusone\rightskip\!!plus\spac_align_set_raggedness_middle
+ \leftskip \plusone\leftskip \s!plus\spac_align_set_raggedness_middle
+ \rightskip \plusone\rightskip\s!plus\spac_align_set_raggedness_middle
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -309,8 +287,8 @@
{\setraggedness\spac_align_set_raggedness_right
\raggedstatus\plusthree
\attribute\alignstateattribute\plusthree
- \leftskip \plusone\leftskip \!!plus\zeropoint
- \rightskip \plusone\rightskip\!!plus\spac_align_set_raggedness_right
+ \leftskip \plusone\leftskip \s!plus\zeropoint
+ \rightskip \plusone\rightskip\s!plus\spac_align_set_raggedness_right
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_one_fil
@@ -320,8 +298,8 @@
\unexpanded\def\spac_align_set_horizontal_very_left
{\raggedstatus\plusone
\attribute\alignstateattribute\plusone
- \leftskip \plusone\leftskip \!!plus\v_spac_align_fill_amount
- \rightskip \plusone\rightskip\!!plus\zeropoint
+ \leftskip \plusone\leftskip \s!plus\v_spac_align_fill_amount
+ \rightskip \plusone\rightskip\s!plus\zeropoint
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -331,8 +309,8 @@
\unexpanded\def\spac_align_set_horizontal_very_center
{\raggedstatus\plustwo
\attribute\alignstateattribute\plustwo
- \leftskip \plusone\leftskip \!!plus\v_spac_align_fill_amount
- \rightskip \plusone\rightskip\!!plus\v_spac_align_fill_amount
+ \leftskip \plusone\leftskip \s!plus\v_spac_align_fill_amount
+ \rightskip \plusone\rightskip\s!plus\v_spac_align_fill_amount
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -342,8 +320,8 @@
\unexpanded\def\spac_align_set_horizontal_very_right
{\raggedstatus\plusthree
\attribute\alignstateattribute\plusthree
- \leftskip \plusone\leftskip \!!plus\zeropoint
- \rightskip \plusone\rightskip\!!plus\v_spac_align_fill_amount
+ \leftskip \plusone\leftskip \s!plus\zeropoint
+ \rightskip \plusone\rightskip\s!plus\v_spac_align_fill_amount
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -354,8 +332,8 @@
{\setraggedness\spac_align_set_raggedness_middle
\raggedstatus\plustwo
\attribute\alignstateattribute\plustwo
- \leftskip \plusone\leftskip \!!plus\v_spac_align_fill_amount_half
- \rightskip \plusone\rightskip\!!plus\v_spac_align_fill_amount_half
+ \leftskip \plusone\leftskip \s!plus\v_spac_align_fill_amount_half
+ \rightskip \plusone\rightskip\s!plus\v_spac_align_fill_amount_half
\spaceskip \v_spac_align_space_amount
\xspaceskip \v_spac_align_space_amount_x
\parfillskip\s_zero_plus_zero
@@ -365,11 +343,11 @@
\unexpanded\def\spac_align_set_horizontal_centered_last_line
{\raggedstatus\zerocount
\attribute\alignstateattribute\attributeunsetvalue
- \leftskip \plusone\leftskip \!!plus\v_spac_align_fill_amount\relax
- \rightskip \plusone\rightskip\!!plus\v_spac_align_fill_amount_negative\relax
+ \leftskip \plusone\leftskip \s!plus\v_spac_align_fill_amount\relax
+ \rightskip \plusone\rightskip\s!plus\v_spac_align_fill_amount_negative\relax
\spaceskip \zeropoint\relax
\xspaceskip \zeropoint\relax
- \parfillskip\zeropoint\!!plus\v_spac_align_fill_amount_double\relax
+ \parfillskip\zeropoint\s!plus\v_spac_align_fill_amount_double\relax
\parindent \zeropoint
\relax}
@@ -377,8 +355,8 @@
{\tttf % brrr
\raggedstatus\plusthree
\attribute\alignstateattribute\plusthree
- \leftskip \plusone\leftskip \!!plus\zeropoint\relax
- \rightskip \plusone\rightskip\!!plus\spac_align_set_raggedness_right\relax
+ \leftskip \plusone\leftskip \s!plus\zeropoint\relax
+ \rightskip \plusone\rightskip\s!plus\spac_align_set_raggedness_right\relax
\spaceskip \zeropoint\relax
\xspaceskip \zeropoint\relax
\parfillskip\s_zero_plus_zero
@@ -386,7 +364,7 @@
\relax}
\unexpanded\def\spac_align_set_horizontal_extra
- {\xspaceskip\zeropoint\!!plus\v_spac_align_fill_amount_space\relax}
+ {\xspaceskip\zeropoint\s!plus\v_spac_align_fill_amount_space\relax}
\def\spac_align_flush_horizontal
{\ifcase\c_spac_align_state_horizontal
@@ -438,6 +416,8 @@
\or
% 7 centered last line
\spac_align_set_horizontal_centered_last_line
+ \or
+ \parfillskip\zeropoint
\fi}
% Page spacing:
@@ -499,9 +479,8 @@
\let\raggedcommand \relax
\let\updateraggedskips\relax
-\def\spac_align_add_to_cache#1%
- {\edef\askedraggedalign{#1}%
- \let\raggedbox\relax % why
+\def\spac_align_add_to_cache
+ {\let\raggedbox\relax % why
% we inherit hyphenation and tolerance
\t_spac_align_collected \emptytoks
\c_spac_align_state_broad \zerocount
@@ -509,10 +488,10 @@
\c_spac_align_state_vertical \zerocount
\c_spac_align_state_direction \zerocount % what is default ?
\c_spac_align_state_page \zerocount
- \ifcsname\??aligncommand\askedraggedalign\endcsname
- \csname\??aligncommand\askedraggedalign\endcsname % not much gain in new method
+ \ifcsname\??aligncommand\m_spac_align_asked\endcsname
+ \csname\??aligncommand\m_spac_align_asked\endcsname % not much gain in new method
\else
- \rawprocesscommacommand[\askedraggedalign]\spac_align_collect
+ \rawprocesscommacommand[\m_spac_align_asked]\spac_align_collect
\fi
\normalexpanded{\t_spac_align_collected
{% \resetrealignsignal % can go as it is always set
@@ -525,8 +504,8 @@
}}% kept, nice for tracing
\edef\raggedcommand {\the\t_spac_align_collected }%
\edef\updateraggedskips{\spac_align_flush_horizontal}%
- \global\expandafter\let\csname\??alignmentnormalcache#1\endcsname\raggedcommand
- \global\expandafter\let\csname\??alignmentraggedcache#1\endcsname\updateraggedskips}
+ \global\expandafter\let\csname\??alignmentnormalcache\m_spac_align_asked\endcsname\raggedcommand
+ \global\expandafter\let\csname\??alignmentraggedcache\m_spac_align_asked\endcsname\updateraggedskips}
\def\spac_align_collect#1%
{\csname\??aligncommand#1\endcsname}
@@ -534,11 +513,12 @@
% The local (key driven) setter:
\unexpanded\def\spac_align_prepare#1% deferred
- {\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache#1\endcsname
+ {\edef\m_spac_align_asked{#1}%
+ \expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
\ifx\raggedcommand\relax
- \spac_align_add_to_cache{#1}%
+ \spac_align_add_to_cache
\else
- \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache#1\endcsname
+ \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache\m_spac_align_asked\endcsname
\fi}
\let\dosetraggedcommand\spac_align_prepare % sort of public
@@ -549,26 +529,30 @@
{\dosingleempty\spac_align_setup}
\def\spac_align_setup[#1]% immediate
- {\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache#1\endcsname
+ {\edef\m_spac_align_asked{#1}%
+ \expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
\ifx\raggedcommand\relax
- \spac_align_add_to_cache{#1}%
+ \spac_align_add_to_cache
\else
- \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache#1\endcsname
+ \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache\m_spac_align_asked\endcsname
\fi
\raggedcommand}
\unexpanded\def\usealignparameter#1% faster local variant
- {\edef\m_spac_align{#1\c!align}%
- \ifx\m_spac_align\empty\else
- \expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align\endcsname
- \ifx\raggedcommand\relax
- \spac_align_add_to_cache\m_spac_align
- \else
- \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache\m_spac_align\endcsname
- \fi
- \raggedcommand
+ {\edef\m_spac_align_asked{#1\c!align}%
+ \ifx\m_spac_align_asked\empty\else
+ \spac_align_use_indeed
\fi}
+\def\spac_align_use_indeed
+ {\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
+ \ifx\raggedcommand\relax
+ \spac_align_add_to_cache
+ \else
+ \expandafter\let\expandafter\updateraggedskips\csname\??alignmentraggedcache\m_spac_align_asked\endcsname
+ \fi
+ \raggedcommand}
+
% The keywords:
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
@@ -609,6 +593,7 @@
\c_spac_align_state_broad \plustwo }
\setvalue{\??aligncommand\v!disable }{\c_spac_align_state_horizontal\plussix }
\setvalue{\??aligncommand\v!last }{\c_spac_align_state_horizontal\plusseven}
+\setvalue{\??aligncommand\v!paragraph }{\c_spac_align_state_horizontal\pluseight}
\setvalue{\??aligncommand\v!lefttoright }{\c_spac_align_state_direction \plusone }
@@ -636,6 +621,19 @@
\setvalue{\??aligncommand\v!verytolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_very_tolerant}}
\setvalue{\??aligncommand\v!stretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_stretch}}
+%D For Wolfgang:
+
+\newtoks \t_spac_every_swap_align
+
+\appendtoks
+ \setvalue{\??aligncommand\v!right}{\c_spac_align_state_horizontal\plusthree}%
+ \setvalue{\??aligncommand\v!left }{\c_spac_align_state_horizontal\plustwo }%
+\to \t_spac_every_swap_align
+
+\unexpanded\def\enablereversealignment
+ {\the\t_spac_every_swap_align
+ \t_spac_every_swap_align\emptytoks}
+
% Visible commands:
\let\notragged \spac_align_set_horizontal_none
@@ -651,6 +649,10 @@
\let\forgetragged \spac_align_set_horizontal_none
+\appendtoks
+ \spac_align_set_horizontal_none
+\to \everyforgetall
+
% Box commands.
\unexpanded\def\ibox#1#2#3%
@@ -735,11 +737,11 @@
\def\dosetraggedskips#1#2#3#4#5#6#7%
{\raggedstatus #1\relax
- \leftskip 1\leftskip \!!plus#2\relax
- \rightskip 1\rightskip\!!plus#3\relax
+ \leftskip 1\leftskip \s!plus#2\relax
+ \rightskip 1\rightskip\s!plus#3\relax
\spaceskip #4\relax
\xspaceskip #5\relax
- \parfillskip \zeropoint\!!plus#6\relax
+ \parfillskip \zeropoint\s!plus#6\relax
\parindent #7\relax
\attribute\alignstateattribute\ifcase\raggedstatus\attributeunsetvalue\else\raggedstatus\fi}
@@ -813,7 +815,7 @@
\dontleavehmode % added in marrakesch at TUG 2006\begingroup
\begingroup
\setlocalhsize % new
- \def\\{\endgroup\par\doalignline{#1}{#2}\begingroup}%
+ \def\\{\egroup\par\doalignline{#1}{#2}\bgroup}%
\dowithnextbox
{\hbox to \localhsize
{\ifcase\alignstrutmode\or\strut\fi
@@ -832,12 +834,43 @@
% direct commands
-\unexpanded\def\leftaligned {\doalignline\relax \hss }
-\unexpanded\def\midaligned {\doalignline\hss \hss }
-\unexpanded\def\rightaligned{\doalignline\hss \relax}
-\unexpanded\def\maxaligned {\doalignline\relax \relax}
+\installcorenamespace{alignwrapper}
+
+\setuvalue{\??alignwrapper\v!left }{\doalignline\relax \hss }
+\setuvalue{\??alignwrapper\v!middle}{\doalignline\hss \hss }
+\setuvalue{\??alignwrapper\v!right }{\doalignline\hss \relax}
+\setuvalue{\??alignwrapper\v!max }{\doalignline\relax \relax}
+
+\def\spac_align_wrapper_handle#1%
+ {\csname\??alignwrapper\ifcsname\??alignwrapper#1\endcsname#1\else\v!middle\fi\endcsname}
+
+\unexpanded\def\spac_align_wrapper_start[#1]%
+ {\spac_align_wrapper_handle{#1}%
+ \bgroup\ignorespaces}
+
+\unexpanded\def\spac_align_wrapper_stop
+ {\removeunwantedspaces\egroup}
+
+\unexpanded\def\startlinealignment
+ {\dosingleempty\spac_align_wrapper_start}
+
+\let\stoplinealignment\spac_align_wrapper_stop
+
+\unexpanded\def\startleftaligned {\spac_align_wrapper_start[\v!left ]} \let\stopleftaligned \spac_align_wrapper_stop
+\unexpanded\def\startmiddlealigned{\spac_align_wrapper_start[\v!middle]} \let\stopmiddlealigned\spac_align_wrapper_stop
+\unexpanded\def\startrightaligned {\spac_align_wrapper_start[\v!right ]} \let\stoprightaligned \spac_align_wrapper_stop
+\unexpanded\def\startmaxaligned {\spac_align_wrapper_start[\v!max ]} \let\stopmaxaligned \spac_align_wrapper_stop
+
+\let\startmidaligned \startmiddlealigned \let\stopmidaligned \stopmiddlealigned
+\let\startcenteraligned\startmiddlealigned \let\stopcenteraligned\stopmiddlealigned
+
+\unexpanded\def\leftaligned {\spac_align_wrapper_handle\v!left }
+\unexpanded\def\middlealigned{\spac_align_wrapper_handle\v!middle}
+\unexpanded\def\rightaligned {\spac_align_wrapper_handle\v!right }
+\unexpanded\def\maxaligned {\spac_align_wrapper_handle\v!max }
-\let\centeraligned\midaligned
+\let\midaligned \middlealigned
+\let\centeraligned\middlealigned
\installcorenamespace{alignline}
@@ -859,7 +892,7 @@
\dontleavehmode % added in marrakesch at TUG 2006\begingroup
\begingroup
\setlocalhsize
- \def\\{\endgroup\par\doxalignline#1#2#3#4#5#6\begingroup}% inefficient
+ \def\\{\egroup\par\doxalignline#1#2#3#4#5#6\bgroup}% inefficient
\dowithnextbox
{\hbox to \localhsize
{#1\hskip\ifdone#2\else#3\fi#4%
diff --git a/Master/texmf-dist/tex/context/base/spac-cha.mkiv b/Master/texmf-dist/tex/context/base/spac-cha.mkiv
new file mode 100644
index 00000000000..a07c8f198aa
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/spac-cha.mkiv
@@ -0,0 +1,191 @@
+%D \module
+%D [ file=spac-cha, % was supp-ali,
+%D version=2012.06.08, % 2000.04.17,
+%D title=\CONTEXT\ Spacing Macros,
+%D subtitle=Character Alignment,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Yet undocumented but nevertheless used.
+
+% 0 = centered
+% 1 = left in before
+% 2 = right in before
+% 3 = left in after
+% 4 = right in after
+
+\unprotect
+
+% \starttabulate[|cg{.}|cg{,}|cg{,}|]
+% \NC period \NC comma \NC comma \NC\NR
+% \NG 100.000,00 \NG 100.000,00 \NG 100,00 \NC\NR
+% \NG 10.000,00 \NG 10.000,00 \NG 1000,00 \NC\NR
+% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
+% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
+% \NG 10\\ \NG 10\\ \NG 0,00 \NC\NR
+% \NG 10 \NG 10 \NG 0,00 \NC\NR
+% \NG 10 \NG 10 \NG 0,00 \NC\NR
+% \stoptabulate
+
+% We gain not much by luafication and actually make things worse.
+
+\chardef\characteralignmentmode\plusfour
+\chardef\characteralignmentslot\plusone
+
+\let\afterassignwidth \!!zeropoint
+\let\beforeassignwidth\!!zeropoint
+
+\def\alignmentcharacter{.}
+
+\newdimen\d_supp_charalign_width
+\newtoks \t_supp_charalign_list
+
+\let\alignmentclass\s!default % can be used to handle multiple mixed ones
+
+\installcorenamespace{characteralign}
+
+\unexpanded\def\supp_charalign_push
+ {\ifcsname\??characteralign\alignmentclass\endcsname\else
+ \normalexpanded{\global\t_supp_charalign_list{\the\t_supp_charalign_list\supp_charalign_do{\alignmentclass}}}%
+ \fi
+ \setxvalue{\??characteralign\alignmentclass}{\supp_charalign_do
+ {\afterassignwidth}{\beforeassignwidth}{\alignmentcharacter}}}
+
+\unexpanded\def\supp_charalign_pop_do#1#2#3%
+ {\def\afterassignwidth {#1}%
+ \def\beforeassignwidth {#2}%
+ \def\alignmentcharacter{#3}}
+
+\unexpanded\def\supp_charalign_pop
+ {\let\supp_charalign_do\supp_charalign_pop_do
+ \executeifdefined{\??characteralign\alignmentclass}\donothing}
+
+\unexpanded\def\supp_charalign_reset_do#1%
+ {\global\letbeundefined{\??characteralign#1}} % global !
+
+\unexpanded\def\resetcharacteralign
+ {\let\supp_charalign_do\supp_charalign_reset_do
+ \the\t_supp_charalign_list
+ \global\t_supp_charalign_list\emptytoks}
+
+\unexpanded\def\supp_charalign_firstpass_one#1#2%
+ {\supp_charalign_pop
+ \let\\\empty
+ \setbox\scratchbox\hbox{#1}%
+ \d_supp_charalign_width\wd\scratchbox
+ \setbox\scratchbox\emptyhbox
+ \supp_charalign_check#2#1\relax\relax
+ \scratchdimen-\wd\scratchbox
+ \setbox\scratchbox\hbox{\ignorespaces#2\unskip}%
+ \advance\scratchdimen \wd\scratchbox
+ \ifdim\scratchdimen>\beforeassignwidth\relax
+ \edef\beforeassignwidth{\the\scratchdimen}%
+ \fi
+ \ifdim\scratchdimen=\zeropoint
+ \setbox\scratchbox\hbox{\ignorespaces#2\unskip}%
+ \scratchdimen\wd\scratchbox
+ \ifcase\characteralignmentmode
+ % do nothing
+ \else\ifnum\characteralignmentmode<\plusthree
+ \advance\scratchdimen\d_supp_charalign_width\relax
+ \ifdim\scratchdimen>\beforeassignwidth\relax
+ \edef\beforeassignwidth{\the\scratchdimen}%
+ \fi
+ \else
+ \ifdim\scratchdimen>\afterassignwidth\relax
+ \edef\afterassignwidth{\the\scratchdimen}%
+ \fi
+ \fi\fi
+ \fi
+ \supp_charalign_push}
+
+\unexpanded\def\supp_charalign_firstpass_two#1#2#3%
+ {\ifx#2\relax
+ \setbox\scratchbox\hbox{\ignorespaces#1\unskip}%
+ \ifdim\wd\scratchbox>\afterassignwidth
+ \edef\afterassignwidth{\the\wd\scratchbox}%
+ \fi
+ \else
+ \supp_charalign_check#2#3\relax\relax
+ \fi}
+
+\unexpanded\def\supp_charalign_secondpass_one#1#2%
+ {\supp_charalign_pop
+ \let\\\empty % beware, no grouping
+ \setbox\scratchbox\hbox{#1}%
+ \d_supp_charalign_width\wd\scratchbox
+ \setbox\scratchbox\emptyhbox
+ % new 12,34 vs 10\\ where 10 aligns on 12 if #1 = ,
+ \ifcase\characteralignmentslot
+ \supp_charalign_check#2#1\relax\relax
+ \scratchdimen\wd\scratchbox
+ \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
+ \else
+ \def\\{#1}%
+ \normalexpanded{\supp_charalign_check#2#1\relax\relax}%
+ \scratchdimen\wd\scratchbox
+ \setbox\scratchbox\hbox{\def\\{\hphantom{#1}}\ignorespaces#2\unskip}%
+ \fi
+ \noindent
+ \ifdim\scratchdimen=\wd\scratchbox
+ \ifcase\characteralignmentmode
+ \box\scratchbox
+ \else
+ \hbox
+ {\dontcomplain
+ \hbox to \beforeassignwidth
+ {\ifcase\characteralignmentmode\or
+ \box\scratchbox\hss
+ \or
+ \hss\box\scratchbox\hskip\d_supp_charalign_width
+ \or
+ \hss\rlap{\box\scratchbox}%
+ \or
+ \hss\rlap{\hbox to \afterassignwidth{\hss\box\scratchbox}}%
+ \fi}%
+ \hskip\afterassignwidth}%
+ \fi
+ \else
+ \hbox
+ {\hbox to \beforeassignwidth
+ {\hss\box\scratchbox\hskip-\scratchdimen}%
+ \hskip\afterassignwidth}%
+ \fi}
+
+\unexpanded\def\supp_charalign_secondpass_two#1#2#3%
+ {\ifx#2\relax
+ \setbox\scratchbox\hbox{\ignorespaces#1\unskip}%
+ \else
+ \supp_charalign_check#2#3\relax\relax
+ \fi}
+
+\unexpanded\def\supp_charalign_firstpass#1%
+ {\unexpanded\def\checkalignment ##1{\supp_charalign_firstpass_one{#1}{##1}}%
+ \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_firstpass_two{##1}{##2}{##3}}}
+
+\unexpanded\def\supp_charalign_secondpass#1%
+ {\unexpanded\def\checkalignment ##1{\supp_charalign_secondpass_one{#1}{##1}}%
+ \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_secondpass_two{##1}{##2}{##3}}}
+
+\unexpanded\def\setfirstpasscharacteralign
+ {\supp_charalign_pop
+ \normalexpanded{\supp_charalign_firstpass{\alignmentcharacter}}}
+
+\unexpanded\def\setsecondpasscharacteralign
+ {\supp_charalign_pop
+ \normalexpanded{\supp_charalign_secondpass{\alignmentcharacter}}}
+
+\unexpanded\def\startcharacteralign#1\stopcharacteralign
+ {\bgroup
+ \setfirstpasscharacteralign #1%
+ \setsecondpasscharacteralign#1%
+ \egroup}
+
+\let\stopcharacteralign\relax
+
+\protect \endinput
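
The module keeps the old two-pass approach: the first pass measures how much material ends up before and after the alignment character, the second pass typesets every entry padded to those widths. A plain-Lua toy version of that idea, padding with spaces instead of boxes and skips and using a literal "," as alignment character:

local entries = { "100.000,00", "10.000,00", "100,00", "10" }
local sep     = ","                       -- literal, so no pattern magic involved

local before, after = 0, 0
for _, e in ipairs(entries) do            -- first pass: measure
  local b, a = e:match("^(.-)" .. sep .. "(.*)$")
  if b then
    before = math.max(before, #b)
    after  = math.max(after, #sep + #a)
  else
    before = math.max(before, #e)
  end
end

for _, e in ipairs(entries) do            -- second pass: pad and "typeset"
  local b, a = e:match("^(.-)" .. sep .. "(.*)$")
  local head = b or e
  local tail = b and (sep .. a) or ""
  print(string.rep(" ", before - #head) .. head .. tail .. string.rep(" ", after - #tail))
end
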
diff --git a/Master/texmf-dist/tex/context/base/spac-chr.lua b/Master/texmf-dist/tex/context/base/spac-chr.lua
index 00c3d676603..24364978a3b 100644
--- a/Master/texmf-dist/tex/context/base/spac-chr.lua
+++ b/Master/texmf-dist/tex/context/base/spac-chr.lua
@@ -20,8 +20,6 @@ report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
local insert_node_after = node.insert_after
local remove_node = nodes.remove -- ! nodes
local copy_node_list = node.copy_list
@@ -65,8 +63,8 @@ local function inject_quad_space(unicode,head,current,fraction)
local glue = new_glue(fraction)
-- glue.attr = copy_node_list(attr)
glue.attr = attr
-current.attr = nil
- set_attribute(glue,a_character,unicode)
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
@@ -76,10 +74,10 @@ local function inject_char_space(unicode,head,current,parent)
local font = current.font
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
--- glue.attr = copy_node_list(current.attr)
+ -- glue.attr = copy_node_list(current.attr)
glue.attr = current.attr
-current.attr = nil
- set_attribute(glue,a_character,unicode)
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
@@ -88,11 +86,11 @@ local function inject_nobreak_space(unicode,head,current,space,spacestretch,spac
local attr = current.attr
local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
--- glue.attr = copy_node_list(attr)
+ -- glue.attr = copy_node_list(attr)
glue.attr = attr
-current.attr = nil
--- penalty.attr = attr
- set_attribute(glue,a_character,unicode)
+ current.attr = nil
+ -- penalty.attr = attr
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,penalty)
head, current = insert_node_after(head,current,glue)
return head, current
@@ -105,7 +103,7 @@ local methods = {
[0x00A0] = function(head,current) -- nbsp
local para = fontparameters[current.font]
- if has_attribute(current,a_alignstate) == 1 then -- flushright
+ if current[a_alignstate] == 1 then -- flushright
head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
current.subtype = space_skip_code
else
@@ -180,21 +178,23 @@ function characters.handler(head)
local current = head
local done = false
while current do
- local next = current.next
local id = current.id
if id == glyph_code then
+ local next = current.next
local char = current.char
local method = methods[char]
if method then
if trace_characters then
- report_characters("replacing character U+%04X (%s)",char,lower(chardata[char].description))
+ report_characters("replacing character %C, description %a",char,lower(chardata[char].description))
end
head = method(head,current)
head = remove_node(head,current,true)
done = true
end
+ current = next
+ else
+ current = current.next
end
- current = next
end
return head, done
end
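
The rewritten loop above only looks up current.next for glyph nodes, and does so before remove_node(head,current,true) frees the node, after which its next field can no longer be trusted. The same pattern in plain Lua on a toy singly linked list:

local function remove(head, n)            -- toy removal from a singly linked list
  if head == n then return n.next end
  local p = head
  while p.next ~= n do p = p.next end
  p.next = n.next
  return head
end

local head = { id = 1 }
head.next  = { id = 2, next = { id = 3 } }

local current = head
while current do
  local nxt = current.next                -- cache the successor before removing
  if current.id == 2 then
    head = remove(head, current)          -- after this, current may no longer be valid
  end
  current = nxt
end
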
diff --git a/Master/texmf-dist/tex/context/base/spac-chr.mkiv b/Master/texmf-dist/tex/context/base/spac-chr.mkiv
index e2ed1252280..6b7c8742f00 100644
--- a/Master/texmf-dist/tex/context/base/spac-chr.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-chr.mkiv
@@ -54,25 +54,25 @@
% Making them active is also not an option unless we do that in every
% catcode vector.
-% \nobreakspace {\begingroup\setalignstateattribute\utfchar{"00A0}\endgroup} % 1 = left
-
-\edef\nobreakspace {\utfchar{"00A0}}
-\edef\ideographicspace {\utfchar{"2000}}
-\edef\ideographichalffillspace{\utfchar{"2001}}
-\edef\twoperemspace {\utfchar{"2002}}
-\edef\threeperemspace {\utfchar{"2004}}
-\edef\fourperemspace {\utfchar{"2005}}
-\edef\sixperemspace {\utfchar{"2006}}
-\edef\figurespace {\utfchar{"2007}}
-\edef\punctuationspace {\utfchar{"2008}}
-\edef\breakablethinspace {\utfchar{"2009}}
-\edef\hairspace {\utfchar{"200A}}
-\edef\zerowidthspace {\utfchar{"200B}}
-\edef\zwnj {\utfchar{"200C}}
-\edef\zwj {\utfchar{"200D}}
-\edef\narrownobreakspace {\utfchar{"202F}}
-
-%edef\zerowidthnobreakspace {\utfchar{"FEFF}}
+% \nobreakspace {\begingroup\setalignstateattribute\normalUchar"00A0\endgroup} % 1 = left
+
+\edef\nobreakspace {\normalUchar"00A0}
+\edef\ideographicspace {\normalUchar"2000}
+\edef\ideographichalffillspace{\normalUchar"2001}
+\edef\twoperemspace {\normalUchar"2002}
+\edef\threeperemspace {\normalUchar"2004}
+\edef\fourperemspace {\normalUchar"2005}
+\edef\sixperemspace {\normalUchar"2006}
+\edef\figurespace {\normalUchar"2007}
+\edef\punctuationspace {\normalUchar"2008}
+\edef\breakablethinspace {\normalUchar"2009}
+\edef\hairspace {\normalUchar"200A}
+\edef\zerowidthspace {\normalUchar"200B}
+\edef\zwnj {\normalUchar"200C}
+\edef\zwj {\normalUchar"200D}
+\edef\narrownobreakspace {\normalUchar"202F}
+
+%edef\zerowidthnobreakspace {\normalUchar"FEFF}
\unexpanded\def\zerowidthnobreakspace{\penalty\plustenthousand\kern\zeropoint}
diff --git a/Master/texmf-dist/tex/context/base/spac-def.mkiv b/Master/texmf-dist/tex/context/base/spac-def.mkiv
index 4cd7b1d1c76..312483cfabf 100644
--- a/Master/texmf-dist/tex/context/base/spac-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-def.mkiv
@@ -13,80 +13,45 @@
\writestatus{loading}{ConTeXt Spacing Macros / Definitions}
-% todo: move resetters to other modules
+% Ideally these definitions should be moved to where they first appear and
+% this will happen at some point.
\unprotect
-\unexpanded\def\forgeteverypar
- {\everypar{\the\neverypar}}
-
-% worth trying:
-%
-% \unexpanded\def\forgeteverypar
-% {\everypar\neverypar}
-
-\unexpanded\def\forgetparskip
- {\s_spac_whitespace_parskip\zeropoint
- \parskip\zeropoint
- \let\v_spac_whitespace_current\v!none}
-
-\unexpanded\def\forgetbothskips
- {\leftskip\zeropoint
- \rightskip\zeropoint
- \relax}
-
-\unexpanded\def\forgethorizontalstretch
- {\emergencystretch\zeropoint}
-
-\unexpanded\def\forgetverticalstretch
- {\spacing\plusone}
-
-\newif\ifforgotten % rather good signal for inner
-
-% This will become better: several resetters that do all in once as currently there is
-% redundant code.
-
-\appendtoks \forgottentrue \to \everyforgetall
-\appendtoks \forgetragged \to \everyforgetall
-\appendtoks \forgetparskip \to \everyforgetall
-\appendtoks \forgetparindent \to \everyforgetall
-\appendtoks \forgetbothskips \to \everyforgetall
-\appendtoks \forgethorizontalstretch \to \everyforgetall % needed in pagebody
-\appendtoks \forgetverticalstretch \to \everyforgetall % needed in otr
-\appendtoks \everypar\emptytoks \to \everyforgetall % indeed!
-
-\appendtoks \forgetverticalstretch \to \everybodyfont
-\appendtoks \presetnormallineheight \to \everybodyfont
-\appendtoks \setnormalbaselines \to \everybodyfont % check if redundant (\forgetverticalstretch does it too)
-\appendtoks \setstrut \to \everybodyfont % check if redundant (\forgetverticalstretch does it too)
-\appendtoks \settopskip \to \everybodyfont % factors set in \forgetverticalstretch
-\appendtoks \setmaxdepth \to \everybodyfont % factors set in \forgetverticalstretch
-\appendtoks \synchronizeindenting \to \everybodyfont
-\appendtoks \synchronizeblank \to \everybodyfont
-\appendtoks \synchronizewhitespace \to \everybodyfont
-\appendtoks \synchronizespacecodes \to \everybodyfont % not needed, frozen factors
-\appendtoks \setrelativeinterlinespace \to \everybodyfont
-
-\appendtoks \updateraggedskips \to \everyfontswitch % under test
-\prependtoks \let\par\normalpar \to \everybeforepagebody % see \fillinline (was endgraf)
-\appendtoks \synchronizespacecodes \to \everydefinedfont % not needed, frozen factors
-
-\setupwhitespace
- [\v!none]
+% maybe \everysynchronizeglobalspacing
+% maybe \everysynchronizelocalspacing
-\indenting
- [\v!never]
+\appendtoks
+ \forgetverticalstretch
+ \presetnormallineheight
+ \setnormalbaselines % check if redundant (\forgetverticalstretch does it too)
+ \setstrut % check if redundant (\forgetverticalstretch does it too)
+ \settopskip % factors set in \forgetverticalstretch
+ \setmaxdepth % factors set in \forgetverticalstretch
+ \synchronizeindenting
+ \synchronizeblank
+ \synchronizewhitespace
+ \synchronizespacecodes % not needed, frozen factors
+ \setrelativeinterlinespace
+\to \everybodyfont
-\setupindenting
- [\v!none]
+% why exception
-\setupblank
- [\v!big] % alternatively [\v!standard]
+\appendtoks
+ \updateraggedskips % under test
+\to \everyfontswitch
+
+% maybe more
+
+\prependtoks
+ \let\par\normalpar
+\to \everybeforepagebody % see \fillinline (was endgraf)
-\defineblank[\v!default] [\v!big] % todo: needs to adapt to \setupblank
-\defineblank[\v!before] [\v!default] % but we need to avoid circular references
-\defineblank[\v!inbetween][\v!default] % then
-\defineblank[\v!after] [\v!before]
+% needs checking:
+
+\appendtoks
+ \synchronizespacecodes
+\to \everydefinedfont % not needed, frozen factors
\setupinterlinespace
[\c!minheight=\zeropoint, % only special purpose
@@ -97,14 +62,11 @@
\c!bottom=0.4,
\c!distance=\onepoint,
\c!line=2.8\exheight,
- \c!stretch=\zerocount]
+ \c!stretch=\zerocount,
+ \c!shrink=\zerocount]
-\setupnarrower
- [\c!before=\endgraf,
- \c!after=\endgraf,
- \c!left=1.5\emwidth,
- \c!right=1.5\emwidth,
- \c!middle=1.5\emwidth]
+\setupblank
+ [\v!big]
\setuptolerance
[\v!horizontal,\v!verystrict]
@@ -119,4 +81,10 @@
\setupspacing
[\v!packed]
+\indenting
+ [\v!never]
+
+\setupindenting
+ [\v!none]
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/spac-flr.mkiv b/Master/texmf-dist/tex/context/base/spac-flr.mkiv
new file mode 100644
index 00000000000..d09a9cf79f3
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/spac-flr.mkiv
@@ -0,0 +1,112 @@
+%D \module
+%D [ file=spac-fil,
+%D version=2013.01.13,
+%D title=\CONTEXT\ Spacing Macros,
+%D subtitle=Fillers,
+%D author={Hans Hagen and Wolfgang Schuster},
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Spacing Macros / Fillers}
+
+\unprotect
+
+%D A bit more extensive variant of a prototype posted by Wolfgang to the
+%D development list. Instead of dealing with a few leader types it makes
+%D sense to support them all as well as simple rule fillers. Eventually we
+%D can also use it for the mathfillers. We distinguish between alternatives
+%D and, with them, methods, and a checker is provided for use in applying
+%D e.g.\ fillers in lists.
+
+\installcorenamespace{filler}
+\installcorenamespace{filleralternative}
+\installcorenamespace{fillerleadermethod}
+
+\installcommandhandler \??filler {filler} \??filler
+
+\let\setupfillers\setupfiller
+
+\unexpanded\def\filler
+ {\dosingleempty\spac_fillers_indeed}
+
+\unexpanded\def\checkedfiller#1%
+ {\ifcsname\namedfillerhash{#1}\s!parent\endcsname
+ \spac_fillers_indeed[#1]%
+ \expandafter\gobbleoneargument
+ \else
+ \expandafter\firstofoneargument
+ \fi{#1}}
+
+\def\spac_fillers_indeed[#1]%
+ {\removeunwantedspaces
+ \begingroup
+ \edef\currentfiller{#1}%
+ \scratchdimen\fillerparameter\c!leftmargin\relax
+ \ifdim\scratchdimen=\zeropoint\else
+ \hskip\scratchdimen
+ \fi
+ \fillerparameter\c!left\relax
+ \expandnamespaceparameter\??filleralternative\fillerparameter\c!alternative\s!unknown\relax
+ \fillerparameter\c!right\relax
+ \scratchdimen\fillerparameter\c!rightmargin\relax
+ \ifdim\scratchdimen=\zeropoint\else
+ \hskip\scratchdimen
+ \fi
+ \endgroup
+ \ignorespaces}
+
+\setvalue{\??filleralternative\s!unknown}%
+ {}
+
+\setvalue{\??filleralternative\v!symbol}%
+ {\expandnamespaceparameter\??fillerleadermethod\fillerparameter\c!method\v!local
+ \simplealignedbox
+ {\fillerparameter\c!width}%
+ {\fillerparameter\c!align}%
+ {\fillerparameter\c!symbol}%
+ \hfill}
+
+\setvalue{\??filleralternative\c!stretch}%
+ {\hfill}
+
+\setvalue{\??filleralternative\c!rule}%
+ {\expandnamespaceparameter\??fillerleadermethod\fillerparameter\c!method\v!local
+ \hrule
+ \!!height\fillerparameter\c!height
+ \!!depth \fillerparameter\c!depth
+ \hfill}
+
+\letvalue{\??fillerleadermethod\s!local }\normalleaders % overflow ends up inbetween (current box)
+\letvalue{\??fillerleadermethod\v!global}\normalgleaders % overflow ends up inbetween (outermost box)
+\letvalue{\??fillerleadermethod\v!middle}\normalcleaders % overflow ends up before, after (current box)
+\letvalue{\??fillerleadermethod\v!broad }\normalxleaders % overflow ends up before, inbetween, after (current box)
+
+\setupfillers
+ [\c!width=\emwidth,
+ \c!symbol=.,
+ \c!align=\v!middle,
+ \c!height=.1\exheight,
+ \c!depth=\zeropoint,
+ \c!leftmargin=\zeropoint,
+ \c!rightmargin=\zeropoint,
+ \c!alternative=\v!symbol,
+ \c!method=\s!local]
+
+% maybe box -> symbol
+
+\protect \endinput
+
+% \definefiller[test-a][alternative=stretch]
+% \definefiller[test-b][alternative=symbol,symbol=!]
+% \definefiller[test-c][alternative=rule,height=.1ex,leftmargin=.5em,rightmargin=.25em]
+
+% \starttext
+% text\filler[test-a]text \par
+% text\filler[test-b]text \par
+% text\filler[test-c]text \par
+% text\checkedfiller{<nothing>}text \par
+% \stoptext
diff --git a/Master/texmf-dist/tex/context/base/spac-grd.mkiv b/Master/texmf-dist/tex/context/base/spac-grd.mkiv
index 487521f6cc0..7b3ee6d6ce2 100644
--- a/Master/texmf-dist/tex/context/base/spac-grd.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-grd.mkiv
@@ -11,41 +11,42 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module will be merged intio spac-ver.mkiv.
+%D This module will be merged into spac-ver.mkiv.
\writestatus{loading}{ConTeXt Spacing Macros / Grid Snapping}
-% buff-ver.mkiv: halfline spacing
-% strc-mat.mkiv: configure math grid (new code needed)
-% page-one.mkiv: to be checked
-% page-mul.mkiv: to be checked and redone
-% page-sid.mkiv: to be checked
-% page-set.mkiv: to be checked
-% strc-not.mkiv: to be checked
-% strc-ren.mkiv: to be checked
-% strc-flt.mkiv: to be checked (captions, probably top/bottom)
-% core-mis.mkiv: to be checked (placement, probbaly harmless)
-
\unprotect
-%D Helpers:
+%D A rather crappy macro that we need to avoid and as such it will probably
+%D disappear:
+
+\installcorenamespace{lastnodepusher}
\let\poplastnode\relax
-\def\pushlastnode
- {\ifdim\lastskip=\zeropoint
- \ifnum\lastpenalty=\zerocount
- \ifnum\lastkern=\zerocount
- \let\poplastnode\relax
- \else
- \edef\poplastnode{\kern\the\lastkern\relax}\kern-\lastkern % untested
- \fi
+\unexpanded\def\pushlastnode
+ {\csname\??lastnodepusher
+ \ifcsname\??lastnodepusher\the\lastnodetype\endcsname
+ \the\lastnodetype
\else
- \edef\poplastnode{\penalty\the\lastpenalty\relax}\nobreak % untested
+ \s!unknown
\fi
- \else
- \edef\poplastnode{\vskip\the\lastskip\relax}\vskip-\lastskip % \removelastskip
- \fi}
+ \endcsname}
+
+\setvalue{\??lastnodepusher\number\kernnodecode}%
+ {\unexpanded\edef\poplastnode{\kern\the\lastkern\relax}%
+ \kern-\lastkern}
+
+\setvalue{\??lastnodepusher\number\gluenodecode}%
+ {\unexpanded\edef\poplastnode{\vskip\the\lastskip\relax}%
+ \vskip-\lastskip}
+
+\setvalue{\??lastnodepusher\number\penaltynodecode}%
+ {\unexpanded\edef\poplastnode{\penalty\the\lastpenalty\relax}%
+ \nobreak}
+
+\setvalue{\??lastnodepusher\s!unknown}%
+ {\let\poplastnode\relax}
%D Moved from supp-box:
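
The \pushlastnode rewrite above trades nested \ifdim/\ifnum tests for a dispatch on \lastnodetype into a namespace of per-node-type handlers. The same shape in plain Lua, with a handler table instead of \csname construction; the type names and values are purely illustrative:

local pushers = {
  kern    = function(v) return "kern "    .. v end,
  glue    = function(v) return "vskip "   .. v end,
  penalty = function(v) return "penalty " .. v end,
}

local function pushlastnode(nodetype, value)
  local handler = pushers[nodetype]
  if handler then
    return handler(value)                 -- remembered so it can be "popped" later
  end
  return nil                              -- unknown node type: nothing to restore
end

print(pushlastnode("glue", "4pt plus 2pt")) -- vskip 4pt plus 2pt
print(pushlastnode("whatsit", 0))           -- nil
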
@@ -133,13 +134,17 @@
% \startlinecorrection \framed{xxx} \stoplinecorrection
% \input ward \par
+% to be redone:
+
\unexpanded\def\dotopbaselinecorrection{\expandafter\blank\expandafter[\the\d_spac_lines_correction_before]}
\unexpanded\def\dobotbaselinecorrection{\expandafter\blank\expandafter[\the\d_spac_lines_correction_after ]}
\def\showbaselinecorrection
{\def\dobaselinecorrection % visualization is not watertight!
{\bgroup
-\ifdim\prevdepth>\zeropoint\kern-\prevdepth\fi
+ \ifdim\prevdepth>\zeropoint
+ \kern-\prevdepth
+ \fi
\setbox\scratchbox\emptyhbox
\wd\scratchbox\hsize
\dp\scratchbox\strutdp
@@ -149,9 +154,9 @@
\egroup
\prevdepth\strutdp}%
\def\dotopbaselinecorrection
- {\hrule\!!height\d_spac_lines_correction_before}%
+ {\hrule\s!height\d_spac_lines_correction_before}%
\def\dobotbaselinecorrection
- {\hrule\!!height\d_spac_lines_correction_after}}
+ {\hrule\s!height\d_spac_lines_correction_after}}
\def\dobaselinecorrection % beware, this one is redefined / used locally elsewhere
{\ifdim\prevdepth>\zeropoint\kern-\prevdepth\fi
diff --git a/Master/texmf-dist/tex/context/base/spac-hor.mkiv b/Master/texmf-dist/tex/context/base/spac-hor.mkiv
index eeb5cadfd5b..2ce502fb5f5 100644
--- a/Master/texmf-dist/tex/context/base/spac-hor.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-hor.mkiv
@@ -34,7 +34,7 @@
\unexpanded\def\setupindenting
{\doifnextoptionalelse\spac_indentation_setup_options\spac_indentation_setup_size}
-\def\spac_indentation_setup_size
+\unexpanded\def\spac_indentation_setup_size
{\assigndimension\v_spac_indentation_current\d_spac_indentation_par{1\emwidth}{1.5\emwidth}{2\emwidth}}
\let\synchronizeindenting\spac_indentation_setup_size
@@ -215,7 +215,7 @@
\let\dorechecknextindentation\relax % public (in macros)
-\def\spac_indentation_check_next_indentation
+\unexpanded\def\spac_indentation_check_next_indentation
{\global\let\dorechecknextindentation\relax
\doifnextcharelse\par\donothing\spac_indentation_variant_no} % messy check as next is seldom \par
@@ -286,6 +286,17 @@
\parindent\zeropoint
\let\v_spac_indentation_current\v!none}
+\appendtoks
+ \forgetparindent
+\to \everyforgetall
+
+\unexpanded\def\forgethorizontalstretch
+ {\emergencystretch\zeropoint}
+
+\appendtoks
+ \forgethorizontalstretch
+\to \everyforgetall % needed in pagebody
+
%D Helper:
\unexpanded\def\softbreak
@@ -332,10 +343,27 @@
\def\spac_spacecodes_setup_yes[#1]%
{\csname\??spacecodemethod#1\endcsname
- \updateraggedskips} % not needed, as sf codes are static
+ \updateraggedskips}
\def\spac_spacecodes_setup_nop
- {\updateraggedskips} % not needed, as sf codes are static
+ {\updateraggedskips}
+
+%D Here's a tweak .. if needed one can configure it in the configuration
+%D so that initialization happens more efficiently.
+%D
+%D \starttyping
+%D \startoverlay
+%D {
+%D \green
+%D \enabledirectives[characters.spaceafteruppercase=normal]%
+%D \vbox{\hsize 5em x. X\par x.\ X\par X. X\par X.\ X\par}
+%D } {
+%D \blue
+%D \enabledirectives[characters.spaceafteruppercase=traditional]%
+%D \vbox{\hsize 5em x. X\par x.\ X\par X. X\par X.\ X\par}
+%D }
+%D \stopoverlay
+%D \stoptyping
% This is not needed, as \updateraggedskips is taking care of it:
@@ -406,7 +434,16 @@
\appendtoks
\let~\space
\let\ \space
-\to \simplifiedcommands
+\to \everysimplifycommands
+
+\newsignal\s_spac_keep_unwanted_space
+
+\unexpanded\def\keepunwantedspaces
+ {\ifhmode
+ \ifdim\lastskip=\s_spac_keep_unwanted_space\else
+ \hskip\s_spac_keep_unwanted_space\relax
+ \fi
+ \fi}
\unexpanded\def\removeunwantedspaces
{\ifhmode
@@ -415,7 +452,12 @@
\def\spac_helpers_remove_unwantedspace
{\ifnum\lastnodetype=\gluenodecode
- \unskip \expandafter\spac_helpers_remove_unwantedspace
+ \ifdim\lastskip=\s_spac_keep_unwanted_space\relax
+ \unskip
+ \else
+ \unskip
+ \doubleexpandafter\spac_helpers_remove_unwantedspace
+ \fi
\fi}
% \startbuffer
@@ -466,14 +508,19 @@
% Bovendien definieren we enkele extra \fill's:
-\unexpanded\def\hfilll{\hskip\zeropoint\!!plus1filll\relax}
-\unexpanded\def\vfilll{\vskip\zeropoint\!!plus1filll\relax}
+\unexpanded\def\hfilll {\hskip\zeropoint\s!plus1\s!filll\relax}
+\unexpanded\def\vfilll {\vskip\zeropoint\s!plus1\s!filll\relax}
+
+%unexpanded\def\hfilneg {\hskip\zeropoint\s!plus-1\s!fil\relax}
+\unexpanded\def\hfillneg {\hskip\zeropoint\s!plus-1\s!fill\relax}
+\unexpanded\def\hfilllneg{\hskip\zeropoint\s!plus-1\s!filll\relax}
+%unexpanded\def\vfilneg {\vskip\zeropoint\s!plus-1\s!fil\relax}
+\unexpanded\def\vfillneg {\vskip\zeropoint\s!plus-1\s!fill\relax}
+\unexpanded\def\vfilllneg{\vskip\zeropoint\s!plus-1\s!filll\relax}
\unexpanded\def\tfskip {\begingroup\tf\hskip\emwidth\endgroup}
\unexpanded\def\dotfskip#1{\begingroup\tf\hskip #1\endgroup} % used elsewhere
-% needs a proper \definenarrower or installnarrower
-%
% maybe we should hash the analysis
\installcorenamespace{narrower}
@@ -511,16 +558,16 @@
\def\spac_narrower_initialize[#1]% hm, can be dorepeat directly
{\dorepeatwithcommand[#1]\spac_narrower_method_analyze}
-\installnarrowermethod \v!left {\global\advance\s_spac_narrower_left \narrowerparameter\c!left \relax}
-\installnarrowermethod \v!middle{\global\advance\s_spac_narrower_mid \narrowerparameter\c!middle\relax}
-\installnarrowermethod \v!right {\global\advance\s_spac_narrower_right \narrowerparameter\c!right \relax}
-\installnarrowermethod-\v!left {\global\advance\s_spac_narrower_left -\narrowerparameter\c!left \relax}
-\installnarrowermethod-\v!middle{\global\advance\s_spac_narrower_mid -\narrowerparameter\c!middle\relax}
-\installnarrowermethod-\v!right {\global\advance\s_spac_narrower_right-\narrowerparameter\c!right \relax}
-\installnarrowermethod \v!reset {\global \s_spac_narrower_left \zeropoint
- \global \s_spac_narrower_mid \zeropoint
- \global \s_spac_narrower_right \zeropoint\relax}
-\installnarrowermethod\v!none {}
+\installnarrowermethod \v!left {\global\advance\s_spac_narrower_left \narrowerparameter\c!left \relax}
+\installnarrowermethod \v!middle {\global\advance\s_spac_narrower_mid \narrowerparameter\c!middle\relax}
+\installnarrowermethod \v!right {\global\advance\s_spac_narrower_right \narrowerparameter\c!right \relax}
+\installnarrowermethod{-\v!left }{\global\advance\s_spac_narrower_left -\narrowerparameter\c!left \relax}
+\installnarrowermethod{-\v!middle}{\global\advance\s_spac_narrower_mid -\narrowerparameter\c!middle\relax}
+\installnarrowermethod{-\v!right }{\global\advance\s_spac_narrower_right-\narrowerparameter\c!right \relax}
+\installnarrowermethod \v!reset {\global \s_spac_narrower_left \zeropoint
+ \global \s_spac_narrower_mid \zeropoint
+ \global \s_spac_narrower_right \zeropoint\relax}
+\installnarrowermethod \v!none {}
\unexpanded\def\spac_narrower_start#1%
{\begingroup
@@ -576,9 +623,6 @@
\let\stopnarrow\spac_narrower_stop
-% \def\v!narrower{narrower}
-% \definenarrower[\v!narrower]
-
\newdimen\d_spac_effective_hsize \def\effectivehsize {\hsize}
\newdimen\d_spac_effective_leftskip \def\effectiveleftskip {\leftskip}
\newdimen\d_spac_effective_rightskip \def\effectiverightskip{\rightskip}
@@ -613,6 +657,24 @@
\unexpanded\def\doadaptleftskip #1{\dosetleftskipadaption {#1}\advance\leftskip \leftskipadaption }
\unexpanded\def\doadaptrightskip#1{\dosetrightskipadaption{#1}\advance\rightskip\rightskipadaption}
+\unexpanded\def\forgetbothskips
+ {\leftskip\zeropoint
+ \rightskip\zeropoint
+ \relax}
+
+\appendtoks
+ \forgetbothskips
+\to \everyforgetall
+
+\unexpanded\def\forgetparskip
+ {\s_spac_whitespace_parskip\zeropoint
+ \parskip\zeropoint
+ \let\v_spac_whitespace_current\v!none}
+
+\appendtoks
+ \forgetparskip
+\to \everyforgetall
+
%D Tolerance (can also be set with align):
\installcorenamespace{tolerancemethods}
@@ -626,7 +688,7 @@
\installtolerancemethod \v!vertical \v!verytolerant {\def\bottomtolerance{.100}}
\installtolerancemethod \v!horizontal \v!stretch {\emergencystretch\bodyfontsize}
-\installtolerancemethod \v!horizontal \v!space {\spaceskip.5em\!!plus.25em\!!minus.25em\relax}
+\installtolerancemethod \v!horizontal \v!space {\spaceskip.5em\s!plus.25em\s!minus.25em\relax}
\installtolerancemethod \v!horizontal \v!verystrict {\tolerance 200 }
\installtolerancemethod \v!horizontal \v!strict {\tolerance1500 }
\installtolerancemethod \v!horizontal \v!tolerant {\tolerance3000 }
@@ -861,8 +923,8 @@
\def\flexiblespaceamount#1#2#3%
{#1\interwordspace
- \!!plus#2\interwordstretch
- \!!minus#3\interwordshrink}
+ \s!plus#2\interwordstretch
+ \s!minus#3\interwordshrink}
\def\fixedspaceamount#1%
{#1\interwordspace}
@@ -924,7 +986,7 @@
{\futurelet\nexttoken\spac_spaces_auto_insert_next}
\def\spac_spaces_auto_insert_next
- {\ctxcommand{autonextspace("\meaning\nexttoken")}} % todo, just consult nexttoken at the lua end
+ {\ctxcommand{autonextspace(\!!bs\meaning\nexttoken\!!es)}} % todo, just consult nexttoken at the lua end
%D Moved from bib module:
@@ -947,7 +1009,7 @@
\mskip#1%
\else
\scratchdimen#1\hspaceamount\empty{#2}%
- \scratchskip\scratchdimen\!!plus.5\scratchdimen\!!minus.3\scratchdimen
+ \scratchskip\scratchdimen\s!plus.5\scratchdimen\s!minus.3\scratchdimen
\hskip\scratchskip
\fi
\endgroup}
@@ -956,4 +1018,68 @@
\unexpanded\def\medglue {\spac_glues_text_or_math\medmuskip \v!medium}
\unexpanded\def\thickglue{\spac_glues_text_or_math\thickmuskip\v!big}
+%D A rather unknown one:
+
+\unexpanded\def\widened % moved from cont-new
+ {\doifnextoptionalelse\spac_widened_yes\spac_widened_nop}
+
+\def\spac_widened_yes[#1]#2{\hbox \s!spread #1{\hss#2\hss}}
+\def\spac_widened_nop #1{\hbox \s!spread \emwidth{\hss#1\hss}}
+
+\definecomplexorsimple\widened
+
+%D For the moment here (used in page-txt):
+
+\unexpanded\def\ignoredlinebreak{\unskip\space\ignorespaces}
+
+%D \macros
+%D {startignorespaces}
+%D
+%D I'll probably forget that this one exists:
+%D
+%D \starttyping
+%D \ruledhbox
+%D {\startignorespaces
+%D \def\oeps{a}
+%D \startignorespaces
+%D \def\oeps{a}
+%D \stopignorespaces
+%D \def\oeps{a}
+%D \stopignorespaces
+%D \oeps}
+%D \stoptyping
+
+\newsignal\s_spac_ignore_spaces
+\newcount \c_spac_ignore_spaces
+
+\unexpanded\def\startignorespaces
+ {\advance\c_spac_ignore_spaces\plusone
+ \ifcase\c_spac_ignore_spaces\or \ifhmode
+ \hskip\s_spac_ignore_spaces
+ \fi \fi
+ \ignorespaces}
+
+\unexpanded\def\stopignorespaces
+ {\ifcase\c_spac_ignore_spaces \or
+ \ifhmode
+ \doloop\spac_ignore_spaces_body
+ \fi
+ \fi
+ \advance\c_spac_ignore_spaces\minusone}
+
+\def\spac_ignore_spaces_body
+ {\ifdim\lastskip=\zeropoint
+ \exitloop
+ \else\ifdim\lastskip=\s_spac_ignore_spaces
+ \unskip
+ \exitloop
+ \else
+ \unskip
+ \fi\fi}
+
+%D \macros
+%D {obeyfollowingtoken}
+
+\def\obeyfollowingtoken{{}} % end \cs scanning
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/spac-par.mkiv b/Master/texmf-dist/tex/context/base/spac-par.mkiv
index af032a14535..6b7ed4f7df6 100644
--- a/Master/texmf-dist/tex/context/base/spac-par.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-par.mkiv
@@ -203,4 +203,11 @@
\fi
\glet\flushpostponednodedata\relax}
+\unexpanded\def\doflushatpar
+ {\ifvmode
+ \expandafter\flushatnextpar
+ \else
+ \expandafter\firstofoneargument
+ \fi}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/spac-ver.lua b/Master/texmf-dist/tex/context/base/spac-ver.lua
index 66698a49e91..7d030ab1ac2 100644
--- a/Master/texmf-dist/tex/context/base/spac-ver.lua
+++ b/Master/texmf-dist/tex/context/base/spac-ver.lua
@@ -22,14 +22,14 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- todo: strip baselineskip around display math
local next, type, tonumber = next, type, tonumber
-local format, gmatch, concat, match, rep = string.format, string.gmatch, table.concat, string.match, string.rep
+local gmatch, concat = string.gmatch, table.concat
local ceil, floor, max, min, round, abs = math.ceil, math.floor, math.max, math.min, math.round, math.abs
local texlists, texdimen, texbox = tex.lists, tex.dimen, tex.box
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
-local points = number.points
local allocate = utilities.storage.allocate
local todimen = string.todimen
+local formatters = string.formatters
local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
@@ -42,16 +42,18 @@ local stoptiming = statistics.stoptiming
-- vertical space handler
-local trace_vbox_vspacing = false trackers.register("builders.vbox_vspacing", function(v) trace_vbox_vspacing = v end)
-local trace_page_vspacing = false trackers.register("builders.page_vspacing", function(v) trace_page_vspacing = v end)
-local trace_collect_vspacing = false trackers.register("builders.collect_vspacing", function(v) trace_collect_vspacing = v end)
-local trace_vspacing = false trackers.register("builders.vspacing", function(v) trace_vspacing = v end)
-local trace_vsnapping = false trackers.register("builders.vsnapping", function(v) trace_vsnapping = v end)
-local trace_vpacking = false trackers.register("builders.vpacking", function(v) trace_vpacking = v end)
+local trace_vbox_vspacing = false trackers.register("vspacing.vbox", function(v) trace_vbox_vspacing = v end)
+local trace_page_vspacing = false trackers.register("vspacing.page", function(v) trace_page_vspacing = v end)
+local trace_page_builder = false trackers.register("builders.page", function(v) trace_page_builder = v end)
+local trace_collect_vspacing = false trackers.register("vspacing.collect", function(v) trace_collect_vspacing = v end)
+local trace_vspacing = false trackers.register("vspacing.spacing", function(v) trace_vspacing = v end)
+local trace_vsnapping = false trackers.register("vspacing.snapping", function(v) trace_vsnapping = v end)
+local trace_vpacking = false trackers.register("vspacing.packing", function(v) trace_vpacking = v end)
local report_vspacing = logs.reporter("vspacing","spacing")
local report_collapser = logs.reporter("vspacing","collapsing")
local report_snapper = logs.reporter("vspacing","snapping")
+local report_page_builder = logs.reporter("builders","page")
local a_skipcategory = attributes.private('skipcategory')
local a_skippenalty = attributes.private('skippenalty')
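
The hunk above only renames the tracker keys (builders.vbox_vspacing becomes vspacing.vbox and so on) and adds a page builder reporter; the registration pattern itself is unchanged. A sketch of that pattern, assuming ConTeXt's trackers and logs frameworks and a hypothetical key vspacing.demo:

local trace_demo  = false  trackers.register("vspacing.demo", function(v) trace_demo = v end)
local report_demo = logs.reporter("vspacing","demo")

if trace_demo then
  report_demo("demo tracing enabled")     -- active once \enabletrackers[vspacing.demo] is issued
end
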
@@ -60,11 +62,9 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
local find_node_tail = node.tail
local free_node = node.free
+local free_node_list = node.flush_list
local copy_node = node.copy
local traverse_nodes = node.traverse
local traverse_nodes_id = node.traverse_id
@@ -98,12 +98,14 @@ local whatsit_code = nodecodes.whatsit
local userskip_code = skipcodes.userskip
-builders.vspacing = builders.vspacing or { }
-local vspacing = builders.vspacing
-vspacing.data = vspacing.data or { }
+local vspacing = builders.vspacing or { }
+builders.vspacing = vspacing
-vspacing.data.snapmethods = vspacing.data.snapmethods or { }
-local snapmethods = vspacing.data.snapmethods --maybe some older code can go
+local vspacingdata = vspacing.data or { }
+vspacing.data = vspacingdata
+
+vspacingdata.snapmethods = vspacingdata.snapmethods or { }
+local snapmethods = vspacingdata.snapmethods --maybe some older code can go
storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods")
@@ -175,16 +177,16 @@ function vspacing.definesnapmethod(name,method)
context(n)
end
---~ local rule_id = nodecodes.rule
---~ local vlist_id = nodecodes.vlist
---~ function nodes.makevtop(n)
---~ if n.id == vlist_id then
---~ local list = n.list
---~ local height = (list and list.id <= rule_id and list.height) or 0
---~ n.depth = n.depth - height + n.height
---~ n.height = height
---~ end
---~ end
+-- local rule_id = nodecodes.rule
+-- local vlist_id = nodecodes.vlist
+-- function nodes.makevtop(n)
+-- if n.id == vlist_id then
+-- local list = n.list
+-- local height = (list and list.id <= rule_id and list.height) or 0
+-- n.depth = n.depth - height + n.height
+-- n.height = height
+-- end
+-- end
local reference = nodes.reference
@@ -206,14 +208,17 @@ local function validvbox(parentid,list)
else
done = n
end
- elseif id == penalty_code or id == glue_code then
+ elseif id == glue_code or id == penalty_code then
-- go on
else
return nil -- whatever
end
end
- if done and done.id == hlist_code then
- return validvbox(done.id,done.list)
+ if done then
+ local id = done.id
+ if id == hlist_code then
+ return validvbox(id,done.list)
+ end
end
return done -- only one vbox
end
@@ -232,15 +237,15 @@ local function already_done(parentid,list,a_snapmethod) -- todo: done when only
--~ local i = 0
for n in traverse_nodes(list) do
local id = n.id
---~ i = i + 1 print(i,nodecodes[id],has_attribute(n,a_snapmethod))
+--~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
if id == hlist_code or id == vlist_code then
- local a = has_attribute(n,a_snapmethod)
+ local a = n[a_snapmethod]
if not a then
-- return true -- not snapped at all
elseif a == 0 then
return true -- already snapped
end
- elseif id == penalty_code or id == glue_code then -- whatsit is weak spot
+ elseif id == glue_code or id == penalty_code then -- whatsit is weak spot
-- go on
else
return false -- whatever
@@ -273,25 +278,24 @@ end
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
local list = current.list
---~ print(table.serialize(method))
local t = trace_vsnapping and { }
if t then
- t[#t+1] = format("list content: %s",nodes.toutf(list))
- t[#t+1] = format("parent id: %s",reference(current))
- t[#t+1] = format("snap method: %s",method.name)
- t[#t+1] = format("specification: %s",method.specification)
+ t[#t+1] = formatters["list content: %s"](nodes.toutf(list))
+ t[#t+1] = formatters["parent id: %s"](reference(current))
+ t[#t+1] = formatters["snap method: %s"](method.name)
+ t[#t+1] = formatters["specification: %s"](method.specification)
end
local snapht, snapdp
if method["local"] then
-- snapping is done immediately here
snapht, snapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth
if t then
- t[#t+1] = format("local: snapht %s snapdp %s",points(snapht),points(snapdp))
+ t[#t+1] = formatters["local: snapht %p snapdp %p"](snapht,snapdp)
end
elseif method["global"] then
snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
if t then
- t[#t+1] = format("global: snapht %s snapdp %s",points(snapht),points(snapdp))
+ t[#t+1] = formatters["global: snapht %p snapdp %p"](snapht,snapdp)
end
else
-- maybe autolocal
@@ -302,7 +306,7 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
snapht, snapdp = lsnapht, lsnapdp
end
if t then
- t[#t+1] = format("auto: snapht %s snapdp %s",points(snapht),points(snapdp))
+ t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
local h, d = height or current.height, depth or current.depth
@@ -314,19 +318,19 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.none then
plusht, plusdp = 0, 0
if t then
- t[#t+1] = format("none: plusht 0pt plusdp 0pt")
+ t[#t+1] = "none: plusht 0pt plusdp 0pt"
end
end
if method.halfline then -- extra halfline
plusht, plusdp = plusht + snaphtdp/2, plusdp + snaphtdp/2
if t then
- t[#t+1] = format("halfline: plusht %s plusdp %s",points(plusht),points(plusdp))
+ t[#t+1] = formatters["halfline: plusht %p plusdp %p"](plusht,plusdp)
end
end
if method.line then -- extra line
plusht, plusdp = plusht + snaphtdp, plusdp + snaphtdp
if t then
- t[#t+1] = format("line: plusht %s plusdp %s",points(plusht),points(plusdp))
+ t[#t+1] = formatters["line: plusht %p plusdp %p"](plusht,plusdp)
end
end
@@ -346,20 +350,18 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if lh then
local ht, dp = thebox.height, thebox.depth
if t then
- t[#t+1] = format("first line: height %s depth %s",points(lh),points(ld))
- t[#t+1] = format("dimensions: height %s depth %s",points(ht),points(dp))
+ t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
+ t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
end
local delta = h - lh
ch, cd = lh, delta + d
---~ ch = ch + plusht
---~ cd = cd + plusdp
-h, d = ch, cd
+ h, d = ch, cd
local shifted = hpack_node(current.list)
shifted.shift = delta
current.list = shifted
done = true
if t then
- t[#t+1] = format("first: height %s depth %s shift %s",points(ch),points(cd),points(delta))
+ t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
end
elseif t then
t[#t+1] = "first: not done, no content"
@@ -382,20 +384,18 @@ h, d = ch, cd
if lh then
local ht, dp = thebox.height, thebox.depth
if t then
- t[#t+1] = format("last line: height %s depth %s",points(lh),points(ld))
- t[#t+1] = format("dimensions: height %s depth %s",points(ht),points(dp))
+ t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
+ t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
end
local delta = d - ld
cd, ch = ld, delta + h
---~ ch = ch + plusht
---~ cd = cd + plusdp
-h, d = ch, cd
+ h, d = ch, cd
local shifted = hpack_node(current.list)
shifted.shift = delta
current.list = shifted
done = true
if t then
- t[#t+1] = format("last: height %s depth %s shift %s",points(ch),points(cd),points(delta))
+ t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
end
elseif t then
t[#t+1] = "last: not done, no content"
@@ -404,59 +404,48 @@ h, d = ch, cd
t[#t+1] = "last: not done, no vbox"
end
end
-
---~ if done then
---~ -- first or last
---~ else
if method.minheight then
- -- ch = max(floor((h-hr*snapht)/snaphtdp),0)*snaphtdp + plusht
ch = floored((h-hr*snapht)/snaphtdp)*snaphtdp + plusht
if t then
- t[#t+1] = format("minheight: %s",points(ch))
+ t[#t+1] = formatters["minheight: %p"](ch)
end
elseif method.maxheight then
- -- ch = max(ceil((h-hr*snapht)/snaphtdp),0)*snaphtdp + plusht
ch = ceiled((h-hr*snapht)/snaphtdp)*snaphtdp + plusht
if t then
- t[#t+1] = format("maxheight: %s",points(ch))
+ t[#t+1] = formatters["maxheight: %p"](ch)
end
else
ch = plusht
if t then
- t[#t+1] = format("set height: %s",points(ch))
+ t[#t+1] = formatters["set height: %p"](ch)
end
end
---~ if done then
---~ -- first or last
---~ else
if method.mindepth then
- -- cd = max(floor((d-dr*snapdp)/snaphtdp),0)*snaphtdp + plusdp
cd = floored((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp
if t then
- t[#t+1] = format("mindepth: %s",points(cd))
+ t[#t+1] = formatters["mindepth: %p"](cd)
end
elseif method.maxdepth then
- -- cd = max(ceil((d-dr*snapdp)/snaphtdp),0)*snaphtdp + plusdp
cd = ceiled((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp
if t then
- t[#t+1] = format("maxdepth: %s",points(cd))
+ t[#t+1] = formatters["maxdepth: %p"](cd)
end
else
cd = plusdp
if t then
- t[#t+1] = format("set depth: %s",points(cd))
+ t[#t+1] = formatters["set depth: %p"](cd)
end
end
if method.top then
ch = ch + tlines * snaphtdp
if t then
- t[#t+1] = format("top height: %s",points(ch))
+ t[#t+1] = formatters["top height: %p"](ch)
end
end
if method.bottom then
cd = cd + blines * snaphtdp
if t then
- t[#t+1] = format("bottom depth: %s",points(cd))
+ t[#t+1] = formatters["bottom depth: %p"](cd)
end
end
@@ -464,41 +453,39 @@ h, d = ch, cd
if offset then
-- we need to set the attr
if t then
- t[#t+1] = format("before offset: %s (width %s height %s depth %s)",
- points(offset),points(current.width),points(current.height),points(current.depth))
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
local shifted = hpack_node(current.list)
shifted.shift = offset
current.list = shifted
if t then
- t[#t+1] = format("after offset: %s (width %s height %s depth %s)",
- points(offset),points(current.width),points(current.height),points(current.depth))
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
- set_attribute(shifted,a_snapmethod,0)
- set_attribute(current,a_snapmethod,0)
+ shifted[a_snapmethod] = 0
+ current[a_snapmethod] = 0
end
if not height then
current.height = ch
if t then
- t[#t+1] = format("forced height: %s",points(ch))
+ t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
current.depth = cd
if t then
- t[#t+1] = format("forced depth: %s",points(cd))
+ t[#t+1] = formatters["forced depth: %p"](cd)
end
end
local lines = (ch+cd)/snaphtdp
if t then
local original = (h+d)/snaphtdp
local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth)
- t[#t+1] = format("final lines: %s -> %s (%s)",original,lines,whatever)
- t[#t+1] = format("final height: %s -> %s",points(h),points(ch))
- t[#t+1] = format("final depth: %s -> %s",points(d),points(cd))
+ t[#t+1] = formatters["final lines: %s -> %s (%s)"](original,lines,whatever)
+ t[#t+1] = formatters["final height: %p -> %p"](h,ch)
+ t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
if t then
- report_snapper("trace: %s type %s\n\t%s",where,nodecodes[current.id],concat(t,"\n\t"))
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
end
return h, d, ch, cd, lines
end
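Throughout snap_hlist the tracing lines move from format() plus explicit points() conversions to precompiled formatters templates, where %p renders a scaled-point dimension as points and %a prints its argument in quoted form. A minimal sketch of the idiom, assuming ConTeXt's string.formatters is loaded; the values are made up:

    local formatters = string.formatters      -- ConTeXt's cached template compiler

    local t = { }
    -- old style: convert by hand and interpolate with a generic %s
    --   t[#t+1] = format("forced height: %s", points(655360))
    -- new style: the template is compiled once and reused on every call;
    -- %p turns 655360 scaled points (10pt) into "10pt", %a quotes its argument
    t[#t+1] = formatters["forced height: %p"](655360)
    t[#t+1] = formatters["snap method: %a"]("halfline")
    print(table.concat(t, "\n"))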
@@ -548,18 +535,18 @@ function vspacing.tocategory(str)
end
end
-vspacing.data.map = vspacing.data.map or { } -- allocate ?
-vspacing.data.skip = vspacing.data.skip or { } -- allocate ?
+vspacingdata.map = vspacingdata.map or { } -- allocate ?
+vspacingdata.skip = vspacingdata.skip or { } -- allocate ?
-storage.register("builders/vspacing/data/map", vspacing.data.map, "builders.vspacing.data.map")
-storage.register("builders/vspacing/data/skip", vspacing.data.skip, "builders.vspacing.data.skip")
+storage.register("builders/vspacing/data/map", vspacingdata.map, "builders.vspacing.data.map")
+storage.register("builders/vspacing/data/skip", vspacingdata.skip, "builders.vspacing.data.skip")
do -- todo: interface.variables
vspacing.fixed = false
- local map = vspacing.data.map
- local skip = vspacing.data.skip
+ local map = vspacingdata.map
+ local skip = vspacingdata.skip
local multiplier = C(S("+-")^0 * R("09")^1) * P("*")
local category = P(":") * C(P(1)^1)
@@ -571,45 +558,55 @@ do -- todo: interface.variables
-- This will change: just node.write and we can store the values in skips which
-- then obeys grouping
+ local fixedblankskip = context.fixedblankskip
+ local flexibleblankskip = context.flexibleblankskip
+ local setblankcategory = context.setblankcategory
+ local setblankorder = context.setblankorder
+ local setblankpenalty = context.setblankpenalty
+ local setblankhandling = context.setblankhandling
+ local flushblankhandling = context.flushblankhandling
+ local addpredefinedblankskip = context.addpredefinedblankskip
+ local addaskedblankskip = context.addaskedblankskip
+
local function analyze(str,oldcategory) -- we could use shorter names
for s in gmatch(str,"([^ ,]+)") do
local amount, keyword, detail = lpegmatch(splitter,s) -- the comma splitter can be merged
if not keyword then
- report_vspacing("unknown directive: %s",s)
+ report_vspacing("unknown directive %a",s)
else
local mk = map[keyword]
if mk then
category = analyze(mk,category)
elseif keyword == k_fixed then
- context.fixedblankskip()
+ fixedblankskip()
elseif keyword == k_flexible then
- context.flexibleblankskip()
+ flexibleblankskip()
elseif keyword == k_category then
local category = tonumber(detail)
if category then
- context.setblankcategory(category)
+ setblankcategory(category)
if category ~= oldcategory then
- context.flushblankhandling()
+ flushblankhandling()
oldcategory = category
end
end
elseif keyword == k_order and detail then
local order = tonumber(detail)
if order then
- context.setblankorder(order)
+ setblankorder(order)
end
elseif keyword == k_penalty and detail then
local penalty = tonumber(detail)
if penalty then
- context.setblankpenalty(penalty)
+ setblankpenalty(penalty)
end
else
amount = tonumber(amount) or 1
local sk = skip[keyword]
if sk then
- context.addpredefinedblankskip(amount,keyword)
+ addpredefinedblankskip(amount,keyword)
else -- no check
- context.addaskedblankskip(amount,keyword)
+ addaskedblankskip(amount,keyword)
end
end
end
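analyze() runs for every element of a blank specification, so the hunk above resolves the context.* command stubs into upvalues once at load time instead of looking them up on each call. A minimal sketch of that pattern; the helper function and its input are hypothetical:

    -- resolve the command once, outside the hot path
    local setblankcategory = context.setblankcategory

    local function setcategories(list)
        for i = 1, #list do
            setblankcategory(list[i])   -- plain upvalue call, no lookup on 'context'
        end
    end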
@@ -617,15 +614,22 @@ do -- todo: interface.variables
return category
end
+ local pushlogger = context.pushlogger
+ local startblankhandling = context.startblankhandling
+ local stopblankhandling = context.stopblankhandling
+ local poplogger = context.poplogger
+
function vspacing.analyze(str)
if trace_vspacing then
- context.pushlogger(report_vspacing)
- end
- context.startblankhandling()
- analyze(str,1)
- context.stopblankhandling()
- if trace_vspacing then
- context.poplogger()
+ pushlogger(report_vspacing)
+ startblankhandling()
+ analyze(str,1)
+ stopblankhandling()
+ poplogger()
+ else
+ startblankhandling()
+ analyze(str,1)
+ stopblankhandling()
end
end
@@ -646,43 +650,19 @@ end
-- implementation
---~ nodes.snapvalues = { }
-
---~ function nodes.setsnapvalue(n,ht,dp)
---~ nodes.snapvalues[n] = { ht, dp, ht+dp }
---~ end
-
local trace_list, tracing_info, before, after = { }, false, "", ""
-local stripzeros, topoints = utilities.formatters.stripzeros, number.topoints
-
-local function glue_to_string(glue)
- local spec = glue.spec
- local t = { points(spec.width) }
- if spec.stretch_order and spec.stretch_order ~= 0 then
- t[#t+1] = format("plus %s%s",spec.stretch/65536,fillcodes[spec.stretch_order])
- elseif spec.stretch and spec.stretch ~= 0 then
- t[#t+1] = format("plus %s",stripzeros(topoints(spec.stretch)))
- end
- if spec.shrink_order and spec.shrink_order ~= 0 then
- t[#t+1] = format("minus %s%s",spec.shrink/65536,fillcodes[spec.shrink_order])
- elseif spec.shrink and spec.shrink ~= 0 then
- t[#t+1] = format("minus %s",stripzeros(topoints(spec.shrink)))
- end
- return concat(t," ")
-end
-
local function nodes_to_string(head)
local current, t = head, { }
while current do
local id = current.id
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = format("%s:%s",ty,current.penalty)
- elseif id == glue_code then
- t[#t+1] = format("%s:%s",ty,glue_to_string(current)) -- stripzeros(topoints(current.spec.width)))
+ t[#t+1] = formatters["%s:%s"](ty,current.penalty)
+ elseif id == glue_code then -- or id == kern_code then -- to be tested
+ t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = format("%s:%s",ty,stripzeros(topoints(current.kern)))
+ t[#t+1] = formatters["%s:%p"](ty,current.kern)
else
t[#t+1] = ty
end
@@ -696,24 +676,24 @@ local function reset_tracing(head)
end
local function trace_skip(str,sc,so,sp,data)
- trace_list[#trace_list+1] = { "skip", format("%s | %s | category %s | order %s | penalty %s", str, glue_to_string(data), sc or "-", so or "-", sp or "-") }
+ trace_list[#trace_list+1] = { "skip", formatters["%s | %p | category %s | order %s | penalty %s"](str, data, sc or "-", so or "-", sp or "-") }
tracing_info = true
end
local function trace_natural(str,data)
- trace_list[#trace_list+1] = { "skip", format("%s | %s", str, glue_to_string(data)) }
+ trace_list[#trace_list+1] = { "skip", formatters["%s | %p"](str, data) }
tracing_info = true
end
local function trace_info(message, where, what)
- trace_list[#trace_list+1] = { "info", format("%s: %s/%s",message,where,what) }
+ trace_list[#trace_list+1] = { "info", formatters["%s: %s/%s"](message,where,what) }
end
local function trace_node(what)
local nt = nodecodes[what.id]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
- trace_list[#trace_list] = { "node", tl[2] .. " + " .. nt }
+ trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
else
trace_list[#trace_list+1] = { "node", nt }
end
@@ -721,9 +701,9 @@ end
local function trace_done(str,data)
if data.id == penalty_code then
- trace_list[#trace_list+1] = { "penalty", format("%s | %s", str, data.penalty) }
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
else
- trace_list[#trace_list+1] = { "glue", format("%s | %s", str, glue_to_string(data)) }
+ trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
tracing_info = true
end
@@ -762,14 +742,14 @@ local splittopskip_code = skipcodes.splittopskip
local free_glue_node = free_node
local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8
---~ local function free_glue_node(n)
---~ -- free_node(n.spec)
---~ print("before",n)
---~ logs.flush()
---~ free_node(n)
---~ print("after")
---~ logs.flush()
---~ end
+-- local function free_glue_node(n)
+-- -- free_node(n.spec)
+-- print("before",n)
+-- logs.flush()
+-- free_node(n)
+-- print("after")
+-- logs.flush()
+-- end
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
@@ -777,7 +757,7 @@ function vspacing.snapbox(n,how)
local box = texbox[n]
local list = box.list
if list then
- local s = has_attribute(list,a_snapmethod)
+ local s = list[a_snapmethod]
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
@@ -787,18 +767,18 @@ function vspacing.snapbox(n,how)
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
- report_snapper("box list already snapped at (%s,%s): %s",
+ report_snapper("box list already snapped at (%p,%p): %s",
ht,dp,listtoutf(list))
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
box.height, box.depth = ch, cd
if trace_vsnapping then
- report_snapper("box list snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines): %s",
+ report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- set_attribute(box, a_snapmethod,0) --
- set_attribute(list,a_snapmethod,0) -- yes or no
+ box[a_snapmethod] = 0 --
+ list[a_snapmethod] = 0 -- yes or no
end
end
end
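The snapbox hunk also shows the other recurring change in this file: the node.has_attribute/set_attribute helpers are dropped in favour of LuaTeX's direct attribute indexing on nodes. A minimal sketch under that assumption; the helper function is hypothetical:

    local a_snapmethod = attributes.private("snapmethod")   -- an attribute number

    -- old style:
    --   local s = node.has_attribute(n, a_snapmethod)
    --   node.set_attribute(n, a_snapmethod, 0)
    -- new style: index the node with the attribute number
    local function mark_snapped(n)
        local s = n[a_snapmethod]     -- nil when the attribute was never set
        if s and s ~= 0 then
            n[a_snapmethod] = 0       -- zero means: already snapped, skip it later
        end
        return s
    end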
@@ -818,11 +798,13 @@ local function forced_skip(head,current,width,where,trace)
current = c
end
if trace then
- report_vspacing("inserting forced skip of %s",width)
+ report_vspacing("inserting forced skip of %p",width)
end
return head, current
end
+-- a penalty only works well when it comes before the skip
+
local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
if trace then
reset_tracing(head)
@@ -858,25 +840,25 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
end
if trace_vsnapping then
- report_snapper("global ht/dp = %s/%s, local ht/dp = %s/%s",
+ report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth,
texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth)
end
if trace then trace_info("start analyzing",where,what) end
while current do
- local id, subtype = current.id, current.subtype
+ local id = current.id
if id == hlist_code or id == vlist_code then
-- needs checking, why so many calls
if snap then
local list = current.list
- local s = has_attribute(current,a_snapmethod)
+ local s = current[a_snapmethod]
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
-- end
elseif s == 0 then
if trace_vsnapping then
- report_snapper("mvl %s not snapped, already done: %s",nodecodes[id],listtoutf(list))
+ report_snapper("mvl %a not snapped, already done: %s",nodecodes[id],listtoutf(list))
end
else
local sv = snapmethods[s]
@@ -886,19 +868,19 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
local ht, dp = current.height, current.depth
-- assume that the box is already snapped
if trace_vsnapping then
- report_snapper("mvl list already snapped at (%s,%s): %s",ht,dp,listtoutf(list))
+ report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
end
else
local h, d, ch, cd, lines = snap_hlist("mvl",current,sv)
if trace_vsnapping then
- report_snapper("mvl %s snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines): %s",
+ report_snapper("mvl %a snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
nodecodes[id],h,d,ch,cd,sv.name,sv.specification,where,lines,listtoutf(list))
end
end
elseif trace_vsnapping then
- report_snapper("mvl %s not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
+ report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- set_attribute(current,a_snapmethod,0)
+ current[a_snapmethod] = 0
end
else
--
@@ -907,264 +889,265 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
flush("list")
current = current.next
elseif id == penalty_code then
- --~ natural_penalty = current.penalty
- --~ if trace then trace_done("removed penalty",current) end
- --~ head, current = remove_node(head, current, true)
+ -- natural_penalty = current.penalty
+ -- if trace then trace_done("removed penalty",current) end
+ -- head, current = remove_node(head, current, true)
current = current.next
elseif id == kern_code then
if snap and trace_vsnapping and current.kern ~= 0 then
- --~ current.kern = 0
- report_snapper("kern of %s (kept)",current.kern)
+ report_snapper("kern of %p kept",current.kern)
end
flush("kern")
current = current.next
- elseif id ~= glue_code then
- flush("something else")
- current = current.next
- elseif subtype == userskip_code then
- local sc = has_attribute(current,a_skipcategory) -- has no default, no unset (yet)
- local so = has_attribute(current,a_skiporder ) or 1 -- has 1 default, no unset (yet)
- local sp = has_attribute(current,a_skippenalty ) -- has no default, no unset (yet)
- if sp and sc == penalty then
- if not penalty_data then
- penalty_data = sp
- elseif penalty_order < so then
- penalty_order, penalty_data = so, sp
- elseif penalty_order == so and sp > penalty_data then
- penalty_data = sp
- end
- if trace then trace_skip('penalty in skip',sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- elseif not sc then -- if not sc then
- if glue_data then
- if trace then trace_done("flush",glue_data) end
- head = insert_node_before(head,current,glue_data)
- if trace then trace_natural("natural",current) end
- current = current.next
- else
- -- not look back across head
- local previous = current.prev
- if previous and previous.id == glue_code and previous.subtype == userskip_code then
- local ps = previous.spec
- if ps.writable then
- local cs = current.spec
- if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
- local pw, pp, pm = ps.width, ps.stretch, ps.shrink
- local cw, cp, cm = cs.width, cs.stretch, cs.shrink
---~ ps = writable_spec(previous) -- no writable needed here
---~ ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
-previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
- if trace then trace_natural("removed",current) end
- head, current = remove_node(head, current, true)
- -- current = previous
- if trace then trace_natural("collapsed",previous) end
- -- current = current.next
+ elseif id == glue_code then
+ local subtype = current.subtype
+ if subtype == userskip_code then
+ local sc = current[a_skipcategory] -- has no default, no unset (yet)
+ local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
+ local sp = current[a_skippenalty] -- has no default, no unset (yet)
+ if sp and sc == penalty then
+ if not penalty_data then
+ penalty_data = sp
+ elseif penalty_order < so then
+ penalty_order, penalty_data = so, sp
+ elseif penalty_order == so and sp > penalty_data then
+ penalty_data = sp
+ end
+ if trace then trace_skip("penalty in skip",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ elseif not sc then -- if not sc then
+ if glue_data then
+ if trace then trace_done("flush",glue_data) end
+ head = insert_node_before(head,current,glue_data)
+ if trace then trace_natural("natural",current) end
+ current = current.next
+ else
+ -- not look back across head
+ local previous = current.prev
+ if previous and previous.id == glue_code and previous.subtype == userskip_code then
+ local ps = previous.spec
+ if ps.writable then
+ local cs = current.spec
+ if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
+ local pw, pp, pm = ps.width, ps.stretch, ps.shrink
+ local cw, cp, cm = cs.width, cs.stretch, cs.shrink
+ -- ps = writable_spec(previous) -- no writable needed here
+ -- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
+ previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
+ if trace then trace_natural("removed",current) end
+ head, current = remove_node(head, current, true)
+ -- current = previous
+ if trace then trace_natural("collapsed",previous) end
+ -- current = current.next
+ else
+ if trace then trace_natural("filler",current) end
+ current = current.next
+ end
else
- if trace then trace_natural("filler",current) end
+ if trace then trace_natural("natural (no prev spec)",current) end
current = current.next
end
else
- if trace then trace_natural("natural (no prev spec)",current) end
+ if trace then trace_natural("natural (no prev)",current) end
current = current.next
end
- else
- if trace then trace_natural("natural (no prev)",current) end
- current = current.next
end
- end
- glue_order, glue_data = 0, nil
- elseif sc == disable then
- ignore_following = true
- if trace then trace_skip("disable",sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- elseif sc == together then
- keep_together = true
- if trace then trace_skip("together",sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- elseif sc == nowhite then
- ignore_whitespace = true
- head, current = remove_node(head, current, true)
- elseif sc == discard then
- if trace then trace_skip("discard",sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- elseif ignore_following then
- if trace then trace_skip("disabled",sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- elseif not glue_data then
- if trace then trace_skip("assign",sc,so,sp,current) end
- glue_order = so
- head, current, glue_data = remove_node(head, current)
- elseif glue_order < so then
- if trace then trace_skip("force",sc,so,sp,current) end
- glue_order = so
- free_glue_node(glue_data)
- head, current, glue_data = remove_node(head, current)
- elseif glue_order == so then
- -- is now exclusive, maybe support goback as combi, else why a set
- if sc == largest then
- local cs, gs = current.spec, glue_data.spec
- local cw, gw = cs.width, gs.width
- if cw > gw then
- if trace then trace_skip('largest',sc,so,sp,current) end
+ glue_order, glue_data = 0, nil
+ elseif sc == disable then
+ ignore_following = true
+ if trace then trace_skip("disable",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ elseif sc == together then
+ keep_together = true
+ if trace then trace_skip("together",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ elseif sc == nowhite then
+ ignore_whitespace = true
+ head, current = remove_node(head, current, true)
+ elseif sc == discard then
+ if trace then trace_skip("discard",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ elseif ignore_following then
+ if trace then trace_skip("disabled",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ elseif not glue_data then
+ if trace then trace_skip("assign",sc,so,sp,current) end
+ glue_order = so
+ head, current, glue_data = remove_node(head, current)
+ elseif glue_order < so then
+ if trace then trace_skip("force",sc,so,sp,current) end
+ glue_order = so
+ free_glue_node(glue_data)
+ head, current, glue_data = remove_node(head, current)
+ elseif glue_order == so then
+ -- is now exclusive, maybe support goback as combi, else why a set
+ if sc == largest then
+ local cs, gs = current.spec, glue_data.spec
+ local cw, gw = cs.width, gs.width
+ if cw > gw then
+ if trace then trace_skip("largest",sc,so,sp,current) end
+ free_glue_node(glue_data) -- also free spec
+ head, current, glue_data = remove_node(head, current)
+ else
+ if trace then trace_skip("remove smallest",sc,so,sp,current) end
+ head, current = remove_node(head, current, true)
+ end
+ elseif sc == goback then
+ if trace then trace_skip("goback",sc,so,sp,current) end
+ free_glue_node(glue_data) -- also free spec
+ head, current, glue_data = remove_node(head, current)
+ elseif sc == force then
+ -- last one counts, some day we can provide an accumulator and largest etc
+ -- but not now
+ if trace then trace_skip("force",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
head, current, glue_data = remove_node(head, current)
+ elseif sc == penalty then
+ if trace then trace_skip("penalty",sc,so,sp,current) end
+ free_glue_node(glue_data) -- also free spec
+ glue_data = nil
+ head, current = remove_node(head, current, true)
+ elseif sc == add then
+ if trace then trace_skip("add",sc,so,sp,current) end
+ -- local old, new = glue_data.spec, current.spec
+ local old, new = writable_spec(glue_data), current.spec
+ old.width = old.width + new.width
+ old.stretch = old.stretch + new.stretch
+ old.shrink = old.shrink + new.shrink
+ -- todo: order
+ head, current = remove_node(head, current, true)
else
- if trace then trace_skip('remove smallest',sc,so,sp,current) end
+ if trace then trace_skip("unknown",sc,so,sp,current) end
head, current = remove_node(head, current, true)
end
- elseif sc == goback then
- if trace then trace_skip('goback',sc,so,sp,current) end
- free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
- elseif sc == force then
- -- last one counts, some day we can provide an accumulator and largest etc
- -- but not now
- if trace then trace_skip('force',sc,so,sp,current) end
- free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
- elseif sc == penalty then
- -- ? ? ? ?
- if trace then trace_skip('penalty',sc,so,sp,current) end
- free_glue_node(glue_data) -- also free spec
- glue_data = nil
- head, current = remove_node(head, current, true)
- elseif sc == add then
- if trace then trace_skip('add',sc,so,sp,current) end
- -- local old, new = glue_data.spec, current.spec
- local old, new = writable_spec(glue_data), current.spec
- old.width = old.width + new.width
- old.stretch = old.stretch + new.stretch
- old.shrink = old.shrink + new.shrink
- -- toto: order
- head, current = remove_node(head, current, true)
else
if trace then trace_skip("unknown",sc,so,sp,current) end
head, current = remove_node(head, current, true)
end
- else
- if trace then trace_skip("unknown",sc,so,sp,current) end
- head, current = remove_node(head, current, true)
- end
- if sc == force then
- force_glue = true
- end
- elseif subtype == lineskip_code then
- if snap then
- local s = has_attribute(current,a_snapmethod)
- if s and s ~= 0 then
- set_attribute(current,a_snapmethod,0)
- if current.spec.writable then
- local spec = writable_spec(current)
- spec.width = 0
- if trace_vsnapping then
- report_snapper("lineskip set to zero")
+ if sc == force then
+ force_glue = true
+ end
+ elseif subtype == lineskip_code then
+ if snap then
+ local s = current[a_snapmethod]
+ if s and s ~= 0 then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
+ local spec = writable_spec(current)
+ spec.width = 0
+ if trace_vsnapping then
+ report_snapper("lineskip set to zero")
+ end
end
+ else
+ if trace then trace_skip("lineskip",sc,so,sp,current) end
+ flush("lineskip")
end
else
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- else
- if trace then trace_skip("lineskip",sc,so,sp,current) end
- flush("lineskip")
- end
- current = current.next
- elseif subtype == baselineskip_code then
- if snap then
- local s = has_attribute(current,a_snapmethod)
- if s and s ~= 0 then
- set_attribute(current,a_snapmethod,0)
- if current.spec.writable then
- local spec = writable_spec(current)
- spec.width = 0
- if trace_vsnapping then
- report_snapper("baselineskip set to zero")
+ current = current.next
+ elseif subtype == baselineskip_code then
+ if snap then
+ local s = current[a_snapmethod]
+ if s and s ~= 0 then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
+ local spec = writable_spec(current)
+ spec.width = 0
+ if trace_vsnapping then
+ report_snapper("baselineskip set to zero")
+ end
end
+ else
+ if trace then trace_skip("baselineskip",sc,so,sp,current) end
+ flush("baselineskip")
end
else
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- else
- if trace then trace_skip("baselineskip",sc,so,sp,current) end
- flush("baselineskip")
- end
- current = current.next
- elseif subtype == parskip_code then
- -- parskip always comes later
- if ignore_whitespace then
- if trace then trace_natural("ignored parskip",current) end
- head, current = remove_node(head, current, true)
- elseif glue_data then
- local ps, gs = current.spec, glue_data.spec
- if ps.writable and gs.writable and ps.width > gs.width then
- glue_data.spec = copy_node(ps)
- if trace then trace_natural("taking parskip",current) end
+ current = current.next
+ elseif subtype == parskip_code then
+ -- parskip always comes later
+ if ignore_whitespace then
+ if trace then trace_natural("ignored parskip",current) end
+ head, current = remove_node(head, current, true)
+ elseif glue_data then
+ local ps, gs = current.spec, glue_data.spec
+ if ps.writable and gs.writable and ps.width > gs.width then
+ glue_data.spec = copy_node(ps)
+ if trace then trace_natural("taking parskip",current) end
+ else
+ if trace then trace_natural("removed parskip",current) end
+ end
+ head, current = remove_node(head, current, true)
else
- if trace then trace_natural("removed parskip",current) end
+ if trace then trace_natural("honored parskip",current) end
+ head, current, glue_data = remove_node(head, current)
end
- head, current = remove_node(head, current, true)
- else
- if trace then trace_natural("honored parskip",current) end
- head, current, glue_data = remove_node(head, current)
- end
- elseif subtype == topskip_code or subtype == splittopskip_code then
- if snap then
- local s = has_attribute(current,a_snapmethod)
- if s and s ~= 0 then
- set_attribute(current,a_snapmethod,0)
- local sv = snapmethods[s]
- local w, cw = snap_topskip(current,sv)
- if trace_vsnapping then
- report_snapper("topskip snapped from %s to %s for '%s'",w,cw,where)
+ elseif subtype == topskip_code or subtype == splittopskip_code then
+ if snap then
+ local s = current[a_snapmethod]
+ if s and s ~= 0 then
+ current[a_snapmethod] = 0
+ local sv = snapmethods[s]
+ local w, cw = snap_topskip(current,sv)
+ if trace_vsnapping then
+ report_snapper("topskip snapped from %p to %p for %a",w,cw,where)
+ end
+ else
+ if trace then trace_skip("topskip",sc,so,sp,current) end
+ flush("topskip")
end
else
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- else
- if trace then trace_skip("topskip",sc,so,sp,current) end
- flush("topskip")
- end
- current = current.next
- elseif subtype == abovedisplayskip_code then
- --
- if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
- flush("above display skip (normal)")
- current = current.next
- --
- elseif subtype == belowdisplayskip_code then
- --
- if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
- flush("below display skip (normal)")
- current = current.next
- --
- elseif subtype == abovedisplayshortskip_code then
- --
- if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
- flush("above display skip (short)")
- current = current.next
- --
- elseif subtype == belowdisplayshortskip_code then
- --
- if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
- flush("below display skip (short)")
- current = current.next
- --
- else -- other glue
- if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then
- report_snapper("%s of %s (kept)",skipcodes[subtype],current.spec.width)
- --~ current.spec.width = 0
+ current = current.next
+ elseif subtype == abovedisplayskip_code then
+ --
+ if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
+ flush("above display skip (normal)")
+ current = current.next
+ --
+ elseif subtype == belowdisplayskip_code then
+ --
+ if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
+ flush("below display skip (normal)")
+ current = current.next
+ --
+ elseif subtype == abovedisplayshortskip_code then
+ --
+ if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
+ flush("above display skip (short)")
+ current = current.next
+ --
+ elseif subtype == belowdisplayshortskip_code then
+ --
+ if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
+ flush("below display skip (short)")
+ current = current.next
+ --
+ else -- other glue
+ if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then
+ report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
+ --~ current.spec.width = 0
+ end
+ if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
+ flush("some glue")
+ current = current.next
end
- if trace then trace_skip(format("some glue (%s)",subtype),sc,so,sp,current) end
- flush("some glue")
+ else
+ flush("something else")
current = current.next
end
end
if trace then trace_info("stop analyzing",where,what) end
- --~ if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then
- --~ penalty_data = natural_penalty
- --~ end
+ -- if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then
+ -- penalty_data = natural_penalty
+ -- end
if trace and (glue_data or penalty_data) then
trace_info("start flushing",where,what)
end
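The collapser rewrite above mostly re-indents the same logic: the loop now dispatches on the node id first and only reads current.subtype inside the glue branch, instead of mixing glue subtypes into the top-level chain. A skeleton of the new shape, with the branch bodies reduced to comments:

    local function collapse_pass(head)
        local current = head
        while current do
            local id = current.id
            if id == hlist_code or id == vlist_code then
                -- boxes: snap when asked, then flush any pending glue
            elseif id == penalty_code or id == kern_code then
                -- penalties and kerns pass through (kerns only reported when tracing)
            elseif id == glue_code then
                local subtype = current.subtype    -- only fetched for glue nodes
                if subtype == userskip_code then
                    -- the category/order/penalty attributes decide the collapsing
                else
                    -- lineskip, baselineskip, parskip, topskip, display skips, ...
                end
            else
                -- whatsits and anything else just pass through
            end
            current = current.next
        end
        return head
    end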
@@ -1191,7 +1174,7 @@ previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disa
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from '%s' to '%s'",nodecodes[oldhead.id],nodecodes[head.id])
+ trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
end
end
return head, true
@@ -1216,24 +1199,21 @@ end
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
- -- starttiming(vspacing)
- local newtail = find_node_tail(newhead)
+ local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
local id = n.id
- if id == glue_code then
- if n.subtype == userskip_code then
- if has_attribute(n,a_skipcategory) then
- stackhack = true
- else
- flush = true
- end
+ if id ~= glue_code then
+ flush = true
+ elseif n.subtype == userskip_code then
+ if n[a_skipcategory] then
+ stackhack = true
else
- -- tricky
+ flush = true
end
else
- flush = true
+ -- tricky
end
end
if flush then
@@ -1266,7 +1246,6 @@ function vspacing.pagehandler(newhead,where)
-- texlists.contrib_head = nil
newhead = nil
end
- -- stoptiming(vspacing)
end
return newhead
end
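The page handler scan follows the same pattern: test the node id first; only user skips carrying a skip-category attribute trigger the stack hack, everything else just asks for a flush. A compact sketch of that decision, using the names from the hunk:

    local flush, stackhack = false, false
    for n in traverse_nodes(newhead) do
        if n.id ~= glue_code then
            flush = true                 -- real content: collapse afterwards
        elseif n.subtype == userskip_code then
            if n[a_skipcategory] then
                stackhack = true         -- categorized skip: run the stacker
            else
                flush = true             -- plain user skip
            end
        else
            -- other glue subtypes are tricky and left alone here
        end
    end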
@@ -1316,28 +1295,51 @@ function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction)
else
nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true)
end
- stoptiming(builders)
else
head, done = actions(head,groupcode)
- stoptiming(builders)
end
+ stoptiming(builders)
end
return head, done
end
--- This one is special in the sense that it has no head
--- and we operate on the mlv. Also, we need to do the
--- vspacing last as it removes items from the mvl.
+-- This one is special in the sense that it has no head and we operate on the mvl. Also,
+-- we need to do the vspacing last as it removes items from the mvl.
local actions = nodes.tasks.actions("mvlbuilders")
+local function report(groupcode,head)
+ report_page_builder("trigger: %s",groupcode)
+ report_page_builder(" vsize : %p",tex.vsize)
+ report_page_builder(" pagegoal : %p",tex.pagegoal)
+ report_page_builder(" pagetotal: %p",tex.pagetotal)
+ report_page_builder(" list : %s",head and nodeidstostring(head) or "<empty>")
+end
+
function builders.buildpage_filter(groupcode)
- starttiming(builders)
- local head = texlists.contrib_head
- local head, done = actions(head,groupcode)
- texlists.contrib_head = head
- stoptiming(builders)
- return (done and head) or true
+ local head, done = texlists.contrib_head, false
+ -- if head and head.next and head.next.id == hlist_code and head.next.width == 1 then
+ -- report_page_builder("trigger otr calculations")
+ -- free_node_list(head)
+ -- head = nil
+ -- end
+ if head then
+ starttiming(builders)
+ if trace_page_builder then
+ report(groupcode,head)
+ end
+ head, done = actions(head,groupcode)
+ stoptiming(builders)
+ -- -- doesn't work here (not passed on?)
+ -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
+ texlists.contrib_head = head
+ return done and head or true
+ else
+ if trace_page_builder then
+ report(groupcode)
+ end
+ return nil, false
+ end
end
callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
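buildpage_filter now returns early when texlists.contrib_head is empty, so the timing wrapper and the mvlbuilders task chain only run when there is real work. The callback wiring itself stays as before; a minimal sketch of the registration that presumably follows (the description string here is made up):

    -- the page builder callback takes only a group code; the head lives in
    -- texlists.contrib_head and is written back there by the filter
    callbacks.register("buildpage_filter", builders.buildpage_filter, "vertical spacing etc (mvl)")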
diff --git a/Master/texmf-dist/tex/context/base/spac-ver.mkiv b/Master/texmf-dist/tex/context/base/spac-ver.mkiv
index 72ee80d4f02..7323230a7ed 100644
--- a/Master/texmf-dist/tex/context/base/spac-ver.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-ver.mkiv
@@ -20,7 +20,6 @@
% todo: itemize : intro ... only when there are one or two lines preceding, and then
% keep these together, i.e. \blank[intro]
-
% Isn't it about time to get rid of topskip i.e. make it equivalent to
% \openstrutheight so that we can remove delta code.
%
@@ -68,8 +67,8 @@
\def\normalskipamount
{\openlineheight
\ifgridsnapping \else \ifblankflexible
- \!!plus \skipgluefactor\openlineheight
- \!!minus\skipgluefactor\openlineheight
+ \s!plus \skipgluefactor\openlineheight
+ \s!minus\skipgluefactor\openlineheight
\fi \fi
\relax}
@@ -91,15 +90,16 @@
\spac_linespacing_setup_specified_interline_space}
\def\spac_linespacing_setup_specified_interline_space
- {\edef\strutheightfactor {\interlinespaceparameter\c!height }%
- \edef\strutdepthfactor {\interlinespaceparameter\c!depth }%
- \edef\minimumstrutheight {\interlinespaceparameter\c!minheight}%
- \edef\minimumstrutdepth {\interlinespaceparameter\c!mindepth }%
- \edef\minimumlinedistance{\interlinespaceparameter\c!distance }%
- \edef\normallineheight {\interlinespaceparameter\c!line }%
- \edef\topskipfactor {\interlinespaceparameter\c!top }%
- \edef\maxdepthfactor {\interlinespaceparameter\c!bottom }%
- \edef\baselinegluefactor {\interlinespaceparameter\c!stretch }%
+ {\edef\strutheightfactor {\interlinespaceparameter\c!height }%
+ \edef\strutdepthfactor {\interlinespaceparameter\c!depth }%
+ \edef\minimumstrutheight {\interlinespaceparameter\c!minheight}%
+ \edef\minimumstrutdepth {\interlinespaceparameter\c!mindepth }%
+ \edef\minimumlinedistance {\interlinespaceparameter\c!distance }%
+ \edef\normallineheight {\interlinespaceparameter\c!line }%
+ \edef\topskipfactor {\interlinespaceparameter\c!top }%
+ \edef\maxdepthfactor {\interlinespaceparameter\c!bottom }%
+ \edef\m_spac_vertical_baseline_stretch_factor{\interlinespaceparameter\c!stretch }%
+ \edef\m_spac_vertical_baseline_shrink_factor {\interlinespaceparameter\c!shrink }%
% often topskip does more harm than good, so:
\ifx\topskipfactor\v!height
\let\topskipfactor\strutheightfactor
@@ -181,12 +181,12 @@
\localinterlinespacefalse}
\unexpanded\def\dosetupcheckedinterlinespace#1% often a chain
- {\edef\askedinterlinespace{#1}%
- \ifx\askedinterlinespace\empty
+ {\edef\p_spac_checked_interlinespace{#1}%
+ \ifx\p_spac_checked_interlinespace\empty
\spac_linespacing_synchronize_local
- \else\ifcsname\namedinterlinespacehash\askedinterlinespace\s!parent\endcsname % we could have a \s!check
+ \else\ifcsname\namedinterlinespacehash\p_spac_checked_interlinespace\s!parent\endcsname % we could have a \s!check
\pushmacro\currentinterlinespace
- \let\currentinterlinespace\askedinterlinespace
+ \let\currentinterlinespace\p_spac_checked_interlinespace
\spac_linespacing_setup_specified_interline_space % \dosetupspecifiedinterlinespaceindeed
\iflocalinterlinespace
\the\everysetuplocalinterlinespace
@@ -197,8 +197,8 @@
\fi
\popmacro\currentinterlinespace
\else
- \normalexpanded{\noexpand\doifassignmentelse{\askedinterlinespace}%
- \setupspecifiedinterlinespace\setuprelativeinterlinespace[\askedinterlinespace]}%
+ \normalexpanded{\noexpand\doifassignmentelse{\p_spac_checked_interlinespace}%
+ \setupspecifiedinterlinespace\setuprelativeinterlinespace[\p_spac_checked_interlinespace]}%
\iflocalinterlinespace
\the\everysetuplocalinterlinespace
\else
@@ -217,8 +217,6 @@
\let\switchtointerlinespace\setuplocalinterlinespace
-% \definecomplexorsimple\setupinterlinespace
-
%D Helpers
\newskip \s_spac_lastskip
@@ -301,15 +299,15 @@
\def\spac_helpers_vglue_indeed
{\par
\d_spac_prevdepth\prevdepth
- \hrule\!!height\zeropoint
+ \hrule\s!height\zeropoint
\nobreak
\vskip\s_spac_lastskip
\prevdepth\d_spac_prevdepth}
\def\spac_helpers_hglue_indeed
- {\dontleavehmode % \leavevmode
+ {\dontleavehmode
\c_spac_spacefactor\spacefactor
- \vrule\!!width\zeropoint
+ \vrule\s!width\zeropoint
\nobreak
\hskip\s_spac_lastskip
\spacefactor\c_spac_spacefactor}
@@ -414,6 +412,10 @@
\parskip\zeropoint
\let\v_spac_whitespace_current\v!none}
+\appendtoks
+ \forgetparskip
+\to \everyforgetall
+
% \installwhitespacemethod \s!unknown {\s_spac_whitespace_parskip\commalistelement\relax}
%
% \def\spac_whitespace_setup_method#1%
@@ -422,6 +424,9 @@
\unexpanded\def\nowhitespace{\vspacing[\v!nowhite]}
\unexpanded\def\whitespace {\vspacing[\v!white]}
+\setupwhitespace
+ [\v!none]
+
% Packed:
% todo: when packed blocks blank, we need to enable forced
@@ -527,8 +532,6 @@
{\removeunwantedspaces
\egroup}
-\newdimen\d_spac_prevdepth
-
\def\spac_lines_start_correction_yes[#1]%
{\edef\m_spac_lines_around{#1}%
\spac_lines_action_around
@@ -601,6 +604,7 @@
%
% \setstrut : set up \strut
% \setnostrut : reset \strut, \endstrut, \begstrut
+% \setnonestrut : reset \strut (fast one)
%
% \setteststrut : set up visible struts
% \resetteststrut : set up invisible struts
@@ -628,6 +632,8 @@
% not exactly pretty. A very small value (0.025) suffices,
% because a positive glue is infinitely stretchable.
+% more of the next will become private:
+
\newdimen\strutdimen
\newdimen\lineheight
\newdimen\openlineheight
@@ -640,12 +646,14 @@
\def\strutdepthfactor {.28}
\def\baselinefactor {2.8}
-\def\baselinegluefactor {0}
+
+\let\m_spac_vertical_baseline_stretch_factor \zerocount
+\let\m_spac_vertical_baseline_shrink_factor \zerocount
\def\minimumstrutheight {\zeropoint}
\def\minimumstrutdepth {\zeropoint}
-\def\normallineheight {\baselinefactor ex}
+\def\normallineheight {\baselinefactor\exheight}
\def\minimumlinedistance {\lineskip}
\def\strutheight {\zeropoint}
@@ -688,7 +696,7 @@
\else
\systemtopskipfactor\globalbodyfontsize
\ifcase\bottomraggednessmode % ragged bottom
- \!!plus5\globalbodyfontsize
+ \s!plus5\globalbodyfontsize
\fi
\fi
%\relax
@@ -712,6 +720,11 @@
\lineskip \normallineskip
\lineskiplimit\normallineskiplimit}
+\unexpanded\def\flexiblebaselines
+ {\baselineskip \normalbaselineskip
+ \lineskip 1\normallineskip \s!plus 1\s!fill
+ \lineskiplimit\normallineskiplimit}
+
\unexpanded\def\setnormalbaselines
{\ifdim\normallineheight>\zeropoint
\lineheight\normallineheight
@@ -732,8 +745,8 @@
\fi
\normalbaselineskip\openlineheight
\ifgridsnapping\else
- \!!plus \baselinegluefactor\openlineheight
- \!!minus\baselinegluefactor\openlineheight
+ \s!plus \m_spac_vertical_baseline_stretch_factor\openlineheight
+ \s!minus\m_spac_vertical_baseline_shrink_factor \openlineheight
\fi
\normallineskip\minimumlinedistance\relax % \onepoint\relax
\normallineskiplimit\zeropoint\relax
@@ -750,6 +763,13 @@
\setnormalbaselines
\setstrut}
+\unexpanded\def\forgetverticalstretch % \forgetspacing
+ {\spacing\plusone}
+
+\appendtoks
+ \forgetverticalstretch
+\to \everyforgetall % needed in otr
+
%D Sometimes one needs to freeze the interlinespacing
%D
%D \starttyping
@@ -780,7 +800,7 @@
\newbox\strutbox
-\setbox\strutbox\hbox{\vrule\!!height8.5pt\!!depth3.5pt\!!width\zeropoint} % just a start
+\setbox\strutbox\hbox{\vrule\s!height8.5pt\s!depth3.5pt\s!width\zeropoint} % just a start
\def\strut{\relax\ifmmode\copy\else\unhcopy\fi\strutbox}
@@ -846,13 +866,13 @@
\unexpanded\def\charhtstrut
{\begingroup
\setcharstrut{GJY}%
- \vrule\!!width\zeropoint\!!depth\zeropoint\!!height\strutht
+ \vrule\s!width\zeropoint\s!depth\zeropoint\s!height\strutht
\endgroup}
\unexpanded\def\chardpstrut
{\begingroup
\setcharstrut{gjy}%
- \vrule\!!width\zeropoint\!!depth\strutdp\!!height\zeropoint
+ \vrule\s!width\zeropoint\s!depth\strutdp\s!height\zeropoint
\endgroup}
% because of all the callbacks in mkiv, we avoid unnecessary boxes ...
@@ -881,20 +901,22 @@
\def\spac_struts_set_hide
{\setbox\strutbox\hbox
{\vrule
- \!!width \zeropoint
- \!!height\strutht
- \!!depth \strutdp}}
+ \s!width \zeropoint
+ \s!height\strutht
+ \s!depth \strutdp}}
\def\spac_struts_set_vide
{\setbox\strutbox\hbox
- {\hbox to \zeropoint
+ {\spac_struts_vide_hbox to \zeropoint
{% \hss % new, will be option
\vrule
- \!!width \strutwidth
- \!!height\strutht
- \!!depth \strutdp
+ \s!width \strutwidth
+ \s!height\strutht
+ \s!depth \strutdp
\hss}}}
+\let\spac_struts_vide_hbox\hbox % overloaded in trac-vis.mkiv
+
%D The dimen \type {\struttotal} holds the exact size of the
%D strut; occasionally a one scaled point difference can show
%D up with the lineheight.
@@ -933,17 +955,26 @@
{\dosingleempty\spac_struts_setup}
\def\spac_struts_setup[#1]%
- {\ifcsname\??struts#1\endcsname
- \csname\??struts#1\endcsname % these are defined later
+ {\edef\m_strut{#1}%
+ \ifcsname\??struts\m_strut\endcsname
+ \csname\??struts\m_strut\endcsname % these are defined later
+ \else
+ \setcharstrut\m_strut
+ \fi}
+
+\unexpanded\def\synchronizestrut#1% no [] parsing, faster for internal
+ {\edef\m_strut{#1}%
+ \ifcsname\??struts\m_strut\endcsname
+ \csname\??struts\m_strut\endcsname
\else
- \setcharstrut{#1}%
+ \setcharstrut\m_strut
\fi}
-\def\synchronizestrut#1% no [] parsing, faster for internal
+\unexpanded\def\dosynchronizestrut#1% no [] parsing, faster for internal
{\ifcsname\??struts#1\endcsname
\csname\??struts#1\endcsname
\else
- \setcharstrut{#1}%
+ \setcharstrut{#1}%
\fi}
\unexpanded\def\showstruts
@@ -954,6 +985,14 @@
{\def\strutwidth{.8pt}%
\setstrut}
+\unexpanded\def\dontshowstruts
+ {\unsetteststrut
+ \settestcrlf}
+
+\unexpanded\def\unsetteststrut
+ {\let\strutwidth\zeropoint
+ \setstrut}
+
\def\autostrutfactor{1.1}
\unexpanded\def\setautostrut
@@ -1094,7 +1133,7 @@
\unexpanded\def\offinterlineskip
{\baselineskip-\thousandpoint
- \lineskip\zeropoint
+ \lineskip \zeropoint
\lineskiplimit\maxdimen
% We also need this here now; thanks to taco for figuring that out!
\def\minimumlinedistance{\zeropoint}}
@@ -1108,8 +1147,8 @@
\unexpanded\def\spac_helpers_push_interlineskip_yes
{\edef\oninterlineskip
- {\baselineskip\the\baselineskip
- \lineskip\the\lineskip
+ {\baselineskip \the\baselineskip
+ \lineskip \the\lineskip
\lineskiplimit\the\lineskiplimit
\noexpand\edef\noexpand\minimumlinedistance{\the\dimexpr\minimumlinedistance}%
\let\noexpand\offinterlineskip\noexpand\normaloffinterlineskip}} % \noexpand not needed
@@ -1245,7 +1284,6 @@
\definesystemattribute[skipcategory] [public]
\definesystemattribute[skippenalty] [public]
\definesystemattribute[skiporder] [public]
-\definesystemattribute[displaymath] [public]
\definesystemattribute[snapmethod] [public]
\definesystemattribute[snapvbox] [public]
%definesystemattribute[snapcategory] [public]
@@ -1399,6 +1437,15 @@
\unexpanded\def\snaptogrid {\dosingleempty\spac_grids_snap_to}
\unexpanded\def\placeongrid{\dosingleempty\spac_grids_place_on}
+\unexpanded\def\startgridsnapping
+ {\dosingleempty\spac_grids_start_snapping}
+
+\unexpanded\def\spac_grids_start_snapping[#1]%
+ {\snaptogrid[#1]\vbox\bgroup}
+
+\unexpanded\def\stopgridsnapping
+ {\egroup}
+
% \def\spac_grids_move_on[#1]%
% {[obsolete]} % gone, unless we set an attribute
@@ -1484,7 +1531,7 @@
\ruledvbox
\fi}
-\def\gridboxwidth{\ifcase\gridboxlinemode0\or.5\or.5\or0\else.5\fi\testrulewidth}
+\def\gridboxwidth{\ifcase\gridboxlinemode0\or.5\or.5\or0\else.5\fi\linewidth}
\unexpanded\def\setgridbox#1#2#3% maybe ifgridsnapping at outer level
{\setbox#1\gridboxvbox to #3 % given size
@@ -1492,10 +1539,10 @@
\resetteststrut
\offinterlineskip
\hsize#2%
- \baselinerulefalse
\ifnum\gridboxlinenomode=\plusthree
\gridboxlinenomode\ifodd\realpageno\plusone\else\plustwo\fi
\fi
+ \topskipcorrection
\gridboxvbox % calculated size
{\getrawnoflines{#3}% \getnoflines{#3}%
\scratchdimen\dimexpr#2+\lineheight\relax
@@ -1504,17 +1551,17 @@
\hskip-.5\lineheight\relax
\ifcase\gridboxlinenomode\or
\rlap
- {\hskip.2\bodyfontsize\hskip\scratchdimen
- \infofont\hbox to 1em{\hss\recurselevel}}%
+ {\hskip\dimexpr.2\bodyfontsize+\scratchdimen\relax
+ \infofont\hbox to \emwidth{\hss\recurselevel}}%
\or
\llap
- {\infofont\hbox to 1em{\hss\recurselevel}%
+ {\infofont\hbox to \emwidth{\hss\recurselevel}%
\hskip.2\bodyfontsize}%
\fi
\vrule
- \!!height \gridboxwidth
- \!!depth \gridboxwidth
- \!!width \scratchdimen
+ \s!height \gridboxwidth
+ \s!depth \gridboxwidth
+ \s!width \scratchdimen
\par}}
\vfill}}
@@ -1576,6 +1623,18 @@
{\setvalue{\??vspacingamount#1}{\ifgridsnapping#3\else#2\fi}%
\ctxcommand{vspacingsetamount("#1")}}
+% \installcorenamespace{vspacingamountnormal}
+% \installcorenamespace{vspacingamountgrid}
+
+% \def\spac_vspacing_define_amount[#1][#2][#3]% can be combined
+% {\ifcsname n>#1\endcsname\else
+% \expandafter\newtoks\csname n>#1\endcsname
+% \expandafter\newtoks\csname g>#1\endcsname
+% \fi
+% \csname n>#1\endcsname{#2}%
+% \csname g>#1\endcsname{#3}%
+% \ctxcommand{vspacingsetamount("#1")}}
+
\unexpanded\def\definevspacing
{\dodoubleempty\spac_vspacing_define}
@@ -1587,7 +1646,8 @@
\newtoks\everybeforeblankhandling
\newtoks\everyafterblankhandling
-\newconditional\someblankdone
+\newconditional\c_space_vspacing_done
+\newconditional\c_space_vspacing_fixed
\appendtoks
\s_spac_vspacing_temp\zeropoint
@@ -1595,45 +1655,46 @@
\attribute\skippenaltyattribute \attributeunsetvalue
\attribute\skiporderattribute \attributeunsetvalue
\ifgridsnapping
- \settrue\blankisfixed
+ \settrue\c_space_vspacing_fixed
\else
- \setfalse\blankisfixed
+ \setfalse\c_space_vspacing_fixed
\fi
\to \everybeforeblankhandling
\appendtoks
\s_spac_vspacing_temp\plusone\s_spac_vspacing_temp
- \ifconditional\blankisfixed \else
- \!!plus \skipgluefactor\s_spac_vspacing_temp
- \!!minus\skipgluefactor\s_spac_vspacing_temp
+ \ifconditional\c_space_vspacing_fixed \else
+ \s!plus \skipgluefactor\s_spac_vspacing_temp
+ \s!minus\skipgluefactor\s_spac_vspacing_temp
\fi
+ \relax
\to \everyafterblankhandling
-\def\setblankcategory#1%
- {\settrue\someblankdone
+\unexpanded\def\setblankcategory#1%
+ {\settrue\c_space_vspacing_done
\attribute\skipcategoryattribute#1\relax}
-\def\setblankorder#1%
+\unexpanded\def\setblankorder#1%
{\attribute\skiporderattribute#1\relax}
-\def\fixedblankskip
- {\settrue\blankisfixed}
+\unexpanded\def\fixedblankskip
+ {\settrue\c_space_vspacing_fixed}
-\def\flexibleblankskip
- {\setfalse\blankisfixed}
+\unexpanded\def\flexibleblankskip
+ {\setfalse\c_space_vspacing_fixed}
-\def\addblankskip#1#2#3%
- {\settrue\someblankdone
- \advance\s_spac_vspacing_temp#1\dimexpr\ifgridsnapping#3\else#2\fi\relax\relax}
+% \unexpanded\def\addblankskip#1#2#3%
+% {\settrue\c_space_vspacing_done
+% \advance\s_spac_vspacing_temp#1\dimexpr\ifgridsnapping#3\else#2\fi\relax\relax}
-\def\setblankpenalty#1%
+\unexpanded\def\setblankpenalty#1%
{\flushblankhandling
- \settrue\someblankdone
+ \settrue\c_space_vspacing_done
\attribute\skipcategoryattribute \plusthree
\attribute\skippenaltyattribute #1\relax
\flushblankhandling}
-\unexpanded\def\startblankhandling
+\unexpanded\def\startblankhandling % move this to \vspacing
{\par
\ifvmode
\expandafter\dostartblankhandling
@@ -1646,30 +1707,34 @@
\def\dostartblankhandling
{\begingroup
- \setfalse\someblankdone
+ \setfalse\c_space_vspacing_done
\the\everybeforeblankhandling}
\unexpanded\def\stopblankhandling
{\the\everyafterblankhandling
- \ifconditional\someblankdone
+ \ifconditional\c_space_vspacing_done
\vskip\s_spac_vspacing_temp
\fi
\endgroup}
-\def\flushblankhandling
+\unexpanded\def\flushblankhandling
{\the\everyafterblankhandling
- \ifconditional\someblankdone
+ \ifconditional\c_space_vspacing_done
\vskip\s_spac_vspacing_temp
\fi
- \setfalse\someblankdone
+ \setfalse\c_space_vspacing_done
\the\everybeforeblankhandling}
\def\addpredefinedblankskip#1#2%
- {\settrue\someblankdone
+ {\settrue\c_space_vspacing_done
\advance\s_spac_vspacing_temp#1\dimexpr\csname\??vspacingamount#2\endcsname\relax}
-\def\addaskedblankskip#1#2%
- {\settrue\someblankdone
+% \unexpanded\def\addpredefinedblankskip#1#2%
+% {\settrue\c_space_vspacing_done
+% \advance\s_spac_vspacing_temp#1\dimexpr\the\csname\ifgridsnapping g\else n\fi>#2\endcsname\relax}
+
+\unexpanded\def\addaskedblankskip#1#2%
+ {\settrue\c_space_vspacing_done
\advance\s_spac_vspacing_temp#1\dimexpr#2\relax}
% The main spacer:
@@ -1696,17 +1761,38 @@
\fi\fi}
\def\spac_vspacing_yes_indeed[#1]%
- {\ifmmode\else\ctxcommand{vspacing("#1")}\fi}
+ {\ifmmode\else\par\ctxcommand{vspacing("#1")}\fi}
\def\spac_vspacing_yes_ignore[#1]%
{\ifmmode\else\par\fi}
\def\spac_vspacing_nop_indeed
- {\ifmmode\else\ctxcommand{vspacing("\currentvspacing")}\fi}
+ {\ifmmode\else\par\ctxcommand{vspacing("\currentvspacing")}\fi}
\def\spac_vspacing_nop_ignore
{\ifmmode\else\par\fi}
+\def\directvspacing#1%
+ {\par\ctxcommand{vspacing("#1")}}
+
+% handy (and faster):
+
+\unexpanded\def\directvpenalty#1%
+ {\begingroup
+ \attribute\skipcategoryattribute \plusthree
+ \attribute\skippenaltyattribute #1\relax
+ \attribute\skiporderattribute \attributeunsetvalue
+ \vskip\zeropoint
+ \endgroup}
+
+\unexpanded\def\directvskip#1%
+ {\begingroup
+ \attribute\skipcategoryattribute \plusone
+ \attribute\skippenaltyattribute \attributeunsetvalue
+ \attribute\skiporderattribute \attributeunsetvalue
+ \vskip#1\relax
+ \endgroup}
+
% these depend on bigskipamount cum suis so we'd better sync them
\unexpanded\def\setupvspacing
@@ -1730,17 +1816,6 @@
\let\synchronizevspacing\setupvspacing_nop
-% used both
-
-\unexpanded\def\inhibitblank
- {\vspacing[\v!disable]} % can be made faster
-
-\let\doinhibitblank\inhibitblank % keep this command, used in styles
-
-\let\setupblank \setupvspacing
-\let\blank \vspacing
-\let\synchronizeblank\synchronizevspacing
-
% category:4 is default
% this interface might change (into an \install, but we will then keep this one hidden)
@@ -1775,7 +1850,7 @@
\relax}
\definevspacing[\v!preference][penalty:-500] % goodbreak
-\definevspacing[\v!samepage] [penalty:10000] % real strong
+\definevspacing[\v!samepage] [penalty:10000] % nobreak
\definevspacing[\v!max] [category:1]
\definevspacing[\v!force] [category:2]
\definevspacing[\v!disable] [category:5]
@@ -1790,12 +1865,27 @@
\dorecurse{10} % todo: other values < 4000
{\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr4000+250*\recurselevel\relax]}}
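Worked out, the loop above assigns penalties in steps of 250 above the 4000 base, for example:

% samepage-1  -> penalty:4250
% samepage-5  -> penalty:5250
% samepage-10 -> penalty:6500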
+\definevspacing[\v!default] [\v!big] % todo: needs to adapt to \setupblank
+\definevspacing[\v!before] [\v!default] % but we need to avoid circular references
+\definevspacing[\v!inbetween][\v!default] % then
+\definevspacing[\v!after] [\v!before]
+
+\setupvspacing
+ [\v!big] % alternatively [\v!standard]
+
%D Maybe at some point we will differ between \type {\vspacing} and
%D \type {\blank} (we needed the first one while playing with the
%D new code).
-\let\blank \vspacing
+\unexpanded\def\inhibitblank
+ {\vspacing[\v!disable]} % can be made faster
+
+\let\doinhibitblank\inhibitblank % keep this command, used in styles
+
\let\defineblank \definevspacing
+\let\setupblank \setupvspacing
+\let\blank \vspacing
+\let\synchronizeblank \synchronizevspacing
\let\defineblankmethod\definevspacingamount
%D The following command is for Wolfgang. It has to be used with
@@ -1850,4 +1940,130 @@
\relax
\fi}
+%D Some preliminary code: a simple and fast hanger, for usage in
+%D macros (moved from cont-new).
+
+\installcorenamespace {hanging}
+
+\installdirectcommandhandler \??hanging {hanging}
+
+\setuphanging
+ [\c!distance=.5\emwidth,
+ \c!location=\v!left,
+ \c!n=\zerocount]
+
+\unexpanded\def\starthanging
+ {\dontleavehmode\bgroup
+ \dosingleempty\spac_hanging_start}
+
+\unexpanded\def\stophanging
+ {\endgraf
+ \egroup}
+
+\let\m_spac_hanging_location\empty
+
+\def\spac_hanging_start[#1]%
+ {\doifassignmentelse{#1}
+ {\let\m_spac_hanging_location\empty
+ \setupcurrenthanging[#1]}%
+ {\edef\m_spac_hanging_location{#1}}%
+ \ifx\m_spac_hanging_location\empty
+ \edef\m_spac_hanging_location{\directhangingparameter\c!location}%
+ \fi
+ \dowithnextboxcs\spac_hanging_finish\hbox}
+
+\def\spac_hanging_finish
+ {\scratchdistance\directhangingparameter\c!distance\relax
+ \ifdim\ht\nextbox>\strutht
+ \setbox\nextbox\tbox{\box\nextbox}%
+ \fi
+ \scratchcounter\directhangingparameter\c!n\relax
+ \ifnum\scratchcounter>\zerocount
+ \hangafter-\scratchcounter
+ \else
+ \getboxheight\scratchdimen\of\box\nextbox
+ \getnoflines\scratchdimen
+ \hangafter-\noflines
+ \fi
+ \ht\nextbox\strutht
+ \dp\nextbox\strutdp
+ \scratchwidth\dimexpr\wd\nextbox+\scratchdistance\relax
+ \ifx\m_spac_hanging_location\v!right
+ \hangindent-\scratchwidth
+ \rlap{\hskip\dimexpr\hsize-\wd\nextbox\relax\box\nextbox}%
+ \else
+ \hangindent\scratchwidth
+ \llap{\box\nextbox\hskip\scratchdistance}%
+ \fi
+ \ignorespaces}
+
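A hypothetical usage sketch of the hanger defined above; the keys follow the assignments seen in \setuphanging (location, distance, n), and the box content is only an example:

% \starthanging[location=right,distance=1em,n=3]
%     {\externalfigure[cow][height=1.5cm]}
%     some paragraph text that wraps around the hung box ...
% \stophanging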
+%D \macros
+%D {startfixed}
+%D
+%D \starttyping
+%D \startitemize
+%D \startitem \externalfigure[cow][height=1cm] \stopitem
+%D \startitem \externalfigure[cow][height=1cm] \stopitem
+%D
+%D \startitem \startfixed \externalfigure[cow][height=1cm]\stopfixed \stopitem
+%D \startitem \startfixed[high]\externalfigure[cow][height=1cm]\stopfixed \stopitem
+%D \startitem \startfixed[low] \externalfigure[cow][height=1cm]\stopfixed \stopitem
+%D \startitem \startfixed[lohi]\externalfigure[cow][height=1cm]\stopfixed \stopitem
+%D
+%D \startitem test \par \startfixed \externalfigure[koe][height=1cm]\stopfixed \stopitem
+%D \startitem test \par \startfixed[high]\externalfigure[koe][height=1cm]\stopfixed \stopitem
+%D \startitem test \par \startfixed[low] \externalfigure[koe][height=1cm]\stopfixed \stopitem
+%D \startitem test \par \startfixed[lohi]\externalfigure[koe][height=1cm]\stopfixed \stopitem
+%D \stopitemize
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\installcorenamespace{fixedalternatives}
+
+\unexpanded\def\startfixed
+ {\bgroup
+ \dosingleempty\typo_fixed_start}
+
+\def\typo_fixed_start
+ {\ifhmode
+ \expandafter\typo_fixed_start_h
+ \else
+ \expandafter\typo_fixed_start_v
+ \fi}
+
+\def\typo_fixed_start_h[#1]%
+ {\let\stopfixed\typo_fixed_stop_h
+ \dowithnextbox{\typo_fixed_finish{#1}}%
+ \vbox\bgroup
+ %ignorespaces
+ \setlocalhsize}
+
+\unexpanded\def\typo_fixed_stop_h
+ {%removeunwantedspaces
+ \egroup
+ \egroup}
+
+\def\typo_fixed_start_v[#1]%
+ {\let\stopfixed\typo_fixed_stop_v
+ \startbaselinecorrection}
+
+\unexpanded\def\typo_fixed_stop_v
+ {\stopbaselinecorrection
+ \egroup}
+
+\letvalue{\??fixedalternatives \v!high}\bbox
+\letvalue{\??fixedalternatives \v!low}\tbox
+\letvalue{\??fixedalternatives \v!middle}\vcenter
+\letvalue{\??fixedalternatives \v!lohi}\vcenter
+\letvalue{\??fixedalternatives\s!unknown}\tbox
+\letvalue{\??fixedalternatives\s!default}\tbox
+
+\unexpanded\def\typo_fixed_finish#1%
+ {\expandnamespacevalue\??fixedalternatives{#1}\s!default{\box\nextbox}}
+
+% %D Forgotten already:
+%
+% \def\shapefill{\vskip\zeropoint\s!plus\lineheight\s!minus\lineheight\relax}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/status-files.pdf b/Master/texmf-dist/tex/context/base/status-files.pdf
index 8d2ac58573f..23ddb60ccf0 100644
--- a/Master/texmf-dist/tex/context/base/status-files.pdf
+++ b/Master/texmf-dist/tex/context/base/status-files.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/status-lua.pdf b/Master/texmf-dist/tex/context/base/status-lua.pdf
index 0d58d1b16fd..19f6e0668f0 100644
--- a/Master/texmf-dist/tex/context/base/status-lua.pdf
+++ b/Master/texmf-dist/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/status-mkiv.lua b/Master/texmf-dist/tex/context/base/status-mkiv.lua
index d7ab099a604..f08b2ec6333 100644
--- a/Master/texmf-dist/tex/context/base/status-mkiv.lua
+++ b/Master/texmf-dist/tex/context/base/status-mkiv.lua
@@ -1,1515 +1,5977 @@
--- colo-run.mkiv colo-imp-*.mkiv ...
-
return {
- core = {
- {
- filename = "syst-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "norm-ctx",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "syst-pln",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "syst-mes",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "luat-cod",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "luat-bas",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (3)",
- },
- {
- filename = "luat-lib",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (3)",
- },
- {
- filename = "catc-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "catc-act",
- marktype = "mkiv",
- status = "okay",
- comment = "forward dependency",
- },
- {
- filename = "catc-def",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "catc-ctx",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "catc-sym",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "cldf-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (1)",
- },
- {
- filename = "syst-aux",
- marktype = "mkiv",
- status = "okay",
- comment = "will be better protected"
- },
- {
- filename = "syst-lua",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (1)",
- },
- {
- filename = "syst-con",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (1)",
- },
- {
- filename = "syst-fnt",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (1)",
- },
- {
- filename = "syst-rtp",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe combine (1)",
- },
- {
- filename = "file-ini",
- marktype = "mkvi",
- status = "okay",
- comment = "maybe combine (2)",
- },
- {
- filename = "file-res",
- marktype = "mkvi",
- status = "okay",
- comment = "maybe combine (2)",
- },
- {
- filename = "file-lib",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "supp-dir",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "char-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "char-utf",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "char-act",
- marktype = "mkiv",
- status = "okay",
- comment = "forward dependency",
- },
- {
- filename = "mult-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mult-sys",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mult-def",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mult-chk",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mult-aux",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mult-dim",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "cldf-int",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "luat-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "toks-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe this becomes a runtime module",
- },
- {
- filename = "attr-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-var",
- marktype = "mkiv",
- status = "unknown",
- comment = "code might move from here",
- },
- {
- filename = "core-env",
- marktype = "mkiv",
- status = "okay",
- comment = "might need more redoing",
- },
- {
- filename = "layo-ini",
- marktype = "mkiv",
- status = "todo",
- comment = "more might move to here",
- },
- {
- filename = "node-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe this becomes a runtime module",
- },
- {
- filename = "cldf-bas",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "node-fin",
- marktype = "mkiv",
- status = "okay",
- comment = "might need more redoing",
- },
- {
- filename = "node-mig",
- marktype = "mkiv",
- status = "okay",
- comment = "needs integration and configuration",
- },
- {
- filename = "node-par",
- marktype = "mkiv",
- status = "experimental",
- },
- {
- filename = "back-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "attr-col",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "attr-lay",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "attr-neg",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "attr-eff",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "trac-tex",
- marktype = "mkiv",
- status = "okay",
- comment = "needs more usage",
- },
- {
- filename = "trac-deb",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "supp-box",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "supp-vis",
- marktype = "mkiv",
- status = "unknown",
- comment = "will become a module (and part will stay in the core)",
- },
- {
- filename = "supp-fun",
- marktype = "mkiv",
- status = "unknown",
- comment = "will be integrated elsewhere",
- },
- {
- filename = "supp-ran",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "supp-mat",
- marktype = "mkiv",
- status = "okay",
- comment = "will be moved to the math-* modules",
- },
- {
- filename = "supp-ali",
- marktype = "mkiv",
- status = "unknown",
- comment = "will be reimplemented",
- },
- {
- filename = "supp-num",
- marktype = "mkiv",
- status = "obsolete",
- comment = "replaced by units",
- },
- {
- filename = "typo-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "will grow",
- },
- {
- filename = "page-ins",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "file-syn",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "file-mod",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "core-con",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "cont-fil",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "regi-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "enco-ini",
- marktype = "mkiv",
- status = "messy",
- },
- {
- filename = "hand-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-lab",
- marktype = "mkiv",
- status = "okay",
- comment = "namespace should be languages",
- },
- {
- filename = "unic-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-uti",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-two",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe rename to core-two",
- },
- {
- filename = "core-dat",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "colo-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "colo-ext",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "colo-grp",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "node-bck",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "trac-vis",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "lang-mis",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-url",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-def",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-wrd",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "file-job",
- marktype = "mkvi",
- status = "okay",
- comment = "might need more redoing",
- },
- {
- filename = "symb-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "sort-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "pack-mis",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "pack-rul",
- marktype = "mkiv",
- status = "okay",
- comment = "namespace to be done",
- },
- {
- filename = "pack-mrl",
- marktype = "mkiv",
- status = "todo",
- comment = "this is something to be done on a rainy day"
- },
- {
- filename = "pack-bck",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "pack-fen",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lxml-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lxml-sor",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-prc",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "strc-ini",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "strc-tag",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "strc-doc",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-num",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-mar",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-sbe",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-lst",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "strc-sec",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-pag",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-ren",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-xml",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-def",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-ref",
- marktype = "mkvi",
- status = "unknown",
- },
- {
- filename = "strc-reg",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-lev",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "spac-ali",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe some tuning is needed / will happen",
- },
- {
- filename = "spac-hor",
- marktype = "mkiv",
- status = "okay",
- comment = "probably needs some more work",
- },
- {
- filename = "spac-ver",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe some changes will happen"
- },
- {
- filename = "spac-lin",
- marktype = "mkiv",
- status = "unknown",
- comment = "could be improved if needed"
- },
- {
- filename = "spac-pag",
- marktype = "mkiv",
- status = "okay",
- comment = "this needs to be checked occasionally",
- },
- {
- filename = "spac-par",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "spac-def",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "spac-grd",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "anch-pos",
- marktype = "mkiv",
- status = "okay",
- comment = "in transition",
- },
- {
- filename = "anch-pgr",
- marktype = "mkiv",
- status = "okay",
- comment = "in transition",
- },
- {
- filename = "scrn-ini",
- marktype = "mkvi",
- status = "okay",
- comment = "maybe change locationattribute names"
- },
- {
- filename = "scrn-ref",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "pack-obj",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-itm",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "strc-des",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-des",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-enu",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-ind",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-lab",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "core-sys",
- marktype = "mkiv",
- status = "okay",
- comment = "a funny mix",
- },
- {
- filename = "page-var",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-otr",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "page-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "code might end up elsewhere",
- },
- {
- filename = "page-fac",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-brk",
- marktype = "mkiv",
- status = "okay",
- comment = "otr commands will be redone",
- },
- {
- filename = "page-col",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-inf",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-grd",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-flt",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-bck",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "page-not",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-one",
- marktype = "mkiv",
- status = "okay",
- comment = "can probably be improved",
- },
- {
- filename = "page-lay",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "page-box",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "page-txt",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "page-sid",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "strc-flt",
- marktype = "mkvi",
- status = "unknown",
- },
- {
- filename = "page-mis",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-mbk",
- marktype = "mkvi",
- status = "okay",
- comment = "might be extended",
- },
- {
- filename = "page-mul",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-set",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "pack-lyr",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "pack-pos",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-mak",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "page-lin",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-par",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "typo-pag",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-mar",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "buff-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "check other modules for buffer usage",
- },
- {
- filename = "buff-ver",
- marktype = "mkiv",
- status = "okay",
- comment = "check obsolete processbuffer"
- },
- {
- filename = "buff-par",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "buff-imp-tex",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "buff-imp-mp",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "buff-imp-lua",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "buff-imp-xml",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "buff-imp-parsed-xml",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-blk",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-imp",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-sel",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-com",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "scrn-pag",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "scrn-wid",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "scrn-but",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "scrn-bar",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "strc-bkm",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-com",
- marktype = "mkiv",
- status = "okay",
- comment = "somewhat weird",
- },
- {
- filename = "tabl-pln",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-tab",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-tbl",
- marktype = "mkiv",
- status = "okay",
- comment = "can probably be improved (names and such)",
- },
- {
- filename = "tabl-ntb",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-nte",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-ltb",
- marktype = "mkiv",
- status = "unknown",
- comment = "will be redone when needed",
- },
- {
- filename = "tabl-tsp",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "tabl-xtb",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "java-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "scrn-fld",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "scrn-hlp",
- marktype = "mkvi",
- status = "okay",
- comment = "namespace needs checking"
- },
- {
- filename = "char-enc",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "font-lib",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-fil",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-fea",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-mat",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-ini",
- marktype = "mkvi",
- status = "okay",
- comment = "needs occasional checking and upgrading",
- },
- {
- filename = "font-sym",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-sty",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-set",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-emp",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-col",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-pre",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "font-unk",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "font-tra",
- marktype = "mkiv",
- status = "okay",
- comment = "likely this will become a module",
- },
- {
- filename = "font-uni",
- marktype = "mkiv",
- status = "okay",
- comment = "this one might be merged",
- },
- {
- filename = "font-col",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "font-gds",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lxml-css",
- marktype = "mkiv",
- status = "okay",
- comment = "this is work in progress",
- },
- {
- filename = "spac-chr",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "blob-ini",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "typo-cln",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-spa",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-krn",
- marktype = "mkiv",
- status = "okay",
- comment = "do we keep the style and color or not"
- },
- {
- filename = "typo-itc",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "typo-dir",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe singular setup"
- },
- {
- filename = "typo-brk",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-cap",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-dig",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-rep",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-txt",
- marktype = "mkvi",
- status = "okay",
- comment = "maybe there will be a nicer interface",
- },
- {
- filename = "typo-par",
- marktype = "mkiv",
- status = "okay",
- comment = "might get extended",
- },
- {
- filename = "type-ini",
- marktype = "mkvi",
- status = "okay",
- },
- {
- filename = "type-set",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "scrp-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "prop-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "this module is obsolete",
- },
- {
- filename = "mlib-ctx",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "meta-ini",
- marktype = "mkiv",
- status = "okay",
- comment = "metapost code is always evolving",
- },
- {
- filename = "meta-tex",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "meta-fun",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe this one will be merged"
- },
- {
- filename = "meta-pag",
- marktype = "mkiv",
- status = "okay",
- comment = "might get updated when mp code gets cleaned up",
- },
- {
- filename = "page-mrk",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-flw",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-spr",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-plg",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-str",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "anch-pgr",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "anch-bck",
- marktype = "mkvi",
- status = "unknown",
- },
- {
- filename = "anch-tab",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "anch-bar",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "anch-snc",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "math-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "math-pln",
- marktype = "mkiv",
- status = "okay",
- comment = "this file might merge into others",
- },
- {
- filename = "math-for",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "math-def",
- marktype = "mkiv",
- status = "okay",
- comment = "eventually this will be split and spread",
- },
- {
- filename = "math-ali",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "math-arr",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "math-frc",
- marktype = "mkiv",
- status = "okay",
- comment = "at least for the moment",
- },
- {
- filename = "math-scr",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "math-int",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "math-del",
- marktype = "mkiv",
- status = "okay",
- comment = "code get replaced (by autodelimiters)",
- },
- {
- filename = "math-inl",
- marktype = "mkiv",
- status = "okay",
- comment = "code might move to here",
- },
- {
- filename = "math-dis",
- marktype = "mkiv",
- status = "okay",
- comment = "code might move to here",
- },
- {
- filename = "phys-dim",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "strc-mat",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "chem-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "chem-str",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-scr",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-fnt",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "node-rul",
- marktype = "mkiv",
- status = "okay",
- comment = "maybe some cleanup is needed",
- },
- {
- filename = "node-spl",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-not",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "strc-lnt",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "core-mis",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "pack-com",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "typo-del",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "grph-trf",
- marktype = "mkiv",
- status = "okay",
- comment = "namespace has to be made consistent"
- },
- {
- filename = "grph-inc",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "grph-fig",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "grph-raw",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "pack-box",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "pack-bar",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "page-app",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "meta-fig",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "lang-spa",
- marktype = "mkiv",
- status = "okay",
- comment = "more or less obsolete"
- },
- {
- filename = "bibl-bib",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "bibl-tra",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "meta-xml",
- marktype = "mkiv",
- status = "okay",
- comment = "not needed"
- },
- {
- filename = "cont-log",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "task-ini",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "cldf-ver",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "cldf-com",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-ctx",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "core-ini",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "core-def",
- marktype = "mkiv",
- status = "unknown",
- },
- {
- filename = "back-pdf",
- marktype = "mkiv",
- status = "okay",
- comment = "object related code might move or change",
- },
- {
- filename = "mlib-pdf",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "mlib-pps",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "meta-pdf",
- marktype = "mkiv",
- status = "okay",
- },
- {
- filename = "grph-epd",
- marktype = "mkiv",
- status = "okay",
- comment = "might need more work",
- },
- {
- filename = "back-exp",
- marktype = "mkiv",
- status = "okay",
- comment = "some parameters might move from export to backend"
- },
- },
- extra = {
- {
- filename = "tabl-xnt",
- marktype = "mkvi",
- status = "okay",
- },
- }
+ todo = {
+ category = "lua",
+ filename = "core-run",
+ status = "idea",
+ },
+ main = {
+ {
+ category = "mkiv",
+ filename = "context",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "lus",
+ comment = "stub file for context",
+ filename = "context",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "metatex",
+ loading = "parent",
+ status = "pending",
+ },
+ {
+ category = "lus",
+ comment = "stub file for metatex",
+ filename = "metatex",
+ loading = "parent",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-cs",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-de",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-en",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-fr",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-gb",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-it",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-nl",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-pe",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-ro",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "maybe more delayed loading",
+ filename = "metafun",
+ loading = "parent",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "we keep this around for historic reasons",
+ filename = "ppchtex",
+ loading = "never",
+ status = "okay",
+ },
+ },
+ core = {
+ {
+ category = "mkiv",
+ filename = "syst-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "norm-ctx",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "syst-pln",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "syst-mes",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "luat-cod",
+ loading = "luat-cod",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (3)",
+ filename = "luat-bas",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (3)",
+ filename = "luat-lib",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "catc-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "forward dependency",
+ filename = "catc-act",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "catc-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "catc-ctx",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "catc-sym",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "catc-xml",
+ loading = "module",
+ status = "okay",
+ comment = "only needed for mkii xml parser",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (1)",
+ filename = "cldf-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "syst-aux",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (1)",
+ filename = "syst-lua",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (1)",
+ filename = "syst-con",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (1)",
+ filename = "syst-fnt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe combine (1)",
+ filename = "syst-rtp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe combine (2)",
+ filename = "file-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe combine (2)",
+ filename = "file-res",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "file-lib",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "supp-dir",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "char-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "char-utf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "forward dependency",
+ filename = "char-act",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-sys",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-aux",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-chk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "mult-dim",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cldf-int",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "luat-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe this becomes a runtime module",
+ filename = "toks-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-mkr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code might move from here",
+ filename = "core-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might need more redoing",
+ filename = "core-env",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "in due time more might move to here",
+ filename = "layo-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe this becomes a runtime module",
+ filename = "node-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe use context.generics/context.sprint here",
+ filename = "cldf-bas",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might need more redoing",
+ filename = "node-fin",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs integration and configuration",
+ filename = "node-mig",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-bld",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "node-pag",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "back-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-col",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-lay",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-neg",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "attr-eff",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs more usage",
+ filename = "trac-tex",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "trac-deb",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "trac-ctx",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "supp-box",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "supp-ran",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be moved to the math-* modules",
+ filename = "supp-mat",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will grow",
+ filename = "typo-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "file-syn",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "file-mod",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "core-con",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-fil",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-nop",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-yes",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "regi-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "enco-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "hand-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "namespace should be languages",
+ filename = "lang-lab",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "unic-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "core-uti",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe rename to core-two",
+ filename = "core-two",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "core-dat",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-ext",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-grp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "node-bck",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-cut",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-mis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-url",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-frq",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-frd",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lang-wrd",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "might need more redoing",
+ filename = "file-job",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "sort-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "pack-mis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-rul",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "endpar experimental code",
+ filename = "pack-mrl",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "pack-bck",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-fen",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lxml-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "lxml-sor",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "typo-prc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "strc-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-tag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this module might go away when code has been moved",
+ filename = "strc-doc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "still some rough edges",
+ filename = "strc-num",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-mar",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-sbe",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "strc-lst",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "some of the local current and synchronization macros will be renamed",
+ filename = "strc-sec",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-pag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "(support for) setups might get improved",
+ filename = "strc-ren",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this module might go away",
+ filename = "strc-xml",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "some more low level names might change",
+ filename = "strc-ref",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "use setups for rendering",
+ filename = "strc-reg",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "strc-lev",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe some tuning is needed / will happen",
+ filename = "spac-ali",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "probably needs some more work",
+ filename = "spac-hor",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe some changes will happen",
+ filename = "spac-ver",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "could be improved if needed",
+ filename = "spac-lin",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this needs to be checked occasionally",
+ filename = "spac-pag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "spac-par",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "spac-def",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "needs thinking and redoing",
+ filename = "spac-grd",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "in transition",
+ filename = "anch-pos",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe change locationattribute names",
+ filename = "scrn-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-ref",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will change when we have objects at lua end",
+ filename = "pack-obj",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "strc-itm",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe more common counter code here and setups need to be improved",
+ filename = "strc-con",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "strc-des",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "(interactive) coupling is not yet working",
+ filename = "strc-enu",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-ind",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs to be adapted when strc-con/des/enu changes",
+ filename = "strc-lab",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-syn",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "a funny mix",
+ filename = "core-sys",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-var",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "page-otr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code might end up elsewhere",
+ filename = "page-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "dealing with insertions might change",
+ filename = "page-ins",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-fac",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "otr commands will be redone",
+ filename = "page-brk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "helpers for columns",
+ filename = "page-col",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "room for improvement and extension",
+ filename = "page-inf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-grd",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be extended when columns are redone",
+ filename = "page-flt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-bck",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-not",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "can probably be improved",
+ filename = "page-one",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-lay",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "page-box",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "a few things left",
+ filename = "page-txt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-sid",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "in due time we need a further cleanup",
+ filename = "strc-flt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-pst",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "might be extended",
+ filename = "page-mbk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be reimplemented",
+ filename = "page-mul",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "work in progress",
+ filename = "page-mix",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "will be reimplemented",
+ filename = "page-set",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-lyr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-pos",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "page-mak",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will probably be overhauled some day",
+ filename = "page-lin",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might get extended",
+ filename = "page-par",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-pag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-mar",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-itm",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "check other modules for buffer usage",
+ filename = "buff-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "check obsolete processbuffer",
+ filename = "buff-ver",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "experimental code",
+ filename = "buff-par",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-tex",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-mp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-lua",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-xml",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-parsed-xml",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-default",
+ loading = "indirect",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-escaped",
+ loading = "indirect",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "buff-imp-nested",
+ loading = "indirect",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-blk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-imp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe some extensions and delayed loading, needs checking",
+ filename = "page-sel",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-com",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-pag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "functionality needs checking",
+ filename = "scrn-wid",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-but",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-bar",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "strc-bkm",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "somewhat weird",
+ filename = "tabl-com",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "unchecked",
+ filename = "tabl-pln",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "tabl-tab",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ comment = "can probably be improved (names and such)",
+ filename = "tabl-tbl",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "frozen functionaly so no drastic cleanup",
+ filename = "tabl-ntb",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "tabl-mis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "tabl-nte",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be redone when needed",
+ filename = "tabl-ltb",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ comment = "will be adapted when needed (and rest is done)",
+ filename = "tabl-tsp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "tabl-xtb",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "tabl-xnt",
+ loading = "module",
+ status = "okay",
+ loading = "always",
+ comment = "only when natural tables need a replacement",
+ },
+ {
+ category = "mkiv",
+ filename = "java-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-fld",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "namespace needs checking",
+ filename = "scrn-hlp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "char-enc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-lib",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-fil",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-var",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-fea",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-mat",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "needs occasional checking and upgrading",
+ filename = "font-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-sym",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-sty",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-set",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-emp",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-col",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "font-pre",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "font-unk",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "likely this will become a module",
+ filename = "font-tra",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this could become a module",
+ filename = "font-chk",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this one might be merged",
+ filename = "font-uni",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-col",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-gds",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-aux",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-lan",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this is work in progress",
+ filename = "lxml-css",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "spac-chr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "spac-cha",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "work in progress",
+ filename = "blob-ini",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "trac-vis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "trac-jus",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "trac-vis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-cln",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-spa",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "do we keep the style and color or not",
+ filename = "typo-krn",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "typo-itc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe singular setup",
+ filename = "typo-dir",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-brk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-cap",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-dig",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-rep",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "maybe there will be a nicer interface",
+ filename = "typo-txt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might get extended",
+ filename = "typo-par",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "type-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-set",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-def",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-fbk",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-lua",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-one",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-otf",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-siz",
+ loading = "type-set",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-tmf",
+ loading = "never",
+ status = "okay",
+ comment = "placeholder to prevent other loading",
+ },
+ {
+ category = "mkiv",
+ filename = "scrp-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this module is obsolete",
+ filename = "prop-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mlib-ctx",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "metapost code is always evolving",
+ filename = "meta-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code used in a project",
+ filename = "meta-lua",
+ loading = "experimental",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-tex",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe this one will be merged",
+ filename = "meta-fun",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might get updated when mp code gets cleaned up",
+ filename = "meta-pag",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-grd",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-mrk",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-flw",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-spr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be made better",
+ filename = "page-plg",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs more work (and thinking)",
+ filename = "page-str",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "in transition",
+ filename = "anch-pgr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ comment = "in transition",
+ filename = "anch-bck",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will stay experimental for a while",
+ filename = "anch-tab",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "anch-bar",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "interesting old mechanism to keep around (module?)",
+ filename = "anch-snc",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "math-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this file might merge into others",
+ filename = "math-pln",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "math-for",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "eventually this will be split and spread",
+ filename = "math-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will be checked and improved",
+ filename = "math-ali",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs testing",
+ filename = "math-arr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "math-stc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "at least for the moment",
+ filename = "math-frc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "math-scr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "math-int",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code get replaced (by autodelimiters)",
+ filename = "math-del",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "math-fen",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code might move to here",
+ filename = "math-inl",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "code might move to here",
+ filename = "math-dis",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "phys-dim",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "some more functionality will end up here",
+ filename = "strc-mat",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "chem-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "chem-str",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-scr",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "maybe some cleanup is needed",
+ filename = "node-rul",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "needs testing",
+ filename = "font-sol",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkvI",
+ filename = "strc-not",
+ loading = "always",
+ status = "todo",
+ },
+ {
+ category = "mkvi",
+ comment = "will be extended as part of crited",
+ filename = "strc-lnt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-com",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-del",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "grph-trf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "grph-inc",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "grph-fig",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "grph-raw",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-box",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "pack-bar",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "page-app",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-fig",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "more or less obsolete",
+ filename = "lang-spa",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "bibl-bib",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "bibl-tra",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ comment = "not needed",
+ filename = "meta-xml",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cont-log",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "task-ini",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cldf-ver",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "cldf-com",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "core-ctx",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "will always be messy",
+ filename = "core-def",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "object related code might move or change",
+ filename = "back-pdf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "back-swf",
+ loading = "never",
+ status = "okay",
+ comment = "no code, just an example of usage",
+ },
+ {
+ category = "mkiv",
+ filename = "back-u3d",
+ loading = "never",
+ status = "okay",
+ comment = "no code, just an example of usage",
+ },
+ {
+ category = "mkiv",
+ filename = "mlib-pdf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "mlib-pps",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-pdf",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "might need more work",
+ filename = "grph-epd",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "some parameters might move from export to backend",
+ filename = "back-exp",
+ loading = "always",
+ status = "okay",
+ },
+ },
+ extras = {
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-arrange",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-combine",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-common",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-ideas",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-listing",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-markdown",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-select",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ comment = "add-on for mtx-context",
+ filename = "mtx-context-timing",
+ loading = "on demand",
+ status = "okay",
+ },
+ },
+ implementations = {
+ {
+ category = "mkiv",
+ filename = "colo-imp-dem",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-imp-ema",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-imp-rgb",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-imp-x11",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-imp-xwi",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "java-imp-exa",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "java-imp-fil",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "java-imp-fld",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "java-imp-rhh",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "java-imp-stp",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-clp",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-dum",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-fen",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-mis",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-nav",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-pre",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-imp-txt",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-cow",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-eur",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-jmn",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-mis",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-mvs",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-imp-nav",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-antykwa",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-antykwapoltawskiego",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-asana",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-averia",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-buy",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-cambria",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-charter",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-cleartype",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-computer-modern-unicode",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-cow",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-dejavu",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-euler",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-ghz",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-hgz",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-husayni",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-hvmath",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-inconsolata",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-informal",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-iwona",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-kurier",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-latinmodern",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-liberation",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-libertine",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-lmnames",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-lucida-opentype",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-lucida-typeone",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-mathdesign",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-mathtimes",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-mscore",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-osx",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-postscript",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-punknova",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-texgyre",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-unfonts",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-xits",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-imp-xitsbidi",
+ loading = "on demand",
+ status = "okay",
+ },
+ },
+ lua = {
+ {
+ category = "lua",
+ filename = "anch-pgr",
+ loading = "anch-pgr",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "anch-pos",
+ loading = "anch-pos",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-col",
+ loading = "attr-col",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-eff",
+ loading = "attr-eff",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-ini",
+ loading = "attr-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-lay",
+ loading = "attr-lay",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-neg",
+ loading = "attr-neg",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "attr-mkr",
+ loading = "attr-mkr",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "experimental code, maybe some will move elsewhere",
+ filename = "back-exp",
+ loading = "back-exp",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "back-ini",
+ loading = "back-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "back-pdf",
+ loading = "back-pdf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "bibl-bib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "bibl-tra",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "blob-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "buff-imp-default",
+ loading = "buff-imp-default",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "buff-imp-escaped",
+ loading = "buff-imp-escaped",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "todo: colorization and nesting as in scite",
+ filename = "buff-imp-lua",
+ loading = "buff-imp-lua",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "todo: colorization and nesting as in scite",
+ filename = "buff-imp-mp",
+ loading = "buff-imp-mp",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "buff-imp-nested",
+ loading = "buff-imp-nested",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "buff-imp-parsed-xml",
+ loading = "buff-imp-parsed-xml",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "todo: colorization and nesting as in scite",
+ filename = "buff-imp-tex",
+ loading = "buff-imp-tex",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "todo: colorization and nesting as in scite",
+ filename = "buff-imp-xml",
+ loading = "buff-imp-xml",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "buff-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "buff-par",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe we will provide a few more (nesting) methods",
+ filename = "buff-ver",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "catc-ini",
+ loading = "catc-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-cjk",
+ loading = "char-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-def",
+ loading = "char-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe dataonly",
+ filename = "char-enc",
+ loading = "char-enc",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-ent",
+ loading = "char-ent",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe move blocks table to separate (dataonly) file",
+ filename = "char-ini",
+ loading = "char-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-map",
+ loading = "char-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-tex",
+ loading = "char-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "char-utf",
+ loading = "char-utf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "chem-ini",
+ loading = "chem-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "chem-str",
+ loading = "chem-str",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "will be extended and can be optimized if needed",
+ filename = "cldf-bas",
+ loading = "cldf-bas",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "might change or even go away",
+ filename = "cldf-com",
+ loading = "cldf-com",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "cldf-ini",
+ loading = "cldf-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "cldf-int",
+ loading = "cldf-int",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "maybe this code can be redone more efficiently/robust",
+ filename = "cldf-ver",
+ loading = "cldf-ver",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "also used in mtx-*",
+ filename = "colo-icc",
+ loading = "colo-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "colo-ini",
+ loading = "colo-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "this code might move to a module",
+ filename = "colo-run",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "core-con",
+ loading = "core-con",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "core-ctx",
+ loading = "core-ctx",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "core-dat",
+ loading = "core-dat",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe abusing the tex namespace is wrong",
+ filename = "core-env",
+ loading = "core-env",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "core-sys",
+ loading = "core-sys",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "this is in fact replaced by core-dat",
+ filename = "core-two",
+ loading = "core-two",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "some code will move to better places",
+ filename = "core-uti",
+ loading = "core-uti",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "data-aux",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-bin",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-con",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-crl",
+ loading = "never",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-ctx",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-env",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-exp",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-fil",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-gen",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-ini",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-inp",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-lst",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-lua",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-met",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-out",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-pre",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-res",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-sch",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-tex",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-tmf",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-tmp",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-tre",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-use",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-vir",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "data-zip",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "file-ini",
+ loading = "file-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "file-job",
+ loading = "file-job",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "file-lib",
+ loading = "file-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "file-mod",
+ loading = "file-mod",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "file-res",
+ loading = "file-res",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "file-syn",
+ loading = "file-syn",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-afm",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-afk",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "only used in luatex-fonts",
+ filename = "font-age",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-agl",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "needs some documentation in usage",
+ filename = "font-aux",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "move more to the commands namespace",
+ filename = "font-chk",
+ loading = "font-chk",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-cid",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-col",
+ loading = "font-col",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-con",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "will be improved over time",
+ filename = "font-ctx",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-def",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "part of this code is obsolete",
+ filename = "font-enc",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "needs documentation at the tex end",
+ filename = "font-enh",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe some data tables can be be external",
+ filename = "font-ext",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "okay but can be improved",
+ filename = "font-fbk",
+ loading = "font-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "font-gds",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-hsh",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-ini",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-ldr",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-log",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "currently rather minimalistic",
+ filename = "font-lua",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "the lum file support will be dropped / no map files anyway",
+ filename = "font-map",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-mis",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "font-nod",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "when more scripts are supported we might end up with imp files",
+ filename = "font-ota",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otb",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otc",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otd",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otf",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-oth",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-oti",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otn",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-otp",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-ott",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "is mostly replaced by lfg files",
+ filename = "font-pat",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-sol",
+ loading = "font-sol",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "also loaded on demand",
+ filename = "font-syn",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-tfm",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "font-vf",
+ loading = "font-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "grph-epd",
+ loading = "grph-epd",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "grph-fil",
+ loading = "grph-inc",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "grph-inc",
+ loading = "grph-inc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "grph-raw",
+ loading = "grph-raw",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "grph-swf",
+ loading = "grph-swf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "grph-u3d",
+ loading = "grph-u3d",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "experiment with graphic magick library",
+ filename = "grph-wnd",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "java-ini",
+ loading = "java-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "l-boolean",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-dir",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-file",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-function",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-io",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-lpeg",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-math",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-md5",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-number",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-os",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-pdfview",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-set",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-string",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-table",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-unicode",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-url",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "l-xml",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lang-def",
+ loading = "lang-def",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lang-ini",
+ loading = "lang-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lang-lab",
+ loading = "lang-lab",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lang-txt",
+ loading = "lang-lab",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe another approach is nicer",
+ filename = "lang-url",
+ loading = "lang-url",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "lang-wrd",
+ loading = "lang-wrd",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "more will end up here",
+ filename = "layo-ini",
+ loading = "layo-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-ano",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-col",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-enc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-epa",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-epd",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-fld",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-fmt",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-grp",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-mis",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-mov",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-nod",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-ren",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-swf",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-tag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-u3d",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-wid",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lpdf-xmp",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ comment = "replacement code for wd/ht/dp",
+ filename = "luat-bwc",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-cbk",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-cnf",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "maybe some code should move",
+ filename = "luat-cod",
+ loading = "luat-cod",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-env",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-exe",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-fio",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-fmt",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "will be upgraded when we have Lua 5.2",
+ filename = "luat-ini",
+ loading = "luat-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "will be upgraded when we have Lua 5.2",
+ filename = "util-env",
+ loading = "luat-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "luat-iop",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "this is likely to change some day",
+ filename = "luat-lua",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-mac",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-run",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "related to the socket code",
+ filename = "luat-soc",
+ loading = "on demand",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "luat-sta",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "luat-sto",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lxml-aux",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-css",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-ctx",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-dir",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-ent",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-inf",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-lpt",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-mis",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-sor",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-tab",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-tex",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "lxml-xml",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-chart",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-database",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-markdown",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-pstricks",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-spreadsheet",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "m-steps",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "math-act",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "could be made look nicer, but who cares",
+ filename = "math-dim",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "the code is related to math-vfu",
+ filename = "math-ext",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-fbk",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "okay, but we might have a few more low level definers some day",
+ filename = "math-ini",
+ loading = "math-ini",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "math-map",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-noa",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-ren",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-tag",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-ttv",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "math-vfu",
+ loading = "math-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "this is just a first version",
+ filename = "meta-fun",
+ loading = "meta-fun",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "meta-ini",
+ loading = "meta-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "meta-lua",
+ loading = "meta-lua",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "could be done nicer nowadays but who needs it",
+ filename = "meta-pdf",
+ loading = "meta-pdf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "meta-pdh",
+ loading = "meta-pdh",
+ status = "okay",
+ loading = "never",
+ comment = "this is historic code that we keep around",
+ },
+ {
+ category = "lua",
+ filename = "meta-tex",
+ loading = "meta-tex",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "mlib-ctx",
+ loading = "mlib-ctx",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "mlib-pdf",
+ loading = "mlib-pdf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "mlib-pps",
+ loading = "mlib-pdf",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "mlib-run",
+ loading = "mlib-ctx",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "this is an experiment, namespaces need to be dealt with properly",
+ filename = "mult-aux",
+ loading = "mult-aux",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "this is an experiment",
+ filename = "mult-chk",
+ loading = "mult-chk",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "mult-def",
+ loading = "mult-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "used for generating editor lexing files",
+ filename = "mult-fun",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "mult-ini",
+ loading = "mult-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "used for generating editor lexing files",
+ filename = "mult-low",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "all messages need to be checked",
+ filename = "mult-mes",
+ loading = "mult-ini",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "used for generating editor lexing files",
+ filename = "mult-mps",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "used for generating editor lexing files",
+ filename = "mult-prm",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "node-acc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-aux",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-bck",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-dir",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-ext",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-fin",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-fnt",
+ loading = "font-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-inj",
+ loading = "font-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-mig",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-pag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-pro",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-ref",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-res",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-rul",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-ser",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-shp",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-tex",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-tra",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-snp",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-tsk",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-tst",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "node-typ",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ comment = "will be extended when we have opened up pdf objects",
+ filename = "pack-obj",
+ loading = "pack-obj",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "pack-rul",
+ loading = "pack-rul",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "page-flt",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "page-ins",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "page-lin",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "page-mix",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "page-pst",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "page-str",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "phys-dim",
+ loading = "phys-dim",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-1",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-10",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-11",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-13",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-14",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-15",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-16",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-2",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-3",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-4",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-5",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-6",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-7",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-8",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-8859-9",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1250",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1251",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1252",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1253",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1254",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1255",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1256",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1257",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-cp1258",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "just a demo file",
+ filename = "regi-demo",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "regi-ini",
+ loading = "regi-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "s-fonts-tables",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "s-pre-71",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-but",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-fld",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-hlp",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-pag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-ref",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "scrn-wid",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ comment = "we can speed this up",
+ filename = "scrp-cjk",
+ loading = "scrp-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "we can speed this up",
+ filename = "scrp-eth",
+ loading = "scrp-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "scrp-ini",
+ loading = "scrp-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "sort-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "sort-lan",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "spac-adj",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "spac-ali",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "spac-chr",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "spac-hor",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "spac-ver",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "status-mkiv",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-bkm",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-blk",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-con",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-doc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-flt",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-itm",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-lev",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-lst",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-mar",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-mat",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-not",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-num",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-pag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-ref",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-reg",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-rsc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-syn",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "strc-tag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "supp-box",
+ loading = "supp-box",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "supp-ran",
+ loading = "supp-ran",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "symb-ini",
+ loading = "symb-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "there will be more in here",
+ filename = "syst-aux",
+ loading = "syst-aux",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "do some tests with speedups (sprint)",
+ filename = "syst-con",
+ loading = "syst-con",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "do some tests with speedups (less tokens)",
+ filename = "syst-lua",
+ loading = "syst-lua",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "tabl-tbl",
+ loading = "tabl-tbl",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "work in progress",
+ filename = "tabl-xtb",
+ loading = "tabl-xtb",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "we need a well defined defintion moment",
+ filename = "task-ini",
+ loading = "task-ini",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "toks-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ comment = "must be applied in more places",
+ filename = "trac-ctx",
+ loading = "trac-ctx",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-deb",
+ loading = "trac-deb",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "for the moment somewhat private",
+ filename = "trac-fil",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-inf",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "will be redone and extended",
+ filename = "trac-lmx",
+ loading = "luat-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "trac-log",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-xml",
+ loading = "mtxrun",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-exp",
+ loading = "mtxrun",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "experimental code, will be redone when lua 5.2",
+ filename = "trac-pro",
+ loading = "luat-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ comment = "some code can better be in util-set",
+ filename = "trac-set",
+ loading = "luat-lib",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "trac-tex",
+ loading = "trac-tex",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-tim",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-vis",
+ loading = "trac-vis",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "trac-jus",
+ loading = "trac-jus",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "type-ini",
+ loading = "type-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "typo-bld",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-brk",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-cap",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-cln",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-dig",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-dir",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-ini",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-itc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-krn",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-mar",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-pag",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-par",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-prc",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-lan",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "typo-rep",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "typo-spa",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "unic-ini",
+ loading = "unic-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "util-deb",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-dim",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-fmt",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-jsn",
+ loading = "m-json",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-lua",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "util-lib",
+ loading = "luat-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "util-mrg",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-pck",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-prs",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-seq",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-sql",
+ loading = "m-sql",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-sta",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-sto",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-str",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-tab",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "util-tpl",
+ loading = "luat-lib",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-asciimath",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-calcmath",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-cals",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-chemml",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-ct",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-ldx",
+ status = "todo",
+ },
+ {
+ category = "lua",
+ filename = "x-mathml",
+ status = "todo",
+ },
+ },
+ metafun = {
+ {
+ category = "mpiv",
+ filename = "mp-base",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-tool",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-mlib",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "sort of obsolete",
+ filename = "mp-core",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "maybe some nicer synonyms",
+ filename = "mp-page",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-butt",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-shap",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-grph",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-grid",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "a hack anyway",
+ filename = "mp-form",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-figs",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-func",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-text",
+ loading = "on demand",
+ status = "todo",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-crop",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "follows m-chart",
+ filename = "mp-char",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "follows m-steps",
+ filename = "mp-step",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ filename = "mp-chem",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "maybe some namespace changes",
+ filename = "mp-abck",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "maybe some namespace changes",
+ filename = "mp-apos",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mpiv",
+ comment = "will be done when needed",
+ filename = "mp-asnc",
+ loading = "on demand",
+ status = "todo",
+ },
+ },
+ modules = {
+ {
+ category = "mkiv",
+ comment = "best use m-zint instead",
+ filename = "m-barcodes",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "m-chart",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "this is a placeholder (chemistry is built-in)",
+ filename = "m-chemic",
+ loading = "never",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-cweb",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-database",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "m-datastrc",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "add-on for mtx-context",
+ filename = "m-directives",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-educat",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-fields",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "m-format",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-graph",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-ipsum",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-json",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-layout",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "add-on for mtx-context",
+ filename = "m-logcategories",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-markdown",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-mathcrap",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-mkii",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-mkivhacks",
+ status = "todo",
+ },
+ {
+ category = "mkvi",
+ filename = "m-morse",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-narrowtt",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-newmat",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-ntb-to-xtb",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-obsolete",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-oldfun",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-oldnum",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "m-pictex",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-pstricks",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "keep an eye on changes in lua code",
+ filename = "m-punk",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "m-r",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-spreadsheet",
+ status = "okay",
+ },
+ {
+ category = "mkvi",
+ filename = "m-steps",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "m-streams",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "m-subsub",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-sql",
+ loading = "module",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-timing",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "add-on for mtx-context",
+ filename = "m-trackers",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-translate",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-units",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-visual",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "m-zint",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "s-abr-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-abr-02",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-abr-03",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-abr-04",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-art-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-cdr-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-def-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-faq-00",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-faq-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-faq-02",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-faq-03",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-10",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-11",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-20",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-21",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-23",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-24",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-26",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-28",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-29",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-30",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-31",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fnt-32",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fonts-missing",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-fonts-tables",
+ status = "todo",
+ },
+ {
+ category = "mkvi",
+ filename = "s-inf-01",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "s-inf-02",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-inf-03",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-inf-04",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-lan-03",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-lan-04",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-lan-06",
+ status = "okay",
+ },
+ {
+ category = "tex",
+ filename = "s-mag-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-map-10",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mat-10",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mat-20",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mat-11",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mat-12",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mod-00",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mod-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-mod-02",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-phy-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-00",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-01",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-02",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-03",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-04",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-05",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-06",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-07",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-08",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-09",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-10",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-11",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-12",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-13",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-14",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-15",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-16",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-17",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-18",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-19",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-22",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-23",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-26",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-27",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-pre-30",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-50",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-pre-60",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-61",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-62",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-63",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-64",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-66",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-67",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-68",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-pre-69",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-pre-70",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-pre-71",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-93",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-pre-96",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-ptj-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-reg-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "s-set-31",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "s-syn-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-asciimath",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "x-calcmath",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-cals",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-chemml",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-ct",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "x-dir-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-dir-05",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-entities",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "x-foxet",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-ldx",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-mathml",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-newmml",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ comment = "pfsense xml configuration rendering",
+ filename = "x-pfs-01",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "x-physml",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-res-01",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-res-50",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-set-11",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-set-12",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "x-udhr",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "x-xtag",
+ status = "todo",
+ },
+ },
+ optional = {
+ {
+ category = "mkiv",
+ filename = "bxml-apa",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "colo-run",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ comment = "always needs some work",
+ filename = "cont-new",
+ loading = "runtime",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "font-run",
+ loading = "on demand",
+ status = "todo",
+ },
+ {
+ category = "mkiv",
+ filename = "lxml-ctx",
+ status = "okay",
+ comment = "this is an experimental module",
+ },
+ {
+ category = "mkiv",
+ filename = "meta-pdh",
+ loading = "never",
+ status = "okay",
+ comment = "this is historic code that we keep around",
+ },
+ {
+ category = "mkiv",
+ filename = "mult-prm",
+ loading = "never",
+ status = "okay",
+ comment = "this is just a helper for generating files",
+ },
+ {
+ category = "mkiv",
+ filename = "page-run",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "spac-adj",
+ loading = "never",
+ status = "obsolete",
+ },
+ {
+ category = "mkiv",
+ filename = "supp-vis",
+ loading = "never",
+ status = "obsolete",
+ comment = "replaced by a more modern variant",
+ },
+ {
+ category = "mkiv",
+ filename = "symb-run",
+ loading = "on demand",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "type-run",
+ loading = "on demand",
+ status = "todo",
+ },
+ },
+ resources = {
+ {
+ category = "ori",
+ comment = "template for a user configuration file (with suffix mkiv)",
+ filename = "cont-sys",
+ loading = "runtime",
+ status = "okay",
+ },
+ {
+ category = "lmx",
+ filename = "context-base",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-characters",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-debug",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-error",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-fonttest",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-help",
+ status = "todo",
+ },
+ {
+ category = "lmx",
+ filename = "context-timing",
+ status = "todo",
+ },
+ {
+ category = "pdf",
+ filename = "context-version",
+ status = "todo",
+ },
+ {
+ category = "png",
+ filename = "context-version",
+ status = "todo",
+ },
+ {
+ category = "css",
+ comment = "layout specification for debug and error pages and web services",
+ filename = "context",
+ status = "okay",
+ },
+ {
+ category = "rme",
+ comment = "readme file",
+ filename = "context",
+ status = "okay",
+ },
+ {
+ category = "ctx",
+ comment = "example of a ctx file (for mtx-context)",
+ filename = "core-ctx",
+ status = "okay",
+ },
+ {
+ category = "css",
+ filename = "export-example",
+ status = "todo",
+ },
+ {
+ category = "rng",
+ filename = "export-example",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "export-example",
+ status = "todo",
+ },
+ {
+ category = "xml",
+ comment = "this file is auto-generated by mtx-language",
+ filename = "lang-all",
+ status = "okay",
+ },
+ {
+ category = "xml",
+ filename = "lpdf-pda",
+ status = "todo",
+ },
+ {
+ category = "xml",
+ filename = "lpdf-pdx",
+ status = "todo",
+ },
+ {
+ category = "rlx",
+ filename = "rlxcache",
+ status = "todo",
+ },
+ {
+ category = "rlx",
+ filename = "rlxtools",
+ status = "todo",
+ },
+ {
+ category = "ctx",
+ filename = "s-mod",
+ status = "todo",
+ },
+ {
+ category = "pdf",
+ filename = "status-files",
+ status = "todo",
+ },
+ {
+ category = "pdf",
+ filename = "status-lua",
+ status = "todo",
+ },
+ {
+ category = "tex",
+ filename = "status-mkiv",
+ status = "todo",
+ },
+ {
+ category = "xsd",
+ filename = "x-chemml",
+ status = "todo",
+ },
+ {
+ category = "xsd",
+ filename = "x-contml",
+ status = "todo",
+ },
+ {
+ category = "rng",
+ filename = "x-corres",
+ status = "todo",
+ },
+ {
+ category = "dtd",
+ filename = "x-fig-00",
+ status = "todo",
+ },
+ {
+ category = "xsd",
+ filename = "x-fig-00",
+ status = "todo",
+ },
+ {
+ category = "ctx",
+ filename = "x-ldx",
+ status = "todo",
+ },
+ {
+ category = "xsd",
+ filename = "x-mathml",
+ status = "todo",
+ },
+ {
+ category = "xsl",
+ filename = "x-om2cml",
+ status = "todo",
+ },
+ {
+ category = "xsl",
+ filename = "x-openmath",
+ status = "todo",
+ },
+ {
+ category = "ctx",
+ comment = "runner for x-pfs-01",
+ filename = "x-pfsense",
+ status = "okay",
+ },
+ {
+ category = "xsd",
+ filename = "x-physml",
+ status = "todo",
+ },
+ {
+ category = "xsl",
+ filename = "x-sm2om",
+ status = "todo",
+ },
+ },
}
diff --git a/Master/texmf-dist/tex/context/base/status-mkiv.tex b/Master/texmf-dist/tex/context/base/status-mkiv.tex
index 0d22d51a15d..8685c97ad3f 100644
--- a/Master/texmf-dist/tex/context/base/status-mkiv.tex
+++ b/Master/texmf-dist/tex/context/base/status-mkiv.tex
@@ -1,38 +1,46 @@
- \setupbodyfont[dejavu,10pt]
+\usemodule[abr-02]
+
+\setupbodyfont
+ [dejavu,9pt]
+
+\setuppapersize
+ [A4,landscape]
\setuplayout
[width=middle,
height=middle,
- backspace=1cm,
- topspace=1cm,
+ backspace=.5cm,
+ topspace=.5cm,
footer=0pt,
header=1.25cm]
\setuphead
- [subject]
+ [title]
[style=\bfa,
- page=yes]
+ page=yes,
+ after={\blank[line]}]
\setuppagenumbering
[location=]
\setupheadertexts
- [\currentdate][MkIV cleanup Status / Page \pagenumber]
+ [\currentdate]
+ [MkIV Status / Page \pagenumber]
+
+% \showmakeup
+% \showallmakeup
\starttext
-% anch attr back buff colo font grph java lang luat lxml math meta mlib mult node
-% pack page phys scrn spac strc supp symb syst tabl toks typo
+% logs.report (immediate) versus logs.messenger (in flow)
-\startsubject[title=Todo]
+\starttitle[title=Todo]
\startitemize[packed]
\startitem currently the new namespace prefixes are not consistent but this
will be done when we're satisfied with one scheme \stopitem
\startitem there will be additional columns in the table, like for namespace
so we need another round of checking then \stopitem
- \startitem the imp modules are not in the list and need checking too \stopitem
- \startitem the s, x, m modules will be checked, redone and reorganized \stopitem
 \startitem the lua code will be cleaned up and upgraded as some is quite old
and experimental \stopitem
\startitem we need a proper dependency tree and better defined loading order \stopitem
@@ -40,122 +48,281 @@
\startitem we need to check what messages are gone (i.e.\ clean up mult-mes) \stopitem
\startitem some commands can go from mult-def (and the xml file) \stopitem
\startitem check for setuphandler vs simplesetuphandler \stopitem
- \startitem all showcomposition etc can go (we can redo that in lua if needed) \stopitem
\startitem for the moment we will go for \type {xxxx_} namespaces that (mostly) match
the filename but later we can replace these by longer names (via a script) so
module writers should {\bf not} use the core commands with \type{_} in the
name \stopitem
\startitem the message system will be unified \stopitem
+ \startitem maybe rename dowhatevertexcommand to fromluawhatevertexcommand \stopitem
+ \startitem consider moving setups directly to lua end (e.g. in characterspacing, breakpoint, bitmaps etc.) \stopitem
+ \startitem more local temporary \type {\temp...} will become \type {\p_...} \stopitem
+ \startitem check all ctxlua calls for ctxcommand \stopitem
+ \startitem rename all those \type {\current<whatever>}s in strc \stopitem
+ \startitem check \type {option} vs \type {options} \stopitem
+ \startitem check \type {type} vs \type {kind} \stopitem
+ \startitem check \type {label} vs \type {name} vs \type {tag} \stopitem
+   \startitem check \type {limop}, different limops should be classes \stopitem
+ \startitem too many positions in simple files (itemize etc) \stopitem
+ \startitem math domains/dictionaries \stopitem
+ \startitem xtables don't span vertically with multilines (yet) \stopitem
+ \startitem notes in mixed columns \stopitem
+ \startitem floats in mixed columns \stopitem
+ \startitem check return values \type {os.execute} \stopitem
+ \startitem more r, d, k in xml code \stopitem
+ \startitem mathml, more in \LUA \stopitem
+ \startitem style: font-size, font, color handling in \HTML\ (lxml-css) \stopitem
+ \startitem a \type {\name {A.B.C DEF}} auto-nobreakspace \stopitem
+ \startitem redo \CWEB\ module with \LUA \stopitem
+ \startitem maybe move characters.blocks to its own file \stopitem
+ \startitem more local context = context in \LUA\ files \stopitem
+ \startitem check and optimize all storage.register and locals (cosmetics) \stopitem
+ \startitem check all used modules in \LUA\ (and local them) \stopitem
+ \startitem environment and basic lua helpers are now spread over too many files \stopitem
+ \startitem isolate tracers and showers \stopitem
+ \startitem check all possible usage of ctxcommand \stopitem
+ \startitem there are more s-* modules, like s-fnt-41 \stopitem
+ \startitem check (un)marked tables \stopitem
\stopitemize
-\stopsubject
+\stoptitle
+
+\starttitle[title=To keep an eye on]
+
+\startitemize[packed]
+ \startitem Currently lpeg replacements interpret the percent sign so we need to escape it. \stopitem
+   \startitem Currently numbers and strings are cast in comparisons but that might change in the future. \stopitem
+\stopitemize
-\startsubject[title=Status]
+\stoptitle
+
+\definehighlight[notabenered] [color=darkred, style=bold]
+\definehighlight[notabeneblue] [color=darkblue, style=bold]
+\definehighlight[notabeneyellow] [color=darkyellow, style=bold]
+\definehighlight[notabenemagenta][color=darkmagenta,style=bold]
\startluacode
local coremodules = dofile("status-mkiv.lua")
+ local valid = table.tohash {
+ "toks", "attr", "page", "buff", "font", "colo", "phys", "supp", "typo", "strc",
+ "syst", "tabl", "spac", "scrn", "lang", "lxml", "mlib", "java", "pack", "math",
+ "symb", "grph", "anch", "luat", "mult", "back", "node", "meta", "norm", "catc",
+ "cldf", "file", "char", "core", "layo", "trac", "cont", "regi", "enco", "hand",
+ "unic", "sort", "blob", "type", "scrp", "prop", "chem", "bibl", "task",
+ "whatever", "mp", "s", "x", "m", "mtx",
+ }
+
+ local specialcategories = {
+ mkvi = true,
+ }
+
+ local what = {
+ "main", "core", "lua", "optional", "implementations", "extra", "extras", "metafun", "modules", "resources"
+ }
+
+ local totaltodo = 0
+ local totalpending = 0
+ local totalobsolete = 0
+ local totalloaded = 0
+
+ local function summary(nofloaded,noftodo,nofpending,nofobsolete)
+
+ local nofdone = nofloaded - noftodo - nofpending - nofobsolete
+
+ context.starttabulate { "|B|r|" }
+ context.HL()
+ context.NC() context("done") context.NC() context(nofdone) context.NC() context.NR()
+ context.NC() context("todo") context.NC() context(noftodo) context.NC() context.NR()
+ context.NC() context("pending") context.NC() context(nofpending) context.NC() context.NR()
+ context.NC() context("obsolete") context.NC() context(nofobsolete) context.NC() context.NR()
+ context.HL()
+ context.NC() context("loaded") context.NC() context(nofloaded) context.NC() context.NR()
+ context.HL()
+ context.stoptabulate()
+
+ end
+
if coremodules then
local function tabelize(loaded,what)
if loaded then
- local nofunknown = 0
- local nofloaded = #loaded
+ local noftodo = 0
+ local nofpending = 0
+ local nofobsolete = 0
+ local nofloaded = #loaded
+ local categories = { }
+
+ for k, v in next, valid do
+ categories[k] = { }
+ end
for i=1,nofloaded do
- loaded[i].order = i
+ local l = loaded[i]
+ l.order = i
+ local category = string.match(l.filename,"([^%-]+)%-") or "whatever"
+ local c = categories[category]
+ if c then
+ c[#c+1] = l
+ end
end
- table.sort(loaded,function(a,b) return a.filename < b.filename end)
+ for k, loaded in table.sortedhash(categories) do
+
+ local nofloaded = #loaded
+
+ if nofloaded > 0 then
+
+ table.sort(loaded,function(a,b) return a.filename < b.filename end) -- in place
+
+ context.starttitle { title = string.format("%s: %s",what,k) }
+
+ context.starttabulate { "|Tr|Tlw(3em)|Tlw(12em)|Tlw(12em)|Tlw(4em)|Tl|Tl|Tl|Tp|" }
+ context.NC() context.bold("order")
+ context.NC() context.bold("kind")
+ context.NC() context.bold("file")
+ context.NC() context.bold("loading")
+ context.NC() context.bold("status")
+ context.NC() context.bold("reference")
+ context.NC() context.bold("manual")
+ context.NC() context.bold("wiki")
+ context.NC() context.bold("comment")
+ context.NC() context.NR()
+ context.HL()
+ for i=1,nofloaded do
+ local module = loaded[i]
+ local status = module.status
+ local category = module.category
+ local filename = module.filename
+ context.NC()
+ context(module.order)
+ context.NC()
+ if specialcategories[category] then
+ context.notabeneblue(category)
+ else
+ context(category)
+ end
+ context.NC()
+ if #filename>20 then
+ context(string.sub(filename,1,18) .. "..")
+ else
+ context(filename)
+ end
+ context.NC()
+ context(module.loading)
+ context.NC()
+ if status == "todo" then
+ context.notabenered(status)
+ noftodo = noftodo + 1
+ elseif status == "pending" then
+ context.notabeneyellow(status)
+ nofpending = nofpending + 1
+ elseif status == "obsolete" then
+ context.notabenemagenta(status)
+ nofobsolete = nofobsolete + 1
+ else
+ context(status)
+ end
+ context.NC() context(module.reference)
+ context.NC() context(module.manual)
+ context.NC() context(module.wiki)
+ context.NC() context(module.comment)
+ context.NC() context.NR()
+ end
+ context.stoptabulate()
+
+ context.stoptitle()
- context.starttabulate { "|Tr|Tl|Tl|l|p|" }
- context.NC() -- context.bold("order")
- context.NC() context.bold("file")
- context.NC() context.bold("mark")
- context.NC() context.bold("status")
- context.NC() context.bold("comment")
- context.NC() context.NR()
- for i=1,nofloaded do
- local module = loaded[i]
- local status = module.status
- context.NC() context(module.order)
- context.NC() context(module.filename)
- context.NC() context(module.marktype)
- if status == "unknown" then
- context.NC() context.bold(status)
- nofunknown = nofunknown + 1
- else
- context.NC() context(status)
end
- context.NC() context(module.comment)
- context.NC() context.NR()
+
end
- context.stoptabulate()
- context.blank()
+ context.starttitle { title = string.format("summary of %s modules",what) }
+
+ summary(nofloaded,noftodo,nofpending,nofobsolete)
- context("Of the %s %s modules (so far) in this list %s have the status unknown",nofloaded,what,nofunknown)
+ context.stoptitle()
+
+ totaltodo = totaltodo + noftodo
+ totalpending = totalpending + nofpending
+ totalobsolete = totalobsolete + nofobsolete
+ totalloaded = totalloaded + nofloaded
end
end
- tabelize(coremodules.core, "core")
- tabelize(coremodules.extra,"extra")
+ for i=1,#what do
+ tabelize(coremodules[what[i]],what[i])
+ end
end
- local namespaces = dofile("status-namespaces.lua")
+ -- context.starttitle { title = "Valid prefixes" }
+ --
+ -- for namespace, data in table.sortedhash(namespaces) do
+ -- if valid[namespace] then
+ -- context.type(namespace)
+ -- end
+ -- context.par()
+ -- end
+ --
+ -- context.stoptitle()
- local valid = table.tohash {
- "toks", "attr", "page", "buff", "font", "colo", "phys", "supp", "typo", "strc",
- "syst", "tabl", "spac", "scrn", "lang", "lxml", "mlib", "java", "pack", "math",
- "symb", "grph", "anch", "luat", "mult", "back", "node", "meta",
- "module",
- }
+ context.starttitle { title = string.format("summary of all",what) }
- context.startsubject { title = "Valid prefixes" }
+ summary(totalloaded,totaltodo,totalpending,totalobsolete)
- for namespace, data in table.sortedhash(namespaces) do
- if valid[namespace] then
- context.type(namespace)
- end
- context.par()
- end
+ context.stoptitle()
+
+ if io.exists("status-namespaces.lua") then
- context.stopsubject()
+ context.starttitle { title = "messy namespaces" }
- context.startsubject { title = "Messy namespaces" }
+ local namespaces = dofile("status-namespaces.lua")
- for namespace, data in table.sortedhash(namespaces) do
- if valid[namespace] then
- else
- context(namespace)
+ for namespace, data in table.sortedhash(namespaces) do
+ if valid[namespace] then
+ else
+ context(namespace)
+ end
+ context.par()
end
- context.par()
+
+ context.stoptitle()
+
end
- context.stopsubject()
+ if io.exists("status-registers.lua") then
+
+ context.starttitle { title = "messy registers" }
- local registers = dofile("status-registers.lua")
+ local registers = dofile("status-registers.lua")
- context.startsubject { title = "Messy registers" }
- for register, data in table.sortedhash(registers) do
- context(register)
- context.par()
- for name in table.sortedhash(data) do
- context.quad()
- context.type(name)
+ for register, data in table.sortedhash(registers) do
+ context(register)
+ context.par()
+ for name in table.sortedhash(data) do
+ context.quad()
+ context.type(name)
+ context.par()
+ end
context.par()
end
- context.par()
+
+ context.stoptitle()
+
end
+ context.starttitle { title = "callbacks" }
+
+ commands.showcallbacks()
+
+ context.stoptitle()
+
\stopluacode
-\stopsubject
\stoptext
diff --git a/Master/texmf-dist/tex/context/base/strc-bkm.lua b/Master/texmf-dist/tex/context/base/strc-bkm.lua
index 4eb9551a9f9..d9c268ce4d9 100644
--- a/Master/texmf-dist/tex/context/base/strc-bkm.lua
+++ b/Master/texmf-dist/tex/context/base/strc-bkm.lua
@@ -14,7 +14,7 @@ if not modules then modules = { } end modules ['strc-bkm'] = {
-- we should hook the placement into everystoptext ... needs checking
local format, concat, gsub = string.format, table.concat, string.gsub
-local utfvalues = string.utfvalues
+local utfvalues = utf.values
local settings_to_hash = utilities.parsers.settings_to_hash
local codeinjections = backends.codeinjections
@@ -173,7 +173,7 @@ function bookmarks.flatten(levels)
local old = lj[1]
lj[1] = new
if trace_bookmarks then
- report_bookmarks("promoting entry %s from level %s to %s: %s",j,old,new,lj[2])
+ report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2])
end
end
skip = true
diff --git a/Master/texmf-dist/tex/context/base/strc-bkm.mkiv b/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
index 8ddf0a6dff9..8e1252329e4 100644
--- a/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
@@ -11,93 +11,109 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This might become scrn-bkm.mkiv.
-
\writestatus{loading}{ConTeXt Structure Macros / Bookmarks}
\registerctxluafile{strc-bkm}{1.001}
\unprotect
-%D Due to requests I finally decided to support bookmarks, a
-%D driver dependant way of showing tables of content. The most
-%D simple way of support is hooking bookmark generation into
-%D the existing list mechanisms. That way users can generate
-%D bookmarks automatically, although its entirely valid to add
-%D bookmarks by defining alternative ones. These will be added
-%D at the appropriate place in the list.
-
-% \hoofdstuk{het eerste hoofdstuk}
-%
-% \bookmark {de eerste bookmark} % optional overruled hoofdstuk
-%
-% .... text ....
-%
-% \placebookmarks [hoofdstuk,paragraaf,subparagraaf,subsubparagraaf,mylist]
-% [open list]
-%
-% \bookmark[mylist]{whatever}
-
-%D This will go away.
-
-\let\flushpostponedbookmark\relax
+%D Bookmarks are a very viewer-dependent feature. They are mostly used
+%D as an additional table of contents and therefore relate directly to lists.
+%D
+%D A bookmark list is added to the document only when interaction is
+%D enabled. The given lists are bookmarked and a second argument specifies
+%D the opened bookmark trees.
+%D
+%D \starttyping
+%D \placebookmarks
+%D [chapter,section,subsection,mylist]
+%D [chapter]
+%D \stoptyping
+%D
+%D You can overload the last set bookmark in a sectioning command:
+%D
+%D \starttyping
+%D \chapter {the first chapter}
+%D \bookmark {the first bookmark}
+%D \stoptyping
+%D
+%D However, in practice it is better to use \type {\startchapter} and set the
+%D \type {bookmark} parameter.
+%D
+%D You can add entries to the bookmarklist:
+%D
+%D \starttyping
+%D \bookmark[mylist]{whatever}
+%D \stoptyping
+%D
+%D Use force to get titles in the bookmarklist. This is somewhat tricky as
+%D one does not want "Contents" in a table of contents but it has to be in
+%D the bookmark list.
+
+\installcorenamespace{bookmark}
+
+\installsetuponlycommandhandler \??bookmark {bookmark} % installdirectparametersethandler
+
+\setupbookmark
+ [\c!force=\v!no, % it's easier to force that to inhibit
+ \c!number=\v!yes] % might become v!no
-%D We have better ways now.
+\let\setupbookmarks\setupbookmark
\unexpanded\def\bookmark
- {\dosingleempty\dobookmark}
+ {\dosingleempty\strc_bookmarks_bookmark}
-\def\dobookmark[#1]#2%
+\def\strc_bookmarks_bookmark
+ {\iflocation
+ \expandafter\strc_bookmarks_bookmark_yes
+ \else
+ \expandafter\strc_bookmarks_bookmark_nop
+ \fi}
+
+\def\strc_bookmarks_bookmark_yes[#1]#2%
{\begingroup
\simplifycommands
\ctxcommand{overloadbookmark("#1",\!!bs\detokenize\expandafter{\normalexpanded{#2}}\!!es)}%
\endgroup}
-%D Placement \unknown\ look how simple compared to \MKII:
-
-\newtoks \everysetupbookmarks
-
-\def\bookmarkparameter#1{\ifcsname\??bm#1\endcsname\csname\??bm#1\endcsname\fi}
-
-\unexpanded\def\setupbookmarks[#1]%
- {\getparameters[\??bm][#1]%
- \the\everysetupbookmarks}
+\def\strc_bookmarks_bookmark_nop[#1]#2%
+ {}
\unexpanded\def\placebookmarks
- {\dotripleempty\doplacebookmarks}
-
-% Use force to get titles in the bookmarklist. This is somewhat tricky as one
-% does not want "Contents" in a table of contents but it has to be in the
-% bookmark list.
+ {\dotripleempty\strc_bookmarks_place}
-\def\doplacebookmarks[#1][#2][#3]%
+\def\strc_bookmarks_place
{\iflocation
- \begingroup
- \edef\askednames{#1}%
- \edef\askedopened{#2}%
- \ifx\askednames\empty
- \edef\askednames{\getvalue{\??ih\v!content\c!list}}%
- \fi
- \ifx\askednames\empty
- \let\askednames\v!all
- \fi
- \ifthirdargument
- \getparameters[\??bm][#3]%
- \else\ifsecondargument
- \doifassignmentelse{#2}{\let\askedopened\empty\getparameters[\??bm][#2]}\donothing
- \fi\fi
- \ctxcommand{registerbookmark {
- names = "\askednames",
- opened = "\askedopened",
- force = "\bookmarkparameter\c!force",
- number = "\bookmarkparameter\c!number",
- }}%
- \endgroup
+ \expandafter\strc_bookmarks_place_yes
+ \else
+ \expandafter\gobblethreeoptionals
\fi}
-\setupbookmarks
- [\c!force=\v!no, % it's easier to force that to inhibit
- \c!number=\v!yes] % might become v!no
+\let\m_bookmarks_names \empty
+\let\m_bookmarks_opened\empty
+
+\def\strc_bookmarks_place_yes[#1][#2][#3]%
+ {\begingroup
+ \edef\m_bookmarks_names{#1}%
+ \edef\m_bookmarks_opened{#2}%
+ \ifx\m_bookmarks_names\empty
+ \edef\m_bookmarks_names{\namedlistparameter\v!content\c!list}%
+ \fi
+ \ifx\m_bookmarks_names\empty
+ \let\m_bookmarks_names\v!all
+ \fi
+ \ifthirdargument
+ \setupcurrentbookmark[#3]% no every so not all possible
+ \else\ifsecondargument
+ \doifassignmentelse{#2}{\let\m_bookmarks_opened\empty\setupcurrentbookmark[#2]}\donothing
+ \fi\fi
+ \ctxcommand{registerbookmark {
+ names = "\m_bookmarks_names",
+ opened = "\m_bookmarks_opened",
+ force = "\bookmarkparameter\c!force",
+ number = "\bookmarkparameter\c!number",
+ }}%
+ \endgroup}
\appendtoks
\ctxcommand{setupbookmarks {
@@ -107,7 +123,7 @@
stopper = \!!bs\bookmarkparameter\c!numberstopper\!!es,
segments = "\bookmarkparameter\c!numbersegments",
}}%
-\to \everysetupbookmarks
+\to \everysetupbookmark
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-blk.lua b/Master/texmf-dist/tex/context/base/strc-blk.lua
index d2f25dfb34f..791f8f99b4c 100644
--- a/Master/texmf-dist/tex/context/base/strc-blk.lua
+++ b/Master/texmf-dist/tex/context/base/strc-blk.lua
@@ -9,10 +9,9 @@ if not modules then modules = { } end modules ['strc-blk'] = {
-- this one runs on top of buffers and structure
local type = type
-local gmatch, find = string.gmatch, string.find
-local lpegmatch = lpeg.match
+local find, format, validstring = string.find, string.format, string.valid
local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+local allocate = utilities.storage.allocate
local structures, context = structures, context
@@ -45,7 +44,7 @@ function blocks.print(name,data,hide)
else
context.dostartnormalblock(name)
end
- context.viafile(data)
+ context.viafile(data,format("block.%s",validstring(name,"noname")))
if hide then
context.dostophiddenblock()
else
diff --git a/Master/texmf-dist/tex/context/base/strc-blk.mkiv b/Master/texmf-dist/tex/context/base/strc-blk.mkiv
index 3bd5d04ed20..1dd144aa994 100644
--- a/Master/texmf-dist/tex/context/base/strc-blk.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-blk.mkiv
@@ -22,92 +22,78 @@
% todo: prefix numbers (needs further integration elsewhere)
% check functionality
% alternative files (needs further integration elsewhere)
+%
+% order matters: \c!before (think of: \c!before=\startitemize)
+%
+% no \endgroups
-\def\namedblockparameter#1#2{\ifcsname\??tb#1#2\endcsname\csname\??tb#1#2\endcsname\fi}
-
-\unexpanded\def\setupblockparameters{\dodoubleargument \dosetupblock} % fast one (for compatibility)
-\unexpanded\def\setupblock {\dodoubleargumentwithset\dosetupblock} % handles set
-
-\def\dosetupblock[#1]{\getparameters[\??tb#1]} % [#1][#2]}
+\installcorenamespace {block}
+\installcorenamespace {blocktemp}
-\unexpanded\def\defineblock[#1]%
- {\processcommalist[#1]\dodefineblock}
+\installcommandhandler \??block {block} \??block
-\def\dodefineblock#1%
- {\getparameters
- [\??tb#1]
- [\c!before=\blank,
- \c!after=\blank,
- \c!inner=,
- \c!style=,
- \c!file=]% todo
- \ctxcommand{definestructureblock("#1")}%
- \setuvalue{\e!begin#1}{\dodoubleempty\dobeginofblock[#1]}%
- \letvalue{\e!end#1}\relax}
+\appendtoks
+ \ctxcommand{definestructureblock("\currentblock")}%
+ \setuevalue{\e!begin\currentblock}{\dodoubleempty\strc_blocks_begin[\currentblock]}%
+ \setuevalue{\e!end \currentblock}{}%
+\to \everydefineblock
-\def\dobeginofblock[#1][#2]%
+\unexpanded\def\strc_blocks_begin[#1][#2]%
{\normalexpanded{\buff_pickup{@block@}{\e!begin#1}{\e!end#1}}
{}% before
{\ctxcommand{savestructureblock("#1","#2","@block@")}}}% after
-\def\dostarthiddenblock
+\let\strc_blocks_setup\relax
+
+\unexpanded\def\dostarthiddenblock % called at lua end
{\startnointerference
\dostartnormalblock}
-\def\dostophiddenblock
+\unexpanded\def\dostophiddenblock % called at lua end
{\dostopnormalblock
\stopnointerference}
-% order matters: \c!before (think of: \c!before=\startitemize)
-
-% no \endgroups
-
-\let\doblocksetups\gobbleoneargument
-
-\def\dostartnormalblock#1% name
+\unexpanded\def\dostartnormalblock#1% called at lua end
{\bgroup
- \visibletrue
+ \visibletrue % will change
\edef\currentblock{#1}%
- \doblocksetups\currentblock
- \let\doblocksetups\gobbleoneargument
- \namedblockparameter\currentblock\c!before
- \dousestylehashparameter{\??tb\currentblock}\c!style
- \dousecolorhashparameter{\??tb\currentblock}\c!color
- \namedblockparameter\currentblock\c!inner
+ \strc_blocks_setup
+ \let\strc_blocks_setup\relax
+ \blockparameter\c!before
+   \useblockstyleandcolor\c!style\c!color % maybe move one line up (font spacing)
+ \blockparameter\c!inner % better \c!setups
\ignorespaces}
-\def\dostopnormalblock
+\unexpanded\def\dostopnormalblock % called at lua end
{\removeunwantedspaces
- \namedblockparameter\currentblock\c!after
+ \blockparameter\c!after
\par % todo: alternative = text, paragraph
\egroup}
-\def\dosetblockstate[#1][#2][#3]% state name tag
+\def\strc_blocks_set_state[#1][#2][#3]% state name tag
{\ctxcommand{setstructureblockstate("#1","#2","#3")}}
-\def\doselectblocks[#1][#2][#3][#4]% state name tag setups
+\def\strc_blocks_select[#1][#2][#3][#4]% state name tag setups
{\bgroup
\doifassignmentelse{#3}
- {\getparameters[\??tb\??tb][\c!criterium=\v!text,#3]%
- \def\doblocksetups##1{\getparameters[\??tb##1][#3]}%
- \ctxcommand{selectstructureblock("#1","#2","","\@@tb@@tbcriterium")}}
- {\getparameters[\??tb\??tb][\c!criterium=\v!text,#4]%
- \def\doblocksetups##1{\getparameters[\??tb##1][#4]}%
- \ctxcommand{selectstructureblock("#1","#2","#3","\@@tb@@tbcriterium")}}%
+ {\getparameters[\??blocktemp][\c!criterium=\v!text,#3]%
+ \def\strc_blocks_setup{\setupcurrentblock[#3]}%
+ \ctxcommand{selectstructureblock("#1","#2","","\csname\??blocktemp\c!criterium\endcsname")}}
+ {\getparameters[\??blocktemp][\c!criterium=\v!text,#4]%
+ \def\strc_blocks_setup{\setupcurrentblock[#4]}%
+ \ctxcommand{selectstructureblock("#1","#2","#3","\csname\??blocktemp\c!criterium\endcsname")}}%
\egroup}
-% hide: save, if [+] also hidden execute
-% keep: save and normal execute
-
-\def\hideblocks{\dotripleempty\dosetblockstate[hide]}
-\def\keepblocks{\dotripleempty\dosetblockstate[keep]}
-
+% hide : save, if [+] also hidden execute
+% keep : save and normal execute
% use : normal execute unless [-]
% process: hidden execute unless [-]
% select : idem use
-\def\useblocks {\doquadrupleempty\doselectblocks[use]}
-\def\processblocks{\doquadrupleempty\doselectblocks[process]}
-\def\selectblocks {\doquadrupleempty\doselectblocks[use]}
+\unexpanded\def\hideblocks {\dotripleempty \strc_blocks_set_state[hide]}
+\unexpanded\def\keepblocks {\dotripleempty \strc_blocks_set_state[keep]}
+\unexpanded\def\useblocks {\doquadrupleempty\strc_blocks_select [use]}
+\unexpanded\def\processblocks{\doquadrupleempty\strc_blocks_select [process]}
+\unexpanded\def\selectblocks {\doquadrupleempty\strc_blocks_select [use]}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-con.mkvi b/Master/texmf-dist/tex/context/base/strc-con.mkvi
index 956acaae506..0f5b202b9c8 100644
--- a/Master/texmf-dist/tex/context/base/strc-con.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-con.mkvi
@@ -15,6 +15,8 @@
\registerctxluafile{strc-con}{1.001}
+% todo: check why \copy .. probably mkii leftover
+%
% todo: check if commands similar to lists
% todo: \strc_constructions_setup_counter\strc_constructions_setup_counter{#1} in setup ... synchronization
%
@@ -27,17 +29,16 @@
%
% todo: \currentconstructionattribute : can be a counter instead
-
-%D Constructions are just descriptions but a bit more abstract so that
-%D we can conveniently build upon them. They are not really meant for
-%D users but module writers might find them useful. We mention a
-%D couple of parameters but these are only mentioned because in the
-%D shared code we map all resolvers to constructions.
+%D Constructions are just descriptions but a bit more abstract so that we can
+%D conveniently build upon them. They are not really meant for users but module
+%D writers might find them useful. We mention a couple of parameters but these are
+%D only mentioned because in the shared code we map all resolvers to constructions.
+%D
+%D On top of constructions we build descriptions, enumerations, notes, labels and
+%D maybe some day more.
\unprotect
-\ifdefined\v!construction \else \def\v!construction{construction} \fi
-
%D Todo:
\installcorenamespace{construction}
@@ -70,9 +71,9 @@
%
\c!alternative=\v!left,
\c!display=\v!yes,
- \c!width=7em,
- \c!distance=1em,
- \c!titledistance=0.5em,
+ \c!width=7\emwidth,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
%c!hang=,
%c!sample=,
\c!margin=\v!no,
@@ -137,9 +138,9 @@
{\csname\??constructionstophandler\currentconstructionhandler\endcsname
\endgroup}
-%D As we will build on top of the generic construction mechanism we
-%D have handlers as a sort of plug in mechanism. In order to have
-%D some consistency we share some namespaces.
+%D As we will build on top of the generic construction mechanism we have handlers
+%D as a sort of plug in mechanism. In order to have some consistency we share
+%D some namespaces.
\installcorenamespace{constructioninitializer}
\installcorenamespace{constructionfinalizer}
@@ -154,7 +155,7 @@
\unexpanded\def\strc_constructions_initialize#1% class instance
{\edef\currentconstruction{#1}%
-\let\currentconstructionlistentry\!!zerocount
+ \let\currentconstructionlistentry\!!zerocount
\expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
\expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
\expandafter\let\expandafter\currentconstructionhandler\csname\??constructionclass\currentconstruction\endcsname
@@ -167,16 +168,16 @@
\newconditional\c_strc_constructions_number_state
\newconditional\c_strc_constructions_title_state
-\setvalue{\??constructioninitializer\v!construction}%
+\unexpanded\setvalue{\??constructioninitializer\v!construction}%
{\setfalse\c_strc_constructions_number_state
\setfalse\c_strc_constructions_title_state}
-\setvalue{\??constructionfinalizer\v!construction}%
+\unexpanded\setvalue{\??constructionfinalizer\v!construction}%
{}
-% We keep the command variant around but rather would move to the
-% start-stop one. Also, passing the title as argument has some
-% history so we need to keep that as well.
+%D We keep the command variant around but rather would move to the start-stop one.
+%D Also, passing the title as argument has some history so we need to keep that as
+%D well.
\ifdefined\dotagsetconstruction \else \let\dotagsetconstruction\relax \fi
@@ -190,10 +191,10 @@
\unexpanded\def\currentconstructionsample
{\begstrut
\constructionparameter\c!text
- \constructionparameter\c!sample
+ \p_strc_constructions_sample
\endstrut}
-\setvalue{\??constructionmainhandler\v!construction}#1%
+\unexpanded\setvalue{\??constructionmainhandler\v!construction}#1%
{\iftrialtypesetting \else
\begingroup
\currentconstructionsynchronize % reinstated
@@ -202,7 +203,7 @@
\endgroup
\fi#1}
-\setvalue{\??constructiontexthandler\v!construction}%
+\unexpanded\setvalue{\??constructiontexthandler\v!construction}%
{\begingroup
\useconstructionstyleandcolor\c!headstyle\c!headcolor % move to \currentconstructiontext
\the\everyconstruction
@@ -224,36 +225,72 @@
\def\resetconstructions % to be used in e.g. footnotes
{\c_strc_constructions_nested_state\zerocount}
-\setvalue{\??constructioncommandhandler\v!construction}%
+\unexpanded\setvalue{\??constructioncommandhandler\v!construction}%
{\endgroup}
-\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing
+\let\p_strc_constructions_sample \empty
+\let\p_strc_constructions_align \empty
+\let\p_strc_constructions_indenting \empty
+\let\p_strc_constructions_width \empty
+\let\p_strc_constructions_aligntitle \empty
+\let\p_strc_constructions_renderingsetup\empty
+\let\p_strc_constructions_headalign \empty
+\let\p_strc_constructions_hang \empty
+\let\p_strc_constructions_inbetween \empty
+\let\p_strc_constructions_closesymbol \empty
+\let\p_strc_constructions_distance \empty
+
+\newconditional\c_strc_constructions_distance_none
+
+\unexpanded\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing
{\dostarttagged\t!construction\currentconstruction
\dotagsetconstruction
\constructionparameter\c!before
\begingroup
\edef\currentconstructionalternative{\constructionparameter\c!alternative}%
- % \checkconstructionalternativeparent % this catches an unknown alternative
- \doifnotsetups{\constructionalternativeparameter\c!renderingsetup}
- {\letconstructionparameter\c!alternative\v!left
- \let\currentconstructionalternative\v!left}%
+ \edef\p_strc_constructions_renderingsetup{\constructionalternativeparameter\c!renderingsetup}%
+ % catch fuzzyness
+ \ifx\p_strc_constructions_renderingsetup\empty
+ \letconstructionparameter\c!alternative\v!left
+ \let\currentconstructionalternative\v!left
+ \edef\p_strc_constructions_renderingsetup{\constructionalternativeparameter\c!renderingsetup}%
+ \else
+ \doifnotsetups\p_strc_constructions_renderingsetup
+ {\letconstructionparameter\c!alternative\v!left
+ \let\currentconstructionalternative\v!left
+ \edef\p_strc_constructions_renderingsetup{\constructionalternativeparameter\c!renderingsetup}}%
+ \fi
+ %
\doadaptleftskip{\constructionparameter\c!margin}%
\setlocalhsize % so we can use \localhsize in width assignments
- \constructionsheaddistance\constructionalternativeparameter\c!distance\relax
- \ifdim\constructionsheaddistance=\zeropoint
- \doif{\constructionalternativeparameter\c!width}\v!broad{\constructionsheaddistance\emwidth}%
+ \edef\p_strc_constructions_distance{\constructionalternativeparameter\c!distance}%
+ \edef\p_strc_constructions_width{\constructionalternativeparameter\c!width}%
+ \ifx\p_strc_constructions_distance\v!none
+ \settrue\c_strc_constructions_distance_none
+ \constructionsheaddistance\zeropoint
+ \else
+ \setfalse\c_strc_constructions_distance_none
+ \constructionsheaddistance\p_strc_constructions_distance\relax
+ \ifdim\constructionsheaddistance=\zeropoint
+ \ifx\p_strc_constructions_width\v!broad
+ \constructionsheaddistance\emwidth
+ \fi
+ \fi
\fi
% inefficient and not always needed, for instance not with margins so we will make checkers
- % per alternative
+ % per alternative some day (especially in labels this is unwanted overhead)
\setbox\constructionheadbox\hbox
{\forgetall
\dontcomplain
\settrialtypesetting
- \doifelsenothing{\constructionparameter\c!sample}
- {\csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructiontext}%
- {\csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructionsample}}%
+ \edef\p_strc_constructions_sample{\constructionparameter\c!sample}%
+ \ifx\p_strc_constructions_sample\empty
+ \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructiontext
+ \else
+ \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructionsample
+ \fi}%
\assignwidth
- {\constructionalternativeparameter\c!width}
+ \p_strc_constructions_width
\constructionsheadwidth
{\unhcopy\constructionheadbox}
\constructionsheaddistance
@@ -266,15 +303,24 @@
\else
\csname\??constructionmainhandler\currentconstructionhandler\endcsname{\boxedconstructionhead\currentconstructiontext}%
\fi}%
- \doifelse{\constructionparameter\c!aligntitle}\v!no
- {\leftconstructionskip \leftskip
- \rightconstructionskip\rightskip}
- {\ifcase\c_strc_constructions_nested_state
- \leftconstructionskip \leftskip
- \rightconstructionskip\rightskip
- \fi}%
- \doifsomething{\constructionparameter\c!align}{\setupalign[\constructionparameter\c!align]}% \use...
- \indenting[\constructionparameter\c!indenting]% \use...
+ \edef\p_strc_constructions_aligntitle{\constructionparameter\c!aligntitle}%
+ \ifx\p_strc_constructions_aligntitle\v!no
+ \leftconstructionskip \leftskip
+ \rightconstructionskip\rightskip
+ \else
+ \ifcase\c_strc_constructions_nested_state
+ \leftconstructionskip \leftskip
+ \rightconstructionskip\rightskip
+ \fi
+ \fi
+ \edef\p_strc_constructions_align{\constructionparameter\c!align}%
+ \ifx\p_strc_constructions_align\empty \else
+ \setupalign[\p_strc_constructions_align]% \use...
+ \fi
+ \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}%
+ \ifx\p_strc_constructions_indenting\empty \else
+ \indenting[\p_strc_constructions_indenting]% \use...
+ \fi
\ifcase\c_strc_constructions_nested_state
\c_strc_constructions_nested_state\plusone
\or
@@ -282,15 +328,15 @@
\fi
\strc_constructions_close_symbol_reset
%
- % \doifnotsetups {\constructionalternativeparameter\c!renderingsetup}
- % {\let\currentconstructionalternative\v!left}%
- \directsetup{\constructionalternativeparameter\c!renderingsetup}\relax
+ \edef\p_strc_constructions_headalign{\constructionparameter\c!headalign}%
+ %
+ \directsetup\p_strc_constructions_renderingsetup\relax
%
\dostoptagged % tag
\dostarttagged\t!constructioncontent\empty
\ignorespaces} % args not needed
-\setvalue{\??constructionstophandler\v!construction}%
+\unexpanded\setvalue{\??constructionstophandler\v!construction}%
{\strc_constructions_close_symbol_place
\doifnot{\constructionparameter\c!display}\v!no\par
\dostoptagged % content
@@ -302,16 +348,21 @@
% \endgroup
\dorechecknextindentation}
-\def\boxedconstructionhead#1%
+\unexpanded\def\boxedconstructionhead#1%
{\vtop
{\hsize\constructionsheadwidth
- \doifsomething{\constructionparameter\c!headalign}{\setupalign[\constructionparameter\c!headalign]}%
+ \edef\p_strc_constructions_headalign{\constructionparameter\c!headalign}%
+ \ifx\p_strc_constructions_headalign\empty \else
+ \setupalign[\p_strc_constructions_headalign]% \use...
+ \fi
#1}}
\installcorenamespace{constructionalternative}
\installcorenamespace{constructionrenderings}
% see lists: alternativemethods ... todo
+%
+% there will be a proper interface to variables
\installcommandhandler \??constructionalternative {constructionalternative} \??constructionalternative
@@ -391,16 +442,17 @@
  {\setbox\constructionheadbox\vtop % \vbox goes wrong in hang
{\forgetall
\dontcomplain
- \hsize\constructionsheadwidth
- \edef\p_headalign{\constructionparameter\c!headalign}%
- \ifx\p_headalign\empty
+ \hsize\constructionsheadwidth\relax
+ \ifx\p_strc_constructions_headalign\empty
\setupalign[#1]% use fast one
\else
- \setupalign[\p_headalign]% use fast one
+ \setupalign[\p_strc_constructions_headalign]% use fast one
+ \fi
+ \ifconditional\c_strc_constructions_distance_none \else
+ \advance\ifx#1\v!flushleft\rightskip\else\leftskip\fi\constructionsheaddistance % only difference and wrong anyway
\fi
- \advance\ifx#1\v!flushleft\rightskip\else\leftskip\fi\constructionsheaddistance % only difference and wrong anyway
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox}%
-\setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
+ \setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
\ht\constructionheadbox\strutht
\dp\constructionheadbox\strutdp}
@@ -408,12 +460,11 @@
{\setbox\constructionheadbox\vtop
{\forgetall
\dontcomplain
- \hsize\constructionsheadwidth
- \edef\p_headalign{\constructionparameter\c!headalign}%
- \ifx\p_headalign\empty
+ \hsize\constructionsheadwidth\relax
+ \ifx\p_strc_constructions_headalign\empty
\setupalign[#1]% use fast one
\else
- \setupalign[\p_headalign]% use fast one
+ \setupalign[\p_strc_constructions_headalign]% use fast one
\fi
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox}%
\setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
@@ -421,18 +472,18 @@
\dp\constructionheadbox\strutdp}
\def\strc_constructions_set_hang#1%
- {\edef\p_hang{\constructionparameter\c!hang}%
- \ifx\p_hang\empty
- \else\ifx\p_hang\v!fit
+ {\ifx\p_strc_constructions_hang\empty
+ % nothing
+ \else\ifx\p_strc_constructions_hang\v!fit
\scratchdimen\htdp\constructionheadbox
\getnoflines\scratchdimen
\hangafter-\noflines
- \else\ifx\p_hang\v!broad
+ \else\ifx\p_strc_constructions_hang\v!broad
\scratchdimen\dimexpr\htdp\constructionheadbox+.5\strutht\relax
\getnoflines\scratchdimen
\hangafter-\noflines
\else
- \hangafter-\p_hang
+ \hangafter-\p_strc_constructions_hang
\fi\fi\fi
\relax
\hangindent\ifx#1\v!right-\fi\constructionsheadwidth}
@@ -441,18 +492,18 @@
% environment. The wrapping happens elsewhere.
\startsetups[\??constructionrenderings:\v!left]
- \edef\p_hang{\constructionparameter\c!hang}
- \doifsetupselse{\??constructionrenderings:\v!left:\p_hang} {
- \directsetup{\??constructionrenderings:\v!left:\p_hang}
+ \edef\p_strc_constructions_hang{\constructionparameter\c!hang}%
+ \doifsetupselse{\??constructionrenderings:\v!left:\p_strc_constructions_hang} {
+ \directsetup{\??constructionrenderings:\v!left:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!left:\v!hanging}
}
\stopsetups
\startsetups[\??constructionrenderings:\v!right]
- \edef\p_hang{\constructionparameter\c!hang}
- \doifsetupselse{\??constructionrenderings:\v!right:\p_hang} {
- \directsetup{\??constructionrenderings:\v!right:\p_hang}
+ \edef\p_strc_constructions_hang{\constructionparameter\c!hang}
+ \doifsetupselse{\??constructionrenderings:\v!right:\p_strc_constructions_hang} {
+ \directsetup{\??constructionrenderings:\v!right:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!right:\v!hanging}
}
@@ -462,9 +513,8 @@
\let\\=\crlf
\noindent
\strc_constructions_set_pure_box\v!flushleft
- \leftskip\dimexpr\leftconstructionskip+\constructionsheadwidth\relax
- \rightskip\rightconstructionskip
- \advance\leftskip\constructionsheaddistance
+ \leftskip\dimexpr\leftconstructionskip+\constructionsheadwidth+\constructionsheaddistance\relax
+ \rightskip\rightconstructionskip\relax
\llap {
\hbox to \leftskip {
\hskip\leftconstructionskip
@@ -573,13 +623,18 @@
\stopsetups
\startsetups[\??constructionrenderings:\v!top]
- \dohandlepagebreakX\plusone
+ \strc_sectioning_prevent_page_break\plusone
\let\\=\space
\noindent
- \copy\constructionheadbox\par % copy ?
- \nobreak
- \doifelsenothing{\constructionparameter\c!inbetween}{\blank}{\constructionparameter\c!inbetween}%
+ \copy\constructionheadbox % why copy ?
+ \par
\nobreak
+ \edef\p_strc_constructions_inbetween{\constructionparameter\c!inbetween}%
+ \ifx\p_strc_constructions_inbetween\empty \else
+ \p_strc_constructions_inbetween
+ \par
+ \nobreak
+ \fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -639,9 +694,9 @@
\stopsetups
\startsetups[\??constructionrenderings:\v!serried]
- \edef\p_width{\constructionparameter\c!width}% CHECK ! ! ! wrong parameter namespace
- \doifsetupselse{\??constructionrenderings:\v!serried:\p_width} {
- \directsetup{\??constructionrenderings:\v!serried:\p_width}
+ \edef\p_strc_constructions_width{\constructionparameter\c!width}% CHECK ! ! ! wrong parameter namespace
+ \doifsetupselse{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
+ \directsetup{\??constructionrenderings:\v!serried:\p_strc_constructions_width}
} {
\directsetup{\??constructionrenderings:\v!serried:\v!wide}
}
@@ -660,7 +715,9 @@
\let\\=\crlf
\noindent
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
- \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
+ \ifconditional\c_strc_constructions_distance_none \else
+ \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
+ \fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -687,7 +744,7 @@
\noindent
\advance\leftskip-\leftskipadaption\relax
\ifdim\leftskipadaption=\zeropoint
- \leftskipadaption1.5em\relax % just some default
+ \leftskipadaption1.5\emwidth\relax % just some default
\ifnum\c_strc_constructions_nested_state=\plusone
\ifdim\leftskip>\zeropoint \relax
\leftskipadaption\leftskip
@@ -698,8 +755,15 @@
\advance\leftskip\leftskipadaption % but we're already further on
\fi
\hskip-\leftskipadaption\relax
- \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
- \kern\ifdim\constructionsheaddistance=\zeropoint .75em\else\constructionsheaddistance\fi
+ \ifhbox\constructionheadbox \unhbox\else\box\fi\constructionheadbox
+ \ifconditional\c_strc_constructions_distance_none \else
+ % we used to have just a kern, but long lines look bad then
+ \scratchdistance\ifdim\constructionsheaddistance=\zeropoint .75\emwidth\else\constructionsheaddistance\fi
+ \hskip \scratchdistance
+ \s!plus .25\scratchdistance
+ \s!minus.25\scratchdistance
+ \fi
+ \allowbreak % new
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -731,9 +795,9 @@
\unexpanded\def\strc_constructions_close_symbol_place
{\ifconditional\csname\??constructionclosesymbol\currentconstruction\endcsname
\global\expandafter\setfalse\csname\??constructionclosesymbol\currentconstruction\endcsname
- \edef\p_closesymbol{\constructionparameter\c!closesymbol}%
- \ifx\p_closesymbol\empty \else
- \constructionparameter\c!closecommand{\p_closesymbol}% hm why {} maybe because box expected
+ \edef\p_strc_constructions_closesymbol{\constructionparameter\c!closesymbol}%
+ \ifx\p_strc_constructions_closesymbol\empty \else
+ \constructionparameter\c!closecommand{\p_strc_constructions_closesymbol}% hm why {} maybe because box expected
\fi
\fi}
diff --git a/Master/texmf-dist/tex/context/base/strc-def.mkiv b/Master/texmf-dist/tex/context/base/strc-def.mkiv
index 7917b7d83c0..0738bdf29d8 100644
--- a/Master/texmf-dist/tex/context/base/strc-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-def.mkiv
@@ -16,59 +16,6 @@
\unprotect
-\setupheads[%
- %\c!after=,
- %\c!align=,
- %\c!aligntitle=,
- \c!alternative=\v!normal,
- %\c!before=,
- %\c!color=,
- \c!command=\normalplacehead,
- \c!continue=\v!yes,
- %\c!coupling=,
- %\c!deepnumbercommand=,
- %\c!deeptextcommand=,
- %\c!default=,
- \c!distance=\!!zeropoint,
- \c!expansion=\v!no,
- %\c!file=,
- %\c!footer=,
- %\c!grid=,
- \c!hang=\v!none,
- %\c!header=,
- \c!incrementnumber=\v!yes,
- \c!indentnext=\v!no,
- %\c!label=,
- %\c!limittext=\languageparameter\c!limittext,
- \c!margin=\zeropoint,
- %\c!margintext=,
- \c!number=\v!yes,
- \c!numbercolor=\headparameter\c!color,
- \c!textcolor=\headparameter\c!color,
- \c!numberstyle=\headparameter\c!style,
- \c!textstyle=\headparameter\c!style,
- %\c!numbercommand=,
- %\c!textcommand=,
- \c!ownnumber=\v!no,
- %\c!page=,
- \c!placehead=\v!yes,
- \c!sectionconversionset=\s!default,
- \c!sectionnumber=\v!yes,
- %\c!sectionsegments=,
- \c!sectionseparatorset=\s!default,
- \c!sectionset=\v!all,
- \c!interlinespace=,
- %\c!sectionstopper=,
- %\c!sectionstarter=,
- %\c!strut=,
- %\c!style=,
- %\c!text=,
- %\c!tolerance=,
- %\c!beforesection=\directsetup{document:\currenthead:start}, % these might become defaults i.e. acpect document: namespace
- %\c!insidesection=\directsetup{document:\currenthead:inside}, % these might become defaults i.e. acpect document: namespace
- %\c!aftersection=\directsetup{document:\currenthead:stop}, % these might become defaults i.e. acpect document: namespace
- ]
-
% \setupsectionblock[appendix][sectionconversionset=appendix]
% \setuphead[chapter][sectionconversionset=\sectionblockparameter\c!sectionconversionset] % \sectionblockparameter]
@@ -102,7 +49,7 @@
% \startsetups defaults:frontpart:pagenumbers:roman
% \defineconversionset[\c!frontpart:\c!pagenumber][][romannumerals]
-% \setupuserpagenumber[\c!way=\v!byblock]
+% \setupuserpagenumber[\c!way=\v!by\v!block]
% \stopsetups
% \definesectionblock
@@ -233,7 +180,7 @@
\c!marking=\v!page,
\c!header=,
\c!style=\tfc,
- \c!distance=.75em,
+ \c!distance=.75\emwidth,
\c!before={\blank[2*\v!big]},
\c!after={\blank[2*\v!big]}]
@@ -242,7 +189,7 @@
[%\c!align=,
%\c!indentnext=\v!no,
\c!style=\tfa,
- \c!distance=.75em,
+ \c!distance=.75\emwidth,
\c!before={\blank[2*\v!big]},
\c!after=\blank]
diff --git a/Master/texmf-dist/tex/context/base/strc-des.mkii b/Master/texmf-dist/tex/context/base/strc-des.mkii
index 587ef13e73f..e699df9ea1d 100644
--- a/Master/texmf-dist/tex/context/base/strc-des.mkii
+++ b/Master/texmf-dist/tex/context/base/strc-des.mkii
@@ -47,7 +47,7 @@
% Op die manier is meer mogelijk en worden \par's geskipt.
%
% De macro \??dd#1\s!do\c!commando levert de koppeling tussen
-% \doornumberen en \doordefinieren. Deze constructie is nodig
+% \doornummeren en \doordefinieren. Deze constructie is nodig
% omdat doornummeren geen argument heeft en omdat subnummers
% niet worden genest binnen het hogere niveau.
%
diff --git a/Master/texmf-dist/tex/context/base/strc-des.mkvi b/Master/texmf-dist/tex/context/base/strc-des.mkvi
index 29072fbc61d..5635ff0df58 100644
--- a/Master/texmf-dist/tex/context/base/strc-des.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-des.mkvi
@@ -33,9 +33,9 @@
%\c!color=,
%\c!headcolor=,
%\c!titlecolor=,
- \c!width=7em,
- \c!distance=1em,
- \c!titledistance=0.5em,
+ \c!width=7\emwidth,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
%\c!hang=,
%\c!sample=,
%\c!align=,
@@ -71,15 +71,17 @@
\setuevalue{\e!stop \currentdescription}{\strc_descriptions_stop}%
\to \everydefinedescription
-\setvalue{\??constructioninitializer\v!description}%
+\let\p_strc_constructions_title \empty
+
+\unexpanded\setvalue{\??constructioninitializer\v!description}%
{\let\currentdescription \currentconstruction
\let\constructionparameter \descriptionparameter
\let\detokenizedconstructionparameter\detokenizeddescriptionparameter
\let\letconstructionparameter \letdescriptionparameter
\let\useconstructionstyleandcolor \usedescriptionstyleandcolor
\let\setupcurrentconstruction \setupcurrentdescription
- \edef\p_title{\constructionparameter\c!title}%
- \ifx\p_title\v!yes
+ \edef\p_strc_constructions_title{\constructionparameter\c!title}%
+ \ifx\p_strc_constructions_title\v!yes
\settrue\c_strc_constructions_title_state
\else
\setfalse\c_strc_constructions_title_state
@@ -116,10 +118,6 @@
\expandafter\strc_descriptions_start_yes_normal
\fi}
-% \unexpanded\def\strc_descriptions_start_yes_titled[#1]#2% todo userdata
-% {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#1},\c!title={#2},\c!bookmark=,\c!list=][]%
-% \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
-
\unexpanded\def\strc_descriptions_start_yes_titled[#1]%
{\doifnextbgroupelse
{\strc_descriptions_start_yes_titled_indeed[#1]}%
@@ -141,11 +139,6 @@
\expandafter\strc_descriptions_start_nop_normal
\fi}
-% \unexpanded\def\strc_descriptions_start_nop_titled#1%
-% {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#1},\c!bookmark=,\c!list=][]%
-% \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
-% \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
-
\unexpanded\def\strc_descriptions_start_nop_titled
{\doifnextbgroupelse
{\strc_descriptions_start_nop_titled_indeed}%
@@ -203,10 +196,6 @@
\expandafter\strc_descriptions_nop_normal
\fi}
-% \unexpanded\def\strc_descriptions_nop_titled#1%
-% {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#1},\c!bookmark=,\c!list=][]%
-% \csname\??constructioncommandhandler\currentconstructionhandler\endcsname}
-
\unexpanded\def\strc_descriptions_nop_titled
{\doifnextbgroupelse
{\strc_descriptions_nop_titled_indeed}%
@@ -227,17 +216,36 @@
\expandafter\let\csname\??constructioncommandhandler\v!description\expandafter\endcsname\csname\??constructioncommandhandler\v!construction\endcsname
\expandafter\let\csname\??constructiontexthandler \v!description\expandafter\endcsname\csname\??constructiontexthandler \v!construction\endcsname
-\setvalue{\??constructioncommandhandler\v!description}%
+\unexpanded\setvalue{\??constructioncommandhandler\v!description}%
{\dowithpar
{\csname\??constructionstarthandler\v!construction\endcsname}%
{\csname\??constructionstophandler \v!construction\endcsname\endgroup}}
-\setvalue{\??constructionstarthandler\v!description}%
+\unexpanded\setvalue{\??constructionstarthandler\v!description}%
{\BeforePar{\csname\??constructionstarthandler\v!construction\endcsname}%
\GotoPar}
-\setvalue{\??constructionstophandler\v!description}%
+\unexpanded\setvalue{\??constructionstophandler\v!description}%
{\csname\??constructionstophandler\v!construction\endcsname
\endgroup}
+%D For historic reasons we have (from now on undocumented):
+
+\definedescription
+ [tab]
+ [\c!alternative=\v!left,
+ \c!headstyle=\v!normal,
+ \c!headcolor=,
+ \c!style=\v!normal,
+ \c!color=,
+ \c!width=\v!broad,
+ \c!sample=\hskip4\emwidth,
+ \c!before=,
+ \c!after=]
+
+\unexpanded\def\setuptab
+ {\setupdescription[tab]}
+
+%D Maybe we should remove this definition.
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-doc.lua b/Master/texmf-dist/tex/context/base/strc-doc.lua
index f10b175a463..a25d1e1b0da 100644
--- a/Master/texmf-dist/tex/context/base/strc-doc.lua
+++ b/Master/texmf-dist/tex/context/base/strc-doc.lua
@@ -15,14 +15,15 @@ if not modules then modules = { } end modules ['strc-doc'] = {
-- in section titles by default a zero aborts, so there we need: sectionset=bagger with \definestructureprefixset [bagger] [section-2,section-4] []
-- in lists however zero's are ignored, so there numbersegments=2:4 gives result
-local next, type = next, type
+local next, type, tonumber, select = next, type, tonumber, select
local format, gsub, find, gmatch, match = string.format, string.gsub, string.find, string.gmatch, string.match
-local concat = table.concat
+local concat, fastcopy = table.concat, table.fastcopy
local max, min = math.max, math.min
local allocate, mark, accesstable = utilities.storage.allocate, utilities.storage.mark, utilities.tables.accesstable
+local setmetatableindex = table.setmetatableindex
local catcodenumbers = catcodes.numbers
-local ctxcatcodes = tex.ctxcatcodes
+local ctxcatcodes = catcodenumbers.ctxcatcodes
local variables = interfaces.variables
local v_last = variables.last
@@ -61,10 +62,10 @@ local a_internal = attributes.private('internal')
-- -- -- document -- -- --
-local data
+local data -- the current state
function documents.initialize()
- data = {
+ data = allocate { -- whole data is marked
numbers = { },
forced = { },
ownnumbers = { },
@@ -92,11 +93,12 @@ documents.initialize()
function documents.preset(numbers)
local nofnumbers = #numbers
- data.numbers = numbers
- data.depth = nofnumbers
- data.ownnumbers = { }
+ local ownnumbers = { }
+ data.numbers = numbers
+ data.ownnumbers = ownnumbers
+ data.depth = nofnumbers
for i=1,nofnumbers do
- data.ownnumbers[i] = ""
+ ownnumbers[i] = ""
end
sections.setnumber(nofnumbers,"-1")
end
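
The rewrite of documents.preset above mostly hoists the repeated data.ownnumbers field lookups into a local, a routine Lua micro-optimization; the observable behaviour stays the same. A minimal sketch of the pattern, with generic names rather than the ConTeXt data structures:

local function fill_slow(data, n)
    data.ownnumbers = { }
    for i = 1, n do
        data.ownnumbers[i] = ""   -- re-resolves the "ownnumbers" field on every pass
    end
end

local function fill_fast(data, n)          -- the shape used in the patch
    local ownnumbers = { }
    data.ownnumbers = ownnumbers
    for i = 1, n do
        ownnumbers[i] = ""                 -- indexes the local table directly
    end
end

local d = { }
fill_fast(d, 3)
print(#d.ownnumbers)                       -- 3
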
@@ -109,12 +111,12 @@ local tobesaved = allocate()
sections.collected = collected
sections.tobesaved = tobesaved
---~ local function initializer()
---~ collected = sections.collected
---~ tobesaved = sections.tobesaved
---~ end
-
---~ job.register('structures.sections.collected', tobesaved, initializer)
+-- local function initializer()
+-- collected = sections.collected
+-- tobesaved = sections.tobesaved
+-- end
+--
+-- job.register('structures.sections.collected', tobesaved, initializer)
sections.registered = sections.registered or allocate()
local registered = sections.registered
@@ -132,40 +134,39 @@ end
function sections.save(sectiondata)
-- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier
local numberdata = sectiondata.numberdata
+ local ntobesaved = #tobesaved
if not numberdata or sectiondata.metadata.nolist then
- return #tobesaved
+ return ntobesaved
else
- local n = #tobesaved + 1
- tobesaved[n] = numberdata
- if not collected[n] then
- collected[n] = numberdata
+ ntobesaved = ntobesaved + 1
+ tobesaved[ntobesaved] = numberdata
+ if not collected[ntobesaved] then
+ collected[ntobesaved] = numberdata
end
- return n
+ return ntobesaved
end
end
function sections.load()
- setmetatable(collected,nil)
- local l = lists.collected
- for i=1,#l do
- local li = l[i]
- local lm = li.metadata
- if lm and lm.kind == "section" and not lm.nolist then
- local ln = li.numberdata
- if ln then
- collected[#collected+1] = ln
+ setmetatableindex(collected,nil)
+ local lists = lists.collected
+ for i=1,#lists do
+ local list = lists[i]
+ local metadata = list.metadata
+ if metadata and metadata.kind == "section" and not metadata.nolist then
+ local numberdata = list.numberdata
+ if numberdata then
+ collected[#collected+1] = numberdata
end
end
end
- sections.load = nil
+ sections.load = functions.dummy
end
-setmetatable(collected, {
- __index = function(t,i)
- sections.load()
- return t[i] or { }
- end
-})
+table.setmetatableindex(collected, function(t,i)
+ sections.load()
+ return collected[i] or { }
+end)
--
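
The metatable rewrite above pairs with sections.load replacing itself by functions.dummy: the first miss on collected triggers a one-time load, after which unknown entries simply read as an empty table. A standalone sketch of that self-disabling lazy-load pattern (generic names, not the actual ConTeXt helpers):

local collected = { }
local function dummy() end                 -- stand-in for functions.dummy

local loader = { }

function loader.load()
    collected[1] = { title = "first" }     -- pretend this came from the saved list data
    loader.load = dummy                    -- run once, then become a no-op
end

setmetatable(collected, {
    __index = function(t, i)
        loader.load()                      -- only fires on a miss
        return collected[i] or { }         -- unknown entries read as an empty table
    end
})

print(collected[1].title)                  -- first
print(next(collected[2]))                  -- nil
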
@@ -195,16 +196,10 @@ function sections.getlevel(name)
return levelmap[name] or 0
end
-local byway = "^" .. v_by
-
-function sections.way(way)
- context((gsub(way,byway,"")))
-end
-
function sections.setblock(name)
local block = name or data.block or "unknown" -- can be used to set the default
data.block = block
- context(block)
+ return block
end
function sections.pushblock(name)
@@ -213,7 +208,7 @@ function sections.pushblock(name)
data.blocks[#data.blocks+1] = block
data.block = block
documents.reset()
- context(block)
+ return block
end
function sections.popblock()
@@ -221,7 +216,7 @@ function sections.popblock()
local block = data.blocks[#data.blocks] or data.block
data.block = block
documents.reset()
- context(block)
+ return block
end
function sections.currentblock()
@@ -236,9 +231,12 @@ function sections.getcurrentlevel()
context(data.depth)
end
+local saveset = { } -- experiment, see sections/tricky-001.tex
+
function sections.somelevel(given)
-- old number
local numbers = data.numbers
+
local ownnumbers = data.ownnumbers
local forced = data.forced
local status = data.status
@@ -247,13 +245,14 @@ function sections.somelevel(given)
local mappedlevel = levelmap[givenname]
local newdepth = tonumber(mappedlevel or (olddepth > 0 and olddepth) or 1) -- hm, levelmap only works for section-*
local directives = given.directives
- local resetset = (directives and directives.resetset) or ""
+ local resetset = directives and directives.resetset or ""
-- local resetter = sets.getall("structure:resets",data.block,resetset)
- -- a trick to permits userdata to overload title, ownnumber and reference
+ -- a trick to permit userdata to overload title, ownnumber and reference
-- normally these are passed as argument but nowadays we provide several
-- interfaces (we need this because we want to be compatible)
if trace_detail then
- report_structure("name '%s', mapped level '%s', old depth '%s', new depth '%s', reset set '%s'",givenname,mappedlevel,olddepth,newdepth,resetset)
+ report_structure("name %a, mapped level %a, old depth %a, new depth %a, reset set %a",
+ givenname,mappedlevel,olddepth,newdepth,resetset)
end
local u = given.userdata
if u then
@@ -265,11 +264,14 @@ function sections.somelevel(given)
if u.label and u.label ~= "" then given.titledata.label = u.label ; u.label = nil end
end
-- so far for the trick
+ if saveset then
+ saveset[newdepth] = (resetset ~= "" and resetset) or saveset[newdepth] or ""
+ end
if newdepth > olddepth then
for i=olddepth+1,newdepth do
- local s = tonumber(sets.get("structure:resets",data.block,resetset,i))
+ local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i))
if trace_detail then
- report_structure("new>old (%s>%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
+ report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i])
end
if not s or s == 0 then
numbers[i] = numbers[i] or 0
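
The saveset table introduced above is explicitly marked as an experiment: as far as the surrounding code goes, it records per depth the last explicitly given reset set, so that deeper levels opened later reuse that remembered set instead of whatever (possibly empty) resetset accompanies the current head. A rough sketch of the fallback chain with invented values (the real lookup goes through sets.get):

local saveset = { }

local function remember(depth, resetset)
    -- keep the old entry when the new head passes an empty reset set
    saveset[depth] = (resetset ~= "" and resetset) or saveset[depth] or ""
end

local function effective(depth, resetset)
    -- a remembered entry wins; depths never seen fall back to the given set
    return saveset[depth] or resetset
end

remember(2, "mysets")        -- an explicit reset set at depth 2
remember(2, "")              -- a later head without one keeps "mysets"
print(effective(2, ""))      -- mysets
print(effective(3, "other")) -- other
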
@@ -282,9 +284,9 @@ function sections.somelevel(given)
end
elseif newdepth < olddepth then
for i=olddepth,newdepth+1,-1 do
- local s = tonumber(sets.get("structure:resets",data.block,resetset,i))
+ local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i))
if trace_detail then
- report_structure("new<old (%s<%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
+ report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i])
end
if not s or s == 0 then
numbers[i] = numbers[i] or 0
@@ -316,24 +318,12 @@ function sections.somelevel(given)
end
forced[newdepth] = nil
if trace_detail then
- report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', forced '%s'",olddepth,newdepth,oldn,newn,concat(fd,""))
- end
- elseif newn then
- newn = oldn + 1
- if trace_detail then
- report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', increment",olddepth,newdepth,oldn,newn)
+ report_structure("old depth %a, new depth %a, old n %a, new n %a, forced %t",olddepth,newdepth,oldn,newn,fd)
end
else
- local s = tonumber(sets.get("structure:resets",data.block,resetset,newdepth))
- if not s then
- newn = oldn or 0
- elseif s == 0 then
- newn = oldn or 0
- else
- newn = s - 1
- end
+ newn = oldn + 1
if trace_detail then
- report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', reset",olddepth,newdepth,oldn,newn)
+ report_structure("old depth %a, new depth %a, old n %a, new n %a, increment",olddepth,newdepth,oldn,newn)
end
end
numbers[newdepth] = newn
@@ -350,22 +340,28 @@ function sections.somelevel(given)
numberdata = { }
given.numberdata = numberdata
end
+
local n = { }
for i=1,newdepth do
n[i] = numbers[i]
end
numberdata.numbers = n
+-- numberdata.numbers = fastcopy(numbers)
+
if #ownnumbers > 0 then
- numberdata.ownnumbers = table.fastcopy(ownnumbers)
+ numberdata.ownnumbers = fastcopy(ownnumbers)
end
if trace_detail then
- report_structure("name '%s', numbers '%s', own numbers '%s'",givenname,concat(numberdata.numbers, " "),concat(numberdata.ownnumbers, " "))
+ report_structure("name %a, numbers % a, own numbers % a",givenname,numberdata.numbers,numberdata.ownnumbers)
end
local metadata = given.metadata
local references = given.references
- references.tag = references.tag or tags.getid(metadata.kind,metadata.name)
+ local tag = references.tag or tags.getid(metadata.kind,metadata.name)
+ if tag and tag ~= "" and tag ~= "?" then
+ references.tag = tag
+ end
local setcomponent = structures.references.setcomponent
if setcomponent then
@@ -603,9 +599,8 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local set = ""
local segments = ""
local criterium = ""
- local dataset = { ... }
- for d=1,#dataset do
- local data = dataset[d] -- can be multiple parametersets
+ for d=1,select("#",...) do
+ local data = select(d,...) -- can be multiple parametersets
if data then
if separatorset == "" then separatorset = data.separatorset or "" end
if conversionset == "" then conversionset = data.conversionset or "" end
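
One plausible motive for swapping the intermediate dataset = { ... } table for select above: the length operator is unreliable on a sequence with nil holes, while select("#", ...) reports the real number of passed parameter sets. A small illustration, independent of the surrounding code:

local function count_with_table(...)
    local t = { ... }
    return #t                               -- any border is legal when the list has holes
end

local function count_with_select(...)
    local n = 0
    for d = 1, select("#", ...) do          -- counts every argument, nil or not
        if select(d, ...) ~= nil then       -- adjusted to the first value at position d
            n = n + 1
        end
    end
    return n
end

print(count_with_table(1, nil, 3))          -- usually 3, but 1 would also be allowed
print(count_with_select(1, nil, 3))         -- 2
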
@@ -767,27 +762,31 @@ function sections.findnumber(depth,what) -- needs checking (looks wrong and slow
if what == v_first or what == v_previous then
for i=index,1,-1 do
local s = collected[i]
- local n = s.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- sectiondata = s
- if quit then
+ if s then
+ local n = s.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ sectiondata = s
+ if quit then
+ break
+ end
+ elseif #n < depth then
break
end
- elseif #n < depth then
- break
end
end
elseif what == v_last or what == v_next then
for i=index,#collected do
local s = collected[i]
- local n = s.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- sectiondata = s
- if quit then
+ if s then
+ local n = s.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ sectiondata = s
+ if quit then
+ break
+ end
+ elseif #n < depth then
break
end
- elseif #n < depth then
- break
end
end
end
@@ -926,3 +925,17 @@ commands.structureautocatcodedget = function(name,catcode) sections.structured
commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end
commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end
+
+--
+
+function commands.setsectionblock (name) context(sections.setblock(name)) end
+function commands.pushsectionblock(name) context(sections.pushblock(name)) end
+function commands.popsectionblock () context(sections.popblock()) end
+
+--
+
+local byway = "^" .. v_by -- ugly but downward compatible
+
+function commands.way(way)
+ context((gsub(way,byway,"")))
+end
diff --git a/Master/texmf-dist/tex/context/base/strc-doc.mkiv b/Master/texmf-dist/tex/context/base/strc-doc.mkiv
index 4e9dec32e57..c8dfae1e422 100644
--- a/Master/texmf-dist/tex/context/base/strc-doc.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-doc.mkiv
@@ -17,240 +17,10 @@
\unprotect
-\def\c!xmlsetup{xmlsetup}
+%D This will move:
-% We operate in a \type {@@ns} namespace. All data is passed through
-% variables. Of course we can built another interface on top of this
-% that accepts multiple arguments. We might change this approach and
-% remove this layer but it was needed for the transition.
-
-\installcorenamespace{structure}
-
-\installdirectcommandhandler \??structure {structure} % unchecked, so we need to initialize used parameters
-
-% \definesystemvariable{ns}
-% \def\structureparameter#1{\csname\??ns#1\endcsname}
-% \def\detokenizedstructureparameter#1{\detokenize\expandafter\expandafter\expandafter{\csname\??ns#1\endcsname}}
-
-\setupstructure % not a user command so we might need to change the name
- [\c!number=,
- \c!level=,
- \c!name=,
- \c!title=,
- \c!bookmark=,
- \c!marking=,
- \c!list=,
- \c!label=,
- \c!coupling=,
- \c!ownnumber=,
- \c!sectionseparatorset=\s!default,
- \c!sectionconversionset=\s!default,
- \c!sectionstopper=,
- \c!sectionstarter=,
- \c!sectionsegments=,
- \c!sectionresetset=,
- \c!reference=,
- \c!backreference=,
- \c!expansion=\v!no,
- \c!xmlsetup=,
- \s!catcodes=,
- \c!saveinlist=\v!yes,
- \c!command=\showstructuredata]
-
-% maybe flags for list, bm, mark
-
-\def\structurereferenceprefixon {+}
-\def\structurereferenceprefixoff{-}
-
-\def\setstructurereferenceprefix
- {\ifx\currentstructurereferenceprefix\empty
- % nothing
- \else\ifx\currentstructurereferenceprefix\structurereferenceprefixon
- \setupglobalreferenceprefix[\currentstructurereference]%
- \else\ifx\currentstructurereferenceprefix\structurereferenceprefixoff
- \setupglobalreferenceprefix[]%
- \else
- \setupglobalreferenceprefix[\currentstructurereferenceprefix]%
- \fi\fi\fi
- \let\currentstructurereferenceprefix\referenceprefix}
-
-% why xdef ?
-
-\unexpanded\def\structurecomponent
- {\dodoubleempty\dostructurecomponent}
-
-\def\dostructurecomponent[#1][#2]% #1=interfaced-settings, #2=optional user data (not yet supported)
- {\begingroup
- \setupstructure[#1]%
- \xdef\currentstructurename {\structureparameter\c!name}%
- \xdef\currentstructurecoupling {\structureparameter\c!coupling}%
- \xdef\currentstructureownnumber {\structureparameter\c!ownnumber}% optional own number
- \xdef\currentstructurelevel {\structureparameter\c!level}%
- \edef\currentstructureexpansion {\structureparameter\c!expansion}%
- \xdef\currentstructurexmlsetup {\structureparameter\c!xmlsetup}%
- \xdef\currentstructurecatcodes {\structureparameter\s!catcodes}%
- \xdef\currentstructurelabel {\structureparameter\c!label}%
- \xdef\currentstructurereference {\structureparameter\c!reference}%
- \xdef\currentstructurereferenceprefix{\structureparameter\c!referenceprefix}%
- \xdef\currentstructurebackreference {\structureparameter\c!backreference}%
- \xdef\currentstructureshownumber {\structureparameter\c!number}%
- \xdef\currentstructuresaveinlist {\structureparameter\c!saveinlist}%
- \xdef\currentstructureincrementnumber{\structureparameter\c!incrementnumber}%
- \ifx\currentstructureexpansion\s!xml
- \xmlstartraw
- \xdef\currentstructuretitle {\structureparameter\c!title}%
- \xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
- \xdef\currentstructuremarking {\structureparameter\c!marking}%
- \xdef\currentstructurelist {\structureparameter\c!list}%
- \xmlstopraw
- \ifx\currentstructurelist\empty
- \globallet\currentstructurelist\currentstructuretitle
- \fi
- \globallet\currentstructurecoding\s!xml
- \else
- \ifx\currentstructureexpansion\v!yes
- \xdef\currentstructuretitle {\structureparameter\c!title}%
- \xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
- \xdef\currentstructuremarking {\structureparameter\c!marking}%
- \xdef\currentstructurelist {\structureparameter\c!list}%
- \else
- \xdef\currentstructuretitle {\detokenizedstructureparameter\c!title}%
- \xdef\currentstructurebookmark{\detokenizedstructureparameter\c!bookmark}%
- \xdef\currentstructuremarking {\detokenizedstructureparameter\c!marking}%
- \xdef\currentstructurelist {\detokenizedstructureparameter\c!list}%
- \iflocation \ifx\currentstructurebookmark\empty
- \begingroup
- \simplifycommands
- \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
- \endgroup
- \fi \fi
- \fi
- \ifx\currentstructurelist\empty
- \globallet\currentstructurelist\currentstructuretitle
- \fi
- \globallet\currentstructurecoding\s!tex
- \fi
- \setnextinternalreference
- \setstructurereferenceprefix
- \xdef\currentstructurenumber{\ctxlua{ % todo: combine with next call, adapt marks accordingly
- structures.sections.somelevel {
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurereference",
- referenceprefix = "\currentstructurereferenceprefix",
- backreference = "\currentstructurebackreference",
- },
- directives = {
- resetset = "\structureparameter\c!sectionresetset",
- },
- metadata = {
- kind = "section",
- name = "\currentstructurename",
- catcodes = \the\ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi,
- coding = "\currentstructurecoding",
- \ifx\currentstructurecoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurexmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- \ifx\currentstructuresaveinlist\v!no
- nolist = true,
- \fi
- \ifx\currentstructureincrementnumber\v!yes
- increment = "\currentstructureincrementnumber",
- \fi
- },
- titledata = { % we can add mark and reference
- label = \!!bs\detokenize\expandafter{\currentstructurelabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructuretitle }\!!es,
- \ifx\currentstructurebookmark\currentstructuretitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurebookmark }\!!es,
- \fi
- \ifx\currentstructuremarking\currentstructuretitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructuremarking }\!!es,
- \fi
- \ifx\currentstructuresaveinlist\v!no \else
- \ifx\currentstructurelist\currentstructuretitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurelist}\!!es,
- \fi
- \fi
- },
- numberdata = {
- % needed ?
- block = "\currentsectionblock",
- hidenumber = \ifx\currentstructureshownumber\v!no true\else nil\fi, % titles
- % so far
- separatorset = "\structureparameter\c!sectionseparatorset",
- conversion = "\structureparameter\c!sectionconversion", % for good old times sake
- conversionset = "\structureparameter\c!sectionconversionset",
- starter = \!!bs\structureparameter\c!sectionstarter\!!es,
- stopper = \!!bs\structureparameter\c!sectionstopper\!!es,
- set = "\structureparameter\c!sectionset",
- segments = "\structureparameter\c!sectionsegments",
- ownnumber = "\currentstructureownnumber",
- },
- userdata = \!!bs\detokenize{#2}\!!es % will be converted to table at the lua end
- }
- }}%
- \xdef\currentstructurelistnumber{\ctxcommand{addtolist(structures.sections.current())}}%
- % \currentstructuresynchronize has to be called someplace, since it introduces a node
- \setstructuresynchronization\currentstructurelistnumber
- \structureparameter\c!command
- \endgroup}
-
-\let\currentstructurenumber \!!zerocount
-\let\currentsectioncountervalue \!!zerocount % redefined later
-\let\previoussectioncountervalue\!!zerocount % redefined later
-
-% todo: a direct flusher (needed when hidden)
-
-\def\setstructuresynchronization#1% todo: use ctxcontext
+\unexpanded\def\setstructuresynchronization#1% todo: use ctxcontext
{\xdef\currentstructureattribute {\ctxlua {tex.write(structures.references.setinternalreference("\currentstructurereferenceprefix","\currentstructurereference",\nextinternalreference,"\interactionparameter\c!focus"))}}%
\xdef\currentstructuresynchronize{\ctxlatecommand{enhancelist(#1)}}}
-\def\reportcurrentstructure{\ctxlua{structures.sections.reportstructure()}}
-
-% Beware: we need to flush the data to the list explicitly. This is because
-% node in inserted and we may want control over when that happens.
-
-\def\showstructuredata
- {\par
- \dontleavehmode\begingroup
- \currentstructuresynchronize
- \currentstructureattribute
- [\currentstructurename: \showstructurelevel: \currentstructuretitle]
- \endgroup\par}
-
-% We can access the (stored) data with the following macros.
-%
-% \def\MyHeadCommand #1#2{\framed{#1}\framed{#2 / \structureuservariable{subtitle}}}
-% \def\MyListCommand#1#2#3{\externalfigure[\structurelistuservariable{figure}][height=5mm]#2}
-%
-% \setuphead[chapter][command=\MyHeadCommand]
-% \setuplist[chapter][alternative=command,command=\MyListCommand]
-%
-% \starttext
-% \setupheadertexts[chapter]
-% \setupinteraction[state=start]
-% \placebookmarks[chapter]
-% \placelist[chapter]
-% \startchapter[ownnumber=10,title=Ton,list=Hans,marking=Kees,bookmark=Bram][figure=cow.pdf,subtitle=oeps]
-% \stopchapter
-% \stoptext
-
-% todo: #1 => "#1" ... adapt lua code for name and number
-
-\def\structurenumber {\ctxcommand{structurenumber()}}
-\def\structuretitle {\ctxcommand{structuretitle()}}
-\def\structurevariable #1{\ctxcommand{structurevariable("#1")}}
-\def\structureuservariable #1{\ctxcommand{structureuservariable("#1")}}
-\def\structurecatcodedget #1{\ctxcommand{structurecatcodedget("#1")}} % bad name
-\def\structuregivencatcodedget #1#2{\ctxcommand{structuregivencatcodedget("#1",\number#2)}} % bad name
-\def\structureautocatcodedget #1#2{\ctxcommand{structureautocatcodedget ("#1","#2")}}
-
-\def\namedstructurevariable #1#2{\ctxcommand{namedstructurevariable ("#1","#2")}}
-\def\namedstructureuservariable#1#2{\ctxcommand{namedstructureuservariable("#1","#2")}}
-
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-enu.mkvi b/Master/texmf-dist/tex/context/base/strc-enu.mkvi
index e3261fa35cb..185b4177eed 100644
--- a/Master/texmf-dist/tex/context/base/strc-enu.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-enu.mkvi
@@ -75,6 +75,8 @@
\installcommandhandler \??enumeration {enumeration} \??enumeration
+\installcounterassociation{enumeration}
+
\let\setupenumerations\setupenumeration
\setupenumerations % check with old
@@ -85,10 +87,10 @@
%\c!color=,
%\c!headcolor=,
%\c!titlecolor=,
- \c!width=8em,
- \c!distance=0pt,
-\c!distance=1em,
- \c!titledistance=0.5em,
+ \c!width=8\emwidth,
+ \c!distance=\zeropoint,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
%\c!hang=,
%\c!sample=,
%\c!align=,
@@ -127,18 +129,18 @@
%
% with push/pop (also at definition time)
-\unexpanded\def\strc_define_commands_enumeration#1#2#3% current level parent
- {\doifelsenothing{#3}
- {\normalexpanded{\defineconstruction[#1][\s!handler=\v!enumeration,\c!level=#2]}%
- \setevalue{\??enumeration#1:\s!parent}{\??enumeration}}%
- {\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!enumeration,\c!level=#2]}%
- \setevalue{\??enumeration#1:\s!parent}{\??enumeration#3}}%
- \setuevalue{\e!next #1}{\strc_enumerations_next }%
- \setuevalue{\c!reset#1}{\strc_enumerations_reset }%
- %setuevalue{\c!set #1}{\strc_enumerations_set }%
- \setuevalue {#1}{\strc_enumerations_command{#1}}%
- \setuevalue{\e!start#1}{\strc_enumerations_start {#1}}%
- \setuevalue{\e!stop #1}{\strc_enumerations_stop }}
+\unexpanded\def\strc_define_commands_enumeration#tag#level#parent%
+ {\doifelsenothing{#parent}
+ {\normalexpanded{\defineconstruction[#tag][\s!handler=\v!enumeration,\c!level=#level]}%
+ \setevalue{\??enumeration#tag:\s!parent}{\??enumeration}}%
+ {\normalexpanded{\defineconstruction[#tag][#parent][\s!handler=\v!enumeration,\c!level=#level]}%
+ \setevalue{\??enumeration#tag:\s!parent}{\??enumeration#parent}}%
+ \setuevalue{\e!next #tag}{\strc_enumerations_next }%
+ \setuevalue{\c!reset#tag}{\strc_enumerations_reset }%
+ %setuevalue{\c!set #tag}{\strc_enumerations_set }%
+ \setuevalue {#tag}{\strc_enumerations_command{#tag}}%
+ \setuevalue{\e!start#tag}{\strc_enumerations_start {#tag}}%
+ \setuevalue{\e!stop #tag}{\strc_enumerations_stop }}
\appendtoks
\ifx\currentenumerationparent\empty
@@ -172,18 +174,21 @@
\fi
\doifcounterelse\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
\letenumerationparameter\s!counter\p_counter
- \strc_enumerations_setup_counter\currentenumeration
+ %\strc_enumerations_setup_counter\currentenumeration
\to \everydefineenumeration
-\setvalue{\??constructioninitializer\v!enumeration}%
+\let\p_strc_constructions_title \empty
+\let\p_strc_constructions_number\empty
+
+\unexpanded\setvalue{\??constructioninitializer\v!enumeration}%
{\let\currentenumeration \currentconstruction
\let\constructionparameter \enumerationparameter
\let\detokenizedconstructionparameter\detokenizedenumerationparameter
\let\letconstructionparameter \letenumerationparameter
\let\useconstructionstyleandcolor \useenumerationstyleandcolor
\let\setupcurrentconstruction \setupcurrentenumeration
- \edef\p_number{\constructionparameter\c!number}%
- \ifx\p_number\v!yes
+ \edef\p_strc_constructions_number{\constructionparameter\c!number}%
+ \ifx\p_strc_constructions_number\v!yes
\settrue\c_strc_constructions_number_state
\iftrialtypesetting
\strc_counters_save\currentconstructionnumber
@@ -192,14 +197,14 @@
\else
\setfalse\c_strc_constructions_number_state
\fi
- \edef\p_title{\constructionparameter\c!title}%
- \ifx\p_title\v!yes
+ \edef\p_strc_constructions_title{\constructionparameter\c!title}%
+ \ifx\p_strc_constructions_title\v!yes
\settrue\c_strc_constructions_title_state
\else
\setfalse\c_strc_constructions_title_state
\fi}
-\setvalue{\??constructionfinalizer\v!enumeration}%
+\unexpanded\setvalue{\??constructionfinalizer\v!enumeration}%
{\ifconditional\c_strc_constructions_number_state
\iftrialtypesetting
\strc_counters_restore\currentconstructionnumber
@@ -214,39 +219,27 @@
\unexpanded\def\strc_enumerations_next {\strc_counters_increment_sub{\specificconstructionnumber\currentconstruction}{\currentconstructionlevel}}
\unexpanded\def\strc_enumerations_reset {\strc_counters_reset_sub {\specificconstructionnumber\currentconstruction}{\currentconstructionlevel}}
-%unexpanded\def\strc_enumerations_set #3{\strc_counters_set_sub {\specificconstructionnumber\currentconstruction}{\currentconstructionlevel}{#2}}
-
-%D Counters (maybe we can share this code): (not ok yet, messy with notes)
-
-%%%%%%%%%%% BEGIN TODO (SEE NOTES) %%%%%%%%%%%%%%%
+%unexpanded\def\strc_enumerations_set#tag{\strc_counters_set_sub {\specificconstructionnumber\currentconstruction}{\currentconstructionlevel}{#tag}}
-\newtoks\everysetupenumerationcounter
+\def\currentconstructionnumber {\constructionparameter\s!counter}
+\def\specificconstructionnumber#tag{\namedconstructionparameter{#tag}\s!counter}
-\let\v_strc_enumerations_counter_name\empty % or just p_counter
-
-\def\strc_enumerations_setup_counter#1%
- {\edef\v_strc_enumerations_counter_name{#1}% only used in the token list
- \the\everysetupenumerationcounter}
-
-\def\strc_enumerations_define_counter#1% todo: fast inheritance (was mainparameter)
- {\definecounter[#1]%
- \strc_enumerations_setup_counter{#1}}
-
-\appendtoks
- \strc_counter_setup_using_parameter\v_strc_enumerations_counter_name\enumerationparameter
-\to \everysetupenumerationcounter
+%D Counters (maybe we can share this code): (not ok yet, messy with notes)
-% \def\currentconstructionnumber {\enumerationparameter\s!counter}
-% \def\specificconstructionnumber#1{\namedenumerationparameter{#1}\s!counter}
+% Maybe we should move counters to the construction level as more derived mechanisms
+% use it \unknown\ so don't depend on names here!
-\def\currentconstructionnumber {\constructionparameter\s!counter}
-\def\specificconstructionnumber#1{\namedconstructionparameter{#1}\s!counter}
+\def\strc_enumerations_define_counter#tag% todo: move inline
+ {\definecounter[#tag]%
+ \registerenumerationcounter{#tag}}
\appendtoks
- \strc_enumerations_setup_counter\currentenumeration
+ \synchronizeenumerationcounters
\to \everysetupenumeration
-%%%%%%%%%%% END TODO (SEE NOTES) %%%%%%%%%%%%%%%
+\appendtoks
+ \synchronizeenumerationcounters
+\to \everydefineenumeration
%D Initializer:
@@ -263,7 +256,7 @@
\expandafter\let\csname\??constructionstarthandler \v!enumeration\expandafter\endcsname\csname\??constructionstarthandler \v!description\endcsname
\expandafter\let\csname\??constructionstophandler \v!enumeration\expandafter\endcsname\csname\??constructionstophandler \v!description\endcsname
-\setvalue{\??constructiontexthandler\v!enumeration}%
+\unexpanded\setvalue{\??constructiontexthandler\v!enumeration}%
{\begingroup
\useconstructionstyleandcolor\c!headstyle\c!headcolor
\strc_enumerations_text
@@ -363,23 +356,23 @@
%
% we will provide some more control, like skipping etc or reference relates
-% todo
-
- \unexpanded\def\strc_enumerations_full_number_coupling
- {\iflocation
- \strc_enumerations_full_number_yes
- \else
- \iftrialtypesetting \else
- % \doenumerationregistercoupling
- \fi
- \strc_enumerations_full_number_yes
- \edef\p_coupling{\constructionparameter\c!coupling}%
- \ifx\p_coupling\empty \else
- \symbolreference[order(construction:\p_coupling:\nextinternalorderreference)]%
- \fi
- \fi}
-
- \def\skipenumerationcoupling[#1]% e.g. for questions with no answer
- {\ctxlua{structures.references.setnextorder("construction","#1")}}
+% to do / to be checked
+
+\unexpanded\def\strc_enumerations_full_number_coupling
+ {\iflocation
+ \strc_enumerations_full_number_yes
+ \else
+ \iftrialtypesetting \else
+ % \doenumerationregistercoupling
+ \fi
+ \strc_enumerations_full_number_yes
+ \edef\p_coupling{\constructionparameter\c!coupling}%
+ \ifx\p_coupling\empty \else
+ \symbolreference[order(construction:\p_coupling:\nextinternalorderreference)]%
+ \fi
+ \fi}
+
+\unexpanded\def\strc_enumerations_skip_number_coupling[#tag]% e.g. for questions with no answer
+ {\ctxlua{structures.references.setnextorder("construction","#tag")}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-flt.mkvi b/Master/texmf-dist/tex/context/base/strc-flt.mkvi
index 05cfa40e333..4fe1a96ff33 100644
--- a/Master/texmf-dist/tex/context/base/strc-flt.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-flt.mkvi
@@ -23,8 +23,10 @@
%
% todo: strc_floats_analyze_variables_two could trigger a setup
% and we could have nofmethods of them
-
-% Less globals!
+%
+% todo: move variables from page-flt to strc-flt
+%
+% todo: less globals!
%D This module is being converted into a mkvi one.
%D
@@ -44,7 +46,7 @@
\let\setupcaption \setupfloatcaption
\let\setupcaptions\setupfloatcaption
-\def\dohandlenextfloatindent
+\unexpanded\def\dohandlenextfloatindent
{\useindentnextparameter\floatparameter
\dorechecknextindentation}
@@ -118,7 +120,7 @@
\c!textmethod=\ifgridsnapping2\else0\fi, % 0=raw 1=safe (.99pg) 2=tight (-1pt) % THIS WILL CHANGE
\c!sidemethod=\ifgridsnapping2\else1\fi, % 0=raw 1=safe (.99pg) 2=tight (-1pt) % THIS WILL CHANGE
\c!indentnext=\v!no,
- \c!margin=1em,
+ \c!margin=1\emwidth,
\c!method=1,
\c!cache=\v!yes, % when no, then intermediate flush
\c!leftmargin=\zeropoint, % displacement in 'normal floats'
@@ -129,7 +131,7 @@
\c!rightmargindistance=\floatparameter\c!leftmargindistance,
\c!ntop=2,
\c!nbottom=0,
- \c!nlines=4,
+ \c!nlines=4, % used?
%\c!local=,
%\c!bottombefore=, % e.g. \vfill
%\c!bottomafter=,
@@ -138,51 +140,54 @@
%D Individual settings:
-\strc_counter_preset_using_parameter\setupcaption\sharedcounterparameter
+\installcounterassociation{floatcaption}
\appendtoks
- \let\currentfloat\currentfloatcaption
- \ifx\currentfloat\empty \else
- \strc_counter_setup_using_parameter\currentfloatcaption\floatcaptionparameter
- \strc_counters_check_setup\currentfloatcaption
- \fi
+ \let\currentfloat\currentfloatcaption % ?
+ \synchronizefloatcaptioncounters
\to \everysetupfloatcaption
+\appendtoks
+ \let\currentfloat\currentfloatcaption % ?
+ \synchronizefloatcaptioncounters
+\to \everydefinefloatcaption
+
%D Definitions:
-\let\saveddefinefloat\definefloat
+\let\strc_floats_define_saved\definefloat
\unexpanded\def\definefloat
- {\dotripleempty\dodefinefloat}
+ {\dotripleempty\strc_floats_define}
-\def\dodefinefloat[#1][#2][#3]% #1=naam #2=meervoud #3=parent
+\def\strc_floats_define[#1][#2][#3]% #1=naam #2=meervoud #3=parent
{\ifthirdargument
- \redodefinefloat[#1][#2][#3]%
+ \strc_floats_define_cloned[#1][#2][#3]%
\else\ifsecondargument
- \dododefinefloat[#1][#2]%
+ \strc_floats_define_normal[#1][#2]%
\else
- \dododefinefloat[#1][#1]%
+ \strc_floats_define_normal[#1][#1]%
\fi\fi}
-\def\dododefinefloat[#1][#2]%
- {\definefloatcaption[#1]%
+\def\strc_floats_define_normal[#1][#2]%
+ {\registerfloatcaptioncounter{#1}%
+ \definefloatcaption[#1]%
\definecounter[#1]%
\definelist[#1]%
\presetlabeltext[#1=\Word{#1}~]%
\presetheadtext[#2=\Word{#2}]%
- \saveddefinefloat[#1]%
- \dodefinefloatcommands[#1][#2]}
+ \strc_floats_define_saved[#1]%
+ \strc_floats_define_commands{#1}{#2}}
-\def\redodefinefloat[#1][#2][#3]%
+\def\strc_floats_define_cloned[#1][#2][#3]%
{\definefloatcaption[#1][#3]%
\definecounter[#1][#3]%
\definelist[#1][#3]%
\presetlabeltext[#1=\Word{#3}~]%
\presetheadtext[#2=\Word{#2}]%
- \saveddefinefloat[#1][#3]%
- \dodefinefloatcommands[#1][#2]}
+ \strc_floats_define_saved[#1][#3]%
+ \strc_floats_define_commands{#1}{#2}}
-\def\dodefinefloatcommands[#1][#2]%
+\def\strc_floats_define_commands#1#2%
{\setuvalue {\e!place\e!listof#2}{\dodoubleempty\strc_lists_place[#1]}% call will change
\setuvalue {\e!complete\e!listof#2}{\dotripleempty\strc_lists_complete_indeed[#1][#2]}% call will change
\setuevalue {\e!place#1}{\strc_floats_place{#1}}%
@@ -212,7 +217,7 @@
\newif\ifnofloatnumber
\newif\ifemptyfloatcaption
-\installstructurelistprocessor{float}{\usestructurelistprocessor{number+title}}
+\installstructurelistprocessor\s!float{\usestructurelistprocessor{number+title}}
\unexpanded\def\thecurrentfloatnumbersuffix
{\doifsomething{\floatcaptionparameter\c!suffix}
@@ -254,11 +259,11 @@
% begin of todo
-\unexpanded\def\placefloatcaption{\dodoubleempty\doplacefloatcaption}
-\unexpanded\def\setfloatcaption {\dodoubleempty\dodosetfloatcaption}
+\unexpanded\def\placefloatcaption{\dodoubleempty\strc_floats_place_caption}
+\unexpanded\def\setfloatcaption {\dodoubleempty\strc_floats_set_caption}
-\def\doplacefloatcaption[#tag][#reference]#caption{[not supported]}
-\def\dodosetfloatcaption[#tag][#reference]#caption{[not supported]} % \dosetfloatcaption already in use
+\def\strc_floats_place_caption[#tag][#reference]#caption{[not supported]}
+\def\strc_floats_set_caption [#tag][#reference]#caption{[not supported]}
\unexpanded\def\placefloatcaptiontext [#tag]{[not supported yet]}
\unexpanded\def\placefloatcaptionnumber [#tag]{[not supported yet]}
@@ -332,7 +337,7 @@
% temporary removed ... was not applied systematically
%
% \def\dosetcaptionthings
-% {\doprocesslocalsetups{\floatcaptionparameter\c!setups}\relax}
+% {\usesetupsparameter\floatcaptionparameter}
\def\strc_floats_check_caption_content
{\ifnofloatcaption
@@ -441,11 +446,15 @@
% we're really dealing with the float. Some day I'll root out
% the global settings.
+\let\lastplacedfloat\empty
+
\def\strc_floats_set_current_tag#tag%
{\edef\currentfloat{#tag}%
\ifx\currentfloat\empty
\let\currentfloat\v!figure % a bit of a hack
\fi
+ \global\let\lastplacedfloat\currentfloat
+ \let\m_strc_floats_saved_userdata\empty
\let\currentfloatcaption\currentfloat}
\def\strc_floats_reset_variables
@@ -483,22 +492,36 @@
% start-stop
+% \startplacefigure[title=oeps][subtitle=whatever]
+% \framed[width=10cm,height=5cm]{\floatuserdataparameter{subtitle}}
+% \stopplacefigure
+
+\installcorenamespace{floatuserdata}
+
+\installsetuponlycommandhandler \??floatuserdata {floatuserdata}
+
+\let\m_strc_floats_saved_userdata\empty % todo: reset this in non start|stop cases
+
\unexpanded\def\strc_floats_start_place#tag%
{\flushnotes
\page_otr_command_flush_side_floats % here !
\strc_floats_begin_group
\strc_floats_set_current_tag{#tag}%
- \dosingleempty\strc_floats_start_place_indeed}
+ \dodoubleempty\strc_floats_start_place_indeed}
-\def\strc_floats_start_place_indeed[#settings]% tricky ... saved not ok yet
+\def\strc_floats_start_place_indeed[#settings][#userdata]%
{\strc_floats_reset_variables
\edef\savedfloatlocation{\floatcaptionparameter\c!location}%
\setupcurrentfloatcaption[\c!location=,\c!reference=,\c!title=,\c!marking=,\c!list=,\c!bookmark=,#settings]%
+ \def\m_strc_floats_saved_userdata{#2}%
\edef\floatlocation{\floatcaptionparameter\c!location}%
\setupcurrentfloatcaption[\c!location=\savedfloatlocation]%
\ifx\floatlocation\empty
\edef\floatlocation{\floatparameter\c!default}%
\fi
+ \ifsecondargument
+ \setupcurrentfloatuserdata[#userdata]%
+ \fi
\doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal
\bgroup
\ignorespaces}
@@ -569,15 +592,18 @@
% special hack
-\def\strc_floats_begin_group {\begingroup}
-\def\strc_floats_end_group {\carryoverpar\endgroup}
-\def\strc_floats_end_split_group {\endgroup}
-
-\def\strc_floats_begin_text_group {\begingroup\let\strc_floats_end_group\relax}
-\def\strc_floats_end_text_group {\endgroup}
+\def\strc_floats_begin_group {\begingroup}
+\def\strc_floats_end_group {\carryoverpar\endgroup}
+\def\strc_floats_end_split_group {\endgroup}
+\def\strc_floats_begin_text_group{\begingroup\let\strc_floats_end_group\relax}
+\def\strc_floats_end_text_group {\endgroup}
% implementation
+%setnewconstant\c_page_one_float_method \zerocount % 0=raw 1=safe (.99) 2=tight (-1pt) / belongs in page-one
+\setnewconstant\c_strc_floats_rotation \zerocount % 0 90 180 270
+\newconditional\c_strc_floats_par_float
+
\ifdefined\page_margin_strc_floats_before \else \let\page_margin_strc_floats_before \relax \fi
\ifdefined\page_margin_strc_floats_set_hsize \else \let\page_margin_strc_floats_set_hsize\relax \fi
@@ -595,7 +621,7 @@
\splitfloat{\strc_floats_place_next_box_normal}}
\unexpanded\def\strc_floats_place_next_box_normal
- {\ifsomefloatwaiting
+ {\ifconditional\c_page_floats_some_waiting
% this was \checkwaitingfloats spread all over
\doifinsetelse\v!always\floatlocation
{\showmessage\m!floatblocks5\empty}
@@ -626,65 +652,60 @@
\strc_floats_check_extra_actions
\strc_floats_analyze_variables_two
\strc_floats_place_packaged_boxes
- \dostoptagged % tricky ... needs checking
+ \dostoptagged % tricky .... needs checking
% we need to carry over the par because of side floats
- \global\sidefloatdownshift \zeropoint
- \global\sidefloatextrashift\zeropoint
- \ifparfloat
+ \global\d_page_sides_downshift \zeropoint
+ \global\d_page_sides_extrashift\zeropoint
+ \ifconditional\c_strc_floats_par_float
\doifinset\v!reset\floatlocation\page_sides_forget_floats
\doinhibitblank
\fi
\strc_floats_end_group}
-\setnewconstant\textfloatmethod\zerocount % 0=raw 1=safe (.99) 2=tight (-1pt)
-\setnewconstant\floatrotation \zerocount % 0 90 180 270
-
% nicer is a bunch of states and one loop that sets those states
\def\strc_floats_analyze_variables_two
{\ifinsidecolumns
- \global\parfloatfalse
+ \global\setfalse\c_strc_floats_par_float
\else
- \doifcommonelse
- {\floatlocation}
- {\flushfloatslist}
- {\global\parfloattrue}
- {\global\parfloatfalse}%
+ \doifcommonelse\floatlocation\flushfloatslist
+ {\global\settrue \c_strc_floats_par_float}
+ {\global\setfalse\c_strc_floats_par_float}%
\fi
- \global\sidefloatshift\zeropoint
- \global\sidefloatmaximum\zeropoint
- \global\sidefloatmethod\floatparameter\c!sidemethod
- \global\textfloatmethod\floatparameter\c!textmethod
- \global\sidefloatalign\zerocount
- \global\floatrotation\zerocount
+ \global\d_page_sides_shift \zeropoint
+ \global\d_page_sides_maximum \zeropoint
+ \global\c_page_sides_method \floatparameter\c!sidemethod
+ \global\c_page_one_float_method \floatparameter\c!textmethod
+ \global\c_page_sides_align \zerocount
+ \global\c_strc_floats_rotation \zerocount
\strc_floats_calculate_skips
- \ifparfloat
+ \ifconditional\c_strc_floats_par_float
\processaction
[\floatparameter\c!sidealign]
- [\v!height=>\global\sidefloatalign\plusone,%
- \v!line=>\global\sidefloatalign\plustwo,% (***)
- \v!depth=>\global\sidefloatalign\plusthree,%
- \v!grid=>\global\sidefloatalign\plusfour,%
- \v!halfline=>\global\sidefloatalign\plusfive]%
- \ifcase\sidefloatalign\relax % todo: optie v!lokaal => \else
- \doifinset\v!height \floatlocation{\global\sidefloatalign\plusone}%
- \doifinset\v!line \floatlocation{\global\sidefloatalign\plustwo}%
- \doifinset\v!depth \floatlocation{\global\sidefloatalign\plusthree}%
- \doifinset\v!grid \floatlocation{\global\sidefloatalign\plusfour}%
- \doifinset\v!halfline\floatlocation{\global\sidefloatalign\plusfive}% meant for 'none'
+ [\v!height=>\global\c_page_sides_align\plusone ,%
+ \v!line=>\global\c_page_sides_align\plustwo ,% (***)
+ \v!depth=>\global\c_page_sides_align\plusthree,%
+ \v!grid=>\global\c_page_sides_align\plusfour ,%
+ \v!halfline=>\global\c_page_sides_align\plusfive ]%
+ \ifcase\c_page_sides_align\relax % todo: optie v!lokaal => \else
+ \doifinset\v!height \floatlocation{\global\c_page_sides_align\plusone }%
+ \doifinset\v!line \floatlocation{\global\c_page_sides_align\plustwo }%
+ \doifinset\v!depth \floatlocation{\global\c_page_sides_align\plusthree}%
+ \doifinset\v!grid \floatlocation{\global\c_page_sides_align\plusfour }%
+ \doifinset\v!halfline\floatlocation{\global\c_page_sides_align\plusfive }% meant for 'none'
\fi
- \doifinset\v!high\floatlocation{\global\sidefloattopskip \zeropoint}%
- \doifinset\v!low \floatlocation{\global\sidefloatbottomskip\zeropoint}%
+ \doifinset\v!high\floatlocation{\global\d_page_sides_topskip \zeropoint}%
+ \doifinset\v!low \floatlocation{\global\d_page_sides_bottomskip\zeropoint}%
\doifinset\v!fit \floatlocation
- {\global\sidefloattopskip \zeropoint
- \global\sidefloatbottomskip\zeropoint
- \global\floatsideskip \zeropoint}%
+ {\global\d_page_sides_topskip \zeropoint
+ \global\d_page_sides_bottomskip\zeropoint
+ \global\d_strc_floats_margin \zeropoint}%
\else
\processallactionsinset
[\floatlocation]
- [ 90=>\global\floatrotation\commalistelement\relax,%
- 180=>\global\floatrotation\commalistelement\relax,%
- 270=>\global\floatrotation\commalistelement\relax]%
+ [ 90=>\global\c_strc_floats_rotation\commalistelement\relax,%
+ 180=>\global\c_strc_floats_rotation\commalistelement\relax,%
+ 270=>\global\c_strc_floats_rotation\commalistelement\relax]%
\fi
\doifinsetelse\v!nonumber\floatlocation
{\global\nofloatnumbertrue}
@@ -703,19 +724,15 @@
% documenteren in details
\def\strc_floats_analyze_variables_one
- {\doifelse{\floatparameter\c!local}\v!yes % fout keyword
- \globalcenterfloatboxtrue
- \globalcenterfloatboxfalse
- \ifglobalcenterfloatbox
- \localcenterfloatboxtrue
+ {\doifelse{\floatparameter\c!local}\v!yes\settrue\setfalse\c_page_floats_center_box_global % fout keyword
+ \ifconditional\c_page_floats_center_box_global
+ \settrue\c_page_floats_center_box_local
\else
- \doifinsetelse\v!local\floatlocation
- \localcenterfloatboxtrue
- \localcenterfloatboxfalse
+ \doifinsetelse\v!local\floatlocation\settrue\setfalse\c_page_floats_center_box_local
\fi
\doifnotcommon{\v!always,\v!here,\v!force}\floatlocation % ! ! ! ! ! !
- {\globalcenterfloatboxfalse
- \localcenterfloatboxfalse}}
+ {\setfalse\c_page_floats_center_box_global
+ \setfalse\c_page_floats_center_box_local}}
\let\naturalfloatheight\!!zeropoint
\let\naturalfloatwidth \!!zeropoint
@@ -743,8 +760,8 @@
\def\floatcaptionattribute
{\iflocation
- \ifnofloatnumber
- \else
+ %\ifnofloatnumber
+ %\else
\ifnofloatcaption
\else
\ifinsidesplitfloat
@@ -756,23 +773,28 @@
attr \destinationattribute \currentfloatattribute
\fi
\fi
- \fi
+ %\fi
\fi}
\newconditional\usesamefloatnumber
\def\strc_floats_place_packaged_boxes
+ {\expandafter\strc_floats_place_packaged_boxes_indeed\expandafter{\m_strc_floats_saved_userdata}}
+
+\def\strc_floats_place_packaged_boxes_indeed#userdata%
{\bgroup
\ifconditional\usesamefloatnumber
\globallet\currentfloatnumber \previousfloatnumber
\globallet\currentfloatattribute \empty
\globallet\currentfloatsynchronize\relax
\else
+ \edef\currentfloatcounter{\namedcounterparameter\currentfloat\s!name}%
+ % *1* as an alternative we could set counter parameters here if needed
\ifnofloatnumber \else \ifnofloatcaption \else
- \strc_counters_increment{\strc_counters_the\currentfloat}%
+ \strc_counters_increment\currentfloatcounter
\fi \fi
\strc_counters_register_component
- {float}%
+ \s!float
\setupcurrentfloatcaption
\floatcaptionparameter
\detokenizedfloatcaptionparameter
@@ -780,11 +802,11 @@
\relax
\relax
[\c!name=\currentfloat,%
- \s!counter=\strc_counters_the\currentfloat,%
+ \s!counter=\currentfloatcounter,%
\s!hascaption=\ifnofloatcaption \v!no\else\v!yes\fi,%
\s!hasnumber=\ifnofloatnumber \v!no\else\v!yes\fi,%
\s!hastitle=\ifemptyfloatcaption\v!no\else\v!yes\fi]%
- []%
+ [#userdata]%
\globallet\previousfloatnumber \m_strc_counters_last_registered_index
\globallet\currentfloatnumber \m_strc_counters_last_registered_index
\globallet\currentfloatattribute \m_strc_counters_last_registered_attribute
@@ -816,21 +838,11 @@
\strc_floats_get_box
\global\insidefloatfalse}
-% \def\strc_floats_set_local_hsize
-% {\iflocalcenterfloatbox
-% \seteffectivehsize
-% \hsize\localhsize
-% \else
-% \doifinset\v!margin\floatlocation % brr, really needed! see wm will be redone
-% {\let\currentmarginblock\empty
-% \hsize\marginblockparameter\c!width}%
-% \fi}
-
\newdimen\availablefloatwidth
\newdimen\availablefloatheight
\def\strc_floats_set_local_hsize
- {\iflocalcenterfloatbox
+ {\ifconditional\c_page_floats_center_box_local
% also available check here?
\seteffectivehsize
\hsize\localhsize
@@ -878,18 +890,18 @@
% watch out: line alone aligns on the line ! ! !
\unexpanded\def\movesidefloat[#settings]% (-)n*line|x=,y=
- {\global\sidefloatdownshift \zeropoint
- \global\sidefloatextrashift\zeropoint
+ {\global\d_page_sides_downshift \zeropoint
+ \global\d_page_sides_extrashift\zeropoint
\doifassignmentelse{#settings}%
{\begingroup
\setupcurrentfloat[\c!x=\zeropoint,\c!y=\zeropoint,#settings]%
\ifgridsnapping
\getnoflines{\floatparameter\c!y}%
- \global\sidefloatdownshift\noflines\lineheight
+ \global\d_page_sides_downshift\noflines\lineheight
\else
- \global\sidefloatdownshift\floatparameter\c!y
+ \global\d_page_sides_downshift\floatparameter\c!y
\fi
- \global\sidefloatextrashift\floatparameter\c!x
+ \global\d_page_sides_extrashift\floatparameter\c!x
\endgroup}
{\movedownsidefloat[#settings]}}
@@ -909,17 +921,17 @@
\def\strc_floats_move_down_line#sign%
{\if!!donea \else
- \global\sidefloatdownshift\zeropoint
+ \global\d_page_sides_downshift\zeropoint
\!!doneatrue
\fi
- \global\advance\sidefloatdownshift#sign\lineheight}
+ \global\advance\d_page_sides_downshift#sign\lineheight}
\def\strc_floats_move_down_hang#lines%
{\if!!doneb \else
- \global\sidefloatsidelines\zerocount
+ \global\c_page_sides_n_of_lines\zerocount
\!!donebtrue
\fi
- \global\advance\sidefloatsidelines#lines\relax}
+ \global\advance\c_page_sides_n_of_lines#lines\relax}
\unexpanded\def\movedownsidefloat[#settings]% already in core
{\doifnotinstring{:}{#settings}
@@ -930,7 +942,7 @@
\endgroup}}
\unexpanded\def\hangsidefloat[#number]%
- {\global\sidefloatsidelines#number\relax}
+ {\global\c_page_sides_n_of_lines#number\relax}
\def\strc_floats_set_extra_action#rightpagelocation#leftpagelocation%
{\rightorleftpageaction
@@ -953,10 +965,10 @@
\setvalue{\??extrafloataction \v!line}#1{} % only -n*line is handled (see ***)
\setvalue{\??extrafloataction \s!unknown}#1{\movedownsidefloat[#1]}
-\def\strc_floats_check_extra_actions % less tracing this way ...
+\def\strc_floats_check_extra_actions % less tracing this way ....
{\doifnotinset\v!text\floatlocation % fuzzy, text overloads left, since then it's a directive
{\let\extrafloatlocation\empty
- % \sidefloatdownshift will be reset afterwards, and can
+ % \d_page_sides_downshift will be reset afterwards, and can
% already be set at this point
\processcommacommand[\floatlocation]\strc_floats_check_extra_actions_step
\ifx\extrafloatlocation\empty \else
@@ -971,11 +983,11 @@
% watch out, don't set the maximum width when location=left/right
\def\strc_floats_set_local_dimensions
- {\global\sidefloatshift \zeropoint % duplicate
- \global\sidefloatmaximum\zeropoint\relax % duplicate
- \ifdim\sidefloatdownshift=\zeropoint\else
+ {\global\d_page_sides_shift \zeropoint % duplicate
+ \global\d_page_sides_maximum\zeropoint\relax % duplicate
+ \ifdim\d_page_sides_downshift=\zeropoint\else
\global\setbox\floatbox\vbox
- {\vskip\sidefloatdownshift
+ {\vskip\d_page_sides_downshift
\nointerlineskip
\box\floatbox}%
\fi
@@ -1002,7 +1014,7 @@
\scratchwidth\p_maxwidth\relax
\ifdim\wd\floatbox>\scratchwidth
\doifcommonelse{\v!inright,\v!rightmargin,\v!rightedge,\v!inleft,\v!leftmargin,\v!leftedge}\floatlocation
- {\global\sidefloatmaximum\scratchwidth}
+ {\global\d_page_sides_maximum\scratchwidth}
{\doifcommonelse{\v!right,\v!left}\floatlocation
\strc_floats_realign_floatbox_horizontal_one
\strc_floats_realign_floatbox_horizontal_two}%
@@ -1023,21 +1035,10 @@
\unexpanded\def\placefloats
{\page_otr_command_flush_floats}
-
-\installinsertion\topins
-\installinsertion\botins
-
-\newdimen\botinserted
-\newdimen\topinserted
-
-\newif\iftestfloatbox
-\newdimen\floatsideskip \floatsideskip 12pt
-\newdimen\floattopskip \floattopskip \floattopskip
-\newdimen\floatbottomskip \floatbottomskip \floattopskip
-
-\newcount\noftopfloats \noftopfloats=2
-\newcount\nofbotfloats \nofbotfloats=0
+\newdimen\d_strc_floats_margin
+\newdimen\d_strc_floats_top
+\newdimen\d_strc_floats_bottom
\def\strc_floats_calculate_skip#target#skip%
{\edef\askedfloatskip{#skip}%
@@ -1052,33 +1053,21 @@
\def\strc_floats_calculate_skips
{\begingroup
- \strc_floats_calculate_skip\floattopskip {\rootfloatparameter\c!spacebefore}%
- \strc_floats_calculate_skip\floatbottomskip {\rootfloatparameter\c!spaceafter}%
- \strc_floats_calculate_skip\sidefloattopskip {\rootfloatparameter\c!sidespacebefore}%
- \strc_floats_calculate_skip\sidefloatbottomskip{\rootfloatparameter\c!sidespaceafter}%
- \global\floatsideskip \rootfloatparameter\c!margin
- \global\sidefloatleftshift \floatparameter\c!leftmargindistance
- \global\sidefloatrightshift\floatparameter\c!rightmargindistance
- \global\noftopfloats \rootfloatparameter\c!ntop\relax
- \global\nofbotfloats \rootfloatparameter\c!nbottom\relax
+ \strc_floats_calculate_skip\d_strc_floats_top {\rootfloatparameter\c!spacebefore }%
+ \strc_floats_calculate_skip\d_strc_floats_bottom {\rootfloatparameter\c!spaceafter }%
+ \strc_floats_calculate_skip\d_page_sides_topskip {\rootfloatparameter\c!sidespacebefore}%
+ \strc_floats_calculate_skip\d_page_sides_bottomskip{\rootfloatparameter\c!sidespaceafter }%
+ \global\d_strc_floats_margin \rootfloatparameter\c!margin
+ \global\d_page_sided_leftshift \floatparameter \c!leftmargindistance
+ \global\d_page_sided_rightshift\floatparameter \c!rightmargindistance
+ \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
+ \global\c_page_floats_n_of_bottom \rootfloatparameter\c!nbottom
\endgroup}
-% better to execute the last skip outside the \insert,
-% moreover with a full flush below the baseline.
-
-% \def\betweenfloatblanko% assumes that spaceafter is present
-% {\bgroup
-% \setbox0\vbox{\strut\blank[\rootfloatparameter\c!spacebefore]\strut}%
-% \setbox2\vbox{\strut\blank[\rootfloatparameter\c!spaceafter]\strut}%
-% \ifdim\ht0>\ht2
-% \blank[-\rootfloatparameter\c!spaceafter,\rootfloatparameter\c!spacebefore]%
-% \fi
-% \egroup}
-
-\unexpanded\def\betweenfloatblanko% assumes that spaceafter is present
+\unexpanded\def\betweenfloatblanko % assumes that spaceafter is present
{\blank[\rootfloatparameter\c!spacebefore]} % or v!back,....
-\def\doplacefloatbox % used elsewhere
+\unexpanded\def\doplacefloatbox % used elsewhere
{%\forgetall % NO
\whitespace
\blank[\rootfloatparameter\c!spacebefore]
@@ -1096,20 +1085,16 @@
\def\page_one_place_float_text % this macro should be defined elsewhere
{%\checkwaitingfloats\floatlocationmethod
% todo: check if #1 is indeed \floatlocation or maybe more
- \global\floattextwidth\hsize
- \global\floatwidth\wd\floatbox
- \global\floatheight\ht\floatbox % forget about the depth
- \global\advance\floattextwidth -\floatwidth
- \global\advance\floattextwidth -\rootfloatparameter\c!margin\relax
+ \global\floatwidth \wd\floatbox
+ \global\floatheight \ht\floatbox % forget about the depth
+ \global\floattextwidth\dimexpr\hsize-\floatwidth-\rootfloatparameter\c!margin\relax
\edef\floatlocation{\floatlocationmethod}% to be sure .. why
\doifinsetelse\v!tall\floatlocationmethod
- {\floattextheight\pagegoal
- \advance\floattextheight -\pagetotal
- \advance\floattextheight -\bigskipamount % ugly
+ {\floattextheight\dimexpr\pagegoal-\pagetotal-\bigskipamount\relax % ugly, this bigskip
\ifdim\floattextheight>\textheight
\floattextheight\textheight
\fi
- \boxmaxdepth\zeropoint \relax % added
+ \boxmaxdepth\zeropoint\relax % added
\ifdim\floattextheight<\floatheight
\floattextheight\floatheight
\fi
@@ -1126,11 +1111,7 @@
\def\strc_floats_stop_text_indeed
{\egroup
\doifnotinset\v!tall\floatlocation
- {\ifdim\ht\floattext<\floatheight
- \floattextheight\floatheight
- \else
- \floattextheight\ht\floattext
- \fi}%
+ {\floattextheight\ifdim\ht\floattext<\floatheight\floatheight\else\ht\floattext\fi}%
\setbox\floatbox\vbox to \floattextheight
{\hsize\floatwidth
\doifinsetelse\v!both\floatlocation
@@ -1171,7 +1152,7 @@
\dostoptagged
\blank[\rootfloatparameter\c!spaceafter]%
\strc_floats_end_text_group
- \doinsertfloatinfo}
+ \page_floats_report_total}
\def\borderedfloatbox
{\begingroup
@@ -1197,11 +1178,16 @@
\alignedline{\floatparameter\c!location}\v!middle}
\newdimen\d_strc_floats_content
+\newdimen\d_strc_float_temp_height
+\newdimen\d_strc_float_temp_width
+
+\def\captionminwidth {15\bodyfontsize}
+\def\captionovershoot{2\emwidth}
\def\strc_floats_set_page_variant
{\bgroup
\strc_floats_set_local_hsize
- \ifcase\floatrotation\else
+ \ifcase\c_strc_floats_rotation\else
\swapdimens\hsize\vsize
\fi
\forgetall
@@ -1211,10 +1197,12 @@
\let\strc_floats_align_content\strc_floats_align_content_indeed
\let\strc_floats_align_caption\strc_floats_align_caption_indeed
\strc_floats_check_caption_content
-\d_strc_floats_content\wd\b_strc_floats_content
+ \d_strc_floats_content\wd\b_strc_floats_content
\ifcase\floatparameter\c!method
- \or % automatic
- \ifnofloatcaption
+ % nothing
+ \or
+ % automatic
+ \ifnofloatcaption
\strc_floats_prepare_no_caption
%\page_backgrounds_add_local_to_box\floatbox % was \doglobal but not needed
\else
@@ -1227,21 +1215,20 @@
%\page_backgrounds_add_local_to_box\b_strc_floats_caption
\strc_floats_build_box
\fi
- \or % semi automatic
- \or % manual
+ \or
+ % semi automatic
+ \or
+ % manual
\fi
- \ifcase\floatrotation
+ \ifcase\c_strc_floats_rotation
\doifnotinset\v!margin\floatlocation % brr, really needed! see wm
{\postcenterfloatbox\d_strc_floats_content}%
\else
\global\setbox\floatbox\vbox
- {\rotate[\c!rotation=\number\floatrotation]{\box\floatbox}}%
+ {\rotate[\c!rotation=\number\c_strc_floats_rotation]{\box\floatbox}}%
\fi
\egroup}
-\def\captionminwidth {15\bodyfontsize}
-\def\captionovershoot {2em}
-
\def\strc_floats_prepare_no_caption
{\global\setbox\floatbox\vbox % watch out if wd is larger than hsize
{\ifinsidecolumns\ifdim\wd\b_strc_floats_content>\hsize
@@ -1250,86 +1237,54 @@
\strc_floats_align_content{\copy\b_strc_floats_content}}}
\def\strc_floats_prepare_page_caption
- {\dostarttagged\t!floatcaption\empty
- \doifinsetelse{\floatcaptionparameter\c!location}{\v!top,\v!bottom}
- {\doifinsetelse{\floatcaptionparameter\c!width}{\v!fit,\v!max}
- {\doifelse{\floatcaptionparameter\c!minwidth}\v!fit
- {\doifelse{\floatcaptionparameter\c!width}\v!max
- {\dopreparestackcaptionmax}
- {\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content % wider caption
- \doifelse{\floatcaptionparameter\c!width}\v!fit
- {\dopreparestackcaptionaut}
- {\dopreparestackcaptionwid}%
- \else
- \dopreparestackcaptionmin
- \fi}}
- {\dopreparestackcaptionfix}}%
- {\dopreparesidewidthcaption}}% new, special effects (see icare)
- {\doifinsetelse{\floatcaptionparameter\c!width}{\v!fit,\v!max}
- {\dopreparesideautocaption}
- {\dopreparesidewidthcaption}}%
- \dostoptagged}
-
-% makes sense if we preexpand more variables
-
-% \def\strc_floats_prepare_page_caption
-% {\edef\p_caption_location{\floatcaptionparameter\c!location}%
-% \edef\p_caption_width {\floatcaptionparameter\c!width}%
-% \edef\p_caption_minwidth{\floatcaptionparameter\c!minwidth}%
-% \edef\p_caption_align {\floatcaptionparameter\c!align}%
-% \dostarttagged\t!floatcaption\empty
-% \ifx\p_caption_location\v!top
-% \strc_floats_prepare_page_caption_top_bottom
-% \else\ifx\p_caption_location\v!bottom
-% \strc_floats_prepare_page_caption_top_bottom
-% \else\ifx\p_caption_width\v!fit
-% \dopreparesideautocaption
-% \else\ifx\p_caption_width\v!max
-% \dopreparesideautocaption
-% \else
-% \dopreparesidewidthcaption
-% \fi\fi\fi\fi
-% \dostoptagged}
+ {\edef\p_strc_floats_caption_location{\floatcaptionparameter\c!location}%
+ \edef\p_strc_floats_caption_width {\floatcaptionparameter\c!width}%
+ \edef\p_strc_floats_caption_minwidth{\floatcaptionparameter\c!minwidth}%
+ \edef\p_strc_floats_caption_align {\floatcaptionparameter\c!align}%
+ \dostarttagged\t!floatcaption\empty
+ \ifx\p_strc_floats_caption_location\v!top
+ \strc_floats_prepare_page_caption_top_bottom
+ \else\ifx\p_strc_floats_caption_location\v!bottom
+ \strc_floats_prepare_page_caption_top_bottom
+ \else\ifx\p_strc_floats_caption_width\v!fit
+ \strc_floats_prepare_side_auto_caption
+ \else\ifx\p_strc_floats_caption_width\v!max
+ \strc_floats_prepare_side_auto_caption
+ \else
+ \strc_floats_prepare_side_width_caption
+ \fi\fi\fi\fi
+ \dostoptagged}
-% \def\strc_floats_prepare_page_caption_top_bottom
-% {\ifx\p_caption_width\v!fit
-% \strc_floats_prepare_page_caption_top_bottom_fit_max
-% \else\ifx\p_caption_width\v!max
-% \strc_floats_prepare_page_caption_top_bottom_fit_max
-% \else
-% \dopreparesidewidthcaption % new, special effects (see icare)
-% \fi\fi}
-
-% \def\strc_floats_prepare_page_caption_top_bottom_fit_max
-% {\ifx\p_caption_minwidth\v!fit
-% \ifx\p_caption_width\v!max
-% \dopreparestackcaptionmax
-% \else\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content % wider caption
-% \ifx\p_caption_width\v!fit
-% \dopreparestackcaptionaut
-% \else
-% \dopreparestackcaptionwid
-% \fi
-% \else
-% \dopreparestackcaptionmin
-% \fi\fi
-% \else
-% \dopreparestackcaptionfix
-% \fi}
-
-% \def\dopreparestackcaptionmin
-% {\setbox\b_strc_floats_caption\vbox
-% {\strc_floats_caption_set_align
-% \hsize\wd\b_strc_floats_content
-% \ifx\p_caption_align\empty
-% \raggedcenter % on purpose overloads align !
-% \fi
-% \strc_floats_make_complete_caption}}
+\def\strc_floats_prepare_page_caption_top_bottom
+ {\ifx\p_strc_floats_caption_width\v!fit
+ \strc_floats_prepare_page_caption_top_bottom_fit_max
+ \else\ifx\p_strc_floats_caption_width\v!max
+ \strc_floats_prepare_page_caption_top_bottom_fit_max
+ \else
+ \strc_floats_prepare_side_width_caption % new, special effects (see icare)
+ \fi\fi}
+
+\def\strc_floats_prepare_page_caption_top_bottom_fit_max
+ {\ifx\p_strc_floats_caption_minwidth\v!fit
+ \ifx\p_strc_floats_caption_width\v!max
+ \strc_floats_prepare_stack_caption_max
+ \else\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content % wider caption
+ \ifx\p_strc_floats_caption_width\v!fit
+ \strc_floats_prepare_stack_caption_auto
+ \else
+ \strc_floats_prepare_stack_caption_width
+ \fi
+ \else
+ \strc_floats_prepare_stack_caption_min
+ \fi\fi
+ \else
+ \strc_floats_prepare_stack_caption_fixed
+ \fi}
\def\strc_floats_caption_set_align
- {\normalexpanded{\setupalign[\v!reset,\floatcaptionparameter\c!align]}}
+ {\normalexpanded{\setupalign[\v!reset,\p_strc_floats_caption_align]}}
-\def\dopreparesideautocaption
+\def\strc_floats_prepare_side_auto_caption
{\scratchdimen\dimexpr\hsize-\wd\b_strc_floats_content-\floatparameter\c!margin\relax
\ifdim\wd\b_strc_floats_caption>\scratchdimen
\ifdim\wd\b_strc_floats_caption<1.3\scratchdimen
@@ -1341,41 +1296,43 @@
\hsize\scratchdimen
\strc_floats_make_complete_caption}}
-\def\dopreparesidewidthcaption
+\def\strc_floats_prepare_side_width_caption
{\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
\hsize\floatcaptionparameter\c!width
\strc_floats_make_complete_caption}}
-\def\dopreparestackcaptionfix
+\def\strc_floats_prepare_stack_caption_fixed
{\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
\hsize\floatcaptionparameter\c!minwidth % special effects
\strc_floats_make_complete_caption}}
-\def\dopreparestackcaptionmax
+\def\strc_floats_prepare_stack_caption_max
{\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
\hsize\wd\b_strc_floats_content
\strc_floats_make_complete_caption}}
-\def\dopreparestackcaptionwid
+\def\strc_floats_prepare_stack_caption_width
{\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
\hsize\floatcaptionparameter\c!width
\strc_floats_make_complete_caption}}
-\def\dopreparestackcaptionmin
+\def\strc_floats_prepare_stack_caption_min
{\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
\hsize\wd\b_strc_floats_content
- \doifnothing{\floatcaptionparameter\c!align}\raggedcenter % on purpose overloads align !
+ \ifx\p_strc_floats_caption_align\empty
+ \raggedcenter % on purpose overloads align !
+ \fi
\strc_floats_make_complete_caption}}
-\def\dopreparestackcaptionaut
- {\doifsomething{\floatcaptionparameter\c!align}
- {\doifnotinset\v!middle{\floatcaptionparameter\c!align}%
- {\let\captionovershoot\!!zeropoint}}%
+\def\strc_floats_prepare_stack_caption_auto
+ {\ifx\p_strc_floats_caption_align\empty \else
+ \doifnotinset\v!middle\p_strc_floats_caption_align{\let\captionovershoot\!!zeropoint}%
+ \fi
\edef\captionhsize{\the\wd\b_strc_floats_content}%
\ifdim\captionhsize>\hsize
% float is wider than \hsize
@@ -1388,8 +1345,7 @@
\ifdim\ht\scratchbox>\lineheight % more lines
\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
- \hsize\captionhsize
- \advance\hsize -\captionovershoot\relax
+ \hsize\dimexpr\captionhsize-\captionovershoot\relax
\ifdim\hsize<\captionminwidth\relax
\hsize\captionhsize
\fi
@@ -1408,175 +1364,192 @@
\fi
\setbox\scratchbox\vbox % test with overshoot
{\settrialtypesetting
- \scratchdimen\captionhsize
- \advance\scratchdimen \captionovershoot
- \advance\scratchdimen 3em % an average word length
- \ifdim\scratchdimen<\hsize \hsize\scratchdimen \fi
+ \scratchdimen\dimexpr\captionhsize+\captionovershoot+3\emwidth\relax % 3em is an average word length
+ \ifdim\scratchdimen<\hsize
+ \hsize\scratchdimen
+ \fi
\notesenabledfalse
\strc_floats_make_complete_caption}%
\ifdim\ht\scratchbox>\lineheight
% at least an average word longer than a line
\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
- \scratchdimen\captionhsize
- \advance\scratchdimen \captionovershoot
- \ifdim\scratchdimen<\hsize \hsize\scratchdimen \fi
+ \scratchdimen\dimexpr\captionhsize+\captionovershoot\relax
+ \ifdim\scratchdimen<\hsize
+ \hsize\scratchdimen
+ \fi
+ \strc_floats_make_complete_caption}%
+ \else\ifx\p_strc_floats_caption_align\empty
+ \setbox\b_strc_floats_caption\vbox
+ {\strc_floats_caption_set_align
+ \hsize\captionhsize
+ \raggedcenter % overloads
\strc_floats_make_complete_caption}%
\else
- % just over a line, don't use an overshoot % % % todo: outer/inner and such
- \doifcommonelse{\floatcaptionparameter\c!align}{\v!left,\v!right,\v!flushleft,\v!flushright}
- {\setbox\b_strc_floats_caption\vbox
- {\strc_floats_caption_set_align
- \hsize\captionhsize
- % strange : \raggedcenter
- \strc_floats_make_complete_caption}}
- {% nicer
- \setbox\b_strc_floats_caption\vbox
- {\strc_floats_caption_set_align
- \hsize\captionhsize
- \doifnothing{\floatcaptionparameter\c!align}\raggedcenter% overloads
- \strc_floats_make_complete_caption}}%
- \fi
+ \setbox\b_strc_floats_caption\vbox
+ {\strc_floats_caption_set_align
+ \hsize\captionhsize
+ \strc_floats_make_complete_caption}%
+ \fi\fi
\fi}
-\newdimen\tempfloatheight
-\newdimen\tempfloatwidth
+\def\strc_floats_between_stack
+ {\endgraf
+ \nointerlineskip
+ \floatcaptionparameter\c!inbetween
+ \endgraf}
-\def\dofloatboxbetweenstack
- {\endgraf\nointerlineskip\floatcaptionparameter\c!inbetween\endgraf}
-
-\def\dofloatboxdefaultbuilder % done
+\def\strc_floats_build_box_default % done
{\strc_floats_align_content{\box\b_strc_floats_content}}
-\def\dofloatboxnextrightbuilder#1%
- {\ifparfloat \hbox \else \expandafter \strc_floats_align_content \fi
- {\tempfloatheight\ht\b_strc_floats_content
+\def\strc_floats_build_box_next_right#1%
+ {\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
+ {\d_strc_float_temp_height\ht\b_strc_floats_content
\box\b_strc_floats_content
- \normalexpanded{\noexpand\doifnotinset{\v!hang}{\floatcaptionparameter\c!location}}{\dotfskip{\floatcaptionparameter\c!distance}}%
- \vbox to\tempfloatheight{#1}}}
-
-\def\dofloatboxnextleftbuilder#1%
- {\ifparfloat \hbox \else \expandafter \strc_floats_align_content \fi
- {\tempfloatheight\ht\b_strc_floats_content
- \vbox to\tempfloatheight{#1}%
- \normalexpanded{\noexpand\doifnotinset{\v!hang}{\floatcaptionparameter\c!location}}{\dotfskip{\floatcaptionparameter\c!distance}}%
+ \doifnotinset\v!hang{\floatcaptionparameter\c!location}
+ {\dotfskip{\floatcaptionparameter\c!distance}}%
+ \vbox to\d_strc_float_temp_height{#1}}}
+
+\def\strc_floats_build_box_next_left#1%
+ {\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
+ {\d_strc_float_temp_height\ht\b_strc_floats_content
+ \vbox to\d_strc_float_temp_height{#1}%
+ \doifnotinset\v!hang{\floatcaptionparameter\c!location}
+ {\dotfskip{\floatcaptionparameter\c!distance}}%
\box\b_strc_floats_content}}
-\def\dofloatboxnextouterbuilder
- {\doifrightpagefloatelse\dofloatboxnextrightbuilder\dofloatboxnextleftbuilder}
+\def\strc_floats_build_box_next_outer
+ {\doifrightpagefloatelse\strc_floats_build_box_next_right\strc_floats_build_box_next_left}
-\def\dofloatboxnextinnerbuilder
- {\doifrightpagefloatelse\dofloatboxnextleftbuilder\dofloatboxnextrightbuilder}
+\def\strc_floats_build_box_next_inner
+ {\doifrightpagefloatelse\strc_floats_build_box_next_left\strc_floats_build_box_next_right}
-\def\dofloatboxnextrighthangbuilder#1%
- {\ifparfloat \hbox \else \expandafter \strc_floats_align_content \fi
- {\tempfloatheight\ht\b_strc_floats_content
+\def\strc_floats_build_box_next_right_hang#1%
+ {\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
+ {\d_strc_float_temp_height\ht\b_strc_floats_content
\box\b_strc_floats_content
- \vbox to\tempfloatheight{#1}}}
+ \vbox to\d_strc_float_temp_height{#1}}}
-\def\dofloatboxnextlefthangbuilder#1%
- {\ifparfloat \hbox \else \expandafter \strc_floats_align_content \fi
- {\tempfloatheight\ht\b_strc_floats_content
- \vbox to\tempfloatheight{#1}%
+\def\strc_floats_build_box_next_left_hang#1%
+ {\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
+ {\d_strc_float_temp_height\ht\b_strc_floats_content
+ \vbox to\d_strc_float_temp_height{#1}%
\box\b_strc_floats_content}}
-\def\dodofloatboxnextrightmarginbuilder#1#2%
- {\ifparfloat
+\def\strc_floats_build_box_next_right_margin_indeed#1#2%
+ {\ifconditional\c_strc_floats_par_float
\hbox\bgroup
- \tempfloatheight\ht\b_strc_floats_content
+ \d_strc_float_temp_height\ht\b_strc_floats_content
\box\b_strc_floats_content
- \hsmash{\hskip#1\vbox to\tempfloatheight{#2}}%
+ \hsmash{\hskip#1\vbox to\d_strc_float_temp_height{#2}}%
\egroup
\else
\begingroup
- \tempfloatheight\ht\b_strc_floats_content
- \everyrightofalignedline{\hsmash{\hskip#1\vbox to\tempfloatheight{#2}}}%
+ \d_strc_float_temp_height\ht\b_strc_floats_content
+ \everyrightofalignedline{\hsmash{\hskip#1\vbox to\d_strc_float_temp_height{#2}}}%
\strc_floats_align_content{\box\b_strc_floats_content}%
\endgroup
\fi}
-\def\dodofloatboxnextleftmarginbuilder#1#2%
- {\ifparfloat
+\def\strc_floats_build_box_next_left_margin_indeed#1#2%
+ {\ifconditional\c_strc_floats_par_float
\hbox\bgroup
- \tempfloatheight\ht\b_strc_floats_content
- \hsmash{\hskip-\dimexpr#1+\wd\b_strc_floats_caption\relax\vbox to\tempfloatheight{#2}}%
+ \d_strc_float_temp_height\ht\b_strc_floats_content
+ \hsmash{\hskip-\dimexpr#1+\wd\b_strc_floats_caption\relax\vbox to\d_strc_float_temp_height{#2}}%
\box\b_strc_floats_content
\egroup
\else
\begingroup
- \tempfloatheight\ht\b_strc_floats_content
- \everyleftofalignedline{\hsmash{\hskip-\dimexpr#1+\wd\b_strc_floats_caption\relax\vbox to\tempfloatheight{#2}}}%
+ \d_strc_float_temp_height\ht\b_strc_floats_content
+ \everyleftofalignedline{\hsmash{\hskip-\dimexpr#1+\wd\b_strc_floats_caption\relax\vbox to\d_strc_float_temp_height{#2}}}%
\strc_floats_align_content{\box\b_strc_floats_content}%
\endgroup
\fi}
-\def\dofloatboxnextrightmarginbuilder{\dodofloatboxnextrightmarginbuilder\rightmargindistance}
-\def\dofloatboxnextleftmarginbuilder {\dodofloatboxnextleftmarginbuilder \leftmargindistance }
+\def\strc_floats_build_box_next_right_margin
+ {\strc_floats_build_box_next_right_margin_indeed\rightmargindistance}
+
+\def\strc_floats_build_box_next_left_margin
+ {\strc_floats_build_box_next_left_margin_indeed \leftmargindistance }
-\def\dofloatboxnextoutermarginbuilder
+\def\strc_floats_build_box_next_outer_margin
{\doifrightpagefloatelse
- {\dodofloatboxnextrightmarginbuilder\rightmargindistance}
- {\dodofloatboxnextleftmarginbuilder \rightmargindistance}}
+ {\strc_floats_build_box_next_right_margin_indeed\rightmargindistance}
+ {\strc_floats_build_box_next_left_margin_indeed \rightmargindistance}}
-\def\dofloatboxnextinnermarginbuilder
+\def\strc_floats_build_box_next_inner_margin
{\doifrightpagefloatelse
- {\dodofloatboxnextleftmarginbuilder \leftmargindistance}
- {\dodofloatboxnextrightmarginbuilder\leftmargindistance}}
-
-\def\dofloatboxnextbuilder % beware, we first check on left/rightmargin because there can be left/right also
- {\let\next\dofloatboxnextleftbuilder
- \normalexpanded{\noexpand\processallactionsinset[\floatcaptionparameter\c!location]}
- [ \v!outermargin=>\let\next\dofloatboxnextoutermarginbuilder,
- \v!innermargin=>\let\next\dofloatboxnextinnermarginbuilder,
- \v!leftmargin=>\let\next\dofloatboxnextleftmarginbuilder,
- \v!rightmargin=>\let\next\dofloatboxnextrightmarginbuilder,
- \v!lefthanging=>\let\next\dofloatboxnextlefthangbuilder,
- \v!righthanging=>\let\next\dofloatboxnextrighthangbuilder,
- \v!outer=>\let\next\dofloatboxnextouterbuilder,
- \v!inner=>\let\next\dofloatboxnextinnerbuilder,
- \v!left=>\let\next\dofloatboxnextleftbuilder,
- \v!right=>\let\next\dofloatboxnextrightbuilder]%
+ {\strc_floats_build_box_next_left_margin_indeed \leftmargindistance}
+ {\strc_floats_build_box_next_right_margin_indeed\leftmargindistance}}
+
+\def\strc_floats_build_box_next % beware, we first check on left/rightmargin because there can be left/right also
+ {\let\next\strc_floats_build_box_next_left
+ \processallactionsinset[\floatcaptionparameter\c!location]
+ [ \v!outermargin=>\let\next\strc_floats_build_box_next_outer_margin,
+ \v!innermargin=>\let\next\strc_floats_build_box_next_inner_margin,
+ \v!leftmargin=>\let\next\strc_floats_build_box_next_left_margin,
+ \v!rightmargin=>\let\next\strc_floats_build_box_next_right_margin,
+ \v!lefthanging=>\let\next\strc_floats_build_box_next_left_hang,
+ \v!righthanging=>\let\next\strc_floats_build_box_next_right_hang,
+ \v!outer=>\let\next\strc_floats_build_box_next_outer,
+ \v!inner=>\let\next\strc_floats_build_box_next_inner,
+ \v!left=>\let\next\strc_floats_build_box_next_left,
+ \v!right=>\let\next\strc_floats_build_box_next_right]%
\next}
-\def\dofloatboxsidebuilder
- {\ifparfloat
- \let\next\dofloatboxhighbuilder
+\def\strc_floats_build_box_side
+ {\ifconditional\c_strc_floats_par_float
+ \let\next\strc_floats_build_box_high
\else
- \let\next\dofloatboxmiddlebuilder
- \expanded{\processallactionsinset[\floatcaptionparameter\c!location]}
- [ \v!low=>\let\next\dofloatboxlowbuilder,
- \v!middle=>\let\next\dofloatboxmiddlebuilder,
- \v!high=>\let\next\dofloatboxhighbuilder]%
+ \let\next\strc_floats_build_box_middle
+ \processallactionsinset[\floatcaptionparameter\c!location]
+ [ \v!low=>\let\next\strc_floats_build_box_low,
+ \v!middle=>\let\next\strc_floats_build_box_middle,
+ \v!high=>\let\next\strc_floats_build_box_high]%
\fi
\next}
-\def\doflushfloatleftcaptionhang
- {\hsmash{\llap{\box\b_strc_floats_caption\dotfskip{\floatcaptionparameter\c!distance}}}}
-
-\def\doflushfloatrightcaptionhang
+\def\strc_floats_flush_right_caption_hang
{\hsmash{\rlap{\dotfskip{\floatcaptionparameter\c!distance}\box\b_strc_floats_caption}}}
-\def\doflushfloatcaptionhang % expanded can go
- {\expanded{\doifinsetelse{\v!righthanging}{\floatcaptionparameter\c!location}}
- {\doflushfloatrightcaptionhang}
- {\expanded{\doifinsetelse{\v!lefthanging}{\floatcaptionparameter\c!location}}
- {\doflushfloatleftcaptionhang}
- {\expanded{\doifinsetelse{\v!hang}{\floatcaptionparameter\c!location}}
- {\expanded{\doifinsetelse{\v!outer}{\floatcaptionparameter\c!location}}
- {\doifrightpagefloatelse{\doflushfloatrightcaptionhang}{\doflushfloatleftcaptionhang}}
- {\expanded{\doifinsetelse{\v!right}{\floatcaptiondirectives}}
- {\doflushfloatrightcaptionhang}
- {\doflushfloatleftcaptionhang}}}
+\def\strc_floats_flush_left_caption_hang
+ {\hsmash{\llap{\box\b_strc_floats_caption\dotfskip{\floatcaptionparameter\c!distance}}}}
+
+% \def\strc_floats_flush_caption_hang % expanded can go
+% {\expanded{\doifinsetelse{\v!righthanging}{\floatcaptionparameter\c!location}}
+% {\strc_floats_flush_right_caption_hang}
+% {\expanded{\doifinsetelse{\v!lefthanging}{\floatcaptionparameter\c!location}}
+% {\strc_floats_flush_left_caption_hang}
+% {\expanded{\doifinsetelse{\v!hang}{\floatcaptionparameter\c!location}}
+% {\expanded{\doifinsetelse{\v!outer}{\floatcaptionparameter\c!location}}
+% {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
+% {\expanded{\doifinsetelse{\v!right}{\floatcaptiondirectives}}
+% {\strc_floats_flush_right_caption_hang}
+% {\strc_floats_flush_left_caption_hang}}}
+% {\box\b_strc_floats_caption}}}}
+
+\def\strc_floats_flush_caption_hang % expanded can go
+ {\edef\p_strc_floats_caption_location{\floatcaptionparameter\c!location}%
+ \doifinsetelse\v!righthanging\p_strc_floats_caption_location
+ {\strc_floats_flush_right_caption_hang}
+ {\doifinsetelse\v!lefthanging\p_strc_floats_caption_location
+ {\strc_floats_flush_left_caption_hang}
+ {\doifinsetelse\v!hang\p_strc_floats_caption_location
+ {\doifinsetelse\v!outer\p_strc_floats_caption_location
+ {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
+ {\doifinsetelse\v!right\floatcaptiondirectives
+ {\strc_floats_flush_right_caption_hang}
+ {\strc_floats_flush_left_caption_hang}}}
{\box\b_strc_floats_caption}}}}
-\def\dofloatboxhighbuilder
- {\dofloatboxnextbuilder{\dofloatboxbetweenstack\doflushfloatcaptionhang\vfill}}
+\def\strc_floats_build_box_high
+ {\strc_floats_build_box_next{\strc_floats_between_stack\strc_floats_flush_caption_hang\vfill}}
-\def\dofloatboxlowbuilder
- {\dofloatboxnextbuilder{\vfill\doflushfloatcaptionhang\dofloatboxbetweenstack}}
+\def\strc_floats_build_box_low
+ {\strc_floats_build_box_next{\vfill\strc_floats_flush_caption_hang\strc_floats_between_stack}}
-\def\dofloatboxmiddlebuilder
- {\dofloatboxnextbuilder{\vfill\box\b_strc_floats_caption\vfill}}
+\def\strc_floats_build_box_middle
+ {\strc_floats_build_box_next{\vfill\box\b_strc_floats_caption\vfill}}
% \definefloat
% [lefty][lefties][figure]
@@ -1594,125 +1567,136 @@
% \placelefty{}{} \input tufte \input tufte
% \stoptext
-\def\bothangfloat#1{\ruledvbox to \ht\b_strc_floats_content{#1\vss}}
-\def\tophangfloat#1{\ruledvbox to \ht\b_strc_floats_content{\vss#1}}
+\def\strc_floats_build_box_top_stack_normal_overlay
+ {\vbox to \ht\b_strc_floats_content{\vss\strc_floats_build_box_top_stack_normal_content}}
-\def\dofloatboxnormaltopstackbuilder
- {\expanded{\doifinset{\v!overlay}{\floatcaptionparameter\c!location}}\tophangfloat
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
- \hbox{\strc_floats_locate_side_float{\box\b_strc_floats_caption}}%
- \dofloatboxbetweenstack
- \hbox{\hbox{\box\b_strc_floats_content}}%
- \else
- \page_otr_command_set_float_hsize
- \hbox{\strc_floats_locate_text_float{\box\b_strc_floats_caption}}
- \dofloatboxbetweenstack
- \hbox{\strc_floats_align_content{\box\b_strc_floats_content}}%
- \fi}}
+\def\strc_floats_build_box_top_stack_normal_content
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
+ \hbox{\strc_floats_locate_side_float{\box\b_strc_floats_caption}}%
+ \strc_floats_between_stack
+ \hbox{\hbox{\box\b_strc_floats_content}}%
+ \else
+ \page_otr_command_set_float_hsize
+ \hbox{\strc_floats_locate_text_float{\box\b_strc_floats_caption}}
+ \strc_floats_between_stack
+ \hbox{\strc_floats_align_content{\box\b_strc_floats_content}}%
+ \fi}
-\def\dofloatboxnormalbotstackbuilder
- {\expanded{\doifinset{\v!overlay}{\floatcaptionparameter\c!location}}\bothangfloat
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
- \hbox{\hbox{\box\b_strc_floats_content}}%
- \dofloatboxbetweenstack
- \hbox{\strc_floats_locate_side_float{\box\b_strc_floats_caption}}%
- \else
- \page_otr_command_set_float_hsize
- \hbox{\strc_floats_align_content{\box\b_strc_floats_content}}%
- \dofloatboxbetweenstack
- \hbox{\strc_floats_locate_text_float{\box\b_strc_floats_caption}}%
- \fi}}
+\def\strc_floats_build_box_bottom_stack_normal_overlay
+ {\vbox to \ht\b_strc_floats_content{\strc_floats_build_box_bottom_stack_normal_content\vss}}
-\def\dofloatboxgridtopstackbuilder
+\def\strc_floats_build_box_bottom_stack_normal_content
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
+ \hbox{\hbox{\box\b_strc_floats_content}}%
+ \strc_floats_between_stack
+ \hbox{\strc_floats_locate_side_float{\box\b_strc_floats_caption}}%
+ \else
+ \page_otr_command_set_float_hsize
+ \hbox{\strc_floats_align_content{\box\b_strc_floats_content}}%
+ \strc_floats_between_stack
+ \hbox{\strc_floats_locate_text_float{\box\b_strc_floats_caption}}%
+ \fi}
+
+\def\strc_floats_build_box_top_stack_normal
+ {\doifinsetelse\v!overlay{\floatcaptionparameter\c!location}
+ \strc_floats_build_box_top_stack_normal_overlay
+ \strc_floats_build_box_top_stack_normal_content}
+
+\def\strc_floats_build_box_bottom_stack_normal
+ {\doifinset\v!overlay{\floatcaptionparameter\c!location}
+ \strc_floats_build_box_bottom_stack_normal_overlay
+ \strc_floats_build_box_bottom_stack_normal_content}
+
+\def\strc_floats_build_box_top_stack_grid
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
\strc_floats_locate_side_float{\box\b_strc_floats_caption}%
- \vss\dofloatboxbetweenstack
+ \vss\strc_floats_between_stack
\hbox{\box\b_strc_floats_content}%
\else
\page_otr_command_set_float_hsize
\strc_floats_locate_text_float{\box\b_strc_floats_caption}%
- \vss\dofloatboxbetweenstack
+ \vss\strc_floats_between_stack
\strc_floats_align_content{\box\b_strc_floats_content}%
\fi}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight{\unvbox\scratchbox}}
-\def\dofloatboxgridbotstackbuilder
+\def\strc_floats_build_box_bottom_stack_grid
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
\hbox{\box\b_strc_floats_content}%
- \vss\dofloatboxbetweenstack
+ \vss\strc_floats_between_stack
\strc_floats_locate_side_float{\box\b_strc_floats_caption}%
\else
\page_otr_command_set_float_hsize
\strc_floats_align_content{\box\b_strc_floats_content}%
- \vss\dofloatboxbetweenstack
+ \vss\strc_floats_between_stack
\strc_floats_locate_text_float{\box\b_strc_floats_caption}%
\fi}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight{\unvbox\scratchbox}}
-\def\dofloatboxstretchtopstackbuilder
+\def\strc_floats_build_box_top_stack_stretch
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
{\strc_floats_align_caption{\copy\b_strc_floats_caption}%
\strc_floats_align_content {\copy\b_strc_floats_content }}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
\strc_floats_locate_side_float{\box\b_strc_floats_caption}%
- \vss\dofloatboxbetweenstack\vss
+ \vss\strc_floats_between_stack\vss
\hbox{\box\b_strc_floats_content}%
\else
\page_otr_command_set_float_hsize
\strc_floats_locate_text_float{\box\b_strc_floats_caption}%
- \vss\dofloatboxbetweenstack\vss
+ \vss\strc_floats_between_stack\vss
\strc_floats_align_content{\box\b_strc_floats_content}%
\fi}}
-\def\dofloatboxstretchbotstackbuilder
+\def\strc_floats_build_box_bottom_stack_stretch
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
{\strc_floats_align_content{\copy\b_strc_floats_content }%
\strc_floats_align_caption{\copy\b_strc_floats_caption}}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight
- {\tempfloatwidth\wd\b_strc_floats_content
- \ifparfloat
+ {\d_strc_float_temp_width\wd\b_strc_floats_content
+ \ifconditional\c_strc_floats_par_float
\hbox{\box\b_strc_floats_content}%
- \vss\dofloatboxbetweenstack\vss
+ \vss\strc_floats_between_stack\vss
\strc_floats_locate_side_float{\box\b_strc_floats_caption}
\else
\page_otr_command_set_float_hsize
\strc_floats_align_content{\box\b_strc_floats_content}%
- \vss\dofloatboxbetweenstack\vss
+ \vss\strc_floats_between_stack\vss
\strc_floats_locate_text_float{\box\b_strc_floats_caption}%
\fi}}
-\def\dofloatboxtopbuilder
- {\let\next\dofloatboxnormaltopstackbuilder
- \expanded{\processfirstactioninset[\floatcaptionparameter\c!location]}
- [ \v!grid=>\let\next\dofloatboxgridstackbuilder,
- \v!stretch=>\let\next\dofloatboxstretchstackbuilder]%
+\def\strc_floats_build_box_top
+ {\let\next\strc_floats_build_box_top_stack_normal
+ \processfirstactioninset[\floatcaptionparameter\c!location]
+ [ \v!grid=>\let\next\strc_floats_build_box_top_stack_grid,
+ \v!stretch=>\let\next\strc_floats_build_box_top_stack_stretch]%
\next}
-\def\dofloatboxbottombuilder
- {\let\next\dofloatboxnormalbotstackbuilder
- \expanded{\processfirstactioninset[\floatcaptionparameter\c!location]}
- [ \v!grid=>\let\next\dofloatboxgridstackbuilder,
- \v!stretch=>\let\next\dofloatboxstretchstackbuilder]%
+\def\strc_floats_build_box_bottom
+ {\let\next\strc_floats_build_box_bottom_stack_normal
+ \processfirstactioninset[\floatcaptionparameter\c!location]
+ [ \v!grid=>\let\next\strc_floats_build_box_bottom_stack_grid,
+ \v!stretch=>\let\next\strc_floats_build_box_bottom_stack_stretch]%
\next}
-\def\relocatecaptionright#1{\strc_floats_align_caption{\hbox to \tempfloatwidth{\hss#1}}}
-\def\relocatecaptionleft #1{\strc_floats_align_caption{\hbox to \tempfloatwidth{#1\hss}}}
+\def\strc_floats_relocate_caption_right#1{\strc_floats_align_caption{\hbox to \d_strc_float_temp_width{\hss#1}}}
+\def\strc_floats_relocate_caption_left #1{\strc_floats_align_caption{\hbox to \d_strc_float_temp_width{#1\hss}}}
\unexpanded\def\installfloatboxbuilder#1#2{\setvalue{\??floatbuilder#1}{#2}}
@@ -1721,31 +1705,32 @@
{\strc_floats_set_local_hsize
\forgetall
\let\floatcaptionarrangement\s!default
- \def\docommand##1%
- {\doifdefined{\??floatbuilder##1}{\def\floatcaptionarrangement{##1}\quitcommalist}}%
- \processcommacommand[\floatcaptionparameter\c!location]\docommand
+ \processcommacommand[\floatcaptionparameter\c!location]\strc_floats_build_box_step
\executeifdefined{\??floatbuilder\floatcaptionarrangement}{\getvalue{\??floatbuilder\s!default}}}}
+\def\strc_floats_build_box_step#1%
+ {\doifdefined{\??floatbuilder#1}{\def\floatcaptionarrangement{#1}\quitcommalist}}
+
\def\strc_floats_locate_text_float
{\let\next\strc_floats_align_caption
- \expanded{\processallactionsinset[\floatcaptionparameter\c!location]}
- [ \v!left=>\let\next\relocatecaptionleft,
- \v!right=>\let\next\relocatecaptionright,
- \v!inner=>\doifrightpagefloatelse{\let\next\relocatecaptionleft }{\let\next\relocatecaptionright},
- \v!outer=>\doifrightpagefloatelse{\let\next\relocatecaptionright}{\let\next\relocatecaptionleft }]%
+ \processallactionsinset[\floatcaptionparameter\c!location]
+ [ \v!left=>\let\next\strc_floats_relocate_caption_left,
+ \v!right=>\let\next\strc_floats_relocate_caption_right,
+ \v!inner=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_left }{\let\next\strc_floats_relocate_caption_right},
+ \v!outer=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_right}{\let\next\strc_floats_relocate_caption_left }]%
\next}
-\installfloatboxbuilder \v!none \dofloatboxdefaultbuilder
-\installfloatboxbuilder \s!default \dofloatboxdefaultbuilder
-\installfloatboxbuilder \v!high \dofloatboxhighbuilder
-\installfloatboxbuilder \v!low \dofloatboxlowbuilder
-\installfloatboxbuilder \v!middle \dofloatboxmiddlebuilder
+\installfloatboxbuilder \v!none \strc_floats_build_box_default
+\installfloatboxbuilder \s!default \strc_floats_build_box_default
+\installfloatboxbuilder \v!high \strc_floats_build_box_high
+\installfloatboxbuilder \v!low \strc_floats_build_box_low
+\installfloatboxbuilder \v!middle \strc_floats_build_box_middle
-\installfloatboxbuilder \v!left \dofloatboxsidebuilder
-\installfloatboxbuilder \v!right \dofloatboxsidebuilder
+\installfloatboxbuilder \v!left \strc_floats_build_box_side
+\installfloatboxbuilder \v!right \strc_floats_build_box_side
-\installfloatboxbuilder \v!top \dofloatboxtopbuilder
-\installfloatboxbuilder \v!bottom \dofloatboxbottombuilder
+\installfloatboxbuilder \v!top \strc_floats_build_box_top
+\installfloatboxbuilder \v!bottom \strc_floats_build_box_bottom
% \setuplayout[grid=yes] \showgrid \setupcaptions[style=smallbodyfont,location=grid,inbetween=]
%
@@ -1756,6 +1741,8 @@
% test \placefigure{\input zapf\relax}{\externalfigure[cow.pdf][frame=on,grid=depth]} test \page
% \stoptext
+% This might move to page-flt:
+
\newif\ifpostponecolumnfloats \postponecolumnfloatsfalse % don't change
\setnewconstant\postcenterfloatmethod\plusone
@@ -1774,9 +1761,9 @@
\global\setbox\floatbox\hbox to \scratchdimen
% {\hfill\box\floatbox\hfill}} % no \hss, goes wrong in columns !
% {\hss \box\floatbox\hss }} % with \hss, otherwise goes wrong in colset
- {\ifglobalcenterfloatbox
+ {\ifconditional\c_page_floats_center_box_global
\donetrue
- \else\iflocalcenterfloatbox
+ \else\ifconditional\c_page_floats_center_box_local
\donetrue
\else
\donefalse
@@ -1793,7 +1780,6 @@
\forgetall
\postponenotes
\dontcomplain
- %\showcomposition
\setbox\b_strc_floats_content\vbox{\borderedfloatbox}%
%\page_backgrounds_add_local_to_box\b_strc_floats_content
\ifnofloatcaption
@@ -1808,33 +1794,106 @@
\fi
\egroup}
+% \def\strc_floats_prepare_side_caption
+% {\dostarttagged\t!floatcaption\empty
+% \edef\p_strc_floats_caption_align{\floatcaptionparameter\c!align}%
+% \doifelse{\floatcaptionparameter\c!width}\v!max
+% {\setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}}%
+% {\doifelse{\floatcaptionparameter\c!width}\v!fit
+% {\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content\relax
+% \setbox\b_strc_floats_caption\vbox
+% {\forgetall % needed?
+% \hsize\wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\hbox to \wd\b_strc_floats_content
+% {\hss\hbox{\strc_floats_make_complete_caption}\hss}%
+% \fi}
+% {\setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\floatcaptionparameter\c!width % \wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}}}%
+% \dostoptagged}
+
\def\strc_floats_prepare_side_caption
{\dostarttagged\t!floatcaption\empty
- \doifelse{\floatcaptionparameter\c!width}\v!max
- {\setbox\b_strc_floats_caption\vbox
- {\strc_floats_caption_set_align
- \hsize\wd\b_strc_floats_content
- \strc_floats_make_complete_caption}}%
- {\doifelse{\floatcaptionparameter\c!width}\v!fit
- {\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content\relax
- \setbox\b_strc_floats_caption\vbox
- {\forgetall % needed?
- \hsize\wd\b_strc_floats_content
- \strc_floats_make_complete_caption}%
- \else
- \setbox\b_strc_floats_caption\hbox to \wd\b_strc_floats_content
- {\hss\hbox{\strc_floats_make_complete_caption}\hss}%
- \fi}
- {\setbox\b_strc_floats_caption\vbox
- {\strc_floats_caption_set_align
- \hsize\floatcaptionparameter\c!width % \wd\b_strc_floats_content
- \strc_floats_make_complete_caption}}}%
+ \edef\p_strc_floats_caption_width{\floatcaptionparameter\c!width}%
+ \edef\p_strc_floats_caption_align{\floatcaptionparameter\c!align}%
+ \ifx\p_strc_floats_caption_width\v!max
+ \strc_floats_prepare_side_caption_max
+ \else\ifx\p_strc_floats_caption_width\v!fit
+ \strc_floats_prepare_side_caption_fit
+ \else
+ \strc_floats_prepare_side_caption_width
+ \fi\fi
\dostoptagged}
+% these could be \??floatpreparesidecaption
+
+\def\strc_floats_prepare_side_caption_max
+ {\setbox\b_strc_floats_caption\vbox
+ {\strc_floats_caption_set_align
+ \hsize\wd\b_strc_floats_content
+ \strc_floats_make_complete_caption}}
+
+\def\strc_floats_prepare_side_caption_fit
+ {\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content\relax
+ \setbox\b_strc_floats_caption\vbox
+ {\forgetall % needed?
+ \hsize\wd\b_strc_floats_content
+ \strc_floats_make_complete_caption}%
+ \else
+ \setbox\b_strc_floats_caption\hbox to \wd\b_strc_floats_content
+ {\hss\hbox{\strc_floats_make_complete_caption}\hss}%
+ \fi}
+
+\def\strc_floats_prepare_side_caption_width
+ {\setbox\b_strc_floats_caption\vbox
+ {\strc_floats_caption_set_align
+ \hsize\p_strc_floats_caption_width % \wd\b_strc_floats_content
+ \strc_floats_make_complete_caption}}
+
+% % maybe (but then also prepare_page that way):
+%
+% \installcorenamespace{floatpreparesidecaption}
+%
+% \def\strc_floats_prepare_side_caption
+% {\dostarttagged\t!floatcaption\empty
+% \edef\p_strc_floats_caption_width{\floatcaptionparameter\c!width}%
+% \edef\p_strc_floats_caption_align{\floatcaptionparameter\c!align}%
+% \expandnamespacemacro\??floatpreparesidecaption\p_strc_floats_caption_width\s!unknown
+% \dostoptagged}
+%
+% \setvalue{\??floatpreparesidecaption\v!max}%
+% {\setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}}
+%
+% \setvalue{\??floatpreparesidecaption\v!fit}%
+% {\ifdim\wd\b_strc_floats_caption>\wd\b_strc_floats_content\relax
+% \setbox\b_strc_floats_caption\vbox
+% {\forgetall % needed?
+% \hsize\wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\hbox to \wd\b_strc_floats_content
+% {\hss\hbox{\strc_floats_make_complete_caption}\hss}%
+% \fi}
+%
+% \setvalue{\??floatpreparesidecaption\s!unknown}%
+% {\setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\p_strc_floats_caption_width % \wd\b_strc_floats_content
+% \strc_floats_make_complete_caption}}
+
\def\strc_floats_locate_side_float#1%
{\begingroup
\alignstrutmode\zerocount
- \hsize\tempfloatwidth \forgetall
+ \hsize\d_strc_float_temp_width \forgetall
\alignedline{\floatparameter\c!location}\v!middle{#1}%
\endgroup}
@@ -1843,13 +1902,11 @@
\let\strc_floats_align_caption\relax
\strc_floats_build_box}
-\newif\ifparfloat
-
\def\strc_floats_set_box % todo : \global\setbox, currently messy
{\ifvisible
\par
\edef\floatcaptiondirectives{\floatparameter\c!location,\floatcaptionparameter\c!location}%
- \ifparfloat
+ \ifconditional\c_strc_floats_par_float
\strc_floats_set_paragraph_variant
\else
\strc_floats_set_page_variant
@@ -1872,9 +1929,7 @@
\fi
\fi}
-\newcounter\noxfloatlocations
-
-% \def\dooutput{\sidefloatoutput} % redefinition of \dooutput
+% \def\dooutput{\sidefloatoutput} % redefinition of \dooutput
\definefloat
[\v!figure]
@@ -1900,17 +1955,17 @@
\installcorenamespace{floatmethods}
-\let\floatmethod \empty
-\let\floatlabel \empty
-\let\floatcolumn \empty
-\let\floatrow \empty
-\let\forcedfloatmethod\empty
+\let\floatmethod \empty % set by lua
+\let\floatlabel \empty % set by lua
+\let\floatcolumn \empty % set by lua
+\let\floatrow \empty % set by lua
+\let\forcedfloatmethod\empty % set by lua
\def\setfloatmethodvariables#1% \floatmethod \floatlabel \floatrow \floatcolumn
{\ctxcommand{analysefloatmethod("#1")}}
\def\somesomewherefloat[#1]%
- {\dofloatssavesomewherefloat\s!somewhere{#1}}
+ {\page_floats_save_somewhere_float\s!somewhere{#1}}
\def\strc_floats_get_box
{\ifvisible
@@ -2073,45 +2128,49 @@
%D Local floats:
-\def\setuplocalfloats
- {\getparameters[\??lf]}
+\installcorenamespace{localfloats}
+\installcorenamespace{localfloatstack}
+
+\installsetuponlycommandhandler \??localfloats {localfloats}
\setuplocalfloats
[%\c!before=\blank,
%\c!after=\blank,
\c!inbetween=\blank]
-\initializeboxstack{localfloats}
+\initializeboxstack\??localfloatstack
-\newcounter\noflocalfloats
+\newcount\c_strc_localfloats_n \let\noflocalfloats\c_strc_localfloats_n
-\def\resetlocalfloats
- {\doglobal\newcounter\noflocalfloats
- \initializeboxstack{localfloats}}
+\unexpanded\def\resetlocalfloats
+ {\global\c_strc_localfloats_n\zerocount
+ \initializeboxstack\??localfloatstack}
-\def\somelocalfloat
- {\doglobal\increment\noflocalfloats
- \savebox{localfloats}{\noflocalfloats}{\box\floatbox}}
+\unexpanded\def\somelocalfloat
+ {\global\advance\c_strc_localfloats_n\plusone
+ \savebox\??localfloatstack{\number\c_strc_localfloats_n}{\box\floatbox}}
-\def\getlocalfloats
- {\dorecurse\noflocalfloats
+\unexpanded\def\getlocalfloats
+ {\dorecurse\c_strc_localfloats_n
{\ifnum\recurselevel=\plusone % 1\relax
- \getvalue{\??lf\c!before}%
+ \directlocalfloatsparameter\c!before
\else
- \getvalue{\??lf\c!inbetween}%
+ \directlocalfloatsparameter\c!inbetween
\fi
- \dontleavehmode\hbox{\foundbox{localfloats}\recurselevel}%
- \ifnum\recurselevel=\noflocalfloats\relax
- \getvalue{\??lf\c!after}%
+ \dontleavehmode\hbox{\foundbox\??localfloatstack\recurselevel}%
+ \ifnum\recurselevel=\c_strc_localfloats_n\relax
+ \directlocalfloatsparameter\c!after
\fi}}
-\def\flushlocalfloats
+\unexpanded\def\flushlocalfloats
{\getlocalfloats
\resetlocalfloats}
-\unexpanded\def\getlocalfloat#1{\expanded{\foundbox{localfloats}{\number#1}}}
+\unexpanded\def\getlocalfloat#1%
+ {\normalexpanded{\foundbox{\??localfloatstack}{\number#1}}}
-\def\forcelocalfloats{\let\forcedfloatmethod\v!local}
+\unexpanded\def\forcelocalfloats
+ {\let\forcedfloatmethod\v!local}
\installfloatmethod \s!singlecolumn \v!local \somelocalfloat
\installfloatmethod \s!multicolumn \v!local \somelocalfloat
diff --git a/Master/texmf-dist/tex/context/base/strc-ind.mkiv b/Master/texmf-dist/tex/context/base/strc-ind.mkiv
index 4b65c740798..0098bff7da6 100644
--- a/Master/texmf-dist/tex/context/base/strc-ind.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-ind.mkiv
@@ -57,6 +57,7 @@
\newcount\c_strc_indentedtexts_nesting
\newdimen\d_strc_indentedtexts_width
+\newdimen\d_strc_indentedtexts_distance
\appendtoks
\setuevalue {\e!start\currentindentedtext}{\strc_indentedtexts_start{\currentindentedtext}\c_strc_indentedtexts_nesting}%
@@ -67,12 +68,13 @@
\setuevalue{\v!sub\v!sub\currentindentedtext}{\strc_indentedtexts_direct{\currentindentedtext}{2}}%
\to \everydefineindentedtext
-\unexpanded\def\strc_indentedtexts_start#1#2%
+\unexpanded\def\strc_indentedtexts_start#1#2% we need to get rid of \spr
{\par
\begingroup
- \c_strc_indentedtexts_nesting#2\relax
\edef\currentindentedtext{#1}%
+ \c_strc_indentedtexts_nesting#2\relax
\indentedtextparameter\c!before
+ \d_strc_indentedtexts_distance\indentedtextparameter\c!distance\relax
\doifnothing{\indentedtextparameter\c!sample}
{\setindentedtextparameter\c!sample{\indentedtextparameter\c!text}}%
\assignwidth
@@ -81,15 +83,15 @@
{\useindentedtextstyleandcolor\c!headstyle\c!headcolor
\indentedtextparameter\c!sample
\spr{\indentedtextparameter\c!separator}}
- {\indentedtextparameter\c!distance}%
- \advance\d_strc_indentedtexts_width \indentedtextparameter\c!distance
+ {\d_strc_indentedtexts_distance}%
+ \advance\d_strc_indentedtexts_width \d_strc_indentedtexts_distance
\setbox\scratchbox\hbox to \d_strc_indentedtexts_width
{\useindentedtextstyleandcolor\c!headstyle\c!headcolor
\strut
\indentedtextparameter\c!text
\hss
\spr{\indentedtextparameter\c!separator}%
- \hskip\indentedtextparameter\c!distance}%
+ \hskip\d_strc_indentedtexts_distance}%
\parindent\zeropoint
\hskip\c_strc_indentedtexts_nesting\d_strc_indentedtexts_width
\advance\c_strc_indentedtexts_nesting\plusone
diff --git a/Master/texmf-dist/tex/context/base/strc-ini.lua b/Master/texmf-dist/tex/context/base/strc-ini.lua
index 78efcad815a..fd7c10f7952 100644
--- a/Master/texmf-dist/tex/context/base/strc-ini.lua
+++ b/Master/texmf-dist/tex/context/base/strc-ini.lua
@@ -20,17 +20,24 @@ but it does not make sense to store all processdata.
]]--
-local format, concat = string.format, table.concat
+local formatters = string.formatters
local lpegmatch = lpeg.match
local count = tex.count
-local type, next, tonumber = type, next, tonumber
+local type, next, tonumber, select = type, next, tonumber, select
local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
local allocate = utilities.storage.allocate
-local ctxcatcodes = tex.ctxcatcodes
-local xmlcatcodes = tex.xmlcatcodes
-local notcatcodes = tex.notcatcodes
-local txtcatcodes = tex.txtcatcodes
+local catcodenumbers = catcodes.numbers -- better use the context(...) way to switch
+
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local xmlcatcodes = catcodenumbers.xmlcatcodes
+local notcatcodes = catcodenumbers.notcatcodes
+local txtcatcodes = catcodenumbers.txtcatcodes
+
+local context, commands = context, commands
+
+local pushcatcodes = context.pushcatcodes
+local popcatcodes = context.popcatcodes
local trace_processors = false
local report_processors = logs.reporter("processors","structure")
@@ -162,9 +169,9 @@ end
helpers.simplify = simplify
function helpers.merged(...)
- local h, t = { ... }, { }
- for k=1, #h do
- local v = h[k]
+ local t = { }
+ for k=1, select("#",...) do
+ local v = select(k,...)
if v and v ~= "" and not t[k] then
t[k] = v
end
@@ -183,7 +190,7 @@ local tags = {
-- (optionally) as a setups to be applied but keep in mind that document setups
-- also get applied (when they use #1's).
--
--- local command = format("\\xmlprocessbuffer{%s}{%s}{}",metadata.xmlroot or "main",tag)
+-- local command = formatters["\\xmlprocessbuffer{%s}{%s}{}"](metadata.xmlroot or "main",tag)
local experiment = true
@@ -194,39 +201,47 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
if metadata.coding == "xml" then
-- title can contain raw xml
local tag = tags[metadata.kind] or tags.generic
- local xmldata = format("<?xml version='1.0'?><%s>%s</%s>",tag,title,tag)
-if not experiment then
- buffers.assign(tag,xmldata)
-end
+ local xmldata = formatters["<?xml version='1.0'?><%s>%s</%s>"](tag,title,tag)
+ if not experiment then
+ buffers.assign(tag,xmldata)
+ end
if trace_processors then
report_processors("putting xml data in buffer: %s",xmldata)
- report_processors("processing buffer with setup '%s' and tag '%s'",xmlsetup or "",tag)
+ report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag)
end
-if experiment then
- -- the question is: will this be forgotten ... better store in a via file
- local xmltable = lxml.convert("temp",xmldata or "")
- lxml.store("temp",xmltable)
- context.xmlsetup("temp",xmlsetup or "")
-else
+ if experiment then
+ -- the question is: will this be forgotten ... better store in a via file
+ local xmltable = lxml.convert("temp",xmldata or "")
+ lxml.store("temp",xmltable)
+ context.xmlsetup("temp",xmlsetup or "")
+ else
context.xmlprocessbuffer("dummy",tag,xmlsetup or "")
-end
+ end
elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used)
if trace_processors then
- report_processors("feeding xmlsetup '%s' using node '%s'",xmlsetup,title)
+ report_processors("feeding xmlsetup %a using node %a",xmlsetup,title)
end
context.xmlsetup(title,metadata.xmlsetup)
else
local catcodes = metadata.catcodes
if catcodes == notcatcodes or catcodes == xmlcatcodes then
if trace_processors then
- report_processors("cct: %s (overloads %s), txt: %s",ctxcatcodes,catcodes,title)
+ report_processors("catcodetable %a, overloads %a, text %a",ctxcatcodes,catcodes,title)
end
context(title) -- nasty
else
if trace_processors then
- report_processors("cct: %s, txt: %s",catcodes,title)
+ report_processors("catcodetable %a, text %a",catcodes,title)
end
- context.sprint(catcodes,title) -- was: texsprint(catcodes,title)
+ --
+ -- context.sprint(catcodes,title)
+ --
+ -- doesn't work when a newline is in there \section{Test\ A} so we do
+ -- it this way:
+ --
+ pushcatcodes(catcodes)
+ context(title)
+ popcatcodes()
end
end
else
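
The helpers.merged rewrite above stops packing the varargs into a table and
walks them with select("#",...) instead. A minimal plain-Lua sketch, not part
of the patch, of why that matters when a nil slips into the argument list:

    local function count_packed(...)
      local h = { ... }
      return #h                    -- a nil hole can make # stop early
    end

    local function count_select(...)
      return select("#", ...)      -- always the real number of arguments
    end

    print(count_packed("a", nil, "c"))  -- 1 or 3, depending on the Lua version
    print(count_select("a", nil, "c"))  -- always 3

With select the loop sees every argument position, so empty values can be
skipped explicitly instead of being lost to the length operator.
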
diff --git a/Master/texmf-dist/tex/context/base/strc-itm.mkvi b/Master/texmf-dist/tex/context/base/strc-itm.mkvi
index 86acba33c09..5840ee73ea3 100644
--- a/Master/texmf-dist/tex/context/base/strc-itm.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-itm.mkvi
@@ -182,6 +182,7 @@
\newconditional\c_strc_itemgroups_columns
\newconditional\c_strc_itemgroups_concat
\newconditional\c_strc_itemgroups_txt
+\newconditional\c_strc_itemgroups_extra
\newconditional\c_strc_itemgroups_repeat
% 0 = before/after
@@ -242,11 +243,17 @@
\strc_itemgroups_insert_reference_indeed
\fi \fi}
+\def\strc_itemgroups_insert_extra_reference
+ {\iftrialtypesetting \else \ifx\currentitemreference \empty \else
+ \normalexpanded{\textreference[\currentitemreference]{\strc_itemgroups_extra_symbol}}%
+ \fi \fi}
+
\def\strc_itemgroups_insert_reference_indeed % maybe we need a 'frozen counter' numberdata blob / quick hack ... move this to strc-ref
{%\setnextinternalreference
% no need to collect nodes in \b_strc_destination_nodes here ... maybe at some point
\strc_references_start_destination_nodes
- \ctxlua {structures.references.setandgetattribute("\s!full", "\referenceprefix","\currentitemreference",
+ % this is somewhat over the top ... we should use the counter's reference
+ \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","\currentitemreference",
{
metadata = {
kind = "item",% ?
@@ -259,27 +266,27 @@
section = structures.sections.currentid(),
},
prefixdata = structures.helpers.simplify {
- prefix = "\counterparameter\v_strc_itemgroups_counter\c!prefix",
- separatorset = "\counterparameter\v_strc_itemgroups_counter\c!prefixseparatorset",
- conversion = \!!bs\counterparameter\v_strc_itemgroups_counter\c!prefixconversion\!!es,
- conversionset = "\counterparameter\v_strc_itemgroups_counter\c!prefixconversionset",
- set = "\counterparameter\v_strc_itemgroups_counter\c!prefixset",
- segments = "\counterparameter\v_strc_itemgroups_counter\c!prefixsegments",
+ prefix = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefix",
+ separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixseparatorset",
+ conversion = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversion\!!es,
+ conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversionset",
+ set = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixset",
+ segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixsegments",
% segments = "\askedprefixsegments",
- connector = \!!bs\counterparameter\v_strc_itemgroups_counter\c!prefixconnector\!!es,
+ connector = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconnector\!!es,
},
numberdata = structures.helpers.simplify {
numbers = structures.counters.compact("\v_strc_itemgroups_counter",nil,true),
- separatorset = "\counterparameter\v_strc_itemgroups_counter\c!numberseparatorset",
- % conversion = "\counterparameter\v_strc_itemgroups_counter\c!numberconversion",
+ separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberseparatorset",
+ % conversion = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversion",
% conversion = "\currentitemgroupconversionset",
- % conversionset = "\counterparameter\v_strc_itemgroups_counter\c!numberconversionset",
+ % conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversionset",
% todo: fixedconversion = "\currentitemgroupconversionset", % temp hack:
conversionset = "fixed::\currentitemgroupconversionset",
%
% for the moment no stopper, we need to make references configurable first
- % stopper = \!!bs\counterparameter\v_strc_itemgroups_counter\c!numberstopper\!!es,
- segments = "\counterparameter\v_strc_itemgroups_counter\c!numbersegments",
+ % stopper = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!numberstopper\!!es,
+ segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!numbersegments",
},
})
}%
@@ -360,7 +367,7 @@
\def\strc_itemgroups_insert_break_indeed {\flushnotes
\vspacing[\v!item @-5]}
\def\strc_itemgroups_insert_nobreak_indeed {\flushnotes
- \ifinsidecolumns
+ \ifinsidecolumns % todo
\vspacing[\v!item @5]%
\else
\vspacing[\v!item @500]%
@@ -452,6 +459,7 @@
% this will be a constant
\setfalse\c_strc_itemgroups_head
\setfalse\c_strc_itemgroups_sub
+ \setfalse\c_strc_itemgroups_symbol
\setfalse\c_strc_itemgroups_columns
% to be checked
\let\m_strc_itemgroups_destination\empty
@@ -584,9 +592,11 @@
{\processcommalist[#subcategories]{\strc_itemgroups_setup_list_level_b{#category}{#whatever}}}}
\def\strc_itemgroups_increment_item_counter
- {\ifconditional\c_strc_itemgroups_sub \else \ifx\strc_itemgroups_extra_symbol\empty
- \strc_counters_increment_sub\v_strc_itemgroups_counter\currentitemlevel
- \fi\fi}
+ {\ifconditional\c_strc_itemgroups_sub \else
+ \ifconditional\c_strc_itemgroups_symbol \else % \ifx\strc_itemgroups_extra_symbol\empty
+ \strc_counters_increment_sub\v_strc_itemgroups_counter\currentitemlevel
+ \fi
+ \fi}
\unexpanded\def\strc_itemgroups_insert_item_counter
{\ifconditional\c_strc_itemgroups_repeat
@@ -601,12 +611,11 @@
\strc_itemgroups_insert_item_counter_indeed
\fi}
-\def\strc_itemgroups_insert_item_counter_indeed
- {\begingroup
- \strc_counters_setup
+\def\strc_itemgroups_insert_item_counter_indeed % quite slow ... every time this setup .. but it
+ {\begingroup % can be optimized ... best move some to strc-num
+ \setupcounter
[\v_strc_itemgroups_counter]%
- [%\c!prefix=\v!no,
- \c!prefix=\itemgroupparameter\c!prefix,
+ [\c!prefix=\itemgroupparameter\c!prefix,
\c!prefixstopper=\itemgroupparameter\c!prefixstopper,
\c!prefixseparatorset=\itemgroupparameter\c!prefixseparatorset,
\c!prefixconversion=\itemgroupparameter\c!prefixconversion,
@@ -617,9 +626,7 @@
\c!criterium=\itemgroupparameter\c!criterium,
\c!numberorder=\ifconditional\c_strc_itemgroups_reverse\v!reverse\else\v!normal\fi,
\c!numberstopper=\expdoif{\itemgroupparameter\c!placestopper}\v!yes{\itemgroupparameter\c!stopper},
- %\c!numberseparatorset=,
- \c!numberconversionset=\v_strc_itemgroups_counter, % itemgroup:\currentparentitemgroup,
- %\c!numberconversion=\currentitemgroupsymbol,
+ \c!numberconversionset=\v_strc_itemgroups_counter,
\c!numbersegments=\currentitemgroupsegments]%
% there will be a fixedconversion key
%\normalexpanded{\defineconversionset[\v_strc_itemgroups_counter][\currentitemgroupconversionset][\currentitemgroupsymbol]}%
@@ -775,7 +782,7 @@
\doadaptleftskip {\itemgroupparameter\c!leftmargin}%
\doadaptrightskip{\itemgroupparameter\c!rightmargin}%
\fi
- \dosetraggedcommand{\itemgroupparameter\c!align}\raggedcommand
+ \usealignparameter\itemgroupparameter
\edef\m_strc_itemgroups_indenting{\itemgroupparameter\c!indenting}%
\ifnum\c_strc_itemgroups_nesting>\zerocount
\settrue\c_strc_itemgroups_first
@@ -832,9 +839,6 @@
\let\startcollectitems\relax
\let\stopcollectitems \relax
-\ifdefined\startcolumns \else \unexpanded\def\startcolumns[#settings]{} \fi
-\ifdefined\stopcolumns \else \unexpanded\def\stopcolumns {} \fi
-
\letvalue{\??itemgroupalign\v!flushleft }\relax
\letvalue{\??itemgroupalign\v!right }\relax
\letvalue{\??itemgroupalign\v!flushright}\hfill
@@ -873,16 +877,31 @@
\def\strc_itemgroups_setup_symbol_asked
{\edef\strc_itemgroups_asked_symbol{\itemgroupparameter\c!symbol}}
-\def\strc_itemgroups_start_columns
- {\startcolumns
- [\c!n=\itemgroupparameter\c!n,
- \c!height=,
- \c!rule=\v!off,
- \c!balance=\v!yes,
- \c!align=\v!no]}
+\ifdefined\strc_itemgroups_start_columns
-\def\strc_itemgroups_stop_columns
- {\stopcolumns}
+ % already defined in page-mix
+
+\else
+
+ % will be redefined in page-mix
+
+ \ifdefined\startcolumns \else
+ \unexpanded\def\startcolumns[#settings]{}
+ \unexpanded\def\stopcolumns {}
+ \fi
+
+ \def\strc_itemgroups_start_columns
+ {\startcolumns
+ [\c!n=\itemgroupparameter\c!n,
+ \c!height=,
+ \c!rule=\v!off,
+ \c!balance=\v!yes,
+ \c!align=\v!no]}
+
+ \def\strc_itemgroups_stop_columns
+ {\stopcolumns}
+
+\fi
\unexpanded\def\stopitemgroup
{\stopcollectitems
@@ -1090,6 +1109,7 @@
\unexpanded\def\strc_itemgroups_start_symbol#text%
{\def\strc_itemgroups_extra_symbol{#text}%
+ \settrue\c_strc_itemgroups_symbol
\startitemgroupitem}
\unexpanded\def\strc_itemgroups_start_dummy
@@ -1117,6 +1137,7 @@
\unexpanded\def\strc_itemgroups_start_text#text%
{\def\strc_itemgroups_extra_symbol{#text}%
+ \settrue\c_strc_itemgroups_symbol
\settrue\c_strc_itemgroups_txt
\startitemgroupitem}
@@ -1219,29 +1240,33 @@
\unexpanded\def\strc_itemgroups_start_head_sym#text%
{\def\strc_itemgroups_extra_symbol{#text}%
+ \settrue\c_strc_itemgroups_symbol
\settrue\c_strc_itemgroups_head
\strc_itemgroups_start_head}
\def\strc_itemgroups_make_symbol_box
{\setbox\b_strc_itemgroups\hbox
{\ifconditional\c_strc_itemgroups_head
- \ifx\strc_itemgroups_extra_symbol\empty
- \useitemgroupstyleandcolor\c!headstyle\c!headcolor
- \strc_itemgroups_used_symbol
- \else
+ \ifconditional\c_strc_itemgroups_symbol
+ \strc_itemgroups_insert_extra_reference
\useitemgroupstyleandcolor\c!symstyle\c!symcolor
\strc_itemgroups_extra_symbol
+ \else
+ \useitemgroupstyleandcolor\c!headstyle\c!headcolor
+ \strc_itemgroups_used_symbol
\fi
\else
- \ifx\strc_itemgroups_extra_symbol\empty
- \useitemgroupstyleandcolor\c!style\c!color
- \strc_itemgroups_used_symbol
- \else
+ \ifconditional\c_strc_itemgroups_symbol
+ \strc_itemgroups_insert_extra_reference
\useitemgroupstyleandcolor\c!symstyle\c!symcolor
\strc_itemgroups_extra_symbol
+ \else
+ \useitemgroupstyleandcolor\c!style\c!color
+ \strc_itemgroups_used_symbol
\fi
\fi}%
- \let\strc_itemgroups_extra_symbol\empty}
+ \let\strc_itemgroups_extra_symbol\empty
+ \setfalse\c_strc_itemgroups_symbol}
\def\strc_itemgroups_make_fitting_box
{\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\scaledpoint\relax % brr, sp
@@ -1453,10 +1478,11 @@
\else
\advance\c_strc_itemgroups_collected_current\plusone
\fi
- \doifdefined{\??itemgroupstack\number\c_strc_itemgroups_collected_current}
- {\getvalue{\??itemgroupstack\number\c_strc_itemgroups_collected_current}%
- \letbeundefined{\??itemgroupstack\number\c_strc_itemgroups_collected_current}%
- \advance\c_strc_itemgroups_collected_done\plusone}%
+ \ifcsname\??itemgroupstack\number\c_strc_itemgroups_collected_current\endcsname
+ \getvalue{\??itemgroupstack\number\c_strc_itemgroups_collected_current}%
+ \letbeundefined{\??itemgroupstack\number\c_strc_itemgroups_collected_current}%
+ \advance\c_strc_itemgroups_collected_done\plusone
+ \fi
\ifnum\c_strc_itemgroups_collected_done<\c_strc_itemgroups_collected_stored
\expandafter\strc_itemgroups_collected_flush
\fi}
diff --git a/Master/texmf-dist/tex/context/base/strc-lab.mkiv b/Master/texmf-dist/tex/context/base/strc-lab.mkiv
index 483791462c4..772a9cc19b6 100644
--- a/Master/texmf-dist/tex/context/base/strc-lab.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-lab.mkiv
@@ -15,82 +15,166 @@
\unprotect
-% labels, we could share with enumerations and forget about the text; anyhow, figure
-% labels etc can use enumerations; we keep labels for compatibility reasons; we need
-% the slightly different namespace; we can still define structure counters directly
-% (multiple levels) and use an enumeration without following text
-
-% unfinished ... no longer okay
-
-\unexpanded\def\setuplabels
- {\getparameters[\??db]}
-
-\unexpanded\def\definelabel
- {\dotripleargumentwithset\dodefinelabel}
-
-\def\dodefinelabel[#1][#2][#3]% #2 or #3 assignment
- {\doenumerationinit{#1}{1}\empty
- \getparameters[\??dd#1][\c!command=,\c!state=\v!start,\c!location=,\c!text=#1]%
- \ifsecondargument\doifassignmentelse{#2}\donetrue\donefalse\else\donetrue\fi
- \ifdone
- % an independent one
- \getparameters[\??dd#1][\s!counter=#1,\s!parent=\??db,#2]%
- \dodefinelabelcommands{#1}{\??db}%
- \dodefineenumerationcounter{#1}%
+%D Labels are implemented somewhat inefficiently but have the advantage that they
+%D can be controlled like other constructions. Who knows when that comes in
+%D handy?
+
+\installcorenamespace{labels}
+
+\installcommandhandler \??labels {label} \??labels % plural hash, else clash with label (or rename that one)
+
+\installcounterassociation{label}
+
+\let\setuplabels\setuplabel
+
+\setuplabel
+ [\c!alternative=\v!intext,
+ \c!headstyle=\v!bold,
+ \c!titlestyle=\v!bold,
+ \c!distance=\zeropoint,
+ \c!width=\v!fit,
+ \c!titledistance=.5\emwidth,
+ \c!margin=\v!no,
+ \c!titleleft=(,
+ \c!titleright=),
+ \c!display=\v!no,
+ \c!titlecommand=,
+ \c!expansion=\v!no,
+ \c!way=\v!by\v!text,
+ \c!prefix=\v!no,
+ \c!prefixconnector=.,
+ \c!text=\currentlabel,
+ \c!number=\v!yes, % else description
+ \c!start=0,
+ \c!state=\v!start,
+ \c!levels=3]
+
+\unexpanded\def\strc_define_commands_label#1#2#3% current level parent
+ {\doifelsenothing{#3}
+ {\normalexpanded{\defineconstruction[#1][\s!handler=\v!label,\c!level=#2]}%
+ \setevalue{\??label#1:\s!parent}{\??label}}%
+ {\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!label,\c!level=#2]}%
+ \setevalue{\??label#1:\s!parent}{\??label#3}}%
+ \setuevalue{\e!next #1}{\strc_labels_next }%
+ \setuevalue{\c!reset#1}{\strc_labels_reset }%
+ %setuevalue{\c!set #1}{\strc_labels_set }%
+ \setuevalue {#1}{\strc_labels_command{#1}}}
+
+\appendtoks
+ \ifx\currentlabelparent\empty
+ % clone => parent | subclone => clone | subsubclone => subclone
+ \let\currentlabelsub\empty
+ \strc_define_commands_label
+ {\currentlabelsub\currentlabel}%
+ \plusone
+ \empty
+ \edef\p_levels{\labelparameter\c!levels}%
+ \dostepwiserecurse\plustwo\p_levels\plusone
+ {\strc_define_commands_label
+ {\v!sub\currentlabelsub\currentlabel}%
+ \recurselevel
+ {\currentlabelsub\currentlabel}%
+ \edef\currentlabelsub{\v!sub\currentlabelsub}}%
\else
- \getparameters[\??dd#1][\s!counter=#1,\s!parent=\??dd#2,#3]%
- \dodefinelabelcommands{#1}{\??dd#2}%
+ % clone => parent | subclone => subparent | subsubclone => subsubparent
+ \let\currentlabelsub\empty
+ \edef\p_levels{\labelparameter\c!levels}%
+ \dorecurse\p_levels
+ {\strc_define_commands_label
+ {\currentlabelsub\currentlabel}%
+ \recurselevel
+ {\currentlabelsub\currentlabelparent}%
+ \edef\currentlabelsub{\v!sub\currentlabelsub}}%
+ \fi
+ \edef\p_counter{\labelparameter\s!counter}% can inherit from parent
+ \ifx\p_counter\empty %
+ \let\p_counter\currentlabel
+ \fi
+ \doifcounterelse\p_counter\donothing{\strc_labels_define_counter\p_counter}%
+ \letlabelparameter\s!counter\p_counter
+\to \everydefinelabel
+
+\let\p_strc_constructions_title \empty
+\let\p_strc_constructions_number\empty
+
+\setvalue{\??constructioninitializer\v!label}%
+ {\let\currentlabel \currentconstruction
+ \let\constructionparameter \labelparameter
+ \let\detokenizedconstructionparameter\detokenizedlabelparameter
+ \let\letconstructionparameter \letlabelparameter
+ \let\useconstructionstyleandcolor \uselabelstyleandcolor
+ \let\setupcurrentconstruction \setupcurrentlabel
+ % shared with enumerations
+ \edef\p_strc_constructions_number{\constructionparameter\c!number}%
+ \ifx\p_strc_constructions_number\v!yes
+ \settrue\c_strc_constructions_number_state
+ \iftrialtypesetting
+ \strc_counters_save\currentconstructionnumber
+ \fi
+ \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \else
+ \setfalse\c_strc_constructions_number_state
+ \fi
+ \edef\p_strc_constructions_title{\constructionparameter\c!title}%
+ \ifx\p_strc_constructions_title\v!yes
+ \settrue\c_strc_constructions_title_state
+ \else
+ \setfalse\c_strc_constructions_title_state
\fi}
-\def\dodefinelabelcommands#1#2%
- {\setuevalue {#1}{\noexpand\dolabelnumbercommand {#1}}%
- \setuevalue{\c!reset #1}{\noexpand\doresetlabelnumber {#1}}%
- %\setuevalue{\c!set #1}{\noexpand\dosetlabelnumber {#1}}% [#2] or {#2} ?
- \setuevalue{\e!next #1}{\noexpand\donextlabelnumber {#1}}%
- \setuevalue{\e!increment#1}{\noexpand\doincrementlabelnumber{#1}}%
- \setuevalue{\c!current #1}{\noexpand\docurrentlabelnumber {#1}}}
-
-% this is just for downward compatibility, we might drop it
-
-\setvalue{\??db:\c!location:\v!inmargin}{\inmargin}
-\setvalue{\??db:\c!location:\v!inleft }{\inleft}
-\setvalue{\??db:\c!location:\v!inright }{\inright}
-\setvalue{\??db:\c!location:\v!margin }{\inmargin}
-
-\def\dolabelinit#1%
- {\def\currentdescriptionmain{#1}%
- \def\currentdescription {#1}%
- \def\currentdescriptionlevel{1}}
-
-\def\docurrentlabelnumber #1{\dolabelinit{#1}\dosingleempty\dodocurrentlabelnumber}
-\def\donextlabelnumber #1{\dolabelinit{#1}\dosingleempty\dodonextlabelnumber}
-\def\dolabelnumbercommand #1{\dolabelinit{#1}\dosingleempty\dodolabelnumbercommand}
-
-\def\doresetlabelnumber #1{\dolabelinit{#1}\strc_counters_reset_sub \currentdescriptionnumber\currentdescriptionlevel}
-\def\dosetlabelnumber #1#2{\dolabelinit{#1}\strc_counters_set_sub \currentdescriptionnumber\currentdescriptionlevel{#2}}
-\def\doincrementlabelnumber #1{\dolabelinit{#1}\strc_counters_increment_sub\currentdescriptionnumber\currentdescriptionlevel}
-
-\def\dodocurrentlabelnumber[#1]%
- {\dontleavehmode
- \writestatus{!!!}{todo: reference of label}%
- \dotextprefix{\descriptionparameter\c!text}%
- \convertedsubcounter[\currentdescriptionnumber][\currentdescriptionlevel]}
-
-\def\dodonextlabelnumber[#1]% todo: ref
- {\strc_counters_increment_sub\currentdescriptionnumber\currentdescriptionlevel
- \dodocurrentlabelnumber[\currentdescriptionnumber]}
-
-\def\dodolabelnumbercommand[#1]% todo: ref
- {\dontleavehmode
- \descriptionparameter\c!before
- \begingroup
- \strc_counters_increment_sub\currentdescriptionnumber\currentdescriptionlevel
- \usedescriptionstyleandcolor\c!headstyle\c!headcolor
- \executeifdefined{\??db:\c!location:\descriptionparameter\c!location}{\descriptionparameter\c!command}{\dodocurrentlabelnumber[#1]}%
- \endgroup
- \descriptionparameter\c!after}
-
-\setuplabels
- [\s!parent=\??dn]
+\setvalue{\??constructionfinalizer\v!label}%
+ {\ifconditional\c_strc_constructions_number_state
+ \iftrialtypesetting
+ \strc_counters_restore\currentconstructionnumber
+ \fi
+ \fi}
+
+%D Interfaces:
+
+\let\strc_labels_command\strc_descriptions_command
+\let\strc_labels_next \strc_enumerations_next
+\let\strc_labels_reset \strc_enumerations_reset
+%let\strc_labels_set \strc_enumerations_set
+
+% similar to enumerations
+
+\def\strc_labels_define_counter#1%
+ {\definecounter[#1]%
+ \registerlabelcounter{#1}}
+
+\appendtoks
+ \synchronizelabelcounters
+\to \everysetuplabel
+
+\appendtoks
+ \synchronizelabelcounters
+\to \everydefinelabel
+
+% no start stop here
+
+\expandafter\let\csname\??constructionmainhandler\v!label\expandafter\endcsname\csname\??constructionmainhandler\v!description\endcsname
+
+\unexpanded\setevalue{\??constructioncommandhandler\v!label}%
+ {\csname\??constructionstarthandler\v!construction\endcsname
+ \csname\??constructionstophandler \v!construction\endcsname
+ \endgroup}
+
+\unexpanded\setvalue{\??constructiontexthandler\v!label}%
+ {\begingroup
+ \useconstructionstyleandcolor\c!headstyle\c!headcolor
+ \strc_labels_text
+ \endgroup}
+
+\let\strc_labels_text\strc_enumerations_text
+
+% inline variant
+
+\defineconstructionalternative
+ [\v!intext]
+ [\c!renderingsetup=\??constructionrenderings:\v!intext]
+
+\startsetups[\??constructionrenderings:\v!intext]
+ \dontleavehmode\box\constructionheadbox
+\stopsetups
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-lev.lua b/Master/texmf-dist/tex/context/base/strc-lev.lua
index 4ca049d7a20..50a63c93879 100644
--- a/Master/texmf-dist/tex/context/base/strc-lev.lua
+++ b/Master/texmf-dist/tex/context/base/strc-lev.lua
@@ -6,7 +6,6 @@ if not modules then modules = { } end modules ['strc-lev'] = {
license = "see context related readme files"
}
-local format = string.format
local insert, remove = table.insert, table.remove
local sections = structures.sections
@@ -18,6 +17,8 @@ local level, levels, categories = 0, sections.levels, { }
storage.register("structures/sections/levels", levels, "structures.sections.levels")
+local f_two_colon = string.formatters["%s:%s"]
+
function commands.definesectionlevels(category,list)
levels[category] = utilities.parsers.settings_to_array(list)
end
@@ -27,7 +28,7 @@ function commands.startsectionlevel(category)
level = level + 1
local lc = levels[category]
if not lc or level > #lc then
- context.nostarthead { format("%s:%s",category,level) }
+ context.nostarthead { f_two_colon(category,level) }
else
context.dostarthead { lc[level] }
end
@@ -39,7 +40,7 @@ function commands.stopsectionlevel()
if category then
local lc = levels[category]
if not lc or level > #lc then
- context.nostophead { format("%s:%s",category,level) }
+ context.nostophead { f_two_colon(category,level) }
else
context.dostophead { lc[level] }
end
diff --git a/Master/texmf-dist/tex/context/base/strc-lnt.mkvi b/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
index f07ce783983..4a2cd1cc0a3 100644
--- a/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
@@ -27,11 +27,26 @@
{\dotripleempty\strc_linenotes_define}
\def\strc_linenotes_define[#1][#2][#3]%
- {\definenote[#1][#2][#3]%
- \expandafter\let\csname\??linenote#1\expandafter\endcsname\csname#1\endcsname % use copy command
- \setuevalue {#1}{\strc_linenotes_direct{#1}}%
- \setuevalue{\e!start#1}{\strc_linenotes_start {#1}}%
- \setuevalue{\e!stop #1}{\strc_linenotes_stop }}
+ {\ifcsname\??linenote#1\endcsname
+ % there might be files that define the default 'linenote'
+ \ifthirdargument
+ \setupnote[#1][#3]%
+ \else\ifsecondargument
+ \setupnote[#1][#2]%
+ \fi\fi
+ \else
+ \ifthirdargument
+ \definenote[#1][#2][#3]%
+ \else\ifsecondargument
+ \definenote[#1][#2]%
+ \else
+ \definenote[#1]%
+ \fi\fi
+ \expandafter\let\csname\??linenote#1\expandafter\endcsname\csname#1\endcsname % use copy command
+ \setuevalue {#1}{\strc_linenotes_direct{#1}}%
+ \setuevalue{\e!start#1}{\strc_linenotes_start {#1}}%
+ \setuevalue{\e!stop #1}{\strc_linenotes_stop }%
+ \fi}
\unexpanded\def\strc_linenotes_direct#1#2%
{\global\advance\c_strc_linenotes\plusone
@@ -41,6 +56,7 @@
\unexpanded\def\strc_linenotes_start#1[#2]#3%
{\global\advance\c_strc_linenotes\plusone
+ \keepunwantedspaces
\strc_linenotes_indeed{#1}{#2}{#3}%
\strc_linenotes_traced{#2}%
\startline[#2]}
@@ -48,19 +64,109 @@
\unexpanded\def\strc_linenotes_stop[#1]%
{\stopline[#1]}
-\unexpanded\def\strc_linenotes_indeed#1#2#3% will be redone
+\let\m_page_lines_previous_to \relax
+\let\m_page_lines_previous_from\relax
+
+\let\m_page_lines_current_to \relax
+\let\m_page_lines_current_from \relax
+
+\newconditional\c_page_lines_current_to
+\newconditional\c_page_lines_current_from
+
+\installcorenamespace{linenotespreviousfrom}
+\installcorenamespace{linenotespreviousto}
+
+\letvalue\??linenotespreviousfrom\empty
+\letvalue\??linenotespreviousto \empty
+
+\def\page_lines_in_from{\in[lr:b:\currentlinenotereference]}
+\def\page_lines_in_to {\in[lr:e:\currentlinenotereference]}
+
+\unexpanded\def\strc_linenotes_range_normal#1% order
+ {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ \ifconditional\c_page_lines_current_from
+ \xdef\m_page_lines_current_from{\currentreferencelinenumber}%
+ \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \ifconditional\c_page_lines_current_to
+ \xdef\m_page_lines_current_to{\currentreferencelinenumber}%
+ \page_lines_in_from
+ \ifx\m_page_lines_current_from\m_page_lines_current_to \else
+ \endash
+ \page_lines_in_to
+ \fi
+ \else
+ \page_lines_in_from
+ \fi
+ \else
+ \page_lines_in_from
+ \fi}
+
+\unexpanded\def\strc_linenotes_range_sparse#1% order
+ {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ \ifconditional\c_page_lines_current_from
+ \xdef\m_page_lines_current_from{\currentreferencelinenumber}%
+ \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \ifconditional\c_page_lines_current_to
+ \xdef\m_page_lines_current_to{\currentreferencelinenumber}%
+ \ifx\m_page_lines_previous_from\m_page_lines_current_from
+ \ifx\m_page_lines_previous_to\m_page_lines_current_to
+ \notationparameter\c!compressseparator
+ \else
+ \page_lines_in_from
+ \fi
+ \else
+ \page_lines_in_from
+ \ifx\m_page_lines_current_from\m_page_lines_current_to
+ \else
+ \endash
+ \page_lines_in_to
+ \fi
+ \fi
+ \else
+ \page_lines_in_from
+ \fi
+ \else
+ \ifx\m_page_lines_previous_from\m_page_lines_current_from
+ \notationparameter\c!compressseparator
+ \else
+ \page_lines_in_from
+ \fi
+ \fi}
+
+\let\currentlinenotereference\empty
+
+\unexpanded\def\strc_linenotes_indeed#1#2#3%
{\begingroup
% we keep things local so we can use it as regular note too
- \unexpanded\def\linenotelinenumber##1{\inlinerange[#2]}% ##1 == order
-% \setupnotation[#1][\c!numbercommand=\linenotelinenumber]% todo: deep hook
-% \setupnote[#1][\c!textcommand=\gobbleoneargument]% todo: deep hook
\edef\currentnotation{#1}%
+ \edef\currentlinenotereference{#2}%
+ \xdef\m_page_lines_previous_from{\csname\??linenotespreviousfrom\ifcsname\??linenotespreviousfrom\currentnotation\endcsname\currentnotation\fi\endcsname}%
+ \xdef\m_page_lines_previous_to {\csname\??linenotespreviousto \ifcsname\??linenotespreviousto \currentnotation\endcsname\currentnotation\fi\endcsname}%
+ \doifelse{\notationparameter\c!compress}\v!yes
+ {\let\linenotelinenumber\strc_linenotes_range_sparse}%
+ {\let\linenotelinenumber\strc_linenotes_range_normal}%
\let\currentnote\currentnotation
\letnotationparameter\c!numbercommand\linenotelinenumber% todo: deep hook
\letnoteparameter \c!textcommand \gobbleoneargument % todo: deep hook
- \relax\getvalue{\??linenote#1}{#3}%
+ \getvalue{\??linenote\currentnotation}{#3}%
+ \expandafter\glet\csname\??linenotespreviousfrom\currentnotation\endcsname\m_page_lines_current_from
+ \expandafter\glet\csname\??linenotespreviousto \currentnotation\endcsname\m_page_lines_current_to
\endgroup}
+% where to hook this one in? resetcounter has no hook:
+
+\unexpanded\def\doresetlinenotecompression#1% \strc_linenotes_reset_previous
+ {\expandafter\glet\csname\??linenotespreviousfrom#1\endcsname\empty
+ \expandafter\glet\csname\??linenotespreviousto #1\endcsname\empty}
+
+\definesymbol
+ [\v!compressseparator]
+ [\space\hbox{\vl\thinspace\vl}]
+
+\setupnotations
+ [%c\compress=\v!no,
+ \c!compressseparator=\symbol\v!compressseparator]
+
\let\strc_linenotes_traced\gobbleoneargument
\def\strc_linenotes_traced_indeed#1%
@@ -79,9 +185,14 @@
\unexpanded\def\tracelinenotes
{\let\strc_linenotes_traced\strc_linenotes_traced_indeed}
+%D Use these when not properly nested:
+
+\let\fromlinenote\startlinenote
+\let\tolinenote \stoplinenote
+
% We predefine one, namely \type {\linenote} cum suis.
-% \definelinenote[\v!linenote]
+\definelinenote[\v!linenote]
% beware: line numbers are added later on so grouping setups is a bad idea
%
diff --git a/Master/texmf-dist/tex/context/base/strc-lst.lua b/Master/texmf-dist/tex/context/base/strc-lst.lua
index 48aab78dbfe..95f5e675ecd 100644
--- a/Master/texmf-dist/tex/context/base/strc-lst.lua
+++ b/Master/texmf-dist/tex/context/base/strc-lst.lua
@@ -122,7 +122,7 @@ function lists.addto(t)
end
local m = t.metadata
local r = t.references
- local i = (r and r.internal) or 0 -- brrr
+ local i = r and r.internal or 0 -- brrr
local p = pushed[i]
if not p then
p = #cached + 1
@@ -130,7 +130,6 @@ function lists.addto(t)
pushed[i] = p
r.listindex = p
end
- --
local setcomponent = references.setcomponent
if setcomponent then
setcomponent(t) -- might move to the tex end
@@ -192,26 +191,6 @@ function lists.enhance(n)
end
end
---~ function lists.enforce(n)
---~ -- todo: symbolic names for counters
---~ local l = cached[n]
---~ if l then
---~ --
---~ l.directives = nil -- might change
---~ -- save in the right order (happens at shipout)
---~ lists.tobesaved[#lists.tobesaved+1] = l
---~ -- default enhancer (cross referencing)
---~ l.references.realpage = texcount.realpageno
---~ -- specific enhancer (kind of obsolete)
---~ local kind = l.metadata.kind
---~ local enhancer = kind and lists.enhancers[kind]
---~ if enhancer then
---~ enhancer(l)
---~ end
---~ return l
---~ end
---~ end
-
-- we can use level instead but we can also decide to remove level from the metadata
local nesting = { }
@@ -260,17 +239,17 @@ local function filtercollected(names, criterium, number, collected, forced, nest
criterium = gsub(criterium or ""," ","") -- not needed
-- new, will be applied stepwise
local wantedblock, wantedcriterium = lpegmatch(splitter,criterium) -- block:criterium
- if not wantedcriterium then
- block = documents.data.block
- elseif wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then
+ if wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then
criterium = wantedcriterium ~= "" and wantedcriterium or criterium
+ elseif not wantedcriterium then
+ block = documents.data.block
else
block, criterium = wantedblock, wantedcriterium
end
if block == "" then
block = false
end
---~ print(">>",block,criterium)
+-- print(">>",block,criterium)
--
forced = forced or { } -- todo: also on other branched, for the moment only needed for bookmarks
if type(names) == "string" then
@@ -278,8 +257,7 @@ local function filtercollected(names, criterium, number, collected, forced, nest
end
local all = not next(names) or names[variables.all] or false
if trace_lists then
- report_lists("filtering names: %s, criterium: %s, block: %s, number: %s",
- simple_hash_to_string(names),criterium,block or "*", number or "-")
+ report_lists("filtering names %a, criterium %a, block %a, number %a",names,criterium,block or "*",number)
end
if criterium == variables.intro then
-- special case, no structure yet
@@ -295,7 +273,7 @@ local function filtercollected(names, criterium, number, collected, forced, nest
for i=1,#collected do
local v = collected[i]
local r = v.references
- if r then
+ if r and (not block or not r.block or block == r.block) then
local metadata = v.metadata
if metadata then
local name = metadata.name or false
@@ -353,7 +331,6 @@ local function filtercollected(names, criterium, number, collected, forced, nest
local cnumbers = sectionnumber.numbers
local metadata = v.metadata
if cnumbers then
---~ print(#cnumbers, depth, concat(cnumbers))
if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
local ok = true
for d=1,depth do
@@ -434,7 +411,7 @@ local function filtercollected(names, criterium, number, collected, forced, nest
local number = tonumber(number) or numberatdepth(depth) or 0
if trace_lists then
local t = sections.numbers()
- detail = format("depth: %s, number: %s, numbers: %s, startset: %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected)
+ detail = format("depth %s, number %s, numbers %s, startset %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected)
end
if number > 0 then
local pnumbers = nil
@@ -464,18 +441,14 @@ local function filtercollected(names, criterium, number, collected, forced, nest
end
end
if trace_lists then
- if detail then
- report_lists("criterium: %s, block: %s, %s, found: %s",criterium,block or "*",detail,#result)
- else
- report_lists("criterium: %s, block: %s, found: %s",criterium,block or "*",#result)
- end
+ report_lists("criterium %a, block %a, found %a, detail %a",criterium,block or "*",#result,detail)
end
if sortorder then -- experiment
local sorter = sorters[sortorder]
if sorter then
if trace_lists then
- report_lists("sorting list using method %s",sortorder)
+ report_lists("sorting list using method %a",sortorder)
end
for i=1,#result do
result[i].references.order = i
@@ -597,7 +570,7 @@ function lists.hasnumberdata(name,n)
local data = lists.result[n]
if data then
local numberdata = data.numberdata
- if numberdata then
+ if numberdata and not numberdata.hidenumber then -- the hide number is true
return true
end
end
@@ -644,7 +617,6 @@ function lists.prefixednumber(name,n,prefixspec,numberspec)
helpers.prefix(data,prefixspec)
local numberdata = data.numberdata
if numberdata then
---~ print(table.serialize(numberspec))
sections.typesetnumber(numberdata,"number",numberspec or false,numberdata or false)
end
end
@@ -731,10 +703,27 @@ function commands.savedlisttitle(name,n,tag)
end
end
+-- function commands.savedlistprefixednumber(name,n)
+-- local data = cached[tonumber(n)]
+-- if data then
+-- local numberdata = data.numberdata
+-- if numberdata then
+-- helpers.prefix(data,data.prefixdata)
+-- sections.typesetnumber(numberdata,"number",numberdata or false)
+-- end
+-- end
+-- end
+
+if not lists.reordered then
+ function lists.reordered(data)
+ return data.numberdata
+ end
+end
+
function commands.savedlistprefixednumber(name,n)
local data = cached[tonumber(n)]
if data then
- local numberdata = data.numberdata
+ local numberdata = lists.reordered(data)
if numberdata then
helpers.prefix(data,data.prefixdata)
sections.typesetnumber(numberdata,"number",numberdata or false)
@@ -743,3 +732,90 @@ function commands.savedlistprefixednumber(name,n)
end
commands.discardfromlist = lists.discard
+
+-- new and experimental and therefore off by default
+
+local sort, setmetatableindex = table.sort, table.setmetatableindex
+
+lists.autoreorder = false -- true
+
+local function addlevel(t,k)
+ local v = { }
+ setmetatableindex(v,function(t,k)
+ local v = { }
+ t[k] = v
+ return v
+ end)
+ t[k] = v
+ return v
+end
+
+local internals = setmetatableindex({ }, function(t,k)
+
+ local sublists = setmetatableindex({ },addlevel)
+
+ local collected = lists.collected or { }
+
+ for i=1,#collected do
+ local entry = collected[i]
+ local numberdata = entry.numberdata
+ if numberdata then
+ local metadata = entry.metadata
+ if metadata then
+ local references = entry.references
+ if references then
+ local kind = metadata.kind
+ local name = numberdata.counter or metadata.name
+ local internal = references.internal
+ if kind and name and internal then
+ local sublist = sublists[kind][name]
+ sublist[#sublist + 1] = { internal, numberdata }
+ end
+ end
+ end
+ end
+ end
+
+ for k, v in next, sublists do
+ for k, v in next, v do
+ local tmp = { }
+ for i=1,#v do
+ tmp[i] = v[i]
+ end
+ sort(v,function(a,b) return a[1] < b[1] end)
+ for i=1,#v do
+ t[v[i][1]] = tmp[i][2]
+ end
+ end
+ end
+
+ setmetatableindex(t,nil)
+
+ return t[k]
+
+end)
+
+function lists.reordered(entry)
+ local numberdata = entry.numberdata
+ if lists.autoreorder then
+ if numberdata then
+ local metadata = entry.metadata
+ if metadata then
+ local references = entry.references
+ if references then
+ local kind = metadata.kind
+ local name = numberdata.counter or metadata.name
+ local internal = references.internal
+ if kind and name and internal then
+ return internals[internal] or numberdata
+ end
+ end
+ end
+ end
+ else
+ function lists.reordered(entry)
+ return entry.numberdata
+ end
+ end
+ return numberdata
+end
diff --git a/Master/texmf-dist/tex/context/base/strc-lst.mkvi b/Master/texmf-dist/tex/context/base/strc-lst.mkvi
index 16544f8662b..15a499c8b15 100644
--- a/Master/texmf-dist/tex/context/base/strc-lst.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-lst.mkvi
@@ -28,12 +28,6 @@
\unprotect
-% To be added to mult-def.lua:
-
-\ifdefined\c!renderingsetup \else \def\c!renderingsetup{renderingsetup} \fi % maybe not interfaced
-\ifdefined\c!filler \else \def\c!filler {filler} \fi
-\ifdefined\v!interactive \else \def\v!interactive {interactive} \fi
-
%D Lists are mostly used for tables of contents but are in fact a rather generic
%D feature of \CONTEXT. We seperate between storage and rendering and the current
%D implementation is a reworked version of all that was added in steps. As lists
@@ -247,7 +241,7 @@
\setupcurrentlist[#settings]%
\the\everystructurelist
% \doif{\listparameter\c!coupling}\v!on{\startlistreferences{#tag}}%
- \doplacestructurelist % maybe inline
+ \strc_lists_place_current % maybe inline
{#list}%
{\listparameter\c!criterium}%
{\listparameter\c!number}%
@@ -364,9 +358,15 @@
\ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}%
\dostoptagged}
+\def\rawstructurelistuservariable#name%
+ {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}}
+
\unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s!
\unexpanded\def\structurelistsecond{\structurelistuservariable\s!second} % s!
+\def\rawstructurelistfirst {\rawstructurelistuservariable\s!first } % s! % was \unexpanded
+\def\rawstructurelistsecond{\rawstructurelistuservariable\s!second} % s! % was \unexpanded
+
\unexpanded\def\doifstructurelisthaspageelse
{\ctxcommand{doiflisthaspageelse("\currentlist",\currentlistindex)}}
@@ -401,22 +401,22 @@
% TODO: pass extra tag name (contents, figures, bibliography ...)
-\unexpanded\def\doplacestructurelist#list#criterium#number#extras#order% beware, not a user command
+\unexpanded\def\strc_lists_place_current#list#criterium#number#extras#order% beware, not a user command
{\dostarttagged\t!list\empty
\ctxcommand{processlist{
names = "#list",
criterium = "#criterium",
- number = "\number#number",
+ number = "#number",
extras = "#extras",
order = "#order"
}}%
\dostoptagged}
-\unexpanded\def\doanalyzestructurelist#list#criterium#number%
+\unexpanded\def\strc_lists_analyze#list#criterium#number%
{\ctxcommand{analyzelist{
names = "#list",
criterium = "#criterium",
- number = "\number#number"
+ number = "#number"
}}}
\def\firststructureelementinlist#list%
@@ -1020,9 +1020,6 @@
% \chapter{Chapter three} \chapter{Chapter four} \chapter{Chapter five}
% \stoptext
-% overrulen interactie kan sneller, bv door hulpconstanten
-% te gebruiken en die te letten
-
\startsetups[\??listrenderings:d]
\ifvmode
\advance\leftskip\listparameter\c!margin
@@ -1182,6 +1179,10 @@
\letvalue{\??listinteractions\v!all }\v!all
\letvalue{\??listinteractions\v!yes }\v!all
+\def\listboxproperties {\strc_lists_get_reference_attribute}
+\def\listrenderingsetup {\the\t_lists_every_renderingtext}
+\def\listrenderingsynchronize{\the\t_lists_every_renderingsynchronize}
+
\unexpanded\def\strc_lists_interaction_check
{\iflocation
\strc_lists_interaction_check_yes
@@ -1271,6 +1272,17 @@
\listparameter\c!textcommand{\limitatetext{#text}\p_maxwidth{\splitsymbol{\listparameter\c!limittext}}}%
\fi}
+% public helpers
+
+\unexpanded\def\startcurrentlistentrywrapper
+ {\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute\bgroup}
+
+\let\stopcurrentlistentrywrapper\egroup
+
+\let\currentlistentryreferenceattribute \strc_lists_get_reference_attribute
+\let\currentlistentrydestinationattribute\strc_lists_get_destination_attribute
+\let\currentlistentrylimitedtext \strc_lists_limitated_text
+
% todo:
\def\utilitylistlength{\listlength} % old name ... uses in styles
@@ -1288,7 +1300,7 @@
\let\listlength\!!zerocount
\else
\setupcurrentlist[#settings]%
- \doanalyzestructurelist{#list}{\listparameter\c!criterium}{\listparameter\c!number}%
+ \strc_lists_analyze{#list}{\listparameter\c!criterium}{\listparameter\c!number}%
\normalexpanded{\endgroup\noexpand\edef\noexpand\listlength{\structurelistsize}}%
\fi
\strc_lists_set_mode}
diff --git a/Master/texmf-dist/tex/context/base/strc-mar.lua b/Master/texmf-dist/tex/context/base/strc-mar.lua
index 66ec047394d..7b3ac11e137 100644
--- a/Master/texmf-dist/tex/context/base/strc-mar.lua
+++ b/Master/texmf-dist/tex/context/base/strc-mar.lua
@@ -22,7 +22,6 @@ local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local getattribute = nodes.getattribute
local traversenodes = node.traverse
local texsetattribute = tex.setattribute
local texbox = tex.box
@@ -81,9 +80,9 @@ local ranges = {
local function resolve(t,k)
if k then
if trace_marks_set or trace_marks_get then
- report_marks("undefined: name=%s",k)
+ report_marks("undefined mark, name %a",k)
end
- local crap = { autodefined = true }
+ local crap = { autodefined = true } -- maybe set = 0 and reset = 0
t[k] = crap
return crap
else
@@ -103,7 +102,7 @@ local function sweep(head,first,last)
for n in traversenodes(head) do
local id = n.id
if id == glyph_code then
- local a = getattribute(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -112,7 +111,7 @@ local function sweep(head,first,last)
last = a
end
elseif id == hlist_code or id == vlist_code then
- local a = getattribute(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -141,7 +140,7 @@ function marks.synchronize(class,n,option)
local first, last = sweep(box.list,0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
- report_marks("synchronize: class=%s, box=%s, retaining",class,n)
+ report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
end
-- todo: check if still valid first/last in range
first = lasts[class] or 0
@@ -158,12 +157,12 @@ function marks.synchronize(class,n,option)
end
range.first, range.last = first, last
if trace_marks_get or trace_marks_set then
- report_marks("synchronize: class=%s, first=%s, last=%s",class,range.first,range.last)
+ report_marks("action %a, class %a, first %a, last %a","synchronize",class,range.first,range.last)
end
end
end
elseif trace_marks_get or trace_marks_set then
- report_marks("synchronize: class=%s, box=%s, no content",class,n)
+ report_marks("action %s, class %a, box %a","synchronize without content",class,n)
end
end
@@ -232,7 +231,7 @@ function marks.relate(name,chain)
children[#children+1] = name
end
elseif trace_marks_set then
- report_marks("invalid relation: name=%s, chain=%s",name,chain or "-")
+ report_marks("error: invalid relation, name %a, chain %a",name,chain)
end
end
end
@@ -246,7 +245,7 @@ local function resetchildren(new,name)
local ci = children[i]
new[ci] = false
if trace_marks_set then
- report_marks("reset: parent=%s, child=%s",name,ci)
+ report_marks("action %a, parent %a, child %a","reset",name,ci)
end
resetchildren(new,ci)
end
@@ -264,13 +263,16 @@ function marks.set(name,value)
dn = data[name]
end
dn.set = topofstack
+ if not dn.reset then
+ dn.reset = 0 -- in case of selfdefined
+ end
local top = stack[topofstack]
local new = { }
if top then
for k, v in next, top do
local d = data[k]
- local r = d.reset
- local s = d.set
+ local r = d.reset or 0
+ local s = d.set or 0
if r <= topofstack and s < r then
new[k] = false
else
@@ -284,9 +286,9 @@ function marks.set(name,value)
stack[topofstack] = new
if trace_marks_set then
if name == child then
- report_marks("set: name=%s, index=%s, value=%s",name,topofstack,value)
+ report_marks("action %a, name %a, index %a, value %a","set",name,topofstack,value)
else
- report_marks("set: parent=%s, child=%s, index=%s, value=%s",parent,child,topofstack,value)
+ report_marks("action %a, parent %a, child %a, index %a, value %a","set",parent,child,topofstack,value)
end
end
texsetattribute("global",a_marks,topofstack)
@@ -296,7 +298,7 @@ end
local function reset(name)
if v_all then
if trace_marks_set then
- report_marks("reset: all")
+ report_marks("action %a","reset all")
end
stack = { }
for name, dn in next, data do
@@ -315,7 +317,7 @@ local function reset(name)
dn = data[name]
end
if trace_marks_set then
- report_marks("reset: name=%s, index=%s",name,topofstack)
+ report_marks("action %a, name %a, index %a","reset",name,topofstack)
end
dn.reset = topofstack
local children = dn.children
@@ -371,7 +373,7 @@ local function resolve(name,first,last,strict,quitonfalse,notrace)
step, method = 1, "top-down"
end
if trace_marks_get and not notrace then
- report_marks("request: strategy=%s, name=%s, parent=%s, strict=%s",method,child,parent or "",tostring(strict or false))
+ report_marks("action %a, strategy %a, name %a, parent %a, strict %a","request",method,child,parent,strict or false)
end
if trace_marks_all and not notrace then
marks.show(first,last)
@@ -380,28 +382,28 @@ local function resolve(name,first,last,strict,quitonfalse,notrace)
local s = dn.set
if first <= last and first <= r then
if trace_marks_get and not notrace then
- report_marks("reset (first case): name=%s, first=%s, last=%s, reset=%s, index=%s",name,first,last,r,first)
+ report_marks("action %a, name %a, first %a, last %a, reset %a, index %a","reset first",name,first,last,r,first)
end
elseif first >= last and last <= r then
if trace_marks_get and not notrace then
- report_marks("reset (last case): name=%s, first=%s, last=%s, reset=%s, index=%s",name,first,last,r,last)
+ report_marks("action %a, name %a, first %a, last %a, reset %a, index %a","reset last",name,first,last,r,last)
end
elseif not stack[first] or not stack[last] then
if trace_marks_get and not notrace then
-- a previous or next method can give an out of range, which is valid
- report_marks("out of range: name=%s, reset=%s, index=%s",name,r,first)
+ report_marks("error: out of range, name %a, reset %a, index %a",name,r,first)
end
elseif strict then
local top = stack[first]
local fullchain = dn.fullchain
if not fullchain or #fullchain == 0 then
if trace_marks_get and not notrace then
- report_marks("no full chain, trying: name=%s, first=%s, last=%s",name,first,last)
+ report_marks("warning: no full chain, trying again, name %a, first %a, last %a",name,first,last)
end
return resolve(name,first,last)
else
if trace_marks_get and not notrace then
- report_marks("found chain: %s",concat(fullchain," => "))
+ report_marks("found chain [ % => T ]",fullchain)
end
local chaindata, chainlength = { }, #fullchain
for i=1,chainlength do
@@ -410,7 +412,7 @@ local function resolve(name,first,last,strict,quitonfalse,notrace)
local value = resolve(cname,first,last,false,false,true)
if value == "" then
if trace_marks_get and not notrace then
- report_marks("quit chain: name=%s, reset=%s, start=%s",name,r,first)
+ report_marks("quitting chain, name %a, reset %a, start %a",name,r,first)
end
return ""
else
@@ -419,28 +421,28 @@ local function resolve(name,first,last,strict,quitonfalse,notrace)
end
end
if trace_marks_get and not notrace then
- report_marks("chain list: %s",concat(chaindata," => "))
+ report_marks("using chain [ % => T ]",chaindata)
end
local value, index, found = resolve(name,first,last,false,false,true)
if value ~= "" then
if trace_marks_get and not notrace then
- report_marks("following chain: %s",concat(fullchain," => "))
+ report_marks("following chain [ % => T ]",chaindata)
end
for i=1,chainlength do
local cname = fullchain[i]
if data[cname].set > 0 and chaindata[i] ~= found[cname] then
if trace_marks_get and not notrace then
- report_marks("empty in chain: name=%s, reset=%s, index=%s",name,r,first)
+ report_marks("quiting chain, name %a, reset %a, index %a",name,r,first)
end
return ""
end
end
if trace_marks_get and not notrace then
- report_marks("found: name=%s, reset=%s, start=%s, index=%s, value=%s",name,r,first,index,value)
+ report_marks("found in chain, name %a, reset %a, start %a, index %a, value %a",name,r,first,index,value)
end
return value, index, found
elseif trace_marks_get and not notrace then
- report_marks("not found: name=%s, reset=%s",name,r)
+ report_marks("not found, name %a, reset %a",name,r)
end
end
else
@@ -455,18 +457,18 @@ local function resolve(name,first,last,strict,quitonfalse,notrace)
end
elseif value == true then
if trace_marks_get and not notrace then
- report_marks("quit: name=%s, reset=%s, start=%s, index=%s",name,r,first,i)
+ report_marks("quitting steps, name %a, reset %a, start %a, index %a",name,r,first,i)
end
return ""
elseif value ~= "" then
if trace_marks_get and not notrace then
- report_marks("found: name=%s, reset=%s, start=%s, index=%s, value=%s",name,r,first,i,value)
+ report_marks("found in steps, name %a, reset %a, start %a, index %a, value %a",name,r,first,i,value)
end
return value, i, current
end
end
if trace_marks_get and not notrace then
- report_marks("not found: name=%s, reset=%s",name,r)
+ report_marks("not found in steps, name %a, reset %a",name,r)
end
end
end
@@ -481,8 +483,8 @@ local function doresolve(name,rangename,swap,df,dl,strict)
local range = ranges[rangename] or ranges[v_page]
local first, last = range.first, range.last
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, swap=%s, first=%s, last=%s, df=%s, dl=%s, strict=%s",
- name,rangename,tostring(swap or false),first,last,df,dl,tostring(strict or false))
+ report_marks("action %a, name %a, range %a, swap %a, first %a, last %a, df %a, dl %a, strict %a",
+ "resolving",name,rangename,swap or false,first,last,df,dl,strict or false)
end
if swap then
first, last = last + df, first + dl
@@ -515,11 +517,11 @@ methods[v_next_nocheck] = function(name,range) return doresolve(name,range,t
local function do_first(name,range,check)
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, resolve first",name,range)
+ report_marks("action %a, name %a, range %a","resolving first",name,range)
end
local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, resolve last",name,range)
+ report_marks("action %a, name %a, range %a","resolving last",name,range)
end
local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
if f_found and l_found and l_index > f_index then
@@ -529,25 +531,25 @@ local function do_first(name,range,check)
local sn = si[name]
if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, index=%s, value=%s",name,range,i,sn)
+ report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
end
return sn, i, si
end
end
end
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, using first",name,range)
+ report_marks("resolved, name %a, range %a, using first",name,range)
end
return f_value, f_index, f_found
end
local function do_last(name,range,check)
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, resolve first",name,range)
+ report_marks("action %a, name %a, range %a","resolving first",name,range)
end
local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, resolve last",name,range)
+ report_marks("action %a, name %a, range %a","resolving last",name,range)
end
local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
if f_found and l_found and l_index > f_index then
@@ -557,14 +559,14 @@ local function do_last(name,range,check)
local sn = si[name]
if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, index=%s, value=%s",name,range,i,sn)
+ report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
end
return sn, i, si
end
end
end
if trace_marks_get then
- report_marks("resolve: name=%s, range=%s, using last",name,range)
+ report_marks("resolved, name %a, range %a, using first",name,range)
end
return l_value, l_index, l_found
end
@@ -584,9 +586,9 @@ local function fetched(name,range,method)
if not trace_marks_get then
-- no report
elseif value == "" then
- report_marks("nothing fetched: name=%s, range=%s, method=%s",name,range,method)
+ report_marks("nothing fetched, name %a, range %a, method %a",name,range,method)
else
- report_marks("marking fetched: name=%s, range=%s, method=%s, value=%s",name,range,method,value)
+ report_marks("marking fetched, name %a, range %a, method %a, value %a",name,range,method,value)
end
return value or ""
end
@@ -638,7 +640,7 @@ end
function marks.fetch(name,range,method) -- chapter page first | chapter column:1 first
if trace_marks_get then
- report_marks("marking asked: name=%s, range=%s, method=%s",name,range,method)
+ report_marks("marking requested, name %a, range %a, method %a",name,range,method)
end
if method == "" or method == v_default then
fetchonemark(name,range,v_first)
@@ -689,6 +691,6 @@ commands.fetchtwomarks = marks.fetchtwomarks
commands.fetchallmarks = marks.fetchallmarks
function commands.doifelsemarking(str) -- can be shortcut
- commands.testcase(marks.exists(str))
+ commands.doifelse(marks.exists(str))
end
diff --git a/Master/texmf-dist/tex/context/base/strc-mar.mkiv b/Master/texmf-dist/tex/context/base/strc-mar.mkiv
index 19fc30c3191..3685b66a702 100644
--- a/Master/texmf-dist/tex/context/base/strc-mar.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-mar.mkiv
@@ -77,8 +77,14 @@
\def\strc_markings_synchronize[#1][#2][#3]% #1=class #2=boxnumber (some day also name) #3=options, maybe second argument table
{\ifvoid#2\else\ctxcommand{synchronizemarking("#1",\number#2,"#3")}\fi}
+% \def\doifelsemarking#1% why no \unexpanded
+% {\ctxcommand{doifelsemarking("#1")}}
+
+% \def\doifelsemarking#1%
+% {\normalexpanded{\noexpand\ctxcommand{doifelsemarking("\noexpand\detokenize{#1}")}}}
+
\def\doifelsemarking#1%
- {\ctxcommand{doifelsemarking("#1")}}
+ {\ctxcommand{doifelsemarking(\!!bs#1\!!es)}}
% \appendtoks
% \strc_markings_synchronize[\v!page][\normalpagebox][\v!keep]% keep if no marks
diff --git a/Master/texmf-dist/tex/context/base/strc-mat.mkii b/Master/texmf-dist/tex/context/base/strc-mat.mkii
index d85df79e2f6..34f716f3bdf 100644
--- a/Master/texmf-dist/tex/context/base/strc-mat.mkii
+++ b/Master/texmf-dist/tex/context/base/strc-mat.mkii
@@ -46,7 +46,7 @@
% \to \everymath
% \placeformula\startformula
-% H(K|M,C) = H(K|C) - H(M|C)\eqno{\hbox{(\in{}[eq:keyapp])}}
+% H(K|M,C) = H(K|C) - H(M|C)\eqno{\hbox{(\in{}[eq:keyapp])}}
% \stopformula
\unexpanded\def\mathortext
@@ -1608,10 +1608,10 @@
\def\domathtext#1%
{\mathchoice
- {\dodomathtext\displaystyle\textface {#1}}%
- {\dodomathtext\textstyle \textface {#1}}%
- {\dodomathtext\textstyle \scriptface {#1}}%
- {\dodomathtext\textstyle \scriptscriptface{#1}}}
+ {\dodomathtext\displaystyle \textface {#1}}%
+ {\dodomathtext\textstyle \textface {#1}}%
+ {\dodomathtext\scriptstyle \scriptface {#1}}%
+ {\dodomathtext\scriptscriptstyle\scriptscriptface{#1}}}
\def\dodomathtext#1#2#3% no \everymath !
%{\hbox{\everymath{#1}\switchtobodyfont [#2]#3}} % 15 sec
diff --git a/Master/texmf-dist/tex/context/base/strc-mat.mkiv b/Master/texmf-dist/tex/context/base/strc-mat.mkiv
index 121808cd13a..20fa078a2d3 100644
--- a/Master/texmf-dist/tex/context/base/strc-mat.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-mat.mkiv
@@ -1,4 +1,5 @@
%D \module
+%D \module
%D [ file=strc-mat,
%D version=2008.10.20,
%D title=\CONTEXT\ Structure Macros,
@@ -39,51 +40,54 @@
\c!expansion=\v!yes, % maybe automatically
\c!spacebefore=\v!big,
\c!spaceafter=\formulaparameter\c!spacebefore,
- \c!leftmargin=\!!zeropoint,
- \c!rightmargin=\!!zeropoint,
+ \c!leftmargin=\zeropoint,
+ \c!rightmargin=\zeropoint,
\c!indentnext=\v!no,
\c!alternative=\s!default,
\c!strut=\v!no,
- \c!distance=1em]
+ \c!distance=\emwidth]
\setupsubformulas % subformulas could be last in chain
[\c!indentnext=\formulaparameter\c!indentnext]
-\definecounter
+\definecounter % one ?
[\v!formula]
\defineconversionset
[\v!formula]
- [numbers,characters]
+ [numbers,characters] % no \v! ?
-\strc_counter_preset_using_parameter\setupformulas\sharedcounterparameter
+\installcounterassociation{formula} \registerformulacounter\v!formula % currently we only have one
\appendtoks
- \strc_counter_setup_using_parameter\v!formula\formulaparameter
- \strc_counters_check_setup\v!formula % yes or no
+ \synchronizeformulacounters
\to \everysetupformula
+% \appendtoks
+% \synchronizeformulacounters
+% \to \everydefineformula
+
\setupformulas
[\c!numberconversionset=\v!formula] % why forgotten
\appendtoks
\normalexpanded{\definelist[\currentformula]}% is expansion needed?
- \setuevalue{\e!start\currentformula\v!formula}{\dostartformula{\currentformula}}%
- \setuevalue{\e!stop \currentformula\v!formula}{\dostopformula}%
+ \setuevalue{\e!start\currentformula\v!formula}{\strc_formulas_start_formula{\currentformula}}%
+ \setuevalue{\e!stop \currentformula\v!formula}{\strc_formulas_stop_formula}%
\to \everydefineformula
\definelist[\v!formula]
-\setuvalue{\e!start\v!formula}{\dostartformula{}}
-\setuvalue{\e!stop \v!formula}{\dostopformula}
+\setuvalue{\e!start\v!formula}{\strc_formulas_start_formula{}}
+\setuvalue{\e!stop \v!formula}{\strc_formulas_stop_formula}
-\let\dostartformula\relax % defined later
-\let\dostopformula \relax % defined later
+\let\strc_formulas_start_formula\relax % defined later
+\let\strc_formulas_stop_formula \relax % defined later
-\unexpanded\def\defineformulaalternative % this might change ... start and stop can become keys
- {\dotripleargument\dodefineformulaalternative} % to the general define .. s!startcommand
+\unexpanded\def\defineformulaalternative % this might change ... start and stop can become keys
+ {\dotripleargument\strc_formulas_define_alternative} % to the general define .. s!startcommand
-\def\dodefineformulaalternative[#1][#2][#3]%
+\def\strc_formulas_define_alternative[#1][#2][#3]%
{\setvalue{\e!start#1\v!formula}{#2}%
\setvalue{\e!stop #1\v!formula}{#3}}
@@ -126,8 +130,8 @@
% implementation
-\def\storecurrentformulanumber#1#2#3#4#5% ref, todo:str, \sync % todo: title etc (like float)
- {\settrue\handleformulanumber
+\unexpanded\def\strc_formulas_store_number#1#2#3#4#5% ref, todo:str, \sync % todo: title etc (like float)
+ {\settrue\c_strc_formulas_handle_number
\strc_counters_register_component
{formula}%
\setupcurrentformula \formulaparameter \detokenizedformulaparameter
@@ -143,68 +147,75 @@
% modes: 0=unset, 1=forced, 2=none, 3=reference
-\newconstant\placeformulanumbermode
-\newconstant\formulasnumbermode
-\newconstant\subformulasnumbermode
-\newconstant\nestedformulanumbermode
+\newconstant\c_strc_formulas_place_number_mode
+\newconstant\c_strc_formulas_number_mode
+\newconstant\c_strc_formulas_sub_number_mode
+\newconstant\c_strc_formulas_nested_number_mode
\appendtoks
- \placeformulanumbermode \zerocount
- \formulasnumbermode \zerocount
- \subformulasnumbermode \zerocount
- \nestedformulanumbermode\zerocount
+ \c_strc_formulas_place_number_mode \zerocount
+ \c_strc_formulas_number_mode \zerocount
+ \c_strc_formulas_sub_number_mode \zerocount
+ \c_strc_formulas_nested_number_mode\zerocount
\to \everyresetformulas
-\newconditional\handleformulanumber
-\newconditional\incrementformulanumber
-
-\newconditional\insideplaceformula
-\newconditional\insideplacesubformula
-\newconditional\insideformulas
-\newconditional\insidesubformulas
+\newconditional\c_strc_formulas_handle_number
+\newconditional\c_strc_formulas_increment
+\newconditional\c_strc_formulas_inside_place
+\newconditional\c_strc_formulas_inside_place_sub
+\newconditional\c_strc_formulas_inside_formulas
+\newconditional\c_strc_formulas_inside_formulas_sub
\appendtoks
- \global\setfalse\insideplaceformula
- \global\setfalse\insideplacesubformula
+ \global\setfalse\c_strc_formulas_inside_place
+ \global\setfalse\c_strc_formulas_inside_place_sub
\to \everyresetformulas
-\def\handleplaceformulanumbering % place formula
- {\settrue\handleformulanumber
- \docheckformulareference\placeformulanumbermode\currentplaceformulareference
- \glet\doplaceformulanumber \doplaceformulanumberindeed
- \glet\donestedformulanumber\donestedformulanumberindeed}
+\def\strc_formulas_place_numbering % place formula
+ {\settrue\c_strc_formulas_handle_number
+ \strc_formulas_check_reference\c_strc_formulas_place_number_mode\currentplaceformulareference
+ \glet\strc_formulas_place_number\strc_formulas_place_number_indeed
+ \glet\strc_formulas_place_number_nested\strc_formulas_place_number_nested_indeed}
-\def\handleformulasnumbering % formulas
- {\docheckformulareference\formulasnumbermode\currentformulareference}
+\def\strc_formulas_handle_number % formulas
+ {\strc_formulas_check_reference\c_strc_formulas_number_mode\currentformulareference}
-\def\handlesubformulasnumbering % sub formulas
- {\docheckformulareference\subformulasnumbermode\currentsubformulasreference
+\def\strc_formulas_handle_sub_number_indeed % sub formulas
+ {\strc_formulas_check_reference\c_strc_formulas_sub_number_mode\currentsubformulasreference
\strc_counters_increment\v!formula
- \storecurrentformulanumber
+ \strc_formulas_store_number
\currentsubformulasreference
\empty
\currentsubformulasnumber
\currentsubformulassynchronize
- \currentsubformulasattribute
- }
+ \currentsubformulasattribute}
-\let\dotraceformulareferencestate\relax
-\let\doshowformulareferencestate \relax
+\def\strc_formulas_handle_sub_number % sub formulas
+ {\iftrialtypesetting
+ \strc_counters_save\v!formula
+ \strc_formulas_handle_sub_number_indeed
+ \strc_counters_restore\v!formula
+ \else
+ \strc_formulas_handle_sub_number_indeed
+ \fi}
-% \def\dotraceformulareferencestate
+\let\strc_formulas_reference_trace\relax
+\let\strc_formulas_reference_show \relax
+
+% \def\strc_formulas_reference_trace
% {\rlap{\hbox{\quad\tt\txx[%
-% \number\placeformulanumbermode,%
-% \number\formulasnumbermode,%
-% \number\subformulasnumbermode,%
-% \number\nestedformulanumbermode
+% \number\c_strc_formulas_place_number_mode,%
+% \number\c_strc_formulas_number_mode,%
+% \number\c_strc_formulas_sub_number_mode,%
+% \number\c_strc_formulas_nested_number_mode
% ]}}}
-% \def\doshowformulareferencestate
+% \def\strc_formulas_reference_show
% {\writestatus{\v!formula}%
-% {place: \number\placeformulanumbermode,\space
-% formula: \number\formulasnumbermode,\space
-% subformula: \number\subformulasnumbermode,\space
-% nested: \number\nestedformulanumbermode]}}
+% {place: \number\c_strc_formulas_place_number_mode,\space
+% formula: \number\c_strc_formulas_number_mode,\space
+% subformula: \number\c_strc_formulas_sub_number_mode,\space
+% nested: \number\c_strc_formulas_nested_number_mode]}}
\unexpanded\def\placecurrentformulanumber
{\rm % nodig ?
@@ -216,12 +227,12 @@
\namedtaggedlabeltexts
\t!formulalabel \v!formula
\t!formulanumber\v!formula
- {\ignorespaces\doplacecurrentformulanumber\removeunwantedspaces}%
+ {\ignorespaces\strc_formulas_place_current_number\removeunwantedspaces}%
\formulaparameter\c!right}%
\doif{\formulaparameter\c!location}\v!left{\hskip\formulaparameter\c!distance}}
-\def\doplacecurrentformulanumber
- {\dohandlecurrentformulareferences
+\unexpanded\def\strc_formulas_place_current_number
+ {\strc_formulas_handle_current_references
\labeltexts\currentformula{\convertedcounter[\v!formula][]}}
% \def\theboxdestinationattribute#1{\iflocation\ifx#1\relax\else\ifx#1\empty\else attr \destinationattribute#1\fi\fi\fi}
@@ -281,10 +292,10 @@
% currently we do the number, some day we will do the (sub) formula
-\def\dohandlecurrentformulareferences
- {\doshowformulareferencestate
- \ifnum\placeformulanumbermode=\plusthree
- \storecurrentformulanumber
+\def\strc_formulas_handle_current_references
+ {\strc_formulas_reference_show
+ \ifnum\c_strc_formulas_place_number_mode=\plusthree
+ \strc_formulas_store_number
\currentplaceformulareference
\empty
\currentplaceformulanumber
@@ -294,8 +305,8 @@
\glet\currentplaceformulasynchronize\relax
\theformuladestinationattribute\currentplaceformulaattribute
\fi
- \ifnum\formulasnumbermode=\plusthree
- \storecurrentformulanumber
+ \ifnum\c_strc_formulas_number_mode=\plusthree
+ \strc_formulas_store_number
\currentformulasreference
\empty
\currentformulasnumber
@@ -305,12 +316,12 @@
\glet\currentformulassynchronize\relax
\theformuladestinationattribute\currentformulasattribute
\fi
- \ifnum\subformulasnumbermode=\plusthree
+ \ifnum\c_strc_formulas_sub_number_mode=\plusthree
\currentsubformulassynchronize
\glet\currentsubformulassynchronize\relax
\fi
- \ifnum\nestedformulanumbermode=\plusthree
- \storecurrentformulanumber
+ \ifnum\c_strc_formulas_nested_number_mode=\plusthree
+ \strc_formulas_store_number
\currentnestedformulareference
\empty
\currentnestedformulanumber
@@ -323,55 +334,74 @@
% needs checking ... too many:
-\def\dohandleformulanumbering
+\def\strc_formulas_handle_numbering_indeed
{\strc_counters_increment\v!formula
\doiftext\currentplaceformulasuffix{\strc_counters_setown_sub\v!formula\plustwo\currentplaceformulasuffix}%
\placecurrentformulanumber}
-\def\dohandlesubformulanumbering
- {\doiftextelse\currentsubformulasuffix
+\def\strc_formulas_handle_numbering
+ {\iftrialtypesetting
+ \strc_counters_save\v!formula
+ \strc_formulas_handle_numbering_indeed
+ \strc_counters_restore\v!formula
+ \else
+ \strc_formulas_handle_numbering_indeed
+ \fi}
+
+\def\strc_formulas_handle_sub_numbering_indeed
+ {\let\strc_formulas_handle_sub_numbering\relax % else error: see math/numbering-001.tex
+ \doiftextelse\currentsubformulasuffix
{\strc_counters_setown_sub\v!formula\plustwo\currentsubformulasuffix}
{\strc_counters_increment_sub\v!formula\plustwo}%
- \placecurrentformulanumber}
+ \placecurrentformulanumber}
+
+\def\strc_formulas_handle_sub_numbering
+ {\iftrialtypesetting
+ \strc_counters_save\v!formula
+ \strc_formulas_handle_sub_numbering_indeed
+ \strc_counters_restore\v!formula
+ \else
+ \strc_formulas_handle_sub_numbering_indeed
+ \fi}
-\def\dododoformulanumber
- {\ifconditional\handleformulanumber
+\def\strc_formulas_number_indeed
+ {\ifconditional\c_strc_formulas_handle_number
\hbox\bgroup
% main counter
- \ifconditional\insidesubformulas
+ \ifconditional\c_strc_formulas_inside_formulas_sub
% nothing
\else
- \ifcase\formulasnumbermode
- \ifcase\placeformulanumbermode
- \dohandleformulanumbering
+ \ifcase\c_strc_formulas_number_mode
+ \ifcase\c_strc_formulas_place_number_mode
+ \strc_formulas_handle_numbering
\or
- \dohandleformulanumbering
+ \strc_formulas_handle_numbering
\or
% nothing
\or
- \dohandleformulanumbering
+ \strc_formulas_handle_numbering
\fi
\or
- \dohandleformulanumbering
+ \strc_formulas_handle_numbering
\or
% nothing
\or
- \dohandleformulanumbering
+ \strc_formulas_handle_numbering
\fi
\fi
% subcounter
- \ifconditional\insidesubformulas
- \ifcase\subformulasnumbermode
+ \ifconditional\c_strc_formulas_inside_formulas_sub
+ \ifcase\c_strc_formulas_sub_number_mode
% nothing
\or
- \dohandlesubformulanumbering
+ \strc_formulas_handle_sub_numbering
\or
% nothing
\or
- \dohandlesubformulanumbering
+ \strc_formulas_handle_sub_numbering
\fi
\fi
- \dotraceformulareferencestate
+ \strc_formulas_reference_trace
\egroup
\fi}
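
The guarded/_indeed pairs introduced above (\strc_formulas_handle_numbering, \strc_formulas_handle_sub_numbering, \strc_formulas_handle_sub_number) all follow one pattern: during a trial pass the same material is typeset once more for measuring, so an unguarded increment would advance the formula counter twice; saving the counter before the trial increment and restoring it afterwards keeps the trial run free of side effects. The guard reduced to its bare form (hypothetical macro name, helpers as used above):

    \def\mycounterstep % sketch of the guard used by the macros above
      {\iftrialtypesetting
         \strc_counters_save\v!formula      % snapshot before the trial pass
         \strc_counters_increment\v!formula
         \strc_counters_restore\v!formula   % roll back: the real pass increments again
       \else
         \strc_counters_increment\v!formula % real pass: the increment sticks
       \fi}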
@@ -393,10 +423,10 @@
%D
%D Otherwise we get a missing \type {$$} error reported.
-\def\resetdisplaymatheq
+\unexpanded\def\resetdisplaymatheq % when used?
{\let\normalleqno\gobbleoneargument \let\leqno\gobbleoneargument
\let\normalreqno\gobbleoneargument \let\eqno \gobbleoneargument
- \let\doplaceformulanumber\empty}
+ \let\strc_formulas_place_number\relax}
%D \macros
%D {startsubformulas}
@@ -431,28 +461,36 @@
% we don't use the skip's
-\def\forgetdisplayskips % to do
+\unexpanded\def\strc_formulas_forget_display_skips
{\abovedisplayskip \zeropoint
\belowdisplayskip \zeropoint
\abovedisplayshortskip\zeropoint
\belowdisplayshortskip\zeropoint}
-\def\predisplaysizethreshhold{2em} % was 3em
+% \def\predisplaysizethreshhold{2\emwidth} % was 3\emwidth
-\def\leftdisplayskip {\leftskip}
-\def\rightdisplayskip {\rightskip}
-\def\leftdisplaymargin {\formulaparameter\c!leftmargin}
-\def\rightdisplaymargin {\formulaparameter\c!rightmargin}
+\newdimen\d_strc_formulas_display_skip_left
+\newdimen\d_strc_formulas_display_skip_right
+\newdimen\d_strc_formulas_display_margin_left
+\newdimen\d_strc_formulas_display_margin_right
+\newdimen\d_strc_formulas_display_pre_threshold
+\newskip \d_strc_formulas_display_skip_par
-\def\beforedisplayspace
- {\doifnot{\formulaparameter\c!spacebefore}\v!none{\blank[\formulaparameter\c!spacebefore]}}
+\unexpanded\def\beforedisplayspace
+ {\edef\p_spacebefore{\formulaparameter\c!spacebefore}%
+ \ifx\p_spacebefore\v!none \else
+ \blank[\p_spacebefore]%
+ \fi}
-\def\afterdisplayspace
- {\doifnot{\formulaparameter\c!spaceafter }\v!none{\blank[\formulaparameter\c!spaceafter ]}}
+\unexpanded\def\afterdisplayspace
+ {\edef\p_spaceafter{\formulaparameter\c!spaceafter}%
+ \ifx\p_spaceafter\v!none \else
+ \blank[\p_spaceafter]%
+ \fi}
-\def\setdisplaydimensions
- {\displayindent\leftdisplayskip
- \advance\displayindent\leftdisplaymargin
+\unexpanded\def\setdisplaydimensions
+ {\displayindent\d_strc_formulas_display_skip_left
+ \advance\displayindent\d_strc_formulas_display_margin_left
\displaywidth\hsize
%\setlocalhsize
%\displaywidth\localhsize
@@ -461,15 +499,14 @@
\else
\advance\displaywidth\hangindent
\fi
- \advance\displaywidth\dimexpr-\displayindent-\rightdisplayskip-\rightdisplaymargin\relax
+ \advance\displaywidth\dimexpr-\displayindent-\d_strc_formulas_display_skip_right-\d_strc_formulas_display_margin_right\relax
\hsize\displaywidth} % new, else overfull in itemize
-\unexpanded\def\dostartformula#1%
- {\dodoubleempty\dodostartformula[#1]}
+\unexpanded\def\strc_formulas_start_formula#1%
+ {\dodoubleempty\strc_formulas_start_formula_indeed[#1]}
-\newskip\formulaparskip
-\newskip\formulastrutht
-\newskip\formulastrutdp
+% \newskip\formulastrutht
+% \newskip\formulastrutdp
%D \startbuffer
%D \startformula[9pt] x = 1 \stopformula
@@ -478,38 +515,56 @@
%D
%D \typebuffer \getbuffer
-\def\dodostartformula[#1][#2]% setting leftskip adaption is slow !
+\unexpanded\def\strc_formulas_start_formula_indeed[#1][#2]% setting leftskip adaption is slow !
{\bgroup % HERE
\def\currentformula{#1}%
\dostarttagged\t!formula\currentformula
\the\everybeforedisplayformula
- \formulaparskip\parskip
- \formulastrutdp\strutdepth
- \formulastrutht\strutheight
- \doifsomething{#2}{\switchtoformulabodyfont[#2]}%
- \parskip\formulaparskip
- % may look better in itemizations
- \doif{\formulaparameter\c!option}\v!middle
- {\def\leftdisplayskip{\zeropoint}%
- \def\rightdisplayskip{\zeropoint}}%
- % this was an experiment
- \doifsomething{\formulaparameter\c!margin}% so we test first
- {\dosetleftskipadaption{\formulaparameter\c!margin}%
- \edef\leftdisplaymargin{\the\leftskipadaption}}% overloaded
- \long\def\dostartformula##1{\bgroup\dostarttagged\t!subformula\def\dostopformula{\dostoptagged\egroup}}%
- \freezedimenmacro\leftdisplayskip
- \freezedimenmacro\rightdisplayskip
- \freezedimenmacro\leftdisplaymargin
- \freezedimenmacro\rightdisplaymargin
- \freezedimenmacro\predisplaysizethreshhold
- \forgetdisplayskips
+ \d_strc_formulas_display_skip_par\parskip\relax
+ %\formulastrutdp\strutdepth
+ %\formulastrutht\strutheight
+ \edef\p_option {\formulaparameter\c!option}%
+ \edef\p_margin {\formulaparameter\c!margin}%
+ \edef\p_bodyfont{#2}%
+ %\ifx\p_bodyfont\empty
+ % \edef\p_bodyfont{\formulaparameter\c!bodyfont}%
+ %\fi
+ \ifx\p_bodyfont\empty \else
+ \switchtoformulabodyfont[#2]%
+ \fi
+ \parskip\d_strc_formulas_display_skip_par\relax
+ \ifx\p_option\v!middle
+ \d_strc_formulas_display_skip_left \zeropoint
+ \d_strc_formulas_display_skip_right\zeropoint
+ \else
+ \d_strc_formulas_display_skip_left \leftskip
+ \d_strc_formulas_display_skip_right\rightskip
+ \fi
+ \d_strc_formulas_display_margin_left \formulaparameter\c!leftmargin \relax
+ \d_strc_formulas_display_margin_right\formulaparameter\c!rightmargin\relax
+ \ifx\p_margin\empty \else
+ \dosetleftskipadaption\p_margin
+ \d_strc_formulas_display_margin_left\leftskipadaption
+ \fi
+ \let\strc_formulas_start_formula\strc_formulas_start_formula_nested
+ %\freezedimenmacro\predisplaysizethreshhold
+ \strc_formulas_forget_display_skips
\getvalue{\e!start\formulaparameter\c!alternative\v!formula}}
+\unexpanded\def\strc_formulas_start_formula_nested#1%
+ {\bgroup
+ \let\strc_formulas_stop_formula\strc_formulas_stop_formula_nested
+ \dostarttagged\t!subformula}
+
+\unexpanded\def\strc_formulas_stop_formula_nested
+ {\dostoptagged
+ \egroup}
+
% tagging of formulanumbers is not ok (we get two display maths blobs)
-\unexpanded\def\dostopformula
+\unexpanded\def\strc_formulas_stop_formula
{\dostarttagged\t!formulacaption\empty
- \doplaceformulanumber
+ \strc_formulas_place_number
\dostoptagged
\dostarttagged\t!formulacontent\empty
\getvalue{\e!stop\formulaparameter\c!alternative\v!formula}%
@@ -520,27 +575,31 @@
\egroup
\hangafter\minusone % added for side floats
\hangindent\zeropoint % added for side floats
- \setfalse\handleformulanumber
+ \setfalse\c_strc_formulas_handle_number
\the\everyresetformulas
\dorechecknextindentation} % here ?
% experiment:
\appendtoks
- \edef\currentformulagrid{\formulaparameter\c!grid}%
- \ifx\currentformulagrid\empty \else
- \spac_grids_snap_value_auto\currentformulagrid
+ \edef\p_grid{\formulaparameter\c!grid}%
+ \ifx\p_grid\empty \else
+ \spac_grids_snap_value_auto\p_grid
\fi
\to \everybeforedisplayformula
-\def\switchtoformulabodyfont{\switchtobodyfont}
+\unexpanded\def\switchtoformulabodyfont
+ {\switchtobodyfont}
-\setuvalue{\v!formula}{\dosingleempty\doformula}
+\setuvalue{\v!formula}{\dosingleempty\strc_formulas_formula}
-\def\doformula[#1]#2% todo: tagged
+\def\strc_formulas_formula[#1]#2% todo: tagged
{\begingroup
- \doifsomething{#1}{\switchtoformulabodyfont[#1]}%
- % not : \def\doformula[##1]##2{\mathematics{##2}}%
+ \edef\p_bodyfont{#1}%
+ \ifx\p_bodyfont\empty \else
+ \switchtoformulabodyfont[\p_bodyfont]%
+ \fi
+ % not : \def\strc_formulas_formula[##1]##2{\mathematics{##2}}%
\mathematics{#2}%
\endgroup}
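
A recurring rewrite in this file replaces \doifsomething{\formulaparameter\c!...}{...} by an \edef of the parameter into a \p_... macro followed by an \ifx test: because the copy is fully expanded, \ifx can compare it directly against \empty (or \v!none, \v!middle), and the value is still at hand for the action itself, as in \beforedisplayspace, \afterdisplayspace and the bodyfont/option/margin handling above. The idiom in isolation (key and action are stand-ins, not real parameters):

    \edef\p_foo{\formulaparameter\c!foo}%  % hypothetical key
    \ifx\p_foo\empty \else
      \dosomethingwith{\p_foo}%            % hypothetical action
    \fi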
@@ -558,7 +617,7 @@
\beforedisplayspace
\par
\ifvmode
- \prevdepth-\maxdimen % texbook pagina 79-80
+ \prevdepth-\maxdimen % texbook pagina 79-80
\fi
\noindent % else funny hlist with funny baselineskip
$$% \Ustartdisplaymath
@@ -601,26 +660,26 @@
% \fakewords{20}{40}
\unexpanded\def\startsubformulas
- {\dosingleempty\dostartsubformulas}
+ {\dosingleempty\strc_formulas_start_sub_formulas}
-\def\dostartsubformulas[#1]%
+\def\strc_formulas_start_sub_formulas[#1]%
{\edef\currentsubformulasreference{#1}%
- \global\settrue\insidesubformulas
- \handlesubformulasnumbering}
+ \global\settrue\c_strc_formulas_inside_formulas_sub
+ \strc_formulas_handle_sub_number}
\unexpanded\def\stopsubformulas
{\nonoindentation
\useindentnextparameter\subformulaparameter
\the\everyresetformulas % to be checked
- \global\setfalse\insidesubformulas
+ \global\setfalse\c_strc_formulas_inside_formulas_sub
\dorechecknextindentation} % here ?
%D Named subformulas (to be redone)
\unexpanded\def\startnamedsubformulas
- {\dosingleempty\dostartnamedsubformulas}
+ {\dosingleempty\strc_formulas_start_named_sub_formulas}
-\def\dostartnamedsubformulas[#1]#2%
+\def\strc_formulas_start_named_sub_formulas[#1]#2%
{\setformulalistentry{#2}%
\startsubformulas[#1]}
@@ -639,19 +698,19 @@
%D \typebuffer \getbuffer
\unexpanded\def\startformulas
- {\dosingleempty\dostartformulas}
+ {\dosingleempty\strc_formulas_start_formulas}
-\def\dostartformulas[#1]#2\stopformulas % new / to be internationalized
+\def\strc_formulas_start_formulas[#1]#2\stopformulas % new / to be internationalized
{\bgroup
\dostarttagged\t!formulaset\empty
- \global\settrue\insideformulas
+ \global\settrue\c_strc_formulas_inside_formulas
\edef\currentformulasreference{#1}%
- \handleformulasnumbering
+ \strc_formulas_handle_number
\let\currentformula\empty
- \forgetdisplayskips
+ \strc_formulas_forget_display_skips
\startdisplaymath
\setlocalhsize
- \long\unexpanded\def\startformula##1\stopformula
+ \unexpanded\def\startformula##1\stopformula
{\advance\scratchcounter\plusone}%
\scratchcounter\zerocount
#2% preroll
@@ -660,122 +719,135 @@
\fi
\hbox to \localhsize \bgroup
\hss
- \def\normalstartformula{\vskip-\strutdepth\Ustartdisplaymath}% i hate this
- \def\normalstopformula {\Ustopdisplaymath}%
- \unexpanded\def\startformula {\Ustartmath\vcenter\bgroup\normalstartformula}%
- \unexpanded\def\stopformula {\normalstopformula\egroup\Ustopmath\hss}%
+ \let\startformula\strc_formulas_nested_formula_start
+ \let\stopformula \strc_formulas_nested_formula_stop
#2%
\egroup
\stopdisplaymath
- \global\setfalse\insideformulas
+ \global\setfalse\c_strc_formulas_inside_formulas
\dostoptagged
\egroup
\the\everyresetformulas
\hangafter\minusone % added for side floats
\hangindent\zeropoint} % added for side floats
+\unexpanded\def\strc_formulas_nested_formula_start
+ {\Ustartmath
+ \vcenter\bgroup
+ \vskip-\strutdepth
+ \Ustartdisplaymath}
+
+\unexpanded\def\strc_formulas_nested_formula_stop
+ {\Ustopdisplaymath
+ \egroup
+ \Ustopmath
+ \hss}
+
% place
-\def\inhibitformulanumberflag{-}
-\def\forceformulanumberflag {+}
+\def\m_strc_formulas_flag_inhibit{-}
+\def\m_strc_formulas_flag_force {+}
-\def\docheckformulareference#1#2%
+\def\strc_formulas_check_reference#1#2%
{#1\unless\ifx\namedformulaentry\empty % \relax % new 29/8/2010
\plusthree
\else\ifx#2\empty
\zerocount
- \else\ifx#2\forceformulanumberflag
+ \else\ifx#2\m_strc_formulas_flag_force
\plusone
- \else\ifx#2\inhibitformulanumberflag
+ \else\ifx#2\m_strc_formulas_flag_inhibit
\plustwo
\else
\plusthree
\fi\fi\fi\fi}
-\unexpanded\def\formulanumber{\doformulanumber} % for the moment
+\unexpanded\def\formulanumber
+ {\strc_formulas_number} % for the moment
-\def\doformulanumber
- {\dosingleempty\dodoformulanumber}
+\def\strc_formulas_number
+ {\dosingleempty\strc_formulas_number_again}
-\def\dodoformulanumber[#1]%
+\def\strc_formulas_number_again[#1]%
{\def\currentformulareference{#1}%
- \dosinglegroupempty\dododoformulanumber}
-
-\def\redoformulanumber#1%
- {\def\currentformulasuffix{#1}%
- \dododoformulanumber}
+ \dosinglegroupempty\strc_formulas_number_indeed}
\unexpanded\def\placeformula
- {\global\settrue\insideplaceformula
- \settrue\incrementformulanumber
- \dosingleempty\doplaceformula}
+ {\global\settrue\c_strc_formulas_inside_place
+ \settrue\c_strc_formulas_increment
+ \dosingleempty\strc_formulas_place}
\unexpanded\def\placesubformula
- {\global\settrue\insideplacesubformula
- \setfalse\incrementformulanumber
- \dosingleempty\doplaceformula}
+ {\global\settrue\c_strc_formulas_inside_place_sub
+ \setfalse\c_strc_formulas_increment
+ \dosingleempty\strc_formulas_place}
-\def\doplaceformula[#1]%
+\def\strc_formulas_place[#1]%
{\def\currentplaceformulareference{#1}%
\let\currentplaceformulasuffix\empty
- \doifnextbgroupelse\moreplaceformula\redoplaceformula} % [ref]{}
+ \doifnextbgroupelse\strc_formulas_place_yes\strc_formulas_place_nop\strc_formulas_place_nop} % [ref]{}
-\def\moreplaceformula#1%
+\def\strc_formulas_place_yes#1%
{\def\currentplaceformulasuffix{#1}%
- \redoplaceformula}
+ \strc_formulas_place_nop}
-\def\redoplaceformula
- {\doifnextcharelse$\dispplaceformula\dodoplaceformula} % [ref]$$ [ref]\start
+\def\strc_formulas_place_nop
+ {\doifnextcharelse$\strc_formulas_place_pickup\strc_formulas_place_indeed} % [ref]$$ [ref]\start
-\def\dodoplaceformula
- {\handleplaceformulanumbering}
+\def\strc_formulas_place_indeed
+ {\strc_formulas_place_numbering}
-\def\dispplaceformula$$#1$$%
- {\handleplaceformulanumbering
- \dostartformula{}#1\dostopformula}
+\def\strc_formulas_place_pickup$$#1$$%
+ {\strc_formulas_place_numbering
+ \strc_formulas_start_formula{}#1\strc_formulas_stop_formula}
\let\startplaceformula\placeformula
\let\stopplaceformula \relax
% to be checked
-\let\doplaceformulanumber \relax
-\let\donestedformulanumber\gobbletwoarguments
+\let\strc_formulas_place_number \relax
+\let\strc_formulas_place_number_nested\gobbletwoarguments
-\def\donestedformulanumberindeed#1#2%
+\def\strc_formulas_place_number_nested_indeed#1#2%
{\def\currentnestedformulareference{#1}%
\def\currentnestedformulasuffix{#2}%
- \glet\doplaceformulanumber\relax
- \docheckformulareference\nestedformulanumbermode\currentnestedformulareference
- \ifcase\nestedformulanumbermode
+ \glet\strc_formulas_place_number\relax
+ \strc_formulas_check_reference\c_strc_formulas_nested_number_mode\currentnestedformulareference
+ \ifcase\c_strc_formulas_nested_number_mode
% nothing
\or
- \doformulanumber
+ \strc_formulas_number % hm, looks ahead for []
\or
% nothing
\or
- \doformulanumber
+ \strc_formulas_number % hm, looks ahead for []
\fi}
-\def\doplaceformulanumberindeed
- {\glet\doplaceformulanumber\relax
+\def\strc_formulas_place_number_indeed
+ {\glet\strc_formulas_place_number\relax
\doifelse{\formulaparameter\c!location}\v!left
- {\normalleqno{\dododoformulanumber}}
- {\normalreqno{\dododoformulanumber}}}
+ {\normalleqno{\strc_formulas_number_indeed}}
+ {\normalreqno{\strc_formulas_number_indeed}}}
% todo
\unexpanded\def\placenamedformula
- {\dosingleempty\doplacenamedformula}
+ {\dosingleempty\strc_formulase_place_named}
-\def\doplacenamedformula[#1]#2%
+\def\strc_formulase_place_named
{\iffirstargument
- \def\next{\placeformula[#1]}%
+ \expandafter\strc_formulase_place_named_yes
\else
- \let\next\placeformula
- \fi
- \setformulalistentry{#2}%
- \next}
+ \expandafter\strc_formulase_place_named_nop
+ \fi}
+
+\def\strc_formulase_place_named_yes[#1]#2%
+ {\setformulalistentry{#2}%
+ \placeformula[#1]}
+
+\def\strc_formulase_place_named_nop[#1]#2%
+ {\setformulalistentry{#2}%
+ \placeformula}
\let\namedformulaentry\empty % \relax % this will become a key/value so that we can do bookmarks
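
The \c_strc_formulas_*_number_mode constants introduced in this file hold one of the four states listed near the top of the diff ("modes: 0=unset, 1=forced, 2=none, 3=reference"), and \strc_formulas_check_reference above computes that state from the bracketed argument: a pending named list entry always yields 3, an empty reference gives 0, the + flag forces a number (1), the - flag inhibits it (2), and anything else counts as a real reference (3); the \ifcase dispatch in \strc_formulas_number_indeed then decides whether \strc_formulas_handle_numbering runs. The same decision as a stand-alone Lua sketch (the function name is made up):

    -- 0 = unset, 1 = forced, 2 = none/inhibited, 3 = reference
    local function numbermode(namedentry,reference)
        if namedentry ~= "" then return 3   -- named list entry: always numbered
        elseif reference == ""  then return 0
        elseif reference == "+" then return 1
        elseif reference == "-" then return 2
        else                         return 3
        end
    end
    print(numbermode("","+"))      --> 1
    print(numbermode("","eq:key")) --> 3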
diff --git a/Master/texmf-dist/tex/context/base/strc-not.lua b/Master/texmf-dist/tex/context/base/strc-not.lua
index 6a3f3bfad3d..b3a50c0d6f4 100644
--- a/Master/texmf-dist/tex/context/base/strc-not.lua
+++ b/Master/texmf-dist/tex/context/base/strc-not.lua
@@ -13,18 +13,19 @@ local texcount = tex.count
local trace_notes = false trackers.register("structures.notes", function(v) trace_notes = v end)
local trace_references = false trackers.register("structures.notes.references", function(v) trace_references = v end)
-local report_notes = logs.reporter("structure","notes")
+local report_notes = logs.reporter("structure","notes")
-local structures = structures
-local helpers = structures.helpers
-local lists = structures.lists
-local sections = structures.sections
-local counters = structures.counters
-local notes = structures.notes
-local references = structures.references
+local structures = structures
+local helpers = structures.helpers
+local lists = structures.lists
+local sections = structures.sections
+local counters = structures.counters
+local notes = structures.notes
+local references = structures.references
+local counterspecials = counters.specials
-notes.states = notes.states or { }
-lists.enhancers = lists.enhancers or { }
+notes.states = notes.states or { }
+lists.enhancers = lists.enhancers or { }
storage.register("structures/notes/states", notes.states, "structures.notes.states")
@@ -33,10 +34,18 @@ local notedata = { }
local variables = interfaces.variables
local context = context
+local commands = commands
-- state: store, insert, postpone
-function notes.store(tag,n)
+local function store(tag,n)
+ -- somewhat weird but this is a cheap hook spot
+ if not counterspecials[tag] then
+ counterspecials[tag] = function(tag)
+ context.doresetlinenotecompression(tag) -- maybe flag that controls it
+ end
+ end
+ --
local nd = notedata[tag]
if not nd then
nd = { }
@@ -46,14 +55,20 @@ function notes.store(tag,n)
nd[nnd] = n
local state = notestates[tag]
if not state then
- report_notes("unknown state for %s",tag)
+ report_notes("unknown state for %a",tag)
elseif state.kind ~= "insert" then
if trace_notes then
- report_notes("storing %s with state %s as %s",tag,state.kind,nnd)
+ report_notes("storing %a with state %a as %a",tag,state.kind,nnd)
end
state.start = state.start or nnd
end
- context(#nd)
+ return #nd
+end
+
+notes.store = store
+
+function commands.storenote(tag,n)
+ context(store(tag,n))
end
local function get(tag,n) -- tricky ... only works when defined
@@ -63,7 +78,7 @@ local function get(tag,n) -- tricky ... only works when defined
nd = nd[n]
if nd then
if trace_notes then
- report_notes("getting note %s of '%s' with listindex '%s'",n,tag,nd)
+ report_notes("getting note %a of %a with listindex %a",n,tag,nd)
end
-- is this right?
-- local newdata = lists.collected[nd]
@@ -84,21 +99,64 @@ notes.getn = getn
-- we could make a special enhancer
-function notes.listindex(tag,n)
+local function listindex(tag,n)
local ndt = notedata[tag]
return ndt and ndt[n]
end
+notes.listindex = listindex
+
+function commands.notelistindex(tag,n)
+ context(listindex(tag,n))
+end
+
+local function setstate(tag,newkind)
+ local state = notestates[tag]
+ if trace_notes then
+ report_notes("setting state of %a from %s to %s",tag,(state and state.kind) or "unset",newkind)
+ end
+ if not state then
+ state = {
+ kind = newkind
+ }
+ notestates[tag] = state
+ elseif newkind == "insert" then
+ if not state.start then
+ state.kind = newkind
+ end
+ else
+ state.kind = newkind
+ end
+ -- state.start can already be set and will be set when an entry is added or flushed
+ return state
+end
+
+local function getstate(tag)
+ local state = notestates[tag]
+ return state and state.kind or "unknown"
+end
+
+notes.setstate = setstate
+notes.getstate = getstate
+
+commands.setnotestate = setstate
+
+function commands.getnotestate(tag)
+ context(getstate(tag))
+end
+
function notes.define(tag,kind,number)
- local state = notes.setstate(tag,kind)
+ local state = setstate(tag,kind)
state.number = number
end
+commands.definenote = notes.define
+
function notes.save(tag,newkind)
local state = notestates[tag]
if state and not state.saved then
if trace_notes then
- report_notes("saving state of '%s': %s -> %s",tag,state.kind,newkind or state.kind)
+ report_notes("saving state of %a, old: %a, new %a",tag,state.kind,newkind or state.kind)
end
state.saveddata = notedata[tag]
state.savedkind = state.kind
@@ -112,7 +170,7 @@ function notes.restore(tag,forcedstate)
local state = notestates[tag]
if state and state.saved then
if trace_notes then
- report_notes("restoring state of '%s': %s -> %s",tag,state.kind,state.savedkind)
+ report_notes("restoring state of %a, old: %a, new: %a",tag,state.kind,state.savedkind)
end
notedata[tag] = state.saveddata
state.kind = forcedstate or state.savedkind
@@ -121,33 +179,10 @@ function notes.restore(tag,forcedstate)
end
end
-function notes.setstate(tag,newkind)
- local state = notestates[tag]
- if trace_notes then
- report_notes("setting state of '%s' from %s to %s",tag,(state and state.kind) or "unset",newkind)
- end
- if not state then
- state = {
- kind = newkind
- }
- notestates[tag] = state
- elseif newkind == "insert" then
- if not state.start then
- state.kind = newkind
- end
- else
- state.kind = newkind
- end
- -- state.start can already be set and will be set when an entry is added or flushed
- return state
-end
-
-function notes.getstate(tag)
- local state = notestates[tag]
- context(state and state.kind or "unknown")
-end
+commands.savenote = notes.save
+commands.restorenote = notes.restore
-function notes.doifcontent(tag)
+local function hascontent(tag)
local ok = notestates[tag]
if ok then
if ok.kind == "insert" then
@@ -160,17 +195,21 @@ function notes.doifcontent(tag)
ok = ok.start
end
end
- commands.doif(ok)
+ return ok and true or false
+end
+
+notes.hascontent = hascontent
+
+function commands.doifnotecontent(tag)
+ commands.doif(hascontent(tag))
end
local function internal(tag,n)
local nd = get(tag,n)
if nd then
--- inspect(nd)
local r = nd.references
if r then
local i = r.internal
--- return i and lists.internals[i]
return i and references.internals[i] -- dependency on references
end
end
@@ -186,7 +225,7 @@ end
notes.internal = internal
notes.ordered = ordered
-function notes.doifonsamepageasprevious(tag)
+local function onsamepageasprevious(tag)
local same = false
local n = getn(tag,n)
local current, previous = get(tag,n), get(tag,n-1)
@@ -194,7 +233,13 @@ function notes.doifonsamepageasprevious(tag)
local cr, pr = current.references, previous.references
same = cr and pr and cr.realpage == pr.realpage
end
- commands.doifelse(same)
+ return same and true or false
+end
+
+notes.doifonsamepageasprevious = onsamepageasprevious
+
+function commands.doifnoteonsamepageasprevious(tag)
+ commands.doifelse(onsamepageasprevious(tag))
end
function notes.checkpagechange(tag) -- called before increment !
@@ -222,64 +267,57 @@ function notes.postpone()
end
for tag, state in next, notestates do
if state.kind ~= "store" then
- notes.setstate(tag,"postpone")
+ setstate(tag,"postpone")
end
end
end
+commands.postponenotes = notes.postpone
+
function notes.setsymbolpage(tag,n,l)
- local l = l or notes.listindex(tag,n)
+ local l = l or listindex(tag,n)
if l then
local p = texcount.realpageno
if trace_notes or trace_references then
- report_notes("note %s of '%s' with list index %s gets symbol page %s",n,tag,l,p)
+ report_notes("note %a of %a with list index %a gets symbol page %a",n,tag,l,p)
end
local entry = lists.cached[l]
if entry then
entry.references.symbolpage = p
else
- report_notes("internal error: note %s of '%s' is not flushed",n,tag)
+ report_notes("internal error: note %a of %a is not flushed",n,tag)
end
else
- report_notes("internal error: note %s of '%s' is not initialized",n,tag)
+ report_notes("internal error: note %a of %a is not initialized",n,tag)
end
end
--- function notes.getsymbolpage(tag,n)
--- local nd = get(tag,n)
--- local p = nd and nd.references.symbolpage or 0
--- if trace_notes or trace_references then
--- report_notes("page number of note symbol %s of '%s' is %s",n,tag,p)
--- end
--- context(p)
--- end
+commands.setnotesymbolpage = notes.setsymbolpage
-function notes.getsymbolpage(tag,n)
+local function getsymbolpage(tag,n)
local li = internal(tag,n)
li = li and li.references
li = li and (li.symbolpage or li.realpage) or 0
if trace_notes or trace_references then
- report_notes("page number of note symbol %s of '%s' is %s",n,tag,li)
+ report_notes("page number of note symbol %a of %a is %a",n,tag,li)
end
- context(li)
+ return li
end
-function notes.getnumberpage(tag,n)
+local function getnumberpage(tag,n)
local li = internal(tag,n)
li = li and li.references
li = li and li.realpage or 0
if trace_notes or trace_references then
- report_notes("page number of note number %s of '%s' is %s",n,tag,li)
+ report_notes("page number of note number %s of %a is %a",n,tag,li)
end
- context(li)
+ return li
end
-function notes.deltapage(tag,n)
+local function getdeltapage(tag,n)
-- 0:unknown 1:textbefore, 2:textafter, 3:samepage
local what = 0
-
--- references.internals[lists.tobesaved[nd].internal]
-
+ -- references.internals[lists.tobesaved[nd].internal]
local li = internal(tag,n)
if li then
local references = li.references
@@ -287,7 +325,7 @@ function notes.deltapage(tag,n)
local symbolpage = references.symbolpage or 0
local notepage = references.realpage or 0
if trace_references then
- report_notes("note number %s of '%s' points from page %s to page %s ",n,tag,symbolpage,notepage)
+ report_notes("note number %a of %a points from page %a to page %a",n,tag,symbolpage,notepage)
end
if notepage < symbolpage then
what = 3 -- after
@@ -301,10 +339,18 @@ function notes.deltapage(tag,n)
-- nesting in a vbox
end
end
- context(what)
+ return what
end
-function notes.flush(tag,whatkind,how) -- store and postpone
+notes.getsymbolpage = getsymbolpage
+notes.getnumberpage = getnumberpage
+notes.getdeltapage = getdeltapage
+
+function commands.notesymbolpage(tag,n) context(getsymbolpage(tag,n)) end
+function commands.notenumberpage(tag,n) context(getnumberpage(tag,n)) end
+function commands.notedeltapage (tag,n) context(getdeltapage (tag,n)) end
+
+function commands.flushnotes(tag,whatkind,how) -- store and postpone
local state = notestates[tag]
local kind = state.kind
if kind == whatkind then
@@ -313,7 +359,7 @@ function notes.flush(tag,whatkind,how) -- store and postpone
if kind == "postpone" then
if nd and ns then
if trace_notes then
- report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd)
end
for i=ns,#nd do
context.handlenoteinsert(tag,i)
@@ -324,7 +370,7 @@ function notes.flush(tag,whatkind,how) -- store and postpone
elseif kind == "store" then
if nd and ns then
if trace_notes then
- report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd)
end
-- todo: as registers: start, stop, inbetween
for i=ns,#nd do
@@ -348,24 +394,24 @@ function notes.flush(tag,whatkind,how) -- store and postpone
elseif kind == "reset" then
if nd and ns then
if trace_notes then
- report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd)
end
end
state.start = nil
elseif trace_notes then
- report_notes("not flushing state %s of %s",whatkind,tag)
+ report_notes("not flushing state %a of %a",whatkind,tag)
end
elseif trace_notes then
- report_notes("not flushing state %s of %s",whatkind,tag)
+ report_notes("not flushing state %a of %a",whatkind,tag)
end
end
-function notes.flushpostponed()
+function commands.flushpostponednotes()
if trace_notes then
report_notes("flushing all postponed notes")
end
for tag, _ in next, notestates do
- notes.flush(tag,"postpone")
+ commands.flushnotes(tag,"postpone")
end
end
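
The pattern in this Lua file is consistent: functions that used to print their result straight to TeX with context(...) become plain local functions that return a value (still exported as notes.*), while thin commands.* wrappers do the printing for the TeX end, which now reaches them through \ctxcommand. The reporter calls also move from hand-quoted '%s' to the %a specifier of the ConTeXt formatters, which takes care of the quoting itself. The split reduced to a sketch (names are made up; notes, commands and context are the same objects as above):

    local data = { }
    local function getthing(tag,n)       -- pure helper: just returns a value
        return data[tag] and data[tag][n] or 0
    end
    notes.getthing = getthing            -- usable from other Lua code
    function commands.getthing(tag,n)    -- usable from TeX via \ctxcommand
        context(getthing(tag,n))         -- only the wrapper pipes back to TeX
    end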
diff --git a/Master/texmf-dist/tex/context/base/strc-not.mkvi b/Master/texmf-dist/tex/context/base/strc-not.mkvi
index 0a57ed8788c..09ea41bc4fa 100644
--- a/Master/texmf-dist/tex/context/base/strc-not.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-not.mkvi
@@ -21,10 +21,6 @@
% todo: see if we can now use \insertpenalties (>0 == some left)
-\ifdefined\v!notation \else \def\v!notation {notation} \fi
-\ifdefined\v!endnote \else \def\v!endnote {endnote} \fi
-\ifdefined\c!symbolcommand \else \def\c!symbolcommand{symbolcommand} \fi
-
\ifdefined\dotagsetnotesymbol \else \let\dotagsetnotesymbol\relax \fi
\unexpanded\def\unvboxed {\ifvmode\unvbox \else\box \fi} % will change or used more often
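
The hunks below also switch many low-level macros to named parameters (#tag, #title, #reference, #style, #color, ...). That is the MkVI notation: files with the mkvi suffix are preprocessed when loaded and the names are mapped onto ordinary numbered parameters before TeX ever sees the definitions, so the change is purely one of readability. A two-line illustration (hypothetical macro):

    % what is written in a .mkvi file ...
    \def\my_note_sketch#tag#title{[#tag: #title]}
    % ... is what TeX effectively compiles:
    \def\my_note_sketch#1#2{[#1: #2]}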
@@ -71,9 +67,9 @@
\installcommandhandler \??notation {notation} \??notation
-\let\setupnotations\setupnotation
+\installcounterassociation{notation}
-% \unexpanded\def\lowllap#1{\low{\llap{#1}}}
+\let\setupnotations\setupnotation
\definesymbol[\v!note:\v!previouspage][\llap{\low{\symbol[\v!previouspage]}}]
\definesymbol[\v!note:\v!nextpage ][\llap{\low{\symbol[\v!nextpage ]}}]
@@ -123,29 +119,26 @@
\c!state=\v!start,
\c!levels=3]
-% mostly the same as enumersations but we want to keep them
-% isolated and at some point we might differentiate
-
-% to be considered:
-%
-% \let\??construction\??notation
-%
-% with push/pop (also at definition time)
-
-\unexpanded\def\strc_define_commands_notation#1#2#3% current level parent
- {\doifelsenothing{#3}
- {\normalexpanded{\defineconstruction[#1][\s!handler=\v!notation,\c!level=#2]}%
- \setevalue{\??notation#1:\s!parent}{\??notation}}%
- {\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!notation,\c!level=#2]}%
- \setevalue{\??notation#1:\s!parent}{\??notation#3}}%
- \setuevalue{\e!next #1}{\strc_notations_next }%
- \setuevalue{\c!reset#1}{\strc_notations_reset }%
- %setuevalue{\c!set #1}{\strc_notations_set }%
-% \setuevalue {#1}{\strc_notations_command{\currentnotation}}%
-% \setuevalue{\e!start#1}{\strc_notations_start {\currentnotation}{#1}}%
- \setuevalue {#1}{\strc_notations_command{#1}}%
- \setuevalue{\e!start#1}{\strc_notations_start {#1}{#1}}% okay?
- \setuevalue{\e!stop #1}{\strc_notations_stop }}
+%D The code here is mostly the same as enumerations but we want to keep them
+%D isolated and at some point we might differentiate.
+
+% \installcorenamespace{noteclass}
+
+\unexpanded\def\strc_define_commands_notation#tag#level#parent%
+ {\doifelsenothing{#parent}
+ {\normalexpanded{\defineconstruction[#tag][\s!handler=\v!notation,\c!level=#level]}%
+ \setevalue{\??notation#tag:\s!parent}{\??notation}}%
+ {\normalexpanded{\defineconstruction[#tag][#parent][\s!handler=\v!notation,\c!level=#level]}%
+ \setevalue{\??note#tag:\s!parent}{\??note#parent}% see later for \s!note
+ \setevalue{\??notation#tag:\s!parent}{\??notation#parent}}%
+ \setuevalue{\e!next #tag}{\strc_notations_next }%
+ \setuevalue{\c!reset#tag}{\strc_notations_reset }%
+ %setuevalue{\c!set #tag}{\strc_notations_set }%
+ %setuevalue {#tag}{\strc_notations_command{\currentnotation}}%
+ %setuevalue{\e!start#tag}{\strc_notations_start {\currentnotation}{#tag}}%
+ \setuevalue {#tag}{\strc_notations_command{#tag}}%
+ \setuevalue{\e!start#tag}{\strc_notations_start {#tag}{#tag}}% okay?
+ \setuevalue{\e!stop #tag}{\strc_notations_stop }}
\appendtoks
\ifx\currentnotationparent\empty
@@ -179,23 +172,23 @@
\ifx\p_counter\empty %
\let\p_counter\currentnotation
\fi
-% \doifcounterelse\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
-% \letnotationparameter\s!counter\p_counter
-% \strc_enumerations_setup_counter\currentnotation
\doifcounterelse\p_counter\donothing{\strc_notes_define_counter\p_counter}%
\letnotationparameter\s!counter\p_counter
- \strc_notes_setup_counter\currentnotation
+ %\strc_notes_setup_counter\currentnotation
\to \everydefinenotation
-\setvalue{\??constructioninitializer\v!notation}%
+\let\p_strc_constructions_title \empty
+\let\p_strc_constructions_number\empty
+
+\unexpanded\setvalue{\??constructioninitializer\v!notation}%
{\let\currentnotation \currentconstruction
\let\constructionparameter \notationparameter
\let\detokenizedconstructionparameter\detokenizednotationparameter
\let\letconstructionparameter \letnotationparameter
\let\useconstructionstyleandcolor \usenotationstyleandcolor
\let\setupcurrentconstruction \setupcurrentnotation
- \edef\p_number{\constructionparameter\c!number}%
- \ifx\p_number\v!yes
+ \edef\p_strc_constructions_number{\constructionparameter\c!number}%
+ \ifx\p_strc_constructions_number\v!yes
\settrue\c_strc_constructions_number_state
\iftrialtypesetting
\strc_counters_save\currentconstructionnumber
@@ -204,14 +197,14 @@
\else
\setfalse\c_strc_constructions_number_state
\fi
- \edef\p_title{\constructionparameter\c!title}%
- \ifx\p_title\v!yes
+ \edef\p_strc_constructions_title{\constructionparameter\c!title}%
+ \ifx\p_strc_constructions_title\v!yes
\settrue\c_strc_constructions_title_state
\else
\setfalse\c_strc_constructions_title_state
\fi}
-\setvalue{\??constructionfinalizer\v!notation}%
+\unexpanded\setvalue{\??constructionfinalizer\v!notation}%
{\ifconditional\c_strc_constructions_number_state
\iftrialtypesetting
\strc_counters_restore\currentconstructionnumber
@@ -232,58 +225,71 @@
\let\strc_notations_reset\strc_enumerations_reset
%let\strc_notations_set \strc_enumerations_set
-\unexpanded\def\strc_notations_command#1%
+\unexpanded\def\strc_notations_command#tag%
{\begingroup
- \edef\currentnote{#1}%
- \strc_constructions_initialize{#1}%
+ \edef\currentnote{#tag}%
+ \strc_constructions_initialize{#tag}%
\strc_notes_synchronize
\ifnotesenabled
\strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
\fi
\doifnextoptionalelse\strc_notations_command_yes\strc_notations_command_nop}
-\unexpanded\def\strc_notations_command_nop#1%
- {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#1},\c!bookmark=,\c!list=][]%
+\unexpanded\def\strc_notations_command_nop#title%
+ {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#title},\c!bookmark=,\c!list=][]%
\csname\??constructionnotehandler\currentconstructionhandler\endcsname
\strc_constructions_finalize
\normalexpanded{\endgroup\noteparameter\c!next}}
-\unexpanded\def\strc_notations_command_yes[#1]%
- {\doifassignmentelse{#1}\strc_notations_command_assignment\strc_notations_command_argument[#1]}
+\unexpanded\def\strc_notations_command_yes[#optional]%
+ {\doifassignmentelse{#optional}\strc_notations_command_assignment\strc_notations_command_argument[#optional]}
-\unexpanded\def\strc_notations_command_assignment[#1]%
- {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#1][]%
+\unexpanded\def\strc_notations_command_assignment[#settings]%
+ {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#settings][]%
\csname\??constructionnotehandler\currentconstructionhandler\endcsname
\strc_constructions_finalize
\normalexpanded{\endgroup\noteparameter\c!next}}
-\unexpanded\def\strc_notations_command_argument[#1]#2%
- {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#1},\c!title={#2},\c!bookmark=,\c!list=][]%
+\unexpanded\def\strc_notations_command_argument[#reference]#title%
+ {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#reference},\c!title={#title},\c!bookmark=,\c!list=][]%
\csname\??constructionnotehandler\currentconstructionhandler\endcsname
\strc_constructions_finalize
\normalexpanded{\endgroup\noteparameter\c!next}}
-\unexpanded\def\strc_notations_start#1#2%
+% \unexpanded\def\strc_notations_start#tag#stoptag%
+% {\begingroup
+% \edef\currentnote{#tag}%
+% \strc_constructions_initialize{#tag}%
+% \strc_notes_synchronize
+% \ifnotesenabled
+% \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+% \fi
+% \normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
+% {\normaldef\noexpand\strc_pickup_yes[##1]##2\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[##1]{##2}}%
+% \normaldef\noexpand\strc_pickup_nop ##1\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {##1}}}%
+% \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+
+\unexpanded\def\strc_notations_start#tag#stoptag%
{\begingroup
- \edef\currentnote{#1}%
- \strc_constructions_initialize{#1}%
+ \edef\currentnote{#tag}%
+ \strc_constructions_initialize{#tag}%
\strc_notes_synchronize
\ifnotesenabled
\strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
\fi
- \normalexpanded % not that efficient but also not that frequently used
- {\def\noexpand\strc_pickup_yes[##1]##2\csname\e!stop#2\endcsname{\strc_notations_command_yes[##1]{##2}}%
- \def\noexpand\strc_pickup_nop ##1\csname\e!stop#2\endcsname{\strc_notations_command_nop {##1}}}%
+ \normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
+ {\def\noexpand\strc_pickup_yes[#one]#two\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[#one]{#two}}%
+ \def\noexpand\strc_pickup_nop #one\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {#one}}}%
\doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
-\unexpanded\def\strc_notations_start_yes[#1]#2%
- {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#1},\c!title={#2},\c!bookmark=,\c!list=][]%
+\unexpanded\def\strc_notations_start_yes[#reference]#title%
+ {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#reference},\c!title={#title},\c!bookmark=,\c!list=][]%
\csname\??constructionnotehandler\currentconstructionhandler\endcsname
\strc_constructions_finalize
\normalexpanded{\endgroup\noteparameter\c!next}}
-\unexpanded\def\strc_notations_start_nop#1%
- {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#1},\c!bookmark=,\c!list=][]%
+\unexpanded\def\strc_notations_start_nop#title%
+ {\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#title},\c!bookmark=,\c!list=][]%
\csname\??constructionnotehandler\currentconstructionhandler\endcsname
\strc_constructions_finalize
\normalexpanded{\endgroup\noteparameter\c!next}}
@@ -296,10 +302,10 @@
%D level of indirectness. This way notations don't bark on undefined
%D macros when used in combination.
-\setvalue{\??constructionnotehandler\v!notation}%
+\unexpanded\setvalue{\??constructionnotehandler\v!notation}%
{\csname\??constructionnotehandler\currentconstructionhandler:\constructionparameter\c!type\endcsname}
-\setvalue{\??constructionnotehandler\v!notation:}% empty case
+\unexpanded\setvalue{\??constructionnotehandler\v!notation:}% empty case
{[\currentconstructionhandler:\currentconstruction]}
%D Here is a simple renderer for notes
@@ -371,51 +377,41 @@
%D Insertions are part of notes.
-\installcorenamespace{noteinsertion}
+% \installcorenamespace{noteinsertion}
-\def\currentnoteins{\csname\??noteinsertion\currentnote\endcsname}
+\def\currentnoteinsertion {\noteparameter\s!insert}
+\def\currentnoteinsertionnumber{\namedinsertionnumber{\noteparameter\s!insert}}
\appendtoks
- \ifcsname\??noteinsertion\currentnote\endcsname\else
- \expandafter\installinsertion\csname\??noteinsertion\currentnote\endcsname\relax
- \normalexpanded{\t_strc_notes{\the\t_strc_notes\noexpand\strc_notes_process_list{\currentnote}}}%
- \fi
- \ctxlua{structures.notes.define("\currentnote","insert",\number\currentnoteins)}%
\ifx\currentnoteparent\empty
+ \doifinsertionelse\currentnote
+ \donothing
+ {\defineinsertion[\currentnote]% could be an option
+ \normalexpanded{\t_strc_notes{\the\t_strc_notes\noexpand\strc_notes_process_list{\currentnote}}}}%
+ \letnoteparameter\s!insert\currentnote
\definenotation[\currentnote][\c!type=\v!note]%
\else
+ \setexpandednoteparameter\s!insert{\namednoteparameter\currentnoteparent\s!insert}%
\definenotation[\currentnote][\currentnoteparent][\c!type=\v!note]%
\fi
+ \ctxcommand{definenote("\currentnote","insert",\number\currentnoteinsertionnumber)}%
\to \everydefinenote
-% maybe we will share this at some point
-
-%%%%%%%%%%% BEGIN TODO (SEE ENUMERATIONS) %%%%%%%%%%%%%%%
-
-\newtoks\everysetupnotecounter
-
-\let\v_strc_note_counter_name\empty
-
-\def\strc_notes_setup_counter#1%
- {\edef\v_strc_note_counter_name{#1}% only used in the token list
- \the\everysetupnotecounter}
+% maybe we will share this at some point:
-\def\strc_notes_define_counter#1% todo: fast inheritance (was mainparameter
- {\definecounter[#1]%
- \strc_notes_setup_counter{#1}}
+\def\strc_notes_define_counter#tag% todo: move inline
+ {\definecounter[#tag]%
+ \registerenumerationcounter{#tag}}
\appendtoks
- \strc_counter_setup_using_parameter\v_strc_note_counter_name\notationparameter
-\to \everysetupnotecounter
-
-\appendtoks
- \strc_notes_setup_counter\currentnotation
+ \synchronizenotationcounters
\to \everysetupnotation
-%%%%%%%%%%% END TODO (SEE ENUMERATIONS) %%%%%%%%%%%%%%%
+\appendtoks
+ \synchronizenotationcounters
+\to \everydefinenotation
-% \setvalue{\??constructionstarthandler\v!notation}%
-% {\csname\??constructionstarthandler\v!construction\endcsname}
+% so far
%expandafter\let\csname\??constructionstarthandler \v!notation\expandafter\endcsname\csname\??constructionstarthandler \v!enumeration\endcsname
\expandafter\let\csname\??constructionstarthandler \v!notation\expandafter\endcsname\csname\??constructionstarthandler \v!construction\endcsname % no par mess
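
The hunk above retires the private \??noteinsertion namespace (one \installinsertion per note class) in favour of regular insertions: a root note class now gets \defineinsertion[\currentnote] and stores its own name in the insert parameter, while a note defined with a parent copies the parent's insert value, so derived note classes share their parent's insertion and \currentnoteinsertionnumber resolves it through \namedinsertionnumber. In use that amounts to something like (sketch, made-up class names):

    \definenote[remark]          % root class: defines and uses insertion 'remark'
    \definenote[aside][remark]   % derived class: inherits insert=remark, same insertion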
@@ -423,35 +419,42 @@
\expandafter\let\csname\??constructioncommandhandler\v!notation\expandafter\endcsname\csname\??constructioncommandhandler\v!enumeration \endcsname
\expandafter\let\csname\??constructiontexthandler \v!notation\expandafter\endcsname\csname\??constructiontexthandler \v!enumeration \endcsname
-\setvalue{\??constructionmainhandler\v!notation}#1%
+\unexpanded\setvalue{\??constructionmainhandler\v!notation}#following%
{\iftrialtypesetting \else
\begingroup
\currentconstructionsynchronize
\attribute\destinationattribute\currentconstructionattribute\relax % todo, whole text
\signalcharacter
\endgroup
- \fi#1}
+ \fi#following}
-\setvalue{\??constructionnotehandler\v!notation:\v!note}% in the running text
+\unexpanded\setvalue{\??constructionnotehandler\v!notation:\v!note}% in the running text
{\ifnotesenabled
- \let\currentnote\currentconstructionmain % do be done elsewhere
+      % to be done elsewhere
+ %
+ %let\currentnote\currentconstructionmain
+ \let\currentnote\currentconstruction % else wrong inheritance
+ %
\iftrialtypesetting
\strc_notes_inject_dummy
\else
\begingroup
- \edef\currentnotenumber{\ctxlua{structures.notes.store("\currentnote",\currentconstructionlistentry)}}%
+ \edef\currentnotenumber{\ctxcommand{storenote("\currentnote",\currentconstructionlistentry)}}%
\settrue\processingnote
\ifconditional\c_strc_notes_skip
- \globallet\lastnotesymbol\strc_notes_inject_symbol
+ \globallet\lastnotesymbol\strc_notes_inject_symbol_nop
+ \ifconditional\c_strc_notes_delayed
+ \strc_notes_inject_symbol_snc
+ \fi
\else
\iftypesettinglines % otherwise problems with \type <crlf> {xxx}
\ignorelines % makes footnotes work in \startlines ... \stoplines
\fi
\ifconditional\c_strc_notes_symbol
- \strc_notes_inject_symbol
+ \strc_notes_inject_symbol_yes
\else
\unskip\unskip
- \globallet\lastnotesymbol\strc_notes_inject_symbol
+ \globallet\lastnotesymbol\strc_notes_inject_symbol_yes
\fi
\fi
\ifconditional\postponingnotes % todo: per note class
@@ -514,7 +517,7 @@
\endcsname}
\setvalue{\??noteinteractioninline\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxlua{structures.notes.getnumberpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\ctxcommand{notenumberpage("\currentnote",\currentnotenumber)})}%
\edef\strc_notes_get_reference_attribute_symbol{attr\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_style_color_inline \strc_notes_set_style_color_inline_yes}
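
This and the following hunks replace direct \ctxlua calls that spell out the structures.notes.* path with \ctxcommand calls into the commands.* wrappers added in strc-not.lua above; what ends up in the page(...) reference is unchanged. Both spellings, taken from this hunk:

    % before:  page(\ctxlua{structures.notes.getnumberpage("\currentnote",\currentnotenumber)})
    % after:   page(\ctxcommand{notenumberpage("\currentnote",\currentnotenumber)})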
@@ -531,8 +534,10 @@
\expandafter\let\csname\??noteinteractioninline\v!number\expandafter\endcsname\csname\??noteinteractioninline\v!yes\endcsname
\expandafter\let\csname\??noteinteractioninline\v!text \expandafter\endcsname\csname\??noteinteractioninline\v!yes\endcsname
+% page(...) : we could have a dedicated one
+
\setvalue{\??noteinteractiondisplay\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxlua{structures.notes.getsymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
\edef\strc_notes_set_reference_attribute_number{\attribute\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_reference_attribute_text \donothing
\let \strc_notes_set_destination_attribute_text\donothing
@@ -545,7 +550,7 @@
\let\strc_notes_set_style_color_display \strc_notes_set_style_color_display_nop}
\setvalue{\??noteinteractiondisplay\v!all}%
- {\strc_references_get_simple_page_reference{page(\ctxlua{structures.notes.getsymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -554,7 +559,7 @@
\let \strc_notes_set_style_color_display \strc_notes_set_style_color_display_yes}
\setvalue{\??noteinteractiondisplay\v!text}%
- {\strc_references_get_simple_page_reference{page(\ctxlua{structures.notes.getsymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -567,19 +572,19 @@
\let\strc_notes_set_style_color_inline_nop \usenotestyleandcolor
\let\strc_notes_set_style_color_display_nop\usenotationstyleandcolor
-\unexpanded\def\strc_notes_set_style_color_inline_yes#1#2%
- {\usenotestyleandcolor#1#2%
+\unexpanded\def\strc_notes_set_style_color_inline_yes#style#color%
+ {\usenotestyleandcolor#style#color%
\iflocation\strc_notes_set_style_color_special\fi}
-\unexpanded\def\strc_notes_set_style_color_display_yes#1#2%
- {\usenotationstyleandcolor#1#2%
+\unexpanded\def\strc_notes_set_style_color_display_yes#style#color%
+ {\usenotationstyleandcolor#style#color%
\iflocation\strc_notes_set_style_color_special\fi}
\def\strc_notes_set_style_color_special
{\iftrialtypesetting
% keep
\else\ifx\currentcolorparameter\empty
- \scratchcounter\ctxlua{structures.notes.deltapage("\currentnote",\currentnotenumber)}\relax % todo calculate once
+ \scratchcounter\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax % todo calculate once
\setlocationcolorspecified\scratchcounter
\fi\fi}
@@ -598,7 +603,7 @@
% in mkii the pointer only showed up in pagewise notes
\unexpanded\def\strc_notes_inject_pointer % todo calculate once
- {\ifcase\ctxlua{structures.notes.deltapage("\currentnote",\currentnotenumber)}\relax
+ {\ifcase\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax
% unknown
\or
% same page
@@ -610,10 +615,19 @@
\unexpanded\def\strc_notes_register_note_page % called more often than needed
{\iftrialtypesetting \else
- \normalexpanded{\noexpand\ctxlatelua{structures.notes.setsymbolpage("\currentnote",\currentnotenumber)}}%
+ \normalexpanded{\noexpand\ctxlatecommand{setnotesymbolpage("\currentnote",\currentnotenumber)}}%
\fi}
-\unexpanded\def\strc_notes_inject_symbol
+\unexpanded\def\strc_notes_inject_symbol_yes
+ {\strc_notes_inject_symbol_indeed\conditionaltrue}
+
+\unexpanded\def\strc_notes_inject_symbol_nop
+ {\strc_notes_inject_symbol_indeed\conditionalfalse}
+
+\unexpanded\def\strc_notes_inject_symbol_snc
+ {\currentconstructionsynchronize} % this flushes the data to the list
+
+\unexpanded\def\strc_notes_inject_symbol_indeed#synchronize%
{\removeunwantedspaces
\doifitalicelse\/\donothing % Charles IV \footnote{the fourth}
\ifdim\lastkern=\notesignal
@@ -622,7 +636,9 @@
\fi
\nobreak
\begingroup
- \currentconstructionsynchronize % this flushes the data to the list
+ \ifconditional#synchronize\relax
+ \strc_notes_inject_symbol_snc % this flushes the data to the list
+ \fi
\strc_notes_register_note_page % this registers the symbol page number (late)
\strc_notes_interaction_check_inline
\strc_notes_set_style_color_inline\c!textstyle\c!textcolor
@@ -646,7 +662,7 @@
\strc_notes_inject_separator
\fi
\nobreak
- \hbox to .5em{}%
+ \hbox to .5\emwidth{}%
\globallet\lastnotesymbol\relax}
\unexpanded\def\strc_notes_inject_separator % patch by WS due to request on list
@@ -751,8 +767,8 @@
\let\strc_notes_process_list\gobbleoneargument
-\unexpanded\def\strc_notes_process#1% argument is a \macro that uses \currentnote
- {\def\strc_notes_process_list##1{\edef\currentnote{##1}\let\currentdescription\currentnote#1}%
+\unexpanded\def\strc_notes_process#action% argument is a \macro that uses \currentnote
+ {\def\strc_notes_process_list##1{\edef\currentnote{##1}\let\currentdescription\currentnote#action}%
\the\t_strc_notes}
\newtoks\everychecknote % just before a note is typeset
@@ -805,8 +821,8 @@
{\letcurrentnoterulecommand\relax % so we default to nothing
\processcommacommand[\noteparameter\c!rule]\strc_notes_set_rule_step}
-\def\strc_notes_set_rule_step#1%
- {\csname\??noterule\ifcsname\??noterule#1\endcsname#1\else\s!unknown\fi\endcsname}
+\def\strc_notes_set_rule_step#alternative%
+ {\csname\??noterule\ifcsname\??noterule#alternative\endcsname#alternative\else\s!unknown\fi\endcsname}
\setvalue{\??noterule\v!command}{\letcurrentnoterulecommand\currentnoterulecommandcommand}
\setvalue{\??noterule \v!on}{\letcurrentnoterulecommand\currentnoterulecommandnormal}
@@ -857,7 +873,7 @@
\ifx\p_factor\empty \else
\ifnum\p_factor<\zerocount \else
% \global
- \count\currentnoteins\p_factor % new: global
+ \count\currentnoteinsertionnumber\p_factor % new: global
\fi
\fi}
@@ -869,50 +885,81 @@
\installcorenamespace{notelocationvariant}
\installcorenamespace{notepositionvariant}
-\installcorenamespace{notedelayed}
+\installcorenamespace{notedelayedvariant}
\installcorenamespace{notelocation}
\newconditional\c_strc_notes_delayed
-\setvalue{\??notelocation\v!page }{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_page}
-\setvalue{\??notelocation\v!columns }{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_columns}
-\setvalue{\??notelocation\v!lastcolumn }{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_lastcolumn}
-\setvalue{\??notelocation\v!firstcolumn}{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_firstcolumn}
-\setvalue{\??notelocation\v!none }{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_none}
-\setvalue{\??notelocation\v!text }{\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_text}
+\unexpanded\def\strc_notes_set_delayed_yes{\settrue \c_strc_notes_delayed}
+\unexpanded\def\strc_notes_set_delayed_nop{\setfalse\c_strc_notes_delayed}
+
+\setvalue{\??notelocation\v!page }{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_nop
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_page}
+\setvalue{\??notelocation\v!columns }{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_nop
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_columns}
+\setvalue{\??notelocation\v!lastcolumn }{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_nop
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_lastcolumn}
+\setvalue{\??notelocation\v!firstcolumn}{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_nop
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_firstcolumn}
+\setvalue{\??notelocation\v!none }{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_yes
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_none}
+\setvalue{\??notelocation\v!text }{\letvalue{\??notedelayedvariant \currentnote}\strc_notes_set_delayed_yes
+ \letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_text}
\setvalue{\??notelocation\v!high }{\letvalue{\??notepositionvariant\currentnote}\strc_notes_set_position_high}
\setvalue{\??notelocation\v!bottom }{\letvalue{\??notepositionvariant\currentnote}\strc_notes_set_position_bottom}
-\letvalue{\??notedelayed\v!page }\setfalse
-\letvalue{\??notedelayed\v!columns }\setfalse
-\letvalue{\??notedelayed\v!lastcolumn }\setfalse
-\letvalue{\??notedelayed\v!firstcolumn }\setfalse
-\letvalue{\??notedelayed\v!none }\settrue
-\letvalue{\??notedelayed\v!text }\settrue
+\setvalue{\??notedelayedvariant \??notedelayedvariant }{\strc_notes_set_delayed_nop} % not let
+\setvalue{\??notepositionvariant\??notepositionvariant}{\strc_notes_set_position_bottom} % not let
+\setvalue{\??notelocationvariant\??notelocationvariant}{\strc_notes_set_location_page} % not let
\unexpanded\def\strc_notes_set_delayed
- {\ifcsname\??notedelayed\currentnote\endcsname
- \csname\??notedelayed\currentnote\endcsname\c_strc_notes_delayed
- \else
- \setfalse\c_strc_notes_delayed
- \fi}
+ {\csname\??notedelayedvariant
+ \ifcsname\??notedelayedvariant\currentnote\endcsname
+ \currentnote
+ \else
+ \??notedelayedvariant
+ \fi
+ \endcsname}
+
+% \let\strc_notes_set_delayed_yes\truecondition
+% \let\strc_notes_set_delayed_nop\falsecondition
+%
+% \def\c_strc_notes_delayed
+% {\csname\??notedelayedvariant
+% \ifcsname\??notedelayedvariant\currentnote\endcsname
+% \currentnote
+% \else
+% \??notedelayedvariant
+% \fi
+% \endcsname}
+
+\unexpanded\def\strc_notes_set_position
+ {\csname\??notepositionvariant
+ \ifcsname\??notepositionvariant\currentnote\endcsname
+ \currentnote
+ \else
+ \??notepositionvariant
+ \fi
+ \endcsname}
\unexpanded\def\strc_notes_set_location
- {\letvalue{\??notelocationvariant\currentnote}\strc_notes_set_location_page
- \letvalue{\??notepositionvariant\currentnote}\strc_notes_set_position_bottom
- \setfalse\c_strc_notes_delayed
- \normalexpanded{\rawprocesscommalist[\noteparameter\c!location]\strc_notes_set_location_step}}
-
-\unexpanded\def\strc_notes_set_location_step#1%
- {\ifcsname\??notelocation#1\endcsname
- \csname\??notelocation#1\endcsname
- \fi
- \ifcsname\??notedelayed#1\endcsname
- \csname\??notedelayed#1\endcsname\c_strc_notes_delayed
- \fi}
+ {\csname\??notelocationvariant
+ \ifcsname\??notelocationvariant\currentnote\endcsname
+ \currentnote
+ \else
+ \??notelocationvariant
+ \fi
+ \endcsname}
+
+\unexpanded\def\strc_notes_set_variants
+ {\normalexpanded{\rawprocesscommalist[\noteparameter\c!location]\strc_notes_set_location_step}}
+
+\unexpanded\def\strc_notes_set_location_step#alternative% the insert related one
+ {\ifcsname\??notelocation#alternative\endcsname\csname\??notelocation#alternative\endcsname\fi}
\appendtoks
- \strc_notes_set_location
+ \strc_notes_set_variants
+ \strc_notes_set_delayed
\to \everysynchronizenote
\newskip \s_strc_notes_distance % we need to implement stretch
@@ -940,9 +987,10 @@
{\setfalse\c_strc_notes_delayed
\strc_notes_set_distance
\strc_notes_set_columns
- \global\count\currentnoteins\numexpr\plusthousand/\c_strc_notes_columns\relax
- \global\dimen\currentnoteins\ifnotelimit\dimexpr\noteparameter\c!height*\c_strc_notes_columns\relax\else\maxdimen\fi
- \global\skip \currentnoteins\s_strc_notes_distance}
+ \page_inserts_set_location\currentnoteinsertion\v!page % \setupinsertion[\currentnote][\c!location=\v!page]%
+ \global\count\currentnoteinsertionnumber\numexpr\plusthousand/\c_strc_notes_columns\relax
+ \global\dimen\currentnoteinsertionnumber\ifnotelimit\dimexpr\noteparameter\c!height*\c_strc_notes_columns\relax\else\maxdimen\fi
+ \global\skip \currentnoteinsertionnumber\s_strc_notes_distance}
\def\strc_notes_set_location_columns
{\setfalse\c_strc_notes_delayed
@@ -951,33 +999,38 @@
\ifnum\currentnofcolumns=\zerocount
\c_strc_notes_columns\plusone
\fi
- \global\count\currentnoteins\numexpr\plusthousand/\c_strc_notes_columns\relax
- \global\dimen\currentnoteins\ifnotelimit\dimexpr\noteparameter\c!height*\c_strc_notes_columns\relax\else\maxdimen\fi
- \global\skip \currentnoteins\s_strc_notes_distance}
+ \page_inserts_set_location\currentnoteinsertion\v!columns % \setupinsertion[\currentnote][\c!location=\v!columns]%
+ \global\count\currentnoteinsertionnumber\numexpr\plusthousand/\c_strc_notes_columns\relax
+ \global\dimen\currentnoteinsertionnumber\ifnotelimit\dimexpr\noteparameter\c!height*\c_strc_notes_columns\relax\else\maxdimen\fi
+ \global\skip \currentnoteinsertionnumber\s_strc_notes_distance}
-\def\strc_notes_set_location_firstcolumn
+\def\strc_notes_set_location_somecolumn#whatcolumn%
{\setfalse\c_strc_notes_delayed
\strc_notes_set_distance
\strc_notes_set_columns
- \global\count\currentnoteins\plusthousand
- \global\dimen\currentnoteins\ifnotelimit\noteparameter\c!height\else\maxdimen\fi
- \global\skip \currentnoteins\s_strc_notes_distance}
+ \page_inserts_set_location\currentnoteinsertion#whatcolumn% \setupinsertion[\currentnote][\c!location=#whatcolumn]%
+ \global\count\currentnoteinsertionnumber\plusthousand
+ \global\dimen\currentnoteinsertionnumber\ifnotelimit\noteparameter\c!height\else\maxdimen\fi
+ \global\skip \currentnoteinsertionnumber\s_strc_notes_distance}
-\let\strc_notes_set_location_lastcolumn\strc_notes_set_location_firstcolumn
+\def\strc_notes_set_location_firstcolumn{\strc_notes_set_location_somecolumn\v!firstcolumn}
+\def\strc_notes_set_location_lastcolumn {\strc_notes_set_location_somecolumn\v!lastcolumn }
\def\strc_notes_set_location_text % we don't use inserts anyway (e.g. endnotes)
{\settrue\c_strc_notes_delayed
- \ctxlua{structures.notes.setstate("\currentnote","store")}%
- \global\dimen\currentnoteins\maxdimen
- \global\count\currentnoteins\zerocount
- \global\skip \currentnoteins\zeropoint}
+ \ctxcommand{setnotestate("\currentnote","store")}%
+ \page_inserts_set_location\currentnoteinsertion\v!text % \setupinsertion[\currentnote][\c!location=\v!text]%
+ \global\count\currentnoteinsertionnumber\zerocount
+ \global\dimen\currentnoteinsertionnumber\maxdimen
+ \global\skip \currentnoteinsertionnumber\zeropoint}
\let\strc_notes_set_location_none\strc_notes_set_location_text
\def\strc_notes_set_properties
{\strc_notes_set_columns
\strc_notes_set_distance
- \csname\??notelocationvariant\currentnote\endcsname}
+ \strc_notes_set_location
+ \strc_notes_set_delayed}
\let\strc_notes_set_position_high\relax
@@ -993,8 +1046,8 @@
\newconditional\c_notes_bottom_present
\def\strc_notes_check_if_bottom_present_indeed % in otr !
- {\ifvoid\currentnoteins\else
- \csname\??notepositionvariant\currentnote\endcsname
+ {\ifvoid\currentnoteinsertionnumber\else
+ \strc_notes_set_position
\fi}
\def\strc_notes_check_if_bottom_present_step
@@ -1105,39 +1158,39 @@
\newconditional\c_strc_notes_symbol \settrue\c_strc_notes_symbol % not used
\newconditional\c_strc_notes_skip
- \unexpanded\def\setnote [#1]{\getvalue{#1}}
- \unexpanded\def\setnotetext[#1]{\global\settrue\c_strc_notes_skip\getvalue{#1}}
+\unexpanded\def\setnote [#tag]{\getvalue{#tag}}
+\unexpanded\def\setnotetext[#tag]{\global\settrue\c_strc_notes_skip\getvalue{#tag}}
-\unexpanded\def\handlenoteinsert#1#2% tg, id
+\unexpanded\def\handlenoteinsert#tag#id%
{\begingroup
- \edef\currentnote{#1}%
- \strc_constructions_initialize{#1}%
+ \edef\currentnote{#tag}%
+ \strc_constructions_initialize{#tag}%
\strc_notes_synchronize
\the\everybeforenoteinsert
- \insert\currentnoteins\bgroup
+ \insert\currentnoteinsertionnumber\bgroup
\the\everyinsidenoteinsert\relax
- \doprocesslocalsetups{\noteparameter\c!setups}% experimental
+ \usesetupsparameter\noteparameter % experimental
\doifelse{\noteparameter\c!paragraph}\v!yes
{\nointerlineskip
\startvboxtohbox
- \handlenoteitself{#1}{#2}%
+ \handlenoteitself{#tag}{#id}%
% add some slack
\stopvboxtohbox}
- {\handlenoteitself{#1}{#2}}%
+ {\handlenoteitself{#tag}{#id}}%
\egroup
\the\everyafternoteinsert
\endgroup}
-\unexpanded\def\betweennoteitself#1% tg
- {\edef\currentnote{#1}%
+\unexpanded\def\betweennoteitself#tag%
+ {\edef\currentnote{#tag}%
\doif{\noteparameter\c!paragraph}\v!yes\strc_notes_between_paragraphs}
-\unexpanded\def\handlenoteitself#1#2% tg, id
- {\edef\currentnotenumber{#2}%
- \edef\currentnote{#1}%
- \strc_constructions_initialize{#1}%
+\unexpanded\def\handlenoteitself#tag#id%
+ {\edef\currentnotenumber{#id}%
+ \edef\currentnote{#tag}%
+ \strc_constructions_initialize{#tag}%
\strc_notes_synchronize
- \edef\currentconstructionlistentry{\ctxlua{tex.write(structures.notes.listindex("#1",#2))}}% index in list cache
+ \edef\currentconstructionlistentry{\ctxcommand{notelistindex("\currentnote",#id)}}% index in list cache
% as we can have collected notes (e.g. in tables) we need to recover
% \currentdescriptionattribute and \currentdescriptionsynchronize
%
@@ -1157,7 +1210,7 @@
% \endgroup
}
-\unexpanded\def\strc_notes_inject_text
+\unexpanded\def\strc_notes_inject_text % hm main?
{\ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}}
\let\startpushnote\relax
@@ -1216,14 +1269,14 @@
\def\strc_notes_place_inserts
{\strc_notes_set_delayed % \strc_notes_synchronize % we need to know if it's delayed
\ifconditional\c_strc_notes_delayed \else
- \ifdim\ht\currentnoteins>\zeropoint % or a faster delayed test
+ \ifdim\ht\currentnoteinsertionnumber>\zeropoint % or a faster delayed test
\strc_notes_place_inserts_indeed
\fi
\fi}
\def\strc_notes_place_inserts_indeed
{\relax
- \ifdim\ht\currentnoteins>\zeropoint
+ \ifdim\ht\currentnoteinsertionnumber>\zeropoint
\endgraf
\ifvmode
\whitespace
@@ -1271,13 +1324,11 @@
\fi}
\def\strc_notes_flush_inserts_normal
- {%\iftrialtypesetting\copy\else\box\fi\currentnoteins
- \strc_notes_flush_global
+ {\strc_notes_flush_global
\obeydepth} % (a) added , since split footnotes will not align properly
\def\strc_notes_flush_inserts_columns
{\startsimplecolumns[\c!distance=\noteparameter\c!columndistance,\c!n=\noteparameter\c!n,\c!width=\noteparameter\c!width]%
- %\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteins % compare with local
\strc_notes_flush_global
\stopsimplecolumns}
@@ -1293,18 +1344,15 @@
{\doifelse{\noteparameter\c!paragraph}\v!yes
{\vbox
{\beginofshapebox
- \iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteins
+ \iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber
\endofshapebox
- %\doreshapebox{\box\shapebox}{}{}{}% get rid of penalties etc
\let\strc_notes_between_paragraphs_indeed\strc_notes_between_paragraphs_first % shape works reverse
\doreshapebox
{\hbox{\unhbox\shapebox\strc_notes_between_paragraphs_indeed}}
- {}%
- {}%
- {}% get rid of penalties etc
+ \donothing \donothing \donothing % get rid of penalties etc
\innerflushshapebox
\convertvboxtohbox}}
- {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteins}}
+ {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}}
%D Supporting end notes is surprisingly easy. Even better, we
%D can combine this feature with solving the common \TEX\
@@ -1338,21 +1386,21 @@
% we need a proper state: normal, postponing, flushing
-\def\postponenotes
+\unexpanded\def\postponenotes
{\ifconditional\postponingnotes\else
\global\settrue\postponingnotes
\global\let\flushnotes\doflushnotes
- \ctxlua{structures.notes.postpone()}%
+ \ctxcommand{postponenotes()}%
\fi}
\let\flushnotes\relax
-\def\doflushnotes
+\unexpanded\def\doflushnotes
{\ifconditional\postponingnotes
\begingroup
\let\flushnotes \relax
\let\postponenotes\relax
- \ctxlua{structures.notes.flushpostponed()}% this also resets the states !
+ \ctxcommand{flushpostponednotes()}% this also resets the states !
\global\setfalse\postponednote
\global\setfalse\postponingnotes
\global\let\flushnotes\relax
@@ -1382,8 +1430,8 @@
\unexpanded\def\startlocalnotes
{\dosingleempty\strc_notes_local_start}
-\def\strc_notes_local_start[#1]%
- {\def\localnoteslist{#1}%
+\def\strc_notes_local_start[#list]% grouping ? (we used to have a second argument ... settings)
+ {\def\localnoteslist{#list}%
\settrue\inlocalnotes
\processcommacommand[\localnoteslist]\strc_notes_local_start_step}
@@ -1391,29 +1439,35 @@
{\processcommacommand[\localnoteslist]\strc_notes_local_stop_step
\setfalse\inlocalnotes}
-\def\strc_notes_local_start_step#1%
- {\doifnot{\noteparameter\c!continue}\v!yes
- {\strc_counters_save{#1}%
- \strc_counters_reset{#1}}%
- \ctxlua{structures.notes.save("#1","store")}}
+\let\p_strc_notes_continue\empty
-\def\strc_notes_local_stop_step#1%
- {\doifnot{\noteparameter\c!continue}\v!yes
- {\strc_counters_restore{#1}}%
- \ctxlua{structures.notes.restore("#1")}}
+\def\strc_notes_local_start_step#tag%
+ {\edef\p_strc_notes_continue{\noteparameter\c!continue}%
+ \ifx\p_strc_notes_continue\v!yes \else
+ \strc_counters_save{#tag}%
+ \strc_counters_reset{#tag}%
+ \fi
+ \ctxcommand{savenote("#tag","store")}}
+
+\def\strc_notes_local_stop_step#tag%
+ {\edef\p_strc_notes_continue{\noteparameter\c!continue}%
+ \ifx\p_strc_notes_continue\v!yes \else
+ \strc_counters_restore{#tag}%
+ \fi
+ \ctxcommand{restorenote("#tag")}}
\unexpanded\def\placelocalnotes
{\dodoubleempty\strc_notes_local_place}
-\def\strc_notes_local_place[#1][#2]%
- {\doif{\ctxlua{structures.notes.getstate("#1")}}{store}{\strc_notes_local_place_indeed{#2}{#1}}}
+\def\strc_notes_local_place[#tag][#settings]%
+ {\doif{\ctxcommand{getnotestate("#tag")}}{store}{\strc_notes_local_place_indeed{#settings}{#tag}}}
-\def\strc_notes_local_place_indeed#1#2% settings note
+\def\strc_notes_local_place_indeed#settings#tag%
{\begingroup
- \edef\currentnote{#2}% is already set?
+ \edef\currentnote{#tag}% is already set?
\the\everyplacelocalnotes
% beware, we cannot trust setting \currentnote here
- \setupcurrentnote[\c!width=\v!fit,\c!height=\v!fit,\c!strut=\v!no,\c!offset=\v!overlay,#1]% we only need a selective one
+ \setupcurrentnote[#settings]% later we set height etc for framed
\strc_notes_place_local_alternative
\strc_notes_set_properties % restore globals (if needed)
\endgroup
@@ -1460,19 +1514,19 @@
\unexpanded\def\placenotes
{\dodoubleempty\strc_notes_place}
-\def\strc_notes_place[#1][#2]%
- {\processcommalist[#1]{\strc_notes_place_indeed{#2}}}
+\def\strc_notes_place[#list][#settings]%
+ {\processcommalist[#list]{\strc_notes_place_indeed{#settings}}}
-\def\strc_notes_place_indeed#1#2% settings note
- {\edef\currentnote{#2}% grouping ?
- \doifelse{\ctxlua{structures.notes.getstate("#2")}}{store}
+\def\strc_notes_place_indeed#settings#tag% settings note
+ {\edef\currentnote{#tag}% grouping ?
+ \doifelse{\ctxcommand{getnotestate("#tag")}}{store}
\strc_notes_local_place_indeed
\strc_notes_global_place_indeed
- {#1}{#2}}
+ {#settings}{#tag}}
-\def\strc_notes_global_place_indeed#1#2%
+\def\strc_notes_global_place_indeed#settings#tag%
{\begingroup
- \setupnote[#2][#1]%
+ \setupnote[#tag][#settings]%
\strc_notes_place_inserts
\endgroup
\the\everysetupnote} % to be checked .. synchronize
@@ -1481,11 +1535,11 @@
\installcorenamespace{notealternative}
-\unexpanded\def\installnotealternative#1#2%
- {\setvalue{\??notealternative#1}{#2}}
+\unexpanded\def\installnotealternative#alternative#command%
+ {\setvalue{\??notealternative#alternative}{#command}}
-\unexpanded\def\doifnotescollected#1%
- {\ctxlua{structures.notes.doifcontent("#1")}}
+\unexpanded\def\doifnotescollected#tag%
+ {\ctxcommand{doifnotecontent("#tag")}}
\def\strc_notes_place_local_alternative % will be a setup (wrapper)
{\doifnotescollected\currentnote
@@ -1496,7 +1550,7 @@
\fi
\begingroup
\strc_notes_set_bodyfont
- \getvalue{\??notealternative\noteparameter\c!alternative}%
+ \csname\??notealternative\noteparameter\c!alternative\endcsname
\endgroup
\ifvmode
\noteparameter\c!after
@@ -1512,8 +1566,7 @@
% setups ?
-% \def\flushlocalnotes#1{\ctxlua{structures.notes.flush("#1","store")}}
-\def\flushlocalnotes#1{\ctxlua{structures.notes.flush("#1","store","\noteparameter\c!criterium")}}
+\def\flushlocalnotes#tag{\ctxcommand{flushnotes("#tag","store","\noteparameter\c!criterium")}}
\installnotealternative \v!none
{\flushlocalnotes\currentnote}
@@ -1569,58 +1622,62 @@
\strc_notes_process\strc_notes_check_presence}
\def\strc_notes_check_presence
- {\ifdim\ht\currentnoteins>\zeropoint
+ {\ifdim\ht\currentnoteinsertionnumber>\zeropoint
\notespresenttrue
\fi}
%D \macros
%D {fakenotes}
- % used in page-mul
+ % used in page-mul
- \ifdefined\currentnofcolumns\else \def\currentnofcolumns{\@@kln} \fi
+ \ifdefined\currentnofcolumns\else \def\currentnofcolumns{\nofcolumns} \fi
- \unexpanded\def\fakenotes
- {\ifhmode\endgraf\fi\ifvmode
- \calculatetotalclevernoteheight
- \ifdim\totalnoteheight>\zeropoint \kern\totalnoteheight \fi
- \fi}
+ \unexpanded\def\fakenotes
+ {\ifhmode\endgraf\fi\ifvmode
+ \calculatetotalclevernoteheight
+ \ifdim\totalnoteheight>\zeropoint \kern\totalnoteheight \fi
+ \fi}
- \unexpanded\def\fakepagenotes
- {\ifhmode\endgraf\fi\ifvmode
- \calculatetotalpagenoteheight
- \ifdim\totalnoteheight>\zeropoint \kern\totalnoteheight \fi
- \fi}
+ \unexpanded\def\fakepagenotes
+ {\ifhmode\endgraf\fi\ifvmode
+ \calculatetotalpagenoteheight
+ \ifdim\totalnoteheight>\zeropoint \kern\totalnoteheight \fi
+ \fi}
- \newdimen\totalnoteheight
+ % used in page-not but not yet ok
- \def\doaddtototalnoteheight#1%
- {\ifdim\ht#1>\zeropoint
- \ifcase\count#1\else
- % todo: divide by count
- \advance\totalnoteheight\ht #1%
- \advance\totalnoteheight\skip#1%
- \fi
- \fi}
+ \newdimen\totalnoteheight
- \def\docalculatetotalnoteheight
- {\ifcase\c_strc_notes_page_location % tricky here ! ! ! to be sorted out ! ! !
- \doaddtototalnoteheight\currentnoteins
- \else
- % \doaddtototalnoteheight\currentbackupnoteins
- \fi}
+ \unexpanded\def\additionaltotalnoteheight#insert% temp hacks anyway
+ {\dimexpr
+ \ifdim\ht#insert>\zeropoint
+ \ifcase\count#insert %
+ \zeropoint
+ \else % todo: divide by count
+ \ht#insert+\skip#insert% hm, no stretch but a dimen anyway
+ \fi
+ \else
+ \zeropoint
+ \fi
+ \relax}
+
+ \def\docalculatetotalnoteheight
+ {\ifcase\c_strc_notes_page_location % tricky here ! ! ! to be sorted out ! ! !
+ \advance\totalnoteheight\normalexpanded{\additionaltotalnoteheight\currentnoteinsertionnumber}%
+ \fi}
- \def\docalculatetotalclevernoteheight
- {\ifcase\c_strc_notes_page_location \else % tricky here ! ! ! to be sorted out ! ! !
- \doaddtototalnoteheight\currentnoteins
- \fi}
+ \def\docalculatetotalclevernoteheight
+ {\ifcase\c_strc_notes_page_location \else % tricky here ! ! ! to be sorted out ! ! !
+ \advance\totalnoteheight\normalexpanded{\additionaltotalnoteheight\currentnoteinsertionnumber}%
+ \fi}
- \def\docalculatetotalpagenoteheight
- {\doaddtototalnoteheight\currentnoteins}
+ \def\docalculatetotalpagenoteheight
+ {\advance\totalnoteheight\normalexpanded{\additionaltotalnoteheight\currentnoteinsertionnumber}}
- \def\calculatetotalnoteheight {\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalnoteheight}
- \def\calculatetotalclevernoteheight{\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalclevernoteheight}
- \def\calculatetotalpagenoteheight {\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalpagenoteheight}
+ \def\calculatetotalnoteheight {\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalnoteheight}
+ \def\calculatetotalclevernoteheight{\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalclevernoteheight}
+ \def\calculatetotalpagenoteheight {\totalnoteheight\zeropoint\strc_notes_process\docalculatetotalpagenoteheight}
%D Now how can this mechanism be hooked into \CONTEXT\ without
%D explicitly postponing footnotes? The solution turned out to
@@ -1644,7 +1701,7 @@
%D \stoptyping
% \def\dosynchronizenotes
-% {\ifvoid\currentnoteins\else\insert\currentnoteins{\unvbox\currentnoteins}\fi}
+% {\ifvoid\currentnoteinsertionnumber\else\insert\currentnoteinsertionnumber{\unvbox\currentnoteinsertionnumber}\fi}
%
% \def\synchronizenotes
% {\strc_notes_process\dosynchronizenotes}
@@ -1679,8 +1736,19 @@
\unexpanded\def\startlocalfootnotes {\startlocalnotes [\v!footnote]} % alleen footnote
\unexpanded\def\stoplocalfootnotes {\stoplocalnotes }
-\def\strc_notes_place_footnotes [#1][#2]{\ifsecondargument\placenotes [#1][#2,\c!height=\textheight]\else\placenotes [#1]\fi}
-\def\strc_notes_place_local_footnotes[#1][#2]{\ifsecondargument\placelocalnotes[#1][#2,\c!height=\textheight]\else\placelocalnotes[#1]\fi}
+\def\strc_notes_place_footnotes[#list][#settings]%
+ {\ifsecondargument
+ \placenotes[#list][#settings,\c!height=\textheight]%
+ \else
+ \placenotes[#list][\c!height=\textheight]%
+ \fi}
+
+\def\strc_notes_place_local_footnotes[#list][#settings]%
+ {\ifsecondargument
+ \placelocalnotes[#list][#settings,\c!height=\textheight]%
+ \else
+ \placelocalnotes[#list][\c!height=\textheight]%
+ \fi}
%D Goodies:
%D
@@ -1690,7 +1758,7 @@
%D }
%D \stoptyping
-\def\doifnoteonsamepageelse[#1]{\ctxlua{structures.notes.doifonsamepageasprevious("#1")}}
+\def\doifnoteonsamepageelse[#tag]{\ctxcommand{doifnoteonsamepageasprevious("#tag")}}
%D New trickery:
@@ -1711,15 +1779,15 @@
\unexpanded\def\notesymbol
{\dodoubleempty\strc_notes_symbol}
-\def\strc_notes_symbol[#1][#2]%
+\def\strc_notes_symbol[#tag][#reference]%
{\dontleavehmode
\begingroup
- \edef\currentnote{#1}%
+ \edef\currentnote{#tag}%
\usenotestyleandcolor\c!textstyle\c!textcolor
\ifnotesenabled
\ifsecondargument
\unskip
- \noteparameter\c!textcommand{\in[#2]}% command here?
+ \noteparameter\c!textcommand{\in[#reference]}% command here?
\else
\noteparameter\c!textcommand\lastnotesymbol % check if command double
\fi
@@ -1729,30 +1797,30 @@
\unexpanded\def\note
{\dodoubleempty\strc_notes_note}
-\def\strc_notes_note[#1][#2]%
+\def\strc_notes_note[#tag][#reference]%
{\ifsecondargument
- \strc_notes_symbol[#1][#2]%
+ \strc_notes_symbol[#tag][#reference]%
\else
\secondargumenttrue
- \strc_notes_symbol[\v!footnote][#1]%
+ \strc_notes_symbol[\v!footnote][#tag]%
\fi}
% will be redone if needed
%
-% \def\ownnotesymbol#1% #1 gets number passed
-% {\executeifdefined{\??notesymbol\currentnote}\empty}
+% \def\ownnotesymbol#1% #1 gets number passed
+% {\executeifdefined{\??notesymbol\currentnote}\empty}
%
-% \unexpanded\def\setnotesymbol[#1]#2#3%
-% {\prewordbreak % prevent lookback
-% \setgvalue{\??notesymbol#1}{#3}
-% \strc_notes_inject_symbol}
+% \unexpanded\def\setnotesymbol[#1]#2#3%
+% {\prewordbreak % prevent lookback
+% \setgvalue{\??notesymbol#1}{#3}
+% \strc_notes_inject_symbol}
%
-% \unexpanded\def\ownnote[#1]#2#3#4%
-% {\setnotesymbol[#1]{#2}{#3}%
-% \setnotetext [#1]{#4}}
+% \unexpanded\def\ownnote[#1]#2#3#4%
+% {\setnotesymbol[#1]{#2}{#3}%
+% \setnotetext [#1]{#4}}
%
-% \defineconversion
-% [ownnote]
-% [\ownnotesymbol]
+% \defineconversion
+% [ownnote]
+% [\ownnotesymbol]
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-num.lua b/Master/texmf-dist/tex/context/base/strc-num.lua
index 0715a9e4913..b0eae6b78bb 100644
--- a/Master/texmf-dist/tex/context/base/strc-num.lua
+++ b/Master/texmf-dist/tex/context/base/strc-num.lua
@@ -6,12 +6,13 @@ if not modules then modules = { } end modules ['strc-num'] = {
license = "see context related readme files"
}
--- this will be reimplemented and some more will move to the commands namespace
-
local format = string.format
local next, type = next, type
local min, max = math.min, math.max
-local texcount = tex.count
+local texcount, texsetcount = tex.count, tex.setcount
+
+-- Counters are managed here. They can have multiple levels which makes it easier to synchronize
+-- them. Synchronization is sort of special anyway, as it relates to document structuring.
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
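
The added comment above notes that counters carry multiple levels and that synchronizing them means pushing the Lua-side value back to the TeX end. A minimal standalone sketch of that idea follows, with hypothetical names and a much simplified data layout (not the actual counterdata records):

    -- one record per level; syncing copies a level's current number into a
    -- TeX count register via the stock LuaTeX call, "global" as in synchronize()
    local mycounter = {
        state = "start",
        data  = {
            { number = 3, start = 0, step = 1 }, -- level 1
            { number = 1, start = 0, step = 1 }, -- level 2
        },
    }

    local function synchronizelevel(registername, level)
        tex.setcount("global", registername, mycounter.data[level].number)
    end
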
@@ -26,8 +27,23 @@ local counters = structures.counters
local documents = structures.documents
local variables = interfaces.variables
-
--- state: start stop none reset
+local v_start = variables.start
+local v_page = variables.page
+local v_reverse = variables.reverse
+local v_first = variables.first
+local v_next = variables.next
+local v_previous = variables.previous
+local v_prev = variables.prev
+local v_last = variables.last
+----- v_no = variables.no
+local v_backward = variables.backward
+local v_forward = variables.forward
+----- v_subs = variables.subs or "subs"
+
+-- states: start stop none reset
+
+-- specials are used for counters that are set and incremented in special ways, like
+-- pagecounters that get this treatment in the page builder
counters.specials = counters.specials or { }
local counterspecials = counters.specials
@@ -66,54 +82,83 @@ end
job.register('structures.counters.collected', tobesaved, initializer, finalizer)
-local function constructor(t,s,name,i) -- variables ?
- if s == "last" then
+local constructor = { -- maybe some day we will provide an installer for more variants
+
+ last = function(t,name,i)
local cc = collected[name]
- t.stop = (cc and cc[i] and cc[i][t.range]) or 0 -- stop is available for diagnostics purposes only
+ local stop = (cc and cc[i] and cc[i][t.range]) or 0 -- stop is available for diagnostics purposes only
+ t.stop = stop
if t.offset then
- return t.stop - t.step
+ return stop - t.step
else
- return t.stop
+ return stop
end
- elseif s == "first" then
- if t.start > 0 then
- return t.start -- brrr
+ end,
+
+ first = function(t,name,i)
+ local start = t.start
+ if start > 0 then
+ return start -- brrr
elseif t.offset then
- return t.start + t.step + 1
+ return start + t.step + 1
else
- return t.start + 1
+ return start + 1
end
- elseif s == "prev" or s == "previous" then
+ end,
+
+ prev = function(t,name,i)
return max(t.first,t.number-1) -- todo: step
- elseif s == "next" then
+ end,
+
+ previous = function(t,name,i)
+ return max(t.first,t.number-1) -- todo: step
+ end,
+
+ next = function(t,name,i)
return min(t.last,t.number+1) -- todo: step
- elseif s == "backward" then
+ end,
+
+ backward = function(t,name,i)
if t.number - 1 < t.first then
return t.last
else
return t.previous
end
- elseif s == "forward" then
+ end,
+
+ forward = function(t,name,i)
if t.number + 1 > t.last then
return t.first
else
return t.next
end
- elseif s == "subs" then
+ end,
+
+ subs = function(t,name,i)
local cc = collected[name]
t.subs = (cc and cc[i+1] and cc[i+1][t.range]) or 0
return t.subs
- else
- return nil -- was 0, but that is fuzzy in testing for e.g. own
- end
+ end,
+
+}
+
+local function dummyconstructor(t,name,i)
+ return nil -- was 0, but that is fuzzy in testing for e.g. own
end
+setmetatableindex(constructor,function(t,k)
+ if trace_counters then
+ report_counters("unknown constructor %a",k)
+ end
+ return dummyconstructor
+end)
+
local function enhance()
for name, cd in next, counterdata do
local data = cd.data
for i=1,#data do
local ci = data[i]
- setmetatableindex(ci, function(t,s) return constructor(t,s,name,i) end)
+ setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end)
end
end
enhance = nil
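
The hunk above turns an if/elseif chain on the requested key (last, first, prev, next, backward, forward, subs) into a table of constructor functions, with a metatable __index that reports unknown keys and hands back a dummy. A self-contained sketch of that dispatch pattern, using plain setmetatable rather than ConTeXt's setmetatableindex helper and made-up field names:

    local handlers = {
        first = function(t) return t.start + 1 end,
        last  = function(t) return t.stop end,
    }

    setmetatable(handlers, {
        __index = function(_,key)
            print("unknown constructor " .. tostring(key)) -- stands in for report_counters
            return function() return nil end               -- dummy: resolves to nil
        end,
    })

    local record = { start = 0, stop = 10 }
    print(handlers.first(record)) --> 1
    print(handlers.oops(record))  --> nil, after the unknown-key report
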
@@ -126,7 +171,7 @@ local function allocate(name,i) -- can be metatable
level = 1,
-- block = "", -- todo
numbers = nil,
- state = variables.start, -- true
+ state = v_start, -- true
data = { },
saved = { },
}
@@ -145,7 +190,7 @@ local function allocate(name,i) -- can be metatable
offset = false,
stop = 0, -- via metatable: last, first, stop only for tracing
}
- setmetatableindex(ci, function(t,s) return constructor(t,s,name,i) end)
+ setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end)
cd[i] = ci
tobesaved[name][i] = { }
else
@@ -164,12 +209,12 @@ local function savevalue(name,i)
local cs = tobesaved[name][i]
local cc = collected[name]
if trace_counters then
- report_counters("saving value %s of counter named %s",cd.number,name)
+ report_counters("action %a, counter %s, value %s","save",name,cd.number)
end
local cr = cd.range
local old = (cc and cc[i] and cc[i][cr]) or 0
local number = cd.number
- if cd.method == variables.page then
+ if cd.method == v_page then
-- we can be one page ahead
number = number - 1
end
@@ -186,8 +231,8 @@ function counters.define(specification)
if name and name ~= "" then
-- todo: step
local d = allocate(name,1)
- d.start = specification.start
- d.state = variables.start or ""
+ d.start = tonumber(specification.start) or 0
+        d.state = v_start or ""
local counter = specification.counter
if counter and counter ~= "" then
d.counter = counter -- only for special purposes, cannot be false
@@ -202,18 +247,15 @@ end
function counters.compact(name,level,onlynumbers)
local cd = counterdata[name]
---~ print(name,cd)
if cd then
local data = cd.data
local compact = { }
for i=1,level or #data do
local d = data[i]
---~ print(name,i,d.number)
if d.number ~= 0 then
compact[i] = (onlynumbers and d.number) or d
end
end
---~ print(table.serialize(compact))
return compact
end
end
@@ -246,65 +288,76 @@ function counters.subs(name,n)
return counterdata[name].data[n].subs or 0
end
-function counters.setvalue(name,tag,value)
+local function setvalue(name,tag,value)
local cd = counterdata[name]
if cd then
cd[tag] = value
end
end
+counters.setvalue = setvalue
+
function counters.setstate(name,value) -- true/false
value = variables[value]
if value then
- counters.setvalue(name,"state",value)
+ setvalue(name,"state",value)
end
end
function counters.setlevel(name,value)
- counters.setvalue(name,"level",value)
+ setvalue(name,"level",value)
end
function counters.setoffset(name,value)
- counters.setvalue(name,"offset",value)
+ setvalue(name,"offset",value)
end
local function synchronize(name,d)
local dc = d.counter
if dc then
if trace_counters then
- report_counters("setting counter %s with name %s to %s",dc,name,d.number)
+ report_counters("action %a, name %a, counter %a, value %a","synchronize",name,dc,d.number)
end
- tex.setcount("global",dc,d.number)
+ texsetcount("global",dc,d.number)
end
local cs = counterspecials[name]
if cs then
if trace_counters then
- report_counters("invoking special for name %s",name)
+ report_counters("action %a, name %a, counter %a","synccommand",name,dc)
end
- cs()
+ cs(name)
end
end
-function counters.reset(name,n)
+local function reset(name,n)
local cd = counterdata[name]
if cd then
for i=n or 1,#cd.data do
local d = cd.data[i]
savevalue(name,i)
- d.number = d.start or 0
+ local number = d.start or 0
+ d.number = number
d.own = nil
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, value %a","reset",name,i,number)
+ end
synchronize(name,d)
end
cd.numbers = nil
+ else
end
end
-function counters.set(name,n,value)
+local function set(name,n,value)
local cd = counterdata[name]
if cd then
local d = allocate(name,n)
- d.number = value or 0
+ local number = value or 0
+ d.number = number
d.own = nil
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, value %a","set",name,"no",number)
+ end
synchronize(name,d)
end
end
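
In the code above, reset and set both end in synchronize(), which writes the number into the counter's TeX count register and then fires an optional per-name "special" callback; with this patch the callback receives the counter name. A rough, self-contained sketch of that flow with hypothetical names (saving of old values and sub-level bookkeeping omitted):

    local specials = { } -- per-name callbacks, in the spirit of counters.specials

    local function synchronize(name, record)
        -- the real code also does texsetcount("global", record.counter, record.number)
        local special = specials[name]
        if special then
            special(name) -- the callback now gets the counter name
        end
    end

    local function reset(name, counter)
        for i = 1, #counter.data do
            local d = counter.data[i]
            d.number = d.start or 0
            synchronize(name, d)
        end
    end

    specials["demo"] = function(name) print("synchronized " .. name) end
    reset("demo", { data = { { start = 0, number = 5 } } })
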
@@ -313,12 +366,19 @@ local function check(name,data,start,stop)
for i=start or 1,stop or #data do
local d = data[i]
savevalue(name,i)
- d.number = d.start or 0
+ local number = d.start or 0
+ d.number = number
d.own = nil
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, value %a","check",name,i,number)
+ end
synchronize(name,d)
end
end
+counters.reset = reset
+counters.set = set
+
function counters.setown(name,n,value)
local cd = counterdata[name]
if cd then
@@ -328,7 +388,7 @@ function counters.setown(name,n,value)
local level = cd.level
if not level or level == -1 then
-- -1 is signal that we reset manually
- elseif level > 0 then
+ elseif level > 0 or level == -3 then
check(name,d,n+1)
elseif level == 0 then
-- happens elsewhere, check this for block
@@ -345,7 +405,7 @@ function counters.restart(name,n,newstart,noreset)
local d = allocate(name,n)
d.start = newstart
if not noreset then
- counters.reset(name,n) -- hm
+ reset(name,n) -- hm
end
end
end
@@ -367,24 +427,38 @@ end
function counters.add(name,n,delta)
local cd = counterdata[name]
--- inspect(cd)
- if cd and (cd.state == variables.start or cd.state == "") then
+ if cd and (cd.state == v_start or cd.state == "") then
local data = cd.data
local d = allocate(name,n)
d.number = (d.number or d.start or 0) + delta*(d.step or 0)
-- d.own = nil
local level = cd.level
--- print(name,n,delta,level)
if not level or level == -1 then
-- -1 is signal that we reset manually
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, how %a","add",name,"no","no checking")
+ end
elseif level == -2 then
-- -2 is signal that we work per text
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, how %a","add",name,"text","checking")
+ end
check(name,data,n+1)
- elseif level > 0 then
+ elseif level > 0 or level == -3 then
-- within countergroup
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, how %a","add",name,level,"checking within group")
+ end
check(name,data,n+1)
elseif level == 0 then
-- happens elsewhere
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, how %a","add",name,level,"no checking")
+ end
+ else
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, how %a","add",name,"unknown","no checking")
+ end
end
synchronize(name,d)
return d.number -- not needed
@@ -392,19 +466,23 @@ function counters.add(name,n,delta)
return 0
end
-function counters.check(level) -- not used (yet)
+function counters.check(level)
for name, cd in next, counterdata do
- -- report_counters("%s %s %s",name,cd.level,level)
- if cd.level == level then
+ if level > 0 and cd.level == -3 then -- could become an option
if trace_counters then
- report_counters("resetting %s at level %s",name,level)
+ report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"head")
end
- counters.reset(name)
+ reset(name)
+ elseif cd.level == level then
+ if trace_counters then
+ report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"normal")
+ end
+ reset(name)
end
end
end
-function counters.get(name,n,key)
+local function get(name,n,key)
local d = allocate(name,n)
d = d and d[key]
if not d then
@@ -416,8 +494,10 @@ function counters.get(name,n,key)
end
end
+counters.get = get
+
function counters.value(name,n) -- what to do with own
- return counters.get(name,n or 1,'number') or 0
+ return get(name,n or 1,'number') or 0
end
function counters.converted(name,spec) -- name can be number and reference to storage
@@ -431,9 +511,8 @@ function counters.converted(name,spec) -- name can be number and reference to st
if cd then
local spec = spec or { }
local numbers, ownnumbers = { }, { }
- local reverse = spec.order == variables.reverse
+ local reverse = spec.order == v_reverse
local kind = spec.type or "number"
- local v_first, v_next, v_previous, v_last = variables.first, variables.next, variables.previous, variables.last
local data = cd.data
for k=1,#data do
local v = data[k]
@@ -446,7 +525,7 @@ function counters.converted(name,spec) -- name can be number and reference to st
vn = v.first
elseif kind == v_next then
vn = v.next
- elseif kind == v_previous then
+ elseif kind == v_prev or kind == v_previous then
vn = v.prev
elseif kind == v_last then
vn = v.last
@@ -498,7 +577,7 @@ function commands.showcounter(name)
local data = cd.data
for i=1,#data do
local d = data[i]
- context(" (%s: %s,%s,%s s:%s r:%s)",i,(d.start or 0),d.number or 0,d.last,d.step or 0,d.range or 0)
+ context(" (%s: %s,%s,%s s:%s r:%s)",i,d.start or 0,d.number or 0,d.last,d.step or 0,d.range or 0)
end
context("]")
end
@@ -517,54 +596,54 @@ function commands.checkcountersetup(name,level,start,state)
sections.setchecker(name,level,counters.reset)
end
---~ -- move to strc-pag.lua
-
---~ function counters.analyze(name,counterspecification)
---~ local cd = counterdata[name]
---~ -- safeguard
---~ if not cd then
---~ return false, false, "no counter data"
---~ end
---~ -- section data
---~ local sectiondata = sections.current()
---~ if not sectiondata then
---~ return cd, false, "not in section"
---~ end
---~ local references = sectiondata.references
---~ if not references then
---~ return cd, false, "no references"
---~ end
---~ local section = references.section
---~ if not section then
---~ return cd, false, "no section"
---~ end
---~ sectiondata = sections.collected[references.section]
---~ if not sectiondata then
---~ return cd, false, "no section data"
---~ end
---~ -- local preferences
---~ local no = variables.no
---~ if counterspecification and counterspecification.prefix == no then
---~ return cd, false, "current spec blocks prefix"
---~ end
---~ -- stored preferences (not used)
---~ if cd.prefix == no then
---~ return cd, false, "entry blocks prefix"
---~ end
---~ -- sectioning
---~ -- if sectiondata.prefix == no then
---~ -- return false, false, "sectiondata blocks prefix"
---~ -- end
---~ -- final verdict
---~ return cd, sectiondata, "okay"
---~ end
-
---~ function counters.prefixedconverted(name,prefixspec,numberspec)
---~ local cd, prefixdata, result = counters.analyze(name,prefixspec)
---~ if cd then
---~ if prefixdata then
---~ sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false)
---~ end
---~ counters.converted(name,numberspec)
---~ end
---~ end
+-- -- move to strc-pag.lua
+--
+-- function counters.analyze(name,counterspecification)
+-- local cd = counterdata[name]
+-- -- safeguard
+-- if not cd then
+-- return false, false, "no counter data"
+-- end
+-- -- section data
+-- local sectiondata = sections.current()
+-- if not sectiondata then
+-- return cd, false, "not in section"
+-- end
+-- local references = sectiondata.references
+-- if not references then
+-- return cd, false, "no references"
+-- end
+-- local section = references.section
+-- if not section then
+-- return cd, false, "no section"
+-- end
+-- sectiondata = sections.collected[references.section]
+-- if not sectiondata then
+-- return cd, false, "no section data"
+-- end
+-- -- local preferences
+-- local no = v_no
+-- if counterspecification and counterspecification.prefix == no then
+-- return cd, false, "current spec blocks prefix"
+-- end
+-- -- stored preferences (not used)
+-- if cd.prefix == no then
+-- return cd, false, "entry blocks prefix"
+-- end
+-- -- sectioning
+-- -- if sectiondata.prefix == no then
+-- -- return false, false, "sectiondata blocks prefix"
+-- -- end
+-- -- final verdict
+-- return cd, sectiondata, "okay"
+-- end
+--
+-- function counters.prefixedconverted(name,prefixspec,numberspec)
+-- local cd, prefixdata, result = counters.analyze(name,prefixspec)
+-- if cd then
+-- if prefixdata then
+-- sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false)
+-- end
+-- counters.converted(name,numberspec)
+-- end
+-- end
diff --git a/Master/texmf-dist/tex/context/base/strc-num.mkiv b/Master/texmf-dist/tex/context/base/strc-num.mkiv
index 9c08693be56..c4d54e51286 100644
--- a/Master/texmf-dist/tex/context/base/strc-num.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-num.mkiv
@@ -19,89 +19,60 @@
% work in progress
% to be checked: can we use the command handler code here?
+% all settings will move to lua
\installcorenamespace{counter}
-\unexpanded\def\setupstructurecounting{\dodoubleempty\getparameters[\??counter]}
+\installcommandhandler \??counter {counter} \??counter
-\setupstructurecounting
+\let\setupstructurecounting\setupcounter
+
+\setupcounter
[\c!way=\v!by\v!chapter,
% \c!blockway=,
% \c!prefixstarter=,
% \c!prefixstopper=,
\c!prefixconnector=.,
- \c!prefixsegments=\autostructureprefixsegments\sharedcounterparameter,
- \c!start=0,
+ \c!prefixsegments=\autostructureprefixsegments\rootcounterparameter,
+ \c!start=\zerocount,
\c!prefix=\v!yes,
\c!state=\v!start]
-\def\strc_counters_the#1% we need a nicer name
- {\ifcsname\??counter#1\c!number\endcsname
- \expandafter\strc_counters_the\csname\??counter#1\c!number\endcsname
- \else
- #1%
- \fi}
-
\def\autostructureprefixsegments#1% todo: \c!prefixsegments=\v!auto
- {2:\thenamedheadlevel{\ctxlua{structures.sections.way("#1\c!way")}}}
-
-\def\counterparameter#1#2% this will become \named....
- {\csname
- \ifcsname\??counter#1#2\endcsname
- \??counter#1#2%
- \else\ifcsname\??counter\strc_counters_the{#1}#2\endcsname
- \??counter\strc_counters_the{#1}#2%
- \else\ifcsname\??counter#2\endcsname
- \??counter#2%
- \else
- \s!empty
- \fi\fi\fi
- \endcsname}
-
-\def\sharedcounterparameter#1%
- {\csname
- \ifcsname\??counter#1\endcsname
- \??counter#1%
- \else
- \s!empty
- \fi
- \endcsname}
-
-\unexpanded\def\definecounter
- {\dodoubleempty\strc_counters_define}
-
-\def\strc_counters_define[#1][#2]%
- {\doifassignmentelse{#2}
- {\strc_counters_define_yes[#1][#2]}
- {\doifelsenothing{#2}
- {\strc_counters_define_yes[#1][]}
- {\strc_counters_define_nop[#1][#2]}}}
-
-\def\strc_counters_define_yes[#1][#2]%
- {\getparameters[\??counter#1][\s!counter=,#2]% counter is for internal purposes
- \ctxcommand{definecounter {
- name = "#1",
- start = tonumber("\counterparameter{#1}\c!start") or 0,
- counter = "\counterparameter{#1}\s!counter",
- method = "\counterparameter{#1}\c!method",
- }}%
- \strc_counters_check_setup{#1}}
-
-\def\strc_counters_define_nop[#1][#2]% inherit
- {\getparameters[\??counter#1][\c!number=#2]%
- \strc_counters_check_setup{#1}}
-
-\unexpanded\def\setupcounter
- {\dodoubleargument\strc_counters_setup}
-
-\def\strc_counters_setup[#1][#2]%
- {\getparameters[\??counter#1][\c!start=,#2]%
- \strc_counters_check_setup{#1}}
+ {2:\thenamedheadlevel{\ctxcommand{way("#1\c!way")}}}
+
+\appendtoks
+ \resetcounterparameter\s!counter
+\to \everypresetcounter
+
+\appendtoks
+ \ifx\currentcounterparent\empty
+ \edef\p_start{\counterparameter\c!start}%
+ \ctxcommand{definecounter {
+ name = "\currentcounter",
+ start = \ifx\p_start\empty0\else\number\p_start\fi,
+ counter = "\counterparameter\s!counter",
+ method = "\counterparameter\c!method",
+ }}%
+ \letcounterparameter\s!name\currentcounter
+ \else
+ \letcounterparameter\s!name\currentcounterparent
+ \fi
+ \strc_counters_check_setup
+\to \everydefinecounter
+
+\appendtoks
+ \ifx\currentcounter\empty \else
+ \edef\p_start{\counterparameter\c!start}%
+ \setexpandedcounterparameter\c!start{\ifx\p_start\empty0\else\number\p_start\fi}%
+ \strc_counters_check_setup
+ \fi
+\to \everysetupcounter
% % % %
\def\strc_counters_way#1% slow, we need to store it at the tex end
- {\ctxlua{structures.sections.way("\counterparameter{#1}\c!way")}}
+ {\ctxcommand{way("\namedcounterparameter{#1}\c!way")}}
% \def\thenamedcounterlevel#1%
% {\thenamedheadlevel{\strc_counters_way{#1}}}
@@ -109,36 +80,31 @@
\def\thenamedcounterlevel#1%
{\xthenamedheadlevel{\strc_counters_way{#1}}}
-\def\xthenamedheadlevel#1%
- {\xsectionlevel{#1}{\sectionheadsection{\sectionheadcoupling{#1}}}}
-
-\def\xsectionlevel#1#2% direct indirect
- {\csname\??headlevel
- \ifcsname\??headlevel#1\endcsname
- #1%
- \else\ifcsname\??headlevel#2\endcsname
- #2%
- \else
- \v!none
- \fi\fi
- \endcsname}
-
-\def\strc_counters_check_setup#1% does it have to happen here?
- {% this can be done at the lua end / a bit messy here ... todo ...
- \ifcsname\??counter#1\c!number\endcsname
- \doifelsevalue {\??counter#1\c!number}{#1} {\letbeundefined{\??counter#1\c!number}}%
- {\doifvaluenothing{\??counter#1\c!number} {\letbeundefined{\??counter#1\c!number}}}%
- \fi
- \ifcsname\??counter#1\c!number\endcsname
- % it's a clone
- \else
- \edef\currentcounterlevel{\thenamedcounterlevel{#1}}%
- \ctxcommand{checkcountersetup("#1",\currentcounterlevel,"\counterparameter{#1}\c!start","\counterparameter{#1}\c!state")}%
+% \def\strc_counters_check_setup#1% does it have to happen here?
+% {% this can be done at the lua end / a bit messy here ... todo ...
+% \ifcsname\??counter#1\c!number\endcsname
+% \doifelsevalue {\??counter#1\c!number}{#1} {\letbeundefined{\??counter#1\c!number}}%
+% {\doifvaluenothing{\??counter#1\c!number} {\letbeundefined{\??counter#1\c!number}}}%
+% \fi
+% \ifcsname\??counter#1\c!number\endcsname
+% % it's a clone
+% \else
+% \edef\currentcounterlevel{\thenamedcounterlevel{#1}}%
+% \edef\p_start{\counterparameter{#1}\c!start}%
+% \ctxcommand{checkcountersetup("#1",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter{#1}\c!state")}%
+% \fi}
+
+\def\strc_counters_check_setup
+ {\edef\p_name{\directcounterparameter\s!name}%
+ \ifx\currentcounter\p_name
+ \edef\currentcounterlevel{\thenamedcounterlevel\currentcounter}%
+ \edef\p_start{\counterparameter\c!start}%
+ \ctxcommand{checkcountersetup("\currentcounter",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter\c!state")}%
\fi}
-\unexpanded\def\doifcounterelse #1{\ctxcommand{doifelsecounter("\strc_counters_the{#1}")}}
-\unexpanded\def\doifcounter #1{\ctxcommand{doifcounter ("\strc_counters_the{#1}")}}
-\unexpanded\def\doifnotcounter #1{\ctxcommand{doifnotcounter ("\strc_counters_the{#1}")}}
+\unexpanded\def\doifcounterelse #1{\ctxcommand{doifelsecounter("\namedcounterparameter{#1}\s!name")}}
+\unexpanded\def\doifcounter #1{\ctxcommand{doifcounter ("\namedcounterparameter{#1}\s!name")}}
+\unexpanded\def\doifnotcounter #1{\ctxcommand{doifnotcounter ("\namedcounterparameter{#1}\s!name")}}
\unexpanded\def\setcounter {\dotripleempty \strc_counters_set_interfaced}
\unexpanded\def\setcounterown {\dotripleempty \strc_counters_setown_interfaced}
@@ -324,27 +290,27 @@
\def\strc_counters_prev #1{\strc_counters_prev_sub {#1}\plusone}
\def\strc_counters_subs #1{\strc_counters_subs_sub {#1}\plusone}
-\unexpanded\def\strc_counters_set_sub #1#2#3{\ctxcommand{setcounter ("\strc_counters_the{#1}",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_setown_sub #1#2#3{\ctxcommand{setowncounter ("\strc_counters_the{#1}",\number#2,"#3")}}
-\unexpanded\def\strc_counters_restart_sub #1#2#3{\ctxcommand{restartcounter("\strc_counters_the{#1}",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_reset_sub #1#2{\ctxcommand{resetcounter ("\strc_counters_the{#1}",\number#2)}}
-\unexpanded\def\strc_counters_increment_sub #1#2{\ctxcommand{addcounter ("\strc_counters_the{#1}",\number#2,1)}}
-\unexpanded\def\strc_counters_decrement_sub #1#2{\ctxcommand{addcounter ("\strc_counters_the{#1}",\number#2,-1)}}
+\unexpanded\def\strc_counters_set_sub #1#2#3{\ctxcommand{setcounter ("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
+\unexpanded\def\strc_counters_setown_sub #1#2#3{\ctxcommand{setowncounter ("\namedcounterparameter{#1}\s!name",\number#2,"#3")}}
+\unexpanded\def\strc_counters_restart_sub #1#2#3{\ctxcommand{restartcounter("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
+\unexpanded\def\strc_counters_reset_sub #1#2{\ctxcommand{resetcounter ("\namedcounterparameter{#1}\s!name",\number#2)}}
+\unexpanded\def\strc_counters_increment_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,1)}}
+\unexpanded\def\strc_counters_decrement_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,-1)}}
- \def\strc_counters_raw_sub #1#2{\ctxcommand{countervalue ("\strc_counters_the{#1}",\number#2)}} % maybe raw
- \def\strc_counters_last_sub #1#2{\ctxcommand{lastcountervalue ("\strc_counters_the{#1}",\number#2)}}
- \def\strc_counters_first_sub #1#2{\ctxcommand{firstcountervalue ("\strc_counters_the{#1}",\number#2)}}
- \def\strc_counters_next_sub #1#2{\ctxcommand{nextcountervalue ("\strc_counters_the{#1}",\number#2)}}
- \def\strc_counters_prev_sub #1#2{\ctxcommand{previouscountervalue("\strc_counters_the{#1}",\number#2)}}
- \def\strc_counters_subs_sub #1#2{\ctxcommand{subcountervalues ("\strc_counters_the{#1}",\number#2)}}
+ \def\strc_counters_raw_sub #1#2{\ctxcommand{countervalue ("\namedcounterparameter{#1}\s!name",\number#2)}} % maybe raw
+ \def\strc_counters_last_sub #1#2{\ctxcommand{lastcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
+ \def\strc_counters_first_sub #1#2{\ctxcommand{firstcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
+ \def\strc_counters_next_sub #1#2{\ctxcommand{nextcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
+ \def\strc_counters_prev_sub #1#2{\ctxcommand{previouscountervalue("\namedcounterparameter{#1}\s!name",\number#2)}}
+ \def\strc_counters_subs_sub #1#2{\ctxcommand{subcountervalues ("\namedcounterparameter{#1}\s!name",\number#2)}}
-\unexpanded\def\strc_counters_save #1{\ctxcommand{savecounter ("\strc_counters_the{#1}")}}
-\unexpanded\def\strc_counters_restore #1{\ctxcommand{restorecounter("\strc_counters_the{#1}")}}
+\unexpanded\def\strc_counters_save #1{\ctxcommand{savecounter ("\namedcounterparameter{#1}\s!name")}}
+\unexpanded\def\strc_counters_restore #1{\ctxcommand{restorecounter("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\strc_counters_incremented #1{\ctxcommand{incrementedcounter("\strc_counters_the{#1}",1, 1)}}
-\unexpanded\def\strc_counters_decremented #1{\ctxcommand{incrementedcounter("\strc_counters_the{#1}",1,-1)}}
+\unexpanded\def\strc_counters_incremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1, 1)}}
+\unexpanded\def\strc_counters_decremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1,-1)}}
-\unexpanded\def\showcounter [#1]{\ctxcommand{tracecounter("\strc_counters_the{#1}")}}
+\unexpanded\def\showcounter [#1]{\ctxcommand{tracecounter("\namedcounterparameter{#1}\s!name")}}
\unexpanded\def\incrementedcounter [#1]{\strc_counters_incremented{#1}} % expandable, no \dosingleargument
\unexpanded\def\decrementedcounter [#1]{\strc_counters_decremented{#1}} % expandable, no \dosingleargument
@@ -376,63 +342,64 @@
\strc_counters_reset{#1}%
\fi
\fi
- \ctxcommand{addcounter("\strc_counters_the{#1}",\number#2,1)}}
+ \ctxcommand{addcounter("\namedcounterparameter{#1}\s!name",\number#2,1)}}
\unexpanded\def\convertedcounter
{\dodoubleempty\strc_counters_converted}
\def\strc_counters_converted[#1][#2]%
{\begingroup
- \ifsecondargument\getparameters[\??counter#1][#2]\fi
+ \edef\currentcounter{#1}%
+ \ifsecondargument\setupcurrentcounter[#2]\fi
\ctxlua{structures.sections.prefixedconverted(
- "\strc_counters_the{#1}",
+ "\counterparameter\s!name",
{
- prefix = "\counterparameter{#1}\c!prefix",
- separatorset = "\counterparameter{#1}\c!prefixseparatorset",
- conversion = "\counterparameter{#1}\c!prefixconversion",
- conversionset = "\counterparameter{#1}\c!prefixconversionset",
- starter = \!!bs\counterparameter{#1}\c!prefixstarter\!!es,
- stopper = \!!bs\counterparameter{#1}\c!prefixstopper\!!es,
- set = "\counterparameter{#1}\c!prefixset",
- segments = "\counterparameter{#1}\c!prefixsegments",
- connector = \!!bs\counterparameter{#1}\c!prefixconnector\!!es,
+ prefix = "\counterparameter\c!prefix",
+ separatorset = "\counterparameter\c!prefixseparatorset",
+ conversion = "\counterparameter\c!prefixconversion",
+ conversionset = "\counterparameter\c!prefixconversionset",
+ starter = \!!bs\counterparameter\c!prefixstarter\!!es,
+ stopper = \!!bs\counterparameter\c!prefixstopper\!!es,
+ set = "\counterparameter\c!prefixset",
+ segments = "\counterparameter\c!prefixsegments",
+ connector = \!!bs\counterparameter\c!prefixconnector\!!es,
},
{
- order = "\counterparameter{#1}\c!numberorder",
- separatorset = "\counterparameter{#1}\c!numberseparatorset",
- conversion = \!!bs\counterparameter{#1}\c!numberconversion\!!es,
- conversionset = "\counterparameter{#1}\c!numberconversionset",
- starter = \!!bs\counterparameter{#1}\c!numberstarter\!!es,
- stopper = \!!bs\counterparameter{#1}\c!numberstopper\!!es,
- segments = "\counterparameter{#1}\c!numbersegments",
- type = "\counterparameter{#1}\c!type",
- criterium = "\counterparameter{#1}\c!criterium", % might change if we also want this with sectioning
+ order = "\counterparameter\c!numberorder",
+ separatorset = "\counterparameter\c!numberseparatorset",
+ conversion = \!!bs\counterparameter\c!numberconversion\!!es,
+ conversionset = "\counterparameter\c!numberconversionset",
+ starter = \!!bs\counterparameter\c!numberstarter\!!es,
+ stopper = \!!bs\counterparameter\c!numberstopper\!!es,
+ segments = "\counterparameter\c!numbersegments",
+ type = "\counterparameter\c!type",
+ criterium = "\counterparameter\c!criterium", % might change if we also want this with sectioning
}
)}%
\endgroup}
\def\directconvertedcounter#1#2% name, type
{\ctxlua{structures.sections.prefixedconverted(
- "\strc_counters_the{#1}",
+ "\namedcounterparameter{#1}\s!name",
{
- prefix = "\counterparameter{#1}\c!prefix",
- separatorset = "\counterparameter{#1}\c!prefixseparatorset",
- conversion = "\counterparameter{#1}\c!prefixconversion",
- conversionset = "\counterparameter{#1}\c!prefixconversionset",
- % starter = \!!bs\counterparameter{#1}\c!prefixstarter\!!es,
- % stopper = \!!bs\counterparameter{#1}\c!prefixstopper\!!es,
- set = "\counterparameter{#1}\c!prefixset",
- segments = "\counterparameter{#1}\c!prefixsegments",
- connector = \!!bs\counterparameter{#1}\c!prefixconnector\!!es,
+ prefix = "\namedcounterparameter{#1}\c!prefix",
+ separatorset = "\namedcounterparameter{#1}\c!prefixseparatorset",
+ conversion = "\namedcounterparameter{#1}\c!prefixconversion",
+ conversionset = "\namedcounterparameter{#1}\c!prefixconversionset",
+ % starter = \!!bs\namedcounterparameter{#1}\c!prefixstarter\!!es,
+ % stopper = \!!bs\namedcounterparameter{#1}\c!prefixstopper\!!es,
+ set = "\namedcounterparameter{#1}\c!prefixset",
+ segments = "\namedcounterparameter{#1}\c!prefixsegments",
+ connector = \!!bs\namedcounterparameter{#1}\c!prefixconnector\!!es,
},
{
- order = "\counterparameter{#1}\c!numberorder",
- separatorset = "\counterparameter{#1}\c!numberseparatorset",
- conversion = \!!bs\counterparameter{#1}\c!numberconversion\!!es,
- conversionset = "\counterparameter{#1}\c!numberconversionset",
- starter = \!!bs\counterparameter{#1}\c!numberstarter\!!es,
- stopper = \!!bs\counterparameter{#1}\c!numberstopper\!!es,
- segments = "\counterparameter{#1}\c!numbersegments",
+ order = "\namedcounterparameter{#1}\c!numberorder",
+ separatorset = "\namedcounterparameter{#1}\c!numberseparatorset",
+ conversion = \!!bs\namedcounterparameter{#1}\c!numberconversion\!!es,
+ conversionset = "\namedcounterparameter{#1}\c!numberconversionset",
+ starter = \!!bs\namedcounterparameter{#1}\c!numberstarter\!!es,
+ stopper = \!!bs\namedcounterparameter{#1}\c!numberstopper\!!es,
+ segments = "\namedcounterparameter{#1}\c!numbersegments",
type = "#2",
}
)}}
@@ -447,49 +414,53 @@
\secondargumentfalse\strc_counters_converted[#1][]%
\fi}
-\unexpanded\def\doifdefinedcounter#1%
- {\ifcsname\s!number#1\c!number\endcsname
- \expandafter\firstofoneargument
- \else
- \expandafter\gobbleoneargument
- \fi}
-
-\unexpanded\def\doifundefinedcounter#1%
- {\ifcsname\s!number#1\c!number\endcsname
- \expandafter\gobbleoneargument
- \else
- \expandafter\firstofoneargument
- \fi}
+% \unexpanded\def\doifdefinedcounter#1%
+% {\ifcsname\namedcounterhash{#1}\s!name\endcsname
+% \expandafter\firstofoneargument
+% \else
+% \expandafter\gobbleoneargument
+% \fi}
+%
+% \unexpanded\def\doifundefinedcounter#1%
+% {\ifcsname\namedcounterhash{#1}\s!name\endcsname
+% \expandafter\gobbleoneargument
+% \else
+% \expandafter\firstofoneargument
+% \fi}
+%
+% \unexpanded\def\doifdefinedcounterelse#1%
+% {\ifcsname\namedcounterhash{#1}\s!name\endcsname
+% \expandafter\firstoftwoarguments
+% \else
+% \expandafter\secondoftwoarguments
+% \fi}
-\unexpanded\def\doifdefinedcounterelse#1%
- {\ifcsname\s!number#1\c!number\endcsname
- \expandafter\firstoftwoarguments
- \else
- \expandafter\secondoftwoarguments
- \fi}
+\unexpanded\def\doifdefinedcounter {\doifcommandhandler \??counter}
+\unexpanded\def\doifundefinedcounter {\doifnotcommandhandler \??counter}
+\unexpanded\def\doifdefinedcounterelse{\doifelsecommandhandler\??counter}
%D What follows is a compatibility layer.
-\let \numberparameter \counterparameter % {name}\c!key
+\let \numberparameter \namedcounterparameter % {name}\c!key
-\let \definenumber \definecounter % [name]
-\let \setupnumber \setupcounter % [name][setups]
+\let \definenumber \definecounter % [name]
+\let \setupnumber \setupcounter % [name][setups]
-\let \setnumber \setcounter % [name]{value}
-\let \resetnumber \resetcounter % [name]
-\let \savenumber \savecounter % [name]
-\let \restorenumber \restorecounter % [name]
-\let \incrementnumber \incrementcounter % [name]
-\let \decrementnumber \decrementcounter % [name]
-\let \rawnumber \rawcounter % [name]
-\let \getnumber \convertedcounter % [name]
-\let \convertednumber \convertedcounter % [name]
+\let \setnumber \setcounter % [name]{value}
+\let \resetnumber \resetcounter % [name]
+\let \savenumber \savecounter % [name]
+\let \restorenumber \restorecounter % [name]
+\let \incrementnumber \incrementcounter % [name]
+\let \decrementnumber \decrementcounter % [name]
+\let \rawnumber \rawcounter % [name]
+\let \getnumber \convertedcounter % [name]
+\let \convertednumber \convertedcounter % [name]
-\let \doifdefinednumber \doifdefinedcounter % {number}{true}
-\let \doifundefinednumber \doifnotdefinedcounter % {number}{true}
-\let \doifdefinednumberelse \doifdefinedcounterelse % {number}{true}{false}
+\let \doifdefinednumber \doifdefinedcounter % {number}{true}
+\let \doifundefinednumber \doifnotdefinedcounter % {number}{true}
+\let \doifdefinednumberelse \doifdefinedcounterelse % {number}{true}{false}
-\let \setupnumbering \setupstructurecounting
+\let \setupnumbering \setupcounter
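+
+% A minimal compatibility sketch (hypothetical counter name "items", not part of
+% this patch): after \definenumber[items], legacy calls like \setnumber[items]{3}
+% and \getnumber[items] simply resolve to \setcounter[items]{3} and
+% \convertedcounter[items] through the synonyms above.
+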
%D Helpers:
@@ -507,22 +478,20 @@
% \m_strc_counters_last_registered_attribute
% \m_strc_counters_last_registered_synchronize
-\newconditional\hascountercaption
-\newconditional\hascountertitle
-\newconditional\hascounternumber
+% currentstructurecomponent => \strc_current_ or just \m_strc_
\unexpanded\def\strc_counters_register_component#1#2#3#4#5#6#7[#8][#9]% maybe also nolist
{\begingroup
%
#2[\s!haslevel=1,#8]%
- \edef\hascounterlevel {#3\s!haslevel}%
- \edef\hascountercaption{#3\s!hascaption}%
- \edef\hascountertitle {#3\s!hastitle}%
- \edef\hascounternumber {#3\s!hasnumber}%
+ \edef\p_haslevel {#3\s!haslevel}%
+ \edef\p_hascaption{#3\s!hascaption}%
+ \edef\p_hastitle {#3\s!hastitle}%
+ \edef\p_hasnumber {#3\s!hasnumber}%
%
- \edef\askedprefixsegments{#3\c!prefixsegments}%
- \ifx\askedprefixsegments\v!auto
- \edef\askedprefixsegments{\autostructureprefixsegments#3}%
+ \edef\p_prefixsegments{#3\c!prefixsegments}%
+ \ifx\p_prefixsegments\v!auto
+ \edef\p_prefixsegments{\autostructureprefixsegments#3}%
\fi
%
\edef\currentname{#3\c!name}%
@@ -534,7 +503,7 @@
\let\currentcounter\currentname
\fi
%
- \ifx\hascountercaption\v!yes
+ \ifx\p_hascaption\v!yes
\xdef\currentstructurecomponentname {#3\c!name}%
\xdef\currentstructurecomponentlevel {#3\c!level}%
\edef\currentstructurecomponentexpansion {#3\c!expansion}%
@@ -613,7 +582,7 @@
list = \!!bs\detokenize\expandafter{\currentstructurecomponentlist}\!!es,
\fi
},
- \ifx\hascounternumber\v!yes
+ \ifx\p_hasnumber\v!yes
prefixdata = {
prefix = "#3\c!prefix",
separatorset = "#3\c!prefixseparatorset",
@@ -621,11 +590,12 @@
conversionset = "#3\c!prefixconversionset",
set = "#3\c!prefixset",
% segments = "#3\c!prefixsegments",
- segments = "\askedprefixsegments",
+ segments = "\p_prefixsegments",
connector = \!!bs#3\c!prefixconnector\!!es,
},
numberdata = {
numbers = structures.counters.compact("\currentcounter",nil,true),
+ counter = "\currentcounter",
separatorset = "#3\c!numberseparatorset",
conversion = \!!bs#3\c!numberconversion\!!es,
conversionset = "#3\c!numberconversionset",
@@ -637,7 +607,7 @@
userdata = \!!bs\detokenize{#9}\!!es % will be converted to table at the lua end
}
}}%
- \xdef\m_strc_counters_last_registered_attribute {\ctxlua {tex.write(structures.references.setinternalreference(nil,nil,\nextinternalreference))}}%
+ \xdef\m_strc_counters_last_registered_attribute {\ctxcommand {setinternalreference(nil,nil,\nextinternalreference)}}%
\xdef\m_strc_counters_last_registered_synchronize{\ctxlatecommand{enhancelist(\m_strc_counters_last_registered_index)}}%
\else
\glet\m_strc_counters_last_registered_index \relax
@@ -650,32 +620,10 @@
\let\m_strc_counters_last_registered_attribute \relax
\let\m_strc_counters_last_registered_synchronize\relax
-\def\strc_counter_setup_using_parameter#1#2% name \someparameter
- {\setupcounter
- [#1]
- [ \c!start=#2\c!start,
- \c!state=#2\c!state, % beware, "" == start
- \c!way=#2\c!way,
- %
- \c!prefix=#2\c!prefix,
- \c!prefixseparatorset=#2\c!prefixseparatorset,
- \c!prefixconversion=#2\c!prefixconversion,
- \c!prefixconversionset=#2\c!prefixconversionset,
- \c!prefixstarter=#2\c!prefixstarter,
- \c!prefixstopper=#2\c!prefixstopper,
- \c!prefixset=#2\c!prefixset,
- \c!prefixsegments=#2\c!prefixsegments,
- \c!prefixset=#2\c!prefixset,
- \c!prefixconnector=#2\c!prefixconnector,
- %
- \c!numberseparatorset=#2\c!numberseparatorset,
- \c!numberconversion=#2\c!numberconversion,
- \c!numberconversionset=#2\c!numberconversionset,
- \c!numberstarter=#2\c!numberstarter,
- \c!numberstopper=#2\c!numberstopper,
- \c!numbersegments=#2\c!numbersegments]}
-
-\def\strc_counter_preset_using_parameter#1#2% \setupcommand \someparameter
+% This can be improved as we don't need to pass all these variables
+% each time (we can set them up once).
+
+\unexpanded\def\strc_counter_preset_using_parameter#1#2% \setupcommand \someparameter
{#1%
[\c!way =#2\c!way,
\c!prefix =#2\c!prefix,
@@ -694,4 +642,104 @@
\c!numberstopper =#2\c!numberstopper,
\c!numbersegments =#2\c!numbersegments]}
+% \unexpanded\def\strc_counter_setup_using_parameter#1#2% name \someparameter
+% {\setupcounter
+% [#1]
+% [ \c!start=#2\c!start,
+% \c!state=#2\c!state, % beware, "" == start
+% \c!way=#2\c!way,
+% %
+% \c!prefix=#2\c!prefix,
+% \c!prefixseparatorset=#2\c!prefixseparatorset,
+% \c!prefixconversion=#2\c!prefixconversion,
+% \c!prefixconversionset=#2\c!prefixconversionset,
+% \c!prefixstarter=#2\c!prefixstarter,
+% \c!prefixstopper=#2\c!prefixstopper,
+% \c!prefixset=#2\c!prefixset,
+% \c!prefixsegments=#2\c!prefixsegments,
+% \c!prefixset=#2\c!prefixset,
+% \c!prefixconnector=#2\c!prefixconnector,
+% %
+% \c!numberseparatorset=#2\c!numberseparatorset,
+% \c!numberconversion=#2\c!numberconversion,
+% \c!numberconversionset=#2\c!numberconversionset,
+% \c!numberstarter=#2\c!numberstarter,
+% \c!numberstopper=#2\c!numberstopper,
+% \c!numbersegments=#2\c!numbersegments]}
+
+\unexpanded\def\strc_counter_setup_using_parameter#1#2% name \someparameter
+ {\edef\currentcounter{#1}%
+ %
+ \setcounterparameter \c!start{#2\c!start}%
+ \setcounterparameter \c!state{#2\c!state}% % beware, "" == start
+ \setcounterparameter \c!way{#2\c!way}%
+ %
+ \setcounterparameter \c!prefix{#2\c!prefix}%
+ \setcounterparameter \c!prefixseparatorset{#2\c!prefixseparatorset}%
+ \setcounterparameter \c!prefixconversion{#2\c!prefixconversion}%
+ \setcounterparameter\c!prefixconversionset{#2\c!prefixconversionset}%
+ \setcounterparameter \c!prefixstarter{#2\c!prefixstarter}%
+ \setcounterparameter \c!prefixstopper{#2\c!prefixstopper}%
+ \setcounterparameter \c!prefixset{#2\c!prefixset}%
+ \setcounterparameter \c!prefixsegments{#2\c!prefixsegments}%
+ \setcounterparameter \c!prefixset{#2\c!prefixset}%
+ \setcounterparameter \c!prefixconnector{#2\c!prefixconnector}%
+ %
+ \setcounterparameter \c!numberseparatorset{#2\c!numberseparatorset}%
+ \setcounterparameter \c!numberconversion{#2\c!numberconversion}%
+ \setcounterparameter\c!numberconversionset{#2\c!numberconversionset}%
+ \setcounterparameter \c!numberstarter{#2\c!numberstarter}%
+ \setcounterparameter \c!numberstopper{#2\c!numberstopper}%
+ \setcounterparameter \c!numbersegments{#2\c!numbersegments}%
+ %
+ \the\everysetupcounter}
+
+\unexpanded\def\mult_interfaces_counter_association#1#2#3#4#5#6#7% tag current setup parameter list sync register
+ {\strc_counter_preset_using_parameter#3\rootcounterparameter
+ \newtoks#5%
+ \unexpanded\def #6{\ifx#2\empty\the#5\else\strc_counter_setup_using_parameter#2#4\fi}% sync
+ \unexpanded\def#7##1{\normalexpanded{#5{\the#5\strc_counter_setup_using_parameter{##1}\noexpand#4}}}} % register
+
+\unexpanded\def\installcounterassociation#1% => synchronize#1counters register#1counter
+ {\normalexpanded
+ {\mult_interfaces_counter_association
+ {#1}% not \??xx but xx
+ \expandafter\noexpand\csname current#1\endcsname
+ \expandafter\noexpand\csname setup#1\endcsname
+ \expandafter\noexpand\csname #1parameter\endcsname
+ \expandafter\noexpand\csname counter_association_list_#1\endcsname
+ \expandafter\noexpand\csname synchronize#1counters\endcsname
+ \expandafter\noexpand\csname register#1counter\endcsname}}
+
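+% A minimal usage sketch, with a hypothetical handler name "figure" (not part of
+% this patch): \installcounterassociation{figure} presets \setupfigure from
+% \rootcounterparameter and defines \synchronizefigurecounters plus
+% \registerfigurecounter. Calling \registerfigurecounter{subfigurenumber} queues
+% \strc_counter_setup_using_parameter{subfigurenumber}\figureparameter in the
+% association list, and \synchronizefigurecounters later replays that list (or,
+% when \currentfigure is non-empty, resynchronizes just that counter from
+% \figureparameter).
+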
+% needs testing:
+%
+% \unexpanded\def\strc_counter_setup_push#1#2#3% \someparameter \directsomeparameter \setexpandedsomeparameter
+% {\let\savedcounterparameter \counterparameter
+% \let\saveddirectcounterparameter \directcounterparameter
+% \let\savedsetexpandedcounterparameter\setexpandedcounterparameter
+% % remap
+% \let\counterparameter #1%
+% \let\directcounterparameter #2%
+% \let\setexpandedcounterparameter #3}
+%
+% \unexpanded\def\strc_counter_setup_pop
+% {\let\counterparameter \savedcounterparameter
+% \let\directcounterparameter \saveddirectcounterparameter
+% \let\setexpandedcounterparameter \savedsetexpandedcounterparameter}
+%
+% \unexpanded\def\mult_interfaces_counter_association#1#2#3#4#5#6#7% tag current setup parameter list sync register
+% {\strc_counter_preset_using_parameter#3\rootcounterparameter
+% \newtoks#5%
+% \unexpanded\def#6% sync
+% {\strc_counter_setup_push
+% \ifx#2\empty
+% \the#5%
+% \else
+% \let\currentcounter\empty
+% \the\everysetupcounter
+% \fi
+% \strc_counter_setup_pop}%
+% \unexpanded\def#7##1% register
+% {\normalexpanded{#5{\the#5\edef\noexpand\currentcounter{##1}\noexpand\the\everysetupcounter}}}}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-pag.lua b/Master/texmf-dist/tex/context/base/strc-pag.lua
index 460d4ff008e..f70d37d6397 100644
--- a/Master/texmf-dist/tex/context/base/strc-pag.lua
+++ b/Master/texmf-dist/tex/context/base/strc-pag.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['strc-pag'] = {
license = "see context related readme files"
}
-local texcount, format = tex.count, string.format
+local texcount = tex.count
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
@@ -69,6 +69,9 @@ function pages.save(prefixdata,numberdata)
end
end
+-- We can set the pagenumber but as it only gets incremented in the page
+-- builder we have to make sure it starts at least at 1.
+
function counters.specials.userpage()
local r = texcount.realpageno
if r > 0 then
@@ -78,12 +81,23 @@ function counters.specials.userpage()
if trace_pages then
report_pages("forcing pagenumber of realpage %s to %s",r,t.number)
end
+ return
end
end
+ local u = texcount.userpageno
+ if u == 0 then
+ if trace_pages then
+ report_pages("forcing pagenumber of realpage %s to %s (probably a bug)",r,1)
+ end
+ counters.setvalue("userpage",1)
+ texcount.userpageno = 1
+ end
end
+local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
+
local function convertnumber(str,n)
- return format("\\convertnumber{%s}{%s}",str or "numbers",n)
+ return f_convert(str or "numbers",n)
end
function pages.number(realdata,pagespec)
diff --git a/Master/texmf-dist/tex/context/base/strc-pag.mkiv b/Master/texmf-dist/tex/context/base/strc-pag.mkiv
index e3828464cd8..85cfeb40f55 100644
--- a/Master/texmf-dist/tex/context/base/strc-pag.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-pag.mkiv
@@ -19,11 +19,11 @@
% Allocation:
-\countdef\realpageno = 0 \realpageno = 1
-\countdef\userpageno = 1 \userpageno = 1
-\countdef\subpageno = 2 \subpageno = 0 % !!
-\countdef\arrangeno = 3 \arrangeno = 0 % !!
-\countdef\pagenoshift = 4 \pagenoshift = 0 % !!
+\countdef\realpageno \zerocount \realpageno \plusone
+\countdef\userpageno \plusone \userpageno \plusone
+\countdef\subpageno \plustwo \subpageno \zerocount % !
+\countdef\arrangeno \plusthree \arrangeno \zerocount % !
+\countdef\pagenoshift\plusfour \pagenoshift\zerocount % !
\let\pageno\userpageno
@@ -33,7 +33,8 @@
\newtoks\everyinitializepagecounters
-\def\initializepagecounters{\the\everyinitializepagecounters}
+\unexpanded\def\initializepagecounters
+ {\the\everyinitializepagecounters}
\appendtoks
\initializepagecounters
@@ -71,27 +72,30 @@
% \stopbodymatter
% \stoptext
-\definecounter[\s!realpage][\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=realpageno,\c!method=\v!page]
-\definecounter[\s!userpage][\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=userpageno,\c!method=\v!page]
-\definecounter[\s!subpage] [\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=subpageno, \c!method=\v!page]
+\definecounter[\s!realpage][\c!prefix=\v!no,\c!start=\plusone,\c!prefixsegments=,\s!counter=realpageno,\c!method=\v!page]
+\definecounter[\s!userpage][\c!prefix=\v!no,\c!start=\plusone,\c!prefixsegments=,\s!counter=userpageno,\c!method=\v!page]
+\definecounter[\s!subpage] [\c!prefix=\v!no,\c!start=\plusone,\c!prefixsegments=,\s!counter=subpageno, \c!method=\v!page]
\newtoks\everysetuprealpagenumber % todo: set state: none, start, stop, reset
\newtoks\everysetupuserpagenumber % todo: set state: none, start, stop, reset
\newtoks\everysetupsubpagenumber % todo: set state: none, start, stop, reset
-\unexpanded\def\setuprealpagenumber{\dosingleargument\dosetuprealpagenumber}
-\unexpanded\def\setupuserpagenumber{\dosingleargument\dosetupuserpagenumber}
-\unexpanded\def\setupsubpagenumber {\dosingleargument\dosetupsubpagenumber}
+\unexpanded\def\setuprealpagenumber{\dosingleargument\strc_pagenumbers_setup_realpage}
+\unexpanded\def\setupuserpagenumber{\dosingleargument\strc_pagenumbers_setup_userpage}
+\unexpanded\def\setupsubpagenumber {\dosingleargument\strc_pagenumbers_setup_subpage }
-\def\dosavepagenumberstate#1{\edef\oldpagenumberstate{\counterparameter#1\c!state}}
+\let\m_strc_pagenumbers_state_old\zerocount
+\let\m_strc_pagenumbers_state_new\zerocount
-\def\dosetuprealpagenumber[#1]{\dosavepagenumberstate\s!realpage\strc_counters_setup[\s!realpage][#1]\the\everysetuprealpagenumber}
-\def\dosetupuserpagenumber[#1]{\dosavepagenumberstate\s!userpage\strc_counters_setup[\s!userpage][#1]\the\everysetupuserpagenumber}
-\def\dosetupsubpagenumber [#1]{\dosavepagenumberstate\s!subpage \strc_counters_setup[\s!subpage ][#1]\the\everysetupsubpagenumber }
+\def\strc_pagenumbers_save_state#1{\edef\m_strc_pagenumbers_state_old{\namedcounterparameter#1\c!state}}
-\def\resetrealpagenumber {} % not permitted
-\def\resetuserpagenumber {\strc_counters_reset\s!userpage}
-\def\resetsubpagenumber {\strc_counters_reset\s!subpage}
+\def\strc_pagenumbers_setup_realpage[#1]{\strc_pagenumbers_save_state\s!realpage\setupcounter[\s!realpage][#1]\the\everysetuprealpagenumber}
+\def\strc_pagenumbers_setup_userpage[#1]{\strc_pagenumbers_save_state\s!userpage\setupcounter[\s!userpage][#1]\the\everysetupuserpagenumber}
+\def\strc_pagenumbers_setup_subpage [#1]{\strc_pagenumbers_save_state\s!subpage \setupcounter[\s!subpage ][#1]\the\everysetupsubpagenumber }
+
+\unexpanded\def\resetrealpagenumber {} % not permitted
+\unexpanded\def\resetuserpagenumber {\strc_counters_reset\s!userpage}
+\unexpanded\def\resetsubpagenumber {\strc_counters_reset\s!subpage}
\appendtoks
\strc_counters_set\s!realpage\realpageno
@@ -102,29 +106,31 @@
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
-\def\savecurrentpagestate % \normalexpanded?
+\def\strc_pagenumbers_page_state_save % \normalexpanded?
{\ctxlua{structures.pages.save({
- prefix = "\counterparameter\s!userpage\c!prefix",
- separatorset = "\counterparameter\s!userpage\c!prefixseparatorset",
- conversion = "\counterparameter\s!userpage\c!prefixconversion",
- conversionset = "\counterparameter\s!userpage\c!prefixconversionset",
- set = "\counterparameter\s!userpage\c!prefixset",
- segments = "\counterparameter\s!userpage\c!prefixsegments",
- connector = \!!bs\counterparameter\s!userpage\c!prefixconnector\!!es,
+ prefix = "\namedcounterparameter\s!userpage\c!prefix",
+ separatorset = "\namedcounterparameter\s!userpage\c!prefixseparatorset",
+ conversion = "\namedcounterparameter\s!userpage\c!prefixconversion",
+ conversionset = "\namedcounterparameter\s!userpage\c!prefixconversionset",
+ set = "\namedcounterparameter\s!userpage\c!prefixset",
+ segments = "\namedcounterparameter\s!userpage\c!prefixsegments",
+ connector = \!!bs\namedcounterparameter\s!userpage\c!prefixconnector\!!es,
},{
- conversion = "\counterparameter\s!userpage\c!numberconversion",
- conversionset = "\counterparameter\s!userpage\c!numberconversionset",
- starter = \!!bs\counterparameter\s!userpage\c!numberstarter\!!es,
- stopper = \!!bs\counterparameter\s!userpage\c!numberstopper\!!es,
+ conversion = "\namedcounterparameter\s!userpage\c!numberconversion",
+ conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
+ starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
+ stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
}
)}}
\prependtoks
- \savecurrentpagestate
+ \strc_pagenumbers_page_state_save
\to \everyshipout
-\def\pushpagestate{\setxvalue{\??nm:\s!userpage:\c!state}{\counterparameter\s!userpage\c!state}}
-\def\poppagestate {\normalexpanded{\setuppagenumber[\c!state=\getvalue{\??nm:\s!userpage:\c!state}]}}
+\installcorenamespace{pagestatestack} % no level yet
+
+\unexpanded\def\strc_pagenumbers_page_state_push{\setxvalue{\??pagestatestack\c!state}{\namedcounterparameter\s!userpage\c!state}}
+\unexpanded\def\strc_pagenumbers_page_state_pop {\normalexpanded{\setuppagenumber[\c!state=\getvalue{\??pagestatestack\c!state}]}}
\setuppagenumber
[\c!way=\v!by\v!text,
@@ -207,17 +213,17 @@
\def\nextuserpagenumber {\directconvertedcounter\s!userpage\v!next}
\def\nextsubpagenumber {\directconvertedcounter\s!subpage \v!next}
-\def\dodeincrementpageboundcounters % only at the end
+\unexpanded\def\strc_pagenumbers_decrement_counters % only at the end
{\strc_counters_decrement\s!realpage
\strc_counters_decrement\s!userpage
\strc_counters_decrement\s!subpage}
-\def\doincrementpageboundcounters
+\unexpanded\def\strc_pagenumbers_increment_counters
{\incrementpagenumber
\incrementsubpagenumber}
\appendtoks
- \dodeincrementpageboundcounters
+ \strc_pagenumbers_decrement_counters
\to \everygoodbye
% Equivalents (compatibility):
@@ -238,61 +244,49 @@
% States:
-\newif\ifrightpage \rightpagetrue
\newif\ifdoublesided \newconditional\layoutisdoublesided
\newif\ifsinglesided \newconditional\layoutissinglesided
% Realpage and subpage numbers:
-\def\setnextrealpageno{\global\realpageno\strc_counters_incremented\s!realpage\relax}
-\def\setnextsubpageno {\global\subpageno \strc_counters_incremented\s!subpage \relax}
+\unexpanded\def\setnextrealpageno{\global\realpageno\strc_counters_incremented\s!realpage\relax}
+\unexpanded\def\setnextsubpageno {\global\subpageno \strc_counters_incremented\s!subpage \relax}
% Page numbers: (can move to lua) ... inconsistent names
-\def\dodecrementpagenumber {\global\userpageno\strc_counters_decremented\s!userpage\relax}
-\def\doincrementpagenumber {\global\userpageno\strc_counters_incremented\s!userpage\relax}
+\installcorenamespace{pagenumberinc}
+\installcorenamespace{pagenumberdec}
-\def\decrementsubpagenumber{\global\subpageno \strc_counters_decremented\s!subpage \relax}
-\def\incrementsubpagenumber{\global\subpageno \strc_counters_incremented\s!subpage \relax}
+\unexpanded\def\strc_pagenumbers_decrement_userpage{\global\userpageno\strc_counters_decremented\s!userpage\relax}
+\unexpanded\def\strc_pagenumbers_increment_userpage{\global\userpageno\strc_counters_incremented\s!userpage\relax}
-\def\dosynchronizepagenumber{\global\let\@@pnstate\v!start}
+\unexpanded\def\decrementsubpagenumber{\global\subpageno \strc_counters_decremented\s!subpage \relax}
+\unexpanded\def\incrementsubpagenumber{\global\subpageno \strc_counters_incremented\s!subpage \relax}
-\def\decrementpagenumber{\csname\??pn-\counterparameter\s!userpage\c!state\endcsname}
-\def\incrementpagenumber{\csname\??pn+\counterparameter\s!userpage\c!state\endcsname}
+\unexpanded\def\strc_pagenumbers_synchronize_userpage{\global\c_strc_pagenumbers_state_userpage\plustwo} % start and visible
-\letvalue{\??pn-\v!start}\dodecrementpagenumber
-\letvalue{\??pn-\v!none }\dodecrementpagenumber
-\letvalue{\??pn-\v!empty}\dodecrementpagenumber
+\unexpanded\def\decrementpagenumber{\csname\??pagenumberdec\namedcounterparameter\s!userpage\c!state\endcsname}
+\unexpanded\def\incrementpagenumber{\csname\??pagenumberinc\namedcounterparameter\s!userpage\c!state\endcsname}
-\letvalue{\??pn+\v!start}\doincrementpagenumber
-\letvalue{\??pn+\v!none }\doincrementpagenumber
-\setvalue{\??pn+\v!empty}{\doincrementpagenumber\dosynchronizepagenumber}
-\letvalue{\??pn+\v!keep }\dosynchronizepagenumber
+\letvalue{\??pagenumberdec\v!start}\strc_pagenumbers_decrement_userpage
+\letvalue{\??pagenumberdec\v!none }\strc_pagenumbers_decrement_userpage
+\letvalue{\??pagenumberdec\v!empty}\strc_pagenumbers_decrement_userpage
-% Control:
-
-\def\getpagestatus % use above
- {\ifdoublesided
- \global\rightpagetrue
- % todo: \global\rightpagetrue or \global\rightpagefalse
- \else
- \global\rightpagetrue
- \fi}
+\letvalue{\??pagenumberinc\v!start}\strc_pagenumbers_increment_userpage
+\letvalue{\??pagenumberinc\v!none }\strc_pagenumbers_increment_userpage
+\setvalue{\??pagenumberinc\v!empty}{\strc_pagenumbers_increment_userpage\strc_pagenumbers_synchronize_userpage}
+\letvalue{\??pagenumberinc\v!keep }\strc_pagenumbers_synchronize_userpage
% Setup general page numbering
-\newtoks\everysetuppagenumbering
+\installcorenamespace{pagenumbering}
-\unexpanded\def\setuppagenumbering
- {\dosingleempty\dosetuppagenumbering}
-
-\def\dosetuppagenumbering[#1]%
- {\getparameters[\??nm][#1]\the\everysetuppagenumbering}
+\installdirectcommandhandler \??pagenumbering {pagenumbering}
\appendtoks
\singlesidedfalse \setfalse\layoutisdoublesided
\doublesidedfalse \setfalse\layoutissinglesided
- \normalexpanded{\noexpand\processallactionsinset[\@@nmalternative]}
+ \processallactionsinset[\directpagenumberingparameter\c!alternative]
[ \v!singlesided=>\singlesidedtrue\settrue\layoutissinglesided,
\v!doublesided=>\doublesidedtrue\settrue\layoutisdoublesided]%
\ifdefined\trackingmarginnotestrue
@@ -303,19 +297,24 @@
\fi
\fi
\page_backgrounds_recalculate
- \dosetpagenumberlocation
+ \strc_pagenumbers_set_location
\to \everysetuppagenumbering
\ifdefined \page_backgrounds_recalculate \else
\let\page_backgrounds_recalculate\relax
\fi
-\ifdefined \dosetpagenumberlocation \else
- \let\dosetpagenumberlocation\relax
+\ifdefined \strc_pagenumbers_set_location \else
+ \let\strc_pagenumbers_set_location\relax
\fi
-\def\flushfinallayoutpage
- {\doifsomething\@@nmpage{\doifnot\@@nmpage\v!no{\page[\@@nmpage]}}}
+\unexpanded\def\strc_pagenumbers_flush_final_page
+ {\edef\p_strc_pagenumbers_page{\directpagenumberingparameter\c!page}%
+ \ifx\p_strc_pagenumbers_page\empty \else
+ \ifx\p_strc_pagenumbers_page\v!no \else
+ \page[\p_strc_pagenumbers_page]
+ \fi
+ \fi}
% The numbered location handler is there because we need to be downward
% compatible. So, in fact there can be multiple handlers active at the
@@ -323,28 +322,32 @@
% Rendering:
-\unexpanded\def\placelocationpagenumber
- {\ifnum\userpagenumberstate=\plustwo
- \ifnum\overallpagenumberstate=\plusone
- \doif\@@nmstrut\v!yes\strut
+\unexpanded\def\strc_pagenumbers_place_location
+ {\ifnum\c_strc_pagenumbers_state_userpage=\plustwo
+ \ifnum\c_strc_pagenumbers_state=\plusone
+ \doif{\directpagenumberingparameter\c!strut}\v!yes\strut
\begingroup
- \dousestyleparameter\@@nmstyle
- \dousecolorparameter\@@nmcolor
- \@@nmcommand{\@@nmleft\labeltexts\v!pagenumber{\prefixedpagenumber}\@@nmright}%
+ \usepagenumberingstyleandcolor\c!style\c!color
+ \directpagenumberingparameter\c!command
+ {\directpagenumberingparameter\c!left
+ \labeltexts\v!pagenumber\prefixedpagenumber
+ \directpagenumberingparameter\c!right}%
\endgroup
\fi
\fi}
\unexpanded\def\completepagenumber
- {\ifnum\userpagenumberstate=\plustwo
- \ifnum\overallpagenumberstate=\plusone
- \@@nmleft\labeltexts\v!pagenumber\prefixedpagenumber\@@nmright
+ {\ifnum\c_strc_pagenumbers_state_userpage=\plustwo
+ \ifnum\c_strc_pagenumbers_state=\plusone
+ \directpagenumberingparameter\c!left
+ \labeltexts\v!pagenumber\prefixedpagenumber
+ \directpagenumberingparameter\c!right
\fi
\fi}
\unexpanded\def\placepagenumber
- {\ifnum\userpagenumberstate=\plustwo
- \ifnum\overallpagenumberstate=\plusone
+ {\ifnum\c_strc_pagenumbers_state_userpage=\plustwo
+ \ifnum\c_strc_pagenumbers_state=\plusone
\labeltexts\v!pagenumber\pagenumber
\fi
\fi}
@@ -356,35 +359,35 @@
% compatible. So, in fact there can be multiple handlers active at the
% same time, but only the current one does something.
-\setnewconstant\realpagenumberstate \plustwo % counter state : 0=stop, 1=start, 2=start and visible
-\setnewconstant\userpagenumberstate \plustwo % counter state : 0=stop, 1=start, 2=start and visible
-\setnewconstant\subpagenumberstate \plustwo % counter state : 0=stop, 1=start, 2=start and visible
-\setnewconstant\overallpagenumberstate\plusone % general number: 0=invisible, 1=visible
+\setnewconstant\c_strc_pagenumbers_state_realpage\plustwo % counter state : 0=stop, 1=start, 2=start and visible
+\setnewconstant\c_strc_pagenumbers_state_userpage\plustwo % counter state : 0=stop, 1=start, 2=start and visible
+\setnewconstant\c_strc_pagenumbers_state_subpage \plustwo % counter state : 0=stop, 1=start, 2=start and visible
+\setnewconstant\c_strc_pagenumbers_state \plusone % general number: 0=invisible, 1=visible
-\def\checkpagenumberstatechange#1#2%
- {\edef\newpagenumberstate{\counterparameter#1\c!state}%
- \ifx\newpagenumberstate\oldpagenumberstate \else
- \doifelse\newpagenumberstate\v!start
+\unexpanded\def\strc_pagenumbers_check_state_change#1#2%
+ {\edef\m_strc_pagenumbers_state_new{\namedcounterparameter#1\c!state}%
+ \ifx\m_strc_pagenumbers_state_new\m_strc_pagenumbers_state_old \else
+ \doifelse\m_strc_pagenumbers_state_new\v!start
{#2\plustwo}%
{#2\zerocount}%
\fi}
\appendtoks % todo: set state: none, start, stop, reset
- \checkpagenumberstatechange\s!realpage\realpagenumberstate
+ \strc_pagenumbers_check_state_change\s!realpage\c_strc_pagenumbers_state_realpage
\to \everysetuprealpagenumber
\appendtoks % todo: set state: none, start, stop, reset
- \checkpagenumberstatechange\s!userpage\userpagenumberstate
+ \strc_pagenumbers_check_state_change\s!userpage\c_strc_pagenumbers_state_userpage
\to \everysetupuserpagenumber
\appendtoks % todo: set state: none, start, stop, reset
- \checkpagenumberstatechange\s!subpage\subpagenumberstate
+ \strc_pagenumbers_check_state_change\s!subpage\c_strc_pagenumbers_state_subpage
\to \everysetupsubpagenumber
\appendtoks % todo: set state: none, start, stop, reset
- \doifelse\@@nmstate\v!start
- {\overallpagenumberstate\plusone }%
- {\overallpagenumberstate\zerocount}%
+ \doifelse{\directpagenumberingparameter\c!state}\v!start
+ {\c_strc_pagenumbers_state\plusone }%
+ {\c_strc_pagenumbers_state\zerocount}%
\to \everysetuppagenumbering
% Done
@@ -413,17 +416,17 @@
% just for downward compatibility
\appendtoks
- \edef\askeduserpagenumber{\counterparameter\s!userpage\c!number}%
+ \edef\askeduserpagenumber{\namedcounterparameter\s!userpage\c!number}%
\ifx\askeduserpagenumber\empty \else
- \normalexpanded{\setuppagenumber[\c!start=\counterparameter\s!userpage\c!number,\c!number=]}%
+ \normalexpanded{\setuppagenumber[\c!start=\askeduserpagenumber,\c!number=]}%
\userpageno\strc_counters_raw\s!userpage
\fi
\to \everysetupuserpagenumber % todo: set state: none, start, stop, reset
\appendtoks
- \edef\askedsubpagenumber{\counterparameter\s!subpage\c!number}%
+ \edef\askedsubpagenumber{\namedcounterparameter\s!subpage\c!number}%
\ifx\askedsubpagenumber\empty \else
- \normalexpanded{\setupsubpagenumber[\c!start=\counterparameter\s!subpage\c!number,\c!number=]}%
+ \normalexpanded{\setupsubpagenumber[\c!start=\askedsubpagenumber,\c!number=]}%
\subpageno\strc_counters_raw\s!subpage\relax
\fi
\to \everysetupsubpagenumber % todo: set state: none, start, stop, reset
@@ -433,7 +436,7 @@
% \setupuserpagenumber[start=2]
% \starttext \dorecurse{20}{\input knuth \par} \stoptext
-\def\checkpagenumbershift
+\unexpanded\def\strc_pagenumbers_check_change_shift
{\userpageno\strc_counters_raw\s!userpage\relax
\ifnum\realpageno=\plusone
\ifodd\userpageno
@@ -444,12 +447,12 @@
\appendtoks % todo: set state: none, start, stop, reset
% this makes starting at an even page possible
- \checkpagenumbershift
+ \strc_pagenumbers_check_change_shift
\to \everysetupuserpagenumber
\appendtoks % todo: set state: none, start, stop, reset
% this makes starting at an even page possible
- \checkpagenumbershift
+ \strc_pagenumbers_check_change_shift
\to \everysetuppagenumbering
\initializepagecounters
diff --git a/Master/texmf-dist/tex/context/base/strc-ref.lua b/Master/texmf-dist/tex/context/base/strc-ref.lua
index 092babfaed6..284418c484c 100644
--- a/Master/texmf-dist/tex/context/base/strc-ref.lua
+++ b/Master/texmf-dist/tex/context/base/strc-ref.lua
@@ -19,6 +19,7 @@ local texcount, texsetcount = tex.count, tex.setcount
local rawget, tonumber = rawget, tonumber
local lpegmatch = lpeg.match
local copytable = table.copy
+local formatters = string.formatters
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
@@ -28,16 +29,30 @@ local trace_referencing = false trackers.register("structures.referencing",
local trace_analyzing = false trackers.register("structures.referencing.analyzing", function(v) trace_analyzing = v end)
local trace_identifying = false trackers.register("structures.referencing.identifying", function(v) trace_identifying = v end)
local trace_importing = false trackers.register("structures.referencing.importing", function(v) trace_importing = v end)
+local trace_empty = false trackers.register("structures.referencing.empty", function(v) trace_empty = v end)
+
+local check_duplicates = true
+
+directives.register("structures.referencing.checkduplicates", function(v)
+ check_duplicates = v
+end)
local report_references = logs.reporter("references")
-local report_unknown = logs.reporter("unknown")
+local report_unknown = logs.reporter("references","unknown")
local report_identifying = logs.reporter("references","identifying")
local report_importing = logs.reporter("references","importing")
+local report_empty = logs.reporter("references","empty")
local variables = interfaces.variables
local constants = interfaces.constants
local context = context
+local v_default = variables.default
+local v_url = variables.url
+local v_file = variables.file
+local v_unknown = variables.unknown
+local v_yes = variables.yes
+
local texcount = tex.count
local texconditionals = tex.conditionals
@@ -237,11 +252,13 @@ references.setnextorder = setnextorder
function references.setnextinternal(kind,name)
setnextorder(kind,name) -- always incremented with internal
- texsetcount("global","locationcount",texcount.locationcount + 1)
+ local n = texcount.locationcount + 1
+ texsetcount("global","locationcount",n)
+ return n
end
function references.currentorder(kind,name)
- context(orders[kind] and orders[kind][name] or lastorder)
+ return orders[kind] and orders[kind][name] or lastorder
end
local function setcomponent(data)
@@ -257,6 +274,12 @@ local function setcomponent(data)
-- but for the moment we do it here (experiment)
end
+commands.setnextinternalreference = references.setnextinternal
+
+function commands.currentreferenceorder(kind,name)
+ context(references.currentorder(kind,name))
+end
+
references.setcomponent = setcomponent
function references.set(kind,prefix,tag,data)
@@ -269,11 +292,11 @@ function references.set(kind,prefix,tag,data)
local n = 0
for ref in gmatch(tag,"[^,]+") do
if ref ~= "" then
- if pd[ref] then
+ if check_duplicates and pd[ref] then
if prefix and prefix ~= "" then
- report_references("redundant reference: %q in namespace %q",ref,prefix)
+ report_references("redundant reference %a in namespace %a",ref,prefix)
else
- report_references("redundant reference %q",ref)
+ report_references("redundant reference %a",ref)
end
else
n = n + 1
@@ -292,6 +315,8 @@ function references.enhance(prefix,tag)
end
end
+commands.enhancereference = references.enhance
+
-- -- -- related to strc-ini.lua -- -- --
references.resolvers = references.resolvers or { }
@@ -372,7 +397,7 @@ local function register_from_lists(collected,derived,pages,sections)
local t = { kind, i, entry }
for s in gmatch(reference,"%s*([^,]+)") do
if trace_referencing then
- report_references("list entry %s provides %s reference '%s' on realpage %s",i,kind,s,realpage)
+ report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
end
c[s] = c[s] or t -- share them
d[s] = d[s] or t -- share them
@@ -400,14 +425,16 @@ function references.urls.define(name,url,file,description)
end
end
-local pushcatcodes, popcatcodes, txtcatcodes = context.pushcatcodes, context.popcatcodes, tex.txtcatcodes
+local pushcatcodes = context.pushcatcodes
+local popcatcodes = context.popcatcodes
+local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
function references.urls.get(name)
local u = urls[name]
if u then
local url, file = u[1], u[2]
if file and file ~= "" then
- return format("%s/%s",url,file)
+ return formatters["%s/%s"](url,file)
else
return url
end
@@ -526,9 +553,7 @@ end
function references.programs.get(name)
local f = programs[name]
- if f then
- context(f[1])
- end
+ return f and f[1]
end
function references.checkedprogram(whatever) -- return whatever if not resolved
@@ -542,10 +567,19 @@ function references.checkedprogram(whatever) -- return whatever if not resolved
end
end
+commands.defineprogram = references.programs.define
+
+function commands.getprogram(name)
+ local f = programs[name]
+ if f then
+ context(f[1])
+ end
+end
+
-- shared by urls and files
function references.whatfrom(name)
- context((urls[name] and variables.url) or (files[name] and variables.file) or variables.unknown)
+ context((urls[name] and v_url) or (files[name] and v_file) or v_unknown)
end
function references.from(name)
@@ -553,6 +587,31 @@ function references.from(name)
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
+ return description
+ -- ok
+ elseif file and file ~= "" then
+ return url .. "/" .. file
+ else
+ return url
+ end
+ else
+ local f = files[name]
+ if f then
+ local file, description = f[1], f[2]
+ if description ~= "" then
+ return description
+ else
+ return file
+ end
+ end
+ end
+end
+
+function commands.from(name)
+ local u = urls[name]
+ if u then
+ local url, file, description = u[1], u[2], u[3]
+ if description ~= "" then
context.dofromurldescription(description)
-- ok
elseif file and file ~= "" then
@@ -578,10 +637,6 @@ function references.define(prefix,reference,list)
d[reference] = { "defined", list }
end
---~ function references.registerspecial(name,action,...)
---~ specials[name] = { action, ... }
---~ end
-
function references.reset(prefix,reference)
local d = defined[prefix]
if d then
@@ -589,6 +644,9 @@ function references.reset(prefix,reference)
end
end
+commands.definereference = references.define
+commands.resetreference = references.reset
+
-- \primaryreferencefoundaction
-- \secondaryreferencefoundaction
-- \referenceunknownaction
@@ -657,7 +715,7 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere
set.has_tex = true
end
else
- -- report_references("funny pattern: %s",ri or "?")
+ -- report_references("funny pattern %a",ri)
end
end
end
@@ -699,6 +757,8 @@ function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex
end
end
+commands.expandcurrentreference = references.expandcurrent -- for the moment the same
+
local externals = { }
-- we have prefixes but also components:
@@ -717,7 +777,8 @@ local function loadexternalreferences(name,utilitydata)
for prefix, set in next, external do
for reference, data in next, set do
if trace_importing then
- report_importing("registering external reference: regular | %s | %s | %s",name,prefix,reference)
+ report_importing("registering %a reference, kind %a, name %a, prefix %a, reference %a",
+ "external","regular",name,prefix,reference)
end
local section = reference.section
local realpage = reference.realpage
@@ -748,7 +809,8 @@ local function loadexternalreferences(name,utilitydata)
end
for s in gmatch(reference,"%s*([^,]+)") do
if trace_importing then
- report_importing("registering external reference: %s | %s | %s | %s",kind,name,prefix,s)
+ report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
+ "external",kind,name,prefix,s)
end
target[s] = target[s] or entry
end
@@ -807,7 +869,8 @@ local function loadproductreferences(productname,componentname,utilitydata)
for prefix, set in next, productreferences do
for reference, data in next, set do
if trace_importing then
- report_importing("registering product reference: regular | %s | %s | %s",productname,prefix,reference)
+ report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
+ "product","regular",productname,prefix,reference)
end
local section = reference.section
local realpage = reference.realpage
@@ -835,7 +898,7 @@ local function loadproductreferences(productname,componentname,utilitydata)
local prefix = references.referenceprefix or ""
local component = references.component
local ctarget, ptarget
- if component and component == componentname then
+ if not component or component == componentname then
-- skip
else
-- one level up
@@ -861,13 +924,15 @@ local function loadproductreferences(productname,componentname,utilitydata)
for s in gmatch(reference,"%s*([^,]+)") do
if ptarget then
if trace_importing then
- report_importing("registering product reference: %s | %s | %s | %s",kind,productname,prefix,s)
+ report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
+ "product",kind,productname,prefix,s)
end
ptarget[s] = ptarget[s] or entry
end
if ctarget then
if trace_importing then
- report_importing("registering component reference: %s | %s | %s | %s",kind,productname,prefix,s)
+ report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
+ "component",kind,productname,prefix,s)
end
ctarget[s] = ctarget[s] or entry
end
@@ -895,7 +960,7 @@ local function loadproductvariables(product,component,utilitydata)
local numbers = firstsection.numberdata.numbers
if numbers then
if trace_importing then
- report_importing("initializing section number to %s",concat(numbers,":"))
+ report_importing("initializing section number to %:t",numbers)
end
productdata.firstsection = firstsection
structures.documents.preset(numbers)
@@ -906,7 +971,7 @@ local function loadproductvariables(product,component,utilitydata)
local number = firstpage and firstpage.number
if number then
if trace_importing then
- report_importing("initializing page number to %s",number)
+ report_importing("initializing page number to %a",number)
end
productdata.firstpage = firstpage
counters.set("userpage",1,number)
@@ -958,7 +1023,7 @@ function structures.references.loadpresets(product,component) -- we can consider
local utilitydata = job.loadother(fullname)
if utilitydata then
if trace_importing then
- report_importing("loading references for component %s of product %s from %s",component,product,fullname)
+ report_importing("loading references for component %a of product %a from %a",component,product,fullname)
end
loadproductvariables (product,component,utilitydata)
loadproductreferences(product,component,utilitydata)
@@ -981,7 +1046,7 @@ if useproduct then
local component = justacomponent()
if component then
if trace_referencing or trace_importing then
- report_references("loading presets for component '%s' of product '%s'",component,product)
+ report_references("loading presets for component %a of product %a",component,product)
end
structures.references.loadpresets(product,component)
end
@@ -1000,9 +1065,9 @@ local function report_identify_special(set,var,i,type)
local error = var.error
local kind = var.kind
if error then
- report_identifying("type %s: %s, n: %s, prefix: %s, special: %s, error: %s",type,reference,i,prefix,special,error)
+ report_identifying("type %a, reference %a, index %a, prefix %a, special %a, error %a",type,reference,i,prefix,special,error)
else
- report_identifying("type %s: %s, n: %s, prefix: %s, special: %s, kind: %s",type,reference,i,prefix,special,kind)
+ report_identifying("type %a, reference %a, index %a, prefix %a, special %a, kind %a",type,reference,i,prefix,special,kind)
end
end
@@ -1013,9 +1078,9 @@ local function report_identify_arguments(set,var,i,type)
local error = var.error
local kind = var.kind
if error then
- report_identifying("type %s: %s, n: %s, prefix: %s, arguments: %s, error: %s",type,reference,i,prefix,arguments,error)
+ report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, error %a",type,reference,i,prefix,arguments,error)
else
- report_identifying("type %s: %s, n: %s, prefix: %s, arguments: %s, kind: %s",type,reference,i,prefix,arguments,kind)
+ report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, kind %a",type,reference,i,prefix,arguments,kind)
end
end
@@ -1027,15 +1092,15 @@ local function report_identify_outer(set,var,i,type)
local kind = var.kind
if outer then
if error then
- report_identifying("type %s: %s, n: %s, prefix: %s, outer: %s, error: %s",type,reference,i,prefix,outer,error)
+ report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, error %a",type,reference,i,prefix,outer,error)
else
- report_identifying("type %s: %s, n: %s, prefix: %s, outer: %s, kind: %s",type,reference,i,prefix,outer,kind)
+ report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, kind %a",type,reference,i,prefix,outer,kind)
end
else
if error then
- report_identifying("type %s: %s, n: %s, prefix: %s, error: %s",type,reference,i,prefix,error)
+ report_identifying("type %a, reference %a, index %a, prefix %a, error %a",type,reference,i,prefix,error)
else
- report_identifying("type %s: %s, n: %s, prefix: %s, kind: %s",type,reference,i,prefix,kind)
+ report_identifying("type %a, reference %a, index %a, prefix %a, kind %a",type,reference,i,prefix,kind)
end
end
end
@@ -1179,7 +1244,7 @@ local function identify_inner(set,var,prefix,collected,derived,tobesaved)
ri(var)
else
-- can't happen as we catch it with a metatable now
- report_references("unknown inner resolver for '%s'",i[1])
+ report_references("unknown inner resolver for %a",i[1])
end
else
-- no prefixes here
@@ -1512,14 +1577,14 @@ end
references.identify = identify
-local unknowns, nofunknowns = { }, 0
+local unknowns, nofunknowns, f_valid = { }, 0, formatters["[%s][%s]"]
-function references.doifelse(prefix,reference,highlight,newwindow,layer)
+function references.valid(prefix,reference,highlight,newwindow,layer)
local set, bug = identify(prefix,reference)
local unknown = bug or #set == 0
if unknown then
currentreference = nil -- will go away
- local str = format("[%s][%s]",prefix,reference)
+ local str = f_valid(prefix,reference)
local u = unknowns[str]
if not u then
interfaces.showmessage("references",1,str) -- 1 = unknown, 4 = illegal
@@ -1529,11 +1594,15 @@ function references.doifelse(prefix,reference,highlight,newwindow,layer)
unknowns[str] = u + 1
end
else
- set.highlight, set.newwindow,set.layer = highlight, newwindow, layer
+ set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
currentreference = set[1]
end
-- we can do the expansion here which saves a call
- commands.doifelse(not unknown)
+ return not unknown
+end
+
+function commands.doifelsereference(prefix,reference,highlight,newwindow,layer)
+ commands.doifelse(references.valid(prefix,reference,highlight,newwindow,layer))
end
function references.reportproblems() -- might become local
@@ -1563,12 +1632,12 @@ function references.setinnermethod(m)
if m then
if m == "page" or m == "mixed" or m == "names" then
innermethod = m
- elseif m == true or m == variables.yes then
+ elseif m == true or m == v_yes then
innermethod = "page"
end
end
function references.setinnermethod()
- report_references("inner method is already set and frozen to '%s'",innermethod)
+ report_references("inner method is already set and frozen to %a",innermethod)
end
end
@@ -1612,13 +1681,13 @@ function references.setinternalreference(prefix,tag,internal,view) -- needs chec
end
function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here
- if references.set(kind,prefix,tag,data) then
- texcount.lastdestinationattribute = references.setinternalreference(prefix,tag,nil,view) or unsetvalue
- else
- texcount.lastdestinationattribute = unsetvalue
- end
+ local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue
+ texcount.lastdestinationattribute = attr
+ return attr
end
+commands.setreferenceattribute = references.setandgetattribute
+
function references.getinternalreference(n) -- n points into list (todo: registers)
local l = lists.collected[n]
return l and l.references.internal or n
@@ -1637,7 +1706,11 @@ end
function references.getcurrentmetadata(tag)
local data = currentreference and currentreference.i
- data = data and data.metadata and data.metadata[tag]
+ return data and data.metadata and data.metadata[tag]
+end
+
+function commands.getcurrentreferencemetadata(tag)
+ local data = references.getcurrentmetadata(tag)
if data then
context(data)
end
@@ -1650,8 +1723,15 @@ end
references.currentmetadata = currentmetadata
-function references.getcurrentprefixspec(default) -- todo: message
- context.getreferencestructureprefix(currentmetadata("kind") or "?",currentmetadata("name") or "?",default or "?")
+local function getcurrentprefixspec(default)
+ -- todo: message
+ return currentmetadata("kind") or "?", currentmetadata("name") or "?", default or "?"
+end
+
+references.getcurrentprefixspec = getcurrentprefixspec
+
+function commands.getcurrentprefixspec(default)
+ context.getreferencestructureprefix(getcurrentprefixspec(default))
end
function references.filter(name,...) -- number page title ...
@@ -1659,7 +1739,7 @@ function references.filter(name,...) -- number page title ...
if data then
if name == "realpage" then
local cs = references.analyze() -- normally already analyzed but also sets state
- context(cs.realpage or 0) -- todo, return and in command namespace
+ context(tonumber(cs.realpage) or 0) -- todo, return and in command namespace
else -- assumes data is table
local kind = type(data) == "table" and data.metadata and data.metadata.kind
if kind then
@@ -1667,21 +1747,32 @@ function references.filter(name,...) -- number page title ...
filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown)
if filter then
if trace_referencing then
- report_references("name '%s', kind '%s', using dedicated filter",name,kind)
+ report_references("name %a, kind %a, using dedicated filter",name,kind)
end
filter(data,name,...)
elseif trace_referencing then
- report_references("name '%s', kind '%s', using generic filter",name,kind)
+ report_references("name %a, kind %a, using generic filter",name,kind)
end
elseif trace_referencing then
- report_references("name '%s', unknown kind",name)
+ report_references("name %a, unknown kind",name)
end
end
+ elseif name == "realpage" then
+ context(0)
elseif trace_referencing then
- report_references("name '%s', no reference",name)
+ report_references("name %a, no reference",name)
end
end
+function references.filterdefault()
+ return references.filter("default",getcurrentprefixspec(v_default))
+end
+
+function commands.currentreferencedefault(tag)
+ if not tag then tag = "default" end
+ references.filter(tag,context.delayed(getcurrentprefixspec(tag)))
+end
+
filters.generic = { }
function filters.generic.title(data)
@@ -1704,7 +1795,7 @@ end
function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper
if data then
- local numberdata = data.numberdata
+ numberdata = lists.reordered(data) -- data.numberdata
if numberdata then
helpers.prefix(data,prefixspec)
sections.typesetnumber(numberdata,"number",numberdata)
@@ -1782,13 +1873,19 @@ filters.section = { }
function filters.section.number(data,what,prefixspec)
if data then
local numberdata = data.numberdata
- if numberdata then
- sections.typesetnumber(numberdata,"number",prefixspec,numberdata)
- else
+ if not numberdata then
local useddata = data.useddata
if useddata and useddata.number then
context(useddata.number)
end
+ elseif numberdata.hidenumber then
+ local references = data.references
+ if trace_empty then
+ report_empty("reference %a has a hidden number",references.reference)
+ context.emptyreference() -- maybe an option
+ end
+ else
+ sections.typesetnumber(numberdata,"number",prefixspec,numberdata)
end
end
end
@@ -2049,10 +2146,13 @@ end
-- needs a better split ^^^
-commands.filterreference = references.filter
+commands.filterreference = references.filter
+commands.filterdefaultreference = references.filterdefault
-- done differently now:
function references.export(usedname) end
function references.import(usedname) end
function references.load (usedname) end
+
+commands.exportreferences = references.export
diff --git a/Master/texmf-dist/tex/context/base/strc-ref.mkvi b/Master/texmf-dist/tex/context/base/strc-ref.mkvi
index ee45d76fafe..54f180d8e01 100644
--- a/Master/texmf-dist/tex/context/base/strc-ref.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-ref.mkvi
@@ -61,20 +61,42 @@
% \definespecial\dosetexecuteJScode
% ...
-%D This module deals with referencing. In \CONTEXT\ referencing
-%D is one of the core features, although at a first glance
-%D probably nobody will notice. This is good, because
-%D referencing should be as hidden as possible.
+%D This module deals with referencing. In \CONTEXT\ referencing is one of
+%D the core features, although at a first glance probably nobody will
+%D notice. This is good, because referencing should be as hidden as possible.
%D
-%D In paper documents, referencing comes down to cross
-%D referencing, but in their interactive counterparts, is also
-%D involves navigation. Many features implemented here are
-%D therefore closely related to navigation.
+%D Before we start implementing functionality we provide a way to set
+%D up this mechanism.
%D
-%D Many \CONTEXT\ commands can optionally be fed with a
-%D reference. Such a reference, when called upon, returns the
-%D number of a figure, table, chapter etc, a piece of text, or
-%D a pagenumber.
+%D \showsetup{setupreferencing}
+%D
+%D In interactive documents verbose references don't always
+%D make sense (what is a page number in an unnumbered
+%D document). By setting the \type{interaction} variable, one
+%D can influences the way interactive references are set.
+
+\installcorenamespace{referencing}
+
+\installdirectcommandhandler \??referencing {referencing} % \??referencing
+
+\newif\ifreferencing \referencingtrue
+
+\appendtoks
+ \edef\p_state{\referencingparameter\c!state}%
+ \ifx\p_state\v!start
+ \referencingtrue
+ \else
+ \referencingfalse
+ \fi
+\to \everysetupreferencing
+
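+%D A minimal sketch of the state switch installed above (the stop value is an
+%D assumption; the code only tests for \type{start}):
+%D
+%D \starttyping
+%D \setupreferencing[state=stop]
+%D ... \pagereference and friends are ignored here ...
+%D \setupreferencing[state=start]
+%D \stoptyping
+%D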
+%D In paper documents, referencing comes down to cross referencing, but in
+%D their interactive counterparts, it also involves navigation. Many features
+%D implemented here are therefore closely related to navigation.
+%D
+%D Many \CONTEXT\ commands can optionally be fed with a reference. Such a
+%D reference, when called upon, returns the number of a figure, table, chapter
+%D etc, a piece of text, or a pagenumber.
%D
%D There are three ways of defining a reference:
%D
@@ -89,17 +111,25 @@
%D \reference[here]{some text}
%D \stoptyping
-\unexpanded\def\textreference {\dosingleargument\strc_references_text_reference}
-\unexpanded\def\pagereference {\dosingleargument\strc_references_page_reference}
-\unexpanded\def\reference {\dosingleargument\strc_references_full_reference}
-\unexpanded\def\setreference {\dodoubleargument\strc_references_set_reference }
+\unexpanded\def\textreference {\dosingleargument\strc_references_text_reference} % no need for \dosingle
+\unexpanded\def\pagereference {\dosingleargument\strc_references_page_reference} % as they're mandatory and
+\unexpanded\def\reference {\dosingleargument\strc_references_full_reference} % never forgotten
+\unexpanded\def\setreference {\dodoubleargument\strc_references_set_reference } %
%D These are implemented in a low level form as:
-\def\strc_references_text_reference [#labels]{\strc_references_set_named_reference\s!text{#labels}{}}
-\def\strc_references_page_reference [#labels]{\strc_references_set_named_reference\s!page{#labels}{}{}}
-\def\strc_references_full_reference [#labels]{\strc_references_set_named_reference\s!full{#labels}{}}
-\def\strc_references_set_reference[#labels][#settings]{\strc_references_set_named_reference\s!user{#labels}{#settings}{}}
+\unexpanded\def\strc_references_text_reference [#labels]{\strc_references_set_named_reference\s!text{#labels}{}}
+\unexpanded\def\strc_references_page_reference [#labels]{\strc_references_set_named_reference\s!page{#labels}{}{}}
+\unexpanded\def\strc_references_full_reference [#labels]{\strc_references_set_named_reference\s!full{#labels}{}}
+\unexpanded\def\strc_references_set_reference[#labels][#settings]{\strc_references_set_named_reference\s!user{#labels}{#settings}{}}
+
+\unexpanded\def\dosetdirectpagereference#1{\strc_references_set_named_reference\s!page{#1}{}{}} % low level, maybe use _
+
+\unexpanded\def\usereferenceparameter#1% faster local variant
+ {\edef\m_strc_references_asked{#1\c!reference}%
+ \ifx\m_strc_references_asked\empty\else
+ \dosetdirectpagereference\m_strc_references_asked
+ \fi}
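
%D As a sketch, the user level commands defined above can be used along
%D these lines (the labels are of course only illustrative):
%D
%D \starttyping
%D \pagereference[intro]
%D \textreference[fig:cow]{a brown cow}
%D \reference[here]{some text}
%D \stoptyping
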
%D Actually there is not much difference between a text and a
%D full reference, but it's the concept that counts. The low
@@ -108,15 +138,15 @@
\newcount\lastreferenceattribute
\newcount\lastdestinationattribute
-\def\dofinishfullreference#prefix#label{\normalexpanded{\ctxlatelua{structures.references.enhance("#prefix","#label")}}}
-\def\dofinishtextreference#prefix#label{\normalexpanded{\ctxlatelua{structures.references.enhance("#prefix","#label",{})}}}
+\def\dofinishfullreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label")}}}
+\def\dofinishtextreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label",{})}}}
\let\dofinishpagereference\dofinishfullreference
\let\dofinishuserreference\dofinishfullreference
\def\dofinishsomereference#kind{\executeifdefined{dofinish#{kind}reference}\gobbletwoarguments}
-\def\strc_references_set_named_reference
+\unexpanded\def\strc_references_set_named_reference
{\ifreferencing
\expandafter\strc_references_set_named_reference_indeed
\else
@@ -143,7 +173,7 @@
\edef\currentreferencekind {#kind}%
\edef\currentreferencelabels {#labels}%
\edef\currentreferenceuserdata {#userdata}%
- \edef\currentreferenceexpansion{\@@rfexpansion}% {\referenceparameter\c!expansion}
+ \edef\currentreferenceexpansion{\referencingparameter\c!expansion}% {\referenceparameter\c!expansion}
\ifx\currentreferencelabels\empty
\lastdestinationattribute\attributeunsetvalue
\else
@@ -163,7 +193,7 @@
% beware, the structures.references.set writes a
% \setnextinternalreference
\strc_references_start_destination_nodes
- \ctxlua{structures.references.setandgetattribute("\currentreferencekind", "\referenceprefix","\currentreferencelabels",
+ \ctxcommand{setreferenceattribute("\currentreferencekind", "\referenceprefix","\currentreferencelabels",
{
references = {
% internal = \nextinternalreference, % no need for an internal as we have an explicit
@@ -210,7 +240,7 @@
\lastdestinationattribute\attributeunsetvalue
\else
\strc_references_start_destination_nodes
- \ctxlua{structures.references.setandgetattribute("\s!page", "\referenceprefix","\currentreferencelabels",
+ \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","\currentreferencelabels",
{
references = {
block = "\currentsectionblock",
@@ -248,7 +278,7 @@
\def\strc_references_set_simple_page_reference#label%
{\iflocation
\strc_references_start_destination_nodes
- \ctxlua{structures.references.setandgetattribute("\s!page", "\referenceprefix","#label",
+ \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","#label",
{
references = {
% block = "\currentsectionblock",
@@ -268,13 +298,74 @@
\def\strc_references_get_simple_page_reference#label%
{\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
\xdef\currentreferenceattribute{\number\lastreferenceattribute}%
\else
\xdef\currentreferenceattribute{\number\attributeunsetvalue}%
\fi}
%D \macros
+%D {contentreference}
+%D
+%D \starttyping
+%D \setupinteraction
+%D [state=start,
+%D focus=standard]
+%D
+%D \setupheader
+%D [state=stop]
+%D
+%D See page \goto{page 2}[page2] \page
+%D
+%D \contentreference
+%D [page2]
+%D [offset=1cm,toffset=2cm,frame=on]
+%D {\externalfigure[cow.pdf][factor=fit]}
+%D
+%D \stoptyping
+
+\unexpanded\def\contentreference
+ {\hbox\bgroup
+ \dodoubleempty\strc_references_content_pickup}
+
+\def\strc_references_content_pickup
+ {\ifsecondargument
+ \expandafter\strc_references_content_pickup_yes
+ \else
+ \expandafter\strc_references_content_pickup_nop % otherwise the nop variant is never used
+ \fi}
+
+\def\strc_references_content_pickup_yes[#1][#2]%
+ {\dowithnextbox{\strc_references_content_yes_finish{#1}{#2}}\hbox}
+
+\def\strc_references_content_pickup_nop[#1][#2]%
+ {\dowithnextbox{\strc_references_content_nop_finish{#1}{#2}}\hbox}
+
+\def\strc_references_content_yes_finish#1#2%
+ {\scratchwidth \wd\nextbox
+ \scratchheight\ht\nextbox
+ \scratchdepth \dp\nextbox
+ \setbox\nextbox\hbox
+ {\framed[\c!frame=\v!off,#2]{\box\nextbox}}%
+ \strc_references_set_simple_page_reference{#1}%
+ \setbox\nextbox\hbox attr \destinationattribute \currentdestinationattribute
+ {\strc_references_flush_destination_nodes
+ \box\nextbox}%
+ \setbox\nextbox\hbox{\box\nextbox}%
+ \wd\nextbox\scratchwidth
+ \ht\nextbox\scratchheight
+ \dp\nextbox\scratchdepth
+ \box\nextbox
+ \egroup}
+
+\def\strc_references_content_nop_finish#1#2%
+ {\strc_references_set_simple_page_reference{#1}%
+ \hbox attr \destinationattribute \currentdestinationattribute
+ {\strc_references_flush_destination_nodes
+ \box\nextbox}%
+ \egroup}
+
+%D \macros
%D {everyreference}
%D
%D For rather tricky purposes, one can assign sanitizing
@@ -437,13 +528,13 @@
{\dodoubleempty\strc_references_define_reference}
\def\strc_references_define_reference[#name][#specification]%
- {\ctxlua{structures.references.define("\referenceprefix","#name",\!!bs\detokenize{#specification}\!!es)}}
+ {\ctxcommand{definereference("\referenceprefix","#name",\!!bs\detokenize{#specification}\!!es)}}
\unexpanded\def\resetreference[#name]%
- {\ctxlua{structures.references.reset("\referenceprefix","#name")}}
+ {\ctxcommand{resetreference("\referenceprefix","#name")}}
\def\setpagereference#name#specification% hm, low level ?
- {\ctxlua{structures.references.define("","#name",\!!bs\v!page(\luaescapestring{#specification})\!!es)}}
+ {\ctxcommand{definereference("","#name",\!!bs\v!page(\luaescapestring{#specification})\!!es)}}
%D Chained references are defined as:
%D
@@ -473,13 +564,13 @@
\newconditional\gotonewwindow \setfalse\gotonewwindow
\def\expandtexincurrentreference % will happen in lua some time
- {\ifcase\referencehastexstate\else\ctxlua{structures.references.expandcurrent()}\fi}
+ {\ifcase\referencehastexstate\else\ctxcommand{expandcurrentreference()}\fi}
\def\expandreferenceoperation#tag#content{\ctxcommand{setreferenceoperation(#tag,\!!bs#content\!!es)}}
\def\expandreferencearguments#tag#content{\ctxcommand{setreferencearguments(#tag,\!!bs#content\!!es)}}
\def\doifreferencefoundelse#labels#yes#nop%
- {\ctxlua{structures.references.doifelse("\referenceprefix","#labels",\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow)}%
+ {\ctxcommand{doifelsereference("\referenceprefix",\!!bs#labels\!!es,\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow)}%
{\expandtexincurrentreference
#yes}%
{#nop}}
@@ -520,6 +611,7 @@
%D text).
\def\dummyreference{{\tttf ??}}
+\def\emptyreference{{\tttf !!}}
%D To prevent repetitive messages concerning a reference
%D being defined, we set such an unknown reference to an empty
@@ -562,8 +654,8 @@
%D the special driver modules (see \type{spec-ini}). The flag
%D \type{\iflocation} signals if we're in interactive mode.
-\ifx\buttonheight\undefined \newdimen\buttonheight \fi
-\ifx\buttonwidth \undefined \newdimen\buttonwidth \fi
+\ifdefined\buttonheight \else \newdimen\buttonheight \fi
+\ifdefined\buttonwidth \else \newdimen\buttonwidth \fi
%D Internal references can best be set using the next few
%D macros. Setting such references to unique values is
@@ -588,17 +680,24 @@
{\global\advance\locationcount\plusone}
\def\setnextinternalreferences#kind#name% plural
- {\ctxlua{structures.references.setnextinternal("#kind","#name")}}
+ {\ctxcommand{setnextinternalreference("#kind","#name")}}
\def\getinternalorderreference#kind#name%
- {\ctxlua{structures.references.currentorder("#kind","#name")}}
+ {\ctxcommand{currentreferenceorder("#kind","#name")}}
\def\thisissomeinternal#kind#name% only for old time sake
{\begingroup
- \ctxlua{structures.references.setinternalreference("","#kind:#name")}%
+ \ctxcommand{setinternalreference("","#kind:#name")}%
\hbox attr \destinationattribute\lastdestinationattribute{}%
\endgroup}
+\installcorenamespace{savedinternalreference}
+
+\letvalue{\??savedinternalreference\s!default}\!!zerocount
+
+\unexpanded\def\storeinternalreference#1#2%
+ {\setxvalue{\??savedinternalreference\currentstructurename}{#2}}
+
\newconditional\preferpagereferences
\def\gotosomeinternal#kind#name#target#text%
@@ -630,8 +729,6 @@
%D \goto{some text}[\v!action(PreviousJump)]
%D \stoptyping
-\newif\ifreferencing \referencingtrue
-
%D One can also activate an automatic prefix mechanism. By
%D setting the \type{\prefix} variable to \type{+}, the prefix
+%D is incremented; when set to \type{-} or empty, the prefix is
@@ -639,64 +736,24 @@
\newcount\prefixcounter
-%D These settings are accomplished by:
-%D
-%D \showsetup{setupreferencing}
-%D
-%D In interactive documents verbose references don't always
-%D make sense (what is a page number in an unnumbered
-%D document). By setting the \type{interaction} variable, one
-%D can influences the way interactive references are set.
-
-\newtoks \everysetupreferencing
-
-\unexpanded\def\setupreferencing
- {\dosingleargument\strc_references_setup_referencing}
-
-\def\strc_references_setup_referencing[#settings]%
- {\getparameters[\??rf][\c!prefix=\s!unknown,#settings]%
- \the\everysetupreferencing}
-
-\appendtoks
- \processaction
- [\@@rfstate]
- [ \v!stop=>\referencingfalse,
- \v!start=>\referencingtrue]%
-\to \everysetupreferencing
-
\newconditional\autocrossfilereferences
\appendtoks
- \doifelse\@@rfautofile\v!yes\settrue\setfalse\autocrossfilereferences
+ \edef\p_autofile{\referencingparameter\c!autofile}%
+ \ifx\p_autofile\v!yes
+ \settrue \autocrossfilereferences
+ \else
+ \setfalse\autocrossfilereferences
+ \fi
\to \everysetupreferencing
-\def\exportreferences
- {\doif\@@rfexport\v!yes{\ctxlua{structures.references.export()}}}
-
\appendtoks
- \exportreferences
+ \edef\p_export{\referencingparameter\c!export}%
+ \ifx\p_export\v!yes
+ \ctxcommand{exportreferences()}%
+ \fi
\to \everygoodbye
-\def\incrementreferenceprefix{+}
-\def\decrementreferenceprefix{-}
-
-\unexpanded\def\setupreferenceprefix[#prefix]%
- {\edef\@@rfprefix{#prefix}%
- \ifx\@@rfprefix\empty
- \let\referenceprefix\empty
- \else\ifx\@@rfprefix\incrementreferenceprefix
- \global\advance\prefixcounter \plusone
- \edef\referenceprefix{\the\prefixcounter}%
- \let\@@rfprefix\s!unknown
- \else\ifx\@@rfprefix\decrementreferenceprefix
- \let\referenceprefix\empty
- \let\@@rfprefix\s!unknown
- \else\ifx\@@rfprefix\s!unknown
- % forget about it
- \else
- \edef\referenceprefix{\@@rfprefix}% expanded !
- \fi\fi\fi\fi}
-
\unexpanded\def\setupglobalreferenceprefix[#prefix]%
{\xdef\referenceprefix{#prefix}}
@@ -707,8 +764,28 @@
\unexpanded\def\popreferenceprefix
{\popmacro\referenceprefix}
+\def\m_strc_references_prefix_yes{+}
+\def\m_strc_references_prefix_nop{-}
+
+\unexpanded\def\setupreferenceprefix[#prefix]%
+ {\edef\p_prefix{#prefix}%
+ \ifx\p_prefix\empty
+ \let\referenceprefix\empty
+ \else\ifx\p_prefix\m_strc_references_prefix_yes
+ \letreferencingparameter\c!prefix\s!unknown
+ \global\advance\prefixcounter\plusone
+ \edef\referenceprefix{\the\prefixcounter}%
+ \else\ifx\p_prefix\m_strc_references_prefix_nop
+ \letreferencingparameter\c!prefix\s!unknown
+ \let\referenceprefix\empty
+ \else\ifx\p_prefix\s!unknown
+ % forget about it
+ \else
+ \let\referenceprefix\p_prefix
+ \fi\fi\fi\fi}
+
\appendtoks
- \setupreferenceprefix[\@@rfprefix]
+ \setupreferenceprefix[\referencingparameter\c!prefix]
\to \everysetupreferencing
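
%D A small sketch of the prefix mechanism (label and resulting prefix
%D are only illustrative):
%D
%D \starttyping
%D \setupreferenceprefix[+] % push a new (numeric) prefix
%D \pagereference[intro]    % addressed as e.g. 1:intro
%D \setupreferenceprefix[-] % clear the prefix again
%D \stoptyping
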
%D We can typeset a reference using \type{\in}, \type{\at} and
@@ -808,11 +885,11 @@
\let\crlf\space
\let\\\space
\postponenotes
- \@@rfleft
+ \referencingparameter\c!left
\doifreferencefoundelse{#label}
- {\goto{\limitatetext\currentreferencetitle\@@rfwidth\unknown}[#label]}% not so efficient (dup lookup)
+ {\goto{\limitatetext\currentreferencetitle{\referencingparameter\c!width}\unknown}[#label]}% not so efficient (dup lookup)
{}% todo
- \@@rfright
+ \referencingparameter\c!right
\endgroup}
%D The previously discussed setup macro lets us specify the
@@ -883,7 +960,9 @@
\let\leftofreference \empty
\let\rightofreference \empty
-\setvalue{\??rf\c!interaction\v!all}%
+\installcorenamespace{referencinginteraction}
+
+\setvalue{\??referencinginteraction\v!all}%
{\the\leftreferencetoks
\doifsometokselse\leftreferencetoks \leftofreferencecontent \donothing
\leftofreference
@@ -892,24 +971,24 @@
\doifsometokselse\rightreferencetoks\rightofreferencecontent\donothing
\the\rightreferencetoks}
-\setvalue{\??rf\c!interaction\v!label}%
+\setvalue{\??referencinginteraction\v!label}%
{\leftofreference
\the\leftreferencetoks
\the\rightreferencetoks
\rightofreference}
-\setvalue{\??rf\c!interaction\v!text}%
+\setvalue{\??referencinginteraction\v!text}%
{\leftofreference
\currentreferencecontent
\rightofreference}
-\setvalue{\??rf\c!interaction\v!symbol}%
+\setvalue{\??referencinginteraction\v!symbol}%
{\referencesymbol}
\def\referencesequence
- {\csname \??rf\c!interaction
- \ifcsname\??rf\c!interaction\@@rfinteraction\endcsname
- \@@rfinteraction
+ {\csname\??referencinginteraction
+ \ifcsname\??referencinginteraction\referencingparameter\c!interaction\endcsname
+ \referencingparameter\c!interaction
\else
\v!all
\fi
@@ -1067,7 +1146,7 @@
\def\autoreferencelabeltextflag{*} % a proper key like 'auto' or 'name' can clash with a label key
\unexpanded\def\autoreferencelabeltext
- {\ctxlua{structures.references.getcurrentmetadata("name")}}
+ {\ctxcommand{getcurrentreferencemetadata("name")}}
% \starttext
% \definereferenceformat[inxx] [left=(,right=),text=txt]
@@ -1144,7 +1223,7 @@
\attribute\referenceattribute\attributeunsetvalue
\global\lastsavedreferenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1163,7 +1242,7 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax,\extrareferencearguments)}%
\setlocationattributes
\attribute\referenceattribute\lastreferenceattribute
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1182,9 +1261,9 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.doifelse("\referenceprefix","#label",\extrareferencearguments)}%
+ \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
{\expandtexincurrentreference
- \ctxlua{structures.references.injectcurrentset(\number\ht\strutbox,\number\dp\strutbox)}%
+ \ctxcommand{injectcurrentreference(\number\ht\strutbox,\number\dp\strutbox)}%
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1205,9 +1284,9 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.doifelse("\referenceprefix","#label",\extrareferencearguments)}%
+ \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
{\expandtexincurrentreference
- \ctxlua{structures.references.injectcurrentset(\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax)}%
+ \ctxcommand{injectcurrentreference(\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax)}%
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\attribute\referenceattribute\lastreferenceattribute
@@ -1226,7 +1305,7 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1243,7 +1322,7 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
\setlocationcolorspec{#resolver}% no consequence for strut
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1260,7 +1339,7 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.inject("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
\hbox attr \referenceattribute \lastreferenceattribute {#content}%
@@ -1276,9 +1355,9 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxlua{structures.references.doifelse("\referenceprefix","#label",\extrareferencearguments)}%
+ \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
{\expandtexincurrentreference
- \ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ \ctxcommand{injectcurrentreference(nil,nil)}%
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1298,8 +1377,8 @@
\ht\scratchbox#height%
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
- \ctxlua{structures.references.doifelse("\referenceprefix","#label",\extrareferencearguments)}%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ {\ctxcommand{injectcurrentreference(nil,nil)}%
\global\lastsavedreferenceattribute\lastreferenceattribute
\hbox attr \referenceattribute \lastreferenceattribute {\box\scratchbox}}
{\box\scratchbox}%
@@ -1386,14 +1465,18 @@
%D
%D \showsetup{setupurl}
-\unexpanded\def\setupurl
- {\dodoubleargument\getparameters[\??ur]}
+\installcorenamespace{url}
+
+\installdirectcommandhandler \??url {url}
+
+\setupurl
+ [\c!style=\v!type,
+ \c!color=]
\unexpanded\def\url[#label]% move \hyphenatedurl to lua end (is already lua)
{\dontleavehmode
\begingroup
- \dousestyleparameter\@@urstyle
- \dousecolorparameter\@@urcolor
+ \useurlstyleandcolor\c!style\c!color
\hyphenatedurl{\ctxcommand{geturl("#label")}}%
\endgroup}
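
%D A sketch of usage: \type{\useURL} (defined in the interaction
%D modules) gives the label a meaning, after which \type{\url} typesets
%D it (the label and url are only illustrative):
%D
%D \starttyping
%D \useURL[pragma][http://www.pragma-ade.com]
%D \setupurl[color=darkblue]
%D Visit \url[pragma] for more information.
%D \stoptyping
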
@@ -1425,14 +1508,13 @@
\def\strc_references_do_special_from[#label]%
{\dontleavehmode
- \goto{\ctxlua{structures.references.from("#label")}}[fileorurl(#label)]}
+ \goto{\ctxcommand{from("#label")}}[fileorurl(#label)]}
\def\dofromurldescription#content% called at the lua end
{#content}
\def\dofromurlliteral#content% called at the lua end
- {\dousestyleparameter\@@urstyle
- \dousecolorparameter\@@urcolor
+ {\useurlstyleandcolor\c!style\c!color
\hyphenatedurl{#content}}
\let\dofromfiledescription\dofromurldescription
@@ -1463,21 +1545,21 @@
% also lua, like urls and files
-\unexpanded\def\setupprograms
- {\dodoubleargument\getparameters[\??pr]}
+\installcorenamespace{programs}
+
+\installdirectcommandhandler \??programs {programs}
\unexpanded\def\defineprogram
{\dotripleargument\strc_references_define_program}
\def\strc_references_define_program[#name][#program][#description]%
- {\ctxlua{structures.references.programs.define("#name","#program","#description")}}
+ {\ctxcommand{defineprogram("#name",\!!bs#program\!!es,\!!bs#description\!!es)}}
\def\program[#name]% incompatible, more consistent, hardly used anyway
{\dontleavehmode
\begingroup
- \dousestyleparameter\@@prstyle
- \dousecolorparameter\@@prcolor
- \ctxlua{structures.references.programs.get("#name","\@@pralternative","\@@prspace")}%
+ \useprogramsstyleandcolor\c!style\c!color
+ \ctxcommand{getprogram("#name","\directprogramsparameter\c!alternative","\directprogramsparameter\c!space")}%
\endgroup}
%D As we can see, we directly use the special reference
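
%D A sketch of the program interface (program name and description are
%D only illustrative):
%D
%D \starttyping
%D \defineprogram[viewer][xpdf][a pdf viewer]
%D \setupprograms[style=type]
%D Run \program[viewer] to inspect the result.
%D \stoptyping
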
@@ -1571,7 +1653,11 @@
\setbox\scratchbox\hbox{#text}% to be solved some day
\ifdim\wd\scratchbox>\zeropoint
\unhbox\scratchbox
- \doifsomething\@@rfseparator{\removeunwantedspaces\@@rfseparator}% remove is new
+ \edef\p_separator{\referencingparameter\c!separator}%
+ \ifx\p_separator\empty \else
+ \removeunwantedspaces % remove is new
+ \p_separator
+ \fi
\else
\unhbox\scratchbox
\fi
@@ -1598,10 +1684,6 @@
\c!separator=\nonbreakablespace,
\c!export=\v!no]
-\setupurl
- [\c!style=\v!type,
- \c!color=]
-
\setupprograms
[\c!directory=,
\c!style=\v!type,
@@ -1699,7 +1781,9 @@
% todo: parameterhandler
-\def\getreferencestructureprefix#kind#name#category%
+\installcorenamespace{referencingprefix}
+
+\def\getreferencestructureprefix#kind#name#category% name will change
{{
prefix = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefix",
separatorset = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixseparatorset",
@@ -1715,22 +1799,30 @@
\def\strc_references_setup_reference_structure_prefix[#kind][#category][#settings]%
{\ifthirdargument
- \getparameters[\??rf:\c!prefix:#kind:#category][#settings]%
+ \getparameters[\??referencingprefix#kind:#category][#settings]%
\else\ifsecondargument
- \getparameters[\??rf:\c!prefix::#kind][#category]%
+ \getparameters[\??referencingprefix:#kind][#category]%
\fi\fi}
\def\referencestructureprefixparameter#kind#name#category#parameter%
- {\ifcsname\??rf:\c!prefix:#name:#category#parameter\endcsname
- \csname\??rf:\c!prefix:#name:#category#parameter\endcsname
- \else\ifcsname\??rf:\c!prefix:#kind:#category#parameter\endcsname
- \csname \??rf:\c!prefix:#kind:#category#parameter\endcsname
- \else\ifcsname\??rf:\c!prefix::#category#parameter\endcsname
- \csname \??rf:\c!prefix::#category#parameter\endcsname
+ {\ifcsname\??referencingprefix#name:#category#parameter\endcsname
+ \csname\??referencingprefix#name:#category#parameter\endcsname
+ \else\ifcsname\??referencingprefix#kind:#category#parameter\endcsname
+ \csname \??referencingprefix#kind:#category#parameter\endcsname
+ \else\ifcsname\??referencingprefix:#category#parameter\endcsname
+ \csname \??referencingprefix:#category#parameter\endcsname
\fi\fi\fi}
+% \def\currentreferencedefault
+% {\ctxcommand{filterdefaultreference()}}
+
\def\currentreferencedefault
- {\ctxlua{structures.references.filter("default",\ctxlua{structures.references.getcurrentprefixspec("\v!default")})}}
+ {\ctxcommand{filterreference("\s!default",\ctxcommand{getcurrentprefixspec("\s!default")})}}
+
+% needs testing
+%
+% \def\currentreferencedefault
+% {\ctxcommand{currentreferencedefault()}}
%D Not all support is visible by looking at the \TEX\ code; here is one of those:
%D
diff --git a/Master/texmf-dist/tex/context/base/strc-reg.lua b/Master/texmf-dist/tex/context/base/strc-reg.lua
index 6da58345bf5..40cd3455b28 100644
--- a/Master/texmf-dist/tex/context/base/strc-reg.lua
+++ b/Master/texmf-dist/tex/context/base/strc-reg.lua
@@ -169,9 +169,9 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
if trace_registers then
if detail then
- report_registers("criterium: %s, %s, found: %s",criterium,detail,#result)
+ report_registers("criterium %a, detail %a, found %a",criterium,detail,#result)
else
- report_registers("criterium: %s, found: %s",criterium,#result)
+ report_registers("criterium %a, detail %a, found %a",criterium,nil,#result)
end
end
return result
@@ -266,6 +266,10 @@ local function preprocessentries(rawdata)
end
rawdata.entries = nil
end
+ local seeword = rawdata.seeword
+ if seeword then
+ seeword.processor, seeword.text = splitprocessor(seeword.text or "")
+ end
end
function registers.store(rawdata) -- metadata, references, entries
@@ -519,7 +523,7 @@ function registers.finalize(data,options) -- maps character to index (order)
local entry, tag = firstofsplit(v)
if tag ~= lasttag then
if trace_registers then
- report_registers("splitting at %s",tag)
+ report_registers("splitting at %a",tag)
end
done, nofdone = { }, 0
nofsplit = nofsplit + 1
@@ -675,8 +679,7 @@ function registers.flush(data,options,prefixspec,pagespec)
list[#list] = nil
else
-- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere
- report_registers("invalid see entry in register '%s', reference '%s'",
- entry.metadata.name or "?",list[1][1] or "?")
+ report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
end
end
end
@@ -826,10 +829,11 @@ function registers.flush(data,options,prefixspec,pagespec)
context.startregisterseewords()
for i=1,nt do
local entry = t[i]
- local processor = entry.processors and entry.processors[1] or ""
+ local seeword = entry.seeword
+ local seetext = seeword.text or ""
+ local processor = seeword.processor or (entry.processors and entry.processors[1]) or ""
local seeindex = entry.references.seeindex or ""
- local seeword = entry.seeword.text or ""
- context.registerseeword(i,n,processor,0,seeindex,seeword)
+ context.registerseeword(i,n,processor,0,seeindex,seetext)
end
context.stopregisterseewords()
end
diff --git a/Master/texmf-dist/tex/context/base/strc-reg.mkiv b/Master/texmf-dist/tex/context/base/strc-reg.mkiv
index e36556949b2..558f266c837 100644
--- a/Master/texmf-dist/tex/context/base/strc-reg.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-reg.mkiv
@@ -19,12 +19,7 @@
% todo: tag:: becomes rendering
% todo: language, character, linked, location
-
-%D Helper:
-
-% \def\doflushatpar{\ifvmode\expandafter\dogotopar\else\expandafter\firstofoneargument\fi}
-
-\def\doflushatpar{\ifvmode\expandafter\flushatnextpar\else\expandafter\firstofoneargument\fi}
+% todo: fonts etc at sublevels (already defined)
% \starttext
% \placeregister[index]
@@ -34,9 +29,6 @@
% test \index{aa} test \setregisterentry[index][label=y] test \page test \page test \page test \finishregisterentry[index][label=y,entries=yy]
% \stoptext
-% Instead of + an & can be used. The separator can also be given
-% as the first character.
-%
% \index {entry}
% \index[key] {entry}
% \index[pageclass::] {entry}
@@ -46,52 +38,34 @@
% \index[pageclass::] {textclass::entry}
% \index[pageclass::key]{textclass::entry}
-%D Parameters ... we set up levels so we need a slightly different command handler here .. okay, we
-%D can use the children for that ... but then we need to tweak the name in order to distinguish:
-%D register -> register:1 (i.e. prefix)
-
-\let\currentregister\empty
-
-% \def\registerparameter#1{\csname\??id\currentregister#1\endcsname}
-
-\def\detokenizedregisterparameter#1{\detokenize\expandafter\expandafter\expandafter{\csname\??id#1\endcsname}} % always root
+% eventually a variant with n entries, parameters and userdata (altnum)
-\def\registerparameter #1{\csname\doregisterparameter{\??id\currentregister}{#1}\endcsname}
-\def\registerparameterhash#1{\doregisterparameterhash {\??id\currentregister}#1}
+\installcorenamespace{register}
-\def\doregisterparameter #1#2{\ifcsname#1#2\endcsname#1#2\else\expandafter\doregisterparentparameter \csname#1\s!parent\endcsname{#2}\fi}
-\def\doregisterparameterhash#1#2{\ifcsname#1#2\endcsname #1\else\expandafter\doregisterparentparameterhash\csname#1\s!parent\endcsname{#2}\fi}
+\installcommandhandler\??register {register} \??register
-\def\doregisterparentparameter #1#2{\ifx#1\relax\s!empty\else\doregisterparameter #1{#2}\fi}
-\def\doregisterparentparameterhash#1#2{\ifx#1\relax \else\doregisterparameterhash#1{#2}\fi}
+\let\strc_registers_setup_saved\setupregister
-\def\useregisterstyleandcolor#1#2% style color
- {\edef\currentstyleparameter{\registerparameter#1}%
- \edef\currentcolorparameter{\registerparameter#2}%
- \ifx\currentstyleparameter\empty\else\dousestyleparameter\currentstyleparameter\fi
- \ifx\currentcolorparameter\empty\else\dousecolorparameter\currentcolorparameter\fi}
+\unexpanded\def\setupregister % maybe we should drop the plural form
+ {\dotripleempty\strc_registers_setup}
-%D Setup:
-
-\newtoks\everysetupregister
-
-\unexpanded\def\setupregisters
- {\dotripleempty\dosetupregisters}
-
-\def\dosetupregisters[#1][#2][#3]%
+\def\strc_registers_setup[#1][#2][#3]%
{\ifthirdargument
- \def\dodosetupregister##1{\getparameters[\??id##1#2][#3]}%
- \processcommalist[#1]\dodosetupregister
+ \def\strc_registers_setup_step##1{\strc_registers_setup_saved[#1:##1][#3]}%
+ \processcommalist[#2]\strc_registers_setup_step
\else\ifsecondargument
- \def\dodosetupregister##1{\edef\currentregister{##1}\getparameters[\??id##1][#2]\the\everysetupregister}%
- \processcommalist[#1]\dodosetupregister
+ \strc_registers_setup_saved[#1][#2]%
\else
- \getparameters[\??id][#1]%
+ \strc_registers_setup_saved[#1]%
\fi\fi}
-\let\setupregister\setupregisters
+\unexpanded\def\setupregisters
+ {\dosingleempty\strc_registers_setup_root}
+
+\def\strc_registers_setup_root[#1]%
+ {\strc_registers_setup_saved[#1]\relax}
-\setupregisters
+\setupregister
[\c!n=2,
 \c!balance=\v!yes, % \v!no is not used that often
\c!align=\v!flushleft,
@@ -102,7 +76,7 @@
\c!compress=\v!no,
\c!interaction=\v!pagenumber,
\c!alternative=\v!a,
- \c!distance=1em,
+ \c!distance=\emwidth,
\c!style=\v!bold,
\c!pagestyle=\v!slanted,
\c!indicator=\v!yes,
@@ -115,7 +89,7 @@
\c!unknownreference=\v!empty,
\c!prefix=\v!both,
%\c!expansion=,
-%\c!xmlsetup=,
+ %\c!xmlsetup=,
\c!pagenumber=\v!yes,
\c!pageprefixconnector=\endash,
\c!pagesegments=2:2,
@@ -125,48 +99,63 @@
\c!numberorder=\v!numbers, % \v!characters
\s!language=\currentmainlanguage]%
-%D Definition:
+% yes or no shared ?
-\unexpanded\def\defineregister
- {\dodoubleargument\dodefineregister}
+\setupregister
+ [\c!label=,
+ \c!entries=,
+ \c!alternative=]
-\def\dodefineregister[#1][#2]% #2?
- {\setupregister[#1][\s!parent=\??id]%
- \ctxlua{structures.registers.define('#1')}%
- \presetheadtext[#1=\Word{#1}]%
- \setuvalue{#1}{\dodoubleempty\doregister[#1]}%
- \setuvalue{\e!see#1}{\dodoubleempty\doseeregister[#1]}%
-% \setuvalue{\e!coupled#1}{\dolinkedregister{#1}}%
- \setvalue{\e!place#1}{\placeregister[#1]}%
- \setvalue{\e!complete#1}{\completeregister[#1]}%
- \setvalue{\e!setup#1\e!endsetup}[##1]{\getparameters[\??id#1][##1]}}
+%D \starttyping
+%D \setupregister[index][1][textcolor=darkred]
+%D \setupregister[index][2][textcolor=darkgreen,textstyle=bold]
+%D
+%D \placeregister[index][n=1] \blank[3*big]
+%D
+%D test \index{test+one} test \index{test+two} more \index{more}
+%D \stoptyping
-%D Registering:
+\newconditional\c_strc_registers_defining
-\newif\ifwritetoregister \writetoregistertrue
+\ifdefined\Word \else \unexpanded\def\Word#1{#1} \fi
-% eventually a variant with n entries, parameters and userdata (altnum)
+\appendtoks
+ \ifconditional\c_strc_registers_defining \else
+ \settrue\c_strc_registers_defining
+ \ctxlua{structures.registers.define('\currentregister')}%
+ \normalexpanded{\presetheadtext[\currentregister=\Word{\currentregister}]}%
+ \setuevalue{\currentregister}{\dodoubleempty\strc_registers_insert_entry[\currentregister]}%
+ \setuevalue{\e!see\currentregister}{\dodoubleempty\strc_registers_insert_see[\currentregister]}%
+ %setuevalue{\e!coupled\currentregister}{\dolinkedregister{\currentregister}}%
+ % historic ballast
+ \setuevalue{\e!place\currentregister}{\placeregister[\currentregister]}%
+ \setuevalue{\e!complete\currentregister}{\completeregister[\currentregister]}%
+ \setuevalue{\e!setup\currentregister\e!endsetup}{\setupregister[\currentregister]}%
+ \dorecurse\plusthree {% weird, expanded should not be needed
+ \normalexpanded{\defineregister[\currentregister:\recurselevel][\currentregister]}%
+ %\defineregister[\currentregister:\recurselevel][\currentregister]%
+ \letregisterparameter{\c!entries:\recurselevel}\empty % needed as we use detokenize (ok, we can
+ \letregisterparameter{\c!keys :\recurselevel}\empty % avoid it, but it's faster too)
+ }%
+ %
+ \setfalse\c_strc_registers_defining
+ \fi
+\to \everydefineregister
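
%D A sketch of what such a definition provides, assuming the english
%D user interface for the generated place and complete commands:
%D
%D \starttyping
%D \defineregister[concept]
%D
%D A first \concept{notion} and a nested \concept{notion+subnotion}.
%D
%D \placeconcept[n=2]
%D \stoptyping
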
-\getparameters
- [\??id]
- [\c!label=,
- \c!entries=,
- \c!entries:1=,\c!entries:2=,\c!entries:3=,
- \c!keys:1=,\c!keys:2=,\c!keys:3=,
- \c!alternative=]
+%D Registering:
-\def\doregisterstructurepageregister
+\def\strc_registers_register_page_entry
{\iftrialtypesetting
\expandafter\gobblethreearguments
\else
- \expandafter\dodoregisterstructurepageregister
+ \expandafter\strc_registers_register_page_entry_indeed
\fi}
-\def\dodoregisterstructurepageregister#1#2#3% register data userdata
+\def\strc_registers_register_page_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
- %getparameters[\??id][\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
- \getparameters[\??id][#2]%
+ %\setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
+ \setupcurrentregister[#2]%
\edef\currentregisterlabel {\registerparameter\c!label}%
\edef\currentregisterexpansion{\registerparameter\c!expansion}%
\edef\currentregisterownnumber{\registerparameter\c!ownnumber}%
@@ -245,7 +234,7 @@
section = structures.sections.currentid(), % hm, why then not also lastsection the same way
label = "\currentregisterlabel",
},
-% \ifx\currentregisterentries\empty \else
+ % \ifx\currentregisterentries\empty \else
entries = {
% we need a special one for xml, this is just a single one
\ifx\currentregisterentries\empty
@@ -259,10 +248,10 @@
\!!bs\currentregisterkeys\!!es,
\fi
},
-% \fi
+ % \fi
userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
}
- } }%
+ }}%
\ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}%
\ifx\currentregisterownnumber\v!yes
\glet\currentregistersynchronize\relax
@@ -275,45 +264,51 @@
\attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
\endgroup}
-\def\doregister[#1][#2]%
+\unexpanded\def\strc_registers_insert_entry[#1][#2]%
{\def\currentregister{#1}%
- \doifelse{\registerparameter\c!ownnumber}\v!yes\dodoregister\donoregister{#1}{#2}}
+ \doifelse{\registerparameter\c!ownnumber}\v!yes
+ \strc_registers_insert_entry_yes
+ \strc_registers_insert_entry_nop
+ {#2}}
+
+\def\strc_registers_insert_entry_nop#1#2%
+ {\doflushatpar{\strc_registers_register_page_entry\currentregister{\c!keys={#1},\c!entries={#2}}{}}}
-\def\donoregister #1#2#3{\doflushatpar{\doregisterstructurepageregister{#1}{\c!keys={#2},\c!entries={#3}}{}}}
-\def\dodoregister#1#2#3#4{\doflushatpar{\doregisterstructurepageregister{#1}{\c!keys={#2},\c!alternative=#3,\c!entries={#4}}{}}}
+\def\strc_registers_insert_entry_yes#1#2#3%
+ {\doflushatpar{\strc_registers_register_page_entry\currentregister{\c!keys={#1},\c!alternative=#2,\c!entries={#3}}{}}}
-\unexpanded\def\startregister{\doquadrupleempty\dostartregister}
-\unexpanded\def\stopregister {\dodoubleargument\dostopregister}
+\unexpanded\def\startregister{\doquadrupleempty\strc_registers_start_entry}
+\unexpanded\def\stopregister {\dodoubleargument\strc_registers_stop_entry}
% a synonym, so that we can nest with overlap without syntax check problems
\let\openregisterrange \startregister
\let\closeregisterrange\stopregister
-\def\dostartregister[#1][#2][#3][#4]#5%
+\def\strc_registers_start_entry[#1][#2][#3][#4]#5%
{\iffourthargument
% #1=register #2=tag #3=own #4=sortkey #5=entry
- \doflushatpar{\doregisterstructurepageregister{#1}{\c!label=#2,\c!alternative=#3,\c!keys={#4},\c!entries={#5}}{}}%
+ \doflushatpar{\strc_registers_register_page_entry{#1}{\c!label=#2,\c!alternative=#3,\c!keys={#4},\c!entries={#5}}{}}%
\else
% #1=register #2=tag #3=sortkey #5=entry
- \doflushatpar{\doregisterstructurepageregister{#1}{\c!label=#2,\c!keys={#3},\c!entries={#5}}{}}%
+ \doflushatpar{\strc_registers_register_page_entry{#1}{\c!label=#2,\c!keys={#3},\c!entries={#5}}{}}%
\fi}
-\def\dostopregister[#1][#2]%
+\def\strc_registers_stop_entry[#1][#2]%
{\normalexpanded{\ctxlatelua{structures.registers.extend("#1","#2")}}}
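
%D A sketch of a ranged entry; the tag couples the start and stop
%D points (register name and tag are only illustrative):
%D
%D \starttyping
%D some text \startregister[index][range-one]{ranged entry}
%D several pages of discussion
%D \stopregister[index][range-one] more text
%D \stoptyping
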
-\def\setregisterentry {\dotripleempty\dosetregisterentry}
-\def\finishregisterentry{\dotripleempty\dofinishregisterentry}
+\def\setregisterentry {\dotripleempty\strc_registers_set_entry}
+\def\finishregisterentry{\dotripleempty\strc_registers_finish_entry}
% not yet documented, not sure if this will stay:
-\def\dosetregisterentry [#1][#2][#3]{\doflushatpar{\doregisterstructurepageregister{#1}{#2}{#3}}}
-\def\dofinishregisterentry[#1][#2][#3]{\dofinishregisterstructurepageregister{#1}{#2}{#3}}
+\def\strc_registers_set_entry [#1][#2][#3]{\doflushatpar{\strc_registers_register_page_entry{#1}{#2}{#3}}}
+\def\strc_registers_finish_entry[#1][#2][#3]{\strc_registers_finish_entry_indeed{#1}{#2}{#3}}
-\def\dofinishregisterstructurepageregister#1#2#3% register data userdata
+\def\strc_registers_finish_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
- \getparameters[\??id][\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
+ \setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
\edef\currentregisterlabel {\registerparameter\c!label}%
\edef\currentregisterexpansion{\registerparameter\c!expansion}%
\edef\currentregisterownnumber{\registerparameter\c!ownnumber}%
@@ -336,7 +331,7 @@
\normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
metadata = {
\ifx\currentregisterownnumber\v!yes
- own = "\registerparameter\c!alternative", % can be used instead of pagenumber
+ own = "\registerparameter\c!alternative", % can be used instead of pagenumber
\fi
},
userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
@@ -353,7 +348,8 @@
},
entries = {
% we need a special one for xml, this is just a single one
- \!!bs\currentregisterentries\!!es, \!!bs\currentregisterkeys\!!es
+ \!!bs\currentregisterentries\!!es,
+ \!!bs\currentregisterkeys\!!es
},
userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
})
@@ -377,29 +373,32 @@
% \placeregister[index][n=1]
% \stoptext
-\def\setstructurepageregister{\dotripleempty\dosetstructurepageregister}
+% some overlap with previous
+
+\unexpanded\def\setstructurepageregister
+ {\dotripleempty\strc_registers_set}
-\def\dosetstructurepageregister[#1][#2][#3]% [register][settings][userdata]
- {\doflushatpar{\doregisterstructurepageregister{#1}{#2}{#3}}}
+\def\strc_registers_set[#1][#2][#3]% [register][settings][userdata]
+ {\doflushatpar{\strc_registers_register_page_entry{#1}{#2}{#3}}}
-\unexpanded\def\startstructurepageregister{\doquadrupleempty\dostartstructurepageregister}
-\unexpanded\def\stopstructurepageregister {\dodoubleargument\dostopstructurepageregister}
+\unexpanded\def\startstructurepageregister{\doquadrupleempty\strc_registers_start}
+\unexpanded\def\stopstructurepageregister {\dodoubleargument\strc_registers_stop}
\let\openstructurepageregisterrange \startstructurepageregister
\let\closestructurepageregisterrange\stopstructurepageregister
-\def\dostartstructurepageregister[#1][#2][#3][#4]% [register][tag][settings][userdata]
- {\doflushatpar{\doregisterstructurepageregister{#1}{\c!label=#2,#3}{#4}}}
+\def\strc_registers_start[#1][#2][#3][#4]% [register][tag][settings][userdata]
+ {\doflushatpar{\strc_registers_register_page_entry{#1}{\c!label=#2,#3}{#4}}}
-\def\dostopstructurepageregister[#1][#2]%
+\def\strc_registers_stop[#1][#2]%
{\normalexpanded{\ctxlatelua{structures.registers.extend("#1","#2")}}}
% So far.
-\def\doseeregister[#1][#2]#3#4%
- {\doflushatpar{\doprocessseeregister{#1}{#2}{#3}{#4}}}
+\unexpanded\def\strc_registers_insert_see[#1][#2]#3#4%
+ {\doflushatpar{\strc_registers_insert_see_indeed{#1}{#2}{#3}{#4}}}
-\def\doprocessseeregister#1#2#3#4% register key entry seeword
+\def\strc_registers_insert_see_indeed#1#2#3#4% register key entry seeword
{\begingroup
\edef\currentregister{#1}%
\edef\currentregisterexpansion{\registerparameter\c!expansion}%
@@ -434,10 +433,11 @@
},
entries = {
% we need a special one for xml, this is just a single one
- "\currentregisterentries", "#2"
+ \!!bs\currentregisterentries\!!es,
+ \!!bs#2\!!es
},
seeword = {
- text = "\currentregisterseeword"
+ text = \!!bs\currentregisterseeword\!!es
},
}
}}%
@@ -450,9 +450,9 @@
\let\utilityregisterlength\!!zerocount
\def\determineregistercharacteristics
- {\dodoubleempty\dodetermineregistercharacteristics}
+ {\dodoubleempty\strc_registers_determine_characteristics}
-\def\dodetermineregistercharacteristics[#1][#2]%
+\def\strc_registers_determine_characteristics[#1][#2]%
{\begingroup
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
@@ -476,67 +476,85 @@
\to \everyplaceregister
\unexpanded\def\placeregister
- {\dodoubleempty\doplaceregister}
+ {\dodoubleempty\strc_registers_place}
-\def\doplaceregister[#1][#2]%
+\def\strc_registers_place[#1][#2]%
{\iffirstargument
\begingroup
-% \forgetall
+ %\forgetall
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
\the\everyplaceregister
- \startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \startpacked[\v!blank]%
- \ctxlua{structures.registers.process('\currentregister',{
- language = "\registerparameter\s!language",
- method = "\registerparameter\c!method",
- numberorder = "\registerparameter\c!numberorder",
- compress = "\registerparameter\c!compress",
- criterium = "\registerparameter\c!criterium",
- },
- {
-% prefix = "\registerparameter\c!pageprefix",
- separatorset = "\registerparameter\c!pageprefixseparatorset",
- conversionset = "\registerparameter\c!pageprefixconversionset",
- starter = \!!bs\registerparameter\c!pageprefixstarter\!!es,
- stopper = \!!bs\registerparameter\c!pageprefixstopper\!!es,
- set = "\registerparameter\c!pageprefixset",
- segments = "\registerparameter\c!pageprefixsegments",
- connector = \!!bs\registerparameter\c!pageprefixconnector\!!es,
- },
- {
- prefix = "\registerparameter\c!pageprefix",
- separatorset = "\registerparameter\c!pageseparatorset",
- conversionset = "\registerparameter\c!pageconversionset",
- starter = \!!bs\registerparameter\c!pagestarter\!!es,
- stopper = \!!bs\registerparameter\c!pagestopper\!!es,
- segments = "\registerparameter\c!pagesegments",
- }
- )}%
- \stoppacked
- \stopcolumns
+ \ifnum\registerparameter\c!n>\plusone
+ \startcolumns
+ [\c!n=\registerparameter\c!n,
+ \c!balance=\registerparameter\c!balance,
+ \c!align=\registerparameter\c!align,
+ \c!tolerance=\registerparameter\c!tolerance]%
+ \strc_registers_place_indeed
+ \stopcolumns
+ \else
+ \strc_registers_place_indeed
+ \fi
\endgroup
\fi}
-\def\dolimitedregisterentry#1{\limitatetext{#1}\currentregistermaxwidth\unknown}%
+\def\strc_registers_place_columns
+ {\startcolumns
+ [\c!n=\registerparameter\c!n,
+ \c!balance=\registerparameter\c!balance,
+ \c!align=\registerparameter\c!align,
+ \c!tolerance=\registerparameter\c!tolerance]%
+ \startpacked[\v!blank]%
+ \strc_registers_place_indeed
+ \stoppacked
+ \stopcolumns}
+
+\def\strc_registers_place_normal
+ {\startpacked[\v!blank]%
+ \strc_registers_place_indeed
+ \stoppacked}
+
+\def\strc_registers_place_indeed
+ {\ctxlua{structures.registers.process('\currentregister',{
+ language = "\registerparameter\s!language",
+ method = "\registerparameter\c!method",
+ numberorder = "\registerparameter\c!numberorder",
+ compress = "\registerparameter\c!compress",
+ criterium = "\registerparameter\c!criterium",
+ },{
+ separatorset = "\registerparameter\c!pageprefixseparatorset",
+ conversionset = "\registerparameter\c!pageprefixconversionset",
+ starter = \!!bs\registerparameter\c!pageprefixstarter\!!es,
+ stopper = \!!bs\registerparameter\c!pageprefixstopper\!!es,
+ set = "\registerparameter\c!pageprefixset",
+ segments = "\registerparameter\c!pageprefixsegments",
+ connector = \!!bs\registerparameter\c!pageprefixconnector\!!es,
+ },{
+ prefix = "\registerparameter\c!pageprefix",
+ separatorset = "\registerparameter\c!pageseparatorset",
+ conversionset = "\registerparameter\c!pageconversionset",
+ starter = \!!bs\registerparameter\c!pagestarter\!!es,
+ stopper = \!!bs\registerparameter\c!pagestopper\!!es,
+ segments = "\registerparameter\c!pagesegments",
+ })}}
+
+\def\strc_registers_limited_entry#1%
+ {\limitatetext{#1}\currentregistermaxwidth\unknown}%
\appendtoks
\edef\currentregistermaxwidth{\registerparameter\c!maxwidth}%
\ifx\currentregistermaxwidth\empty
\let\limitedregisterentry\firstofoneargument
\else
- \let\limitedregisterentry\dolimitedregisterentry
+ \let\limitedregisterentry\strc_registers_limited_entry
\fi
\to \everyplaceregister
-\def\completeregister
- {\dodoubleempty\docompleteregister}
+\unexpanded\def\completeregister
+ {\dodoubleempty\strc_registers_complete}
-\def\docompleteregister[#1][#2]%
+\def\strc_registers_complete[#1][#2]%
{\iffirstargument
\begingroup
\edef\currentregister{#1}%
@@ -546,7 +564,7 @@
\stopnamedsection
\endgroup
\fi}
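
%D A sketch of placement; with \type{n=1} the column mechanism is
%D bypassed, as implemented above:
%D
%D \starttyping
%D \placeregister[index][n=1,compress=yes]
%D \completeregister[index]
%D \stoptyping
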
-
+
% test case for collapsing (experimental, for Steffen Wolfrum)
%
% \starttext
@@ -565,37 +583,36 @@
% xxxx \index{xxxx}
% todo \index{todo}
% \stoptext
-
+
%D Character rendering (sections):
+\installcorenamespace{registerindicator}
+
\def\defaultregistercharacter#1%
- {\doifsomething{#1}
- {\doifnot{#1}\s!unknown
- {\doifelse{\registerparameter\c!indicator}\v!yes
- {\doregistercharacter{#1}}
- {\noregistercharacter{#1}}}}}
-
-% \def\doregistercharacter#1%
-% {\expandcheckedcsname % why no \executeifdefined
-% {\??id:\c!indicator:}%
-% {\registerparameter\c!alternative}%
-% {a}%
-% {#1}}
-
-\def\doregistercharacter#1%
- {\expandcheckedcsname % why no \executeifdefined
- {\??id:\c!indicator:}%
- {\registerparameter\c!alternative}%
- {a}%
- {#1}}
-
-\def\noregistercharacter#1%
+ {\edef\currentregistercharacter{#1}%
+ \ifx\currentregistercharacter\empty
+ % skip
+ \else\ifx\currentregistercharacter\s!unknown
+ % skip
+ \else
+ \edef\p_indicator{\registerparameter\c!indicator}%
+ \ifx\p_indicator\v!yes
+ \strc_registers_place_character_yes
+ \else
+ \strc_registers_place_character_nop
+ \fi
+ \fi\fi}
+
+\def\strc_registers_place_character_yes
+ {\expandnamespaceparameter\??registerindicator\registerparameter\c!alternative\v!a{\currentregistercharacter}}
+
+\def\strc_registers_place_character_nop
{\registerparameter\c!before
\goodbreak}
% a = <before> <goodbreak> <character> <par> <after> <nobreak>
-\setvalue{\??id:\c!indicator:a}#1%
+\setvalue{\??registerindicator a}#1%
{\registerparameter\c!before
% bugged, why does leftskip gets set: \vskip\lineheight\goodbreak\vskip-\lineheight
\begingroup
@@ -603,33 +620,34 @@
\dontleavehmode
\strut
\iflocation
- \pagereference[\currentregister:\v!section:#1]%
+ \dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
\registerparameter\c!command{#1}%
\endgroup
+ \blank[\v!samepage]%
\registerparameter\c!after
\par
\nobreak}
% b = <goodbreak> <before> <character> <after> <nobreak>
-\setvalue{\??id:\c!indicator:b}#1% will be shared with a
+\setvalue{\??registerindicator b}#1% will be shared with a
{\registerparameter\c!before
\begingroup
\useregisterstyleandcolor\c!style\c!color
\dontleavehmode
\strut
\iflocation
- \pagereference[\currentregister:\v!section:#1]%
+ \dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
\registerparameter\c!command{#1}%
\endgroup
\registerparameter\c!after
\nobreak}
-\setvalue{\??id:\c!indicator:A}#1{\getvalue{\??id:\c!indicator:a}{\WORD{#1}}}
-\setvalue{\??id:\c!indicator:B}#1{\getvalue{\??id:\c!indicator:b}{\WORD{#1}}}
-
+\setvalue{\??registerindicator A}#1{\getvalue{\??registerindicator a}{\WORD{#1}}}
+\setvalue{\??registerindicator B}#1{\getvalue{\??registerindicator b}{\WORD{#1}}}
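
%D The indicator alternatives can be selected as follows (a sketch;
%D \type{A} and \type{B} are the uppercased variants of \type{a} and
%D \type{b} defined above):
%D
%D \starttyping
%D \setupregister
%D   [index]
%D   [indicator=yes,
%D    alternative=A]
%D \stoptyping
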
+
%D The following macros are the interface to the rendering. These are
%D generated by \LUA. This might change.
@@ -637,9 +655,13 @@
%D placeindex as then flushing takes place inside the index. Took me hours
%D to notice that.
+\newconditional\c_strc_registers_page_done
+\newdimen \d_strc_registers_distance
+
\unexpanded\def\startregisteroutput
{\endgraf
\begingroup
+ \d_strc_registers_distance\registerparameter\c!distance\relax
\dostarttagged\t!register\currentregister
\forgeteverypar
\forgetparindent
@@ -654,9 +676,13 @@
{\endgraf
\begingroup
\dostarttagged\t!registerentries\empty
+ \let\savedcurrentregister\currentregister
+ \edef\currentregister{\currentregister:#1}%
\useregisterstyleandcolor\c!textstyle\c!textcolor
- \advance\leftskip\numexpr#1-1\relax\dimexpr\registerparameter\c!distance\relax
- \hangindent\registerparameter\c!distance\hangafter\plusone}
+ \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax
+ \hangindent\registerparameter\c!distance\relax
+ \hangafter\plusone
+ \let\currentregister\savedcurrentregister}
\unexpanded\def\stopregisterentries
{\endgraf
@@ -673,12 +699,10 @@
{\dostoptagged
\endgraf}
-\newconditional\registerpagedone
-
\unexpanded\def\startregisterpages
{\begingroup
\dostarttagged\t!registerpages\empty
- \setfalse\registerpagedone
+ \setfalse\c_strc_registers_page_done
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
\unexpanded\def\stopregisterpages
@@ -688,7 +712,7 @@
\unexpanded\def\startregisterseewords
{\ifhmode\crlf\fi
\begingroup
- \setfalse\registerpagedone
+ \setfalse\c_strc_registers_page_done
\dostarttagged\t!registerpage\empty
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
@@ -696,15 +720,16 @@
{\dostoptagged
\endgroup}
-\def\registerpageseparator% todo: , configurable
- {\ifconditional\registerpagedone
+\unexpanded\def\registerpageseparator% todo: , configurable
+ {\ifconditional\c_strc_registers_page_done
\registerpageseparatorsymbol
\else
- \hskip\registerparameter\c!distance\relax
- \settrue\registerpagedone
+ \hskip\d_strc_registers_distance\relax
+ \settrue\c_strc_registers_page_done
\fi}
-\def\registeronepagerangeseparator{|\endash|}
+\unexpanded\def\registeronepagerangeseparator
+ {|\endash|}
\def\withregisterpagecommand#1#2#3#4%
{\def\currentregisterpageindex{#2}%
@@ -714,13 +739,13 @@
\applyprocessor{#1}{\registerparameter\c!pagecommand{#4}}%
\fi}
-\def\registeronepage#1#2#3#4% #1:processor content
+\unexpanded\def\registeronepage#1#2#3#4% #1:processor content
{\registerpageseparator
\dostarttagged\t!registerpage\empty
\withregisterpagecommand{#1}{#2}{#3}{#4}%
\dostoptagged}
-\def\registerpagerange#1#2#3#4#5#6#7% #1:processor content, content todo: -- configurable
+\unexpanded\def\registerpagerange#1#2#3#4#5#6#7% #1:processor content, content todo: -- configurable
{\registerpageseparator
\dostarttagged\t!registerpagerange\empty
\dostarttagged\t!registerfrompage\empty
@@ -732,16 +757,16 @@
\dostoptagged
\dostoptagged}
-\def\doapplyregisterentrycommand#1#2% processor text
+\unexpanded\def\doapplyregisterentrycommand#1#2% processor text
{\dostarttagged\t!registerentry\empty
\ifx\currentregisterseeindex\empty \else
\dontleavehmode
- \pagereference[seeindex:\currentregisterseeindex]% maybe some day we will support an area
+ \dosetdirectpagereference{seeindex:\currentregisterseeindex}% maybe some day we will support an area
\fi
\applyprocessor{#1}{\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#2}}}}%
\dostoptagged}
-\def\defaultregisterentry#1#2#3#4% #1:processor #2:internal #3:seeindex #4:word
+\unexpanded\def\defaultregisterentry#1#2#3#4% #1:processor #2:internal #3:seeindex #4:word
{\def\currentregisterpageindex{#2}%
\iflocation
\def\currentregisterseeindex{#3}%
@@ -761,11 +786,11 @@
\goto{\applyprocessor{#1}{#2}}[seeindex:\currentregisterseeindex]%
\fi}
-\def\defaultregisterseeword#1#2#3#4#5#6% i n #3:processor #4:internal #5:seeindex #6:word
+\unexpanded\def\defaultregisterseeword#1#2#3#4#5#6% i n #3:processor #4:internal #5:seeindex #6:word
{%\registerpageseparator
\def\currentregisterpageindex{#4}%
\dostarttagged\t!registersee\empty
- \settrue\registerpagedone
+ \settrue\c_strc_registers_page_done
\iflocation
\def\currentregisterseeindex{#5}%
\else
@@ -805,41 +830,44 @@
% not yet ok : new internal handler names
-\def\registerpagebuttonsymbol{\vrule\!!width1em\!!height1ex\!!depth\zeropoint\relax}
+\unexpanded\def\registerpagebuttonsymbol
+ {\vrule\s!width\emwidth\s!height\exheight\s!depth\zeropoint\relax}
+
+\installcorenamespace{registersymbol}
-\setvalue{\??id:\c!symbol:\c!n}%
+\setvalue{\??registersymbol n}%
{\def\registerpageseparatorsymbol{, }}
-\setvalue{\??id:\c!symbol:\c!a}%
+\setvalue{\??registersymbol a}%
{\def\registerpageseparatorsymbol{, }} % now done via conversion
-\setvalue{\??id:\c!symbol:\v!none}%
+\setvalue{\??registersymbol\v!none}%
{\let\registerpageseparatorsymbol\empty
\let\registeronepage \gobblefourarguments
\let\registerpagerange \gobblesevenarguments}
-\setvalue{\??id:\c!symbol:1}%
+\setvalue{\??registersymbol 1}%
{\let\registerpageseparatorsymbol\space
- \def\registeronepage {\symbol[1]\gobblefourarguments}%
- \def\registerpagerange {\symbol[1]\gobblesevenarguments}}
+ \def\registeronepage {\symbol[1]\gobblefourarguments}%
+ \def\registerpagerange{\symbol[1]\gobblesevenarguments}}
-\setvalue{\??id:\c!symbol:2}%
+\setvalue{\??registersymbol 2}%
{\let\registerpageseparatorsymbol\space
- \def\registeronepage {\registerpagebuttonsymbol\gobblefourarguments}%
- \def\registerpagerange {\registerpagebuttonsymbol\gobblesevenarguments}}
+ \def\registeronepage {\registerpagebuttonsymbol\gobblefourarguments}%
+ \def\registerpagerange{\registerpagebuttonsymbol\gobblesevenarguments}}
\def\setregisterpagerendering
{\doifelse{\registerparameter\c!pagenumber}\v!no
{\let \currentregisterpagesymbol\v!none}
{\edef\currentregisterpagesymbol{\registerparameter\c!symbol}}%
\ifx\currentregisterpagesymbol\empty
- \csname\??id:\c!symbol:\c!n\endcsname
- \else\ifcsname\??id:\c!symbol:\currentregisterpagesymbol\endcsname
- \csname\??id:\c!symbol:\currentregisterpagesymbol\endcsname
+ \csname\??registersymbol n\endcsname
+ \else\ifcsname\??registersymbol\currentregisterpagesymbol\endcsname
+ \csname\??registersymbol\currentregisterpagesymbol\endcsname
\else
\let\registerpageseparatorsymbol\space
- \def\registeronepage {\registerparameter\c!symbol\gobblefourarguments}%
- \def\registerpagerange {\registerparameter\c!symbol\gobblesevenarguments}%
+ \def\registeronepage{\registerparameter\c!symbol\gobblefourarguments}%
+ \def\registerpagerange{\registerparameter\c!symbol\gobblesevenarguments}%
\fi\fi}
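
%D A sketch of how the page rendering can be influenced from the user
%D end (the values map onto the symbol handlers defined above):
%D
%D \starttyping
%D \setupregister[index][symbol=none] % no pagenumbers at all
%D \setupregister[index][symbol=2]    % a button like rule instead
%D \setupregister[index][pagenumber=no]
%D \stoptyping
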
\appendtoks
@@ -853,6 +881,6 @@
\defineregister
[\v!index]
- [\v!indices]
+% [\v!indices]
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-ren.mkiv b/Master/texmf-dist/tex/context/base/strc-ren.mkiv
index b7b4224cd0d..00c8c3cd4d5 100644
--- a/Master/texmf-dist/tex/context/base/strc-ren.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-ren.mkiv
@@ -15,7 +15,10 @@
\unprotect
-\newconstant\headtimingmode
+\newbox\b_strc_rendering_head
+
+\newdimen\d_strc_rendering_local_leftoffset
+\newdimen\d_strc_rendering_local_rightoffset
% Martin Kolarik's problem:
%
@@ -23,90 +26,57 @@
% \def\doTitle#1#2{\ruledvbox{\forgetall \hsize=4cm \ruledhbox{\ruledvtop{#1}\ruledvtop{#2}}}}
% \section{test test test test test test test test test test test test test test test test test}
-\newevery \everyheadstart \relax
+\newtoks\everyheadstart
-\unexpanded\def\setupheadcomponentfont#1#2%
+\unexpanded\def\strc_rendering_initialize_style_and_color
+ {\ifconditional\headisdisplay
+ \expandafter\strc_rendering_initialize_style_and_color_display
+ \else
+ \expandafter\strc_rendering_initialize_style_and_color_inline
+ \fi}
+
+\unexpanded\def\strc_rendering_initialize_style_and_color_display#1#2%
{\dontconvertfont
- \ifconditional\headisdisplay
- \edef\askedheadinterlinespace{\headparameter\c!interlinespace}%
- \ifx\askedheadinterlinespace\empty
- % here the interline space is only set when style sets no space
- \setfalse\fontattributeisset % use the currentfontparameter state instead
- \setfalse\interlinespaceisset
- \useheadstyleandcolor\c!style\c!color
- \ifconditional\fontattributeisset \ifconditional\interlinespaceisset \else
- \setupinterlinespace
- \fi \fi
- \setfalse\fontattributeisset \useheadstyleandcolor#1#2%
- \ifconditional\fontattributeisset \ifconditional\interlinespaceisset \else
- \setupinterlinespace
- \fi \fi
- \else
- % here the set interline space overloads any other set space in the style
- \setfalse\fontattributeisset
- \useheadstyleandcolor\c!style\c!color
- \ifconditional\fontattributeisset
- \dosetupcheckedinterlinespace\askedheadinterlinespace
- \fi
- \setfalse\fontattributeisset
- \useheadstyleandcolor#1#2%
- \ifconditional\fontattributeisset
- \dosetupcheckedinterlinespace\askedheadinterlinespace
- \fi
- \fi
+ \edef\p_strc_rendering_interlinespace{\headparameter\c!interlinespace}%
+ \ifx\p_strc_rendering_interlinespace\empty
+ % here the interline space is only set when style sets no space
+ \setfalse\fontattributeisset % use the currentfontparameter state instead
+ \setfalse\interlinespaceisset
+ \useheadstyleandcolor\c!style\c!color\relax
+ \ifconditional\fontattributeisset \ifconditional\interlinespaceisset \else
+ \setupinterlinespace
+ \fi \fi
+ \setfalse\fontattributeisset
+ \useheadstyleandcolor#1#2\relax
+ \ifconditional\fontattributeisset \ifconditional\interlinespaceisset \else
+ \setupinterlinespace
+ \fi \fi
\else
+ % here the set interline space overloads any other set space in the style
\setfalse\fontattributeisset
- \useheadstyleandcolor\c!style\c!color
+ \useheadstyleandcolor\c!style\c!color\relax
\ifconditional\fontattributeisset
- \setupspacing
+ \dosetupcheckedinterlinespace\p_strc_rendering_interlinespace
\fi
\setfalse\fontattributeisset
- \useheadstyleandcolor#1#2%
+ \useheadstyleandcolor#1#2\relax
\ifconditional\fontattributeisset
- \setupspacing
+ \dosetupcheckedinterlinespace\p_strc_rendering_interlinespace
\fi
\fi}
-\def\doplaceheadtextcomponent#1%
- {\begingroup
- \setupheadcomponentfont\c!textstyle\c!textcolor
- \headparameter\c!commandbefore
- \ifcsname\currentheadhash\c!deeptextcommand\endcsname
- \expandafter\let\expandafter\deepstructuretitlecommand\csname\currentheadhash\c!deeptextcommand\endcsname
- \fi
- \ifconditional\headisdisplay % \ifdisplaysectionhead
- % struts can be nilled with \setnostrut
- \headparameter\c!textcommand{\setstrut\begstrut#1\endstrut}%
- \xdef\localheadheight {\the\strutht}%
- \xdef\localheaddepth {\the\strutdp}%
- \xdef\localheadlineheight{\the\lineheight}%
- % == \globallet\localheaddepth\strutdepth
- \else
- \headparameter\c!textcommand{#1}%
- \fi
- \headparameter\c!commandafter
- \ifconditional\headisdisplay % \ifdisplaysectionhead
- \endgraf
- \fi
- \endgroup}
-
-\def\doplaceheadnumbercomponent#1%
- {\begingroup
- \setupheadcomponentfont\c!numberstyle\c!numbercolor
- \ifcsname\currentheadhash\c!deepnumbercommand\endcsname
- \expandafter\let\expandafter\deepstructurenumbercommand\csname\currentheadhash\c!deepnumbercommand\endcsname
- \fi
- \ifconditional\headisdisplay % \ifdisplaysectionhead
- % can be nilled with \setnostrut
- \headparameter\c!numbercommand{\setstrut\begstrut#1\endstrut}%
- \else
- \headparameter\c!numbercommand{#1}%
- \fi
- \endgroup}
-
-% \newif\ifheadnumbercontent
-% \newif\ifemptyhead
-% \newif\ifdisplaysectionhead
+\unexpanded\def\strc_rendering_initialize_style_and_color_inline#1#2%
+ {\dontconvertfont
+ \setfalse\fontattributeisset
+ \useheadstyleandcolor\c!style\c!color\relax
+ \ifconditional\fontattributeisset
+ \updateraggedskips % \setupspacing
+ \fi
+ \setfalse\fontattributeisset
+ \useheadstyleandcolor#1#2\relax
+ \ifconditional\fontattributeisset
+ \updateraggedskips % \setupspacing
+ \fi}
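+
+% A hedged usage sketch (these are the head parameters consulted above;
+% 'chapter' is just an example head):
+%
+% \setuphead[chapter][style=\tfc,textstyle=\bf,interlinespace=16pt]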
\let\currentstructurereferenceattribute\attributeunsetvalue
@@ -142,42 +112,45 @@
% maybe auto: backreference when given, else list
\fi\fi}
-\unexpanded\def\placecurrentheadtext
- {\beginheadplacement
+% a bit messy ... empty in place instead of self .. might change (or use a special
+% whatsit (invisible user one))
+
+\unexpanded\def\strc_rendering_place_head_text
+ {\strc_rendering_start_placement
\setheadmarking
\doresettructureheadnumbercontent
- \ifconditional\headleaveempty
- \setbox\sectionheadbox\hbox \headreferenceattributes to \zeropoint{}%
- \makestrutofbox\sectionheadbox
+ \ifconditional\c_strc_sectioning_empty
+ \setbox\b_strc_rendering_head\hbox \headreferenceattributes to \zeropoint{\strut}%
\else
\docheckheadreference
- \setbox\sectionheadbox\hbox \headreferenceattributes
+ \setbox\b_strc_rendering_head\hbox \headreferenceattributes
{\spac_grids_set_local_snapping{\headparameter\c!internalgrid}%
\doresettructureheadnumbercontent
\useheadstyleparameter\c!style
\setinlineheadreferenceattributes
- \headparameter\c!command{}{\doplaceheadtextcomponent\getheadtitle}}%
+ \strc_rendering_inject_text}%
\fi
- \endheadplacement{\getheadsyncs}}
+ \strc_rendering_stop_placement}
-\unexpanded\def\placecurrentheadnumbertext
- {\beginheadplacement
+\unexpanded\def\strc_rendering_place_head_number_and_text
+ {\strc_rendering_start_placement
\setheadmarking
- \doiftextelse{\getheadnumber}\dosettructureheadnumbercontent\doresettructureheadnumbercontent
- \ifconditional\headleaveempty
- \setbox\sectionheadbox\hbox \headreferenceattributes to \zeropoint{}%
- \makestrutofbox\sectionheadbox
+ \doiftextelse\getheadnumber
+ \dosettructureheadnumbercontent
+ \doresettructureheadnumbercontent
+ \ifconditional\c_strc_sectioning_empty
+ \setbox\b_strc_rendering_head\hbox \headreferenceattributes to \zeropoint{\strut}%
\else % = needed
\docheckheadreference
- \setbox\sectionheadbox\hbox \headreferenceattributes
+ \setbox\b_strc_rendering_head\hbox \headreferenceattributes
{\spac_grids_set_local_snapping{\headparameter\c!internalgrid}%
\useheadstyleparameter\c!style
\setinlineheadreferenceattributes
- \headparameter\c!command{\doplaceheadnumbercomponent\getheadnumber}{\doplaceheadtextcomponent\getheadtitle}}%
+ \strc_rendering_inject_number_and_text}%
\fi
- \endheadplacement{\getheadsyncs}}
+ \strc_rendering_stop_placement}
-\unexpanded\def\placecurrentheadempty
+\unexpanded\def\strc_rendering_place_head_empty
{\hbox \headreferenceattributes {\getheadsyncs}}
%D \starttyping
@@ -192,54 +165,96 @@
%D \MySubject{feeling stretched feeling stretched feeling stretched feeling stretched}
%D \stoptyping
-\let\headlastlinewidth\!!zeropoint
+% helpers
-\def\localheadheight {\strutht}
-\def\localheaddepth {\strutdp}
-\def\localheadlineheight{\lineheight}
+% \defineinmargin [ChapterInMargin] [outer] [normal] [distance=0.3em]
+%
+% \defineheadplacement[MyTest][horizontal]#1#2%
+% {\startlocalheadsetup
+% %\ChapterInMargin{\headhbox{\strut#2}}% proper destination, ref okay
+% \ChapterInMargin{\strut#2}% zero destination, ref okay
+% \stoplocalheadsetup}
+%
+% \setuphead
+% [chapter]
+% [alternative=MyTest]
+
+\unexpanded\def\headhbox{\hbox\headreferenceattributes}
+\unexpanded\def\headvbox{\vbox\headreferenceattributes}
-\def\dolocalheadsetup % koppeling met standaard kopcommando / engels
- {\forgetall % traag dus ...
- \doifsomething{\headparameter\c!align } {\normalexpanded{\noexpand\setupalign [\headparameter\c!align ]}}%
- \doifsomething{\headparameter\c!tolerance} {\normalexpanded{\noexpand\setuptolerance[\headparameter\c!tolerance]}}%
- \doif {\headparameter\c!strut }\v!no{\setnostrut}% new
- \def\\{\crlf\strut\ignorespaces}}
+\unexpanded\def\startlocalheadsetup{\bgroup\strc_rendering_initialize_spacing}
+\unexpanded\def\stoplocalheadsetup {\egroup}
+
+\unexpanded\def\strc_rendering_initialize_spacing
+ {\forgetall % local !
+ \edef\p_align{\headparameter\c!align}%
+ \ifx\p_align\empty \else
+ \setupalign[\p_align]%
+ \fi
+ \edef\p_tolerance{\headparameter\c!tolerance}%
+ \ifx\p_tolerance\empty \else
+ \setuptolerance[\p_tolerance]%
+ \fi
+ \edef\p_strut{\headparameter\c!strut}%
+ \ifx\p_strut\v!no
+ \setnostrut
+ \fi
+ \let\\\strc_rendering_shortcut_backslash}
-\def\beginheadplacement
+\unexpanded\def\strc_rendering_shortcut_backslash
+ {\crlf
+ \strut
+ \ignorespaces}
+
+\def\strc_rendering_start_placement
{\bgroup
\setsystemmode\currenthead
- \xdef\localheadheight {\the\strutht}%
- \xdef\localheaddepth {\the\strutdp}%
- \xdef\localheadlineheight{\the\lineheight}%
- % == \globallet\localheaddepth\strutdp
- \everypar\emptytoks % needed indeed
- \noindent % ipv \whitespace elders, na \forgetall !
+ %
+ \strc_rendering_initialize_alternatives
+ \strc_rendering_initialize_dimensions
+ %
+ \strc_rendering_initialize_line_state
+ \reseteverypar % needed indeed
+    \noindent % instead of \whitespace elsewhere, after \forgetall !
\bgroup
- \doifinsetelse{\headparameter\c!aligntitle}{\v!yes,\v!float}% new
- {\skip0 1\leftskip
- \skip2 1\rightskip
- \xdef\localheadskip{\the\skip0}%
- \forgetall
- \leftskip\skip0
- \rightskip\skip2
- \setlocalhsize\hsize\localhsize
- \forgetbothskips}
- {\globallet\localheadskip\!!zeropoint
- \forgetall}%
+ \edef\p_aligntitle{\headparameter\c!aligntitle}%
+ \ifx\p_aligntitle\v!yes
+ \strc_rendering_initialize_hsize_local
+ \else\ifx\p_aligntitle\v!float
+ \strc_rendering_initialize_hsize_local
+ \else
+ \strc_rendering_initialize_hsize_global
+ \fi\fi
\setfalse\inhibitmargindata % brrrr is set in forgetall
\dontcomplain
\postponenotes
- \iflocation
- \ifconditional\headisdisplay % \ifdisplaysectionhead
- \else
- % obsolete: \noninterferingmarks
- \fi
- \fi
- \resetinteractionparameter\c!style
+ \strc_rendering_initialize_interaction
+ % delayed
+ \let\localheadsetup \strc_rendering_initialize_spacing % historic name
+ \let\headsetupspacing\strc_rendering_initialize_spacing}
+
+\def\strc_rendering_initialize_hsize_local
+ {\global\d_strc_rendering_local_leftoffset \leftskip
+ \global\d_strc_rendering_local_rightoffset\rightskip
+ % \forgetall
+ % \leftskip \d_strc_rendering_local_leftoffset % no stretch
+ % \rightskip\d_strc_rendering_local_rightoffset % no stretch
+ % \setlocalhsize
+ % \hsize\localhsize
+ % \forgetbothskips}
+ \scratchwidth\availablehsize
+ \forgetall
+ \hsize\scratchwidth}
+
+\def\strc_rendering_initialize_hsize_global
+ {\global\d_strc_rendering_local_leftoffset \zeropoint
+ \global\d_strc_rendering_local_rightoffset\zeropoint
+ \forgetall}
+
+\def\strc_rendering_initialize_interaction
+ {\resetinteractionparameter\c!style
\resetinteractionparameter\c!color
- \resetinteractionparameter\c!contrastcolor
- %\strictouterreferencestrue % tzt instelling
- \let\localheadsetup\dolocalheadsetup}
+ \resetinteractionparameter\c!contrastcolor}
% \setuphead[chapter] [style=\bfd,after=,hang=line] % fit broad 2
% \setuphead[section] [style=\bfc,after=,hang=line]
@@ -259,87 +274,105 @@
% \subsubsection {Test\\Test} \input tufte \page
% \subsubsubsection{Test\\Test} \input tufte \page
-\def\hangheadplacement
- {\scratchdimen\localheadlineheight
- \bgroup
- \openlineheight\scratchdimen
- \scratchdimen\htdp0%
- \getnoflines\scratchdimen
- \advance\noflines\minusone
- \normalexpanded{\egroup\noflines\the\noflines}% brrr
- \setbox0\hbox{\lower\noflines\scratchdimen\box0}%
- \scratchdimen\dimexpr\htdp0-\localheadheight+\strutdp\relax
- \ht0 \strutht
- \dp0 \strutdp
- \edef\localheaddepth{\the\strutdp}}
-
-\newconditional\continuoussectionhead % oeps, \newif\ifcontinuoushead got lost
-\newbox\sectionheadbox
-
-\def\endheadplacement#1%
- {\noflines\zerocount
- \ifconditional\headisdisplay % \ifdisplaysectionhead
- % new (todo tight == one following line up)
- \processaction
- [\headparameter\c!hang]
- [ \v!line=>\hangheadplacement\noflines\zerocount,
- \v!broad=>\hangheadplacement\getnoflines\scratchdimen,
- \v!fit=>\hangheadplacement\getrawnoflines\scratchdimen,
- \v!none=>\noflines\zerocount,
- \v!default=>\noflines\zerocount,
- \v!unknown=>\hangheadplacement\noflines\numexpr0\commalistelement-1\relax]%
- % so far
- \let\headlastlinewidth\!!zeropoint
+\newdimen\d_strc_rendering_hang_height
+\newcount\n_strc_rendering_hang_lines
+
+\newdimen\d_strc_rendering_local_height
+\newdimen\d_strc_rendering_local_depth
+\newdimen\d_strc_rendering_local_lineheight
+
+\def\strc_rendering_initialize_line_state
+ {\global\d_strc_rendering_local_height\strutht
+ \global\d_strc_rendering_local_depth\strutdp
+ \global\d_strc_rendering_local_lineheight\lineheight}
+
+\def\strc_rendering_check_hang
+ {\begingroup
+ \openlineheight\d_strc_rendering_local_lineheight
+ \d_strc_rendering_hang_height\htdp\b_strc_rendering_head
+ \getnoflines\d_strc_rendering_hang_height
+ \normalexpanded{\endgroup\n_strc_rendering_hang_lines\the\numexpr\noflines-\plusone\relax}% brrr
+ \setbox\b_strc_rendering_head\hbox{\lower\n_strc_rendering_hang_lines\d_strc_rendering_hang_height\box\b_strc_rendering_head}%
+ \d_strc_rendering_hang_height\dimexpr\htdp\b_strc_rendering_head-\d_strc_rendering_local_height+\strutdp\relax
+ \ht\b_strc_rendering_head\strutht
+ \dp\b_strc_rendering_head\strutdp
+ \d_strc_rendering_local_depth\strutdp}
+
+\installcorenamespace{headplacementcheckhang}
+
+\setvalue{\??headplacementcheckhang\v!line }{\strc_rendering_check_hang
+ \n_strc_rendering_hang_lines\zerocount}
+\setvalue{\??headplacementcheckhang\v!broad }{\strc_rendering_check_hang
+ \getnoflines\d_strc_rendering_hang_height}
+\setvalue{\??headplacementcheckhang\v!fit }{\strc_rendering_check_hang
+ \getrawnoflines\d_strc_rendering_hang_height}
+\setvalue{\??headplacementcheckhang\v!none }{\n_strc_rendering_hang_lines\zerocount}
+\setvalue{\??headplacementcheckhang }{\n_strc_rendering_hang_lines\zerocount}
+\setvalue{\??headplacementcheckhang\s!unknown}{\strc_rendering_check_hang
+ \n_strc_rendering_hang_lines\numexpr\headparameter\c!hang-\plusone\relax}
+
+\def\strc_rendering_initialize_line_hang
+ {\ifconditional\headisdisplay
+ \expandnamespaceparameter\??headplacementcheckhang\headparameter\c!hang\s!unknown
+ \relax
+ \else
+ \n_strc_rendering_hang_lines \zerocount
+ \d_strc_rendering_hang_height\zeropoint
+ \fi}
+
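+% A hedged usage sketch of the hang values handled above (line, broad, fit,
+% none, or a number of lines via the unknown branch):
+%
+% \setuphead[chapter][hang=line]
+% \setuphead[chapter][hang=2]
+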
+\def\strc_rendering_stop_placement
+ {\n_strc_rendering_hang_lines\zerocount
+ \ifconditional\headisdisplay
+ \strc_rendering_initialize_line_hang
    % kind of special, we want to snap heads also according to local specs
\ifgridsnapping
\hbox\bgroup % extra hbox will trigger global snapper on top of local
- \edef\currentheadgridsnapping{\headparameter\c!grid}%
+ \edef\p_grid{\headparameter\c!grid}%
\ifconditional\headisdisplay
- \ifx\currentheadgridsnapping\empty\else
+ \ifx\p_grid\empty\else
\useheadstyleandcolor\c!style\c!color
\setupinterlinespace
\useheadstyleandcolor\c!textstyle\c!textcolor
\setupinterlinespace
\fi
\fi
- \snaptogrid[\currentheadgridsnapping]\hbox
- {\hskip\localheadskip\hskip\headparameter\c!margin\box\sectionheadbox}%
+ \snaptogrid[\p_grid]\hbox
+ {\hskip\dimexpr\d_strc_rendering_local_leftoffset+\headparameter\c!margin\relax\box\b_strc_rendering_head}%
\egroup
\else
\hbox
- {\hskip\localheadskip\hskip\headparameter\c!margin\box\sectionheadbox}%
+ {\hskip\dimexpr\d_strc_rendering_local_leftoffset+\headparameter\c!margin\relax\box\b_strc_rendering_head}%
\fi
\flushnotes % new, not really needed
\endgraf
\ifvmode
- \ifnum\noflines>\zerocount
- \dorecurse\noflines{\nointerlineskip\dosomebreak\nobreak\strut\endgraf}% to be checked
+ \ifnum\n_strc_rendering_hang_lines>\zerocount
+ \dorecurse\n_strc_rendering_hang_lines{\nointerlineskip\dosomebreak\nobreak\strut\endgraf}% to be checked
\fi
\nointerlineskip
\dosomebreak\nobreak
\fi
- #1%
+ \getheadsyncs
\else
\strut
\flushnotes % new, here since we're in par mode
- \unhbox\sectionheadbox
- \globallet\headlastlinewidth\!!zeropoint
- #1%
- \hskip\numberheaddistance\!!plus\numberheaddistance\!!minus.25\dimexpr\numberheaddistance\relax
- \hskip\continuousheadsignal\ignorespaces
+ \unhbox\b_strc_rendering_head
+ \getheadsyncs
+ \hskip\headnumberdistance\s!plus\headnumberdistance\s!minus.25\dimexpr\headnumberdistance\relax
+ \strc_sectioning_inject_continuous_signal
\fi
- \ifconditional\headisdisplay % \ifdisplaysectionhead
+ \ifconditional\headisdisplay
\ifvmode
\ifgridsnapping % important, font related depth, see comment
\prevdepth\strutdp
\else
- \prevdepth\localheaddepth
+ \prevdepth\d_strc_rendering_local_depth
\fi
\fi
\fi
\egroup
\egroup
- \ifconditional\headisdisplay % \ifdisplaysectionhead
+ \ifconditional\headisdisplay
\useindentnextparameter\headparameter
\else
\nonoindentation % recently added, was a bug
@@ -359,187 +392,356 @@
% \input tufte \par
% \stopnarrower
-\installcorenamespace{headplacementalternative}
-\installcorenamespace{headplacementnature}
-
-\let\numberheadalternative\v!normal
-
-\unexpanded\def\defineheadplacement
- {\dodoubleargument\dodefineheadplacement}
-
-% \def\dodefineheadplacement[#1][#2]% #3#4
-% {\setvalue{\??headplacementnature#1}{#2}%
-% \setvalue{\??headplacementalternative#1}}
-
% \dodefineheadplacement[sectiona][vertical]{#1->#2}
% \dodefineheadplacement[sectionb][vertical]#1#2{#1->#2}
%
% \setuphead[section][alternative=sectiona]
% \setuphead[subsection][alternative=sectionb]
-\def\dodefineheadplacementyes[#1][#2]%#3#4%
- {\setvalue{\??headplacementnature#1}{#2}%
- \setvalue{\??headplacementalternative#1}##1##2}
+% \startsetups[\??headrenderings:\v!vertical:\v!sectiona]
+% ... there will be a more public namespace
+% \stopsetups
-\def\dodefineheadplacementnop[#1][#2]%
- {\setvalue{\??headplacementnature#1}{#2}%
- \setvalue{\??headplacementalternative#1}}
+\installcorenamespace{headplacementalternative}
+\installcorenamespace{headrenderings}
+\installcorenamespace{headalternative}
-\def\dodefineheadplacement[#1][#2]%
- {\doifnextbgroupelse
- {\dodefineheadplacementyes[#1][#2]}%
- {\dodefineheadplacementnop[#1][#2]}}
+\installcommandhandler \??headalternative {headalternative} \??headalternative % or just \??head
-\def\presetnumberheadalternative
- {\doifelsevalue{\??headplacementnature\numberheadalternative}\v!horizontal\setfalse\settrue\headisdisplay}
+\setupheadalternative
+ [%\c!width=\headparameter\c!width,
+ %\c!distance=\headparameter\c!distance,
+ \c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\currentheadalternative]
-\def\normalplacehead
- {\csname\??headplacementalternative\ifcsname\??headplacementalternative\numberheadalternative\endcsname\numberheadalternative\else\v!normal\fi\endcsname}
+\let\currentheadalternative \v!normal
+\let\currentheadrenderingsetup \empty
+\let\currentheadrenderingalternative\v!vertical
-\def\setheaddisplay % used in strc-sec
- {\doifelsevalue{\??headplacementnature\headparameter\c!alternative}\v!horizontal
- {\setfalse\headisdisplay}
- {\settrue \headisdisplay}}
+\unexpanded\def\defineheadplacement % old mechanism
+ {\dodoubleargument\strc_rendering_define_placement}
-\defineheadplacement[\v!paragraph][\v!vertical]#1#2%
- {\vbox
- {\localheadsetup
- \begstrut
- \ifconditional\headshownumber % \ifheadnumbercontent
- #1\hskip\numberheaddistance
- \fi
- #2}}
+\def\strc_rendering_define_placement[#1][#2]%
+ {\doifnextbgroupelse
+ {\strc_rendering_define_placement_yes[#1][#2]}%
+ {\strc_rendering_define_placement_nop[#1][#2]}}
-% \setuphead
-% [chapter]
-% [numberwidth=2cm,hang=line,after={\blank[3*line]}]
+\def\strc_rendering_define_placement_yes[#1][#2]%
+ {\defineheadalternative[#1][\c!alternative=#2,\c!renderingsetup=\??headrenderings:\v!command]%
+ \setuvalue{\??headplacementalternative#1}##1##2}
+
+\def\strc_rendering_define_placement_nop[#1][#2]%
+ {\defineheadalternative[#1][\c!alternative=#2,\c!renderingsetup=\??headrenderings:\v!command]%
+ \setuvalue{\??headplacementalternative#1}}
+
+% these can be used in setups:
%
-% \chapter{Oeps oeps oeps} \input tufte \section{Oeps}
-% \chapter{Oeps oeps oeps} \section{Oeps} \input tufte
+% \headnumbercontent
+% \headtextcontent
+%
+% \headwidth
+% \headtextwidth
+% \headnumberdistance
+% \headnumberwidth
+% \headsetupspacing
+%
+% \headshownumber
+% \headisdisplay
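+%
+% A minimal sketch of a custom alternative built with these helpers (the name
+% 'myhead' is hypothetical; the pattern mirrors the built-in setups below):
+%
+% \defineheadalternative
+%   [myhead]
+%   [\c!alternative=\v!vertical,
+%    \c!renderingsetup=\??headrenderings:myhead]
+%
+% \startsetups[\??headrenderings:myhead]
+%   \vbox {
+%     \headsetupspacing
+%     \ifconditional\headshownumber
+%       \headnumbercontent
+%       \hskip\headnumberdistance
+%     \fi
+%     \headtextcontent
+%   }
+% \stopsetups
+%
+% \setuphead[section][alternative=myhead]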
-\defineheadplacement[\v!normal][\v!vertical]#1#2%
- {\vbox
- {\localheadsetup
- \edef\headwidth {\headparameter\c!width }%
- \edef\headnumberwidth{\headparameter\c!numberwidth}%
- \edef\headtextwidth {\headparameter\c!textwidth }%
- \ifconditional\headshownumber
- \ifx\headwidth\empty
- \else
- \ifx\headnumberwidth\empty
- \ifx\headtextwidth\empty\else
- \edef\headnumberwidth{\the\dimexpr\headwidth-\headtextwidth\relax}%
- \fi
- \else
- \ifx\headtextwidth\empty
- \edef\headtextwidth{\the\dimexpr\headwidth-\headnumberwidth\relax}%
- \fi
- \fi
- \hsize\headwidth
- \fi
- \ifx\headnumberwidth\empty\else
- \let\numberheaddistance\!!zeropoint
- \fi
- \setbox\scratchbox\hbox \ifx\headnumberwidth\empty\else to \headnumberwidth\fi{{#1}}%
- \scratchdimen\dimexpr\wd\scratchbox+\numberheaddistance\relax
- \ifx\headtextwidth\empty\else
- \hsize\dimexpr\scratchdimen+\headparameter\c!textwidth\relax
- \fi
- \hangindent\scratchdimen
- \hangafter \plusone
- \noindent
- \box\scratchbox\hskip\numberheaddistance
- \else
- \ifx\headtextwidth\empty
- \ifx\headwidth\empty
- \else
- \hsize\headwidth
- \fi
- \else
- \hsize\headtextwidth
- \fi
- \noindent
- \fi
- #2}}
-
-% \unexpanded\def\placeheadmarginalternative#1#2%
-% {\vbox
-% {\localheadsetup
-% \begstrut % use one \strut here!
-% \dontleavehmode % in case there is no strut, else side effects with llap
-% \ifconditional\headshownumber
-% \llap{\hbox{\hfill{#1}\hskip\localheadskip\hskip\leftmargindistance}}% introduces whitespace
-% % maybe better:
-% % \inleftmargin{\hbox{\hss{#1}\hskip\localheadskip}}%
-% \fi
-% {#2}}}
-
-\unexpanded\def\placeheadmarginalternative#1#2%
- {\vbox
- {\localheadsetup
- \begstrut % use one \strut here!
- \dontleavehmode % in case there is no strut, else side effects with llap
- \ifconditional\headshownumber
- \llap{\hbox{\hfill{#1}\hskip\dimexpr\localheadskip+\doifoddpageelse\leftmargindistance\rightmargindistance\relax}}% introduces whitespace
- \fi
- {#2}}}
-
-\defineheadplacement[\v!inmargin][\v!vertical]#1#2{\placeheadmarginalternative{#1}{#2}}
-\defineheadplacement[\v!margin] [\v!vertical]#1#2{\placeheadmarginalternative{#1}{#2}}
-
-\defineheadplacement[\v!middle][\v!vertical]#1#2%
- {\vbox
- {\localheadsetup
- \veryraggedcenter
- \let\\\endgraf
- \let\crlf\endgraf
- \ifconditional\headshownumber
- \strut#1\par
- \fi
- \begstrut#2}}
-
-\defineheadplacement[\v!text][\v!horizontal]#1#2%
- {\bgroup
- \localheadsetup % no stretch in distance
- \ifconditional\headshownumber
- {#1}\kern\numberheaddistance
+\let\headnumbercontent\empty
+\let\headtextcontent \empty
+
+\newdimen\headwidth
+\newdimen\headtextwidth
+\newdimen\headnumberdistance
+\newdimen\headnumberwidth
+
+% \newconditional\headshownumber % defined already
+% \newconditional\headisdisplay % defined already
+
+\unexpanded\def\strc_rendering_initialize_alternatives
+ {\edef\currentheadalternative{\headparameter\c!alternative}%
+ \ifcsname\currentheadalternativehash\s!parent\endcsname \else
+ \let\currentheadalternative\v!normal % cf. mkii
+ \fi
+ \edef\currentheadrenderingsetup{\headalternativeparameter\c!renderingsetup}%
+ \edef\currentheadrenderingalternative{\headalternativeparameter\c!alternative}%
+ \ifx\currentheadrenderingalternative\empty
+ \let\currentheadrenderingalternative\v!vertical
\fi
- {\begstrut#2}%
- \egroup}
-
-\unexpanded\def\placeheadlohialternative#1#2#3%
- {\ifconditional\headshownumber
- \setbox0\hbox{#2}
- \setbox2=#1{\localheadsetup\advance\hsize-\wd0\relax#3}%
- \hbox{\box0\hskip\numberheaddistance\box2}%
+ \ifx\currentheadrenderingalternative\v!horizontal
+ \global\setfalse\headisdisplay % global
\else
- #1{\localheadsetup\noindent#3}%
+ \global\settrue\headisdisplay % global
\fi}
-% onder/boven lijnt het nummer op de onderste/bovenste regel
-% uit van een meerregelige kop
+\unexpanded\def\strc_rendering_initialize_dimensions
+ {\headwidth \headparameter\c!width \relax % \zeropoint == unset
+ \headnumberwidth \headparameter\c!numberwidth\relax % \zeropoint == unset
+ \headnumberdistance\headparameter\c!distance \relax
+ \headtextwidth \headparameter\c!textwidth \relax} % \zeropoint == unset
-\defineheadplacement[\v!bottom][\v!vertical]#1#2{\placeheadlohialternative\vbox{#1}{#2}}
-\defineheadplacement[\v!top] [\v!vertical]#1#2{\placeheadlohialternative\vtop{#1}{#2}}
+\unexpanded\def\headtextcontent
+ {\begingroup
+ \strc_rendering_initialize_style_and_color\c!textstyle\c!textcolor
+ \headparameter\c!commandbefore\relax
+ \ifcsname\currentheadhash\c!deeptextcommand\endcsname
+ \expandafter\let\expandafter\deepstructuretitlecommand\csname\currentheadhash\c!deeptextcommand\endcsname
+ \fi
+ \ifconditional\headisdisplay
+ % struts can be nilled with \setnostrut
+ \headparameter\c!textcommand{\setstrut\begstrut\getheadtitle\endstrut}%
+ \global\d_strc_rendering_local_height\strutht
+ \global\d_strc_rendering_local_depth\strutdp
+ \global\d_strc_rendering_local_lineheight\lineheight
+ \headparameter\c!commandafter\relax
+ \endgraf
+ \else
+ \headparameter\c!textcommand{\getheadtitle}%
+ \headparameter\c!commandafter\relax
+ \fi
+ \endgroup}
-% helpers
+\unexpanded\def\headnumbercontent
+ {\begingroup
+ \strc_rendering_initialize_style_and_color\c!numberstyle\c!numbercolor
+ \ifcsname\currentheadhash\c!deepnumbercommand\endcsname
+ \expandafter\let\expandafter\deepstructurenumbercommand\csname\currentheadhash\c!deepnumbercommand\endcsname
+ \fi
+ \ifconditional\headisdisplay
+ % can be nilled with \setnostrut
+ \headparameter\c!numbercommand{\setstrut\begstrut\getheadnumber\endstrut}%
+ \else
+ \headparameter\c!numbercommand{\getheadnumber}%
+ \fi
+ \endgroup}
-% \defineinmargin [ChapterInMargin] [outer] [normal] [distance=0.3em]
-%
-% \defineheadplacement[MyTest][horizontal]#1#2%
-% {\startlocalheadsetup
-% %\ChapterInMargin{\headhbox{\strut#2}}% proper destination, ref okay
-% \ChapterInMargin{\strut#2}% zero destination, ref okay
-% \stoplocalheadsetup}
+\unexpanded\def\strc_rendering_inject_number_and_text
+ {\edef\p_command{\headparameter\c!command}% assumes \unexpanded definition
+ \ifx\p_command\empty
+ \directsetup\currentheadrenderingsetup
+ \else
+ \p_command\headnumbercontent\headtextcontent
+ \fi}
+
+\unexpanded\def\strc_rendering_inject_text
+ {\edef\p_command{\headparameter\c!command}% assumes \unexpanded definition
+ \ifx\p_command\empty
+ \directsetup\currentheadrenderingsetup
+ \else
+ \p_command\empty\headtextcontent
+ \fi}
+
+\startsetups[\??headrenderings:\v!command]
+ \csname\??headplacementalternative\currentheadalternative\endcsname \headnumbercontent \headtextcontent
+\stopsetups
+
+% obsolete
%
+% \def\normalplacehead % hooks into \c!command
+% {\csname\??headplacementalternative\ifcsname\??headplacementalternative\currentheadalternative\endcsname\currentheadalternative\else\v!normal\fi\endcsname}
+
+\defineheadalternative
+ [\v!paragraph]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!paragraph]
+
+\startsetups[\??headrenderings:\v!paragraph]
+ \vbox {
+ \headsetupspacing
+ \begstrut
+ \ifconditional\headshownumber % \ifheadnumbercontent
+ \headnumbercontent
+ \hskip\headnumberdistance
+ \fi
+ \headtextcontent
+ }
+\stopsetups
+
% \setuphead
% [chapter]
-% [alternative=MyTest]
-
-\unexpanded\def\headhbox{\hbox\headreferenceattributes}
-\unexpanded\def\headvbox{\vbox\headreferenceattributes}
+% [numberwidth=2cm,hang=line,after={\blank[3*line]}]
+%
+% \chapter{Oeps oeps oeps} \input tufte \section{Oeps}
+% \chapter{Oeps oeps oeps} \section{Oeps} \input tufte
-\unexpanded\def\startlocalheadsetup{\bgroup\localheadsetup}
-\unexpanded\def\stoplocalheadsetup {\egroup}
+\defineheadalternative
+ [\v!normal]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!normal]
+
+\startsetups[\??headrenderings:\v!normal]
+ \vbox {
+ \headsetupspacing
+ \ifconditional\headshownumber
+ \ifdim\headwidth=\zeropoint \else
+ \ifdim\headnumberwidth=\zeropoint
+ \ifdim\headtextwidth=\zeropoint \else
+ \headnumberwidth\dimexpr\headwidth-\headtextwidth\relax
+ \fi
+ \else
+ \ifdim\headtextwidth=\zeropoint
+ \headtextwidth\dimexpr\headwidth-\headnumberwidth\relax
+ \fi
+ \fi
+ \hsize\headwidth
+ \fi
+ \ifdim\headnumberwidth=\zeropoint\else
+ \headnumberdistance\zeropoint
+ \fi
+ \setbox\scratchbox\hbox \ifdim\headnumberwidth=\zeropoint\else to \headnumberwidth\fi{\headnumbercontent}
+ \scratchdimen\dimexpr\wd\scratchbox+\headnumberdistance\relax
+ \ifdim\headtextwidth=\zeropoint\else
+ \hsize\dimexpr\scratchdimen+\headtextwidth\relax
+ \fi
+ \hangindent\scratchdimen
+ \hangafter \plusone
+ \noindent
+ \box\scratchbox
+ \hskip\headnumberdistance
+ \else
+ \ifdim\headtextwidth=\zeropoint
+ \ifdim\headwidth=\zeropoint \else
+ \hsize\headwidth
+ \fi
+ \else
+ \hsize\headtextwidth
+ \fi
+ \noindent
+ \fi
+ \headtextcontent
+ }
+\stopsetups
+
+\defineheadalternative
+ [\v!inmargin]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!inmargin]
+
+\startsetups[\??headrenderings:\v!inmargin]
+ \vbox {
+ \headsetupspacing
+ \begstrut % use one \strut here!
+ \dontleavehmode % in case there is no strut, else side effects with llap
+ \ifconditional\headshownumber
+ \llap {
+ \hbox {
+ \hfill
+ \headnumbercontent
+ \hskip\dimexpr\d_strc_rendering_local_leftoffset+\doifoddpageelse\leftmargindistance\rightmargindistance\relax
+ }
+ }
+ \fi
+ \headtextcontent
+ }
+\stopsetups
+
+\defineheadalternative
+ [\v!margin]
+ [\v!inmargin]
+
+% \startsetups[\??headrenderings:\v!vertical:\v!margin]
+% \directsetup{\??headrenderings:\v!vertical:\v!inmargin}
+% \stopsetups
+
+\defineheadalternative
+ [\v!middle]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!middle]
+
+\startsetups[\??headrenderings:\v!middle]
+ \vbox {
+ \headsetupspacing
+ \veryraggedcenter
+ \let\\\endgraf
+ \let\crlf\endgraf
+ \ifconditional\headshownumber
+ \strut
+ \headnumbercontent
+ \par
+ \fi
+ \begstrut
+ \headtextcontent
+ \endstrut
+ }
+\stopsetups
+
+\defineheadalternative
+ [\v!text]
+ [\c!alternative=\v!horizontal,
+ \c!renderingsetup=\??headrenderings:\v!text]
+
+\startsetups[\??headrenderings:\v!text]
+ \begingroup
+ \headsetupspacing % no stretch in distance
+ \ifconditional\headshownumber
+ \headnumbercontent
+ \kern\headnumberdistance
+ \fi
+ \begstrut
+ \headtextcontent
+ \endstrut
+ \endgroup
+\stopsetups
+
+% bottom/top aligns the number with the bottom/top line of a multi-line head
+
+\defineheadalternative
+ [\v!bottom]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!bottom]
+
+\startsetups[\??headrenderings:\v!bottom]
+ \ifconditional\headshownumber
+ \setbox0\hbox {
+ \headnumbercontent
+ }
+ \setbox2\vbox {
+ \headsetupspacing
+ \advance\hsize-\wd0\relax
+ \headtextcontent
+ }
+ \hbox {
+ \box0
+ \hskip\headnumberdistance
+ \box2
+ }
+ \else
+ \vbox {
+ \headsetupspacing
+ \noindent
+ \headtextcontent
+ }
+ \fi
+\stopsetups
+
+\defineheadalternative
+ [\v!top]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!top]
+
+\startsetups[\??headrenderings:\v!top]
+ \ifconditional\headshownumber
+ \setbox0\hbox {
+ \headnumbercontent
+ }
+ \setbox2\vtop {
+ \headsetupspacing
+ \advance\hsize-\wd0\relax
+ \headtextcontent
+ }
+ \hbox {
+ \box0
+ \hskip\headnumberdistance
+ \box2
+ }
+ \else
+ \vtop{
+ \headsetupspacing
+ \noindent
+ \headtextcontent
+ }
+ \fi
+\stopsetups
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-sbe.mkiv b/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
index a98b6788523..4ea08b30c2b 100644
--- a/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
@@ -27,78 +27,76 @@
% \chapter{second} \section{alpha} test \section{beta} test \page
% \stopbodymatter % optional
-\unexpanded\def\definesectionblock{\dotripleargument\dodefinesectionblock}
-\unexpanded\def\setupsectionblock {\dodoubleargument\dosetupsectionblock}
-\def\setsectionblock {\dosingleargument\dosetsectionblock}
-
-\def\sectionblockparameter#1%
- {\csname
- \ifcsname\??sb\currentsectionblock#1\endcsname\??sb\currentsectionblock#1\else\s!empty\fi
- \endcsname}
-
-\newtoks \everybeforesectionblock
-\newtoks \everyaftersectionblock
-
-\def\dodefinesectionblock[#1][#2][#3]% singular plural settings
- {\getparameters
- [\??sb#1]
- [\c!number=\v!yes,
- \c!page=\v!right, % anders worden marks te vroeg gereset !
- #3]%
- \expandafter\newif\csname if#2\endcsname % better a mode
- \setsectionblockenvironment{#1}\empty
- \setvalue {\e!start#2}{\startsectionblock[#1]}%
- \setvalue {\e!stop #2}{\stopsectionblock}}
+\installcorenamespace{sectionblock}
+
+\installcommandhandler \??sectionblock {sectionblock} \??sectionblock
+
+\setupsectionblock
+ [\c!number=\v!yes,
+ \c!page=\v!right] % otherwise marks are reset too soon
+
+\newtoks\everybeforesectionblock
+\newtoks\everyaftersectionblock
+
+\let\strc_sectionblock_define_normal\definesectionblock
+
+\unexpanded\def\definesectionblock
+ {\dotripleargument\strc_sectionblock_define}
+
+\def\strc_sectionblock_define[#1][#2][#3]% singular plural settings
+ {\strc_sectionblock_define_normal[#1][#3]%
+ \expandafter\newif\csname if#2\endcsname % obsolete
+ \strc_sectionblock_set_environment{#1}\empty
+ \setuvalue{\e!start#2}{\startsectionblock[#1]}%
+ \setuvalue{\e!stop #2}{\stopsectionblock}}
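+
+% A hedged usage sketch (the singular/plural names are hypothetical; the
+% settings are passed on to the sectionblock command handler defined above,
+% and \start<plural>/\stop<plural> are generated):
+%
+% \definesectionblock[extra][extras][number=no]
+%
+% \startextras
+%   \chapter{Some extra material}
+% \stopextras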
\ifdefined \resetallstructuremarks \else
\let\resetallstructuremarks\relax
\fi
+\def\strc_sectionblock_handle_page
+ {\edef\p_strc_sectionblock_page{\sectionblockparameter\c!page}%
+ \ifx\p_strc_sectionblock_page\empty \else
+ \page[\p_strc_sectionblock_page]%
+ \fi}
+
\appendtoks
- \doifsomething{\sectionblockparameter\c!page}{\page[\sectionblockparameter\c!page]}%
+ \strc_sectionblock_handle_page
\resetallstructuremarks
- \getsectionblockenvironment\currentsectionblock
+ \strc_sectionblock_get_environment\currentsectionblock
\sectionblockparameter\c!before % don't move
\dostarttagged\t!division\currentsectionblock
\to \everybeforesectionblock
\appendtoks
\sectionblockparameter\c!after % don't move
- \doifsomething{\sectionblockparameter\c!page}{\page[\sectionblockparameter\c!page]}%
+ \strc_sectionblock_handle_page
\dostoptagged
\resetallstructuremarks
\to \everyaftersectionblock
-\def\dosetupsectionblock[#1]%
- {\getparameters[\??sb#1]}% [#2]
+\unexpanded\def\setsectionblock
+ {\dosingleargument\strc_sectionblock_set}
-\def\dosetsectionblock[#1]% used to set the default
- {\edef\currentsectionblock{\ctxlua{structures.sections.setblock("#1")}}}
+\def\strc_sectionblock_set[#1]% used to set the default
+ {\edef\currentsectionblock{\ctxcommand{setsectionblock("#1")}}}
\let\currentsectionblock\empty % was \s!unknown
\unexpanded\def\startsectionblock[#1]%
{%\ctxlua{structures.counters.check(0)}% we assume sane usage of \page, as this is the only workable place (in push)
\begingroup
- \edef\currentsectionblock{\ctxlua{structures.sections.pushblock("#1")}}%
- \csname #1true\endcsname % for old times sake
+ \edef\currentsectionblock{\ctxcommand{pushsectionblock("#1")}}%
+ \csname #1true\endcsname % obsolete
\setsystemmode\currentsectionblock
\the\everybeforesectionblock\relax
\showmessage\m!structures1\currentsectionblock}
\unexpanded\def\stopsectionblock
- {\showmessage\m!structures2\currentsectionblock
- \the\everyaftersectionblock\relax
- \edef\currentsectionblock{\ctxlua{structures.sections.popblock()}}%
- \endgroup}
-
-\long\def\setsectionblockenvironment#1#2%
- {\long\setvalue{\??sb\s!do#1}{\do{#2}}}
-
-\def\getsectionblockenvironment#1%
- {\let\do\firstofoneargument
- %\sectionblockparameter{\s!do#1}}
- \csname\??sb\s!do#1\endcsname}
+ {\showmessage\m!structures2\currentsectionblock
+ \the\everyaftersectionblock\relax
+ \edef\currentsectionblock{\ctxcommand{popsectionblock()}}%
+ \endgroup}
%D \starttyping
%D \startsectionblockenvironment[frontpart]
@@ -120,10 +118,20 @@
%D \stoptext
%D \stoptyping
-\setvalue{\e!start\v!sectionblockenvironment}%
- {\dosingleargument\dostartsectionblockenvironment}
+\installcorenamespace{sectionblockenvironment}
+
+\unexpanded\def\strc_sectionblock_set_environment#1#2%
+ {\setvalue{\??sectionblockenvironment#1}{#2}}
+
+\unexpanded\def\strc_sectionblock_get_environment#1%
+ {\csname\??sectionblockenvironment#1\endcsname}
+
+\unexpanded\setvalue{\e!start\v!sectionblockenvironment}%
+ {\dosingleargument\strc_sectionblock_environment_start}
+
+\letvalue{\e!stop\v!sectionblockenvironment}\relax
-\def\dostartsectionblockenvironment[#1]% evt \pushendofline \popendofline
- {\grabuntil{\e!stop\v!sectionblockenvironment}{\setsectionblockenvironment{#1}}}
+\def\strc_sectionblock_environment_start[#1]% possibly \pushendofline \popendofline
+ {\grabuntil{\e!stop\v!sectionblockenvironment}{\strc_sectionblock_set_environment{#1}}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-sec.mkiv b/Master/texmf-dist/tex/context/base/strc-sec.mkiv
index e9b613f219f..dd51451b9b1 100644
--- a/Master/texmf-dist/tex/context/base/strc-sec.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-sec.mkiv
@@ -15,7 +15,233 @@
\unprotect
-\ifdefined \v!block \else \def\v!block{block} \fi
+\installcorenamespace{structure}
+
+\installdirectcommandhandler \??structure {structure} % unchecked, so we need to initialize used parameters
+
+\setupstructure % not a user command so we might need to change the name
+ [\c!number=,
+ \c!level=,
+ \c!name=,
+ \c!title=,
+ \c!bookmark=,
+ \c!marking=,
+ \c!list=,
+ \c!label=,
+ \c!coupling=,
+ \c!ownnumber=,
+ \c!sectionseparatorset=\s!default,
+ \c!sectionconversionset=\s!default,
+ \c!sectionstopper=,
+ \c!sectionstarter=,
+ \c!sectionsegments=,
+ \c!sectionresetset=,
+ \c!reference=,
+ \c!backreference=,
+ \c!expansion=\v!no,
+ \c!xmlsetup=,
+ \s!catcodes=,
+ \c!saveinlist=\v!yes]
+
+% maybe flags for list, bm, mark
+
+\def\m_strc_references_prefix_yes{+}
+\def\m_strc_references_prefix_nop{-}
+
+\def\strc_sectioning_set_reference_prefix
+ {\ifx\currentstructurereferenceprefix\empty
+ % nothing
+ \else\ifx\currentstructurereferenceprefix\m_strc_references_prefix_yes
+ \global\advance\prefixcounter \plusone % temp here
+ \setupglobalreferenceprefix[\the\prefixcounter]%
+ \else\ifx\currentstructurereferenceprefix\m_strc_references_prefix_nop
+ \setupglobalreferenceprefix[]%
+ \else
+ \setupglobalreferenceprefix[\currentstructurereferenceprefix]%
+ \fi\fi\fi
+ \let\currentstructurereferenceprefix\referenceprefix}
+
+% why xdef ?
+
+\setupstructure
+ [\c!label={\headparameter{\currentsectionblock\c!label}},
+ \c!incrementnumber=\ifconditional\c_strc_sectioning_increment\v!yes\else\v!no\fi, % not that needed
+ \c!saveinlist=\ifconditional\c_strc_sectioning_to_list\v!yes\else\v!no\fi,
+ \c!level=\currentheadlevel,
+ \c!number=\ifconditional\c_strc_sectioning_increment\ifconditional\headshownumber\v!yes\else\v!no\fi\else\v!no\fi,
+ \c!expansion=\headparameter\c!expansion,
+ \c!xmlsetup=\headparameter\c!xmlsetup,
+ \s!catcodes=\headparameter\s!catcodes,
+ \c!sectionresetset=\headparameter\c!sectionresetset,
+ \c!sectionseparatorset=\headparameter\c!sectionseparatorset,
+ \c!sectionconversionset=\headparameter\c!sectionconversionset,
+ \c!sectionconversion=\headparameter\c!conversion, % just for compatibility
+ \c!sectionstarter=\headparameter\c!sectionstarter,
+ \c!sectionstopper=\headparameter\c!sectionstopper,
+ \c!sectionset=\headparameter\c!sectionset,
+ \c!sectionsegments=\headparameter\c!sectionsegments,
+ \c!reference=\headparameter\c!reference,
+ \c!referenceprefix=\headparameter\c!referenceprefix]
+
+% see lists/neat-001.tex for usage of:
+
+\def\namedstructureheadlocation#1% expandable, maybe [#1]
+ {\csname\??savedinternalreference\ifcsname\??savedinternalreference#1\endcsname#1\else\s!default\fi\endcsname}
+
+\unexpanded\def\strc_sectioning_register#1#2#3% #1=interfaced-settings, #2=optional user data (not yet supported)
+ {\begingroup
+ \setupstructure[\c!name={#1},#2]%
+ \xdef\currentstructurename {\structureparameter\c!name}%
+ \xdef\currentstructurecoupling {\structureparameter\c!coupling}%
+ \xdef\currentstructureownnumber {\structureparameter\c!ownnumber}% optional own number
+ \xdef\currentstructurelevel {\structureparameter\c!level}%
+ \edef\currentstructureexpansion {\structureparameter\c!expansion}%
+ \xdef\currentstructurexmlsetup {\structureparameter\c!xmlsetup}%
+ \xdef\currentstructurecatcodes {\structureparameter\s!catcodes}%
+ \xdef\currentstructurelabel {\structureparameter\c!label}%
+ \xdef\currentstructurereference {\structureparameter\c!reference}%
+ \xdef\currentstructurereferenceprefix{\structureparameter\c!referenceprefix}%
+ \xdef\currentstructurebackreference {\structureparameter\c!backreference}%
+ \xdef\currentstructureshownumber {\structureparameter\c!number}%
+ \xdef\currentstructuresaveinlist {\structureparameter\c!saveinlist}%
+ \xdef\currentstructureincrementnumber{\structureparameter\c!incrementnumber}%
+ \ifx\currentstructureexpansion\s!xml
+ \xmlstartraw
+ \xdef\currentstructuretitle {\structureparameter\c!title}%
+ \xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
+ \xdef\currentstructuremarking {\structureparameter\c!marking}%
+ \xdef\currentstructurelist {\structureparameter\c!list}%
+ \xmlstopraw
+ \ifx\currentstructurelist\empty
+ \globallet\currentstructurelist\currentstructuretitle
+ \fi
+ \globallet\currentstructurecoding\s!xml
+ \else
+ \ifx\currentstructureexpansion\v!yes
+ \xdef\currentstructuretitle {\structureparameter\c!title}%
+ \xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
+ \xdef\currentstructuremarking {\structureparameter\c!marking}%
+ \xdef\currentstructurelist {\structureparameter\c!list}%
+ \else
+ \xdef\currentstructuretitle {\detokenizedstructureparameter\c!title}%
+ \xdef\currentstructurebookmark{\detokenizedstructureparameter\c!bookmark}%
+ \xdef\currentstructuremarking {\detokenizedstructureparameter\c!marking}%
+ \xdef\currentstructurelist {\detokenizedstructureparameter\c!list}%
+ \iflocation \ifx\currentstructurebookmark\empty
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
+ \endgroup
+ \fi \fi
+ \fi
+ \ifx\currentstructurelist\empty
+ \globallet\currentstructurelist\currentstructuretitle
+ \fi
+ \globallet\currentstructurecoding\s!tex
+ \fi
+ \setnextinternalreference
+ \storeinternalreference\currentstructurename\nextinternalreference %
+ \strc_sectioning_set_reference_prefix
+ \xdef\currentstructurenumber{\ctxlua{ % todo: combine with next call, adapt marks accordingly
+ structures.sections.somelevel {
+ references = {
+ internal = \nextinternalreference,
+ block = "\currentsectionblock",
+ reference = "\currentstructurereference",
+ referenceprefix = "\currentstructurereferenceprefix",
+ backreference = "\currentstructurebackreference",
+ },
+ directives = {
+ resetset = "\structureparameter\c!sectionresetset",
+ },
+ metadata = {
+ kind = "section",
+ name = "\currentstructurename",
+ catcodes = \the\ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi,
+ coding = "\currentstructurecoding",
+ \ifx\currentstructurecoding\s!xml
+ xmlroot = "\xmldocument",
+ \fi
+ \ifx\currentstructurexmlsetup\empty \else
+ xmlsetup = "\currentstructurexmlsetup",
+ \fi
+ \ifx\currentstructuresaveinlist\v!no
+ nolist = true,
+ \fi
+ \ifx\currentstructureincrementnumber\v!yes
+ increment = "\currentstructureincrementnumber",
+ \fi
+ },
+ titledata = { % we can add mark and reference
+ label = \!!bs\detokenize\expandafter{\currentstructurelabel }\!!es,
+ title = \!!bs\detokenize\expandafter{\currentstructuretitle }\!!es,
+ \ifx\currentstructurebookmark\currentstructuretitle \else
+ bookmark = \!!bs\detokenize\expandafter{\currentstructurebookmark }\!!es,
+ \fi
+ \ifx\currentstructuremarking\currentstructuretitle \else
+ marking = \!!bs\detokenize\expandafter{\currentstructuremarking }\!!es,
+ \fi
+ \ifx\currentstructuresaveinlist\v!no \else
+ \ifx\currentstructurelist\currentstructuretitle \else
+ list = \!!bs\detokenize\expandafter{\currentstructurelist}\!!es,
+ \fi
+ \fi
+ },
+ numberdata = {
+ % needed ?
+ block = "\currentsectionblock",
+ hidenumber = \ifx\currentstructureshownumber\v!no true\else nil\fi, % titles
+ % so far
+ separatorset = "\structureparameter\c!sectionseparatorset",
+ conversion = "\structureparameter\c!sectionconversion", % for good old times sake
+ conversionset = "\structureparameter\c!sectionconversionset",
+ starter = \!!bs\structureparameter\c!sectionstarter\!!es,
+ stopper = \!!bs\structureparameter\c!sectionstopper\!!es,
+ set = "\structureparameter\c!sectionset",
+ segments = "\structureparameter\c!sectionsegments",
+ ownnumber = "\currentstructureownnumber",
+ },
+ userdata = \!!bs\detokenize{#3}\!!es % will be converted to table at the lua end
+ }
+ }}%
+ \xdef\currentstructurelistnumber{\ctxcommand{addtolist(structures.sections.current())}}%
+ % \currentstructuresynchronize has to be called someplace, since it introduces a node
+ \setstructuresynchronization\currentstructurelistnumber
+ \endgroup}
+
+\let\currentstructurenumber \!!zerocount
+\let\currentsectioncountervalue \!!zerocount % redefined later
+\let\previoussectioncountervalue\!!zerocount % redefined later
+
+% We can access the (stored) data with the following macros.
+%
+% \def\MyHeadCommand #1#2{\framed{#1}\framed{#2 / \structureuservariable{subtitle}}}
+% \def\MyListCommand#1#2#3{\externalfigure[\structurelistuservariable{figure}][height=5mm]#2}
+%
+% \setuphead[chapter][command=\MyHeadCommand]
+% \setuplist[chapter][alternative=command,command=\MyListCommand]
+%
+% \starttext
+% \setupheadertexts[chapter]
+% \setupinteraction[state=start]
+% \placebookmarks[chapter]
+% \placelist[chapter]
+% \startchapter[ownnumber=10,title=Ton,list=Hans,marking=Kees,bookmark=Bram][figure=cow.pdf,subtitle=oeps]
+% \stopchapter
+% \stoptext
+
+% todo: #1 => "#1" ... adapt lua code for name and number
+
+\def\structurenumber {\ctxcommand{structurenumber()}}
+\def\structuretitle {\ctxcommand{structuretitle()}}
+\def\structurevariable #1{\ctxcommand{structurevariable("#1")}}
+\def\structureuservariable #1{\ctxcommand{structureuservariable("#1")}}
+\def\structurecatcodedget #1{\ctxcommand{structurecatcodedget("#1")}} % bad name
+\def\structuregivencatcodedget #1#2{\ctxcommand{structuregivencatcodedget("#1",\number#2)}} % bad name
+\def\structureautocatcodedget #1#2{\ctxcommand{structureautocatcodedget ("#1","#2")}}
+
+\def\namedstructurevariable #1#2{\ctxcommand{namedstructurevariable ("#1","#2")}}
+\def\namedstructureuservariable#1#2{\ctxcommand{namedstructureuservariable("#1","#2")}}
% compatibility issue:
%
@@ -43,6 +269,8 @@
% lua interface / names and interface might change
+\newconditional\c_strc_rendering_continuous % not used (mkii ?)
+
\def\setstructurelevel #1#2{\ctxlua{structures.sections.setlevel("#1","#2")}} % name, level|parent
\def\getstructurelevel #1{\ctxlua{structures.sections.getcurrentlevel("#1")}}% name
\def\setstructurenumber #1#2{\ctxlua{structures.sections.setnumber(#1,"#2")}} % level, number (+/-)
@@ -74,6 +302,62 @@
\installcommandhandler \??head {head} \??head
+\setuphead [%
+ %\c!after=,
+ %\c!align=,
+ %\c!aligntitle=,
+ \c!alternative=\v!normal,
+ %\c!before=,
+ %\c!color=,
+ %\c!command=,
+ \c!continue=\v!yes,
+ %\c!coupling=,
+ %\c!deepnumbercommand=,
+ %\c!deeptextcommand=,
+ %\c!default=,
+ \c!distance=\zeropoint,
+ \c!textwidth=\zeropoint, % signal too
+ \c!numberwidth=\zeropoint, % signal too
+ \c!width=\zeropoint, % signal too
+ \c!expansion=\v!no,
+ %\c!file=,
+ %\c!footer=,
+ %\c!grid=,
+ \c!hang=\v!none,
+ %\c!header=,
+ \c!incrementnumber=\v!yes,
+ \c!indentnext=\v!no,
+ %\c!label=,
+ %\c!limittext=\languageparameter\c!limittext,
+ \c!margin=\zeropoint,
+ %\c!margintext=,
+ \c!number=\v!yes,
+ \c!numbercolor=\headparameter\c!color,
+ \c!textcolor=\headparameter\c!color,
+ \c!numberstyle=\headparameter\c!style,
+ \c!textstyle=\headparameter\c!style,
+ %\c!numbercommand=,
+ %\c!textcommand=,
+ \c!ownnumber=\v!no,
+ %\c!page=,
+ \c!placehead=\v!yes,
+ \c!sectionconversionset=\s!default,
+ \c!sectionnumber=\v!yes,
+ %\c!sectionsegments=,
+ \c!sectionseparatorset=\s!default,
+ \c!sectionset=\v!all,
+ \c!interlinespace=,
+ %\c!sectionstopper=,
+ %\c!sectionstarter=,
+ %\c!strut=,
+ %\c!style=,
+ %\c!text=,
+ %\c!tolerance=,
+    %\c!beforesection=\directsetup{document:\currenthead:start}, % these might become defaults i.e. expect document: namespace
+    %\c!insidesection=\directsetup{document:\currenthead:inside}, % these might become defaults i.e. expect document: namespace
+    %\c!aftersection=\directsetup{document:\currenthead:stop}, % these might become defaults i.e. expect document: namespace
+ ]
+
\let\setupheads\setuphead % will go
\appendtoks
@@ -132,28 +416,28 @@
\appendtoks
% \setevalue{\e!next \currenthead}{\donexthead [\currenthead]}%
- \setevalue{\e!start\currenthead}{\dostarthead[\currenthead]}%
- \setevalue{\e!stop \currenthead}{\dostophead [\currenthead]}%
+ \setuevalue{\e!start\currenthead}{\strc_sectioning_start[\currenthead]}%
+ \setuevalue{\e!stop \currenthead}{\strc_sectioning_stop [\currenthead]}%
\to \everydefinehead
\appendtoks
\doifelse{\headparameter\c!ownnumber}\v!yes
- {\setevalue\currenthead{\dohandleheadown[\currenthead]}}
- {\setevalue\currenthead{\dohandleheadnop[\currenthead]}}%
+ {\setuevalue\currenthead{\strc_sectioning_handle_own[\currenthead]}}
+ {\setuevalue\currenthead{\strc_sectioning_handle_nop[\currenthead]}}%
\to \everysetuphead
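
% A hedged sketch of what these hooks provide (the name 'myhead' is
% hypothetical): defining a head generates the start/stop commands, and
% setting it up (re)installs the bare command.
%
% \definehead[myhead][section]
% \startmyhead[title=Example]
%   some text
% \stopmyhead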
\let\currentnamedsection\empty
\unexpanded\def\startnamedsection
- {\dotripleempty\dostartnamedsection}
+ {\dotripleempty\strc_sectioning_start_named_section}
-\def\dostartnamedsection[#1]% [#2][#3]
+\unexpanded\def\strc_sectioning_start_named_section[#1]% [#2][#3]
{\pushmacro\currentnamedsection
\edef\currentnamedsection{#1}%
- \normalexpanded{\dodostarthead[\currentnamedsection]}} % [#2][#3]
+ \normalexpanded{\strc_sectioning_start_named_section_indeed[\currentnamedsection]}} % [#2][#3]
\unexpanded\def\stopnamedsection
- {\normalexpanded{\dostophead[\currentnamedsection]}%
+ {\normalexpanded{\strc_sectioning_stop[\currentnamedsection]}%
\popmacro\currentnamedsection}
% structure sections (the parents of chapter etc)
@@ -169,20 +453,38 @@
\def\resetcurrentstructuremarks {\resetmarking[\lastsectionname]} % will become option
%def\resetcurrentstructuremarkswithpage{\resetmarking[\lastsectionname]} % will become option
-% -2=text -1=manual 0=block 1+=structurelevel
+% We could use a commandhandler here but sections are somewhat special in the
+% sense that we have two ways of chaining: the main section (levels) as well
+% as rendering (head).
-\newcount\maxstructuredepth
+% -2 = text
+% -1 = manual
+% 0 = block
+% +1 = structurelevel 1 .. n
-\def\setnextsectionlevel#1%
- {\global\advance\maxstructuredepth\plusone
- \setevalue{\??headlevel#1}{\the\maxstructuredepth}}
+\newcount\maxstructuredepth
\def\sectionlevel#1%
{\csname\??headlevel\ifcsname\??headlevel#1\endcsname#1\else\v!none\fi\endcsname}
+\def\namedsectionlevel#1#2% direct indirect
+ {\csname\??headlevel
+ \ifcsname\??headlevel#1\endcsname
+ #1%
+ \else\ifcsname\??headlevel#2\endcsname
+ #2%
+ \else
+ \v!none
+ \fi\fi
+ \endcsname}
+
+\def\xthenamedheadlevel#1%
+ {\namedsectionlevel{#1}{\sectionheadsection{\sectionheadcoupling{#1}}}}
+
\setvalue{\??headlevel\v!block}{0}
\setvalue{\??headlevel\v!none }{-1}
\setvalue{\??headlevel\v!text }{-2}
+\setvalue{\??headlevel\v!head }{-3}
\newtoks\everydefinesection
@@ -190,14 +492,15 @@
{\ifcsname\??headlevel#1\endcsname \else
\edef\currentsection{#1}% not used, will go
\edef\currenthead{#1}%
- \setnextsectionlevel{#1}%
+ \global\advance\maxstructuredepth\plusone
+ \setevalue{\??headlevel#1}{\the\maxstructuredepth}%
\setstructurelevel{#1}{\sectionlevel{#1}}%
\normalexpanded{\setheadparameter{\s!parent}{\??head\lastsectionname}}% TO BE CHECKED, WE HAVE A HELPER
\the\everydefinesection
% so far for these default inheritances
\definemarking[#1]%
\ifnum\maxstructuredepth>\plusone
- \normalexpanded{\noexpand\relatemarking[#1][\lastsectionname]}% so, the parent will reset the child
+ \normalexpanded{\relatemarking[#1][\lastsectionname]}% so, the parent will reset the child
\fi
\xdef\lastsectionname{#1}%
\ifx\firstsectionname\empty
@@ -206,16 +509,16 @@
\fi}
\unexpanded\def\setupsection
- {\dotripleempty\dosetupsection}
+ {\dotripleempty\strc_sectioning_setup}
-\def\dosetupsection[#1][#2][#3]%
+\def\strc_sectioning_setup[#1][#2][#3]%
{\ifcsname\??headlevel#1\endcsname
- \dodosetupsection[#1][#2][#3]%
+ \strc_sectioning_setup_indeed[#1][#2][#3]%
\else
- \dodosetupsection[\sectionheadsection{#1}][#2][#3]%
+ \strc_sectioning_setup_indeed[\sectionheadsection{#1}][#2][#3]%
\fi}
-\def\dodosetupsection[#1][#2][#3]%
+\def\strc_sectioning_setup_indeed[#1][#2][#3]%
{\pushmacro\currenthead
\ifthirdargument
\edef\currenthead{#1#2}% % not used at any more in mkiv (sets now)
@@ -251,42 +554,42 @@
% head construction
-\unexpanded\def\dohandleheadown{\dodoubleempty\dodohandleheadown} % [ref] {nr} {title}
-\unexpanded\def\dohandleheadnop{\dodoubleempty\dodohandleheadnop} % [ref] {title}
-\unexpanded\def\dostarthead {\dotripleempty\dodostarthead} % [settings] [userdata] !!! also used at lua end
+\unexpanded\def\strc_sectioning_handle_own{\dodoubleempty\strc_sectioning_handle_own_indeed} % [ref] {nr} {title}
+\unexpanded\def\strc_sectioning_handle_nop{\dodoubleempty\strc_sectioning_handle_nop_indeed} % [ref] {title}
+\unexpanded\def\strc_sectioning_start {\dotripleempty\strc_sectioning_start_named_section_indeed} % [settings] [userdata] !!! also used at lua end
\newconditional\currentstructureown
\newtoks\everybeforehead % hook, todo: before/after keys
\newtoks\everyafterhead % hook, todo: before/after keys
-\unexpanded\def\dodohandleheadown[#1][#2]#3#4%
+\unexpanded\def\strc_sectioning_handle_own_indeed[#1][#2]#3#4%
{\settrue\currentstructureown
\triggerautostructurelevel
- \dohandlehead{#1}{\c!reference={#2},\c!ownnumber={#3},\c!title={#4}}{}} % name ref nr title --
+ \strc_sectioning_handle{#1}{\c!reference={#2},\c!ownnumber={#3},\c!title={#4}}{}} % name ref nr title --
-\unexpanded\def\dodohandleheadnop[#1][#2]% for taco: [key=value] variant
+\unexpanded\def\strc_sectioning_handle_nop_indeed[#1][#2]% for taco: [key=value] variant
{\setfalse\currentstructureown
\triggerautostructurelevel
- \doifassignmentelse{#2}\dodohandleheadnopA\dodohandleheadnopB{#1}{#2}}
+ \doifassignmentelse{#2}\strc_sectioning_handle_nop_indeed_yes\strc_sectioning_handle_nop_indeed_nop{#1}{#2}}
-\unexpanded\def\dodohandleheadnopA#1#2%
- {\dohandlehead{#1}{#2}{}}
+\unexpanded\def\strc_sectioning_handle_nop_indeed_yes#1#2%
+ {\strc_sectioning_handle{#1}{#2}{}}
-\unexpanded\def\dodohandleheadnopB#1#2#3%
- {\dohandlehead{#1}{\c!reference={#2},\c!title={#3}}{}} % name ref nr title --
+\unexpanded\def\strc_sectioning_handle_nop_indeed_nop#1#2#3%
+ {\strc_sectioning_handle{#1}{\c!reference={#2},\c!title={#3}}{}} % name ref nr title --
-\unexpanded\def\dodostarthead[#1][#2][#3]% for the moment no grouping, too annoying with page breaks
+\unexpanded\def\strc_sectioning_start_named_section_indeed[#1][#2][#3]% for the moment no grouping, too annoying with page breaks
{\setfalse\currentstructureown
%\globalpushmacro\currenthead % this does not work out well
\xdef\currenthead{#1}%
\setsystemmode\currenthead % new, also here now
\headparameter\c!beforesection % beware, no users vars set yet
\the\everybeforehead
- \dohandlehead{#1}{#2}{#3}% name -- -- -- userdata (we might move the tagged to here)
+ \strc_sectioning_handle{#1}{#2}{#3}% name -- -- -- userdata (we might move the tagged to here)
\headparameter\c!insidesection}
-\unexpanded\def\dostophead[#1]% !!! also used at lua end
+\unexpanded\def\strc_sectioning_stop[#1]% !!! also used at lua end
{\dostoptagged
\dostoptagged
%\globalpopmacro\currenthead % so we do a hard recover
@@ -295,70 +598,64 @@
\the\everyafterhead
\resetsystemmode\currenthead} % new, also here now
-% \unexpanded\def\donexthead[#1][#2][#3]% obsolete
-% {\setfalse\currentstructureown
-% \xdef\currenthead{#1}%
-% \dohandlehead{#1}{#2}{#3}} % name -- -- -- userdata
+\let\dostarthead\strc_sectioning_start % used at lua end
+\let\dostophead \strc_sectioning_stop % used at lua end
% \newconditional\structurereversesectionnumbers % todo: key/val
-\newconditional\headtolist
-\newconditional\headdoincrement
-\newconditional\headdoplace
-\newconditional\headleaveempty
-\newconditional\headhidden
-\newconditional\headshownumber
-\newconditional\headisdisplay
+\newconditional\c_strc_sectioning_to_list
+\newconditional\c_strc_sectioning_increment
+\newconditional\c_strc_sectioning_place
+\newconditional\c_strc_sectioning_empty
+\newconditional\c_strc_sectioning_hidden
+
+\newconditional\headshownumber % public
+\newconditional\headisdisplay % public
-\setvalue{\??headincrement\v!yes }{\settrue \headdoincrement\settrue \headtolist}
-\setvalue{\??headincrement\v!no }{\setfalse\headdoincrement\setfalse\headtolist}
-\setvalue{\??headincrement\v!list }{\setfalse\headdoincrement\settrue \headtolist}
-\setvalue{\??headincrement\s!empty}{\settrue \headdoincrement\settrue \headtolist}
+\setvalue{\??headincrement\v!yes }{\settrue \c_strc_sectioning_increment\settrue \c_strc_sectioning_to_list}
+\setvalue{\??headincrement\v!no }{\setfalse\c_strc_sectioning_increment\setfalse\c_strc_sectioning_to_list}
+\setvalue{\??headincrement\v!list }{\setfalse\c_strc_sectioning_increment\settrue \c_strc_sectioning_to_list}
+\setvalue{\??headincrement\s!empty}{\settrue \c_strc_sectioning_increment\settrue \c_strc_sectioning_to_list}
-\def\setheadincrement
+\unexpanded\def\strc_sectioning_initialize_increment
{\edef\currentheadincrement{\headparameter\c!incrementnumber}%
\ifcsname\??headincrement\currentheadincrement\endcsname
\csname\??headincrement\currentheadincrement\endcsname
\else
- \settrue \headdoincrement\settrue \headtolist
+ \settrue \c_strc_sectioning_increment\settrue \c_strc_sectioning_to_list
% \filterheadnumber
\fi}
\def\filterheadnumber
- {\settrue\headdoincrement
- \settrue\headtolist
+ {\settrue\c_strc_sectioning_increment
+ \settrue\c_strc_sectioning_to_list
\ifx\currentproduct\empty
% todo : filter from other toc (number, file, title)
% use : \currentheadincrement as spec
\fi}
\setvalue{\??headplace\v!yes}%
- {\setfalse\headleaveempty
- \settrue \headdoplace
- \setfalse\headhidden}
+ {\setfalse\c_strc_sectioning_empty
+ \settrue \c_strc_sectioning_place
+ \setfalse\c_strc_sectioning_hidden}
\setvalue{\??headplace\v!empty}%
- {\settrue \headleaveempty
- \settrue \headdoplace
- \setfalse\headhidden}
+ {\settrue \c_strc_sectioning_empty
+ \settrue \c_strc_sectioning_place
+ \setfalse\c_strc_sectioning_hidden}
\setvalue{\??headplace\v!no}%
- {\settrue \headleaveempty
- \setfalse\headdoplace
- \setfalse\headhidden}
+ {\settrue \c_strc_sectioning_empty
+ \setfalse\c_strc_sectioning_place
+ \setfalse\c_strc_sectioning_hidden}
\setvalue{\??headplace\v!hidden}%
- {\settrue \headleaveempty
- \setfalse\headdoplace
- \settrue \headhidden}
+ {\settrue \c_strc_sectioning_empty
+ \setfalse\c_strc_sectioning_place
+ \settrue \c_strc_sectioning_hidden}
-\def\setheadplacement
- {\executeifdefined
- {\??headplace\headparameter\c!placehead}
- {\getvalue{\??headplace\v!yes}}}
-
-
-\ifdefined\setheaddisplay \else \let\setheaddisplay\relax \fi
+\unexpanded\def\strc_sectioning_initialize_placement
+ {\expandnamespaceparameter\??headplace\headparameter\c!placehead\v!yes}
\newmode\v!sectionnumber
@@ -370,12 +667,18 @@
{\resetsystemmode\v!sectionnumber
\setfalse\headshownumber} % why ?
-\def\setheadnumber
- {\doifelse{\sectionblockparameter\c!number}\v!yes % todo
- {\doifelse{\headparameter\c!number}\v!yes
- {\settrue\headshownumber}
- {\setfalse\headshownumber}}
- {\setfalse\headshownumber}}
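+% the number is only shown when both the section block and the head itself have number=yes: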
+\unexpanded\def\strc_sectioning_initialize_number
+ {\edef\p_number{\sectionblockparameter\c!number}%
+ \ifx\p_number\v!yes
+ \edef\p_number{\headparameter\c!number}%
+ \ifx\p_number\v!yes
+ \settrue\headshownumber
+ \else
+ \setfalse\headshownumber
+ \fi
+ \else
+ \setfalse\headshownumber
+ \fi}
\unexpanded\def\theheadsynchonization
{\currentstructuresynchronize}
@@ -416,45 +719,15 @@
\let\currentheadlevel \!!zerocount
\let\currentheadcounter \!!zerocount
-% here we could inherit as well but it's a bit complex
-
-\def\doregisterhead#1#2#3% name data userdata
- {\structurecomponent
- %[\c!label={\headparameter\c!label}, % why { }
- [\c!label={\headparameter{\currentsectionblock\c!label}},
- \c!incrementnumber=\ifconditional\headdoincrement\v!yes\else\v!no\fi, % not that needed
- \c!saveinlist=\ifconditional\headtolist\v!yes\else\v!no\fi,
- \c!level=\currentheadlevel,
- \c!name=#1,
- \c!number=\ifconditional\headdoincrement\ifconditional\headshownumber\v!yes\else\v!no\fi\else\v!no\fi,
- \c!bookmark=,
- \c!marking=,
- \c!list=,
- \c!expansion=\headparameter\c!expansion,
- \c!xmlsetup=\headparameter\c!xmlsetup,
- \s!catcodes=\headparameter\s!catcodes,
- \c!sectionresetset=\headparameter\c!sectionresetset,
- \c!sectionseparatorset=\headparameter\c!sectionseparatorset,
- \c!sectionconversionset=\headparameter\c!sectionconversionset,
- \c!sectionconversion=\headparameter\c!conversion, % just for compatibility
- \c!sectionstarter=\headparameter\c!sectionstarter,
- \c!sectionstopper=\headparameter\c!sectionstopper,
- \c!sectionset=\headparameter\c!sectionset,
- \c!sectionsegments=\headparameter\c!sectionsegments,
- \c!reference=\headparameter\c!reference,
- \c!referenceprefix=\headparameter\c!referenceprefix,
- \c!backreference=,
- \c!command=,
- #2]%
- [#3]%
- \reportcurrentstructure}
-
-\unexpanded\def\placeheadtext {\dosingleempty\strc_sections_place_head_text } % use with care
-\unexpanded\def\placeheadnumber{\dosingleempty\strc_sections_place_head_number} % use with care
-
-\ifdefined\setupheadcomponentfont \else
-
- \unexpanded\def\setupheadcomponentfont#1#2%
+\unexpanded\def\placeheadtext {\dosingleempty\strc_sectioning_place_head_text } % use with care
+\unexpanded\def\placeheadnumber{\dosingleempty\strc_sectioning_place_head_number} % use with care
+
+
+\unexpanded\def\strc_sectioning_report{\ctxlua{structures.sections.reportstructure()}}
+
+\ifdefined\strc_rendering_initialize_style_and_color \else
+
+ \unexpanded\def\strc_rendering_initialize_style_and_color#1#2%
{\dontconvertfont
\useheadstyleandcolor\c!style\c!color
\useheadstyleandcolor#1#2%
@@ -462,131 +735,133 @@
\fi
-\def\strc_sections_place_head_text[#1]%
+\def\strc_sectioning_place_head_text[#1]%
{\dontleavehmode
\begingroup
+ \unexpanded\def\\{\space}% messy here, but the default (and needs to be grouped)
+ \settrue\headisdisplay % triggers interlinespace checking
\edef\currenthead{#1}%
- \setupheadcomponentfont\c!textstyle\c!textcolor
+ \strc_rendering_initialize_style_and_color\c!textstyle\c!textcolor
\relax
\getspecificstructuretitle{\thenamedheadlevel{#1}}%
\endgraf
\endgroup}
-\def\strc_sections_place_head_number[#1]%
+\def\strc_sectioning_place_head_number[#1]%
{\dontleavehmode
\begingroup
+ \settrue\headisdisplay % triggers interlinespace checking
\edef\currenthead{#1}%
- \setupheadcomponentfont\c!numberstyle\c!numbercolor
+ \strc_rendering_initialize_style_and_color\c!numberstyle\c!numbercolor
\relax
\getfullstructurenumber{\thenamedheadlevel{#1}}%
\endgraf
\endgroup}
-\ifdefined\presetnumberheadalternative \else \let\presetnumberheadalternative\relax \fi
-\ifdefined\setautostructurelevel \else \let\setautostructurelevel \relax \fi
-\ifdefined\triggerautostructurelevel \else \let\triggerautostructurelevel \relax \fi
+\ifdefined\triggerautostructurelevel \else \let\triggerautostructurelevel\relax \fi
-\def\dohandlehead#1#2#3% name data userdata (we can move #1 to the caller)
+\def\strc_sectioning_handle#1#2#3% name data userdata (we can move #1 to the caller)
{\xdef\currenthead {#1}%
\xdef\currentheadcoupling{\sectionheadcoupling\currenthead}%
\xdef\currentheadsection {\sectionheadsection \currentheadcoupling}%
\xdef\currentheadlevel {\sectionlevel \currentheadsection}%
- %writestatus\m!system{setup: \currenthead,\currentheadcoupling,\currentheadsection,\currentheadlevel}%
%
- \setautostructurelevel
- \setheadincrement
- \setheadplacement
- \setheaddisplay
- \setheadnumber
+ %\writestatus\m!system{setup: \currenthead,\currentheadcoupling,\currentheadsection,\currentheadlevel}%
+ %
+ \strc_sectioning_initialize_autolevel
+ \strc_sectioning_initialize_increment
+ \strc_sectioning_initialize_placement
+ \strc_sectioning_initialize_number
%
- \unexpanded\def\\{\space}%
\flushingcolumnfloatsfalse
%
% todo: also mark (for header)
%
% we might remove the lower level
%
- % not here, after optional \page: \doregisterhead\currenthead{#2}{#3}%
+ % not here, after optional \page: \strc_sectioning_register{#1}{#2}{#3}%
%
% \xdef\currentheadcounter{\currentsectioncountervalue}% lua call
%
% \currentstructuresynchronize % will move
%
- \edef\numberheaddistance {\headparameter\c!distance }% compatibility
- \edef\numberheadalternative{\headparameter\c!alternative}% compatibility
- \presetnumberheadalternative
- %
\let\getheadnumber\empty
\let\getheadtitle \empty
\let\getheadsyncs \empty
- \ifconditional\headdoincrement
- \ifconditional\headdoplace
- \doheadspacingbeforeyes
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+ \ifconditional\c_strc_sectioning_increment
+ \ifconditional\c_strc_sectioning_place
+ \strc_sectioning_before_yes
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
\let\getheadtitle\fullheadtitle
\ifconditional\headshownumber
\let\getheadnumber\fullheadnumber
- \placecurrentheadnumbertext
+ \strc_rendering_place_head_number_and_text
\else
- \placecurrentheadtext
+ \strc_rendering_place_head_text
\fi
- \doheadspacingafteryes
- \else\ifconditional\headhidden
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+ \strc_sectioning_after_yes
+ \else\ifconditional\c_strc_sectioning_hidden
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
- \placecurrentheadhidden % only something when tracing
+ \strc_rendering_place_head_hidden % only something when tracing
\else
- \doheadspacingbeforenop % toegevoegd ivm subpaginanr / tug sheets
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+            \strc_sectioning_before_nop % added in connection with subpage numbers / tug sheets
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
- \placecurrentheadempty % just flush 'm
- \doheadspacingafternop
+ \strc_rendering_place_head_empty % just flush 'm
+ \strc_sectioning_after_nop
\fi\fi
\else
- \ifconditional\headdoplace
- \doheadspacingbeforeyes
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+ \ifconditional\c_strc_sectioning_place
+ \strc_sectioning_before_yes
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
\let\getheadtitle\fullheadtitle
- \placecurrentheadtext
- \doheadspacingafteryes
- \else\ifconditional\headhidden
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+ \strc_rendering_place_head_text
+ \strc_sectioning_after_yes
+ \else\ifconditional\c_strc_sectioning_hidden
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
- \placecurrentheadhidden % only something when tracing
+ \strc_rendering_place_head_hidden % only something when tracing
\else
% do nothing / should be vbox to 0pt
- \doheadspacingbeforenop
- \doregisterhead\currenthead{#2}{#3}% after optional \page
+ \strc_sectioning_before_nop
+ \strc_sectioning_register{#1}{#2}{#3}% after optional \page
+ \strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
- \placecurrentheadempty % just flush 'm
- \doheadspacingafternop
+ \strc_rendering_place_head_empty % just flush 'm
+ \strc_sectioning_after_nop
\fi\fi
\fi
\flushingcolumnfloatstrue
- \setfalse\ignorehandlepagebreak
+ \setfalse\c_strc_sectioniong_ignore_page
% ignorespaces prevents spaces creeping in when after=\dontleavehmode
\dostarttagged\t!sectioncontent\empty
- \ifconditional\headisdisplay % \ifdisplaysectionhead
+ \ifconditional\headisdisplay
\ignorespaces
\else
\expandafter\GotoPar
\fi}
-% typesetting
+% typesetting (the getters are public)
-\unexpanded\def\placecurrentheadnumbertext
+\unexpanded\def\strc_rendering_place_head_number_and_text
{\setheadmarking
\getheadnumber/\getheadtitle
\getheadsyncs}
-\unexpanded\def\placecurrentheadtext
+\unexpanded\def\strc_rendering_place_head_text
{\setheadmarking
\getheadtitle
\getheadsyncs}
-\unexpanded\def\placecurrentheadempty
+\unexpanded\def\strc_rendering_place_head_empty
{\setheadmarking
\getheadsyncs}
@@ -596,7 +871,7 @@
% todo: when in the page builder we need to resolve the marking immediately
% because otherwise we have an async
-\unexpanded\def\placecurrentheadhidden % maybe trialtypesetting check
+\unexpanded\def\strc_rendering_place_head_hidden % maybe trialtypesetting check
{\setxvalue{\??hiddenheadattr\currenthead}%
{\headreferenceattributes}% can be used when making a box
\setxvalue{\??hiddenheadsync\currenthead}%
@@ -625,138 +900,144 @@
% pagebreaks
-\newcount\precedingstructurelevel \precedingstructurelevel\plusone
-\newconditional\ignorehandlepagebreak
+\letvalue{\??headmarknop\v!page }\donothing
+\setvalue{\??headmarknop\v!reset }{\resetcurrentstructuremarks}
+\letvalue{\??headmarknop\s!unknown}\donothing
+
+\letvalue{\??headmarkyes\v!page }\donothing % to be checked: {\resetcurrentstructuremarks}
+\setvalue{\??headmarkyes\v!reset }{\resetcurrentstructuremarks}
+\letvalue{\??headmarkyes\s!unknown}\donothing
+
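+% when no page key is set only the marking is handled; otherwise the given \page option is
+% applied and the header, text and footer states are synchronized:
+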
+\def\strc_sectioning_check_layout
+ {\edef\p_page{\headparameter\c!page}%
+ \ifx\p_page\empty
+ \strc_sectioning_check_layout_nop
+ \else
+ \strc_sectioning_check_layout_yes
+ \fi}
-\def\doheadspacingbeforeyes
- {\docheckheadbefore
- \dohandleheadpagebreakyes
+\def\strc_sectioning_check_layout_nop
+ {\expandnamespaceparameter\??headmarknop\headparameter\c!marking\s!unknown}
+
+\def\strc_sectioning_check_layout_yes
+ {\page[\p_page]%
+ \expandnamespaceparameter\??headmarkyes\headparameter\c!marking\s!unknown
+ \edef\p_header{\headparameter\c!header}%
+ \ifx\p_header\empty \else
+ \doifelselayouttextline\v!header{\normalexpanded{\setuplayouttext[\v!header][\c!state=\p_header]}}\donothing
+ \fi
+ \edef\p_text{\headparameter\c!text}%
+ \ifx\p_text\empty \else
+ \doifelselayouttextline\v!text {\normalexpanded{\setuplayouttext[\v!text ][\c!state=\p_text ]}}\donothing
+ \fi
+ \edef\p_footer{\headparameter\c!footer}%
+ \ifx\p_footer\empty \else
+ \doifelselayouttextline\v!footer{\normalexpanded{\setuplayouttext[\v!footer][\c!state=\p_footer]}}\donothing
+ \fi}
+
+\newcount \c_strc_sectioniong_preceding_level \c_strc_sectioniong_preceding_level\plusone
+\newconditional\c_strc_sectioning_auto_break \settrue\c_strc_sectioning_auto_break
+\newconditional\c_strc_sectioniong_ignore_page
+\newsignal \s_strc_sectioniong_continuous_signal
+
+\unexpanded\def\strc_sectioning_inject_continuous_signal
+ {\ifhmode
+ \hskip\s_strc_sectioniong_continuous_signal\relax
+ \fi}
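+
+% the signal is a skip of a known (tiny) size; a following head checks \lastskip for it
+% (see \strc_sectioning_check_before) and, when it is found, suppresses the page break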
+
+\def\strc_sectioning_before_yes
+ {\strc_sectioning_check_before\strc_sectioning_handle_page_yes
\headparameter\c!inbetween
\dostarttagged\t!section\currenthead}
-\def\doheadspacingbeforenop
- {\docheckheadbefore
- \dohandleheadpagebreaknop
+\def\strc_sectioning_before_nop
+ {\strc_sectioning_check_before\strc_sectioning_handle_page_nop
\headparameter\c!inbetween
\dostarttagged\currenthead\empty}
-\def\emptyheadcorrection
- {\ifconditional\headleaveempty % inlined \emptyheadcorrection (with after=\blank)
- \penalty10000 % first ... we need to adapt this all to vspacing
+\def\strc_sectioning_empty_correction
+ {\ifconditional\c_strc_sectioning_empty
+ % this needs checking
+ \penalty\plustenthousand
\vskip-\lineheight
\kern\zeropoint
\prevdepth\strutdepth
\fi}
-\def\doheadspacingafteryes
+\def\strc_sectioning_after_yes
{\ifconditional\headisdisplay
- \dosomebreak\nobreak % needs to be adapted to vspacing
- \emptyheadcorrection
+ \ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\currentheadlevel]%
+ \fi
+ \strc_sectioning_empty_correction
\headparameter\c!after
\fi}
-\def\doheadspacingafternop
+\def\strc_sectioning_after_nop
{}
-\newsignal\continuousheadsignal
-
-\def\docheckheadbefore#1%
+\def\strc_sectioning_check_before#1%
{\ifhmode
- \scratchcounter\lastpenalty\unpenalty % no beauty in this
- \ifdim\lastskip=\continuousheadsignal
+ \scratchcounter\lastpenalty
+ \unpenalty % no beauty in this
+ \ifdim\lastskip=\s_strc_sectioniong_continuous_signal
% no page break
- \ifconditional\ignorehandlepagebreak
- \setfalse\ignorehandlepagebreak
+ \ifconditional\c_strc_sectioniong_ignore_page
+ \setfalse\c_strc_sectioniong_ignore_page
\else
- \global\precedingstructurelevel\currentheadlevel
+ \global\c_strc_sectioniong_preceding_level\currentheadlevel
\nobreak
\fi
- \global\settrue\continuoussectionhead
+ \global\settrue\c_strc_rendering_continuous
\else
\penalty\scratchcounter
- \global\setfalse\continuoussectionhead
+ \global\setfalse\c_strc_rendering_continuous
#1%
\fi
\else
- \global\setfalse\continuoussectionhead
+    \global\setfalse\c_strc_rendering_continuous
#1%
\fi}
-\def\dodocheckheadlayout#1#2%
- {\doifelselayouttextline{#1}
- {\doifsomething{\headparameter#2}{\expanded{\setuplayouttext[#1][\c!state=\headparameter#2]}}}
- \donothing}
-
-\setvalue{\??headmarknop\v!page }{}
-\setvalue{\??headmarknop\v!reset}{\resetcurrentstructuremarks}
-\setvalue{\??headmarkyes\v!page }{} % to be checked: {\resetcurrentstructuremarks}
-\setvalue{\??headmarkyes\v!reset}{\resetcurrentstructuremarks}
-
-\def\docheckheadlayout
- {\doifelsenothing{\headparameter\c!page}
- {\getvalue{\??headmarknop\headparameter\c!marking}}
- {\page[\headparameter\c!page]%
- \getvalue{\??headmarkyes\headparameter\c!marking}%
- \dodocheckheadlayout\v!header\c!header
- \dodocheckheadlayout\v!text \c!text
- \dodocheckheadlayout\v!footer\c!footer}}
-
\def\currentsectioncountervalue {\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead)}}
\def\previoussectioncountervalue{\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead-1)}}
-\def\dohandleheadpagebreaknop
- {\doifelse{\headparameter\c!continue}\v!yes
- {\ifnum\previoussectioncountervalue=\zerocount
- \docheckheadlayout
- \else\ifnum\currentsectioncountervalue>\zerocount
- \docheckheadlayout
- \fi\fi}%
- {\docheckheadlayout}}
+\def\strc_sectioning_handle_page_nop
+ {\edef\p_continue{\headparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \ifnum\previoussectioncountervalue=\zerocount
+ \strc_sectioning_check_layout
+ \else\ifnum\currentsectioncountervalue>\zerocount
+ \strc_sectioning_check_layout
+ \fi\fi
+ \else
+ \strc_sectioning_check_layout
+ \fi}
-\def\dohandleheadpagebreakyes
+\def\strc_sectioning_handle_page_yes
{%[[\currenthead @\thenamedheadlevel\currenthead/prev:\previoussectioncountervalue/curr:\currentsectioncountervalue]]
- \ifconditional\ignorehandlepagebreak
- \setfalse\ignorehandlepagebreak
+ \ifconditional\c_strc_sectioniong_ignore_page
+ \setfalse\c_strc_sectioniong_ignore_page
\else
-% \ifnum\lastpenalty>\zerocount
-% \global\pagebreakdisabledtrue
-% \fi
     % beware, these numbers are not yet known here
- \doifelse{\headparameter\c!continue}\v!yes
- {\ifnum\previoussectioncountervalue=\zerocount
- \docheckheadlayout
- \else\ifnum\currentsectioncountervalue>\zerocount
- \docheckheadlayout
- \fi\fi}%
- {\docheckheadlayout}%
- \doifnot{\headparameter\c!aligntitle}\v!float\page_otr_command_flush_side_floats
- \headparameter\c!before
- \relax
-% \ifpagebreakdisabled
-% \global\pagebreakdisabledfalse
-% \else
-% \dopreventbreakafterheadauto % not ok as it binds the prev par
-% \fi
- \doif{\headparameter\c!aligntitle}\v!float\indent
- \global\precedingstructurelevel\currentheadlevel
- \fi}
-
-\settrue\autoheadbreak % todo: \vspacing[category:8] == keep_together
-
-\def\dopreventbreakafterheadauto % used after \c!before
- {\ifconditional\autoheadbreak
- \vspacing[\v!samepage-\currentheadlevel]%
+ \strc_sectioning_handle_page_nop
+ \edef\p_aligntitle{\headparameter\c!aligntitle}%
+ \ifx\p_aligntitle\v!float
+ \headparameter\c!before\relax
+ \indent
+ \else
+ \page_otr_command_flush_side_floats
+ \headparameter\c!before\relax
+ \fi
+ \global\c_strc_sectioniong_preceding_level\currentheadlevel
\fi}
-\def\dopreventbreakafterheadspec#1% see enumerations etc
- {\ifconditional\autoheadbreak
- \vspacing[\v!samepage-\the\numexpr\currentheadlevel+1\relax]% todo #1
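+% #1 is an extra level offset; when empty we keep together one level deeper than the current head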
+\unexpanded\def\strc_sectioning_prevent_page_break#1% see strc-con
+ {\ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\the\numexpr\currentheadlevel+\ifx#1\empty\plusone\else#1\fi\relax]%
\fi}
-\def\dohandlepagebreakX{\dopreventbreakafterheadspec} % no \let so we can redefine
-
-% we do support negative numbers but it can have side effects that we won't
-% catch
+% We do support negative numbers but it can have side effects that we won't catch:
%
% \chapter{some} \setupheadnumber[chapter][3] \chapter{more}
% \setupheadnumber[section][8] \section{b} \section{c} \setupheadnumber[section][-1] \section{d}
@@ -765,9 +1046,9 @@
{\sectionlevel{\sectionheadsection{\sectionheadcoupling{#1}}}}
\unexpanded\def\setupheadnumber
- {\dodoubleargument\dosetupheadnumber}
+ {\dodoubleargument\strc_sectioning_setup_number}
-\def\dosetupheadnumber[#1][#2]% todo: reset if at other level
+\def\strc_sectioning_setup_number[#1][#2]% todo: reset if at other level
{\setstructurenumber{\thenamedheadlevel{#1}}{#2}}
\def\currentheadnumber{0} % ==> \currentheadnumber
@@ -781,15 +1062,15 @@
\def\somenamedheadnumber#1#2{\getsomestructurenumber{\thenamedheadlevel{#1}}{#2}}
\unexpanded\def\headnumber
- {\dodoubleempty\doheadnumber}
+ {\dodoubleempty\strc_sectioning_number}
-\def\doheadnumber[#1][#2]% simple case is just a number
+\def\strc_sectioning_number[#1][#2]% simple case is just a number
{\getsomefullstructurenumber{\iffirstargument\thenamedheadlevel{#1}\fi}{#2}}
-\def\someheadnumber
- {\dodoubleempty\dosomeheadnumber}
+\unexpanded\def\someheadnumber
+ {\dodoubleempty\strc_sectioning_number_some}
-\def\dosomeheadnumber[#1][#2]%
+\def\strc_sectioning_number_some[#1][#2]%
{\dontleavehmode
\begingroup
\edef\currenthead{#1}%
@@ -807,22 +1088,22 @@
% experimental
-\newconditional\c_strc_sections_auto_levels
+\newconditional\c_strc_sectioning_auto_levels
\appendtoks
- \settrue\c_strc_sections_auto_levels
+ \settrue\c_strc_sectioning_auto_levels
\to \everyenableelements
-\def\setautostructurelevel
- {\ifconditional\c_strc_sections_auto_levels
+\unexpanded\def\strc_sectioning_initialize_autolevel
+ {\ifconditional\c_strc_sectioning_auto_levels
\ctxcommand{autonextstructurelevel(\number\currentheadlevel)}%
- \global\setfalse\c_strc_sections_auto_levels
+ \global\setfalse\c_strc_sectioning_auto_levels
\fi}
-\def\triggerautostructurelevel
- {\global\settrue\c_strc_sections_auto_levels}
+\unexpanded\def\triggerautostructurelevel
+ {\global\settrue\c_strc_sectioning_auto_levels}
-\def\finalizeautostructurelevels
+\unexpanded\def\finalizeautostructurelevels
{\ctxcommand{autofinishstructurelevels()}}
\unexpanded\def\finalizeautostructurelevel
diff --git a/Master/texmf-dist/tex/context/base/strc-syn.lua b/Master/texmf-dist/tex/context/base/strc-syn.lua
index c2b5251d08e..ca4b3ac1898 100644
--- a/Master/texmf-dist/tex/context/base/strc-syn.lua
+++ b/Master/texmf-dist/tex/context/base/strc-syn.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['str-syn'] = {
+if not modules then modules = { } end modules ['strc-syn'] = {
version = 1.001,
comment = "companion to str-syn.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
diff --git a/Master/texmf-dist/tex/context/base/strc-syn.mkiv b/Master/texmf-dist/tex/context/base/strc-syn.mkiv
index 535eab04ba3..e0087d45008 100644
--- a/Master/texmf-dist/tex/context/base/strc-syn.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-syn.mkiv
@@ -65,8 +65,8 @@
%\c!headcolor=,
%\c!criterium=,
\c!location=\v!left,
- \c!width=5em,
- \c!distance=0pt,
+ \c!width=5\emwidth,
+ \c!distance=\zeropoint,
%\c!sample=,
%\c!hang=,
%\c!align=,
diff --git a/Master/texmf-dist/tex/context/base/strc-tag.lua b/Master/texmf-dist/tex/context/base/strc-tag.lua
index 78b8074878f..7e5c6f99377 100644
--- a/Master/texmf-dist/tex/context/base/strc-tag.lua
+++ b/Master/texmf-dist/tex/context/base/strc-tag.lua
@@ -161,6 +161,7 @@ local properties = allocate {
metadata = { pdf = "Div", nature = "display" },
metavariable = { pdf = "Span", nature = "mixed" },
+ mid = { pdf = "Span", nature = "inline" },
sub = { pdf = "Span", nature = "inline" },
sup = { pdf = "Span", nature = "inline" },
subsup = { pdf = "Span", nature = "inline" },
diff --git a/Master/texmf-dist/tex/context/base/strc-tag.mkiv b/Master/texmf-dist/tex/context/base/strc-tag.mkiv
index 84d6fb2d377..6e792fd3f97 100644
--- a/Master/texmf-dist/tex/context/base/strc-tag.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-tag.mkiv
@@ -126,6 +126,7 @@
\def\t!ignore {ignore} % Span
+\def\t!mid {mid} % Span
\def\t!sub {sub} % Span
\def\t!sup {sup} % Span
\def\t!subsup {subsup} % Span
@@ -255,13 +256,80 @@
[\c!state=\v!stop,
\c!method=\v!auto]
+% \unexpanded\def\startparagraph
+% {\dostarttagged\t!paragraph\empty}
+%
+% \unexpanded\def\stopparagraph
+% {% \removeunwantedspaces % yes or no
+% \dostoptagged
+% \par}
+
+% Following a suggestion by Wolfgang we now have named paragraphs. Watch out, the content
+% is grouped but only when we have an instance.
+%
+% \defineparagraph[red] [color=red]
+% \defineparagraph[bold][style=bold]
+%
+% \startparagraph \input ward \stopparagraph
+% \startparagraph[red] \input ward \stopparagraph
+% \startparagraph[bold] \input ward \stopparagraph
+
+\installcorenamespace {paragraph}
+\installcommandhandler \??paragraph {paragraph} \??paragraph
+
+\setupparagraph % someday maybe also strut (beg/end)
+ [\c!color=,
+ \c!style=]
+
\unexpanded\def\startparagraph
- {\dostarttagged\t!paragraph\empty}
+ {\dodoubleempty\paragraph_start}
+
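+% a single optional argument can be either an instance name or a direct setup (an assignment list):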
+\def\paragraph_start
+ {\iffirstargument
+ \ifsecondargument
+ \doubleexpandafter\paragraph_start_two
+ \else
+ \doubleexpandafter\paragraph_start_one
+ \fi
+ \else
+ \expandafter\paragraph_start_zero
+ \fi}
+
+\def\paragraph_start_two[#1][#2]%
+ {\endgraf % we end before the group
+ \begingroup
+ \let\stopparagraph\paragraph_stop_indeed
+ \edef\currentparagraph{#1}%
+ \setupcurrentparagraph[#2]%
+ \paragraph_start_indeed}
+
+\def\paragraph_start_one[#1][#2]%
+ {\endgraf % we end before the group
+ \begingroup
+ \let\stopparagraph\paragraph_stop_indeed
+ \doifassignmentelse{#1}
+ {\let\currentparagraph\empty
+ \setupcurrentparagraph[#1]}
+ {\edef\currentparagraph{#1}}%
+ \paragraph_start_indeed}
+
+\def\paragraph_start_indeed
+ {\useparagraphstyleandcolor\c!style\c!color
+ \usealignparameter\paragraphparameter
+ \usesetupsparameter\paragraphparameter
+ \dostarttagged\t!paragraph\currentparagraph}
+
+\def\paragraph_start_zero[#1][#2]%
+ {\let\currentparagraph\empty
+ \paragraph_start_indeed}
+
+\unexpanded\def\paragraph_stop_indeed
+ {\dostoptagged
+ \endgraf % we end inside the group
+ \endgroup}
\unexpanded\def\stopparagraph
- {% \removeunwantedspaces % yes or no
- \dostoptagged
- \par}
+ {\dostoptagged}
\let\startpar\startparagraph
\let\stoppar \stopparagraph
@@ -332,4 +400,12 @@
\unexpanded\def\settaggedmetadata[#1]%
{\ctxlua{structures.tags.registermetadata(\!!bs#1\!!es)}}
+%D An overload:
+
+\let\strc_tagged_saved_bpar\bpar
+\let\strc_tagged_saved_epar\epar
+
+\unexpanded\def\bpar{\dostarttagged\t!paragraph\empty\strc_tagged_saved_bpar}
+\unexpanded\def\epar{\strc_tagged_saved_epar\dostoptagged}
+
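+%D A minimal usage sketch (assuming tagging is enabled, e.g. with \type{\setuptagging[state=start]}):
+%D
+%D \starttyping
+%D \bpar some text \epar % the text now ends up in a paragraph structure element
+%D \stoptyping
+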
\protect
diff --git a/Master/texmf-dist/tex/context/base/supp-ali.mkiv b/Master/texmf-dist/tex/context/base/supp-ali.mkiv
deleted file mode 100644
index 54a7b77e4f2..00000000000
--- a/Master/texmf-dist/tex/context/base/supp-ali.mkiv
+++ /dev/null
@@ -1,173 +0,0 @@
-%D \module
-%D [ file=supp-ali,
-%D version=2000.04.17,
-%D title=\CONTEXT\ Support Macros,
-%D subtitle=Alignment,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D Yet undocumented.
-
-% 0 = centered
-% 1 = left in before
-% 2 = right in before
-% 3 = left in after
-% 4 = right in after
-
-\unprotect
-
-% \starttabulate[|cg{.}|cg{,}|cg{,}|]
-% \NC period \NC comma \NC comma \NC\NR
-% \NG 100.000,00 \NG 100.000,00 \NG 100,00 \NC\NR
-% \NG 10.000,00 \NG 10.000,00 \NG 1000,00 \NC\NR
-% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
-% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
-% \NG 10\\ \NG 10\\ \NG 0,00 \NC\NR
-% \NG 10 \NG 10 \NG 0,00 \NC\NR
-% \NG 10 \NG 10 \NG 0,00 \NC\NR
-% \stoptabulate
-
-\chardef\characteralignmentmode=4
-\chardef\characteralignmentslot=1
-
-\newtoks\@@characteralignlst
-
-\let\afterassignwidth \!!zeropoint
-\let\beforeassignwidth\!!zeropoint
-
-\def\alignmentcharacter{.}
-
-\let\alignmentclass\s!default % can be used to handle multiple mixed ones
-
-\def\setfirstpasscharacteralign
- {\popcharacteralign
- \expanded{\dosetfirstpasscharacteralign{\alignmentcharacter}}}
-
-\def\dosetfirstpasscharacteralign#1%
- {\def\checkalignment##1%
- {\popcharacteralign
- \let\\\empty
- \setbox\scratchbox\hbox{#1}%
- \edef\characterassignwidth{\the\wd\scratchbox}%
- \setbox\scratchbox\emptyhbox
- \docheckalignment##1#1\relax\relax
- \scratchdimen-\wd\scratchbox
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \advance\scratchdimen \wd\scratchbox
- \ifdim\scratchdimen>\beforeassignwidth\relax
- \edef\beforeassignwidth{\the\scratchdimen}%
- \fi
- \ifdim\scratchdimen=\zeropoint
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \scratchdimen\wd\scratchbox
- \ifcase\characteralignmentmode
- % do nothing
- \else\ifnum\characteralignmentmode<\plusthree
- \advance\scratchdimen \characterassignwidth
- \ifdim\scratchdimen>\beforeassignwidth\relax
- \edef\beforeassignwidth{\the\scratchdimen}%
- \fi
- \else
- \ifdim\scratchdimen>\afterassignwidth\relax
- \edef\afterassignwidth{\the\scratchdimen}%
- \fi
- \fi\fi
- \fi
- \pushcharacteralign}%
- \def\docheckalignment##1#1##2##3\relax
- {\ifx##2\relax
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \ifdim\wd\scratchbox>\afterassignwidth
- \edef\afterassignwidth{\the\wd\scratchbox}%
- \fi
- \else
- \docheckalignment##2##3\relax\relax
- \fi}}
-
-\def\setsecondpasscharacteralign
- {\popcharacteralign
- \expanded{\dosetsecondpasscharacteralign{\alignmentcharacter}}}
-
-\def\dosetsecondpasscharacteralign#1%
- {\def\checkalignment##1%
- {\popcharacteralign
- \let\\\empty % beware, no grouping
- \setbox\scratchbox\hbox{#1}%
- \edef\characterassignwidth{\the\wd\scratchbox}%
- \setbox\scratchbox\emptyhbox
- % new 12,34 vs 10\\ where 10 aligns on 12 if #1 = ,
- \ifcase\characteralignmentslot
- \docheckalignment##1#1\relax\relax
- \scratchdimen\wd\scratchbox
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \else
- \def\\{#1}%
- \expanded{\docheckalignment##1#1\relax\relax}%
- \scratchdimen\wd\scratchbox
- \setbox\scratchbox\hbox{\def\\{\hphantom{#1}}\ignorespaces##1\unskip}%
- \fi
- \noindent
- \ifdim\scratchdimen=\wd\scratchbox
- \ifcase\characteralignmentmode
- \box\scratchbox
- \else
- \hbox
- {\dontcomplain
- \hbox to \beforeassignwidth
- {\ifcase\characteralignmentmode\or
- \box\scratchbox\hss
- \or
- \hss\box\scratchbox\hskip\characterassignwidth
- \or
- \hss\rlap{\box\scratchbox}%
- \or
- \hss\rlap{\hbox to \afterassignwidth{\hss\box\scratchbox}}%
- \fi}%
- \hskip\afterassignwidth}%
- \fi
- \else
- \hbox
- {\hbox to \beforeassignwidth
- {\hss\box\scratchbox\hskip-\scratchdimen}%
- \hskip\afterassignwidth}%
- \fi}%
- \def\docheckalignment##1#1##2##3\relax
- {\ifx##2\relax
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \else
- \docheckalignment##2##3\relax\relax
- \fi}}
-
-% provide a means to use multiple alignments mixed
-
-\def\pushcharacteralign
- {\ifcsname @cac@\alignmentclass\endcsname\else
- \doglobal\appendetoks\noexpand\do{\alignmentclass}\to\@@characteralignlst
- \fi
- \setxvalue{@cac@\alignmentclass}{\noexpand\do
- {\afterassignwidth}{\beforeassignwidth}{\alignmentcharacter}}}
-
-\def\popcharacteralign
- {\def\do##1##2##3%
- {\def\afterassignwidth {##1}%
- \def\beforeassignwidth {##2}%
- \def\alignmentcharacter{##3}}%
- \executeifdefined{@cac@\alignmentclass}\donothing}
-
-\def\resetcharacteralign % does not work well nested
- {\def\do##1{\global\letbeundefined{@cac@##1}}% global !
- \the\@@characteralignlst
- \global\@@characteralignlst\emptytoks}
-
-\long\def\startcharacteralign#1\stopcharacteralign
- {\bgroup
- \setfirstpasscharacteralign #1%
- \setsecondpasscharacteralign#1%
- \egroup}
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/supp-box.lua b/Master/texmf-dist/tex/context/base/supp-box.lua
index 853de4cbefc..c7382834a61 100644
--- a/Master/texmf-dist/tex/context/base/supp-box.lua
+++ b/Master/texmf-dist/tex/context/base/supp-box.lua
@@ -10,6 +10,9 @@ if not modules then modules = { } end modules ['supp-box'] = {
local report_hyphenation = logs.reporter("languages","hyphenation")
+local tex, node = tex, node
+local context, commands, nodes = context, commands, nodes
+
local nodecodes = nodes.nodecodes
local disc_code = nodecodes.disc
@@ -19,6 +22,7 @@ local glue_code = nodecodes.glue
local glyph_code = nodecodes.glyph
local new_penalty = nodes.pool.penalty
+
local free_node = node.free
local copynodelist = node.copy_list
local copynode = node.copy
@@ -65,10 +69,9 @@ local function applytochars(list,what,nested)
while current do
local id = current.id
if nested and (id == hlist_code or id == vlist_code) then
- context.hbox()
- context.bgroup()
+ context.beginhbox()
applytochars(current.list,what,nested)
- context.egroup()
+ context.endhbox()
elseif id ~= glyph_code then
noaction(copynode(current))
else
@@ -92,8 +95,7 @@ local function applytowords(list,what,nested)
end
noaction(copynode(current))
elseif nested and (id == hlist_code or id == vlist_code) then
- context.hbox()
- context.bgroup()
+ context.beginhbox()
applytowords(current.list,what,nested)
context.egroup()
elseif not start then
diff --git a/Master/texmf-dist/tex/context/base/supp-box.mkiv b/Master/texmf-dist/tex/context/base/supp-box.mkiv
index de1da677a88..25c7764f8d9 100644
--- a/Master/texmf-dist/tex/context/base/supp-box.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-box.mkiv
@@ -75,9 +75,10 @@
%D core macros \type {\strutdepth} and alike, which are
%D values.
-\def\strutdp{\dp\strutbox}
-\def\strutht{\ht\strutbox}
-\def\strutwd{\wd\strutbox}
+\def\strutdp {\dp\strutbox}
+\def\strutht {\ht\strutbox}
+\def\strutwd {\wd\strutbox}
+\def\struthtdp{\dimexpr\ht\strutbox+\dp\strutbox\relax}
%D \macros
%D {voidbox,nextbox}
@@ -1236,7 +1237,7 @@
\ifdone \else
\parfillskip\zeropoint
\rightskip\zeropoint
- \hskip\zeropoint \!!plus 1\!!fill % \hsize
+ \hskip\zeropoint \s!plus 1\s!fill % \hsize
\fi
\unhcopy0}%
\ifdim\ht\nextbox>\strutht
@@ -1423,12 +1424,27 @@
{\setbox\nextbox\hbox
{\strut
\dp\nextbox\zeropoint
- \lower\strutdepth\box\nextbox}%
- \dp\nextbox\strutdepth
- \ht\nextbox\strutheight
+ \lower\strutdp\box\nextbox}%
+ \dp\nextbox\strutdp
+ \ht\nextbox\strutht
\box\nextbox
\egroup}
+%D A variant on this:
+
+\unexpanded\def\inlinedbox
+ {\bgroup
+ \dowithnextbox
+ {\setbox\nextbox\hbox
+ {\lower
+ \dimexpr(\htdp\nextbox-\lineheight)/\plustwo+\strutdp\relax
+ \box\nextbox}%
+ \ht\nextbox\strutht
+ \dp\nextbox\strutdp
+ \box\nextbox
+ \egroup}%
+ \hbox}
+
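+%D For instance (hypothetical content):
+%D
+%D \starttyping
+%D text \inlinedbox{\framed{rather tall material}} text
+%D \stoptyping
+%D
+%D the box is lowered relative to its total size and then forced to strut dimensions,
+%D so that tall content can travel inline.
+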
%D \macros
%D {struttedbox}
%D
@@ -1440,10 +1456,10 @@
\dowithnextboxcs\syst_boxes_struttedbox_finish\hbox}
\def\syst_boxes_struttedbox_finish
- {\ht\nextbox\strutdepth
+ {\dp\nextbox\strutdepth
\ht\nextbox\strutheight
\box\nextbox
- \egroup}%
+ \egroup}
%D \macros
%D {topskippedbox}
@@ -1513,8 +1529,8 @@
\bgroup
\dontcomplain
\forgetall
- \setbox0\hbox{\vrule\!!width \zeropoint#1}%
- \setbox2\vbox{\hrule\!!height\zeropoint#1}%
+ \setbox0\hbox{\vrule\s!width \zeropoint#1}%
+ \setbox2\vbox{\hrule\s!height\zeropoint#1}%
\advance\vsize \ht2
\advance\hsize \wd0
\vbox to \vsize
@@ -1573,9 +1589,9 @@
{\bgroup
\dowithnextbox
{\setlocalhsize
- \setbox\scratchbox\hbox{\vrule\!!width \zeropoint#1}%
+ \setbox\scratchbox\hbox{\vrule\s!width \zeropoint#1}%
\ifzeropt\wd\scratchbox\else\hsize\wd\scratchbox\fi
- \setbox\scratchbox\vbox{\hrule\!!height\zeropoint#1}%
+ \setbox\scratchbox\vbox{\hrule\s!height\zeropoint#1}%
\ifzeropt\ht\scratchbox\else\vsize\ht\scratchbox\fi
\vbox to \vsize{\vss\hbox to \hsize{\hss\box\nextbox\hss}\vss}%
\egroup}%
@@ -1783,7 +1799,7 @@
\ht\hhbox\strutht
\dp\hhbox\strutdp
\ifzeropt\hhboxindent\else % \ifdim\hhboxindent=\zeropoint\else
- \setbox\hhbox\hbox{\hskip-\hhboxindent\box\hhbox}%
+ \setbox\hhbox\hbox{\kern-\hhboxindent\box\hhbox}%
\hhboxindent\zeropoint
\fi
\global\lasthhboxwidth\wd\hhbox
@@ -1791,7 +1807,7 @@
\ifvoid\unhhedbox
\exitloop
\else
- \hskip\zeropoint \!!plus \zeropoint
+ \hskip\zeropoint \s!plus \zeropoint
\fi}%
\egroup}
@@ -1973,7 +1989,7 @@
\scratchwidth\wd\ifdim\wd\nextbox>\wd\processbox\nextbox\else\processbox\fi
\setbox\processbox\hbox to \scratchwidth
{\hbox to \scratchwidth{\hss\box\processbox\hss}%
- \hskip-\scratchwidth
+ \kern-\scratchwidth
\hbox to \scratchwidth{\hss\box\nextbox \hss}}}
\unexpanded\def\startoverlay
@@ -2169,7 +2185,7 @@
\newbox\fakedboxcursor
\setbox\fakedboxcursor\hbox
- {\vrule\!!width\zeropoint\!!height\zeropoint\!!depth\zeropoint}
+ {\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\zeropoint}
\unexpanded\def\boxcursor % overloaded in core-vis
{\iftraceboxplacement
@@ -2178,9 +2194,9 @@
\setbox\scratchbox\hbox to \zeropoint
{\hss
\vrule
- \!!width \scratchdimen
- \!!height\scratchdimen
- \!!depth \scratchdimen
+ \s!width \scratchdimen
+ \s!height\scratchdimen
+ \s!depth \scratchdimen
\hss}%
\smashedbox\scratchbox
\egroup
@@ -2213,7 +2229,7 @@
{\global\boxhdisplacement\boxoffset
\global\boxvdisplacement.5\ht\nextbox
\global\advance\boxvdisplacement-.5\dp\nextbox
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_leftbox_finish
@@ -2221,21 +2237,21 @@
\global\advance\boxhdisplacement-\boxoffset
\global\boxvdisplacement.5\ht\nextbox
\global\advance\boxvdisplacement-.5\dp\nextbox
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_topbox_finish
{\global\boxhdisplacement-.5\wd\nextbox
\global\boxvdisplacement-\dp\nextbox
\global\advance\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_bottombox_finish
{\global\boxhdisplacement-.5\wd\nextbox
\global\boxvdisplacement\ht\nextbox
\global\advance\boxvdisplacement\boxoffset
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_lefttopbox_finish
@@ -2243,14 +2259,14 @@
\global\advance\boxhdisplacement-\boxoffset
\global\boxvdisplacement-\dp\nextbox
\global\advance\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_righttopbox_finish
{\global\boxhdisplacement\boxoffset
\global\boxvdisplacement-\dp\nextbox
\global\advance\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_leftbottombox_finish
@@ -2258,14 +2274,14 @@
\global\advance\boxhdisplacement-\boxoffset
\global\boxvdisplacement\ht\nextbox
\global\advance\boxvdisplacement\boxoffset
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_rightbottombox_finish
{\global\boxhdisplacement\boxoffset
\global\boxvdisplacement\ht\nextbox
\global\advance\boxvdisplacement\boxoffset
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\unexpanded\def\middlebox {\hbox\bgroup\dowithnextboxcs\syst_boxes_middlebox_finish \placedbox}
@@ -2277,27 +2293,27 @@
{\global\boxhdisplacement-.5\wd\nextbox
\global\boxvdisplacement.5\ht\nextbox
\global\advance\boxvdisplacement-.5\dp\nextbox
- \boxcursor\hskip\boxhdisplacement\lower\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\lower\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_baselinemiddlebox_finish
{\global\boxhdisplacement-.5\wd\nextbox
\global\advance\boxhdisplacement-\boxoffset
\global\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_baselineleftbox_finish
{\global\boxhdisplacement-\wd\nextbox
\global\advance\boxhdisplacement-\boxoffset
\global\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
\def\syst_boxes_baselinerightbox_finish
{\global\boxhdisplacement\boxoffset
\global\boxvdisplacement-\boxoffset
- \boxcursor\hskip\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
+ \boxcursor\kern\boxhdisplacement\raise-\boxvdisplacement\box\nextbox
\egroup}
%D \macros
@@ -2431,15 +2447,14 @@
%D \macros
%D {makestrutofbox}
%D
-%D This macro sets the dimensions of a box to those of a
-%D strut.
+%D This macro sets the dimensions of a box to those of a strut.
\def\domakestrutofbox
{\ht\c_boxes_register\strutht
\dp\c_boxes_register\strutdp
\wd\c_boxes_register\zeropoint}
-\unexpanded\def\makestrutofbox
+\unexpanded\def\makestrutofbox % not used
{\afterassignment\domakestrutofbox\c_boxes_register}
%D \macros
@@ -2615,6 +2630,21 @@
% \showboxbreadth\maxdimen
% \showboxdepth \maxdimen
+%D Moved from cont-new:
+%D
+%D \starttyping
+%D \minimalhbox 100pt {test}
+%D \stoptyping
+
+\unexpanded\def\minimalhbox#1#%
+ {\dowithnextbox
+ {\bgroup
+ \setbox\scratchbox\hbox#1{\hss}%
+ \ifdim\wd\nextbox<\wd\scratchbox\wd\nextbox\wd\scratchbox\fi
+ \box\nextbox
+ \egroup}
+ \hbox}
+
\protect \endinput
% a bit of test code:
diff --git a/Master/texmf-dist/tex/context/base/supp-mat.mkiv b/Master/texmf-dist/tex/context/base/supp-mat.mkiv
index bc26611ae05..f77ee3454a3 100644
--- a/Master/texmf-dist/tex/context/base/supp-mat.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-mat.mkiv
@@ -51,32 +51,27 @@
\let\normalstartimath \Ustartmath
\let\normalstopimath \Ustopmath
\let\normalstartdmath \Ustartdisplaymath
-\let\normalstopdmath \Ustopmath
+\let\normalstopdmath \Ustopdisplaymath
+
 \def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does not work well in a let to & (a def works ok)
-\let\normalsuper \Usuperscript % obsolete
-\let\normalsuber \Usubscript % obsolete
-\let\startimath \Ustartmath % obsolete, less safe
-\let\stopimath \Ustopmath % obsolete, less safe
-\let\startdmath \Ustartdisplaymath % obsolete, less safe
-\let\stopdmath \Ustopmath % obsolete, less safe
+\let\normalsuper \Usuperscript % obsolete
+\let\normalsuber \Usubscript % obsolete
+
+\let\startimath \Ustartmath
+\let\stopimath \Ustopmath
+\let\startdmath \Ustartdisplaymath
+\let\stopdmath \Ustopmath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
\unexpanded\def\inlinemath #1{\dontleavehmode\ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\textmath #1{\dontleavehmode\ifmmode#1\else\begingroup\everymath\emptytoks\normalstartimath#1\normalstopimath\endgroup\fi} % \mathsurround\zeropoint
-\let\m\mathematics % unexpanded?
+\let\m\mathematics
\let\stopmathmode\relax
-% \unexpanded\def\startmathmode % cannot be used nested
-% {\relax\ifmmode
-% \let\stopmathmode\relax
-% \else
-% \normalstartimath\let\stopmathmode\normalstopimath
-% \fi}
-
\unexpanded\def\startmathmode % nested variant
{\relax\ifmmode
\begingroup
@@ -96,7 +91,12 @@
%D
%D An example of usage of the following can be found in the MathML module:
-\unexpanded\def\displaymathematics#1{\relax\ifmmode#1\else\dostartformula{}#1\dostopformula\fi}
+\ifdefined\strc_formulas_start_formula \else
+ \def\strc_formulas_start_formula{\normalstartdmath}
+ \def\strc_formulas_stop_formula {\normalstopdmath }
+\fi
+
+\unexpanded\def\displaymathematics#1{\relax\ifmmode#1\else\strc_formulas_start_formula{}#1\strc_formulas_stop_formula\fi}
\unexpanded\def\inlinemathematics {\dontleavehmode\mathematics}
\unexpanded\def\automathematics {\relax\ifhmode\expandafter\inlinemathematics\else\expandafter\displaymathematics\fi}
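+
+% for instance \displaymathematics{...} gives a display formula while \automathematics
+% picks the inline or the display variant depending on the current mode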
@@ -128,7 +128,8 @@
\def\dimensionhalfspace {\,}
\unexpanded\def\dimension#1%
- {\def\dodimensionsignal{\kern\dimensionsignal}%
+ {\pushmacro\dodimensionsignal
+ \gdef\dodimensionsignal{\kern\dimensionsignal}%
\ifdim\lastskip=\zeropoint\relax
\ifdim\lastkern=\zeropoint\relax
\ifmmode
@@ -144,70 +145,16 @@
\else
\unskip\mathematics{\dimensionhalfspace\dimensionhalfspace\dimensiontypeface#1}%
\fi
- \dodimensionsignal}
+ \dodimensionsignal
+ \popmacro\dodimensionsignal}
\unexpanded\def\nodimension#1%
{\unskip#1\global\let\dodimensionsignal\relax}
-% %D \macros
-% %D {super, suber}
-% %D
-% %D \TEX\ uses \type{^} and \type{_} for entering super- and
-% %D subscript mode. We want however a bit more control than
-% %D normally provided, and therefore provide \type {\super}
-% %D and \type{\suber} (\type {\sub} is already taken).
-%
-% \global\let\normalsuper=\normalsuperscript % will become obsolete
-% \global\let\normalsuber=\normalsubscript % will become obsolete
-%
-% \newcount\supersubmode
-%
-% \newevery\everysupersub \EverySuperSub
-%
-% \appendtoks \advance\supersubmode\plusone \to \everysupersub
-%
-% \def\super#1{\normalsuperscript{\the\everysupersub#1}}
-% \def\suber#1{\normalsubscript {\the\everysupersub#1}}
-%
-% %D \macros
-% %D {enablesupsub}
-% %D
-% %D We can let \type {^} and \type {_} act like \type {\super}
-% %D and \type {\suber} by saying \type {\enablesupsub}.
-%
-% \bgroup
-% \catcode\circumflexasciicode\activecatcode
-% \catcode\underscoreasciicode\activecatcode
-% \gdef\enablesupsub
-% {\catcode\circumflexasciicode\activecatcode
-% \catcode\underscoreasciicode\activecatcode
-% \def^{\ifmmode\expandafter\super\else\expandafter\normalsuper\fi}%
-% \def_{\ifmmode\expandafter\suber\else\expandafter\normalsuber\fi}}
-% \egroup
-%
-% %D \macros
-% %D {restoremathstyle}
-% %D
-% %D We can pick up the current math style by calling \type
-% %D {\restoremathstyle}.
-%
-% \def\restoremathstyle
-% {\ifmmode
-% \ifcase\supsubmode
-% \textstyle
-% \or
-% \scriptstyle
-% \else
-% \scriptscriptstyle
-% \fi
-% \fi}
-
%D \macros
%D {cramped}
%D
-%D \TEX provides no primitive to force in cramped math mode. Here is
-%D a macro that does so. It is based on a solution by Don Knuth (\useurl
-%D {http://www.ctan.org/tex-archive/digests/tex-implementors/042}).
+%D Cramped math:
%D
%D \startbuffer
%D \ruledhbox{$\left\{{x^2\over y^2}\right\}$}
@@ -245,25 +192,47 @@
% \ht\scratchbox-\scratchdimen
% \box\scratchbox}
-\def\math_cramped#1#2%
+% mkii:
+%
+% \def\math_cramped#1#2%
+% {\begingroup % added HH, made even more cramped
+% \setbox\scratchbox\hbox
+% {\nulldelimiterspace\zeropoint
+% \normalstartimath\mathsurround\zeropoint#1\radical\zerocount{#2}\normalstopimath}%
+% \ht\scratchbox-\dimexpr
+% \ifx#1\displaystyle
+% \fontdimen\pluseight\textfont\plusthree
+% +.25\fontdimen\plusfive\textfont\plustwo
+% \else
+% 1.25\fontdimen\pluseight
+% \ifx#1\textstyle
+% \textfont
+% \else\ifx#1\scriptstyle
+% \scriptfont
+% \else
+% \scriptscriptfont
+% \fi \fi
+% \plusthree
+% \fi
+% -\ht\scratchbox
+% \relax
+% \box\scratchbox
+% \endgroup}
+
+\def\math_cramped#1#2% we have all in one family
{\begingroup % added HH, made even more cramped
\setbox\scratchbox\hbox
{\nulldelimiterspace\zeropoint
\normalstartimath\mathsurround\zeropoint#1\radical\zerocount{#2}\normalstopimath}%
\ht\scratchbox-\dimexpr
\ifx#1\displaystyle
- \fontdimen8\textfont3
- +.25\fontdimen5\textfont2
+ \fontdimen\pluseight\textfont\plusone
+ + .25\fontdimen\plusfive \textfont\plusone
\else
- 1.25\fontdimen8
- \ifx#1\textstyle
- \textfont
- \else\ifx#1\scriptstyle
- \scriptfont
- \else
- \scriptscriptfont
- \fi \fi
- \plusthree
+ 1.25\fontdimen\pluseight
+ \ifx#1\textstyle \textfont \else
+ \ifx#1\scriptstyle\scriptfont \else
+ \scriptscriptfont\fi\fi \plusone
\fi
-\ht\scratchbox
\relax
@@ -305,7 +274,7 @@
% experiment, not yet to be used
-% \def\displaybreak
+% \unexpanded\def\displaybreak
% {\ifhmode
% \removeunwantedspaces
% \ifcase\raggedstatus\hfill\fi
diff --git a/Master/texmf-dist/tex/context/base/supp-ran.lua b/Master/texmf-dist/tex/context/base/supp-ran.lua
index 75202f696f8..7997db8f648 100644
--- a/Master/texmf-dist/tex/context/base/supp-ran.lua
+++ b/Master/texmf-dist/tex/context/base/supp-ran.lua
@@ -6,32 +6,34 @@ if not modules then modules = { } end modules ['supp-ran'] = {
license = "see context related readme files"
}
--- We cannot ask for the current seed, so we need some messy hack
--- here.
+-- We cannot ask for the current seed, so we need some messy hack here.
local report_system = logs.reporter("system","randomizer")
-commands = commands or { }
-local commands = commands
-
local math = math
+local context, commands = context, commands
+
local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1
-function math.setrandomseedi(n,comment)
+local maxcount = 2^30-1 -- 1073741823
+
+local function setrandomseedi(n,comment)
if not n then
---~ n = 0.5 -- hack
+ -- n = 0.5 -- hack
end
if n <= 1 then
- n = n*1073741823 -- maxcount
+ n = n * maxcount
end
n = round(n)
if false then
report_system("setting seed to %s (%s)",n,comment or "normal")
end
randomseed(n)
- last = random(0,1073741823) -- we need an initial value
+ last = random(0,maxcount) -- we need an initial value
end
+math.setrandomseedi = setrandomseedi
+
function commands.getrandomcounta(min,max)
last = random(min,max)
context(last)
@@ -44,7 +46,7 @@ end
function commands.setrandomseed(n)
last = n
- math.setrandomseedi(n)
+ setrandomseedi(n)
end
function commands.getrandomseed(n)
@@ -54,10 +56,9 @@ end
-- maybe stack
function commands.freezerandomseed(n)
- -- print("<<<<",seed,last)
if seed == false or seed == nil then
seed = last
- math.setrandomseedi(seed,"freeze",seed)
+ setrandomseedi(seed,"freeze",seed)
end
if n then
randomseed(n)
@@ -65,9 +66,8 @@ function commands.freezerandomseed(n)
end
function commands.defrostrandomseed()
- -- print(">>>>",seed,last)
if seed ~= false then
- math.setrandomseedi(seed,"defrost",seed) -- was last (bug)
+ setrandomseedi(seed,"defrost",seed) -- was last (bug)
seed = false
end
end
diff --git a/Master/texmf-dist/tex/context/base/supp-ran.mkiv b/Master/texmf-dist/tex/context/base/supp-ran.mkiv
index 356ed580fa8..f5466a0e1fd 100644
--- a/Master/texmf-dist/tex/context/base/supp-ran.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-ran.mkiv
@@ -18,8 +18,8 @@
\registerctxluafile{supp-ran}{1.001}
-\unexpanded\def\getrandomcount #1#2#3{#1=\ctxcommand{getrandomcounta(\number#2,\number#3)}}
-\unexpanded\def\getrandomdimen #1#2#3{#1=\ctxcommand{getrandomcounta(\number\dimexpr#2,\number\dimexpr#3)}\scaledpoint}
+\unexpanded\def\getrandomcount #1#2#3{#1=\ctxcommand{getrandomcounta(\number#2,\number#3)}\relax}
+\unexpanded\def\getrandomdimen #1#2#3{#1=\ctxcommand{getrandomcounta(\number\dimexpr#2,\number\dimexpr#3)}\scaledpoint\relax}
\unexpanded\def\getrandomnumber#1#2#3{\edef#1{\ctxcommand{getrandomcounta(\number#2,\number#3)}}}
\unexpanded\def\getrandomfloat #1#2#3{\edef#1{\ctxcommand{getrandomcountb(\number\dimexpr#2\points,\number\dimexpr#3\points)}}}
\unexpanded\def\setrandomseed #1{\ctxcommand{setrandomseed(\number#1)}}
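+
+% a usage sketch (\scratchcounter is a scratch count register, \somevalue a hypothetical name):
+%
+% \setrandomseed{1234}
+% \getrandomcount \scratchcounter{1}{10} % the count register gets a value in [1,10]
+% \getrandomnumber\somevalue{1}{100}     % \somevalue expands to a number in [1,100]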
diff --git a/Master/texmf-dist/tex/context/base/supp-vis.mkiv b/Master/texmf-dist/tex/context/base/supp-vis.mkiv
index 1b025316215..de7c4288d49 100644
--- a/Master/texmf-dist/tex/context/base/supp-vis.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-vis.mkiv
@@ -96,8 +96,7 @@
%D off with \type{\dontinterfere}.
\unexpanded\def\dontinterfere % or maybe just forgetall
- {\everypar \emptytoks
- \let\par \endgraf
+ {\reseteverypar
\parindent\zeropoint
\parskip \zeropoint
\leftskip \zeropoint
@@ -186,12 +185,12 @@
%D offers \type{\hfilneg} and \type{\vfilneg}, we define our
%D own alternative double \type{ll}'ed ones.
-\def\hfilneg {\normalhskip\zeropoint\!!plus-1fil\relax}
-\def\vfilneg {\normalvskip\zeropoint\!!plus-1fil\relax}
-\def\hfillneg {\normalhskip\zeropoint\!!plus-1fill\relax}
-\def\vfillneg {\normalvskip\zeropoint\!!plus-1fill\relax}
-\def\hfilllneg{\normalhskip\zeropoint\!!plus-1filll\relax}
-\def\vfilllneg{\normalvskip\zeropoint\!!plus-1filll\relax}
+\def\hfilneg {\normalhskip\zeropoint\s!plus-1\s!fil\relax}
+\def\vfilneg {\normalvskip\zeropoint\s!plus-1\s!fil\relax}
+\def\hfillneg {\normalhskip\zeropoint\s!plus-1\s!fill\relax}
+\def\vfillneg {\normalvskip\zeropoint\s!plus-1\s!fill\relax}
+\def\hfilllneg{\normalhskip\zeropoint\s!plus-1\s!filll\relax}
+\def\vfilllneg{\normalvskip\zeropoint\s!plus-1\s!filll\relax}
%D \macros
%D {normalhss,normalhfil,normalhfill,
@@ -323,14 +322,14 @@
{\dontcomplain
\normaloffinterlineskip
\visualhrule
- \!!height\boxrulewidth
- \iftoprule\else\!!width\zeropoint\fi
+ \s!height\boxrulewidth
+ \iftoprule\else\s!width\zeropoint\fi
\normalvskip-\boxrulewidth
\normalhbox to \boxrulewd
{\visualvrule
- \!!height\boxruleht
- \!!depth \boxruledp
- \!!width \ifleftrule\boxrulewidth\else\zeropoint\fi
+ \s!height\boxruleht
+ \s!depth \boxruledp
+ \s!width \ifleftrule\boxrulewidth\else\zeropoint\fi
\ifdim\boxruleht>\zeropoint \else \baselinerulefalse \fi
\ifdim\boxruledp>\zeropoint \else \baselinerulefalse \fi
\ifbaselinerule
@@ -340,23 +339,23 @@
\cleaders
\ifbaselinefill
\visualhrule
- \!!height\boxrulewidth
+ \s!height\boxrulewidth
\else
\normalhbox
{\normalhskip2.5\boxrulewidth
\visualvrule
- \!!height\boxrulewidth
- \!!width5\boxrulewidth
+ \s!height\boxrulewidth
+ \s!width5\boxrulewidth
\normalhskip2.5\boxrulewidth}%
\fi
\fi
\normalhfill
\visualvrule
- \!!width\ifrightrule\boxrulewidth\else\zeropoint\fi}%
+ \s!width\ifrightrule\boxrulewidth\else\zeropoint\fi}%
\normalvskip-\boxrulewidth
\visualhrule
- \!!height\boxrulewidth
- \ifbottomrule\else\!!width\zeropoint\fi}%
+ \s!height\boxrulewidth
+ \ifbottomrule\else\s!width\zeropoint\fi}%
\wd#1\zeropoint
\setbox#1=\ifhbox#1\normalhbox\else\normalvbox\fi
{\normalhbox
@@ -726,9 +725,9 @@
\def\setvisiblehfilbox#1\to#2#3#4%
{\setbox#1\normalhbox
{\visualvrule
- \!!width #2\testrulewidth
- \!!height#3\testrulewidth
- \!!depth #4\testrulewidth}%
+ \s!width #2\testrulewidth
+ \s!height#3\testrulewidth
+ \s!depth #4\testrulewidth}%
\smashbox#1}
\def\doruledhfiller#1#2#3#4%
@@ -827,9 +826,9 @@
\def\setvisiblevfilbox#1\to#2#3#4%
{\setbox#1\normalhbox
{\visualvrule
- \!!width #2\testrulewidth
- \!!height#3\testrulewidth
- \!!depth #4\testrulewidth}%
+ \s!width #2\testrulewidth
+ \s!height#3\testrulewidth
+ \s!depth #4\testrulewidth}%
\smashbox#1}%
\def\doruledvfiller#1#2#3%
@@ -940,22 +939,22 @@
\setbox0\normalhbox
{\normalhskip-\testrulewidth
\visualvrule
- \!!width4\testrulewidth
- \!!height16\testrulewidth
- \!!depth16\testrulewidth}%
+ \s!width4\testrulewidth
+ \s!height16\testrulewidth
+ \s!depth16\testrulewidth}%
\else
\setbox0\normalhbox to \ifnegative-\fi\scratchskip
{\visualvrule
- \!!width2\testrulewidth
- \ifnegative\!!depth\else\!!height\fi16\testrulewidth
+ \s!width2\testrulewidth
+ \ifnegative\s!depth\else\s!height\fi16\testrulewidth
\cleaders
\visualhrule
\ifnegative
- \!!depth2\testrulewidth
- \!!height\zeropoint
+ \s!depth2\testrulewidth
+ \s!height\zeropoint
\else
- \!!height2\testrulewidth
- \!!depth\zeropoint
+ \s!height2\testrulewidth
+ \s!depth\zeropoint
\fi
\normalhfill
\ifflexible
@@ -965,15 +964,15 @@
\normalhbox
{\normalhskip 2\testrulewidth
\visualvrule
- \!!width2\testrulewidth
- \!!height\ifnegative-7\else9\fi\testrulewidth
- \!!depth\ifnegative9\else-7\fi\testrulewidth
+ \s!width2\testrulewidth
+ \s!height\ifnegative-7\else9\fi\testrulewidth
+ \s!depth\ifnegative9\else-7\fi\testrulewidth
\normalhskip 2\testrulewidth}%
\normalhfill
\fi
\visualvrule
- \!!width2\testrulewidth
- \ifnegative\!!depth\else\!!height\fi16\testrulewidth}%
+ \s!width2\testrulewidth
+ \ifnegative\s!depth\else\s!height\fi16\testrulewidth}%
\setbox0\normalhbox
{\ifnegative\else\normalhskip-\scratchskip\fi
\box0}%
@@ -1076,14 +1075,14 @@
\ifzero
\setbox0\normalvcue
{\visualvrule
- \!!width32\testrulewidth
- \!!height2\testrulewidth
- \!!depth2\testrulewidth}%
+ \s!width32\testrulewidth
+ \s!height2\testrulewidth
+ \s!depth2\testrulewidth}%
\else
\setbox0\normalvbox to \ifnegative-\fi\scratchskip
{\visualhrule
- \!!width16\testrulewidth
- \!!height2\testrulewidth
+ \s!width16\testrulewidth
+ \s!height2\testrulewidth
\ifflexible
\cleaders
\normalhbox to 16\testrulewidth
@@ -1091,8 +1090,8 @@
\normalvbox
{\normalvskip 2\testrulewidth
\visualhrule
- \!!width2\testrulewidth
- \!!height2\testrulewidth
+ \s!width2\testrulewidth
+ \s!height2\testrulewidth
\normalvskip 2\testrulewidth}%
\normalhss}%
\normalvfill
@@ -1100,12 +1099,12 @@
\normalvfill
\fi
\visualhrule
- \!!width16\testrulewidth
- \!!height2\testrulewidth}%
+ \s!width16\testrulewidth
+ \s!height2\testrulewidth}%
\setbox2\normalvbox to \ht0
{\visualhrule
- \!!width2\testrulewidth
- \!!height\ht0}%
+ \s!width2\testrulewidth
+ \s!height\ht0}%
\ifnegative
\ht0\zeropoint
\setbox0\normalhbox
@@ -1192,25 +1191,25 @@
\ifzero
\setbox0\ruledhbox to 8\testrulewidth
{\visualvrule
- \!!width\zeropoint
- \!!height16\testrulewidth
- \!!depth16\testrulewidth}%
+ \s!width\zeropoint
+ \s!height16\testrulewidth
+ \s!depth16\testrulewidth}%
\setbox0\normalhbox
{\normalhskip-4\testrulewidth\box0}%
\else
\setbox0\ruledhbox to \ifnegative-\fi\scratchskip
{\visualvrule
- \!!width\zeropoint
- \ifnegative\!!depth\else\!!height\fi16\testrulewidth
+ \s!width\zeropoint
+ \ifnegative\s!depth\else\s!height\fi16\testrulewidth
\ifflexible
\normalhskip2\testrulewidth
\cleaders
\normalhbox
{\normalhskip 2\testrulewidth
\visualvrule
- \!!width2\testrulewidth
- \!!height\ifnegative-7\else9\fi\testrulewidth
- \!!depth\ifnegative9\else-7\fi\testrulewidth
+ \s!width2\testrulewidth
+ \s!height\ifnegative-7\else9\fi\testrulewidth
+ \s!depth\ifnegative9\else-7\fi\testrulewidth
\normalhskip 2\testrulewidth}%
\normalhfill
\else
@@ -1268,9 +1267,9 @@
\ifzero
\setbox0\ruledhbox to 32\testrulewidth
{\visualvrule
- \!!width\zeropoint
- \!!height4\testrulewidth
- \!!depth4\testrulewidth}%
+ \s!width\zeropoint
+ \s!height4\testrulewidth
+ \s!depth4\testrulewidth}%
\else
\setbox0\ruledvbox to \ifnegative-\fi\scratchskip
{\hsize16\testrulewidth
@@ -1281,15 +1280,15 @@
\normalvbox
{\normalvskip 2\testrulewidth
\visualhrule
- \!!width2\testrulewidth
- \!!height2\testrulewidth
+ \s!width2\testrulewidth
+ \s!height2\testrulewidth
\normalvskip 2\testrulewidth}%
\normalhss}%
\normalvfill
\else
\visualvrule
- \!!width\zeropoint
- \!!height\ifnegative-\fi\scratchskip
+ \s!width\zeropoint
+ \s!height\ifnegative-\fi\scratchskip
\normalhfill
\fi}
\fi
@@ -1363,7 +1362,7 @@
\def\supp_visualizers_hglue_indeed
{\leavevmode
\scratchcounter\spacefactor
- \visualvrule\!!width\zeropoint
+ \visualvrule\s!width\zeropoint
\normalpenalty\plustenthousand
\ruledhkern\normalhskip\scratchskip
\spacefactor\scratchcounter
@@ -1393,7 +1392,7 @@
\def\supp_visualizers_vglue_indeed
{\endgraf % \par
\nextdepth\prevdepth
- \visualhrule\!!height\zeropoint
+ \visualhrule\s!height\zeropoint
\normalpenalty\plustenthousand
\ruledvkern\normalvskip\scratchskip
\prevdepth\nextdepth
@@ -1419,26 +1418,26 @@
{$\normalmkern\ifnegative-\fi\scratchskip$}%
\setbox\boxrulescratchbox\normalhbox to \wd\boxrulescratchbox
{\visualvrule
- \!!height16\testrulewidth
- \!!depth 16\testrulewidth
- \!!width \testrulewidth
+ \s!height16\testrulewidth
+ \s!depth 16\testrulewidth
+ \s!width \testrulewidth
\leaders
\visualhrule
- \!!height\ifpositive 16\else-14\fi\testrulewidth
- \!!depth \ifpositive-14\else 16\fi\testrulewidth
+ \s!height\ifpositive 16\else-14\fi\testrulewidth
+ \s!depth \ifpositive-14\else 16\fi\testrulewidth
\normalhfill
\ifflexible
\normalhskip-\wd\boxrulescratchbox
\leaders
\visualhrule
- \!!height\testrulewidth
- \!!depth \testrulewidth
+ \s!height\testrulewidth
+ \s!depth \testrulewidth
\normalhfill
\fi
\visualvrule
- \!!height16\testrulewidth
- \!!depth 16\testrulewidth
- \!!width \testrulewidth}%
+ \s!height16\testrulewidth
+ \s!depth 16\testrulewidth
+ \s!width \testrulewidth}%
\smashbox0%
\ifnegative
#1\scratchskip
@@ -1569,19 +1568,19 @@
\normalhskip-\scratchdimen
\normalhskip-2\testrulewidth
\visualvrule
- \!!width 2\testrulewidth
- \!!height#3\testrulewidth
- \!!depth #4\testrulewidth
+ \s!width 2\testrulewidth
+ \s!height#3\testrulewidth
+ \s!depth #4\testrulewidth
\fi
\visualvrule
- \!!width \scratchdimen
- \!!height#5\testrulewidth
- \!!depth #6\testrulewidth
+ \s!width \scratchdimen
+ \s!height#5\testrulewidth
+ \s!depth #6\testrulewidth
\ifnum#2>\zerocount
\visualvrule
- \!!width 2\testrulewidth
- \!!height#3\testrulewidth
- \!!depth #4\testrulewidth
+ \s!width 2\testrulewidth
+ \s!height#3\testrulewidth
+ \s!depth #4\testrulewidth
\fi
\fi}%
\smashbox#1}
@@ -1600,8 +1599,8 @@
\setbox0\ruledhbox to 8\testrulewidth
{\ifnegative\else\normalhss\fi
\visualvrule
- \!!depth8\testrulewidth
- \!!width\ifzero\zeropoint\else4\testrulewidth\fi
+ \s!depth8\testrulewidth
+ \s!width\ifzero\zeropoint\else4\testrulewidth\fi
\ifpositive\else\normalhss\fi}%
\supp_visualizers_penalty_box{2}{\scratchcounter}{0}{8}{-3.5}{4.5}%
\normalpenalty\plustenthousand
@@ -1651,13 +1650,13 @@
\boxrulewidth\testrulewidth
\setbox0\ruledhbox
{\visualvrule
- \!!height4\testrulewidth
- \!!depth 4\testrulewidth
- \!!width \zeropoint
+ \s!height4\testrulewidth
+ \s!depth 4\testrulewidth
+ \s!width \zeropoint
\visualvrule
- \!!height\ifnegative.5\else4\fi\testrulewidth
+ \s!height\ifnegative.5\else4\fi\testrulewidth
\!!dept \ifpositive.5\else4\fi\testrulewidth
- \!!width 8\testrulewidth}%
+ \s!width 8\testrulewidth}%
\supp_visualizers_penalty_box{2}{\scratchcounter}{4}{4}{.5}{.5}%
\setbox0\normalhbox
{\normalhskip-4\testrulewidth
diff --git a/Master/texmf-dist/tex/context/base/symb-imp-cc.mkiv b/Master/texmf-dist/tex/context/base/symb-imp-cc.mkiv
new file mode 100644
index 00000000000..742a19f9f74
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/symb-imp-cc.mkiv
@@ -0,0 +1,52 @@
+%D \module
+%D [ file=symb-imp-cc,
+%D version=2013.03.22,
+%D title=\CONTEXT\ Symbol Libraries,
+%D subtitle=Creative Commons,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\definefontfeature
+ [cc-icons]
+ [mode=base,
+ goodies=cc-icons,
+ unicoding=yes]
+
+\definefontsynonym
+ [creativecommons]
+ [cc-icons*cc-icons]
+
+\startsymbolset[cc]
+ \definesymbol [sa] [\getnamedglyphdirect{creativecommons}{sa}]
+ \definesymbol [by] [\getnamedglyphdirect{creativecommons}{by}]
+ \definesymbol [cc] [\getnamedglyphdirect{creativecommons}{cc}]
+ \definesymbol [nd] [\getnamedglyphdirect{creativecommons}{nd}]
+ \definesymbol [nc] [\getnamedglyphdirect{creativecommons}{nc}]
+ \definesymbol [sampling] [\getnamedglyphdirect{creativecommons}{sampling}]
+ \definesymbol [share] [\getnamedglyphdirect{creativecommons}{share}]
+ \definesymbol [remix] [\getnamedglyphdirect{creativecommons}{remix}]
+ \definesymbol [logo] [\getnamedglyphdirect{creativecommons}{logo}]
+
+ \definesymbol [cc-by-sa-nc] [\dontleavehmode\lower.15ex\hbox\bgroup
+ \getnamedglyphdirect{creativecommons}{cc}\enspace
+ \getnamedglyphdirect{creativecommons}{by}\enspace
+ \getnamedglyphdirect{creativecommons}{sa}\enspace
+ \getnamedglyphdirect{creativecommons}{nc}%
+ \egroup]
+
+\stopsymbolset
+
+\continueifinputfile{symb-imp-cc.mkiv}
+
+\starttext
+
+ \showsymbolset[cc]
+
+ \symbol[cc][cc-by-sa-nc]
+
+\stoptext
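For orientation, a minimal usage sketch (not part of the patch; it assumes the cc-icons goodies file and font ship with this import): once the module is loaded with \usesymbols, the symbols can be placed like any other symbol set.

\starttyping
\usesymbols[cc]        % loads symb-imp-cc.mkiv via the symbol library mechanism

\starttext
    \symbol[cc][by]\quad
    \symbol[cc][sa]\quad
    \symbol[cc][cc-by-sa-nc]
\stoptext
\stoptyping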
diff --git a/Master/texmf-dist/tex/context/base/symb-ini.lua b/Master/texmf-dist/tex/context/base/symb-ini.lua
index 559b032e490..deeef667a67 100644
--- a/Master/texmf-dist/tex/context/base/symb-ini.lua
+++ b/Master/texmf-dist/tex/context/base/symb-ini.lua
@@ -9,8 +9,10 @@ if not modules then modules = { } end modules ['symb-ini'] = {
local variables = interfaces.variables
-fonts.symbols = fonts.symbols or { }
-local symbols = fonts.symbols
+fonts = fonts or { } -- brrrr
+
+local symbols = fonts.symbols or { }
+fonts.symbols = symbols
local report_symbols = logs.reporter ("fonts","symbols")
local status_symbols = logs.messenger("fonts","symbols")
@@ -22,13 +24,13 @@ local function action(name,foundname)
-- context.startnointerference()
context.startreadingfile()
context.input(foundname)
- status_symbols("loaded: library '%s'",name)
+ status_symbols("library %a loaded",name)
context.stopreadingfile()
-- context.stopnointerference()
end
local function failure(name)
- report_symbols("unknown: library '%s'",name)
+ report_symbols("library %a is unknown",name)
end
function symbols.uselibrary(name)
diff --git a/Master/texmf-dist/tex/context/base/symb-run.mkiv b/Master/texmf-dist/tex/context/base/symb-run.mkiv
index 4bd2ceccae4..3d92c44166f 100644
--- a/Master/texmf-dist/tex/context/base/symb-run.mkiv
+++ b/Master/texmf-dist/tex/context/base/symb-run.mkiv
@@ -44,10 +44,10 @@
\unprotect
-\gdef\doshowsymbolset[#1]%
- {\ctxcommand{showsymbolset("#1","\symbolset{#1}")}}
+\unexpanded\gdef\showsymbolset
+ {\dosingleargument\symb_show_set}
-\gdef\showsymbolset
- {\dosingleargument\doshowsymbolset}
+\gdef\symb_show_set[#1]%
+ {\ctxcommand{showsymbolset("#1","\symbolset{#1}")}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/syst-aux.lua b/Master/texmf-dist/tex/context/base/syst-aux.lua
index 7fd25d6ab0a..b0fb8483b35 100644
--- a/Master/texmf-dist/tex/context/base/syst-aux.lua
+++ b/Master/texmf-dist/tex/context/base/syst-aux.lua
@@ -6,19 +6,36 @@ if not modules then modules = { } end modules ['syst-aux'] = {
license = "see context related readme files"
}
+-- slower than lpeg:
+--
+-- utfmatch(str,"(.?)(.*)$")
+-- utf.sub(str,1,1)
+
+local commands, context = commands, context
+
local settings_to_array = utilities.parsers.settings_to_array
-local concat = table.concat
+local format = string.format
+local utfsub = utf.sub
+local P, C, Carg, lpegmatch, utf8char = lpeg.P, lpeg.C, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char
local setvalue = context.setvalue
+local pattern = C(utf8char^-1) * C(P(1)^0)
+
function commands.getfirstcharacter(str)
- local first, rest = utf.match(str,"(.?)(.*)$")
+ local first, rest = lpegmatch(pattern,str)
setvalue("firstcharacter",first)
setvalue("remainingcharacters",rest)
end
+local pattern = C(utf8char^-1)
+
function commands.doiffirstcharelse(chr,str)
- commands.doifelse(utf.sub(str,1,1) == chr)
+ commands.doifelse(lpegmatch(pattern,str) == chr)
+end
+
+function commands.getsubstring(str,first,last)
+ context(utfsub(str,tonumber(first),tonumber(last)))
end
-- function commands.addtocommalist(list,item)
@@ -50,3 +67,14 @@ end
-- end
-- end
-- end
+
+local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1)
+
+-- commands.percentageof("10%",65536*10)
+
+function commands.percentageof(str,dim)
+ context(lpegmatch(pattern,str,1,dim) or str)
+end
+
+-- \gdef\setpercentdimen#1#2%
+-- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
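A worked example of what the new percentage pattern computes (a sketch, not part of the patch; the figures follow from 1pt = 65536sp): for a 100pt dimension, \number passes 6553600 scaled points to Lua; ten percent of that is 655360sp, i.e. 10pt. Strings without a trailing percent sign fall through unchanged.

\starttyping
\scratchdimen=100pt
\setpercentdimen\scratchdimen{10\letterpercent}   % percentageof -> 655360sp = 10pt
\setpercentdimen\scratchdimen{5pt}                % no percent sign: the string is used as is
\stoptyping

Here \setpercentdimen is the mkiv wrapper added later in this same commit.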
diff --git a/Master/texmf-dist/tex/context/base/syst-aux.mkiv b/Master/texmf-dist/tex/context/base/syst-aux.mkiv
index 0e43a6202bd..a5551b954d9 100644
--- a/Master/texmf-dist/tex/context/base/syst-aux.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-aux.mkiv
@@ -11,10 +11,6 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Some of the macros will move to syst-obs as they might become
-%D obsolete once we've redone the bibliography module. Of course
-%D the handy helpers will stay.
-%D
%D There are some references to \LUA\ variants here but these concern
%D (often old) experiments, moved from local test modules to here,
%D cleaned up, but not really used. After all it's not that urgent
@@ -38,13 +34,13 @@
%D is used in \CONTEXT\ and therefore we might also assume that
%D some basic functionality is available.
%D
-%D Some of the macros here are used in the bibliography module. They
-%D will be moved to a separate syst module some once the bib module
-%D is made \MKIV.
-%D
%D The original files contain previous implementations and notes about
%D performance. This file will be stripped down in due time.
+%D Some of the macros here were only used in the bibliography module. They
+%D have been moved to a separate syst module since the bib module is no
+%D longer using them. Some more will go away.
+
\unprotect
%D \macros
@@ -62,6 +58,36 @@
\let\unexpanded\normalprotected
+% %D \macros
+% %D {expunded}
+% %D
+% %D \unexpanded\edef\TestA{zzz}
+% %D \edef\TestB{zzz}
+% %D
+% %D \doifelse {\TestA} {\TestB} {WRONG} {OKAY} \par
+% %D \doifelse {\TestA} {\expunded\TestB} {WRONG} {OKAY} \par
+% %D \doifelse {\expunded\TestA} {\TestB} {OKAY} {WRONG} \par
+% %D \doifelse {\expunded\TestA} {\expunded\TestB} {OKAY} {WRONG} \par
+
+% %def\expunded#1{\normalexpanded\expandafter{#1}}
+% \def\expunded#1{\expandafter\empty#1} % used within an edef anyway
+
+
+%D As we don't have namespace definers yet, we use a special one:
+
+\newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
+
+\def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
+%def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+
+\unexpanded\def\installsystemnamespace#1% maybe move this to syst-ini
+ {\ifcsname ??#1\endcsname
+ \writestatus\m!system{duplicate system namespace '#1'}\wait
+ \else
+ \global\advance\c_syst_helpers_n_of_namespaces\plusone
+ \expandafter\edef\csname ??#1\endcsname{\v_interfaces_prefix_template_system}%
+ \fi}
+
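A small sketch of how such a namespace is meant to be used (the name 'mystuff' is purely illustrative; the real calls in this file use names like nextcommalevel):

\starttyping
\installsystemnamespace{mystuff}   % \??mystuff now expands to a unique numeric prefix
\expandafter\def\csname\??mystuff whatever\endcsname{found}
\csname\??mystuff whatever\endcsname               % yields: found
\stoptyping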
%D \macros
%D {normalspace}
%D
@@ -150,34 +176,35 @@
\let\@NX \noexpand
\def\@EAEA {\expandafter\expandafter} % can often be avoided
-%D Sometimes we pass macros as arguments to commands that
-%D don't expand them before interpretation. Such commands can
-%D be enclosed with \type{\expanded}, like:
+%D Sometimes we pass macros as arguments to commands that don't expand them
+%D before interpretation. Such commands can be enclosed with \type {\expanded},
+%D like:
%D
%D \starttyping
%D \expanded{\setupsomething[\alfa]}
%D \stoptyping
%D
-%D Such situations occur for instance when \type{\alfa} is a
-%D commalist or when data stored in macros is fed to index of
-%D list commands. If needed, one should use \type{\noexpand}
-%D inside the argument. Later on we will meet some more clever
-%D alternatives to this command. Beware, only the simple one
-%D has \type {\noexpand} before its argument.
+%D Such situations occur for instance when \type{\alfa} is a commalist or when data
+%D stored in macros is fed to index or list commands. If needed, one should use
+%D \type{\noexpand} inside the argument. Later on we will meet some more clever
+%D alternatives to this command. Beware, only the simple one has \type {\noexpand}
+%D before its argument.
-\let\@@expanded\empty % always long and global (less restores)
+\let\m_syst_helpers_expanded\empty
\unexpanded\def\expanded#1%
- {\xdef\@@expanded{\noexpand#1}\@@expanded}
+ {\xdef\m_syst_helpers_expanded{\noexpand#1}\m_syst_helpers_expanded}
-\unexpanded\def\startexpanded#1\stopexpanded % see x-fo for example
- {\xdef\@@expanded{#1}\@@expanded}
+\unexpanded\def\startexpanded#1\stopexpanded
+ {\xdef\m_syst_helpers_expanded{#1}\m_syst_helpers_expanded}
\let\stopexpanded\relax
-%D Recent \TEX's have a primitive \expanded
+%D Recent \TEX\ engines have a primitive \type {\expanded} and we will use that when
+%D possible. After all, we can now define macros that are not expandable.
-% not yet as we need to adapt ##'s in calls
+% We cannot use the next variant yet, as we first need to adapt \type {##}'s
+% in callers:
%
% \def\expanded#1%
% {\normalexpanded{\noexpand#1}}
@@ -188,8 +215,8 @@
%D \macros
%D {gobbleoneargument,gobble...arguments}
%D
-%D The next set of macros just do nothing, except that they
-%D get rid of a number of arguments.
+%D The next set of macros just do nothing, except that they get rid of a number of
+%D arguments.
\def\gobbleoneargument #1{}
\def\gobbletwoarguments #1#2{}
@@ -208,6 +235,18 @@
\def\gobblefouroptionals [#1][#2][#3][#4]{}
\def\gobblefiveoptionals [#1][#2][#3][#4][#5]{}
+%D Reserved macros for tests:
+
+\let\donothing\empty
+
+\let\m_syst_string_one \empty
+\let\m_syst_string_two \empty
+\let\m_syst_string_three\empty
+\let\m_syst_string_four \empty
+
+\let\m_syst_action_yes \empty
+\let\m_syst_action_nop \empty
+
%D \macros
%D {doifnextcharelse}
%D
@@ -235,8 +274,8 @@
\unexpanded\def\doifnextcharelse#1#2#3% #1 should not be {} !
{\let\charactertoken=#1% = needed here
- \def\!!stringa{#2}%
- \def\!!stringb{#3}%
+ \def\m_syst_action_yes{#2}%
+ \def\m_syst_action_nop{#3}%
\futurelet\nexttoken\syst_helpers_inspect_next_character}
\def\syst_helpers_inspect_next_character
@@ -248,9 +287,9 @@
\def\syst_helpers_inspect_next_character_indeed
{\ifx\nexttoken\charactertoken
- \expandafter\!!stringa
+ \expandafter\m_syst_action_yes
\else
- \expandafter\!!stringb
+ \expandafter\m_syst_action_nop
\fi}
%D Because we will mostly use this macro for testing if the next
@@ -274,14 +313,14 @@
\let\syst_helpers_next_optional_character_token=[
\unexpanded\def\doifnextoptionalelse#1#2%
- {\def\syst_helpers_next_optional_command_yes{#1}%
- \def\syst_helpers_next_optional_command_nop{#2}%
+ {\def\m_syst_action_yes{#1}%
+ \def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
\unexpanded\def\doifnextoptionalcselse#1#2% \cs \cs (upto 10% faster)
- {\let\syst_helpers_next_optional_command_yes#1%
- \let\syst_helpers_next_optional_command_nop#2%
+ {\let\m_syst_action_yes#1%
+ \let\m_syst_action_nop#2%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
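A typical application sketch (\MyCommand and its helpers are hypothetical names, not part of the patch): a command that optionally takes a bracketed argument peeks at the next token and dispatches accordingly.

\starttyping
\unexpanded\def\MyCommand
  {\doifnextoptionalelse{\MyCommandWith}{\MyCommandWithout}}

\def\MyCommandWith[#1]{(with #1)}
\def\MyCommandWithout  {(without)}

\MyCommand[alpha]   % gives (with alpha)
\MyCommand beta     % gives (without)beta
\stoptyping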
@@ -294,16 +333,16 @@
\def\syst_helpers_inspect_next_optional_character_indeed
{\ifx\nexttoken\syst_helpers_next_optional_character_token
- \expandafter\syst_helpers_next_optional_command_yes
+ \expandafter\m_syst_action_yes
\else
- \expandafter\syst_helpers_next_optional_command_nop
+ \expandafter\m_syst_action_nop
\fi}
\let\syst_helpers_next_bgroup_character_token\bgroup
\unexpanded\def\doifnextbgroupelse#1#2%
- {\def\syst_helpers_next_bgroup_command_yes{#1}%
- \def\syst_helpers_next_bgroup_command_nop{#2}%
+ {\def\m_syst_action_yes{#1}%
+ \def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
@@ -316,16 +355,16 @@
\def\syst_helpers_inspect_next_bgroup_character_indeed
{\ifx\nexttoken\syst_helpers_next_bgroup_character_token
- \expandafter\syst_helpers_next_bgroup_command_yes
+ \expandafter\m_syst_action_yes
\else
- \expandafter\syst_helpers_next_bgroup_command_nop
+ \expandafter\m_syst_action_nop
\fi}
\let\syst_helpers_next_parenthesis_character_token(
\unexpanded\def\doifnextparenthesiselse#1#2%
- {\def\syst_helpers_next_parenthesis_command_yes{#1}%
- \def\syst_helpers_next_parenthesis_command_nop{#2}%
+ {\def\m_syst_action_yes{#1}%
+ \def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_parenthesis_character}
@@ -338,44 +377,39 @@
\def\syst_helpers_inspect_next_parenthesis_character_indeed
{\ifx\nexttoken\syst_helpers_next_parenthesis_character_token
- \expandafter\syst_helpers_next_parenthesis_command_yes
+ \expandafter\m_syst_action_yes
\else
- \expandafter\syst_helpers_next_parenthesis_command_nop
+ \expandafter\m_syst_action_nop
\fi}
%D The next one is handy in predictable situations:
\unexpanded\def\doiffastoptionalcheckelse#1#2%
- {\def\syst_helpers_next_optional_command_yes{#1}%
- \def\syst_helpers_next_optional_command_nop{#2}%
+ {\def\m_syst_action_yes{#1}%
+ \def\m_syst_action_nop{#2}%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
\unexpanded\def\doiffastoptionalcheckcselse#1#2% \cs \cs
- {\let\syst_helpers_next_optional_command_yes#1%
- \let\syst_helpers_next_optional_command_nop#2%
+ {\let\m_syst_action_yes#1%
+ \let\m_syst_action_nop#2%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
\def\syst_helpers_do_if_fast_optional_check_else
{\ifx\nexttoken\syst_helpers_next_optional_character_token
- \expandafter\syst_helpers_next_optional_command_yes
+ \expandafter\m_syst_action_yes
\else
- \expandafter\syst_helpers_next_optional_command_nop
+ \expandafter\m_syst_action_nop
\fi}
-%D This macro uses some auxiliary macros. Although we were able
-%D to program quite complicated things, I only understood these
-%D after rereading the \TEX book. The trick is in using a
-%D command with a one character name. Such commands differ from
-%D the longer ones in the fact that trailing spaces are {\em
-%D not} skipped. This enables us to indirectly define a long
-%D named macro that gobbles a space.
-%D
-%D In the first line we define \type{\blankspace}. Next we
-%D make \type{\:} equivalent to \type{\reinspect...}. This
-%D one||character command is expanded before the next
-%D \type{\def} comes into action. This way the space after
-%D \type{\:} becomes a delimiter of the longer named
-%D \type{\reinspectnextcharacter}.
+%D This macro uses some auxiliary macros. Although we were able to program quite
+%D complicated things, I only understood these after rereading the \TEX book. The
+%D trick is in using a command with a one character name. Such commands differ from
+%D the longer ones in the fact that trailing spaces are {\em not} skipped. This
+%D enables us to indirectly define a long named macro that gobbles a space. In the
+%D first line we define \type {\blankspace}. Next we make \type {\:} equivalent to
+%D \type {\reinspect...}. This one||character command is expanded before the next
+%D \type {\def} comes into action. This way the space after \type {\:} becomes a
+%D delimiter of the longer named \type {\reinspectnextcharacter}.
% try: \expandafter\def\firstofoneargument{\syst_helpers_reinspect_next_character} {...}
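A tiny illustration of the one-character trick described above (hypothetical definitions, just to show the difference in tokenization):

\starttyping
\def\foo {A}   % control word  : the space after \foo is skipped, so \foo has no delimiter
\def\: {B}     % control symbol: the space is kept and becomes a delimiter of \:
\stoptyping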
@@ -402,10 +436,9 @@
%D letvalue,letgvalue,getvalue,resetvalue,
%D undefinevalue,ignorevalue}
%D
-%D \TEX's primitive \type{\csname} can be used to construct
-%D all kind of commands that cannot be defined with
-%D \type{\def} and \type{\let}. Every macro programmer sooner
-%D or later wants macros like these.
+%D \TEX's primitive \type {\csname} can be used to construct all kinds of commands
+%D that cannot be defined with \type {\def} and \type {\let}. Every macro programmer
+%D sooner or later wants macros like these.
%D
%D \starttyping
%D \setvalue {name}{...} = \def\name{...}
@@ -418,9 +451,8 @@
%D \resetvalue {name} = \def\name{}
%D \stoptyping
%D
-%D As we will see, \CONTEXT\ uses these commands many times,
-%D which is mainly due to its object oriented and parameter
-%D driven character.
+%D As we will see, \CONTEXT\ uses these commands many times, which is mainly due to
+%D its object oriented and parameter driven character.
\def\setvalue #1{\expandafter \def\csname#1\endcsname}
\def\setgvalue #1{\expandafter\gdef\csname#1\endcsname}
@@ -441,22 +473,20 @@
%D \macros
%D {globallet,glet}
%D
-%D In \CONTEXT\ of May 2000 using \type {\globallet}
-%D instead of the two tokens will save us some
-%D $300\times4=1200$ bytes of format file on a 32~bit
-%D system. So:
+%D In \CONTEXT\ of May 2000 using \type {\globallet} instead of the two
+%D tokens will save us some $300\times4=1200$ bytes of format file on a 32~bit
+%D system. Not that it matters much today. This shortcut is already defined:
-\def\globallet{\global\let} \let\glet\globallet
+\unexpanded\def\glet{\global\let} \let\globallet\glet
%D \macros
%D {doifundefined,doifdefined,
%D doifundefinedelse,doifdefinedelse,
%D doifalldefinedelse}
%D
-%D The standard way of testing if a macro is defined is
-%D comparing its meaning with another undefined one, usually
-%D \type{\undefined}. To garantee correct working of the next
-%D set of macros, \type{\undefined} may never be defined!
+%D The standard way of testing if a macro is defined is comparing its meaning with
+%D another undefined one, usually \type{\undefined}. To guarantee correct working of
+%D the next set of macros, \type{\undefined} may never be defined!
%D
%D \starttyping
%D \doifundefined {string} {...}
@@ -466,13 +496,12 @@
%D \doifalldefinedelse {commalist} {then ...} {else ...}
%D \stoptyping
%D
-%D Every macroname that \TEX\ builds gets an entry in the hash
-%D table, which is of limited size. It is expected that e-\TeX\
-%D will offer a less memory||consuming alternative.
+%D Every macro name that \TEX\ builds gets an entry in the hash table, which is of
+%D limited size. It is expected that \ETEX\ will offer a less memory||consuming
+%D alternative.
-%D Although it will probably never be a big problem, it is good
-%D to be aware of the difference between testing on a macro
-%D name to be build by using \type{\csname} and
+%D Although it will probably never be a big problem, it is good to be aware of the
+%D difference between testing on a macro name to be built by using \type{\csname} and
%D \type{\endcsname} and testing the \type{\name} directly.
%D
%D \starttyping
@@ -481,11 +510,6 @@
%D \ifundefined\NameB ... \else ... \fi
%D \stoptyping
-% \def\ifundefined#1% obsolete
-% {\unless\ifcsname#1\endcsname}
-%
-% use a real if like \ifcsname#1\endcsname\else instead
-
\suppressifcsnameerror\plusone
\def\doifundefinedelse#1%
@@ -511,12 +535,10 @@
%D \macros
%D {letbeundefined}
%D
-%D Testing for being undefined comes down to testing on \type
-%D {\relax} when we use \type {\csname}, but when using \type
-%D {\ifx}, we test on being \type {\undefined}! In \ETEX\ we
-%D have \type {\ifcsname} and that way of testing on existance
-%D is not the same as the one described here. Therefore we
-%D introduce:
+%D Testing for being undefined comes down to testing on \type {\relax} when we use
+%D \type {\csname}, but when using \type {\ifx}, we test on being \type
+%D {\undefined}! In \ETEX\ we have \type {\ifcsname} and that way of testing on
+%D existence is not the same as the one described here. Therefore we introduce:
\def\letbeundefined#1% potential stack buildup when used \global
{\expandafter\let\csname#1\endcsname\undefined}
@@ -527,13 +549,11 @@
\def\globalundefine#1% conditional
{\ifcsname#1\endcsname\expandafter\global\let\csname#1\endcsname\undefined\fi}
-%D Beware, being \type {\undefined} in \ETEX\ means that the macro
-%D {\em is} defined!
-
-%D When we were developing the scientific units module, we
-%D encountered different behavior in text and math mode, which
-%D was due to this grouping subtilities. We therefore decided
-%D to use \type{\begingroup} instead of \type{\bgroup}.
+%D Beware, being \type {\undefined} in \ETEX\ means that the macro {\em is} defined!
+%D
+%D When we were developing the scientific units module, we encountered different
+%D behavior in text and math mode, which was due to these grouping subtleties. We
+%D therefore decided to use \type{\begingroup} instead of \type{\bgroup}.
\unexpanded\def\doifalldefinedelse#1%
{\begingroup
@@ -554,15 +574,13 @@
%D \macros
%D {doif,doifelse,doifnot}
%D
-%D Programming in \TEX\ differs from programming in procedural
-%D languages like \MODULA. This means that one --- well, let me
-%D speek for myself --- tries to do the things in the well
-%D known way. Therefore the next set of \type{\ifthenelse}
-%D commands were between the first ones we needed. A few years
-%D later, the opposite became true: when programming in
-%D \MODULA, I sometimes miss handy things like grouping,
-%D runtime redefinition, expansion etc. While \MODULA\ taught
-%D me to structure, \TEX\ taught me to think recursive.
+%D Programming in \TEX\ differs from programming in procedural languages like
+%D \MODULA. This means that one --- well, let me speak for myself --- tries to do
+%D the things in the well known way. Therefore the next set of \type{\ifthenelse}
+%D commands were among the first ones we needed. A few years later, the opposite
+%D became true: when programming in \MODULA, I sometimes miss handy things like
+%D grouping, runtime redefinition, expansion etc. While \MODULA\ taught me to
+%D structure, \TEX\ taught me to think recursively.
%D
%D \starttyping
%D \doif {string1} {string2} {...}
@@ -571,27 +589,27 @@
%D \stoptyping
\unexpanded\def\doif#1#2%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
\unexpanded\def\doifnot#1#2%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
\unexpanded\def\doifelse#1#2%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
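A concrete sketch (names and values are illustrative); because both arguments are \edef ed, macros inside them are expanded before the comparison:

\starttyping
\def\MyAlternative{middle}

\doifelse{\MyAlternative}{middle}
  {\message{middle chosen}}
  {\message{something else}}
\stoptyping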
@@ -611,24 +629,24 @@
%D This time, the string is not expanded.
\unexpanded\def\doifemptyelse#1%
- {\def\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\def\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
\unexpanded\def\doifempty#1%
- {\def\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\def\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
\unexpanded\def\doifnotempty#1%
- {\def\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\def\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
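A small sketch of the consequence (the macro name is illustrative): since the argument is stored with \def rather than \edef, a macro that merely expands to nothing still counts as not empty.

\starttyping
\def\MyValue{}

\doifemptyelse{}         {\message{empty}}{\message{not empty}}   % empty
\doifemptyelse{\MyValue} {\message{empty}}{\message{not empty}}   % not empty
\stoptyping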
@@ -637,9 +655,8 @@
%D \macros
%D {doifinset,doifnotinset,doifinsetelse}
%D
-%D We can check if a string is present in a comma separated
-%D set of strings. Depending on the result, some action is
-%D taken.
+%D We can check if a string is present in a comma separated set of strings.
+%D Depending on the result, some action is taken.
%D
%D \starttyping
%D \doifinset {string} {string,...} {...}
@@ -680,119 +697,119 @@
\def\syst_helpers_do_quit_if_item_in_set #1],\relax{\firstofoneargument}
\def\syst_helpers_do_quit_if_item_not_in_set #1],\relax{\gobbleoneargument}
-\def\syst_helpers_re_do_if_in_set_else{\expandafter\syst_helpers_do_check_if_item_in_set_else\!!stringb,],\relax}
-\def\syst_helpers_re_do_if_in_set {\expandafter\syst_helpers_do_check_if_item_in_set \!!stringb,],\relax}
-\def\syst_helpers_re_do_if_not_in_set {\expandafter\syst_helpers_do_check_if_item_not_in_set \!!stringb,],\relax}
+\def\syst_helpers_re_do_if_in_set_else{\expandafter\syst_helpers_do_check_if_item_in_set_else\m_syst_string_two,],\relax}
+\def\syst_helpers_re_do_if_in_set {\expandafter\syst_helpers_do_check_if_item_in_set \m_syst_string_two,],\relax}
+\def\syst_helpers_re_do_if_not_in_set {\expandafter\syst_helpers_do_check_if_item_not_in_set \m_syst_string_two,],\relax}
\unexpanded\def\doifinsetelse#1% make this two step too
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\thirdofthreearguments
\else
\expandafter\syst_helpers_do_if_in_set_else
\fi}
\def\syst_helpers_do_if_in_set_else#1%
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\secondoftwoarguments
\else
\expandafter\syst_helpers_re_do_if_in_set_else
\fi}
\unexpanded\def\doifinset#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\gobbletwoarguments
\else
\expandafter\syst_helpers_do_if_in_set
\fi}
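A usage sketch with illustrative values; items are compared as a whole, so a partial match does not count:

\starttyping
\doifinset    {b}{aa,b,cc}{\message{found b}}                    % found
\doifinsetelse{a}{aa,b,cc}{\message{found a}}{\message{no a}}    % no a: 'a' is only part of 'aa'
\stoptyping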
\def\syst_helpers_do_if_in_set#1%
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\gobbleoneargument
\else
\expandafter\syst_helpers_re_do_if_in_set
\fi}
\unexpanded\def\doifnotinset#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
\else
\expandafter\syst_helpers_do_if_not_in_set
\fi}
\def\syst_helpers_do_if_not_in_set#1%
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\firstofoneargument
\else
\expandafter\syst_helpers_re_do_if_not_in_set % ...]{true}
\fi}
\def\syst_helpers_do_check_if_item_in_set_else#1,#2% #2 eats up preceding space
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\syst_helpers_do_check_if_item_in_set_else
\else
\expandafter\syst_helpers_do_do_check_if_item_in_set_else
\fi#2}
\def\syst_helpers_do_do_check_if_item_in_set_else
- {\ifx\!!stringb\v_syst_helpers_right_optional_bracket
+ {\ifx\m_syst_string_two\v_syst_helpers_right_optional_bracket
\expandafter\thirdofthreearguments
\else
\expandafter\syst_helpers_do_do_do_check_if_item_in_set_else
\fi}
\def\syst_helpers_do_do_do_check_if_item_in_set_else
- {\ifx\!!stringa\!!stringb
+ {\ifx\m_syst_string_one\m_syst_string_two
\expandafter\syst_helpers_do_quit_if_item_in_set_else
\else
\expandafter\syst_helpers_do_check_if_item_in_set_else
\fi}
\def\syst_helpers_do_check_if_item_in_set#1,#2% #2 eats up preceding space
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\syst_helpers_do_check_if_item_in_set
\else
\expandafter\syst_helpers_do_do_check_if_item_in_set
\fi#2}
\def\syst_helpers_do_do_check_if_item_in_set
- {\ifx\!!stringb\v_syst_helpers_right_optional_bracket
+ {\ifx\m_syst_string_two\v_syst_helpers_right_optional_bracket
\expandafter\gobbletwoarguments
\else
\expandafter\syst_helpers_do_do_do_check_if_item_in_set
\fi}
\def\syst_helpers_do_do_do_check_if_item_in_set
- {\ifx\!!stringa\!!stringb
+ {\ifx\m_syst_string_one\m_syst_string_two
\expandafter\syst_helpers_do_quit_if_item_in_set
\else
\expandafter\syst_helpers_do_check_if_item_in_set
\fi}
\def\syst_helpers_do_check_if_item_not_in_set#1,#2% #2 eats up preceding space
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\empty
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\empty
\expandafter\syst_helpers_do_check_if_item_not_in_set
\else
\expandafter\syst_helpers_do_do_check_if_item_not_in_set
\fi#2}
\def\syst_helpers_do_do_check_if_item_not_in_set
- {\ifx\!!stringb\v_syst_helpers_right_optional_bracket
+ {\ifx\m_syst_string_two\v_syst_helpers_right_optional_bracket
\expandafter\secondoftwoarguments
\else
\expandafter\syst_helpers_do_do_do_check_if_item_not_in_set
\fi}
\def\syst_helpers_do_do_do_check_if_item_not_in_set
- {\ifx\!!stringa\!!stringb
+ {\ifx\m_syst_string_one\m_syst_string_two
\expandafter\syst_helpers_do_quit_if_item_not_in_set
\else
\expandafter\syst_helpers_do_check_if_item_not_in_set
@@ -801,8 +818,8 @@
%D \macros
%D {doifcommon,doifnotcommon,doifcommonelse}
%D
-%D Probably the most time consuming tests are those that test
-%D for overlap in sets of strings.
+%D Probably the most time consuming tests are those that test for overlap in sets
+%D of strings.
%D
%D \starttyping
%D \doifcommon {string,...} {string,...} {...}
@@ -890,16 +907,16 @@
%D {processcommalist,processcommacommand,quitcommalist,
%D processcommalistwithparameters}
%D
-%D We've already seen some macros that take care of comma
-%D separated lists. Such list can be processed with
+%D We've already seen some macros that take care of comma separated lists. Such
+%D lists can be processed with
%D
%D \starttyping
%D \processcommalist[string,string,...]\commando
%D \stoptyping
%D
-%D The user supplied command \type{\commando} receives one
-%D argument: the string. This command permits nesting and
-%D spaces after commas are skipped. Empty sets are no problem.
+%D The user supplied command \type{\commando} receives one argument: the string.
+%D This command permits nesting, and spaces after commas are skipped. Empty sets
+%D are no problem.
%D
%D \startbuffer
%D \def\dosomething#1{(#1)}
@@ -920,8 +937,10 @@
\newcount\commalevel
+\installsystemnamespace{nextcommalevel}
+
\def\syst_helpers_do_do_do_process_comma_item
- {\csname\s!next\the\commalevel\endcsname}
+ {\csname\??nextcommalevel\the\commalevel\endcsname}
\def\syst_helpers_do_do_process_comma_item
{\ifx\nexttoken\blankspace
@@ -940,9 +959,8 @@
\def\syst_helpers_do_process_comma_item
{\futurelet\nexttoken\syst_helpers_do_do_process_comma_item}
-%D Empty arguments are not processed. Empty items (\type{,,})
-%D however are treated. We have to check for the special case
-%D \type{[{a,b,c}]}.
+%D Empty arguments are not processed. Empty items (\type {,,}), however, are
+%D treated. We have to check for the special case \type {[{a,b,c}]}.
\unexpanded\def\processcommalist[%
{\futurelet\nexttoken\syst_helpers_do_check_comma_item}
@@ -957,27 +975,27 @@
\def\syst_helpers_do_process_comma_list#1]#2%
{\global\advance\commalevel \plusone
- \expandafter\def\csname\s!next\the\commalevel\endcsname##1,%
+ \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname##1,%
{#2{##1}\syst_helpers_do_process_comma_item}%
\expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument#1,]\relax
\global\advance\commalevel \minusone }
%D One way of quitting a commalist halfway is:
-\def\quitcommalist
+\unexpanded\def\quitcommalist
{\begingroup\let\syst_helpers_do_process_comma_item\syst_helpers_do_quit_comma_list}
\def\syst_helpers_do_quit_comma_list#1]%
{\endgroup}
-\def\quitprevcommalist
+\unexpanded\def\quitprevcommalist
{\begingroup\let\syst_helpers_do_process_comma_item\syst_helpers_do_quit_prev_comma_list}
\def\syst_helpers_do_quit_prev_comma_list#1]%
{\let\syst_helpers_do_process_comma_item\syst_helpers_do_quit_comma_list}
-%D The hack we used for checking the next character
-%D \type {\doifnextcharelse} is also used here.
+%D The hack we used for checking the next character \type {\doifnextcharelse}
+%D is also used here.
\let\next\:
@@ -991,15 +1009,13 @@
%D
%D \getbuffer
-%D When a list is saved in a macro, we can use a construction
-%D like:
+%D When a list is saved in a macro, we can use a construction like:
%D
%D \starttyping
%D \expandafter\processcommalist\expandafter[\list]\command
%D \stoptyping
%D
-%D Such solutions suit most situations, but we wanted a bit
-%D more.
+%D Such solutions suit most situations, but we wanted a bit more.
%D
%D \starttyping
%D \processcommacommand[string,\stringset,string]\commando
@@ -1016,14 +1032,14 @@
%D \processcommacommand[\first,between,\second]\message
%D \stoptyping
%D
-%D Commands that are part of the list are expanded, so the
-%D use of this macro has its limits.
+%D Commands that are part of the list are expanded, so the use of
+%D this macro has its limits.
\unexpanded\def\processcommacommand[#1]%
{\normalexpanded{\processcommalist[#1]}}
-%D The argument to \type{\command} is not delimited. Because
-%D we often use \type{[]} as delimiters, we also have:
+%D The argument to \type{\command} is not delimited. Because we often
+%D use \type{[]} as delimiters, we also have:
%D
%D \starttyping
%D \processcommalistwithparameters[string,string,...]\command
@@ -1044,13 +1060,18 @@
%D
%D Two more:
+\let\syst_helpers_comma_list_step\relax
+
\unexpanded\def\startprocesscommalist[#1]#2\stopprocesscommalist
- {\def\currentcommalistcommand##1{\def\currentcommalistitem{##1}#2}%
- \processcommalist[#1]\currentcommalistcommand}
+ {\def\syst_helpers_comma_list_step##1{\def\currentcommalistitem{##1}#2}%
+ \processcommalist[#1]\syst_helpers_comma_list_step}
\unexpanded\def\startprocesscommacommand[#1]#2\stopprocesscommacommand
- {\def\currentcommalistcommand##1{\def\currentcommalistitem{##1}#2}%
- \normalexpanded{\processcommalist[#1]}\currentcommalistcommand}
+ {\def\syst_helpers_comma_list_step##1{\def\currentcommalistitem{##1}#2}%
+ \normalexpanded{\processcommalist[#1]}\syst_helpers_comma_list_step}
+
+\let\stopprocesscommalist \relax
+\let\stopprocesscommacommand\relax
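A sketch of the wrapped variants (list and body are illustrative); inside the body the current item is available as \currentcommalistitem:

\starttyping
\startprocesscommalist[alpha,beta,gamma]
  \message{processing \currentcommalistitem}
\stopprocesscommalist
\stoptyping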
%D \macros
%D {processaction,
@@ -1097,8 +1118,8 @@
% obsolete: \def\expandactions{\let\expandedaction\edef} \expandactions (see mkii)
\unexpanded\def\syst_helpers_do_compare_process_action_a[#1=>#2][#3]%
- {\edef\!!stringb{#1}%
- \ifx\!!stringb\s!default
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_two\s!default
\let\commalistelement\empty
#2%
\fi}
@@ -1106,19 +1127,19 @@
% with \quitcommalist more than 25\% faster
\unexpanded\def\syst_helpers_do_compare_process_action_b[#1=>#2][#3]%
- {\edef\!!stringb{#1}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_two{#1}%
+ \ifx\m_syst_string_one\m_syst_string_two
\def\commalistelement{#3}%
#2%
\expandafter\quitcommalist
- \else\ifx\!!stringb\s!unknown
+ \else\ifx\m_syst_string_two\s!unknown
\def\commalistelement{#3}% beware of loops
#2%
\fi\fi}
\unexpanded\def\processaction[#1]#2[%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\let\syst_helpers_do_compare_process_action\syst_helpers_do_compare_process_action_a
\else
\let\syst_helpers_do_compare_process_action\syst_helpers_do_compare_process_action_b
@@ -1127,23 +1148,23 @@
\processnextcommalist\relax\relax\syst_helpers_do_process_action[}
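A usage sketch (the option, keys and actions are illustrative); the default case applies when the first argument is empty, the unknown case when it matches none of the listed keys, and \commalistelement holds the value being tested:

\starttyping
\def\MyOption{small}

\processaction
  [\MyOption]
  [      big=>\message{big chosen},
       small=>\message{small chosen},
     default=>\message{nothing given},
     unknown=>\message{unknown option \commalistelement}]
\stoptyping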
\unexpanded\def\syst_helpers_do_compare_process_action_c[#1=>#2][#3]%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{#3}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#3}%
+ \ifx\m_syst_string_one\m_syst_string_two
\def\commalistelement{#3}%
#2%
\expandafter\quitprevcommalist
\else
- \edef\!!stringa{#1}%
- \ifx\!!stringa\s!unknown
+ \edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\s!unknown
\def\commalistelement{#3}%
#2%
\fi
\fi}
\unexpanded\def\processfirstactioninset[#1]%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\processaction
\else
\expandafter\syst_helpers_process_first_action_in_set_indeed
@@ -1157,26 +1178,28 @@
\normalexpanded{\processcommalist[#1]}\syst_helpers_do_process_action}
\unexpanded\def\syst_helpers_do_compare_process_action_d[#1=>#2][#3]%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{#3}%
- \ifx\!!stringa\!!stringb
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#3}%
+ \ifx\m_syst_string_one\m_syst_string_two
\def\commalistelement{#3}%
#2%
\expandafter\quitcommalist
\else
- \edef\!!stringa{#1}%
- \ifx\!!stringa\s!unknown
+ \edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\s!unknown
\def\commalistelement{#3}%
#2%
\fi
\fi}
+\installsystemnamespace{nextactionlevel}
+
\unexpanded\def\syst_helpers_do_process_all_actions_in_set
- {\csname\s!do\the\processlevel\endcsname}
+ {\csname\??nextactionlevel\the\processlevel\endcsname}
\unexpanded\def\processallactionsinset[#1]%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\processaction
\else
\expandafter\syst_helpers_process_all_actions_in_set_indeed
@@ -1185,7 +1208,7 @@
\unexpanded\def\syst_helpers_process_all_actions_in_set_indeed[#1]#2[#3]%
{\advance\processlevel \plusone
- \expandafter\def\csname\s!do\the\processlevel\endcsname##1%
+ \expandafter\def\csname\??nextactionlevel\the\processlevel\endcsname##1%
{\def\syst_helpers_do_do_process_action####1{\syst_helpers_do_compare_process_action_d[####1][##1]}%
\processcommalist[#3]\syst_helpers_do_do_process_action}%
\normalexpanded{\processcommalist[#1]}\syst_helpers_do_process_all_actions_in_set
@@ -1197,7 +1220,7 @@
{#1%
\let\nexttoken#4%
\global\advance\commalevel \plusone
- \expandafter\def\csname\s!next\the\commalevel\endcsname##1,%
+ \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname##1,%
{#3{##1}\syst_helpers_do_process_comma_item}%
\syst_helpers_do_do_process_comma_item#4#5,]\relax
\global\advance\commalevel\minusone
@@ -1231,46 +1254,48 @@
%D \doifinstringelse {substring} {string} {then ...} {else ...}
%D \stoptyping
+\let\m_syst_sub_string\empty
+
\unexpanded\def\doifinstringelse#1%
- {\edef\@@@instring{#1}% expand #1 here
- \ifx\@@@instring\empty
+ {\edef\m_syst_sub_string{#1}% expand #1 here
+ \ifx\m_syst_sub_string\empty
\expandafter\thirdofthreearguments
\else
\expandafter\syst_helpers_do_if_in_string_else_indeed
\fi}
\unexpanded\def\syst_helpers_do_if_in_string_else_indeed#1%
- {\syst_helpers_do_if_in_string_else\@@@instring{#1}%
+ {\syst_helpers_do_if_in_string_else\m_syst_sub_string{#1}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
\unexpanded\def\doifinstring#1%%
- {\edef\@@@instring{#1}% expand #1 here
- \ifx\@@@instring\empty
+ {\edef\m_syst_sub_string{#1}% expand #1 here
+ \ifx\m_syst_sub_string\empty
\expandafter\gobbletwoarguments
\else
\expandafter\syst_helpers_do_if_in_string_indeed
\fi}
\unexpanded\def\syst_helpers_do_if_in_string_indeed#1%
- {\syst_helpers_do_if_in_string_else\@@@instring{#1}%
+ {\syst_helpers_do_if_in_string_else\m_syst_sub_string{#1}%
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
\unexpanded\def\doifnotinstring#1%%
- {\edef\@@@instring{#1}% expand #1 here
- \ifx\@@@instring\empty
+ {\edef\m_syst_sub_string{#1}% expand #1 here
+ \ifx\m_syst_sub_string\empty
\expandafter\gobbletwoarguments
\else
\expandafter\syst_helpers_do_if_not_in_string_indeed
\fi}
\unexpanded\def\syst_helpers_do_if_not_in_string_indeed#1%
- {\syst_helpers_do_if_in_string_else\@@@instring{#1}%
+ {\syst_helpers_do_if_in_string_else\m_syst_sub_string{#1}%
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
@@ -1280,8 +1305,8 @@
\unexpanded\def\syst_helpers_do_if_in_string_else#1#2% ##2 can be {abc}
{\expandafter\def\expandafter\syst_helpers_do_do_if_in_string_else
- \expandafter##\expandafter1#1##2##3\war{\unless\if##2@}% expand #1 here
- \expandafter\syst_helpers_do_do_if_in_string_else\normalexpanded{#2#1}@@\war} % expand #2 here
+ \expandafter##\expandafter1#1##2##3\_e_o_s_{\unless\if##2@}% expand #1 here
+ \expandafter\syst_helpers_do_do_if_in_string_else\normalexpanded{#2#1}@@\_e_o_s_} % expand #2 here
%D The next alternative proved to be upto twice as fast on
%D tasks like checking reserved words in pretty verbatim
@@ -1297,33 +1322,31 @@
%D expansion.
\unexpanded\def\syst_helpers_do_if_in_csname_else#1#2%
- {\def\syst_helpers_do_do_if_in_csname_else##1#1##2##3\war
+ {\def\syst_helpers_do_do_if_in_csname_else##1#1##2##3\_e_o_s_
{\unless\if##2@}%
- \expandafter\syst_helpers_do_do_if_in_csname_else#2#1@@\war}
+ \expandafter\syst_helpers_do_do_if_in_csname_else#2#1@@\_e_o_s_}
\unexpanded\def\doifincsnameelse#1#2%
- {\edef\@@@instring{#1}%
- \expandafter\syst_helpers_do_if_in_csname_else\expandafter{\@@@instring}{#2}%
+ {\normalexpanded{\syst_helpers_do_if_in_csname_else{#1}}{#2}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
%D \macros
-%D {doifnumberelse}
+%D {doifnumberelse,doifnumber,doifnotnumber}
%D
-%D The next macro executes a command depending of the outcome
-%D of a test on numerals. This is probably one of the fastest
-%D test possible, exept from a less robust 10||step
-%D \type{\if}||ladder or some tricky \type{\lcode} checking.
+%D The next macro executes a command depending on the outcome of a test on
+%D numerals. This is probably one of the fastest tests possible, except for
+%D a less robust 10||step \type {\if}||ladder or some tricky \type {\lcode}
+%D checking.
%D
%D \starttyping
%D \doifnumberelse {string} {then ...} {else ...}
%D \stoptyping
%D
-%D The macro accepts \type{123}, \type{abc}, \type{{}},
-%D \type{\getal} and \type{\the\count...}. This macro is a
-%D rather dirty one.
+%D The macro accepts \type {123}, \type {abc}, \type {{}}, \type {\getal} and
+%D \type {\the\count...}. This macro is a rather dirty one.
\def\doifnumberelse#1% does not accept counters (fully expandable)
{\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
@@ -1332,6 +1355,35 @@
\expandafter\firstoftwoarguments
\fi}
+\def\doifnumber#1%
+ {\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
+ \expandafter\gobbleoneargument
+ \else
+ \expandafter\firstofoneargument
+ \fi}
+
+\def\doifnotnumber#1%
+ {\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
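A small sketch with illustrative input; only the leading digits are inspected, which is why the text above calls it a rather dirty test:

\starttyping
\doifnumberelse{123} {\message{number}}{\message{no number}}   % number
\doifnumberelse{12pt}{\message{number}}{\message{no number}}   % number (leading digits decide)
\doifnumberelse{abc} {\message{number}}{\message{no number}}   % no number
\stoptyping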
+%D \macros
+%D {setpercentdimen}
+%D
+%D \starttyping
+%D \scratchdimen=100pt \setpercentdimen\scratchdimen{10\letterpercent}
+%D \scratchdimen=100pt \setpercentdimen\scratchdimen{5pt}
+%D \scratchdimen \percentdimen \hsize {10\letterpercent}
+%D \stoptyping
+
+\def\percentdimen#1#2% dimen percentage (with %)
+ {\dimexpr\ctxcommand{percentageof("#2",\number#1)}\relax}
+
+\unexpanded\def\setpercentdimen#1#2% dimen percentage (with %)
+ {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
+
%D \macros
%D {makerawcommalist,
%D rawdoinsetelse,
@@ -1372,13 +1424,13 @@
\def\syst_helpers_raw_process_comma_item#1,#2% #2 eats up preceding space
{\if]#1\else
- \csname\s!next\the\commalevel\endcsname{#1}%
+ \csname\??nextcommalevel\the\commalevel\endcsname{#1}%
\expandafter\syst_helpers_raw_process_comma_item
\fi#2}
\unexpanded\def\rawprocesscommalist[#1]#2% accepteert ook [\cs]
{\global\advance\commalevel \plusone
- \expandafter\let\csname\s!next\the\commalevel\endcsname#2%
+ \expandafter\let\csname\??nextcommalevel\the\commalevel\endcsname#2%
\expandafter\syst_helpers_raw_process_comma_item#1,],% \relax
\global\advance\commalevel \minusone }
@@ -1388,33 +1440,33 @@
% \def\rawdoifinsetelse#1#2{\doifinstringelse{,#1,}{,#2,}}
% \def\rawdoifinset #1#2{\doifinstring {,#1,}{,#2,}}
-\def\@@rawempty{,,}
+\def\m_syst_two_commas{,,}
\unexpanded\def\rawdoifinsetelse#1%
- {\edef\@@@instring{,#1,}% expand #1 here
- \ifx\@@@instring\@@rawempty
+ {\edef\m_syst_sub_string{,#1,}% expand #1 here
+ \ifx\m_syst_sub_string\m_syst_two_commas
\expandafter\thirdofthreearguments
\else
\expandafter\syst_helpers_raw_do_if_in_set_else
\fi}
\unexpanded\def\syst_helpers_raw_do_if_in_set_else#1%
- {\syst_helpers_do_if_in_string_else\@@@instring{,#1,}%
+ {\syst_helpers_do_if_in_string_else\m_syst_sub_string{,#1,}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
\unexpanded\def\rawdoifinset#1%
- {\edef\@@@instring{,#1,}% expand #1 here
- \ifx\@@@instring\@@rawempty
+ {\edef\m_syst_sub_string{,#1,}% expand #1 here
+ \ifx\m_syst_sub_string\m_syst_two_commas
\expandafter\gobbletwoarguments
\else
\expandafter\syst_helpers_raw_do_if_in_set
\fi}
\unexpanded\def\syst_helpers_raw_do_if_in_set#1%%
- {\syst_helpers_do_if_in_string_else\@@@instring{,#1,}%
+ {\syst_helpers_do_if_in_string_else\m_syst_sub_string{,#1,}%
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
@@ -1423,42 +1475,40 @@
%D Some more raw material:
\def\syst_helpers_do_raw_process_action[#1][#2]%
- {\def\syst_helpers_do_do_raw_process_action##1,#1=>##2,##3\war
+ {\def\syst_helpers_do_do_raw_process_action##1,#1=>##2,##3\_e_o_s_
{\if##3@\else
- \def\!!processaction{##2}%
+ \def\m_syst_helpers_process_action{##2}%
\fi}%
- \syst_helpers_do_do_raw_process_action,#2,#1=>,@\war}
+ \syst_helpers_do_do_raw_process_action,#2,#1=>,@\_e_o_s_}
\unexpanded\def\rawprocessaction[#1]#2[#3]%
- {\edef\!!stringa{#1}%
- \edef\!!stringb{undefined}% better \!!undefined
- \let\!!processaction\!!stringb
- \ifx\!!stringa\empty
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{undefined}% better \!!undefined
+ \let\m_syst_helpers_process_action\m_syst_string_two
+ \ifx\m_syst_string_one\empty
\expandafter\syst_helpers_do_raw_process_action\expandafter[\s!default][#3]%
\else
- \expandafter\syst_helpers_do_raw_process_action\expandafter[\!!stringa][#3]%
- \ifx\!!processaction\!!stringb
+ \expandafter\syst_helpers_do_raw_process_action\expandafter[\m_syst_string_one][#3]%
+ \ifx\m_syst_helpers_process_action\m_syst_string_two
\expandafter\syst_helpers_do_raw_process_action\expandafter[\s!unknown][#3]%
\fi
\fi
- \ifx\!!processaction\!!stringb
+ \ifx\m_syst_helpers_process_action\m_syst_string_two
\else
- \!!processaction
+ \m_syst_helpers_process_action
\fi}
-%D When we process the list \type{a,b,c,d,e}, the raw routine
-%D takes over 30\% less time, when we feed $20+$ character
-%D strings we gain about 20\%. Alternatives which use
-%D \type{\futurelet} perform worse. Part of the speedup is
-%D due to the \type{\let} and \type{\expandafter} in the test.
+%D When we process the list \type{a,b,c,d,e}, the raw routine takes over 30\% less
+%D time; when we feed $20+$ character strings we gain about 20\%. Alternatives which
+%D use \type{\futurelet} perform worse. Part of the speedup is due to the
+%D \type{\let} and \type{\expandafter} in the test.
%D \macros
%D {dosetvalue,dosetevalue,dosetgvalue,docopyvalue,doresetvalue,
%D dogetvalue}
%D
-%D When we are going to do assignments, we have to take
-%D multi||linguality into account. For the moment we keep
-%D things simple and single||lingual.
+%D When we are going to do assignments, we have to take multi||linguality into account.
+%D For the moment we keep things simple and single||lingual.
%D
%D \starttyping
%D \dosetvalue {label} {variable} {value}
@@ -1468,8 +1518,8 @@
%D \doresetvalue {label} {variable}
%D \stoptyping
%D
-%D These macros are in fact auxiliary ones and are not meant
-%D for use outside the assignment macros.
+%D These macros are in fact auxiliary ones and are not meant for use outside the
+%D assignment macros.
\def\dosetvalue#1#2% #3
{\expandafter\def\csname#1#2\endcsname} % {#3}}
@@ -1492,13 +1542,11 @@
%D \macros
%D {doassign,undoassign,doassignempty}
%D
-%D Assignments are the backbone of \CONTEXT. Abhorred by the
-%D concept of style file hacking, we took a considerable effort
-%D in building a parameterized system. Unfortunately there is a
-%D price to pay in terms of speed. Compared to other packages
-%D and taking the functionality of \CONTEXT\ into account, the
-%D total size of the format file is still very acceptable. Now
-%D how are these assignments done.
+%D Assignments are the backbone of \CONTEXT. Abhorred by the concept of style file
+%D hacking, we took a considerable effort in building a parameterized system.
+%D Unfortunately there is a price to pay in terms of speed. Compared to other
+%D packages and taking the functionality of \CONTEXT\ into account, the total size
+%D of the format file is still very acceptable. Now how are these assignments done.
%D
%D Assignments can be realized with:
%D
@@ -1519,38 +1567,36 @@
%D \def\labelvariable{value}
%D \stoptyping
%D
-%D We do check for the presence of an \type{=} and loudly
-%D complain of it's missed. We will redefine this macro later
-%D on, when a more advanced message mechanism is implemented.
+%D We do check for the presence of an \type{=} and loudly complain if it is missing. We
+%D will redefine this macro later on, when a more advanced message mechanism is
+%D implemented.
\newif\iferrorisfatal
-\def\waitonfatalerror
+\unexpanded\def\waitonfatalerror
{\iferrorisfatal\wait\fi}
-\def\showassignerror#1#2%
+\unexpanded\def\showassignerror#1#2%
{\writestatus{setup}{missing or ungrouped '=' after '#1' in line #2}%
\waitonfatalerror}
-\def\doassignempty[#1][#2=#3]%
+\unexpanded\def\doassignempty[#1][#2=#3]%
{\ifcsname#1#2\endcsname\else\dosetvalue{#1}{#2}{#3}\fi}
%D \macros
%D {getparameters,geteparameters,getgparameters,
%D forgetparameters}
%D
-%D Using the assignment commands directly is not our
-%D ideal of user friendly interfacing, so we take some further
-%D steps.
+%D Using the assignment commands directly is not our ideal of user||friendly interfacing,
+%D so we take some further steps.
%D
%D \starttyping
%D \getparameters [label] [...=...,...=...]
%D \forgetparameters [label] [...=...,...=...]
%D \stoptyping
%D
-%D Again, the label identifies the category a variable
-%D belongs to. The second argument can be a comma separated
-%D list of assignments.
+%D Again, the label identifies the category a variable belongs to. The second argument
+%D can be a comma separated list of assignments.
%D
%D \starttyping
%D \getparameters
@@ -1567,8 +1613,7 @@
%D \stoptyping
%D
%D
-%D In the pre||multi||lingual stadium \CONTEXT\ took the next
-%D approach. With
+%D In the pre||multi||lingual stage \CONTEXT\ took the next approach. With
%D
%D \starttyping
%D \def\??demo {@@demo}
@@ -1592,20 +1637,16 @@
%D \def\@@demobeta{2}
%D \stoptyping
%D
-%D Because we want to be able to distinguish the \type{!!}
-%D pre||tagged user supplied variables from internal
-%D counterparts, we will introduce a slightly different tag in
-%D the multi||lingual modules. There we will use \type{c!} or
-%D \type{v!}, depending on the context.
+%D Because we want to be able to distinguish the \type{!!} pre||tagged user supplied
+%D variables from internal counterparts, we will introduce a slightly different tag
+%D in the multi||lingual modules. There we will use \type{c!} or \type{v!},
+%D depending on the context.
%D
-%D By calling \type{\p!doassign} directly, we save ourselves
-%D some argument passing and gain some speed. Whatever
-%D optimizations we do, this command will always be one of the
-%D bigger bottlenecks.
-%D
-%D The alternative \type{\geteparameters} --- it's funny to
-%D see that this alternative saw the light so lately --- can be
-%D used to do expanded assigments.
+%D By calling \type{\doassign} directly, we save ourselves some argument passing
+%D and gain some speed. Whatever optimizations we do, this command will always be
+%D one of the bigger bottlenecks. The alternative \type{\geteparameters} --- it's
+%D funny to see that this alternative saw the light so late --- can be used to do
+%D expanded assignments.
\let\currentvalue\empty
@@ -1617,46 +1658,46 @@
\let\getexpandedparameters\geteparameters
-\def\dogetparameters#1[#2]#3[#4%
+\unexpanded\def\dogetparameters#1[#2]#3[#4%
{\if\noexpand#4]%
\expandafter\gobbleoneargument
\else
\let\setsomevalue#1%
- \def\p!dogetparameter{\p!doassign#2}%
- \expandafter\xdogetparameters
+ \def\syst_helpers_get_parameters_assign{\syst_helpers_get_parameters_assign_indeed#2}%
+ \expandafter\syst_helpers_get_parameters
\fi#4}
-\def\xdogetparameters#1]%
- {\xprocesscommaitem#1,],\@relax@}
+\def\syst_helpers_get_parameters#1]%
+ {\xprocesscommaitem#1,],\_e_o_p_}
-\def\xprocesscommaitem#1,#2% #2 takes space before ,
+\def\syst_helpers_process_comma_item#1,#2% #2 takes space before ,
{\if,#1,% dirty trick for testing #1=empty
- \expandafter\xprocesscommaitem
+ \expandafter\syst_helpers_process_comma_item
\else\if]#1%
\doubleexpandafter\gobbleoneargument
\else
- \p!dogetparameter\@relax@#1==\empty\@relax@
- \doubleexpandafter\xprocesscommaitem
+ \syst_helpers_get_parameters_assign\_e_o_p_#1==\empty\_e_o_p_
+ \doubleexpandafter\syst_helpers_process_comma_item
\fi\fi#2}
-\def\xshowassignerror#1#2#3%
+\def\syst_helpers_assign_error#1#2#3%
{\showassignerror{#2}{\the\inputlineno\space(#1)}}
-\def\p!n!doassign#1\@relax@#2=#3=#4#5\@relax@
+\def\syst_helpers_get_parameters_assign_normal#1\_e_o_p_#2=#3=#4#5\_e_o_p_
{\ifx\empty#2\empty
- \expandafter\xshowassignerror
+ \expandafter\syst_helpers_assign_error
\else\ifx#4\empty
- \doubleexpandafter\xshowassignerror
+ \doubleexpandafter\syst_helpers_assign_error
\else
\doubleexpandafter\setsomevalue
\fi\fi
{#1}{#2}{#3}}
-\def\p!e!doassign#1\@relax@#2=#3=#4#5\@relax@
+\def\syst_helpers_get_parameters_assign_error#1\_e_o_p_#2=#3=#4#5\_e_o_p_
{\ifx\empty#2\empty
- \expandafter\xshowassignerror
+ \expandafter\syst_helpers_assign_error
\else\ifx#4\empty
- \doubleexpandafter\xshowassignerror
+ \doubleexpandafter\syst_helpers_assign_error
\else
\ifcsname#1#2\endcsname
\expandafter\let\expandafter\currentvalue\csname#1#2\endcsname
@@ -1667,11 +1708,11 @@
\fi\fi
{#1}{#2}{#3}}
-\let\p!doassign\p!n!doassign
+\let\syst_helpers_get_parameters_assign_indeed\syst_helpers_get_parameters_assign_normal
-\def\doassign [#1][#2]{\let\setsomevalue\dosetvalue \p!doassign#1\@relax@#2==\empty\@relax@}
-\def\doeassign [#1][#2]{\let\setsomevalue\dosetevalue \p!doassign#1\@relax@#2==\empty\@relax@}
-\def\undoassign[#1][#2]{\let\setsomevalue\doresetvalue\p!doassign#1\@relax@#2==\empty\@relax@}
+\unexpanded\def\doassign [#1][#2]{\let\setsomevalue\dosetvalue \syst_helpers_get_parameters_assign_indeed#1\_e_o_p_#2==\empty\_e_o_p_}
+\unexpanded\def\doeassign [#1][#2]{\let\setsomevalue\dosetevalue \syst_helpers_get_parameters_assign_indeed#1\_e_o_p_#2==\empty\_e_o_p_}
+\unexpanded\def\undoassign[#1][#2]{\let\setsomevalue\doresetvalue\syst_helpers_get_parameters_assign_indeed#1\_e_o_p_#2==\empty\_e_o_p_}
%D \macros
%D {processassignmentlist,processassignmentcommand,
@@ -1688,8 +1729,8 @@
%D worth the trouble and tokens.
\unexpanded\def\processassignmentlist[#1]#2% #2 == \command{key}{value}
- {\def\doprocessassignmententry##1{#2}% {##2}{##3} % namespace is ignored
- \dogetparameters\doprocessassignmententry[][#1]}
+ {\def\syst_helpers_process_assignment_entry##1{#2}% {##2}{##3} % namespace is ignored
+ \dogetparameters\syst_helpers_process_assignment_entry[][#1]}
\unexpanded\def\processassignmentcommand[#1]%
{\normalexpanded{\processassignmentlist[#1]}}
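%D
%D For instance (\type {\showpair} is only an illustrative helper):
%D
%D \starttyping
%D \def\showpair#1#2{(#1=#2)}
%D \processassignmentlist[alpha=1,beta=2]\showpair
%D \stoptyping
%D
%D which typesets \type {(alpha=1)(beta=2)}.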
@@ -1704,9 +1745,8 @@
%D \macros{currentvalue}
%D
-%D Just in case a \type{\getparameter} argument itself ends up
-%D inside a \type{\write} or other expandable location, our
-%D new macro needs a default value.
+%D Just in case a \type{\getparameter} argument itself ends up inside a \type
+%D {\write} or other expandable location, our new macro needs a default value.
%D
%D \starttyping
%D \getparameters[xxx][aaa=bbb]\par
@@ -1716,7 +1756,8 @@
%D \getparameters[xxx][aaa]\par
%D \stoptyping
-%D \macros {expandparameters}
+%D \macros
+%D {expandparameters}
%D
%D Example usage:
%D
@@ -1733,26 +1774,27 @@
%D \startlines
%D \getbuffer
%D \stoplines
-
+%D
%D Here we hook in the code (beware, this is the optimized get **):
-\def\xdoget@n@parameters#1]%
- {\xprocesscommaitem#1,],\@relax@}
+\def\syst_helpers_get_parameters_normal#1]%
+ {\syst_helpers_process_comma_item#1,],\_e_o_p_}
-\def\xdoget@e@parameters#1]%
+\def\syst_helpers_get_parameters_expanded#1]%
{\let\dosetnvalue\setsomevalue
\let\setsomevalue\dosetevalue
- \let\p!doassign\p!e!doassign
+ \let\syst_helpers_get_parameters_assign_indeed\syst_helpers_get_parameters_assign_error
\let\setsomevalue\dosetevalue
- \xprocesscommaitem#1,],\@relax@
- \let\p!doassign\p!n!doassign
+ \syst_helpers_process_comma_item#1,],\_e_o_p_
+ \let\syst_helpers_get_parameters_assign_indeed\syst_helpers_get_parameters_assign_normal
\let\setsomevalue\dosetnvalue
- \let\xdogetparameters\xdoget@n@parameters
+ \let\syst_helpers_get_parameters\syst_helpers_get_parameters_normal
\let\currentvalue\empty}
-\let\xdogetparameters\xdoget@n@parameters % **
+\let\syst_helpers_get_parameters\syst_helpers_get_parameters_normal % **
-\def\expandparameters{\let\xdogetparameters\xdoget@e@parameters}
+\unexpanded\def\expandparameters
+ {\let\syst_helpers_get_parameters\syst_helpers_get_parameters_expanded}
%D \macros
%D {getemptyparameters}
@@ -1764,9 +1806,9 @@
%D \getemptyparameters [label] [...=...,...=...]
%D \stoptyping
-\def\getemptyparameters[#1]#2[#3]%
- {\def\p!dogetemptyparameter##1{\doassignempty[#1][##1]}%
- \processcommalist[#3]\p!dogetemptyparameter}
+\unexpanded\def\getemptyparameters[#1]#2[#3]%
+ {\def\syst_helpers_get_empty_parameters##1{\doassignempty[#1][##1]}%
+ \processcommalist[#3]\syst_helpers_get_empty_parameters}
%D \macros
%D {copyparameters}
@@ -1801,8 +1843,8 @@
\unexpanded\def\copyparameters[#1]#2[#3]#4[#5]%
{\doifnot{#1}{#3}
- {\def\docopyparameter{\docopyvalue{#1}{#3}}% ##1
- \processcommalist[#5]\docopyparameter}}
+ {\def\syst_helpers_copy_parameter{\docopyvalue{#1}{#3}}% ##1
+ \processcommalist[#5]\syst_helpers_copy_parameter}}
%D \macros
%D {ifparameters,checkparameters}
@@ -1820,58 +1862,60 @@
\newif\ifparameters
-\def\p!checkparameters#1=#2#3\war%
+\def\syst_helpers_check_parameters#1=#2#3\_e_o_s_
{\if#2@\parametersfalse\else\parameterstrue\fi}
-\def\checkparameters[#1]%
- {\p!checkparameters#1=@@\war}
+\unexpanded\def\checkparameters[#1]%
+ {\syst_helpers_check_parameters#1=@@\_e_o_s_}
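+%D
+%D For instance:
+%D
+%D \starttyping
+%D \checkparameters[width=3cm]
+%D \checkparameters[whatever]
+%D \stoptyping
+%D
+%D The first call sets \type {\ifparameters} to true (there is an assignment),
+%D the second one to false.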
%D \macros
%D {getfromcommalist,getfromcommacommand,
%D commalistelement,
%D getcommalistsize,getcommacommandsize}
%D
-%D It's possible to get an element from a commalist or a
-%D command representing a commalist.
+%D It's possible to get an element from a commalist or a command representing
+%D a commalist.
%D
%D \starttyping
%D \getfromcommalist [string] [n]
%D \getfromcommacommand [string,\strings,string,...] [n]
%D \stoptyping
%D
-%D The difference betwee the two of them is the same as the
-%D difference between \type{\processcomma...}. The found string
-%D is stored in \type{\commalistelement}.
+%D The difference between the two of them is the same as the difference between
+%D the two \type {\processcomma...} variants. The found string is stored in
+%D \type {\commalistelement}.
%D
-%D We can calculate the size of a comma separated list by
-%D using:
+%D We can calculate the size of a comma separated list by using:
%D
%D \starttyping
%D \getcommalistsize [string,string,...]
%D \getcommacommandsize [string,\strings,string,...]
%D \stoptyping
%D
-%D Afterwards, the length is available in the macro
-%D \type{\commalistsize} (not a \COUNTER).
+%D Afterwards, the length is available in the macro \type {\commalistsize}
+%D (not a \COUNTER).
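+%D
+%D For instance:
+%D
+%D \starttyping
+%D \getcommalistsize[alpha,beta,gamma]
+%D \stoptyping
+%D
+%D after which \type {\commalistsize} expands to \type {3}.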
\newcount\commalistcounter
\def\commalistsize{0}
-\def\p!dogetcommalistsize#1%
+\def\syst_helpers_get_comma_list_size#1%
{\advance\commalistcounter\plusone}
-\def\getcommalistsize#1]% don't loose [{#1}]
+\unexpanded\def\getcommalistsize#1]% don't lose [{#1}]
{\commalistcounter\zerocount
- \processcommalist#1]\p!dogetcommalistsize % was [{#1}]
+ \processcommalist#1]\syst_helpers_get_comma_list_size % was [{#1}]
\edef\commalistsize{\the\commalistcounter}}
-\def\getcommacommandsize[#1]%
- {\edef\commacommand{#1}%
- \scratchtoks\expandafter{\expandafter[\commacommand]}%
- \expandafter\getcommalistsize\the\scratchtoks }
+% \def\getcommacommandsize[#1]%
+% {\edef\commacommand{#1}%
+% \scratchtoks\expandafter{\expandafter[\commacommand]}%
+% \expandafter\getcommalistsize\the\scratchtoks }
+
+\unexpanded\def\getcommacommandsize[#1]%
+ {\normalexpanded{\getcommalistsize[#1]}}
-\def\p!dogetfromcommalist#1%
+\def\syst_helpers_get_from_comma_list#1%
{\advance\commalistcounter \minusone
\ifcase\commalistcounter
\def\commalistelement{#1}%
@@ -1881,7 +1925,7 @@
\unexpanded\def\getfromcommalist[#1]#2[#3]%
{\let\commalistelement\empty
\commalistcounter#3\relax
- \processcommalist[#1]\p!dogetfromcommalist}
+ \processcommalist[#1]\syst_helpers_get_from_comma_list}
\unexpanded\def\getfromcommacommand[#1]%
{\normalexpanded{\getfromcommalist[#1]}}
@@ -1919,16 +1963,11 @@
%D \dogetcommalistelement1\from a,b,c\to\commalistelement
%D \stoptyping
-\def\dodogetcommalistelement#1\from#2,#3,#4,#5,#6,#7,#8\to#9%
+\def\syst_helpers_get_comma_list_element#1\from#2,#3,#4,#5,#6,#7,#8\to#9%
{\edef#9{\ifcase#1\relax\or#2\or#3\or#4\or#5\or#6\or#7\or#8\fi}}
-\def\dogetcommalistelement#1\from#2\to%
- {\dodogetcommalistelement#1\from#2,,,,,,\to}
-
-% check sources
-
\def\dogetcommacommandelement#1\from#2\to%
- {\expandafter\dodogetcommalistelement\expandafter#1\expandafter\from#2,,,,,,\to}
+ {\expandafter\syst_helpers_get_comma_list_element\expandafter#1\expandafter\from#2,,,,,,\to}
%D \macros
%D {dosingleargument,dodoubleargument,dotripleargument,
@@ -1984,13 +2023,15 @@
%D us to do some checking, we reimplemented the non||empty
%D ones.
-\unexpanded\def\dosingleargument {\let\expectedarguments\plusone \dosingleempty }
-\unexpanded\def\dodoubleargument {\let\expectedarguments\plustwo \dodoubleempty }
-\unexpanded\def\dotripleargument {\let\expectedarguments\plusthree \dotripleempty }
-\unexpanded\def\doquadrupleargument {\let\expectedarguments\plusfour \doquadrupleempty }
-\unexpanded\def\doquintupleargument {\let\expectedarguments\plusfive \doquintupleempty }
-\unexpanded\def\dosixtupleargument {\let\expectedarguments\plussix \dosixtupleempty }
-\unexpanded\def\doseventupleargument{\let\expectedarguments\plusseven \doseventupleempty}
+% no longer a message:
+%
+% \unexpanded\def\dosingleargument {\let\expectedarguments\plusone \dosingleempty }
+% \unexpanded\def\dodoubleargument {\let\expectedarguments\plustwo \dodoubleempty }
+% \unexpanded\def\dotripleargument {\let\expectedarguments\plusthree \dotripleempty }
+% \unexpanded\def\doquadrupleargument {\let\expectedarguments\plusfour \doquadrupleempty }
+% \unexpanded\def\doquintupleargument {\let\expectedarguments\plusfive \doquintupleempty }
+% \unexpanded\def\dosixtupleargument {\let\expectedarguments\plussix \dosixtupleempty }
+% \unexpanded\def\doseventupleargument{\let\expectedarguments\plusseven \doseventupleempty}
%D \macros
%D {iffirstagument,ifsecondargument,ifthirdargument,
@@ -2013,8 +2054,7 @@
%D doquadrupleempty,doquintupleempty,dosixtupeempty,
%D doseventupleempty}
%D
-%D The empty argument supplying macros mentioned before, look
-%D like:
+%D The empty argument supplying macros mentioned before look like:
%D
%D \starttyping
%D \dosingleempty \command
@@ -2022,11 +2062,11 @@
%D \dotripleempty \command
%D \doquadrupleempty \command
%D \doquintupleempty \command
-%D \dosixtupleempty \command
+%D \dosixtupleempty \command
%D \doseventupleempty\command
%D \stoptyping
%D
-%D So \type{\dodoubleempty} leades to:
+%D So \type{\dodoubleempty} leads to:
%D
%D \starttyping
%D \command[#1][#2]
@@ -2041,16 +2081,16 @@
\setnewconstant\noexpectedarguments\zerocount
\setnewconstant\expectedarguments \zerocount
-\def\showargumenterror#1#2%
+\unexpanded\def\showargumenterror#1#2%
{\writestatus{system}{\number#1 argument(s) expected in line #2}}
-\def\doshowargumenterror
+\unexpanded\def\syst_helpers_argument_error
{\ifnum\expectedarguments>\noexpectedarguments
\showargumenterror{\number\expectedarguments}{\number\inputlineno}%
\fi
- \noshowargumenterror}
+ \syst_helpers_argument_reset}
-\def\noshowargumenterror
+\unexpanded\def\syst_helpers_argument_reset
{\let\expectedarguments\noexpectedarguments}
% \def\test[#1]{(#1)}
@@ -2074,193 +2114,193 @@
%D Single:
\unexpanded\def\dosingleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
{\firstargumenttrue#1}%
- {\dosingleemptyNOPone#1}}
+ {\syst_helpers_single_empty_one_nop#1}}
-\def\dosingleemptyNOPone#1%
+\def\syst_helpers_single_empty_one_nop#1%
{\firstargumentfalse
#1[]}
%D Double
\unexpanded\def\dodoubleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\dodoubleemptyYESone#1}%
- {\dodoubleemptyNOPone#1}}
+ {\syst_helpers_double_empty_one_yes#1}%
+ {\syst_helpers_double_empty_one_nop#1}}
-\def\dodoubleemptyYESone#1[#2]%
+\def\syst_helpers_double_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
{\secondargumenttrue#1[{#2}]}%
- {\dodoubleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_double_empty_two_nop#1{#2}}}
-\def\dodoubleemptyNOPone#1%
+\def\syst_helpers_double_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
#1[][]}
-\def\dodoubleemptyNOPtwo
+\def\syst_helpers_double_empty_two_nop
{\secondargumentfalse
\if_next_blank_space_token
- \expandafter\dodoubleemptyonespaced
+ \expandafter\syst_helpers_double_empty_one_spaced
\else
- \expandafter\dodoubleemptyonenormal
+ \expandafter\syst_helpers_double_empty_one_normal
\fi}
-\def\dodoubleemptyonespaced#1#2{#1[{#2}][] }
-\def\dodoubleemptyonenormal#1#2{#1[{#2}][]}
+\def\syst_helpers_double_empty_one_spaced#1#2{#1[{#2}][] }
+\def\syst_helpers_double_empty_one_normal#1#2{#1[{#2}][]}
% Three
\unexpanded\def\dotripleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\dotripleemptyYESone#1}%
- {\dotripleemptyNOPone#1}}
+ {\syst_helpers_triple_empty_one_yes#1}%
+ {\syst_helpers_triple_empty_one_nop#1}}
-\def\dotripleemptyYESone#1[#2]%
+\def\syst_helpers_triple_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
- {\dotripleemptyYEStwo#1{#2}}%
- {\dotripleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_triple_empty_two_yes#1{#2}}%
+ {\syst_helpers_triple_empty_two_nop#1{#2}}}
-\def\dotripleemptyYEStwo#1#2[#3]%
+\def\syst_helpers_triple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
\doifnextoptionalelse
{\thirdargumenttrue#1[{#2}][{#3}]}%
- {\dotripleemptyNOPthree#1{#2}{#3}}}
+ {\syst_helpers_triple_empty_three_nop#1{#2}{#3}}}
-\def\dotripleemptyNOPone#1%
+\def\syst_helpers_triple_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
\thirdargumentfalse
#1[][][]}
-\def\dotripleemptyNOPtwo
+\def\syst_helpers_triple_empty_two_nop
{\secondargumentfalse
\thirdargumentfalse
\if_next_blank_space_token
- \expandafter\dotripleemptytwospaced
+ \expandafter\syst_helpers_triple_empty_two_spaced
\else
- \expandafter\dotripleemptytwonormal
+ \expandafter\syst_helpers_triple_empty_two_normal
\fi}
-\def\dotripleemptyNOPthree
+\def\syst_helpers_triple_empty_three_nop
{\thirdargumentfalse
\if_next_blank_space_token
- \expandafter\dotripleemptythreespaced
+ \expandafter\syst_helpers_triple_empty_three_spaced
\else
- \expandafter\dotripleemptythreenormal
+ \expandafter\syst_helpers_triple_empty_three_normal
\fi}
-\def\dotripleemptytwospaced #1#2{#1[{#2}][][] }
-\def\dotripleemptytwonormal #1#2{#1[{#2}][][]}
-\def\dotripleemptythreespaced#1#2#3{#1[{#2}][{#3}][] }
-\def\dotripleemptythreenormal#1#2#3{#1[{#2}][{#3}][]}
+\def\syst_helpers_triple_empty_two_spaced #1#2{#1[{#2}][][] }
+\def\syst_helpers_triple_empty_two_normal #1#2{#1[{#2}][][]}
+\def\syst_helpers_triple_empty_three_spaced#1#2#3{#1[{#2}][{#3}][] }
+\def\syst_helpers_triple_empty_three_normal#1#2#3{#1[{#2}][{#3}][]}
%D Four:
\unexpanded\def\doquadrupleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\doquadrupleemptyYESone#1}%
- {\doquadrupleemptyNOPone#1}}
+ {\syst_helpers_quadruple_empty_one_yes#1}%
+ {\syst_helpers_quadruple_empty_one_nop#1}}
-\def\doquadrupleemptyYESone#1[#2]%
+\def\syst_helpers_quadruple_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
- {\doquadrupleemptyYEStwo#1{#2}}%
- {\doquadrupleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_quadruple_empty_two_yes#1{#2}}%
+ {\syst_helpers_quadruple_empty_two_nop#1{#2}}}
-\def\doquadrupleemptyYEStwo#1#2[#3]%
+\def\syst_helpers_quadruple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
\doifnextoptionalelse
- {\doquadrupleemptyYESthree#1{#2}{#3}}%
- {\doquadrupleemptyNOPthree#1{#2}{#3}}}
+ {\syst_helpers_quadruple_empty_three_yes#1{#2}{#3}}%
+ {\syst_helpers_quadruple_empty_three_nop#1{#2}{#3}}}
-\def\doquadrupleemptyYESthree#1#2#3[#4]%
+\def\syst_helpers_quadruple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
\doifnextoptionalelse
{\fourthargumenttrue#1[{#2}][{#3}][{#4}]}%
- {\doquadrupleemptyNOPfour#1{#2}{#3}{#4}}}
+ {\syst_helpers_quadruple_empty_four_nop#1{#2}{#3}{#4}}}
-\def\doquadrupleemptyNOPone#1%
+\def\syst_helpers_quadruple_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
\thirdargumentfalse
\fourthargumentfalse
#1[][][][]}
-\def\doquadrupleemptyNOPtwo
+\def\syst_helpers_quadruple_empty_two_nop
{\secondargumentfalse
\thirdargumentfalse
\fourthargumentfalse
\if_next_blank_space_token
- \expandafter\doquadrupleemptytwospaced
+ \expandafter\syst_helpers_quadruple_empty_two_spaced
\else
- \expandafter\doquadrupleemptytwonormal
+ \expandafter\syst_helpers_quadruple_empty_two_normal
\fi}
-\def\doquadrupleemptyNOPthree
+\def\syst_helpers_quadruple_empty_three_nop
{\thirdargumentfalse
\fourthargumentfalse
\if_next_blank_space_token
- \expandafter\doquadrupleemptythreespaced
+ \expandafter\syst_helpers_quadruple_empty_three_spaced
\else
- \expandafter\doquadrupleemptythreenormal
+ \expandafter\syst_helpers_quadruple_empty_three_normal
\fi}
-\def\doquadrupleemptyNOPfour
+\def\syst_helpers_quadruple_empty_four_nop
{\fourthargumentfalse
\if_next_blank_space_token
- \expandafter\doquadrupleemptyfourspaced
+ \expandafter\syst_helpers_quadruple_empty_four_spaced
\else
- \expandafter\doquadrupleemptyfournormal
+ \expandafter\syst_helpers_quadruple_empty_four_normal
\fi}
-\def\doquadrupleemptytwospaced #1#2{#1[{#2}][][][] }
-\def\doquadrupleemptytwonormal #1#2{#1[{#2}][][][]}
-\def\doquadrupleemptythreespaced #1#2#3{#1[{#2}][{#3}][][] }
-\def\doquadrupleemptythreenormal #1#2#3{#1[{#2}][{#3}][][]}
-\def\doquadrupleemptyfourspaced #1#2#3#4{#1[{#2}][{#3}][{#4}][] }
-\def\doquadrupleemptyfournormal #1#2#3#4{#1[{#2}][{#3}][{#4}][]}
+\def\syst_helpers_quadruple_empty_two_spaced #1#2{#1[{#2}][][][] }
+\def\syst_helpers_quadruple_empty_two_normal #1#2{#1[{#2}][][][]}
+\def\syst_helpers_quadruple_empty_three_spaced #1#2#3{#1[{#2}][{#3}][][] }
+\def\syst_helpers_quadruple_empty_three_normal #1#2#3{#1[{#2}][{#3}][][]}
+\def\syst_helpers_quadruple_empty_four_spaced #1#2#3#4{#1[{#2}][{#3}][{#4}][] }
+\def\syst_helpers_quadruple_empty_four_normal #1#2#3#4{#1[{#2}][{#3}][{#4}][]}
%D Five:
\unexpanded\def\doquintupleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\doquintupleemptyYESone#1}%
- {\doquintupleemptyNOPone#1}}
+ {\syst_helpers_quintuple_empty_one_yes#1}%
+ {\syst_helpers_quintuple_empty_one_nop#1}}
-\def\doquintupleemptyYESone#1[#2]%
+\def\syst_helpers_quintuple_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
- {\doquintupleemptyYEStwo#1{#2}}%
- {\doquintupleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_quintuple_empty_two_yes#1{#2}}%
+ {\syst_helpers_quintuple_empty_two_nop#1{#2}}}
-\def\doquintupleemptyYEStwo#1#2[#3]%
+\def\syst_helpers_quintuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
\doifnextoptionalelse
- {\doquintupleemptyYESthree#1{#2}{#3}}%
- {\doquintupleemptyNOPthree#1{#2}{#3}}}
+ {\syst_helpers_quintuple_empty_three_yes#1{#2}{#3}}%
+ {\syst_helpers_quintuple_empty_three_nop#1{#2}{#3}}}
-\def\doquintupleemptyYESthree#1#2#3[#4]%
+\def\syst_helpers_quintuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
\doifnextoptionalelse
- {\doquintupleemptyYESfour#1{#2}{#3}{#4}}%
- {\doquintupleemptyNOPfour#1{#2}{#3}{#4}}}
+ {\syst_helpers_quintuple_empty_four_yes#1{#2}{#3}{#4}}%
+ {\syst_helpers_quintuple_empty_four_nop#1{#2}{#3}{#4}}}
-\def\doquintupleemptyYESfour#1#2#3#4[#5]%
+\def\syst_helpers_quintuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
\doifnextoptionalelse
{\fifthargumenttrue#1[{#2}][{#3}][{#4}][{#5}]}%
- {\doquintupleemptyNOPfive#1{#2}{#3}{#4}{#5}}}
+ {\syst_helpers_quintuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
-\def\doquintupleemptyNOPone#1%
+\def\syst_helpers_quintuple_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
\thirdargumentfalse
@@ -2268,92 +2308,92 @@
\fifthargumentfalse
#1[][][][][]}
-\def\doquintupleemptyNOPtwo
+\def\syst_helpers_quintuple_empty_two_nop
{\secondargumentfalse
\thirdargumentfalse
\fourthargumentfalse
\fifthargumentfalse
\if_next_blank_space_token
- \expandafter\doquintupleemptytwospaced
+ \expandafter\syst_helpers_quintuple_empty_two_spaced
\else
- \expandafter\doquintupleemptytwonormal
+ \expandafter\syst_helpers_quintuple_empty_two_normal
\fi}
-\def\doquintupleemptyNOPthree
+\def\syst_helpers_quintuple_empty_three_nop
{\thirdargumentfalse
\fourthargumentfalse
\fifthargumentfalse
\if_next_blank_space_token
- \expandafter\doquintupleemptythreespaced
+ \expandafter\syst_helpers_quintuple_empty_three_spaced
\else
- \expandafter\doquintupleemptythreenormal
+ \expandafter\syst_helpers_quintuple_empty_three_normal
\fi}
-\def\doquintupleemptyNOPfour
+\def\syst_helpers_quintuple_empty_four_nop
{\fourthargumentfalse
\fifthargumentfalse
\if_next_blank_space_token
- \expandafter\doquintupleemptyfourspaced
+ \expandafter\syst_helpers_quintuple_empty_four_spaced
\else
- \expandafter\doquintupleemptyfournormal
+ \expandafter\syst_helpers_quintuple_empty_four_normal
\fi}
-\def\doquintupleemptyNOPfive
+\def\syst_helpers_quintuple_empty_five_nop
{\fifthargumentfalse
\if_next_blank_space_token
- \expandafter\doquintupleemptyfivespaced
+ \expandafter\syst_helpers_quintuple_empty_five_spaced
\else
- \expandafter\doquintupleemptyfivenormal
+ \expandafter\syst_helpers_quintuple_empty_five_normal
\fi}
-\def\doquintupleemptytwospaced #1#2{#1[{#2}][][][][] }
-\def\doquintupleemptytwonormal #1#2{#1[{#2}][][][][]}
-\def\doquintupleemptythreespaced #1#2#3{#1[{#2}][{#3}][][][] }
-\def\doquintupleemptythreenormal #1#2#3{#1[{#2}][{#3}][][][]}
-\def\doquintupleemptyfourspaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][] }
-\def\doquintupleemptyfournormal #1#2#3#4{#1[{#2}][{#3}][{#4}][][]}
-\def\doquintupleemptyfivespaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][] }
-\def\doquintupleemptyfivenormal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][]}
+\def\syst_helpers_quintuple_empty_two_spaced #1#2{#1[{#2}][][][][] }
+\def\syst_helpers_quintuple_empty_two_normal #1#2{#1[{#2}][][][][]}
+\def\syst_helpers_quintuple_empty_three_spaced #1#2#3{#1[{#2}][{#3}][][][] }
+\def\syst_helpers_quintuple_empty_three_normal #1#2#3{#1[{#2}][{#3}][][][]}
+\def\syst_helpers_quintuple_empty_four_spaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][] }
+\def\syst_helpers_quintuple_empty_four_normal #1#2#3#4{#1[{#2}][{#3}][{#4}][][]}
+\def\syst_helpers_quintuple_empty_five_spaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][] }
+\def\syst_helpers_quintuple_empty_five_normal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][]}
%D Six
\unexpanded\def\dosixtupleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\dosixtupleemptyYESone#1}
- {\dosixtupleemptyNOPone#1}}
+ {\syst_helpers_sixtuple_empty_one_yes#1}
+ {\syst_helpers_sixtuple_empty_one_nop#1}}
-\def\dosixtupleemptyYESone#1[#2]%
+\def\syst_helpers_sixtuple_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
- {\dosixtupleemptyYEStwo#1{#2}}%
- {\dosixtupleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_sixtuple_empty_two_yes#1{#2}}%
+ {\syst_helpers_sixtuple_empty_two_nop#1{#2}}}
-\def\dosixtupleemptyYEStwo#1#2[#3]%
+\def\syst_helpers_sixtuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
\doifnextoptionalelse
- {\dosixtupleemptyYESthree#1{#2}{#3}}%
- {\dosixtupleemptyNOPthree#1{#2}{#3}}}
+ {\syst_helpers_sixtuple_empty_three_yes#1{#2}{#3}}%
+ {\syst_helpers_sixtuple_empty_three_nop#1{#2}{#3}}}
-\def\dosixtupleemptyYESthree#1#2#3[#4]%
+\def\syst_helpers_sixtuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
\doifnextoptionalelse
- {\dosixtupleemptyYESfour#1{#2}{#3}{#4}}%
- {\dosixtupleemptyNOPfour#1{#2}{#3}{#4}}}
+ {\syst_helpers_sixtuple_empty_four_yes#1{#2}{#3}{#4}}%
+ {\syst_helpers_sixtuple_empty_four_nop#1{#2}{#3}{#4}}}
-\def\dosixtupleemptyYESfour#1#2#3#4[#5]%
+\def\syst_helpers_sixtuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
\doifnextoptionalelse
- {\dosixtupleemptyYESfive#1{#2}{#3}{#4}{#5}}%
- {\dosixtupleemptyNOPfive#1{#2}{#3}{#4}{#5}}}
+ {\syst_helpers_sixtuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
+ {\syst_helpers_sixtuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
-\def\dosixtupleemptyYESfive#1#2#3#4#5[#6]%
+\def\syst_helpers_sixtuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
\doifnextoptionalelse
{\sixthargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}]}%
- {\dosixtupleemptyNOPsix#1{#2}{#3}{#4}{#5}{#6}}}
+ {\syst_helpers_sixtuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
-\def\dosixemptyNOPone#1%
+\def\syst_helpers_sixtuple_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
\thirdargumentfalse
@@ -2362,112 +2402,112 @@
\sixthargumentfalse
#1[][][][][][]}
-\def\dosixtupleemptyNOPtwo
+\def\syst_helpers_sixtuple_empty_two_nop
{\secondargumentfalse
\thirdargumentfalse
\fourthargumentfalse
\fifthargumentfalse
\sixthargumentfalse
\if_next_blank_space_token
- \expandafter\dosixemptytwospaced
+ \expandafter\syst_helpers_sixtuple_empty_two_spaced
\else
- \expandafter\dosixemptytwonormal
+ \expandafter\syst_helpers_sixtuple_empty_two_normal
\fi}
-\def\dosixtupleemptyNOPthree
+\def\syst_helpers_sixtuple_empty_three_nop
{\thirdargumentfalse
\fourthargumentfalse
\fifthargumentfalse
\sixthargumentfalse
\if_next_blank_space_token
- \expandafter\dosixemptythreespaced
+ \expandafter\syst_helpers_sixtuple_empty_three_spaced
\else
- \expandafter\dosixemptythreenormal
+ \expandafter\syst_helpers_sixtuple_empty_three_normal
\fi}
-\def\dosixtupleemptyNOPfour
+\def\syst_helpers_sixtuple_empty_four_nop
{\fourthargumentfalse
\fifthargumentfalse
\sixthargumentfalse
\if_next_blank_space_token
- \expandafter\dosixemptyfourspaced
+ \expandafter\syst_helpers_sixtuple_empty_four_spaced
\else
- \expandafter\dosixemptyfournormal
+ \expandafter\syst_helpers_sixtuple_empty_four_normal
\fi}
-\def\dosixtupleemptyNOPfive
+\def\syst_helpers_sixtuple_empty_five_nop
{\fifthargumentfalse
\sixthargumentfalse
\if_next_blank_space_token
- \expandafter\dosixemptyfivespaced
+ \expandafter\syst_helpers_sixtuple_empty_five_spaced
\else
- \expandafter\dosixemptyfivenormal
+ \expandafter\syst_helpers_sixtuple_empty_five_normal
\fi}
-\def\dosixtupleemptyNOPsix
+\def\syst_helpers_sixtuple_empty_six_nop
{\sixthargumentfalse
\if_next_blank_space_token
- \expandafter\dosixemptysixspaced
+ \expandafter\syst_helpers_sixtuple_empty_six_spaced
\else
- \expandafter\dosixemptysixnormal
+ \expandafter\syst_helpers_sixtuple_empty_six_normal
\fi}
-\def\dosixemptytwospaced #1#2{#1[{#2}][][][][][] }
-\def\dosixemptytwonormal #1#2{#1[{#2}][][][][][]}
-\def\dosixemptythreespaced #1#2#3{#1[{#2}][{#3}][][][][] }
-\def\dosixemptythreenormal #1#2#3{#1[{#2}][{#3}][][][][]}
-\def\dosixemptyfourspaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][][] }
-\def\dosixemptyfournormal #1#2#3#4{#1[{#2}][{#3}][{#4}][][][]}
-\def\dosixemptyfivespaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][] }
-\def\dosixemptyfivenormal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][]}
-\def\dosixemptysixspaced #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][] }
-\def\dosixemptysixnormal #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][]}
+\def\syst_helpers_sixtuple_empty_two_spaced #1#2{#1[{#2}][][][][][] }
+\def\syst_helpers_sixtuple_empty_two_normal #1#2{#1[{#2}][][][][][]}
+\def\syst_helpers_sixtuple_empty_three_spaced #1#2#3{#1[{#2}][{#3}][][][][] }
+\def\syst_helpers_sixtuple_empty_three_normal #1#2#3{#1[{#2}][{#3}][][][][]}
+\def\syst_helpers_sixtuple_empty_four_spaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][][] }
+\def\syst_helpers_sixtuple_empty_four_normal #1#2#3#4{#1[{#2}][{#3}][{#4}][][][]}
+\def\syst_helpers_sixtuple_empty_five_spaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][] }
+\def\syst_helpers_sixtuple_empty_five_normal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][]}
+\def\syst_helpers_sixtuple_empty_six_spaced #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][] }
+\def\syst_helpers_sixtuple_empty_six_normal #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][]}
%D Seven:
\unexpanded\def\doseventupleempty#1%
- {\noshowargumenterror
+ {\syst_helpers_argument_reset
\doifnextoptionalelse
- {\doseventupleemptyYESone#1}%
- {\doseventupleemptyNOPone#1}}
+ {\syst_helpers_seventuple_empty_one_yes#1}%
+ {\syst_helpers_seventuple_empty_one_nop#1}}
-\def\doseventupleemptyYESone#1[#2]%
+\def\syst_helpers_seventuple_empty_one_yes#1[#2]%
{\firstargumenttrue
\doifnextoptionalelse
- {\doseventupleemptyYEStwo#1{#2}}%
- {\doseventupleemptyNOPtwo#1{#2}}}
+ {\syst_helpers_seventuple_empty_two_yes#1{#2}}%
+ {\syst_helpers_seventuple_empty_two_nop#1{#2}}}
-\def\doseventupleemptyYEStwo#1#2[#3]%
+\def\syst_helpers_seventuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
\doifnextoptionalelse
- {\doseventupleemptyYESthree#1{#2}{#3}}%
- {\doseventupleemptyNOPthree#1{#2}{#3}}}
+ {\syst_helpers_seventuple_empty_three_yes#1{#2}{#3}}%
+ {\syst_helpers_seventuple_empty_three_nop#1{#2}{#3}}}
-\def\doseventupleemptyYESthree#1#2#3[#4]%
+\def\syst_helpers_seventuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
\doifnextoptionalelse
- {\doseventupleemptyYESfour#1{#2}{#3}{#4}}%
- {\doseventupleemptyNOPfour#1{#2}{#3}{#4}}}
+ {\syst_helpers_seventuple_empty_four_yes#1{#2}{#3}{#4}}%
+ {\syst_helpers_seventuple_empty_four_nop#1{#2}{#3}{#4}}}
-\def\doseventupleemptyYESfour#1#2#3#4[#5]%
+\def\syst_helpers_seventuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
\doifnextoptionalelse
- {\doseventupleemptyYESfive#1{#2}{#3}{#4}{#5}}%
- {\doseventupleemptyNOPfive#1{#2}{#3}{#4}{#5}}}
+ {\syst_helpers_seventuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
+ {\syst_helpers_seventuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
-\def\doseventupleemptyYESfive#1#2#3#4#5[#6]%
+\def\syst_helpers_seventuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
\doifnextoptionalelse
- {\doseventupleemptyYESsix#1{#2}{#3}{#4}{#5}{#6}}%
- {\doseventupleemptyNOPsix#1{#2}{#3}{#4}{#5}{#6}}}
+ {\syst_helpers_seventuple_empty_six_yes#1{#2}{#3}{#4}{#5}{#6}}%
+ {\syst_helpers_seventuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
-\def\doseventupleemptyYESsix#1#2#3#4#5#6[#7]%
+\def\syst_helpers_seventuple_empty_six_yes#1#2#3#4#5#6[#7]%
{\sixthargumenttrue
\doifnextoptionalelse
{\seventhargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}]}%
- {\doseventupleemptyNOPseven#1{#2}{#3}{#4}{#5}{#6}{#7}}}
+ {\syst_helpers_seventuple_empty_seven_nop#1{#2}{#3}{#4}{#5}{#6}{#7}}}
-\def\dosevenemptyNOPone#1%
+\def\syst_helpers_seventuple_empty_one_nop#1%
{\firstargumentfalse
\secondargumentfalse
\thirdargumentfalse
@@ -2477,7 +2517,7 @@
\seventhargumentfalse
#1[][][][][][][]}
-\def\doseventupleemptyNOPtwo
+\def\syst_helpers_seventuple_empty_two_nop
{\secondargumentfalse
\thirdargumentfalse
\fourthargumentfalse
@@ -2485,73 +2525,81 @@
\sixthargumentfalse
\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptytwospaced
+ \expandafter\syst_helpers_seventuple_empty_two_spaced
\else
- \expandafter\dosevenemptytwonormal
+ \expandafter\syst_helpers_seventuple_empty_two_normal
\fi}
-\def\doseventupleemptyNOPthree
+\def\syst_helpers_seventuple_empty_three_nop
{\thirdargumentfalse
\fourthargumentfalse
\fifthargumentfalse
\sixthargumentfalse
\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptythreespaced
+ \expandafter\syst_helpers_seventuple_empty_three_spaced
\else
- \expandafter\dosevenemptythreenormal
+ \expandafter\syst_helpers_seventuple_empty_three_normal
\fi}
-\def\doseventupleemptyNOPfour
+\def\syst_helpers_seventuple_empty_four_nop
{\fourthargumentfalse
\fifthargumentfalse
\sixthargumentfalse
\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptyfourspaced
+ \expandafter\syst_helpers_seventuple_empty_four_spaced
\else
- \expandafter\dosevenemptyfournormal
+ \expandafter\syst_helpers_seventuple_empty_four_normal
\fi}
-\def\doseventupleemptyNOPfive
+\def\syst_helpers_seventuple_empty_five_nop
{\fifthargumentfalse
\sixthargumentfalse
\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptyfivespaced
+ \expandafter\syst_helpers_seventuple_empty_five_spaced
\else
- \expandafter\dosevenemptyfivenormal
+ \expandafter\syst_helpers_seventuple_empty_five_normal
\fi}
-\def\doseventupleemptyNOPsix
+\def\syst_helpers_seventuple_empty_six_nop
{\sixthargumentfalse
\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptysixspaced
+ \expandafter\syst_helpers_seventuple_empty_six_spaced
\else
- \expandafter\dosevenemptysixnormal
+ \expandafter\syst_helpers_seventuple_empty_six_normal
\fi}
-\def\doseventupleemptyNOPseven
+\def\syst_helpers_seventuple_empty_seven_nop
{\seventhargumentfalse
\if_next_blank_space_token
- \expandafter\dosevenemptysevenspaced
- \else
- \expandafter\dosevenemptysevennormal
- \fi}
-
-\def\dosevenemptytwospaced #1#2{#1[{#2}][][][][][][] }
-\def\dosevenemptytwonormal #1#2{#1[{#2}][][][][][][]}
-\def\dosevenemptythreespaced #1#2#3{#1[{#2}][{#3}][][][][][] }
-\def\dosevenemptythreenormal #1#2#3{#1[{#2}][{#3}][][][][][]}
-\def\dosevenemptyfourspaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][][][] }
-\def\dosevenemptyfournormal #1#2#3#4{#1[{#2}][{#3}][{#4}][][][][]}
-\def\dosevenemptyfivespaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][][] }
-\def\dosevenemptyfivenormal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][][]}
-\def\dosevenemptysixspaced #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][][] }
-\def\dosevenemptysixnormal #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][][]}
-\def\dosevenemptysevenspaced#1#2#3#4#5#6#7{#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}][] }
-\def\dosevenemptysevennormal#1#2#3#4#5#6#7{#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}][]}
+ \expandafter\syst_helpers_seventuple_empty_seven_spaced
+ \else
+ \expandafter\syst_helpers_seventuple_empty_seven_normal
+ \fi}
+
+\def\syst_helpers_seventuple_empty_two_spaced #1#2{#1[{#2}][][][][][][] }
+\def\syst_helpers_seventuple_empty_two_normal #1#2{#1[{#2}][][][][][][]}
+\def\syst_helpers_seventuple_empty_three_spaced #1#2#3{#1[{#2}][{#3}][][][][][] }
+\def\syst_helpers_seventuple_empty_three_normal #1#2#3{#1[{#2}][{#3}][][][][][]}
+\def\syst_helpers_seventuple_empty_four_spaced #1#2#3#4{#1[{#2}][{#3}][{#4}][][][][] }
+\def\syst_helpers_seventuple_empty_four_normal #1#2#3#4{#1[{#2}][{#3}][{#4}][][][][]}
+\def\syst_helpers_seventuple_empty_five_spaced #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][][] }
+\def\syst_helpers_seventuple_empty_five_normal #1#2#3#4#5{#1[{#2}][{#3}][{#4}][{#5}][][][]}
+\def\syst_helpers_seventuple_empty_six_spaced #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][][] }
+\def\syst_helpers_seventuple_empty_six_normal #1#2#3#4#5#6{#1[{#2}][{#3}][{#4}][{#5}][{#6}][][]}
+\def\syst_helpers_seventuple_empty_seven_spaced#1#2#3#4#5#6#7{#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}][] }
+\def\syst_helpers_seventuple_empty_seven_normal#1#2#3#4#5#6#7{#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}][]}
+
+\let\dosingleargument \dosingleempty
+\let\dodoubleargument \dodoubleempty
+\let\dotripleargument \dotripleempty
+\let\doquadrupleargument \doquadrupleempty
+\let\doquintupleargument \doquintupleempty
+\let\dosixtupleargument \dosixtupleempty
+\let\doseventupleargument\doseventupleempty
%D \macros
%D {strippedcsname}
@@ -2628,22 +2676,20 @@
%D worthwhile to offer two more alternatives. Watch the built||in
%D protection.
-\def\docomplexorsimple#1#2%
+\unexpanded\def\syst_helpers_complex_or_simple#1#2%
{\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#2}}
-\def\docomplexorsimpleempty#1%
+\unexpanded\def\syst_helpers_complex_or_simple_empty#1%
{\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#1[]}}
\unexpanded\def\definecomplexorsimple#1%
- {\unexpanded\edef#1%
- {\noexpand\docomplexorsimple
- \expandafter\noexpand\csname\s!complex\strippedcsname#1\endcsname
- \expandafter\noexpand\csname\s!simple \strippedcsname#1\endcsname}}
+ {\unexpanded\edef#1{\syst_helpers_complex_or_simple
+ \expandafter\noexpand\csname\s!complex\strippedcsname#1\endcsname
+ \expandafter\noexpand\csname\s!simple \strippedcsname#1\endcsname}}
\unexpanded\def\definecomplexorsimpleempty#1%
- {\unexpanded\edef#1%
- {\noexpand\docomplexorsimpleempty
- \expandafter\noexpand\csname\s!complex\strippedcsname#1\endcsname}}
+ {\unexpanded\edef#1{\syst_helpers_complex_or_simple_empty
+ \expandafter\noexpand\csname\s!complex\strippedcsname#1\endcsname}}
%D These commands are called as:
%D
@@ -2679,8 +2725,10 @@
%D We can add additional definitions later when we have defined
%D \type {\appendtoks}.
-\def \permitspacesbetweengroups{\let\@@permitspacesbetweengroups\zerocount}
-\def\dontpermitspacesbetweengroups{\let\@@permitspacesbetweengroups\plusone}
+\newconditional\c_syst_helpers_permit_spaces_between_groups
+
+\unexpanded\def \permitspacesbetweengroups{\settrue \c_syst_helpers_permit_spaces_between_groups}
+\unexpanded\def\dontpermitspacesbetweengroups{\setfalse\c_syst_helpers_permit_spaces_between_groups}
\dontpermitspacesbetweengroups
@@ -2689,94 +2737,94 @@
%D potentially being a \type {conditional} token. Okay, these macros
%D are not called that often but it saves crap when tracing.
-\def\dodogetgroupargument
+\unexpanded\def\syst_helpers_get_grouped_argument#1#2%
+ {\let\syst_helpers_get_grouped_argument_yes#1%
+ \let\syst_helpers_get_grouped_argument_nop#2%
+ \futurelet\nextargument\syst_helpers_get_grouped_argument_indeed}
+
+\def\syst_helpers_get_grouped_argument_indeed
{\ifx\nextargument\bgroup
- \expandafter\dodogetgroupargumentA
+ \expandafter\syst_helpers_get_grouped_argument_a
\else
- \expandafter\dodogetgroupargumentB
+ \expandafter\syst_helpers_get_grouped_argument_b
\fi}
-\def\dodogetgroupargumentA
- {\noshowargumenterror
- \dogroupargumentyes\dodogetargument}
+\def\syst_helpers_get_grouped_argument_a
+ {\syst_helpers_argument_reset
+ \syst_helpers_get_grouped_argument_yes\syst_helpers_get_grouped_argument_nested}
-\def\dodogetgroupargumentB
- {\ifcase\@@permitspacesbetweengroups
- \expandafter\dodogetgroupargumentF
+\def\syst_helpers_get_grouped_argument_b
+ {\ifconditional\c_syst_helpers_permit_spaces_between_groups
+ \expandafter\syst_helpers_get_grouped_argument_f
\else
- \expandafter\dodogetgroupargumentD
+ \expandafter\syst_helpers_get_grouped_argument_d
\fi}
-\def\dodogetgroupargumentD
- {\doshowargumenterror
- \dogroupargumentnop\dodogetargument{}}
+\def\syst_helpers_get_grouped_argument_d
+ {\syst_helpers_argument_error
+ \syst_helpers_get_grouped_argument_nop\syst_helpers_get_grouped_argument_nested{}}
\begingroup
- \def\\ {\dogetgroupargument\dogroupargumentyes\dogroupargumentnop}
- \global\let\dodogetgroupargumentE\\
+ \def\\ {\syst_helpers_get_grouped_argument\syst_helpers_get_grouped_argument_yes\syst_helpers_get_grouped_argument_nop}
+ \global\let\syst_helpers_get_grouped_argument_e\\
\endgroup
-\def\dodogetgroupargumentF
+\def\syst_helpers_get_grouped_argument_f
{\ifx\nextargument\blankspace
- \expandafter\dodogetgroupargumentE % G
+ \expandafter\syst_helpers_get_grouped_argument_e % g
\else
- \expandafter\dodogetgroupargumentD % H
+ \expandafter\syst_helpers_get_grouped_argument_d % h
\fi}
-\def\dogetgroupargument#1#2%
- {\let\dogroupargumentyes#1%
- \let\dogroupargumentnop#2%
- \futurelet\nextargument\dodogetgroupargument}
-
\def\dosinglegroupempty#1%
- {\def\dodogetargument%
+ {\def\syst_helpers_get_grouped_argument_nested
{\dontpermitspacesbetweengroups
#1}%
- \dogetgroupargument\firstargumenttrue\firstargumentfalse}
+ \syst_helpers_get_grouped_argument\firstargumenttrue\firstargumentfalse}
\def\dodoublegroupempty#1%
- {\def\dodogetargument##1%
- {\def\dodogetargument%
+ {\def\syst_helpers_get_grouped_argument_nested##1%
+ {\def\syst_helpers_get_grouped_argument_nested
{\dontpermitspacesbetweengroups
#1{##1}}%
- \dogetgroupargument\secondargumenttrue\secondargumentfalse}%
- \dogetgroupargument\firstargumenttrue\firstargumentfalse}
+ \syst_helpers_get_grouped_argument\secondargumenttrue\secondargumentfalse}%
+ \syst_helpers_get_grouped_argument\firstargumenttrue\firstargumentfalse}
\def\dotriplegroupempty#1%
- {\def\dodogetargument##1%
- {\def\dodogetargument####1%
- {\def\dodogetargument%
+ {\def\syst_helpers_get_grouped_argument_nested##1%
+ {\def\syst_helpers_get_grouped_argument_nested####1%
+ {\def\syst_helpers_get_grouped_argument_nested
{\dontpermitspacesbetweengroups
#1{##1}{####1}}%
- \dogetgroupargument\thirdargumenttrue\thirdargumentfalse}%
- \dogetgroupargument\secondargumenttrue\secondargumentfalse}%
- \dogetgroupargument\firstargumenttrue\firstargumentfalse}
+ \syst_helpers_get_grouped_argument\thirdargumenttrue\thirdargumentfalse}%
+ \syst_helpers_get_grouped_argument\secondargumenttrue\secondargumentfalse}%
+ \syst_helpers_get_grouped_argument\firstargumenttrue\firstargumentfalse}
\def\doquadruplegroupempty#1%
- {\def\dodogetargument##1%
- {\def\dodogetargument####1%
- {\def\dodogetargument########1%
- {\def\dodogetargument%
+ {\def\syst_helpers_get_grouped_argument_nested##1%
+ {\def\syst_helpers_get_grouped_argument_nested####1%
+ {\def\syst_helpers_get_grouped_argument_nested########1%
+ {\def\syst_helpers_get_grouped_argument_nested
{\dontpermitspacesbetweengroups
#1{##1}{####1}{########1}}%
- \dogetgroupargument\fourthargumenttrue\fourthargumentfalse}%
- \dogetgroupargument\thirdargumenttrue\thirdargumentfalse}%
- \dogetgroupargument\secondargumenttrue\secondargumentfalse}%
- \dogetgroupargument\firstargumenttrue\firstargumentfalse}
+ \syst_helpers_get_grouped_argument\fourthargumenttrue\fourthargumentfalse}%
+ \syst_helpers_get_grouped_argument\thirdargumenttrue\thirdargumentfalse}%
+ \syst_helpers_get_grouped_argument\secondargumenttrue\secondargumentfalse}%
+ \syst_helpers_get_grouped_argument\firstargumenttrue\firstargumentfalse}
\def\doquintuplegroupempty#1%
- {\def\dodogetargument##1%
- {\def\dodogetargument####1%
- {\def\dodogetargument########1%
- {\def\dodogetargument################1%
- {\def\dodogetargument%
+ {\def\syst_helpers_get_grouped_argument_nested##1%
+ {\def\syst_helpers_get_grouped_argument_nested####1%
+ {\def\syst_helpers_get_grouped_argument_nested########1%
+ {\def\syst_helpers_get_grouped_argument_nested################1%
+ {\def\syst_helpers_get_grouped_argument_nested
{\dontpermitspacesbetweengroups
#1{##1}{####1}{########1}{################1}}%
- \dogetgroupargument\fifthargumenttrue\fifthargumentfalse}%
- \dogetgroupargument\fourthargumenttrue\fourthargumentfalse}%
- \dogetgroupargument\thirdargumenttrue\thirdargumentfalse}%
- \dogetgroupargument\secondargumenttrue\secondargumentfalse}%
- \dogetgroupargument\firstargumenttrue\firstargumentfalse}
+ \syst_helpers_get_grouped_argument\fifthargumenttrue\fifthargumentfalse}%
+ \syst_helpers_get_grouped_argument\fourthargumenttrue\fourthargumentfalse}%
+ \syst_helpers_get_grouped_argument\thirdargumenttrue\thirdargumentfalse}%
+ \syst_helpers_get_grouped_argument\secondargumenttrue\secondargumentfalse}%
+ \syst_helpers_get_grouped_argument\firstargumenttrue\firstargumentfalse}
%D These macros can explicitly take care of spaces, which means
%D that the next definition and calls are valid:
@@ -2857,7 +2905,7 @@
%D nesting is to be expected, we can reuse \type{\wait} within
%D \type{\wait} itself.
-\def\wait
+\unexpanded\def\wait
{\begingroup
\read16 to \wait
\endgroup}
@@ -2888,24 +2936,24 @@
\newtoks\everywritestring
- \def\writedirect {\immediate\write\statuswrite}
- \def\writeline {\writedirect{}}
- \def\writestring#1{\begingroup\the\everywritestring\writedirect{#1}\endgroup}
+ \def\writedirect {\immediate\write\statuswrite}
+ \def\writeline {\writedirect{}}
+ \unexpanded\def\writestring#1{\begingroup\the\everywritestring\writedirect{#1}\endgroup}
\fi
-\def\normalwritestatus#1#2%
- {\writestring{\expandafter\dosplitstatus\expandafter\statuswidth#1%
+\unexpanded\def\normalwritestatus#1#2%
+ {\writestring{\expandafter\syst_helpers_split_status_yes\expandafter\statuswidth#1%
\space\space\space\space\space\space\space
\space\space\space\space\space\space\space
\space\space\space\space\space\space\end
\space:\space#2}}
-\def\dosplitstatus#1#2%
- {\ifcase#1 \expandafter\nosplitstatus\fi#2%
- \expandafter\dosplitstatus\expandafter{\the\numexpr#1+\minusone\relax}}
+\def\syst_helpers_split_status_yes#1#2%
+ {\ifcase#1 \expandafter\syst_helpers_split_status_nop\fi#2%
+ \expandafter\syst_helpers_split_status_yes\expandafter{\the\numexpr#1+\minusone\relax}}
-\def\nosplitstatus#1\end
+\def\syst_helpers_split_status_nop#1\end
{}
%D \macros
@@ -2925,13 +2973,13 @@
\newif\ifdebuggerinfo
-\def\debuggerinfo#1#2%
+\unexpanded\def\debuggerinfo#1#2%
{\ifdebuggerinfo
\writestatus{debugger}{#1:: #2}%
\fi}
-\ifdefined\writestatus \else \let\writestatus\normalwritestatus \fi
-\ifdefined\writebanner \else \def\writebanner{\writestring} \fi
+\ifdefined\writestatus \else \let\writestatus\normalwritestatus \fi
+\ifdefined\writebanner \else \unexpanded\def\writebanner{\writestring} \fi
% % % % % % % % % % % % % % % % % % % % % % % %
@@ -2941,13 +2989,13 @@
%D A raw and dirty alternative for \type {\getparameters}; no
%D checking is done!
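%D
%D For instance (a minimal sketch, assuming the usual prefix behaviour of
%D \type {\getparameters}):
%D
%D \starttyping
%D \rawgetparameters[xx][alpha=1,beta=2]
%D \stoptyping
%D
%D after which \type {\xxalpha} expands to \type {1} and \type {\xxbeta} to
%D \type {2}.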
-\def\rawsetparameter#1=#2,%
+\unexpanded\def\rawsetparameter#1=#2,%
{\if]#1\else
\expandafter\def\csname\rawparameterprefix#1\endcsname{#2}%
\expandafter\rawsetparameter
\fi}
-\def\rawgetparameters[#1][#2% some 5-10% faster
+\unexpanded\def\rawgetparameters[#1][#2% some 5-10% faster
{\ifx#2]% test is needed, else bomb on [#1][]
\expandafter\gobbleoneargument
\else
@@ -2970,28 +3018,28 @@
%D \type {\redoglobal}. When using only alternatives, one can
%D reset this mechanism with \type {\resetglobal}.
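%D
%D For example (using the \type {\increment} macro defined elsewhere in this
%D file):
%D
%D \starttyping
%D \doglobal\increment\somecounter
%D \stoptyping
%D
%D increments \type {\somecounter} globally; afterwards the mechanism resets
%D itself.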
-\def\resetglobal
+\unexpanded\def\resetglobal
{\let\redoglobal\relax
\let\dodoglobal\relax}
\resetglobal
-\def\doglobal
+\unexpanded\def\doglobal
{\ifx\redoglobal\relax
\let\redoglobal\global
- \let\dodoglobal\@@dodoglobal
+ \let\dodoglobal\syst_helpers_dodo_global
\fi}
-\def\@@dodoglobal
+\def\syst_helpers_dodo_global
{\resetglobal\global}
\def\saveglobal
- {\let\@@dodoglobal\dodoglobal
- \let\@@redoglobal\redoglobal}
+ {\let\syst_helpers_dodo_global\dodoglobal
+ \let\syst_helpers_redo_global\redoglobal}
\def\restoreglobal
- {\let\redoglobal\@@redoglobal
- \let\dodoglobal\@@dodoglobal}
+ {\let\redoglobal\syst_helpers_redo_global
+ \let\dodoglobal\syst_helpers_dodo_global}
%D A very useful application of this macro is \type {\newif},
%D \TEX's fake boolean type. Not being a primitive,
@@ -3034,7 +3082,7 @@
\unexpanded\expandafter\def
\fi#1}
-\def\redefine#1%
+\unexpanded\def\redefine#1%
{\ifdefined#1%
\message{[\noexpand#1is redefined]}%
\fi
@@ -3062,76 +3110,46 @@
%
% [\test]
-% todo: pick up keywords:
-%
-% \starttexdefinition unexpanded bagger ....
-
-% \bgroup \obeylines
-%
-% \gdef\starttexdefinition%
-% {\bgroup%
-% \obeylines%
-% \dostarttexdefinition}
-%
-% \gdef\dostarttexdefinition #1
-% {\catcode\endoflineasciicode\ignorecatcode%
-% \doifinstringelse\letterhash{\detokenize{#1}}\dodostarttexdefinition\nonostarttexdefinition#1
-% }
-%
-% \gdef\dodostarttexdefinition#1 #2
-% {\dododostarttexdefinition{#1}{#2}}
-%
-% \gdef\dododostarttexdefinition#1#2#3\stoptexdefinition%
-% {\egroup%
-% \expandafter\def\csname#1\endcsname#2{#3}}
-%
-% \gdef\nonostarttexdefinition#1
-% {\nononostarttexdefinition{#1}{}}
-%
-% \gdef\nononostarttexdefinition#1#2#3\stoptexdefinition%
-% {\egroup%
-% \expandafter\def\csname#1\endcsname{#3}}
-%
-% \egroup
-
\def\s!unexpanded{unexpanded}
\bgroup \obeylines
-\gdef\starttexdefinition%
+\global\let\stoptexdefinition\relax
+
+\unexpanded\gdef\starttexdefinition%
{\bgroup%
\obeylines%
- \dostarttexdefinition}
+ \syst_helpers_start_tex_definition}
-\gdef\dostarttexdefinition #1
+\gdef\syst_helpers_start_tex_definition #1
{\catcode\endoflineasciicode\ignorecatcode%
- \doifinstringelse\letterhash{\detokenize{#1}}\dodostarttexdefinition\nonostarttexdefinition#1
+ \doifinstringelse\letterhash{\detokenize{#1}}\syst_helpers_start_tex_definition_yes\syst_helpers_start_tex_definition_nop#1
}
-\gdef\dodostarttexdefinition#1 #2
+\gdef\syst_helpers_start_tex_definition_yes#1 #2
{\edef\texdefinitionname{#1}%
\ifx\texdefinitionname\s!unexpanded%
- \expandafter\dododostarttexdefinitionU%
+ \expandafter\syst_helpers_start_tex_definition_yes_unexpanded%
\else%
- \expandafter\dododostarttexdefinitionN%
+ \expandafter\syst_helpers_start_tex_definition_yes_normal%
\fi%
{#1}#2
}
-\gdef\dododostarttexdefinitionU#1#2 #3
+\gdef\syst_helpers_start_tex_definition_yes_unexpanded#1#2 #3
#4\stoptexdefinition%
{\egroup% #1=unexpanded
\unexpanded\expandafter\def\csname#2\endcsname#3{#4}}
-\gdef\dododostarttexdefinitionN#1#2
+\gdef\syst_helpers_start_tex_definition_yes_normal#1#2
#3\stoptexdefinition%
{\egroup%
\expandafter\def\csname#1\endcsname#2{#3}}
-\gdef\nonostarttexdefinition#1
- {\nononostarttexdefinition{#1}{}}
+\gdef\syst_helpers_start_tex_definition_nop#1
+ {\syst_helpers_start_tex_definition_nop_indeed{#1}{}}
-\gdef\nononostarttexdefinition#1#2#3\stoptexdefinition%
+\gdef\syst_helpers_start_tex_definition_nop_indeed#1#2#3\stoptexdefinition%
{\egroup%
\expandafter\def\csname#1\endcsname{#3}}
@@ -3141,8 +3159,8 @@
% This is a first variant, more might be added:
-\def\starttexcode{\unprotect}
-\def\stoptexcode {\protect}
+\unexpanded\def\starttexcode{\unprotect}
+\unexpanded\def\stoptexcode {\protect}
%D \macros
%D {newcounter,
@@ -3202,7 +3220,7 @@
\def\zerocountervalue{0}
-\def\newcounter#1%
+\unexpanded\def\newcounter#1%
{\dodoglobal\let#1\zerocountervalue}
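%D
%D For instance:
%D
%D \starttyping
%D \newcounter\mycounter
%D \increment\mycounter
%D \stoptyping
%D
%D after which \type {\mycounter} expands to \type {1}.
%D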
%D Nowadays we don't mind a few more tokens if we can gain a
@@ -3217,14 +3235,14 @@
\def\syst_helpers_do_do_increment(#1{\doifnextcharelse,{\syst_helpers_do_do_do_increment#1}{\syst_helpers_do_do_do_increment#1,\plusone}}
\def\syst_helpers_do_do_decrement(#1{\doifnextcharelse,{\syst_helpers_do_do_do_decrement#1}{\syst_helpers_do_do_do_decrement#1,\plusone}}
-\def\fastincrement#1{\dodoglobal\edef#1{\the\numexpr#1+\plusone \relax}}
-\def\fastdecrement#1{\dodoglobal\edef#1{\the\numexpr#1+\minusone\relax}}
+\unexpanded\def\fastincrement#1{\dodoglobal\edef#1{\the\numexpr#1+\plusone \relax}}
+\unexpanded\def\fastdecrement#1{\dodoglobal\edef#1{\the\numexpr#1+\minusone\relax}}
-\def\increment{\doifnextcharelse(\syst_helpers_do_do_increment\syst_helpers_do_increment}
-\def\decrement{\doifnextcharelse(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
+\unexpanded\def\increment{\doifnextcharelse(\syst_helpers_do_do_increment\syst_helpers_do_increment}
+\unexpanded\def\decrement{\doifnextcharelse(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
-\def\incrementvalue#1{\expandafter\increment\csname#1\endcsname}
-\def\decrementvalue#1{\expandafter\decrement\csname#1\endcsname}
+\unexpanded\def\incrementvalue#1{\expandafter\increment\csname#1\endcsname}
+\unexpanded\def\decrementvalue#1{\expandafter\decrement\csname#1\endcsname}
%D \macros
%D {newsignal}
@@ -3245,9 +3263,9 @@
\newdimen\maximumsignal % step is about 0.00025pt
-\def\newsignal#1%
+\unexpanded\def\newsignal#1%
{\ifdefined#1\else
- \advance\maximumsignal 2sp % to be save in rounding
+     \advance\maximumsignal 2\scaledpoint % to be safe in rounding
\edef#1{\the\maximumsignal}%
\fi}
@@ -3262,9 +3280,9 @@
%D \stoptyping
\def\checkedstrippedcsname#1% this permits \strippedcsname{\xxx} and \strippedcsname{xxx}
- {\expandafter\docheckedstrippedcsname\string#1}
+ {\expandafter\syst_helpers_checked_stripped_csname\string#1}
-\def\docheckedstrippedcsname#1%
+\def\syst_helpers_checked_stripped_csname#1%
{\if\noexpand#1\letterbackslash\else#1\fi}
%D \macros
@@ -3272,15 +3290,14 @@
%D
%D We will use this one in:
-\def\savenormalmeaning#1%
+\unexpanded\def\savenormalmeaning#1%
{\ifcsname normal\strippedcsname#1\endcsname \else
- \letvalue{normal\strippedcsname#1}#1%
+ \expandafter\let\csname normal\strippedcsname#1\endcsname#1%
\fi}
%D \macros
%D {dorecurse,recurselevel,recursedepth,
-%D dostepwiserecurse,
-%D for}
+%D dostepwiserecurse}
%D
%D \TEX\ does not offer us powerful for||loop mechanisms. On
%D the other hand its recursion engine is quite unique. We
@@ -3326,77 +3343,79 @@
\def\recursedepth{\the\outerrecurse}
\def\recurselevel{0}
-\let\nextrecurse\relax
+\let\syst_helpers_stepwise_next\relax
-\def\@@irecurse{@@ir@@} % ecurse} % stepper
-\def\@@arecurse{@@ar@@} % ecurse} % action
+\installsystemnamespace{recurseindex}
+\installsystemnamespace{recurseaction}
\unexpanded\def\dostepwiserecurse#1#2#3#4% can be made faster by postponing #4
{\global\advance\outerrecurse \plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname{#4}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \ifnum#3>0\relax
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname{#4}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \ifnum#3>\zerocount\relax
\ifnum#2<#1\relax
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\else
- \let\nextrecurse\dodostepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_recurse
\fi
\else
- \ifnum#3<0\relax
+ \ifnum#3<\zerocount\relax
\ifnum#1<#2\relax
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\else
- \let\nextrecurse\dodostepwisereverse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_reverse
\fi
\else
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\fi
- \fi\normalexpanded{\nextrecurse{\number#1}{\number#2}{\number#3}}}
+ \fi\normalexpanded{\syst_helpers_stepwise_next{\number#1}{\number#2}{\number#3}}}
-\def\dodostepwiserecurse#1#2#3% from to step
+\unexpanded\def\syst_helpers_stepwise_recurse#1#2#3% from to step
{\ifnum#1>#2\relax
- \expandafter\nodostepwiserecurse
+ \expandafter\syst_helpers_stepwise_recurse_nop
\else
\def\recurselevel{#1}%
- \doubleexpandafter\redostepwiserecurse\expandafter
+ \doubleexpandafter\syst_helpers_stepwise_recurse_yes\expandafter
\fi\expandafter{\the\numexpr\recurselevel+#3\relax}{#2}{#3}}
-\unexpanded\def\expandrecursecontent
- {\csname\@@arecurse\recursedepth\endcsname}
+\unexpanded\def\syst_helpers_recurse_content
+ {\csname\??recurseaction\recursedepth\endcsname}
-\unexpanded\def\redostepwiserecurse
- {\expandrecursecontent\dodostepwiserecurse}
+\unexpanded\def\syst_helpers_stepwise_recurse_yes
+ {\syst_helpers_recurse_content
+ \syst_helpers_stepwise_recurse}
-\unexpanded\def\dodostepwisereverse#1#2#3% from to step
+\unexpanded\def\syst_helpers_stepwise_reverse#1#2#3% from to step
{\ifnum#1<#2\relax
- \expandafter\nodostepwiserecurse
+ \expandafter\syst_helpers_stepwise_recurse_nop
\else
\def\recurselevel{#1}%
\innerrecurse#1\relax
\advance\innerrecurse#3\relax
- \doubleexpandafter\redostepwisereverse\expandafter
+ \doubleexpandafter\syst_helpers_stepwise_reverse_yes\expandafter
\fi\expandafter{\the\innerrecurse}{#2}{#3}}
-\unexpanded\def\redostepwisereverse
- {\expandrecursecontent\dodostepwisereverse}
+\unexpanded\def\syst_helpers_stepwise_reverse_yes
+ {\syst_helpers_recurse_content
+ \syst_helpers_stepwise_reverse}
-\unexpanded\def\exitstepwiserecurse
- {\nodostepwiserecurse\relax}
+\unexpanded\def\syst_helpers_stepwise_exit
+ {\syst_helpers_stepwise_recurse_nop\relax}
-\unexpanded\def\nodostepwiserecurse#1#2#3#4%
- {\expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
- \global\advance\outerrecurse \minusone}
+\unexpanded\def\syst_helpers_stepwise_recurse_nop#1#2#3#4%
+ {\expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
+ \global\advance\outerrecurse\minusone}
-\unexpanded\def\nonostepwiserecurse#1#2#3%
- {\expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
- \global\advance\outerrecurse \minusone}
+% \unexpanded\def\nonostepwiserecurse#1#2#3%
+% {\expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
+% \global\advance\outerrecurse\minusone}
\unexpanded\def\dorecurse#1%
- {\dostepwiserecurse1{#1}1}
+ {\dostepwiserecurse\plusone{#1}\plusone}
-\def\doexpandedrecurse#1#2%
+\def\doexpandedrecurse#1#2% user macro (also was \doxprecurse)
{\ifnum#1>\zerocount
- #2\expandafter\doexpandedrecurse\expandafter{\the\numexpr#1-1\relax}{#2}%
+ #2\expandafter\doexpandedrecurse\expandafter{\the\numexpr#1-\plusone\relax}{#2}%
\fi}
%D As we can see here, the simple command \type{\dorecurse} is
@@ -3422,47 +3441,48 @@
{\ifcase#1\relax
\expandafter\gobbletwoarguments
\or
- \expandafter\ydorecurse
+ \expandafter\syst_helpers_recurse_y
\else
- \expandafter\xdorecurse
+ \expandafter\syst_helpers_recurse_x
\fi{#1}}
-\unexpanded\def\xdorecurse#1#2%
+\unexpanded\def\syst_helpers_recurse_x#1#2%
{\global\advance\outerrecurse \plusone
- \expandafter\gdef\csname\@@arecurse\recursedepth\endcsname{#2}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \expandafter\dodorecurse\expandafter1\expandafter{\number#1}}
+ \expandafter\gdef\csname\??recurseaction\recursedepth\endcsname{#2}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \expandafter\syst_helpers_recurse_indeed\expandafter1\expandafter{\number#1}}
-\unexpanded\def\ydorecurse#1#2%
+\unexpanded\def\syst_helpers_recurse_y#1#2%
{\global\advance\outerrecurse \plusone
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
\let\recurselevel\!!plusone
#2%
- \expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
+ \expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
\global\advance\outerrecurse \minusone}
-\unexpanded\def\dodorecurse#1#2% from to
+\unexpanded\def\syst_helpers_recurse_indeed#1#2% from to
{\ifnum#1>#2\relax
- \expandafter\nodorecurse
+ \expandafter\syst_helpers_recurse_indeed_nop
\else
\def\recurselevel{#1}%
- \doubleexpandafter\redorecurse
+ \doubleexpandafter\syst_helpers_recurse_indeed_yes
\fi\expandafter{\the\numexpr\recurselevel+\plusone\relax}{#2}}
-\unexpanded\def\dodorecurse#1#2% from to
+\unexpanded\def\syst_helpers_recurse_indeed#1#2% from to
{\ifnum#1>#2\relax
- \expandafter\nodorecurse
+ \expandafter\syst_helpers_recurse_indeed_nop
\else
\def\recurselevel{#1}%
\innerrecurse#1\advance\innerrecurse\plusone
- \doubleexpandafter\redorecurse
+ \doubleexpandafter\syst_helpers_recurse_indeed_yes
\fi\expandafter{\the\innerrecurse}{#2}}
-\unexpanded\def\redorecurse
- {\expandrecursecontent\dodorecurse}
+\unexpanded\def\syst_helpers_recurse_indeed_yes
+ {\syst_helpers_recurse_content
+ \syst_helpers_recurse_indeed}
-\unexpanded\def\nodorecurse#1#2#3%
- {\expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
+\unexpanded\def\syst_helpers_recurse_indeed_nop#1#2#3%
+ {\expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
\global\advance\outerrecurse \minusone }
%D \macros
@@ -3483,32 +3503,33 @@
%D When needed, one can call for \type{\looplevel} and
%D \type{\loopdepth}.
-\let\endofloop\donothing
+\let\endofloop\donothing % maybe \syst_helpers_loop_end
\unexpanded\def\doloop#1%
{\global\advance\outerrecurse \plusone
- \expandafter\gdef\csname\@@arecurse\recursedepth\endcsname{#1}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \let\endofloop\dodoloop
- \dodoloop1} % no \plusone else \recurselevel wrong
+ \expandafter\gdef\csname\??recurseaction\recursedepth\endcsname{#1}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \let\endofloop\syst_helpers_loop
+ \syst_helpers_loop1} % no \plusone else \recurselevel wrong
-\unexpanded\def\dodoloop#1%
+\unexpanded\def\syst_helpers_loop#1%
{\def\recurselevel{#1}%
- \expandafter\redoloop\expandafter{\the\numexpr\recurselevel+\plusone\relax}}
+ \expandafter\syst_helpers_loop_yes\expandafter{\the\numexpr\recurselevel+\plusone\relax}}
-\unexpanded\def\redoloop
- {\expandrecursecontent\endofloop}
+\unexpanded\def\syst_helpers_loop_yes
+ {\syst_helpers_recurse_content
+ \endofloop}
-\unexpanded\def\nodoloop#1%
- {\let\endofloop\dodoloop % new, permits nested \doloop's
- \expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
+\unexpanded\def\syst_helpers_loop_nop#1%
+ {\let\endofloop\syst_helpers_loop % new, permits nested \doloop's
+ \expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
\global\advance\outerrecurse\minusone}
\unexpanded\def\exitloop % \exitloop quits at end
- {\let\endofloop\nodoloop}
+ {\let\endofloop\syst_helpers_loop_nop}
\unexpanded\def\exitloopnow#1\endofloop % \exitloopnow quits directly
- {\nodoloop}
+ {\syst_helpers_loop_nop}
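%D For instance, a loop that quits after five iterations could be
%D written as follows (a sketch):
%D
%D \starttyping
%D \doloop
%D   {[\recurselevel]%
%D    \ifnum\recurselevel>4 \exitloop \fi}
%D \stoptyping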
%D The loop is executed at least once, so beware of situations
%D like:
@@ -3548,94 +3569,113 @@
%D \dorecurse{3}{\expanded{\definesymbol[test-\recurselevel][xx-\recurselevel]}}
%D \stoptyping
-\def\expandrecursecontent
- {\csname\@@arecurse\recursedepth\expandafter\expandafter\expandafter\endcsname
+\def\syst_helpers_recurse_content
+ {\csname\??recurseaction\recursedepth\expandafter\expandafter\expandafter\endcsname
\expandafter\expandafter\expandafter{\expandafter\recurselevel\expandafter}\expandafter{\recursedepth}}
-\unexpanded\def\xdorecurse#1#2%
+\unexpanded\def\syst_helpers_recurse_x#1#2%
{\global\advance\outerrecurse \plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#2}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \expandafter\dodorecurse\expandafter1\expandafter{\number#1}}
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#2}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \expandafter\syst_helpers_recurse_indeed\expandafter1\expandafter{\number#1}}
-\unexpanded\def\ydorecurse#1#2%
+\unexpanded\def\syst_helpers_recurse_y#1#2%
{\global\advance\outerrecurse \plusone
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
\let\recurselevel\!!plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#2}%
- \expandrecursecontent
- \expandafter\let\expandafter\recurselevel\csname\@@irecurse\recursedepth\endcsname
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#2}%
+ \syst_helpers_recurse_content
+ \expandafter\let\expandafter\recurselevel\csname\??recurseindex\recursedepth\endcsname
\global\advance\outerrecurse \minusone}
\unexpanded\def\dostepwiserecurse#1#2#3#4% can be made faster by postponing #4
{\global\advance\outerrecurse \plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#4}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \ifnum#3>0\relax
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#4}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \ifnum#3>\zerocount\relax
\ifnum#2<#1\relax
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\else
- \let\nextrecurse\dodostepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_recurse
\fi
\else
- \ifnum#3<0\relax
+ \ifnum#3<\zerocount\relax
\ifnum#1<#2\relax
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\else
- \let\nextrecurse\dodostepwisereverse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_reverse
\fi
\else
- \let\nextrecurse\exitstepwiserecurse
+ \let\syst_helpers_stepwise_next\syst_helpers_stepwise_exit
\fi
- \fi\normalexpanded{\nextrecurse{\number#1}{\number#2}{\number#3}}}
+ \fi\normalexpanded{\syst_helpers_stepwise_next{\number#1}{\number#2}{\number#3}}}
\unexpanded\def\doloop#1%
{\global\advance\outerrecurse \plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#1}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \let\endofloop\dodoloop
- \dodoloop1} % no \plusone else \recurselevel wrong
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#1}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \let\endofloop\syst_helpers_loop
+ \syst_helpers_loop1} % no \plusone else \recurselevel wrong
% faster
+% \unexpanded\def\dostepwiserecurse#1#2#3#4% can be made faster by postponing #4
+% {\global\advance\outerrecurse \plusone
+% \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#4}%
+% \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+% \csname @swr%
+% \ifnum#3>\zerocount
+% \ifnum#2<#1\else d\fi
+% \else\ifnum#3<\zerocount
+% \ifnum#1<#2\else r\fi
+% \fi\fi
+% \expandafter\endcsname\normalexpanded{{\number#1}{\number#2}{\number#3}}}
+
+% \let\@swr \syst_helpers_stepwise_exit
+% \let\@swrd\syst_helpers_stepwise_recurse
+% \let\@swrr\syst_helpers_stepwise_reverse
+
+\installsystemnamespace{recursestepwise}
+
\unexpanded\def\dostepwiserecurse#1#2#3#4% can be made faster by postponing #4
{\global\advance\outerrecurse \plusone
- \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#4}%
- \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
- \csname @swr%
+ \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#4}%
+ \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
+ \csname\??recursestepwise
\ifnum#3>\zerocount
\ifnum#2<#1\else d\fi
\else\ifnum#3<\zerocount
\ifnum#1<#2\else r\fi
\fi\fi
\expandafter\endcsname\normalexpanded{{\number#1}{\number#2}{\number#3}}}
+ % \expandafter\endcsname\expandafter{\number#1\expandafter}\expandafter{\number#2\expandafter}\expandafter{\number#3}}
-\let\@swr \exitstepwiserecurse
-\let\@swrd\dodostepwiserecurse
-\let\@swrr\dodostepwisereverse
+\letvalue{\??recursestepwise }\syst_helpers_stepwise_exit
+\letvalue{\??recursestepwise d}\syst_helpers_stepwise_recurse
+\letvalue{\??recursestepwise r}\syst_helpers_stepwise_reverse
% quite okay too, but untested
%
% \def\dostepwiserecurse#1#2#3#4% can be made faster by postponing #4
% {\global\advance\outerrecurse \plusone
-% \global\expandafter\def\csname\@@arecurse\recursedepth\endcsname##1##2{#4}%
-% \global\expandafter\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
+% \global\expandafter\def\csname\??recurseaction\recursedepth\endcsname##1##2{#4}%
+% \global\expandafter\let\csname\??recurseindex\recursedepth\endcsname\recurselevel
% \normalexpanded
% {\ifnum#3>\zerocount
% \ifnum#2<#1
-% \exitstepwiserecurse
+% \syst_helpers_stepwise_exit
% \else
-% \dodostepwiserecurse
+% \syst_helpers_stepwise_recurse
% \fi
% \else
% \ifnum#3<\zerocount
% \ifnum#1<#2
-% \exitstepwiserecurse
+% \syst_helpers_stepwise_exit
% \else
-% \dodostepwisereverse
+% \syst_helpers_stepwise_reverse
% \fi
% \else
-% \exitstepwiserecurse
+% \syst_helpers_stepwise_exit
% \fi
% \fi{\number#1}{\number#2}{\number#3}}}
@@ -3643,21 +3683,22 @@
\newcount\fastloopindex
\newcount\fastloopfinal
-\let\fastloopcs\relax
+
+\let\m_syst_helpers_fast_loop_cs\relax
\unexpanded\def\dofastloopcs#1#2%
- {\let\fastloopcs#2
+ {\let\m_syst_helpers_fast_loop_cs#2%
\fastloopindex\plusone
\fastloopfinal#1\relax
- \dodofastloopcs}
+ \syst_helpers_fast_loop_cs}
-\unexpanded\def\dodofastloopcs
+\unexpanded\def\syst_helpers_fast_loop_cs
{\ifnum\fastloopindex>\fastloopfinal
- \let\fastloopcs\relax
+ \let\m_syst_helpers_fast_loop_cs\relax
\else
- \fastloopcs
+ \m_syst_helpers_fast_loop_cs
\advance\fastloopindex\plusone
- \expandafter\dodofastloopcs
+ \expandafter\syst_helpers_fast_loop_cs
\fi}
% Helper:
@@ -3693,10 +3734,10 @@
\unexpanded\def\doloopoverlist#1#2%
{\global\advance\outerrecurse\plusone
- \expandafter\gdef\csname\@@arecurse\recursedepth\endcsname##1{\edef\recursestring{##1}#2}%
- \expandafter\glet\csname\@@irecurse\recursedepth\endcsname\recursestring
- \normalexpanded{\processcommalist[#1]{\expandafter\noexpand\csname\@@arecurse\recursedepth\endcsname}}%
- \expandafter\let\expandafter\recursestring\csname\@@irecurse\recursedepth\endcsname
+ \expandafter\gdef\csname\??recurseaction\recursedepth\endcsname##1{\edef\recursestring{##1}#2}%
+ \expandafter\glet\csname\??recurseindex\recursedepth\endcsname\recursestring
+ \normalexpanded{\processcommalist[#1]{\expandafter\noexpand\csname\??recurseaction\recursedepth\endcsname}}%
+ \expandafter\let\expandafter\recursestring\csname\??recurseindex\recursedepth\endcsname
\global\advance\outerrecurse\minusone}
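%D A usage sketch; each item of the comma list ends up in \type
%D {\recursestring}:
%D
%D \starttyping
%D \doloopoverlist{alpha,beta,gamma}{[\recursestring]}
%D \stoptyping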
%D \macros
@@ -3724,20 +3765,20 @@
% \EveryPar{y } \everypar{before } [before] \par
% }
-% retrofit this into mkii
-
-\def\dowithevery#1%
- {\expandafter\removetoks\expandafter\the\csname t\strippedcsname#1\endcsname\from#1%
- \expandafter\appendtoks\expandafter\the\csname t\strippedcsname#1\endcsname\to #1%
- \csname t\strippedcsname#1\endcsname}
+\installsystemnamespace{extraevery}
-\def\newevery#1#2%
+\unexpanded\def\newevery#1#2%
{\ifx#1\everypar\else\newtoks#1\fi% we test for redefinition elsewhere
\ifx#2\relax\else\ifdefined#2\else
- \expandafter\newtoks\csname t\strippedcsname#1\endcsname
- \def#2{\dowithevery#1}%
+ \expandafter\newtoks\csname\??extraevery\strippedcsname#1\endcsname
+ \def#2{\syst_helpers_every#1}%
\fi\fi}
+\unexpanded\def\syst_helpers_every#1%
+ {\expandafter\removetoks\expandafter\the\csname\??extraevery\strippedcsname#1\endcsname\from#1%
+ \expandafter\appendtoks\expandafter\the\csname\??extraevery\strippedcsname#1\endcsname\to #1%
+ \csname\??extraevery\strippedcsname#1\endcsname}
+
%D This one permits definitions like:
\newevery \everypar \EveryPar % we get a warning which is ok
@@ -3745,7 +3786,23 @@
%D and how about:
-\newevery \neverypar \NeveryPar
+% \newtoks \neverypar
+% \newtoks \neveryendpar
+%
+% \normalprotected\def\syst_helpers_forgotten_endpar
+% {\the\neveryendpar\normalpar}
+%
+% \unexpanded\def\forgeteverypar
+% {\everypar{\the\neverypar}%
+% \let\endpar\syst_helpers_forgotten_endpar}
+%
+% \unexpanded\def\finishpar
+% {\ifvmode\else\par\fi}
+
+\newtoks \neverypar
+
+\unexpanded\def\forgeteverypar
+ {\everypar{\the\neverypar}}
%D Which we're going to use indeed! When the second argument
%D equals \type {\relax}, the first token list is created
@@ -3817,10 +3874,10 @@
%D Both commands accept the prefix \type{\doglobal} for global
%D assignments.
-\def\convertvalue#1\to
+\unexpanded\def\convertvalue#1\to
{\expandafter\convertcommand\csname#1\endcsname\to}
-\def\defconvertedvalue#1#2% less sensitive for \to
+\unexpanded\def\defconvertedvalue#1#2% less sensitive for \to
{\expandafter\defconvertedcommand\expandafter#1\csname#2\endcsname}
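%D For example (a sketch; \type {\MyValue} and \type {\ascii} are just
%D placeholder names):
%D
%D \starttyping
%D \def\MyValue{some text}
%D \convertvalue MyValue\to\ascii    % \ascii holds the detokenized string
%D \defconvertedvalue\ascii{MyValue} % idem, in one step
%D \stoptyping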
%D \macros
@@ -3843,10 +3900,10 @@
%D \doifassignmentelse {...} {then ...} {else ...}
%D \stoptyping
-\def\docheckifassignmentelse#1=#2#3\@end@{\if#2@}%
+\def\syst_helpers_check_if_assignment_else#1=#2#3\_e_o_p_{\if#2@}%
-\def\doifassignmentelse#1% expandable
- {\expandafter\docheckifassignmentelse\detokenize{#1}=@@\@end@
+\unexpanded\def\doifassignmentelse#1% expandable
+ {\expandafter\syst_helpers_check_if_assignment_else\detokenize{#1}=@@\_e_o_p_
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
@@ -3854,23 +3911,12 @@
\newif\ifassignment
-% \def\docheckassignmentindeed#1=#2#3\@end@{\if#2@\assignmentfalse\else\assignmenttrue\fi}
-%
-% \def\docheckassignment#1%
-% {\expandafter\docheckassignmentindeed\detokenize{#1}=@@\@end@}
-
-% D \macros
-% D {convertasciiafter}
-% D
-% D Sometimes we need to convert an argument to a string (letters
-% D only), for instance when we compare it with another string:
-% D
-% D \starttyping
-% D \convertasciiafter\doifinstringelse{em}{\ascii}{...}
-% D \stoptyping
-%
-% \def\convertasciiafter#1#2%
-% {\expandafter#1\expandafter{\detokenize{#2}}}
+\unexpanded\def\docheckassignment#1%
+ {\expandafter\syst_helpers_check_if_assignment_else\detokenize{#1}=@@\_e_o_p_
+ \assignmentfalse
+ \else
+ \assignmenttrue
+ \fi}
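%D For example (a sketch):
%D
%D \starttyping
%D \doifassignmentelse{width=10pt}{assignment}{no assignment} % assignment
%D \doifassignmentelse{whatever}  {assignment}{no assignment} % no assignment
%D \docheckassignment{width=10pt}\ifassignment ... \else ... \fi
%D \stoptyping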
%D In \ETEX\ we can use \type {\detokenize} and gain some
%D speed, but in general far less than 1\% for \type
@@ -3879,17 +3925,17 @@
%D something I found out when primitives like \type
%D {\jobname} were fed (or something undefined).
-\def\convertargument#1\to#2{\dodoglobal\edef#2{\detokenize{#1}}}
-\def\convertcommand #1\to#2{\dodoglobal\edef#2{\expandafter\detokenize\expandafter{#1}}} % hm, only second is also ok
+\unexpanded\def\convertargument#1\to#2{\dodoglobal\edef#2{\detokenize{#1}}}
+\unexpanded\def\convertcommand #1\to#2{\dodoglobal\edef#2{\expandafter\detokenize\expandafter{#1}}} % hm, only second is also ok
-\def\defconvertedargument #1#2{\edef#1{\detokenize{#2}}}
-\def\defconvertedcommand #1#2{\edef#1{\detokenize\expandafter{#2}}}
-\def\edefconvertedargument#1#2{\edef#1{#2}%
- \edef#1{\detokenize\expandafter{#1}}}
-\def\gdefconvertedargument#1#2{\xdef#1{\detokenize{#2}}}
-\def\gdefconvertedcommand #1#2{\xdef#1{\detokenize\expandafter{#2}}}
-\def\xdefconvertedargument#1#2{\xdef#1{#2}%
- \xdef#1{\detokenize\expandafter{#1}}}
+\unexpanded\def\defconvertedargument #1#2{\edef#1{\detokenize{#2}}}
+\unexpanded\def\defconvertedcommand #1#2{\edef#1{\detokenize\expandafter{#2}}}
+\unexpanded\def\edefconvertedargument#1#2{\edef#1{#2}%
+ \edef#1{\detokenize\expandafter{#1}}}
+\unexpanded\def\gdefconvertedargument#1#2{\xdef#1{\detokenize{#2}}}
+\unexpanded\def\gdefconvertedcommand #1#2{\xdef#1{\detokenize\expandafter{#2}}}
+\unexpanded\def\xdefconvertedargument#1#2{\xdef#1{#2}%
+ \xdef#1{\detokenize\expandafter{#1}}}
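%D A small usage sketch (\type {\ascii} and \type {\whatever} are just
%D placeholder names):
%D
%D \starttyping
%D \convertargument{\bold test}\to\ascii % \ascii : the detokenized tokens
%D \def\whatever{\bold test}
%D \defconvertedcommand\asciib\whatever  % idem, starting from a macro
%D \stoptyping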
%D When you try to convert a primitive command, you'll find
%D out that the \ETEX\ method fails on for instance \type
@@ -3910,14 +3956,11 @@
%D argument are completely redundant.
%D \macros
-%D {showvalue,showargument}
+%D {showvalue}
%D
-%D Two handy macros for testing purposes only:
-
-\def\showvalue#1%
- {\expandafter\show\csname#1\endcsname}
+%D A handy macro for testing purposes only:
-\def\showvalue#1%
+\unexpanded\def\showvalue#1%
{\ifcsname#1\endcsname
\expandafter\show\csname#1\endcsname
\else
@@ -3938,11 +3981,11 @@
%D
%D Watch the one level expansion of the second argument.
-\def\doifmeaningelse#1#2%
- {\edef\!!stringa{\meaning#1}%
- \def \!!stringb{#2}%
- \edef\!!stringb{\meaning\!!stringb}%
- \ifx\!!stringa\!!stringb
+\unexpanded\def\doifmeaningelse#1#2%
+ {\edef\m_syst_string_one{\meaning#1}%
+ \def \m_syst_string_two{#2}%
+ \edef\m_syst_string_two{\meaning\m_syst_string_two}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
@@ -3960,14 +4003,14 @@
%D \doifsamestringelse{\jobname}{oeps}{YES}{NO}
%D \stoptyping
-\def\@@doifsamestringelse#1#2#3#4%
- {\edef\!!stringa{\detokenize\expandafter{\normalexpanded{#3}}}%
- \edef\!!stringb{\detokenize\expandafter{\normalexpanded{#4}}}%
- \ifx\!!stringa\!!stringb\expandafter#1\else\expandafter#2\fi}
+\def\syst_helpers_if_samestring_else#1#2#3#4%
+ {\edef\m_syst_string_one{\detokenize\expandafter{\normalexpanded{#3}}}%
+ \edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#4}}}%
+ \ifx\m_syst_string_one\m_syst_string_two\expandafter#1\else\expandafter#2\fi}
-\def\doifsamestringelse{\@@doifsamestringelse\firstoftwoarguments\secondoftwoarguments}
-\def\doifsamestring {\@@doifsamestringelse\firstofoneargument \gobbleoneargument }
-\def\doifnotsamestring {\@@doifsamestringelse\gobbleoneargument \firstofoneargument }
+\unexpanded\def\doifsamestringelse{\syst_helpers_if_samestring_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifsamestring {\syst_helpers_if_samestring_else\firstofoneargument \gobbleoneargument }
+\unexpanded\def\doifnotsamestring {\syst_helpers_if_samestring_else\gobbleoneargument \firstofoneargument }
%D \macros
%D {ConvertToConstant,ConvertConstantAfter}
@@ -3995,10 +4038,10 @@
%D In examples~2 and~3 both arguments equal, in~1 and~4
%D they differ.
-\def\ConvertToConstant#1#2#3%
- {\edef\!!stringa{\expandafter\detokenize\expandafter{#2}}%
- \edef\!!stringb{\expandafter\detokenize\expandafter{#3}}%
- #1{\!!stringa}{\!!stringb}}
+\unexpanded\def\ConvertToConstant#1#2#3%
+ {\edef\m_syst_string_one{\expandafter\detokenize\expandafter{#2}}%
+ \edef\m_syst_string_two{\expandafter\detokenize\expandafter{#3}}%
+ #1{\m_syst_string_one}{\m_syst_string_two}}
%D When the argument \type{#1} consists of commands, we had
%D better use
@@ -4027,14 +4070,14 @@
%D
%D where \type{...} can be anything legally \TEX.
-\def\CheckConstantAfter#1#2%
+\unexpanded\def\CheckConstantAfter#1#2%
{\expandafter\convertargument\v!prefix!\to\ascii
\convertargument#1\to#2\relax
\doifinstringelse\ascii{#2}
{\expandafter\convertargument#1\to#2}
{}}
-\def\ConvertConstantAfter#1#2#3%
+\unexpanded\def\ConvertConstantAfter#1#2#3%
{\CheckConstantAfter{#2}\asciia
\CheckConstantAfter{#3}\asciib
#1{\asciia}{\asciib}}
@@ -4050,7 +4093,7 @@
%D
%D We don't explicitly test if the macro is defined.
-\def\assignifempty#1#2% can be sped up
+\unexpanded\def\assignifempty#1#2% can be sped up
{\doifsomething{#1}{\def#1{#2}}} % {\doifnot{#1}{}{\def#1{#2}}}
%D \macros
@@ -4094,11 +4137,13 @@
%D
%D results in: \type{\message{Hello world!}}.
-\def\dograbuntil#1#2%
- {\def\next##1#1{#2{##1}}\next}
+\let\syst_helpers_grab_indeed\relax
-\def\grabuntil#1%
- {\expandafter\dograbuntil\expandafter{\csname#1\endcsname}}
+\unexpanded\def\syst_helpers_grab#1#2%
+ {\def\syst_helpers_grab_indeed##1#1{#2{##1}}\syst_helpers_grab_indeed}
+
+\unexpanded\def\grabuntil#1%
+ {\expandafter\syst_helpers_grab\expandafter{\csname#1\endcsname}}
%D The next command builds on this mechanism:
%D
@@ -4122,13 +4167,15 @@
%D is related to these commands. This one simply throws away
%D everything preceding \type{\command}.
-\def\processbetween#1#2%
+\let\syst_helpers_gobble_indeed\relax
+
+\unexpanded\def\processbetween#1#2%
{\setvalue{\s!start#1}{\grabuntil{\s!stop#1}{#2}}}
-\def\gobbleuntil#1%
- {\def\next##1#1{}\next}
+\unexpanded\def\gobbleuntil#1%
+ {\def\syst_helpers_gobble_indeed##1#1{}\syst_helpers_gobble_indeed}
-\def\gobbleuntilrelax#1\relax
+\unexpanded\def\gobbleuntilrelax#1\relax
{}
%D The next one simply expands the picked up tokens.
@@ -4137,8 +4184,10 @@
%D \processuntil{sequence}
%D \stoptyping
-\def\processuntil#1%
- {\def\next##1#1{##1}\next}
+\let\syst_helpers_until_indeed\relax
+
+\unexpanded\def\processuntil#1%
+ {\def\syst_helpers_until_indeed##1#1{##1}\syst_helpers_until_indeed}
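%D A usage sketch of these grabbers (\type {\message} just stands in
%D for a one||argument command):
%D
%D \starttyping
%D \processbetween{remark}{\message}
%D \startremark this ends up in a message \stopremark
%D \gobbleuntil\relax these tokens are thrown away \relax
%D \stoptyping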
%D \macros
%D {groupedcommand}
@@ -4183,7 +4232,7 @@
%D
%D \starttyping
%D \def\rightword%
-%D {\groupedcommand{\hfill\hbox}{\parfillskip\!!zeropoint}}
+%D {\groupedcommand{\hfill\hbox}{\parfillskip\zeropoint}}
%D
%D .......... \rightword{the right way}
%D \stoptyping
@@ -4202,45 +4251,85 @@
%D to be {\bold bold} or not, that's the question
%D \stoptyping
%D
-%D This alternative checks for a \type{\bgroup} token first.
-%D The internal alternative does not accept the box handling
-%D mentioned before, but further nesting works all right. The
-%D extra \type{\bgroup}||\type{\egroup} is needed to keep
-%D \type{\AfterGroup} both into sight and local.
+%D This alternative checks for a \type {\bgroup} token first. The internal
+%D alternative does not accept the box handling mentioned before, but further
+%D nesting works all right. The extra \type {\bgroup}||\type {\egroup} is needed to
+%D keep \type {\m_syst_helpers_handle_group_after} both into sight and local.
-\def\HandleGroup#1#2%
+\let\m_syst_helpers_handle_group_after \relax
+\let\m_syst_helpers_handle_group_before\relax
+
+% keep:
+%
+% \unexpanded\def\syst_helpers_handle_group_normal#1#2%
+% {\bgroup
+% \def\m_syst_helpers_handle_group_before{\bgroup#1\bgroup\aftergroup\m_syst_helpers_handle_group_after}% can't we remove the second \bgroup
+% \def\m_syst_helpers_handle_group_after {#2\egroup\egroup}% and one \egroup here?
+% \afterassignment\m_syst_helpers_handle_group_before
+% \let\next=}
+
+\unexpanded\def\syst_helpers_handle_group_normal#1#2%
{\bgroup
- \def\BeforeGroup{\bgroup#1\bgroup\aftergroup\AfterGroup}% can't we remove the second \bgroup
- \def\AfterGroup {#2\egroup\egroup}% % and one \egroup here?
- \afterassignment\BeforeGroup
+ \def\m_syst_helpers_handle_group_before{#1}%
+ \def\m_syst_helpers_handle_group_after {#2}%
+ \afterassignment\m_syst_helpers_handle_group_normal_before
\let\next=}
-\def\HandleSimpleGroup#1#2% no inner group (so no kerning interference)
+\def\m_syst_helpers_handle_group_normal_before
+ {\bgroup
+ \m_syst_helpers_handle_group_before
+ \bgroup
+ \aftergroup\m_syst_helpers_handle_group_normal_after}
+
+\def\m_syst_helpers_handle_group_normal_after
+ {\m_syst_helpers_handle_group_after
+ \egroup
+ \egroup}
+
+% keep:
+%
+% \unexpanded\def\syst_helpers_handle_group_simple#1#2% no inner group (so no kerning interference)
+% {\bgroup
+% %def\m_syst_helpers_handle_group_before{\bgroup#1\aftergroup\m_syst_helpers_handle_group_after}% interferes
+% \def\m_syst_helpers_handle_group_before{\bgroup\aftergroup\m_syst_helpers_handle_group_after#1}%
+% \def\m_syst_helpers_handle_group_after {#2\egroup}%
+% \afterassignment\m_syst_helpers_handle_group_before
+% \let\next=}
+
+\unexpanded\def\syst_helpers_handle_group_simple#1#2% no inner group (so no kerning interference)
{\bgroup
- %def\BeforeGroup{\bgroup#1\aftergroup\AfterGroup}% interferes
- \def\BeforeGroup{\bgroup\aftergroup\AfterGroup#1}%
- \def\AfterGroup {#2\egroup}%
- \afterassignment\BeforeGroup
+ \def\m_syst_helpers_handle_group_before{#1}%
+ \def\m_syst_helpers_handle_group_after {#2}%
+ \afterassignment\m_syst_helpers_handle_group_simple_before
\let\next=}
-% \def\HandleNoGroup#1#2%
-% {\def\AfterGroup{#2\egroup}%
-% \bgroup\aftergroup\AfterGroup#1}
+\def\m_syst_helpers_handle_group_simple_before
+ {\bgroup
+ \aftergroup\m_syst_helpers_handle_group_simple_after
+ \m_syst_helpers_handle_group_before}
+
+\def\m_syst_helpers_handle_group_simple_after
+ {\m_syst_helpers_handle_group_after
+ \egroup}%
-\def\HandleNoGroup % retrofit into mkii
+\unexpanded\def\syst_helpers_handle_group_nop
{\ifnum\currentgrouptype=\semisimplegroupcode
- \expandafter\HandleNoGroupA
+ \expandafter\syst_helpers_handle_group_nop_a
\else
- \expandafter\HandleNoGroupB
+ \expandafter\syst_helpers_handle_group_nop_b
\fi}
-\def\HandleNoGroupA#1#2%
- {\def\AfterGroup{#2\endgroup}%
- \begingroup\aftergroup\AfterGroup#1}
+\def\syst_helpers_handle_group_nop_a#1#2%
+ {\def\m_syst_helpers_handle_group_after{#2\endgroup}%
+ \begingroup
+ \aftergroup\m_syst_helpers_handle_group_after
+ #1}
-\def\HandleNoGroupB#1#2%
- {\def\AfterGroup{#2\egroup}%
- \bgroup\aftergroup\AfterGroup#1}
+\def\syst_helpers_handle_group_nop_b#1#2%
+ {\def\m_syst_helpers_handle_group_after{#2\egroup}%
+ \bgroup
+ \aftergroup\m_syst_helpers_handle_group_after
+ #1}
%D I considered it a nuisance that
%D
@@ -4255,10 +4344,10 @@
%D implementation became:
\unexpanded\def\groupedcommand#1#2%
- {\doifnextbgroupelse{\HandleGroup{#1}{#2}}{\HandleNoGroup{#1}{#2}}}
+ {\doifnextbgroupelse{\syst_helpers_handle_group_normal{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
\unexpanded\def\simplegroupedcommand#1#2%
- {\doifnextbgroupelse{\HandleSimpleGroup{#1}{#2}}{\HandleNoGroup{#1}{#2}}}
+ {\doifnextbgroupelse{\syst_helpers_handle_group_simple{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
%D Users should be aware of the fact that grouping can
%D interfere with one's paragraph settings that are executed
@@ -4288,10 +4377,10 @@
%D \type{CAPITALS}. This suggestion is feasible, because
%D \CONTEXT only defines lowcased macros.
-\def\showdefinederror#1#2%
+\unexpanded\def\showdefinederror#1#2%
{\writestatus\m!system{#1 #2 replaces a macro, use CAPITALS!}}
-\def\checkdefined#1#2#3%
+\unexpanded\def\checkdefined#1#2#3%
{\doifdefined{#3}{\showdefinederror{#2}{#3}}}
%D \macros
@@ -4365,6 +4454,11 @@
{\def\dodogotopar{#1}%
\redogotopar\par}
+\def\dogotoparcs#1%
+ {\let\dodogotopar#1%
+ \redogotopar\par}
+
+
\unexpanded\def\GetPar
{\expanded
{\dowithpar
@@ -4413,10 +4507,13 @@
%D ... \par
%D \stoptyping
+\let\syst_helpers_next_par\relax
+\let\syst_helpers_next_arg\relax
+
\unexpanded\def\dowithpargument#1%
- {\def\nextpar##1 \par{#1{##1}}%
- \def\nextarg##1{#1{##1}}%
- \doifnextbgroupelse\nextarg{\doifnextcharelse\par{#1{}}\nextpar}}
+ {\def\syst_helpers_next_par##1 \par{#1{##1}}%
+ \def\syst_helpers_next_arg##1{#1{##1}}%
+ \doifnextbgroupelse\syst_helpers_next_arg{\doifnextcharelse\par{#1{}}\syst_helpers_next_par}}
%D The \type{p} in the previous command stands for paragraph.
%D When we want to act upon words we can use the \type{w}
@@ -4440,10 +4537,13 @@
%D ...
%D \stoptyping
+\let\syst_helpers_next_war\relax
+\let\syst_helpers_next_arg\relax
+
\unexpanded\def\dowithwargument#1%
- {\def\nextwar##1 {#1{##1}}%
- \def\nextarg##1{#1{##1}}%
- \doifnextbgroupelse\nextarg\nextwar}
+ {\def\syst_helpers_next_war##1 {#1{##1}}%
+ \def\syst_helpers_next_arg##1{#1{##1}}%
+ \doifnextbgroupelse\syst_helpers_next_arg\syst_helpers_next_war}
%D \macros
%D {dorepeat,dorepeatwithcommand}
@@ -4476,12 +4576,12 @@
%D specification is missing, the command executes once.
\unexpanded\def\dorepeatwithcommand[#1]%
- {\dodorepeatwithcommand#1*\empty*\relax}
+ {\syst_helpers_repeat_with_command#1*\empty*\relax}
-\def\dodorepeatwithcommand#1*#2#3*#4\relax#5%
- {\ifx#2\empty\redorepeatwithcommand[#1]#5\else\dododorepeatwithcommand{#1}{#2}{#3}#5\fi}
+\def\syst_helpers_repeat_with_command#1*#2#3*#4\relax#5%
+ {\ifx#2\empty\syst_helpers_repeat_with_command_again[#1]#5\else\syst_helpers_repeat_with_command_indeed{#1}{#2}{#3}#5\fi}
-\def\dododorepeatwithcommand#1#2#3#4%
+\def\syst_helpers_repeat_with_command_indeed#1#2#3#4%
{\ifx#2\empty % redundant but gives cleaner extensions
#4{#1}%
\else\ifnum#1<\zerocount
@@ -4493,7 +4593,7 @@
\dorecurse{#1}{#4{#2#3}}%
\fi\fi\fi}
-\def\redorepeatwithcommand[#1]#2%
+\def\syst_helpers_repeat_with_command_again[#1]#2%
{#2{#1}}
%D The extension hook permits something like:
@@ -4503,11 +4603,11 @@
%D
%D \catcode`\*=\superscriptcatcode
%D
-%D \gdef\redorepeatwithcommand[#1]%
+%D \gdef\syst_helpers_repeat_with_command_again[#1]%
%D {\redodorepeatwithcommand#1*\empty*\relax}
%D
%D \gdef\redodorepeatwithcommand#1*#2#3*#4\relax#5%
-%D {\dododorepeatwithcommand{#1}{#2}{#3}#5}
+%D {\syst_helpers_repeat_with_command_indeed{#1}{#2}{#3}#5}
%D
%D \egroup
%D \stoptyping
@@ -4515,14 +4615,6 @@
%D although one may wonder if changing the catcode of \type {*} is wise.
%D \macros
-%D {normalbgroup,normalgroup}
-%D
-%D No comment.
-
-\let\normalbgroup\bgroup
-\let\normalegroup\egroup
-
-%D \macros
%D {doifstringinstringelse}
%D
%D The next macro is meant for situations where both strings
@@ -4539,7 +4631,7 @@
%D
%D A bit faster is:
-\def\pp!doifstringinstringelse#1%
+\def\syst_helpers_if_instring_else_indeed#1%
{\if#1@%
\expandafter\secondoftwoarguments
\else
@@ -4547,9 +4639,9 @@
\fi}
\def\doifstringinstringelse#1#2%
- {\expandafter\def\expandafter\p!doifstringinstringelse\expandafter##\expandafter1#1##2##3\war
- {\pp!doifstringinstringelse##2}%
- \expandafter\expandafter\expandafter\p!doifstringinstringelse\expandafter#2#1@@\war}
+ {\expandafter\def\expandafter\syst_helpers_if_instring_else\expandafter##\expandafter1#1##2##3\_e_o_s_
+ {\syst_helpers_if_instring_else_indeed##2}%
+ \expandafter\expandafter\expandafter\syst_helpers_if_instring_else\expandafter#2#1@@\_e_o_s_}
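%D A sketch, assuming both arguments are macros holding the strings
%D (\type {\substring} and \type {\fullstring} are placeholder names):
%D
%D \starttyping
%D \def\substring {bc}
%D \def\fullstring{abcd}
%D \doifstringinstringelse\substring\fullstring{found}{not found} % found
%D \stoptyping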
%D \macros
%D {appendtoks,prependtoks,appendtoksonce,prependtoksonce,
@@ -4569,36 +4661,39 @@
%D These macros are clones of the ones implemented in page~378 of
%D Knuth's \TeX book.
-\newtoks\@@scratchtoks
+\newtoks\t_syst_helpers_scratch
+\let \m_syst_helpers_scratch\empty
+
+% no longer \def but \let to target toks
-\unexpanded\def\appendtoks {\doappendtoks \relax}
-\unexpanded\def\prependtoks {\doprependtoks \relax}
-\unexpanded\def\appendtoksonce {\doappendtoksonce \relax}
-\unexpanded\def\prependtoksonce{\doprependtoksonce\relax}
+\unexpanded\def\appendtoks {\syst_helpers_append_toks \relax}
+\unexpanded\def\prependtoks {\syst_helpers_prepend_toks \relax}
+\unexpanded\def\appendtoksonce {\syst_helpers_append_toks_once \relax}
+\unexpanded\def\prependtoksonce{\syst_helpers_prepend_toks_once\relax}
-\def\dodoappendtoks
- {\dodoglobal\@@toks\doubleexpandafter{\expandafter\the\expandafter\@@toks\the\@@scratchtoks}}
+\def\syst_helpers_append_toks_indeed
+ {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\m_syst_helpers_scratch\the\t_syst_helpers_scratch}}
-\def\dodoprependtoks
- {\dodoglobal\@@toks\doubleexpandafter{\expandafter\the\expandafter\@@scratchtoks\the\@@toks}}
+\def\syst_helpers_prepend_toks_indeed
+ {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\t_syst_helpers_scratch\the\m_syst_helpers_scratch}}
-\def\doappendtoks#1\to#2%
- {\def\@@toks{#2}%
- \@@scratchtoks\expandafter{\gobbleoneargument#1}\dodoappendtoks}
+\def\syst_helpers_append_toks#1\to#2%
+ {\let\m_syst_helpers_scratch#2%
+ \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}\syst_helpers_append_toks_indeed}
-\def\doprependtoks#1\to#2%
- {\def\@@toks{#2}%
- \@@scratchtoks\expandafter{\gobbleoneargument#1}\dodoprependtoks}
+\def\syst_helpers_prepend_toks#1\to#2%
+ {\let\m_syst_helpers_scratch#2%
+ \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}\syst_helpers_prepend_toks_indeed}
-\def\doappendtoksonce#1\to#2%
- {\def\@@toks{#2}%
- \@@scratchtoks\expandafter{\gobbleoneargument#1}%
- \doifintokselse\@@scratchtoks\@@toks\donothing\dodoappendtoks}
+\def\syst_helpers_append_toks_once#1\to#2%
+ {\let\m_syst_helpers_scratch#2%
+ \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+ \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_append_toks_indeed}
-\def\doprependtoksonce#1\to#2%
- {\def\@@toks{#2}%
- \@@scratchtoks\expandafter{\gobbleoneargument#1}%
- \doifintokselse\@@scratchtoks\@@toks\donothing\dodoprependtoks}
+\def\syst_helpers_prepend_toks_once#1\to#2%
+ {\let\m_syst_helpers_scratch#2%
+ \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+ \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_prepend_toks_indeed}
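%D For example (a sketch; \type {\mytoks} is a placeholder register):
%D
%D \starttyping
%D \newtoks\mytoks
%D \appendtoks     \bold \to \mytoks
%D \prependtoks    \relax\to \mytoks
%D \appendtoksonce \bold \to \mytoks % already present, so nothing is added
%D \stoptyping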
%D The test macro:
@@ -4616,70 +4711,27 @@
% {\scratchtoks{a\relax b} \removetoks \relax\from\scratchtoks [\showthe\scratchtoks]}
\unexpanded\def\removetoks#1\from#2%
- {\def\doremovetoks##1#1##2\empty\empty\empty##3\\%
- {\def\!!stringa{##3}%
- \ifx\!!stringa\empty#2{##1}\else#2{##1##2}\fi}%
- \expandafter\doremovetoks\the#2\empty\empty\empty#1\empty\empty\empty\\}
+ {\def\syst_helpers_remove_toks##1#1##2\empty\empty\empty##3\_e_o_t_
+ {\def\m_syst_string_one{##3}%
+ \ifx\m_syst_string_one\empty#2{##1}\else#2{##1##2}\fi}%
+ \expandafter\syst_helpers_remove_toks\the#2\empty\empty\empty#1\empty\empty\empty\_e_o_t_}
%D Also:
-\unexpanded\def\appendetoks #1\to{\normalexpanded{\noexpand\appendtoks #1}\to}
-\unexpanded\def\prependetoks#1\to{\normalexpanded{\noexpand\prependtoks#1}\to}
+\unexpanded\def\appendetoks #1\to{\normalexpanded{\appendtoks #1}\to}
+\unexpanded\def\prependetoks#1\to{\normalexpanded{\prependtoks#1}\to}
%D Hm.
-\def\flushtoks#1% nb: can reassing to #1 again, hence the indirectness
- {\@@scratchtoks#1\relax
+\unexpanded\def\flushtoks#1% nb: can reassign to #1 again, hence the indirectness
+ {\t_syst_helpers_scratch#1\relax
\dodoglobal#1\emptytoks
- \the\@@scratchtoks\relax}
+ \the\t_syst_helpers_scratch\relax}
% better: \def\flushtoks#1{\normalexpanded{\noexpand\dodoglobal#1\emptytoks\the#1\relax}}
\let\dotoks\the
-% The following code is obsolete (and names are reused for the more
-% advanced counter mechanism and those macros are not compatible!
-%
-% %D \macros
-% %D {makecounter,pluscounter,minuscounter,
-% %D resetcounter,setcounter,countervalue}
-% %D
-% %D Declaring, setting and resetting \COUNTERS\ can be done
-% %D with the next set of commands.
-% %D
-% %D \starttyping
-% %D \makecounter {name}
-% %D \pluscounter {name}
-% %D \minuscounter {name}
-% %D \resetcounter {name}
-% %D \setcounter {name} {value}
-% %D \countervalue {name}
-% %D \stoptyping
-%
-% \def\makecounter#1%
-% {\global\expandafter\let\csname#1\endcsname\zerocountervalue} % see earlier
-%
-% \def\countervalue#1%
-% {\ifcsname#1\endcsname\csname#1\endcsname\fi}
-%
-% \def\pluscounter#1%
-% {\expandafter\xdef\csname#1\endcsname{\the\numexpr\csname#1\endcsname+\plusone\relax}}
-%
-% \def\minuscounter#1%
-% {\expandafter\xdef\csname#1\endcsname{\the\numexpr\csname#1\endcsname-\plusone\relax}}
-%
-% \def\resetcounter#1%
-% {\global\expandafter\let\csname#1\endcsname\zerocountervalue}
-%
-% \def\setcounter#1#2%
-% {\expandafter\xdef\csname#1\endcsname{\the\numexpr#2\relax}}
-%
-% \def\savecounter#1%
-% {\expandafter\xdef\csname ! #1 !\endcsname{\the\numexpr\csname#1\endcsname\relax}}
-%
-% \def\restorecounter#1%
-% {\expandafter\xdef\csname#1\endcsname{\the\numexpr\csname ! #1 !\endcsname\relax}}
-
%D \macros
%D {beforesplitstring,aftersplitstring}
%D
@@ -4697,33 +4749,35 @@
%D Both implementations show some insight in the manipulation
%D of arguments.
-\def\beforesplitstring#1\at#2\to#3%
- {\def\dosplitstring##1#2##2#2##3\\%
+\let\syst_helpers_split_string\relax
+
+\unexpanded\def\beforesplitstring#1\at#2\to#3%
+ {\def\syst_helpers_split_string##1#2##2#2##3\\%
{\def#3{##1}}%
- \expandafter\dosplitstring#1#2#2\\}
+ \expandafter\syst_helpers_split_string#1#2#2\\}
-\def\aftersplitstring#1\at#2\to#3%
- {\def\dosplitstring##1#2##2@@@##3\\%
+\unexpanded\def\aftersplitstring#1\at#2\to#3%
+ {\def\syst_helpers_split_string##1#2##2@@@##3\\%
{\def#3{##2}}%
- \expandafter\dosplitstring#1@@@#2@@@\\}
+ \expandafter\syst_helpers_split_string#1@@@#2@@@\\}
%D \macros
%D {splitstring,greedysplitstring}
%D
%D A bonus macro.
-\def\splitstring#1\at#2\to#3\and#4%
- {\def\dosplitstring##1#2##2\empty\empty\empty##3\\%
+\unexpanded\def\splitstring#1\at#2\to#3\and#4%
+ {\def\syst_helpers_split_string##1#2##2\empty\empty\empty##3\\%
{\def#3{##1}%
- \def\dosplitstring{##3}%
- \ifx\dosplitstring\empty
+ \def\syst_helpers_split_string{##3}%
+ \ifx\syst_helpers_split_string\empty
\let#4\empty
\else
\def#4{##2}%
\fi}%
- \expandafter\dosplitstring#1\empty\empty\empty#2\empty\empty\empty\\}
+ \expandafter\syst_helpers_split_string#1\empty\empty\empty#2\empty\empty\empty\\}
-\def\greedysplitstring#1\at#2\to#3\and#4%
+\unexpanded\def\greedysplitstring#1\at#2\to#3\and#4%
{\edef\asciib{#1}%
\let\asciic\asciib
\let#3\empty
@@ -4745,20 +4799,20 @@
%D aftertestandsplitstring,
%D testandsplitstring}
-\def\beforetestandsplitstring#1\at#2\to#3%
- {\def\dosplitstring##1#2##2#2##3##4\\%
+\unexpanded\def\beforetestandsplitstring#1\at#2\to#3%
+ {\def\syst_helpers_split_string##1#2##2#2##3##4\\%
{\ifx##3\empty\let#3\empty\else\def#3{##1}\fi}%
- \expandafter\dosplitstring#1#2#2\empty\\}
+ \expandafter\syst_helpers_split_string#1#2#2\empty\\}
-\def\aftertestandsplitstring#1\at#2\to#3%
- {\def\dosplitstring ##1#2##2@@@##3##4\\%
+\unexpanded\def\aftertestandsplitstring#1\at#2\to#3%
+ {\def\syst_helpers_split_string ##1#2##2@@@##3##4\\%
{\ifx##3\empty\let#3\empty\else\def#3{##2}\fi}%
- \expandafter\dosplitstring #1@@@#2@@@\empty\\}
+ \expandafter\syst_helpers_split_string #1@@@#2@@@\empty\\}
\def\testandsplitstring#1\at#2\to#3\and#4%
- {\def\dosplitstring##1#2##2#2##3##4\\%
+ {\def\syst_helpers_split_string##1#2##2#2##3##4\\%
{\ifx##3\empty\let#3\empty\let#4\empty\else\def#3{##1}\def#4{##2}\fi}%
- \expandafter\dosplitstring#1#2#2\empty\\}
+ \expandafter\syst_helpers_split_string#1#2#2\empty\\}
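%D Some usage sketches (the target macros are placeholder names):
%D
%D \starttyping
%D \beforesplitstring one.two.three\at.\to\first % \first : one
%D \aftersplitstring  one.two.three\at.\to\rest  % \rest  : two.three
%D \splitstring one:two\at:\to\head\and\tail     % \head : one, \tail : two
%D \stoptyping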
%D \macros
%D {removesubstring}
@@ -4771,9 +4825,9 @@
%D
%D Which in terms of \TEX\ looks like:
-\def\removesubstring#1\from#2\to#3%
- {\splitstring#2\to\!!stringa\and\!!stringb
- \dodoglobal#3{\!!stringa\!!stringb}}
+\unexpanded\def\removesubstring#1\from#2\to#3%
+ {\splitstring#2\to\m_syst_string_one\and\m_syst_string_two
+ \dodoglobal#3{\m_syst_string_one\m_syst_string_two}}
%D \macros
%D {appendtocommalist,prependtocommalist,
@@ -4848,9 +4902,9 @@
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
\unexpanded\def\robustdoifinsetelse#1#2%
- {\edef\!!stringa{\detokenize\expandafter{\normalexpanded{#1}}}%
- \edef\!!stringb{\detokenize\expandafter{\normalexpanded{#2}}}%
- \rawdoifinsetelse\!!stringa\!!stringb}
+ {\edef\m_syst_string_one{\detokenize\expandafter{\normalexpanded{#1}}}%
+ \edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#2}}}%
+ \rawdoifinsetelse\m_syst_string_one\m_syst_string_two}
\unexpanded\def\robustaddtocommalist#1#2% {item} \cs
{\robustdoifinsetelse{#1}#2\resetglobal
@@ -4860,11 +4914,11 @@
{\robustdoifinsetelse{#1}#2\resetglobal
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
-\def\xsplitstring#1#2% \cs {str}
- {\def\dosplitstring##1,#2,##2,#2,##3\\%
- {\edef\!!stringa{\bcleanedupcommalist##1\empty\empty\relax}%
- \edef\!!stringb{\acleanedupcommalist##2,,\relax}}%
- \expandafter\dosplitstring\expandafter,#1,,#2,,#2,\\}
+\unexpanded\def\xsplitstring#1#2% \cs {str}
+ {\def\syst_helpers_split_string##1,#2,##2,#2,##3\\%
+ {\edef\m_syst_string_one{\bcleanedupcommalist##1\empty\empty\relax}%
+ \edef\m_syst_string_two{\acleanedupcommalist##2,,\relax}}%
+ \expandafter\syst_helpers_split_string\expandafter,#1,,#2,,#2,\\}
\def\bcleanedupcommalist#1#2#3\relax{\if#1,\else#1\fi\if#2,\else#2\fi#3}
\def\bcleanedupcommalist#1#2\relax{\if#1,\else#1\fi#2}
@@ -4872,12 +4926,12 @@
\unexpanded\def\removefromcommalist#1#2% to be sped up
{\rawdoifinsetelse{#1}#2%
- {\normalexpanded{\noexpand\xsplitstring\noexpand#2{#1}}%
+ {\normalexpanded{\xsplitstring\noexpand#2{#1}}%
\dodoglobal\edef#2%
- {\ifx\!!stringa\empty
- \!!stringb
+ {\ifx\m_syst_string_one\empty
+ \m_syst_string_two
\else
- \!!stringa\ifx\!!stringb\empty\else,\!!stringb\fi
+ \m_syst_string_one\ifx\m_syst_string_two\empty\else,\m_syst_string_two\fi
\fi}}
\resetglobal}
@@ -4897,52 +4951,35 @@
%D \substituteincommalist{old}{new}{list}
%D \stoptyping
-\def\substituteincommalist#1#2#3% old, new, list (slooow)
- {\edef\!!stringb{#1}%
- \edef\!!stringd{#2}%
- \let\!!stringa#3%
- \let#3\empty
- \def\dosubstituteincommalist##1%
- {\edef\!!stringc{##1}%
- \ifx\!!stringb\!!stringc
- \ifx\!!stringd\empty\else
- \edef#3{#3\ifx#3\empty\else,\fi\!!stringd}%
- \fi
- \def\docommand####1{\edef#3{#3,####1}}%
- \else
- \edef#3{#3\ifx#3\empty\else,\fi##1}%
- \fi}%
- \expandafter\rawprocesscommacommand\expandafter[\!!stringa]\dosubstituteincommalist}
-
-%D A not so useful macro:
-
-\def\dodofrontstrip[#1#2]#3%
- {\ifx#1\space
- \def#3{#2}%
+\def\syst_helpers_substitute_in_comma_list_step#1%
+ {\edef\m_syst_string_three{#1}%
+ \ifx\m_syst_string_one\m_syst_string_three
+ \ifx\m_syst_string_two\empty \else
+ \edef\m_syst_string_four{\ifx\m_syst_string_four\empty\else\m_syst_string_four,\fi\m_syst_string_two}%
+ \fi
\else
- \def#3{#1#2}%
+ \edef\m_syst_string_four{\ifx\m_syst_string_four\empty\else\m_syst_string_four,\fi#1}%
\fi}
-\def\dofrontstrip#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty \else
- \expandafter\dodofrontstrip\expandafter[#1]#1%
- \fi}
+\unexpanded\def\substituteincommalist#1#2#3% old, new, list (slooow)
+ {\edef\m_syst_string_one{#1}%
+ \edef\m_syst_string_two{#2}%
+ \let\m_syst_string_four\empty
+ \normalexpanded{\rawprocesscommacommand[#3]}\syst_helpers_substitute_in_comma_list_step
+ \let#3\m_syst_string_four}
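%D For example (a sketch; the third argument is a macro holding the
%D list):
%D
%D \starttyping
%D \def\mylist{aa,bb,cc}
%D \substituteincommalist{bb}{xx}\mylist % \mylist : aa,xx,cc
%D \stoptyping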
%D \macros
%D {replaceincommalist}
%D
-%D The next macro can be used to replace an indexed element
-%D in a commalist:
+%D The next macro can be used to replace an indexed element in a commalist:
%D
%D \starttyping
%D \replaceincommalist\MyList{2}
%D \stoptyping
%D
-%D Element~2 will be replaced by the current meaning of the macro
-%D \type {\newcommalistelement}. The old meaning is saved in
-%D \type {\commalistelement}. The replacement honors grouped items,
-%D like in:
+%D Element~2 will be replaced by the current meaning of the macro \type
+%D {\newcommalistelement}. The old meaning is saved in \type {\commalistelement}.
+%D The replacement honors grouped items, like in:
%D
%D \starttyping
%D \def\MyList{a,b,c,d,e,f} \replaceincommalist\MyList{3}
@@ -4950,43 +4987,50 @@
%D \def\MyList{a,{b,c},d,e,f} \replaceincommalist\MyList{3}
%D \def\MyList{a,b,c,{d,e,f}} \replaceincommalist\MyList{3}
%D \stoptyping
+%D
+%D This macro was used in the bibtex code (and is probably no longer needed).
+
+\newcount\c_syst_helpers_comma_list_index
+\let \m_syst_helpers_comma_list_target\empty
\let\newcommalistelement\empty
-\def\replaceincommalist#1#2% #1 = commalistelement #2 = position starts at 1
- {\def\doreplaceincommalist##1%
- {\ifnum\commalistcounter=#2\relax
- \ifx\newcommalistelement\empty\else
- \ifx\newcommalist\empty
- \let\newcommalist\newcommalistelement
- \else
- \expandafter\expandafter\expandafter\def\expandafter\expandafter\expandafter
- \newcommalist\expandafter\expandafter\expandafter
- {\expandafter\newcommalist\expandafter,\newcommalistelement}%
- \fi
- \fi
- \def\commalistelement{##1}%
- \else
- \ifx\newcommalist\empty
- \ifx\nexttoken\bgroup % is known -)
- \def\newcommalist{{##1}}%
- \else
- \def\newcommalist{##1}%
- \fi
- \else
- \ifx\nexttoken\bgroup % is known -)
- \expandafter\def\expandafter\newcommalist\expandafter{\newcommalist,{##1}}%
- \else
- \expandafter\def\expandafter\newcommalist\expandafter{\newcommalist,##1}%
- \fi
- \fi
- \fi
- \advance\commalistcounter\plusone}%
+\def\syst_helpers_replace_in_comma_list_step#1%
+ {\ifnum\commalistcounter=\c_syst_helpers_comma_list_index\relax
+ \ifx\newcommalistelement\empty\else
+ \ifx\m_syst_helpers_comma_list_target\empty
+ \let\m_syst_helpers_comma_list_target\newcommalistelement
+ \else
+ \expandafter\expandafter\expandafter\def\expandafter\expandafter\expandafter
+ \m_syst_helpers_comma_list_target\expandafter\expandafter\expandafter
+ {\expandafter\m_syst_helpers_comma_list_target\expandafter,\newcommalistelement}%
+ \fi
+ \fi
+ \def\commalistelement{#1}%
+ \else
+ \ifx\m_syst_helpers_comma_list_target\empty
+ \ifx\nexttoken\bgroup % is known -)
+ \def\m_syst_helpers_comma_list_target{{#1}}%
+ \else
+ \def\m_syst_helpers_comma_list_target{#1}%
+ \fi
+ \else
+ \ifx\nexttoken\bgroup % is known -)
+ \expandafter\def\expandafter\m_syst_helpers_comma_list_target\expandafter{\m_syst_helpers_comma_list_target,{#1}}%
+ \else
+ \expandafter\def\expandafter\m_syst_helpers_comma_list_target\expandafter{\m_syst_helpers_comma_list_target,#1}%
+ \fi
+ \fi
+ \fi
+ \advance\commalistcounter\plusone}
+
+\unexpanded\def\replaceincommalist#1#2% #1 = commalistelement #2 = position starts at 1
+ {\c_syst_helpers_comma_list_index#2\relax
+ \let\m_syst_helpers_comma_list_target\empty
\let\commalistelement\empty
- \let\newcommalist\empty
\commalistcounter\plusone
- \expandafter\processcommalist\expandafter[#1]\doreplaceincommalist
- \dodoglobal\let#1\newcommalist}
+ \expandafter\processcommalist\expandafter[#1]\syst_helpers_replace_in_comma_list_step
+ \dodoglobal\let#1\m_syst_helpers_comma_list_target}
%D \macros
%D {globalprocesscommalist}
@@ -4997,15 +5041,17 @@
%D handling comma lists in alignments. In these situations the
%D next macro can be of use.
-\def\globalprocesscommaitem#1,%
+\let\m_syst_helpers_comma_list_command_global\empty
+
+\def\syst_helpers_comma_list_command_global_step#1,%
{\if]#1\else
- \globalcommacommand{#1}%
- \expandafter\globalprocesscommaitem
+ \m_syst_helpers_comma_list_command_global{#1}%
+ \expandafter\syst_helpers_comma_list_command_global_step
\fi}
-\def\globalprocesscommalist[#1]#2%
- {\global\let\globalcommacommand#2%
- \expandafter\globalprocesscommaitem#1,],}
+\unexpanded\def\globalprocesscommalist[#1]#2%
+ {\global\let\m_syst_helpers_comma_list_command_global#2%
+ \expandafter\syst_helpers_comma_list_command_global_step#1,],}
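%D Usage follows the same pattern as \type {\processcommalist}, as in
%D this sketch:
%D
%D \starttyping
%D \globalprocesscommalist[aa,bb,cc]\message
%D \stoptyping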
%D \macros
%D {withoutpt,PtToCm,
@@ -5082,36 +5128,36 @@
%D
%D Beware: global!
-\def\@sl@{@sl@}
-\def\@sg@{@sg@}
+\installsystemnamespace{localpushedmacro}
+\installsystemnamespace{globalpushedmacro}
-\let\@@pushedmacro\empty
+\let\m_syst_helpers_push_macro\empty
-\def\globalpushmacro#1%
- {\xdef\@@pushedmacro{\string#1}%
- \ifcsname\@sg@\@@pushedmacro\endcsname \else
- \expandafter\newcount\csname\@sg@\@@pushedmacro\endcsname
+\unexpanded\def\globalpushmacro#1%
+ {\xdef\m_syst_helpers_push_macro{\string#1}%
+ \ifcsname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname \else
+ \expandafter\newcount\csname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname
\fi
- \global\advance\csname\@sg@\@@pushedmacro\endcsname \plusone
- \global\expandafter\let\csname\the\csname\@sg@\@@pushedmacro\endcsname\@@pushedmacro\endcsname#1}
-
-\def\globalpopmacro#1%
- {\xdef\@@pushedmacro{\string#1}%
- \global\expandafter\let\expandafter#1\csname\the\csname\@sg@\@@pushedmacro\endcsname\@@pushedmacro\endcsname
- \global\advance\csname\@sg@\@@pushedmacro\endcsname \minusone}
-
-\def\localpushmacro#1% this one can be used to push a value over an \egroup
- {\xdef\@@pushedmacro{\string#1}%
- \ifcsname\@sl@\@@pushedmacro\endcsname \else
- \expandafter\newcount\csname\@sl@\@@pushedmacro\endcsname
+ \global\advance\csname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname \plusone
+ \global\expandafter\let\csname\the\csname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname\m_syst_helpers_push_macro\endcsname#1}
+
+\unexpanded\def\globalpopmacro#1%
+ {\xdef\m_syst_helpers_push_macro{\string#1}%
+ \global\expandafter\let\expandafter#1\csname\the\csname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname\m_syst_helpers_push_macro\endcsname
+ \global\advance\csname\??globalpushedmacro\m_syst_helpers_push_macro\endcsname \minusone}
+
+\unexpanded\def\localpushmacro#1% this one can be used to push a value over an \egroup
+ {\xdef\m_syst_helpers_push_macro{\string#1}%
+ \ifcsname\??localpushedmacro\m_syst_helpers_push_macro\endcsname \else
+ \expandafter\newcount\csname\??localpushedmacro\m_syst_helpers_push_macro\endcsname
\fi
- \global\advance\csname\@sl@\@@pushedmacro\endcsname \plusone
- \global\expandafter\let\csname\the\csname\@sl@\@@pushedmacro\endcsname\@@pushedmacro\endcsname#1}
+ \global\advance\csname\??localpushedmacro\m_syst_helpers_push_macro\endcsname \plusone
+ \global\expandafter\let\csname\the\csname\??localpushedmacro\m_syst_helpers_push_macro\endcsname\m_syst_helpers_push_macro\endcsname#1}
-\def\localpopmacro#1%
- {\xdef\@@pushedmacro{\string#1}%
- \expandafter\let\expandafter#1\csname\the\csname\@sl@\@@pushedmacro\endcsname\@@pushedmacro\endcsname
- \global\advance\csname\@sl@\@@pushedmacro\endcsname \minusone }
+\unexpanded\def\localpopmacro#1%
+ {\xdef\m_syst_helpers_push_macro{\string#1}%
+ \expandafter\let\expandafter#1\csname\the\csname\??localpushedmacro\m_syst_helpers_push_macro\endcsname\m_syst_helpers_push_macro\endcsname
+ \global\advance\csname\??localpushedmacro\m_syst_helpers_push_macro\endcsname \minusone }
\let\pushmacro\localpushmacro
\let\popmacro \localpopmacro
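%D For example (a sketch; \type {\whatever} is a placeholder name):
%D
%D \starttyping
%D \def\whatever{one}
%D \pushmacro\whatever
%D \def\whatever{two}
%D \popmacro\whatever % \whatever is [one] again
%D \stoptyping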
@@ -5133,49 +5179,28 @@
%D These examples show us that an optional argument can be used. The
%D value provided is added to \type{\localhsize}.
-% todo: a fast non argument variant
-
\newdimen\localhsize
-% \def\complexsetlocalhsize[#1]% don't change !
-% {\localhsize\hsize
-% \ifnum\hangafter<\zerocount
-% \advance\localhsize\ifdim\hangindent>\zeropoint-\fi\hangindent
-% \fi
-% \advance\localhsize -\leftskip
-% \advance\localhsize -\rightskip
-% \advance\localhsize #1\relax}
-%
-% \def\simplesetlocalhsize
-% {\complexsetlocalhsize[\zeropoint]}
-%
-% \definecomplexorsimple\setlocalhsize
-
\unexpanded\def\setlocalhsize % don't change !
{\doifnextoptionalelse
\syst_helpers_set_local_hsize_yes
\syst_helpers_set_local_hsize_nop}
-% \def\syst_helpers_set_local_hsize_nop
-% {\localhsize\hsize
-% \ifnum\hangafter<\zerocount
-% \advance\localhsize\ifdim\hangindent>\zeropoint-\fi\hangindent
-% \fi
-% \advance\localhsize -\leftskip
-% \advance\localhsize -\rightskip}
-
\def\syst_helpers_set_local_hsize_nop
- {\localhsize\dimexpr
- \hsize -\leftskip -\rightskip
- \ifnum\hangafter<\zerocount
- \ifdim\hangindent>\zeropoint-\else+\fi\hangindent
- \fi
- \relax}
+ {\localhsize\availablehsize}
\def\syst_helpers_set_local_hsize_yes[#1]%
{\syst_helpers_set_local_hsize_nop
\advance\localhsize#1\relax}
+\def\availablehsize
+ {\dimexpr
+ \hsize-\leftskip-\rightskip
+ \ifnum\hangafter<\zerocount
+ \ifdim\hangindent>\zeropoint-\else+\fi\hangindent
+ \fi
+ \relax}
+
%D \macros
%D {doifvalue,doifnotvalue,doifelsevalue,
%D doifnothing,doifsomething,doifelsenothing,
@@ -5187,81 +5212,84 @@
%D tokens per call. Anyone familiar with the not||values
%D ones, can derive their meaning from the definitions.
-\def\doifvalue#1#2%
- {\edef\!!stringa{\csname#1\endcsname}\edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+\unexpanded\def\doifvalue#1#2%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
-\def\doifnotvalue#1#2%
- {\edef\!!stringa{\csname#1\endcsname}\edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+\unexpanded\def\doifnotvalue#1#2%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
-\def\doifelsevalue#1#2%
- {\edef\!!stringa{\csname#1\endcsname}\edef\!!stringb{#2}%
- \ifx\!!stringa\!!stringb
+\unexpanded\def\doifelsevalue#1#2%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \edef\m_syst_string_two{#2}%
+ \ifx\m_syst_string_one\m_syst_string_two
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\def\doifnothing#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifnothing#1%
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
-\def\doifsomething#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifsomething#1%
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
-\def\doifelsenothing#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifelsenothing#1%
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\def\doifsomethingelse#1%
- {\edef\!!stringa{#1}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifsomethingelse#1%
+ {\edef\m_syst_string_one{#1}%
+ \ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
-\def\doifvaluenothing#1%
- {\edef\!!stringa{\csname#1\endcsname}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifvaluenothing#1%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
-\def\doifvaluesomething#1%
- {\edef\!!stringa{\csname#1\endcsname}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifvaluesomething#1%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \ifx\m_syst_string_one\empty
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
-\def\doifelsevaluenothing#1%
- {\edef\!!stringa{\csname#1\endcsname}%
- \ifx\!!stringa\empty
+\unexpanded\def\doifelsevaluenothing#1%
+ {\edef\m_syst_string_one{\csname#1\endcsname}%
+ \ifx\m_syst_string_one\empty
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
@@ -5308,9 +5336,9 @@
\processcommalist[#3]\syst_helpers_do_common_check_all
\ifdone\expandafter#1\else\expandafter#2\fi}
-\def\doifallcommonelse{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
-\def\doifallcommon {\syst_helpers_do_if_all_common_else\firstofonearguments\gobbleoneargument }
-\def\doifnotallcommon {\syst_helpers_do_if_all_common_else\gobbleoneargument \firstofonearguments }
+\unexpanded\def\doifallcommonelse{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifallcommon {\syst_helpers_do_if_all_common_else\firstofonearguments\gobbleoneargument }
+\unexpanded\def\doifnotallcommon {\syst_helpers_do_if_all_common_else\gobbleoneargument \firstofonearguments }
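%D A sketch of the intended usage (the first list is checked against
%D the second one):
%D
%D \starttyping
%D \doifallcommonelse{aa,bb}{aa,bb,cc}{all common}{not all common}
%D \stoptyping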
%D \macros
%D {DOIF,DOIFELSE,DOIFNOT}
@@ -5328,39 +5356,42 @@
%D
%D We have to use a two||step implementation, because the
%D expansion has to take place outside \type{\uppercase}.
+%D
+%D These might end up as \LUA\ based helpers (i.e.\ consider these
+%D obsolete):
\unexpanded\def\syst_helpers_do_IF#1#2%
- {\uppercase{{$#1$}{$#2$}}%
+ {\uppercase{\syst_helpers_do_if_in_string_else{$#1$}{$#2$}}%
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
\fi}
\unexpanded\def\syst_helpers_do_IF_NOT#1#2%
- {\uppercase{{$#1$}{$#2$}}%
+ {\uppercase{\syst_helpers_do_if_in_string_else{$#1$}{$#2$}}%
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
\unexpanded\def\syst_helpers_do_IF_ELSE#1#2%
- {\uppercase{{$#1$}{$#2$}}%
+ {\uppercase{\syst_helpers_do_if_in_string_else{$#1$}{$#2$}}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
\unexpanded\def\syst_helpers_do_IF_INSTRING_ELSE#1#2%
- {\uppercase{{$#1$}{$#2$}}%
+ {\uppercase{\syst_helpers_do_if_in_string_else{$#1$}{$#2$}}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\DOIF #1#2{\normalexpanded{\syst_helpers_do_IF {#1}{#2}}}
-\unexpanded\def\DOIFNOT #1#2{\normalexpanded{\syst_helpers_do_IF_NOT {#1}{#2}}}
-\unexpanded\def\DOIFELSE #1#2{\normalexpanded{\syst_helpers_do_IF_ELSE {#1}{#2}}}
-\unexpanded\def\DOIFINSTRINGELSE #1#2{\normalexpanded{\syst_helpers_do_IF_INSTRING_ELSE{#1}{#2}}}
+\unexpanded\def\DOIF #1#2{\normalexpanded{\syst_helpers_do_IF {#1}{#2}}}% will become obsolete
+\unexpanded\def\DOIFNOT #1#2{\normalexpanded{\syst_helpers_do_IF_NOT {#1}{#2}}}% will become obsolete
+\unexpanded\def\DOIFELSE #1#2{\normalexpanded{\syst_helpers_do_IF_ELSE {#1}{#2}}}% will become obsolete
+\unexpanded\def\DOIFINSTRINGELSE #1#2{\normalexpanded{\syst_helpers_do_IF_INSTRING_ELSE{#1}{#2}}}% will become obsolete
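%D The comparison is case insensitive, as in this sketch:
%D
%D \starttyping
%D \DOIFELSE{yes}{YES}{the same}{different} % the same
%D \stoptyping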
%D \macros
%D {dosingleargumentwithset,
@@ -5396,25 +5427,25 @@
%D \starttyping
%D \definesomething[alfa,beta,...][variable=...,...]
%D \stoptyping
-%D
-%D Now a whole bunch of variables like \type{\@@xxalfavariable}
-%D and \type{\@@xxbetavariable} is defined.
-\def\dodoublewithset[#1][#2]%
+\let\m_syst_helpers_with_set_command\empty
+\let\syst_helpers_with_set_step \relax
+
+\def\syst_helpers_with_set_double[#1][#2]%
{\doifsomething{#1}
- {\def\@@dodowithsetcommand##1{\@@dowithsetcommand[##1][#2]}%
- \processcommalist[#1]\@@dodowithsetcommand}}
+ {\def\syst_helpers_with_set_step##1{\m_syst_helpers_with_set_command[##1][#2]}%
+ \processcommalist[#1]\syst_helpers_with_set_step}}
-\def\dotriplewithset[#1][#2][#3]%
+\def\syst_helpers_with_set_triple[#1][#2][#3]%
{\doifsomething{#1}
- {\def\@@dodowithsetcommand##1{\@@dowithsetcommand[##1][#2][#3]}%
- \processcommalist[#1]\@@dodowithsetcommand}}
+ {\def\syst_helpers_with_set_step##1{\m_syst_helpers_with_set_command[##1][#2][#3]}%
+ \processcommalist[#1]\syst_helpers_with_set_step}}
-\def\dodoubleemptywithset #1{\let\@@dowithsetcommand#1\dodoubleempty \dodoublewithset} % \command
-\def\dodoubleargumentwithset#1{\let\@@dowithsetcommand#1\dodoubleargument\dodoublewithset} % \command
+\def\dodoubleemptywithset #1{\let\m_syst_helpers_with_set_command#1\dodoubleempty \syst_helpers_with_set_double} % \command
+\def\dodoubleargumentwithset#1{\let\m_syst_helpers_with_set_command#1\dodoubleargument\syst_helpers_with_set_double} % \command
-\def\dotripleemptywithset #1{\let\@@dowithsetcommand#1\dotripleempty \dotriplewithset} % \command
-\def\dotripleargumentwithset#1{\let\@@dowithsetcommand#1\dotripleargument\dotriplewithset} % \command
+\def\dotripleemptywithset #1{\let\m_syst_helpers_with_set_command#1\dotripleempty \syst_helpers_with_set_triple} % \command
+\def\dotripleargumentwithset#1{\let\m_syst_helpers_with_set_command#1\dotripleargument\syst_helpers_with_set_triple} % \command
%D \macros
%D {stripcharacters,stripspaces}
@@ -5441,16 +5472,18 @@
%D As we can see below, spaces following a control sequence are
%D to be enclosed in \type{{}}.
-\def\stripcharacter#1\from#2\to#3%
- {\def\dostripcharacter##1#1##2\end
- {\edef\!!strippedstring{\!!strippedstring##1}%
- \doifnotempty{##2}{\dostripcharacter##2\end}}%
- \let\!!strippedstring\empty
- \edef\!!stringa{#2}%
- \expandafter\dostripcharacter\!!stringa#1\end
- \dodoglobal\let#3\!!strippedstring}
+\let\m_syst_helpers_strip_character\empty
-\def\stripspaces\from#1\to#2% will become \unspacestring#1\from#2
+\unexpanded\def\stripcharacter#1\from#2\to#3%
+ {\def\syst_helpers_strip_character##1#1##2\end
+ {\edef\m_syst_helpers_strip_character{\m_syst_helpers_strip_character##1}%
+ \doifnotempty{##2}{\syst_helpers_strip_character##2\end}}%
+ \let\m_syst_helpers_strip_character\empty
+ \edef\m_syst_string_one{#2}%
+ \expandafter\syst_helpers_strip_character\m_syst_string_one#1\end
+ \dodoglobal\let#3\m_syst_helpers_strip_character}
+
+\unexpanded\def\stripspaces\from#1\to#2% will become \unspacestring#1\from#2
{\stripcharacter{ }\from#1\to#2}
%D \macros
@@ -5459,7 +5492,7 @@
%D The next macro does the same but is more compatible with other macros,
%D like \type {\convert...}.
-\def\unspacestring#1\to#2%
+\unexpanded\def\unspacestring#1\to#2%
{\stripcharacter{ }\from#1\to#2}
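%D A small usage sketch (\type {\MyName} is only a demo target macro, not
%D part of the system):
%D
%D \starttyping
%D \stripspaces\from Hans Hagen\to\MyName % \MyName now holds: HansHagen
%D \unspacestring Hans Hagen\to\MyName    % same result
%D \stoptyping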
%D \macros
@@ -5482,8 +5515,6 @@
%D We can of course gobble more arguments using the
%D appropriate gobbling command.
-\newif\ifexecuted % general purpose
-
\def\executeifdefined#1% #2 / never change this one again
{\ifcsname#1\endcsname
\csname#1\expandafter\expandafter\expandafter\endcsname\expandafter\gobbleoneargument
@@ -5510,65 +5541,15 @@
%D Is this one still needed?
-\def\p!doifsomespaceelse#1 #2#3\war{\if\noexpand#2@}
+\def\syst_helpers_if_some_space_else#1 #2#3\_e_o_s_{\if\noexpand#2@}
-\def\doifsomespaceelse#1% % #2#3%
- {\p!doifsomespaceelse#1 @ @\war % #3\else#2\fi}
+\def\doifsomespaceelse#1% % #2#3%
+ {\syst_helpers_if_some_space_else#1 @ @\_e_o_s_ % #3\else#2\fi}
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
-% %D \macros
-% %D {adaptdimension,balancedimensions}
-% %D
-% %D Again we introduce some macros that are closely related to
-% %D an interface aspect of \CONTEXT. The first command can be
-% %D used to adapt a \DIMENSION.
-% %D
-% %D \starttyping
-% %D \adaptdimension {dimension} {value}
-% %D \stoptyping
-% %D
-% %D When the value is preceed by a \type{+} or minus, the
-% %D dimension is advanced accordingly, otherwise it gets the
-% %D value.
-%
-% \def\doadaptdimension#1#2\\#3\\%
-% {\if#1+%
-% \dodoglobal\advance
-% \else\if#1-%
-% \dodoglobal\advance
-% \else
-% \dodoglobal
-% \fi\fi
-% #3 #1#2\relax}
-%
-% \def\adaptdimension#1#2%
-% {\expandafter\doadaptdimension#2\\#1\\}
-%
-% %D A second command takes two \DIMENSIONS. Both are adapted,
-% %D depending on the sign of the given value.
-% %D maat. This time we take the value as it is, and don't look
-% %D explicitly at the preceding sign.
-% %D
-% %D \starttyping
-% %D \balancedimensions {dimension 1} {dimension 2} {value}
-% %D \stoptyping
-% %D
-% %D When a positive value is given, the first dimension is
-% %D incremented, the second ond is decremented. A negative value
-% %D has the opposite result.
-%
-% \def\balancedimensions#1#2#3%
-% {\scratchdimen#3\relax
-% \redoglobal\advance#1 \scratchdimen
-% \dodoglobal\advance#2 -\scratchdimen}
-%
-% %D Both commands can be preceded by \type{\doglobal}. Here we
-% %D use \type{\redo} first, because \type{\dodo} resets the
-% %D global character.
-
%D \macros
%D {processseparatedlist}
%D
@@ -5600,56 +5581,61 @@
%D Therefore we smuggle a \type {\relax} in front of the
%D argument, which we remove afterwards.
-\def\doprocessseparatedlist#1]#2[#3]#4%
- {\def\dodoprocessseparatedlist##1##2#3%
- {\def\!!stringa{##2}% suggested by VZ
+\let\syst_helpers_process_separated_list_step\relax
+
+\def\syst_helpers_process_separated_list#1]#2[#3]#4%
+ {\def\syst_helpers_process_separated_list_step##1##2#3%
+ {\def\m_syst_string_one{##2}% suggested by VZ
\if]##1%
- \let\dodoprocessseparatedlist\relax
- \else\ifx\blankspace\!!stringa
+ \let\syst_helpers_process_separated_list_step\relax
+ \else\ifx\blankspace\m_syst_string_one
#4{##1}%
\else\if]##2%
- \let\dodoprocessseparatedlist\relax
+ \let\syst_helpers_process_separated_list_step\relax
\else
#4{##1##2}%
\fi\fi\fi
- \dodoprocessseparatedlist}%
- \expandafter\dodoprocessseparatedlist\gobbleoneargument#1#3]#3}
+ \syst_helpers_process_separated_list_step}%
+ \expandafter\syst_helpers_process_separated_list_step\gobbleoneargument#1#3]#3}
-\def\processseparatedlist[%
- {\doprocessseparatedlist\relax}
+\unexpanded\def\processseparatedlist[%
+ {\syst_helpers_process_separated_list\relax}
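%D A possible usage sketch (\type {\showitem} is just a demo command):
%D
%D \starttyping
%D \def\showitem#1{[#1]}
%D
%D \processseparatedlist[aap+noot+mies][+]\showitem % gives [aap][noot][mies]
%D \stoptyping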
%D \macros
%D {processlist}
%D
-%D An even more general list processing macro is the
-%D following one:
+%D An even more general list processing macro is the following one:
%D
%D \starttyping
%D \processlist{beginsym}{endsym}{separator}\docommand list
%D \stoptyping
%D
-%D This one supports arbitrary open and close symbols as well
-%D as user defined separators.
+%D This one supports arbitrary open and close symbols as well as user
+%D defined separators.
%D
%D \starttyping
%D \processlist(){=>}\docommand(a=>b=>c=>d)
%D \stoptyping
-\def\processlist#1#2#3#4% no blank skipping !
- {\def\doprocesslist##1#2%
- {\def\dodoprocesslist####1####2#3%
+\let\syst_helpers_process_any_list \relax
+\let\syst_helpers_process_any_list_indeed\relax
+\let\syst_helpers_process_any_list_step \relax
+
+\unexpanded\def\processlist#1#2#3#4% no blank skipping !
+ {\def\syst_helpers_process_any_list_indeed##1#2%
+ {\def\syst_helpers_process_any_list_step####1####2#3%
{\ifx#2####1%
- \let\dodoprocesslist\relax
+ \let\syst_helpers_process_any_list_step\relax
\else\ifx#2####2%
- \let\dodoprocesslist\relax
+ \let\syst_helpers_process_any_list_step\relax
\else
#4{####1####2}%
\fi\fi
- \dodoprocesslist}%
- \expandafter\dodoprocesslist\gobbleoneargument##1#3#2#3}%
- \def\dodoprocesslist#1%
- {\doprocesslist\relax}%
- \dodoprocesslist}
+ \syst_helpers_process_any_list_step}%
+ \expandafter\syst_helpers_process_any_list_step\gobbleoneargument##1#3#2#3}%
+ \def\syst_helpers_process_any_list#1%
+ {\syst_helpers_process_any_list_indeed\relax}%
+ \syst_helpers_process_any_list}
%D \macros
%D {processassignlist}
@@ -5665,12 +5651,12 @@
%D This command can be integrated in \type{\getparameters}, but
%D we decided it was best not to do so.
-\def\processassignlist#1[#2]#3%
- {\def\p!dodogetparameter[##1=##2=##3]%
+\unexpanded\def\processassignlist#1[#2]#3%
+ {\def\syst_helpers_process_assign_list_assign[##1=##2=##3]%
{\doifnot{##3}\relax{#3{##1}}}%
- \def\p!dogetparameter##1%
- {\p!dodogetparameter[##1==\relax]}%
- \processcommalist[#2]\p!dogetparameter}
+ \def\syst_helpers_process_assign_list_step##1%
+ {\syst_helpers_process_assign_list_assign[##1==\relax]}%
+ \processcommalist[#2]\syst_helpers_process_assign_list_step}
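%D A usage sketch (\type {\showkey} is just a demo command); the command is
%D applied to each key that actually got a value assigned:
%D
%D \starttyping
%D \def\showkey#1{[#1]}
%D
%D \processassignlist[width=1cm,height=2cm]\showkey % gives [width][height]
%D \stoptyping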
%D \macros
%D {untexargument
@@ -5687,20 +5673,22 @@
%D They remove braces and backslashes and give us something to
%D sort.
-\def\untexsomething
+\let\m_syst_helpers_untexed\empty
+
+\unexpanded\def\untexsomething
{\begingroup
\catcode\leftbraceasciicode \ignorecatcode
\catcode\rightbraceasciicode\ignorecatcode
\escapechar\minusone
- \dountexsomething}
+ \syst_helpers_untex_something}
-\def\dountexsomething#1#2\to#3%
- {\doglobal#1#2\to\untexedargument
+\def\syst_helpers_untex_something#1#2\to#3%
+ {\doglobal#1#2\to\m_syst_helpers_untexed
\endgroup
- \let#3\untexedargument}
+ \let#3\m_syst_helpers_untexed}
-\def\untexargument{\untexsomething\convertargument}
-\def\untexcommand {\untexsomething\convertcommand}
+\unexpanded\def\untexargument{\untexsomething\convertargument}
+\unexpanded\def\untexcommand {\untexsomething\convertcommand}
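%D For instance (\type {\cleanname} is just a demo target macro), the
%D argument should end up as plain characters:
%D
%D \starttyping
%D \untexargument{\TeX}\to\cleanname % \cleanname holds the characters: TeX
%D \stoptyping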
%D \macros
%D {ScaledPointsToBigPoints,ScaledPointsToWholeBigPoints}
@@ -5725,14 +5713,14 @@
% \PointsToWholeBigPoints{10.53941pt}\test \test
% \PointsToWholeBigPoints{10.53942pt}\test \test
-\def\PointsToBigPoints#1#2%
+\unexpanded\def\PointsToBigPoints#1#2%
{\edef#2{\withoutpt\the\dimexpr.996264\dimexpr#1\relax\relax}}
-\def\PointsToWholeBigPoints#1#2%
+\unexpanded\def\PointsToWholeBigPoints#1#2%
{\edef#2{\the\numexpr\dimexpr.996264\dimexpr#1\relax\relax/\maxcard\relax}}
-\def\ScaledPointsToBigPoints #1{\PointsToBigPoints {\number#1\scaledpoint}}
-\def\ScaledPointsToWholeBigPoints#1{\PointsToWholeBigPoints{\number#1\scaledpoint}}
+\unexpanded\def\ScaledPointsToBigPoints #1{\PointsToBigPoints {\number#1\scaledpoint}}
+\unexpanded\def\ScaledPointsToWholeBigPoints#1{\PointsToWholeBigPoints{\number#1\scaledpoint}}
%D \macros
%D {PointsToReal}
@@ -5744,9 +5732,8 @@
%D \PointsToReal {dimension} \target
%D \stoptyping
-\def\PointsToReal#1#2%
- {\scratchdimen#1%
- \edef#2{\withoutpt\the\scratchdimen}}
+\unexpanded\def\PointsToReal#1#2%
+ {\edef#2{\withoutpt\the\dimexpr#1}}
%D \macros
%D {dontleavehmode}
@@ -5777,16 +5764,17 @@
%D
%D And finally we got the following alternative, one that avoids
%D interfering grouping at the cost of a box.
-
-\newbox\@@dlhbox
-
-\unexpanded\def\dontleavehmode
- {\ifhmode\else \ifmmode\else
- \setbox\@@dlhbox\hbox{\mathsurround\zeropoint\everymath\emptytoks$ $}\unhbox\@@dlhbox
- \fi \fi}
-
-%D But, if you run a recent version of \TEX, we can use the new
-%D primitive:
+%D
+%D \starttyping
+%D \newbox\b_syst_helpers_dlh
+%D
+%D \unexpanded\def\dontleavehmode
+%D {\ifhmode\else \ifmmode\else
+%D \setbox\b_syst_helpers_dlh\hbox{\mathsurround\zeropoint\everymath\emptytoks$ $}\unhbox\b_syst_helpers_dlh
+%D \fi \fi}
+%D \stoptyping
+%D
+%D But, as we run a recent version of \TEX, we can use the new primitive:
\ifdefined\normalquitvmode \let\dontleavehmode\normalquitvmode \fi
@@ -5800,14 +5788,17 @@
%D \lowercasestring somestring\to\somestring
%D \stoptyping
%D
-%D the first argument may be a \type{\macro}.
+%D The first argument may be a \type{\macro}.
-\def\uppercasestring#1\to#2%
+\unexpanded\def\uppercasestring#1\to#2%
{\uppercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
-\def\lowercasestring#1\to#2%
+\unexpanded\def\lowercasestring#1\to#2%
{\lowercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
+%D These macros are sort of obsolete as we never use uppercase this
+%D way.
+
%D \macros
%D {handletokens}
%D
@@ -5843,25 +5834,26 @@
%D takes a real counter. The macro can be preceded by \type
%D {\doglobal}.
-\def\counttoken#1\in#2\to#3%
+\def\syst_helpers_count_token#1% obeys {}
+ {\def\m_syst_string_three{#1}%
+ \ifx\m_syst_string_two\m_syst_string_three \else
+ \ifx\m_syst_string_one\m_syst_string_three
+ \advance\scratchcounter\plusone
+ \fi
+ \expandafter\syst_helpers_count_token
+ \fi}
+
+\unexpanded\def\counttoken#1\in#2\to#3%
{\scratchcounter\zerocount
- \def\!!stringa{#1}%
- \def\!!stringb{\end}%
- \def\docounttoken##1% obeys {}
- {\def\!!stringc{##1}%
- \ifx\!!stringb\!!stringc \else
- \ifx\!!stringa\!!stringc
- \advance\scratchcounter\plusone
- \fi
- \expandafter\docounttoken
- \fi}%
- \docounttoken#2\end
+ \def\m_syst_string_one{#1}%
+ \def\m_syst_string_two{\end}%
+ \syst_helpers_count_token#2\end
\dodoglobal#3\scratchcounter}
-\def\counttokens#1\to#2%
+\unexpanded\def\counttokens#1\to#2%
{\scratchcounter\zerocount
- \def\docounttoken##1{\advance\scratchcounter\plusone}%
- \handletokens#1\with\docounttoken
+ \def\syst_helpers_count_token##1{\advance\scratchcounter\plusone}%
+ \handletokens#1\with\syst_helpers_count_token
\dodoglobal#2\scratchcounter}
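%D A usage sketch (\type {\mycounter} is just a demo count register):
%D
%D \starttyping
%D \newcount\mycounter
%D
%D \counttoken s\in a sunny sunday\to\mycounter % \mycounter becomes 2
%D \counttokens{context}\to\mycounter           % \mycounter becomes 7
%D \stoptyping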
%D \macros
@@ -5871,17 +5863,17 @@
%D Consider for instance the macro for which I originally
%D wrote this token handler.
-\def\splitofftokens#1\from#2\to#3% slow but hardly used
+\unexpanded\def\splitofftokens#1\from#2\to#3% slow but hardly used
{\ifnum#1>\zerocount
\scratchcounter#1\relax
- \def\dosplitofftokens##1%
+ \def\syst_helpers_split_off_tokens##1%
{\ifnum\scratchcounter>\zerocount
\advance\scratchcounter \minusone
\edef#3{#3##1}%
\fi}%
% \let#3\empty % #3 can be #2, so:
\expandafter\let\expandafter#3\expandafter\empty
- \expandafter\handletokens#2\with\dosplitofftokens
+ \expandafter\handletokens#2\with\syst_helpers_split_off_tokens
\else
\edef#3{#2}%
\fi}
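%D For example (\type {\shortened} is just a demo macro):
%D
%D \starttyping
%D \splitofftokens 2\from context\to\shortened % \shortened holds: co
%D \stoptyping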
@@ -5918,27 +5910,27 @@
%D way we can handle the sentinel, a blank space and grouped
%D tokens.
-\def\dohandletokens % \nexthandledtoken is part of interface
- {\futurelet\nexthandledtoken\dodohandletokens}
+\unexpanded\def\syst_helpers_handle_tokens % \nexthandledtoken is part of interface
+ {\futurelet\nexthandledtoken\syst_helpers_handle_tokens_indeed}
\def\handletokens#1\with#2%
- {\gdef\dododohandletokens{#2}% permits more complex #2's
- \dohandletokens#1\end}
+ {\gdef\syst_helpers_handle_tokens_command{#2}% permits more complex #2's
+ \syst_helpers_handle_tokens#1\end}
-\def\dodohandletokens
+\def\syst_helpers_handle_tokens_indeed
{\ifx\nexthandledtoken\blankspace
- \expandafter\dodohandletokensone
+ \expandafter\syst_helpers_handle_tokens_indeed_one
\else\ifx\nexthandledtoken\end
\expandafter\expandafter\expandafter\gobbletwoarguments % also gobble the \end
\else
- \expandafter\expandafter\expandafter\dodohandletokenstwo
+ \expandafter\expandafter\expandafter\syst_helpers_handle_tokens_indeed_two
\fi\fi *}
-\def\dodohandletokensone * %
- {\dododohandletokens{ }\dohandletokens}
+\def\syst_helpers_handle_tokens_indeed_one * %
+ {\syst_helpers_handle_tokens_command{ }\syst_helpers_handle_tokens}
-\def\dodohandletokenstwo *#1%
- {\dododohandletokens{#1}\dohandletokens}
+\def\syst_helpers_handle_tokens_indeed_two *#1%
+ {\syst_helpers_handle_tokens_command{#1}\syst_helpers_handle_tokens}
%D This macro is tested on:
%D
@@ -6021,130 +6013,35 @@
%D This macro is first used in the tabulation macros.
\unexpanded\def\processcontent#1%
- {\begingroup\expandafter\doprocesscontent\csname#1\endcsname}
+ {\begingroup\expandafter\syst_helpers_process_content\csname#1\endcsname}
-\unexpanded\def\doprocesscontent#1#2#3%
- {\unexpanded\def\doprocesscontent##1#1%
+\unexpanded\def\syst_helpers_process_content#1#2#3%
+ {\unexpanded\def\syst_helpers_process_content##1#1%
{\endgroup\def#2{##1}#3}%
- \doprocesscontent}
+ \syst_helpers_process_content}
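%D A sketch of how this can be used (the names \type {\startremark},
%D \type {\remarkcontent} and \type {\stopremark} are just for the example):
%D
%D \starttyping
%D \unexpanded\def\startremark
%D   {\processcontent{stopremark}\remarkcontent{(\remarkcontent)}}
%D
%D \startremark some text \stopremark
%D \stoptyping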
%D \macros
%D {dogobblesingleempty, dogobbledoubleempty}
%D
%D These two macros safely grab and dispose two arguments.
-\def\dogobblesingleempty{\dosingleempty\dodogobblesingleempty}
-\def\dogobbledoubleempty{\dodoubleempty\dodogobbledoubleempty}
+\def\dogobblesingleempty{\dosingleempty\syst_helpers_gobble_single_empty}
+\def\dogobbledoubleempty{\dodoubleempty\syst_helpers_gobble_double_empty}
-\def\dodogobblesingleempty [#1]{}
-\def\dodogobbledoubleempty[#1][#2]{}
+\def\syst_helpers_gobble_single_empty [#1]{}
+\def\syst_helpers_gobble_double_empty[#1][#2]{}
\let\gobblesingleempty\dogobblesingleempty % also used
\let\gobbledoubleempty\dogobbledoubleempty % also used
%D \macros
-%D {sortcommalist,sortcommacommand,
-%D donumericcompare,comparedresult}
-%D
-%D Sometimes we need to sort a commalist, so here is Taco's
-%D solution. This will in many cases be a list that is stored
-%D in a \type{\csname}, so both commalist and commacommands are
-%D supported. The sorting algorithm is very simple, so the list
-%D should not be too long or sorting will be very slow.
-%D
-%D \starttyping
-%D \sortcommalist[10,2,4,5,6,1,2,3,4,10,20]\donumericcompare
-%D
-%D \def\test{10,2,4,5,6,1,2,3,4,10,20}
-%D
-%D \sortcommacommand[\test]\donumericcompare
-%D \stoptyping
-%D
-%D In both cases, the result is available in the macro \type
-%D {\sortedcommalist}.
-%D
-%D Parameter \type{#2} is a macro that should accept two
-%D parameters, and it has to decide which one is larger, by
-%D setting the counter \type{\comparedresult} to~0 (for equal),
-%D 1~(if it's first argument is larger), or~2 (if it's second
-%D argument is larger).
-%D
-%D As said, these macro are largely written by Taco, and are
-%D (maybe therefore) also the first application of \type
-%D {\replaceincommalist}.
-
-\newcount\comparedresult
-
-\def\sortcommacommand[#1]%
- {\expandafter\sortcommalist\expandafter[#1]}
-
-\def\sortcommalist[#1]#2%
- {\getcommalistsize[#1]%
- \ifnum\commalistsize>1
- \let\sortedcommalist\empty
- \let\comparecommand#2%
- \processcommalist[#1]\dosortcommacommand
- \else
- \def\sortedcommalist{#1}%
- \fi}
-
-\def\dosortcommacommand#1%
- {\ifx\sortedcommalist\empty
- \def\sortedcommalist{#1}%
- \else
- \def\!!tempa{#1}%
- \ifx\!!tempa\empty\else
- \scratchcounter\plusone
- \expandafter\getcommalistsize\expandafter[\sortedcommalist]%
- \expandafter\processcommalist\expandafter[\sortedcommalist]\docompareitems
- \fi
- \fi}
-
-%D All those \type{\expandafter}'s are there because I do not
-%D want to use \type{\edef}.
-
-\def\docompareitems#1%
- {\doifnotempty{#1}
- {\expandafter\comparecommand\expandafter{\!!tempa}{#1}\relax
- %\ifcase\compareresult % equal
- \ifnum\comparedresult<2
- \ifnum\scratchcounter=\commalistsize
- \expandafter\expandafter\expandafter\def
- \expandafter\expandafter\expandafter\sortedcommalist
- \expandafter\expandafter\expandafter{\expandafter\sortedcommalist\expandafter,\!!tempa}%
- \fi
- %\or % new element larger
- % \ifnum\scratchcounter=\commalistsize
- % \expandafter\expandafter\expandafter\def
- % \expandafter\expandafter\expandafter\sortedcommalist
- % \expandafter\expandafter\expandafter{\expandafter\sortedcommalist\expandafter,\!!tempa}%
- % \fi
- \else % old element larger
- \expandafter\def\expandafter\newcommalistelement\expandafter{\!!tempa,#1}%
- \replaceincommalist\sortedcommalist\scratchcounter
- \expandafter\quitcommalist
- \fi}%
- \advance\scratchcounter \plusone} % bug, was \minusone
-
-%D The macro \type{\donumericcompare} considers everything
-%D that is not a number to be larger than any number.
-
-% 0: both are equal, 1: #1 is larger, 2: #2 is larger
-
-\def\thenumericcompare#1#2% no \relax es inside hee
- {\doifnumberelse{#1}
- {\doifnumberelse{#2}{\ifnum#1>#2 \plusone\else\ifnum#1<#2 \plustwo\else\zerocount\fi\fi}\plustwo}
- \plusone}
-
-\def\donumericcompare
- {\comparedresult\thenumericcompare}
-
-%D \macros
%D {@True, @False, @Not, @And}
%D
%D Some predicate logic functions, used in for instance the
%D math module.
+% These have rather ugly names ... will change:
+
\def\@True {00}
\def\@False {01}
\def\@Not #1{0\ifcase#11 \or\expandafter 1\else \expandafter 0\fi}
@@ -6167,10 +6064,10 @@
%D assignment inside a box. The \type{\empty}'s permit
%D gobbling while preventing spurious \type{\relax}'s.
-\def\setdimensionwithunit#1#2#3% number unit dimension / nice trick
+\unexpanded\def\setdimensionwithunit#1#2#3% number unit dimension / nice trick
{\afterassignment\gobblefourarguments#1=#2#3pt\relax\empty\empty\empty\empty}
-\def\freezedimensionwithunit#1#2%
+\unexpanded\def\freezedimensionwithunit#1#2%
{\setdimensionwithunit\scratchdimen#1{#2}\edef#1{\the\scratchdimen}}
%D \macros
@@ -6179,25 +6076,25 @@
%D Not that fast I guess, but here's a way to test for token
%D registers being empty.
-\def\doifsometokselse#1%
- {\edef\!!stringa{\the#1}% one level expansion so quite ok
- \ifx\!!stringa\empty
+\unexpanded\def\doifsometokselse#1%
+ {\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
+ \ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
-\def\doifsometoks#1%
- {\edef\!!stringa{\the#1}% one level expansion so quite ok
- \ifx\!!stringa\empty
+\unexpanded\def\doifsometoks#1%
+ {\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
+ \ifx\m_syst_string_one\empty
\expandafter\gobbleoneargument
\else
\expandafter\firstofoneargument
\fi}
-\def\doifemptytoks#1%
- {\edef\!!stringa{\the#1}% one level expansion so quite ok
- \ifx\!!stringa\empty
+\unexpanded\def\doifemptytoks#1%
+ {\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
+ \ifx\m_syst_string_one\empty
\expandafter\firstofoneargument
\else
\expandafter\gobbleoneargument
@@ -6212,9 +6109,9 @@
\def\syst_helpers_strict_inspect_next_character% no user macro !
{\ifx\nexttoken\charactertoken
- \expandafter\!!stringa
+ \expandafter\m_syst_action_yes
\else
- \expandafter\!!stringb
+ \expandafter\m_syst_action_nop
\fi}
% better: push/pop
@@ -6267,10 +6164,14 @@
%D
%D Concatenate commalists:
-\def\serializecommalist[#1]%
+\let\syst_helpers_serialize_comma_list_step\relax
+
+\def\syst_helpers_serialize_comma_list_step#1%
+ {\edef\serializedcommalist{\serializedcommalist#1}}
+
+\unexpanded\def\serializecommalist[#1]%
{\let\serializedcommalist\empty
- \def\docommand##1{\edef\serializedcommalist{\serializedcommalist##1}}%
- \processcommacommand[#1]\docommand}
+ \processcommacommand[#1]\syst_helpers_serialize_comma_list_step}
%D \macros
%D {purenumber}
@@ -6346,7 +6247,7 @@
\def\filterfromvalue#1#2#3% value max n
{\expandafter\doubleexpandafter\csname % we use the fact that an
\expandafter\ifx\csname#1\endcsname\relax % undefined cs has become \relax
- \strippedcsname\gobbleoneargument % which we then gobble here
+ \strippedcsname\gobbleoneargument % which we then gobble here
\else
\dofilterfromstr{#2}{#3}%
\fi
@@ -6364,13 +6265,27 @@
%D ... \measure{mywidth} ...
%D \stoptyping
-\def\??dm{@@dm} % brrr
+\installsystemnamespace{measure}
\unexpanded\def\definemeasure
- {\dodoubleargument\dodefinemeasure}
+ {\dodoubleargument\syst_helpers_define_measure}
+
+\def\syst_helpers_define_measure[#1][#2]%
+ {\expandafter\def\csname\??measure#1\endcsname{#2}}
-\def\dodefinemeasure[#1][#2]%
- {\expandafter\def\csname\??dm#1\endcsname{#2}}
+\unexpanded\def\freezemeasure
+ {\dodoubleargument\syst_helpers_freeze_measure}
+
+\def\syst_helpers_freeze_measure[#1][#2]%
+ {\expandafter\edef\csname\??measure#1\endcsname{\the\dimexpr#2}}
+
+\unexpanded\def\setmeasure #1#2{\expandafter\def \csname\??measure#1\endcsname{#2}} % quick way
+\unexpanded\def\setgmeasure#1#2{\expandafter\gdef\csname\??measure#1\endcsname{#2}} % quick way
+\unexpanded\def\setemeasure#1#2{\expandafter\edef\csname\??measure#1\endcsname{\the\dimexpr#2}} % quick way
+\unexpanded\def\setxmeasure#1#2{\expandafter\xdef\csname\??measure#1\endcsname{\the\dimexpr#2}} % quick way
+
+\def\measure#1% maybe \dimexpr ... \relax
+ {\ifcsname\??measure#1\endcsname\csname\??measure#1\endcsname\else\zeropoint\fi}
% #2 could be omitted, but we want to support spaces
%
@@ -6378,13 +6293,21 @@
% \setmeasure {xx} {1cm}
% \setmeasure {xxx}{1cm}
-\unexpanded\def\setmeasure #1#2{\expandafter\def \csname\??dm#1\endcsname{#2}} % quick way
-\unexpanded\def\setemeasure#1#2{\expandafter\edef\csname\??dm#1\endcsname{#2}} % quick way
-\unexpanded\def\setgmeasure#1#2{\expandafter\gdef\csname\??dm#1\endcsname{#2}} % quick way
-\unexpanded\def\setxmeasure#1#2{\expandafter\xdef\csname\??dm#1\endcsname{#2}} % quick way
+%D \macros
+%D {dividedsize}
+%D
+%D This one can be used inside a measure (used in m4all):
+%D
+%D \starttyping
+%D \definemeasure[columnwidth][\dividedsize\textwidth{1em}{3}]
+%D \stoptyping
-\def\measure#1% maybe \dimexpr ... \relax
- {\ifcsname\??dm#1\endcsname\csname\??dm#1\endcsname\else\zeropoint\fi}
+\def\dividedsize#1#2#3% size gap n
+ {\dimexpr
+ \ifnum\dimexpr#1\relax>\plusone
+ (\dimexpr#1\relax-\numexpr#3-\plusone\relax\dimexpr#2\relax)/#3\else#1%
+ \fi
+ \relax}
%D \macros
%D {doifdimensionelse}
@@ -6392,9 +6315,9 @@
%D This is a dirty one: we simply append a unit and discard it when needed.
\def\doifdimensionelse#1%
- {\afterassignment\dodoifdimensionelse\scratchdimen#1pt\relax}
+ {\afterassignment\syst_helpers_if_dimension_else\scratchdimen#1pt\relax}
-\def\dodoifdimensionelse#1%
+\def\syst_helpers_if_dimension_else#1%
{\ifx#1\relax
\expandafter\secondoftwoarguments
\else % #1=p ... t\relax
@@ -6418,53 +6341,57 @@
%D \NC 1 \NC \doifdimenstringelse {1}{yes}{no} \NC \NR
%D \stoptabulate
+\installsystemnamespace{dimenchecka}
+\installsystemnamespace{dimencheckb}
+\installsystemnamespace{dimencheckc}
+
\def\doifdimenstringelse#1{\normalexpanded{\noexpand\dodimenteststageone#1}\empty\empty]}
-\def\dodimenteststageone #1#2{\csname d!1!\ifcsname d!1!#2\endcsname#2\else x\fi\endcsname#2}
-\def\dodimenteststagetwo #1#2{\csname d!2!\ifcsname d!2!#2\endcsname#2\else x\fi\endcsname#2}
-\def\dodimenteststagethree #1]{\csname d!3!\ifcsname d!3!#1\endcsname#1\else x\fi\endcsname}
-
-\expandafter\let\csname d!1!x\endcsname\dodimenteststagethree
-\expandafter\let\csname d!2!x\endcsname\dodimenteststagethree
-\expandafter\let\csname d!3!x\endcsname\secondoftwoarguments
-
-\expandafter\let\csname d!1!.\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!1!,\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!1!1\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!2\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!3\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!4\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!5\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!6\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!7\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!8\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!9\endcsname\dodimenteststageone
-\expandafter\let\csname d!1!0\endcsname\dodimenteststageone
-
-\expandafter\let\csname d!2!1\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!2\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!3\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!4\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!5\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!6\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!7\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!8\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!9\endcsname\dodimenteststagetwo
-\expandafter\let\csname d!2!0\endcsname\dodimenteststagetwo
-
-\expandafter\let\csname d!3!pt\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!pc\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!in\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!bp\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!cm\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!mm\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!dd\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!cc\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!sp\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!ex\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!em\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!nd\endcsname\firstoftwoarguments
-\expandafter\let\csname d!3!nc\endcsname\firstoftwoarguments
+\def\dodimenteststageone #1#2{\csname \??dimenchecka\ifcsname \??dimenchecka#2\endcsname#2\else x\fi\endcsname#2}
+\def\dodimenteststagetwo #1#2{\csname \??dimencheckb\ifcsname \??dimencheckb#2\endcsname#2\else x\fi\endcsname#2}
+\def\dodimenteststagethree #1]{\csname \??dimencheckc\ifcsname \??dimencheckc#1\endcsname#1\else x\fi\endcsname}
+
+\expandafter\let\csname \??dimenchecka x\endcsname\dodimenteststagethree
+\expandafter\let\csname \??dimencheckb x\endcsname\dodimenteststagethree
+\expandafter\let\csname \??dimencheckc x\endcsname\secondoftwoarguments
+
+\expandafter\let\csname \??dimenchecka.\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimenchecka,\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimenchecka1\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka2\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka3\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka4\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka5\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka6\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka7\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka8\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka9\endcsname\dodimenteststageone
+\expandafter\let\csname \??dimenchecka0\endcsname\dodimenteststageone
+
+\expandafter\let\csname \??dimencheckb1\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb2\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb3\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb4\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb5\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb6\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb7\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb8\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb9\endcsname\dodimenteststagetwo
+\expandafter\let\csname \??dimencheckb0\endcsname\dodimenteststagetwo
+
+\expandafter\let\csname \??dimencheckc pt\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc pc\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc in\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc bp\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc cm\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc mm\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc dd\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc cc\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc sp\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc ex\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc em\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc nd\endcsname\firstoftwoarguments
+\expandafter\let\csname \??dimencheckc nc\endcsname\firstoftwoarguments
%D \macros
%D {comparedimension,comparedimensioneps}
@@ -6502,51 +6429,51 @@
% \copycsname xxx\endcsname\csname ..\endcsname
-\def\copycsname{\expandafter\expandafter\expandafter\let\expandafter\expandafter\csname}
+\unexpanded\def\copycsname{\expandafter\expandafter\expandafter\let\expandafter\expandafter\csname}
% \letcscsname \crap \csname ..\endcsname
% \letcsnamecs \csname ..\endcsname\crap
% \letcsnamecsname\csname ..\endcsname\csname ..\endcsname
-\def\letcscsname {\expandafter\let\expandafter}
-\def\letcsnamecs {\expandafter\let}
-\def\letcsnamecsname{\expandafter\expandafter\expandafter\let\expandafter\expandafter}
+\unexpanded\def\letcscsname {\expandafter\let\expandafter}
+\unexpanded\def\letcsnamecs {\expandafter\let}
+\unexpanded\def\letcsnamecsname{\expandafter\expandafter\expandafter\let\expandafter\expandafter}
% another one, add an item to a commalist
-\def\addvalue#1#2% cs item
+\unexpanded\def\addvalue#1#2% cs item
{\ifcsname#1\endcsname\else\expandafter\let\csname#1\endcsname\empty\fi
\normalexpanded{\noexpand\addtocommalist{#2}\expandafter\noexpand\csname#1\endcsname}}
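%D For instance (\type {mylist} is just a demo value name):
%D
%D \starttyping
%D \addvalue{mylist}{aap}
%D \addvalue{mylist}{noot} % \getvalue{mylist} now gives: aap,noot
%D \stoptyping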
\def\unspaced#1%
- {\dounspaced#1\end}
+ {\syst_helpers_unspaced#1\end}
-\def\dounspaced#1%
+\def\syst_helpers_unspaced#1%
{\ifx#1\end
\expandafter\gobbleoneargument
\else
\ifx#1\blankspace\else#1\fi
\fi
- \dounspaced}
+ \syst_helpers_unspaced}
-\def\unspaceargument#1\to#2%
+\unexpanded\def\unspaceargument#1\to#2%
{\scratchcounter\catcode\spaceasciicode
\catcode\spaceasciicode\ignorecatcode
\scantextokens{\edef#2{#1}}%
\catcode\spaceasciicode\scratchcounter}
-\def\unspaceafter#1#2%
+\unexpanded\def\unspaceafter#1#2%
{\unspaceargument#2\to\ascii
\expandafter#1\expandafter{\ascii}}
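%D A quick sketch; the spaces in the argument disappear when it is stored in
%D \type {\ascii}:
%D
%D \starttyping
%D \unspaceargument 2 em\to\ascii % \ascii holds: 2em
%D \stoptyping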
% sometimes handy:
-\def\doifhasspaceelse#1%
- {\edef\!!stringa{#1}%
- \normalexpanded{\noexpand\dodoifhasspaceelse#1\space}\empty\relax}
+\unexpanded\def\doifhasspaceelse#1%
+ {\edef\m_syst_string_one{#1}%
+ \normalexpanded{\syst_helpers_if_has_space_else#1\space}\empty\relax}
-\def\dodoifhasspaceelse#1 #2#3\relax % \space\empty\relax
- {\ifx\!!stringa\space
+\unexpanded\def\syst_helpers_if_has_space_else#1 #2#3\relax % \space\empty\relax
+ {\ifx\m_syst_string_one\space
\expandafter\firstoftwoarguments
\else\ifx#2\empty
\doubleexpandafter\secondoftwoarguments
@@ -6556,67 +6483,35 @@
% this will replace loadfile once and alike !!! todo
-\def\@flg@{@flg@}
-
-\def\setflag #1{\expandafter\dodoglobal\expandafter\let\csname\@flg@#1\endcsname\zerocount}
-\def\resetflag#1{\expandafter\dodoglobal\expandafter\let\csname\@flg@#1\endcsname\plusone}
+\installsystemnamespace{flag}
-\let\ifflagged\ifcase
+\unexpanded\def\setflag #1{\expandafter\dodoglobal\expandafter\let\csname\??flag#1\endcsname\zerocount}
+\unexpanded\def\resetflag#1{\expandafter\dodoglobal\expandafter\let\csname\??flag#1\endcsname\plusone}
-\def\flag#1{\csname\@flg@#1\endcsname}
+\def\flag#1{\csname\??flag#1\endcsname}
\def\doifelseflagged#1%
- {\expandafter\ifx\csname\@flg@#1\endcsname\relax
+ {\expandafter\ifx\csname\??flag#1\endcsname\relax
\expandafter\secondoftwoarguments
- \else\ifcase\csname\@flg@#1\endcsname
+ \else\ifcase\csname\??flag#1\endcsname
\doubleexpandafter\firstoftwoarguments
\else
\doubleexpandafter\secondoftwoarguments
\fi\fi}
\def\doifnotflagged#1%
- {\expandafter\ifx\csname\@flg@#1\endcsname\relax
+ {\expandafter\ifx\csname\??flag#1\endcsname\relax
\expandafter\firstofoneargument
- \else\ifcase\csname\@flg@#1\endcsname
+ \else\ifcase\csname\??flag#1\endcsname
\doubleexpandafter\gobbleoneargument
\else
\doubleexpandafter\firstofoneargument
\fi\fi}
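%D For instance (\type {mymode} is just a demo flag name):
%D
%D \starttyping
%D \setflag{mymode}   \doifelseflagged{mymode}{yes}{no} % yes
%D \resetflag{mymode} \doifelseflagged{mymode}{yes}{no} % no
%D \stoptyping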
-\def\inheritparameter[#1]#2[#3]#4[#5]% tag tokey fromkey
+\unexpanded\def\inheritparameter[#1]#2[#3]#4[#5]% tag tokey fromkey
{\expandafter\def\csname#1#3\expandafter\endcsname\expandafter{\csname#1#5\endcsname}}
-% \buildarray[test][aa,bb,cc,dd,ee,ff]
-% \setarrayelement{test}{1}{qq}
-% \arrayelement{test}{1}
-% \arraylength{test}
-%
-% \def\buildarray[#1][#2]%
-% {\scratchcounter=0
-% \def\docommand##1%
-% {\advance\scratchcounter by 1
-% \setvalue{@@aa#1\the\scratchcounter}{##1}}%
-% \processcommalist[#2]\docommand
-% \setevalue{@@aa#1}{\the\scratchcounter}}%
-%
-% \def\setarrayelement#1#2{\setvalue{@@aa#1#2}}
-% \def\arrayelement #1#2{\getvalue{@@aa#1#2}}
-% \def\arraylength #1{\getvalue{@@aa#1}}
-
-% \newsignal\junksignal
-%
-% \def\setjunksignal%
-% {\ifhmode
-% \hskip\junksignal
-% \let\removejunkspaces\doremovejunkspaces
-% \else
-% \let\removejunkspaces\relax
-% \fi}
-%
-% \def\doremovejunkspaces%
-% {\doloop{\ifdim\lastskip=\junksignal\unskip\else\exitloop\fi}}
-
-\def\dodoifnonzeropositiveelse#1#2\end % #3#4%
+\def\syst_helpers_if_non_zero_positive_else#1#2\end % #3#4%
{\ifx#1\relax
\ifcase\scratchcounter
\endgroup
@@ -6631,45 +6526,42 @@
\fi}
\def\doifnonzeropositiveelse#1%
- {\begingroup\afterassignment\dodoifnonzeropositiveelse\scratchcounter=0#1\relax\empty\end}
+ {\begingroup\afterassignment\syst_helpers_if_non_zero_positive_else\scratchcounter=0#1\relax\empty\end}
% here ?
-\def\dosetrawvalue #1#2#3{\expandafter \def\csname#1#2\endcsname{#3}}
-\def\dosetrawevalue#1#2#3{\expandafter\edef\csname#1#2\endcsname{#3}}
-\def\dosetrawgvalue#1#2#3{\expandafter\gdef\csname#1#2\endcsname{#3}}
-\def\dosetrawxvalue#1#2#3{\expandafter\xdef\csname#1#2\endcsname{#3}}
+\unexpanded\def\dosetrawvalue #1#2#3{\expandafter \def\csname#1#2\endcsname{#3}}
+\unexpanded\def\dosetrawevalue#1#2#3{\expandafter\edef\csname#1#2\endcsname{#3}}
+\unexpanded\def\dosetrawgvalue#1#2#3{\expandafter\gdef\csname#1#2\endcsname{#3}}
+\unexpanded\def\dosetrawxvalue#1#2#3{\expandafter\xdef\csname#1#2\endcsname{#3}}
-\def\getrawparameters {\dogetparameters\dosetrawvalue }
-\def\getraweparameters {\dogetparameters\dosetrawevalue}
-\def\getrawgparameters {\dogetparameters\dosetrawgvalue}
-\def\getrawxparameters {\dogetparameters\dosetrawxvalue}
+\unexpanded\def\getrawparameters {\dogetparameters\dosetrawvalue }
+\unexpanded\def\getraweparameters {\dogetparameters\dosetrawevalue}
+\unexpanded\def\getrawgparameters {\dogetparameters\dosetrawgvalue}
+\unexpanded\def\getrawxparameters {\dogetparameters\dosetrawxvalue}
-\def\globalgetrawparameters{\dogetparameters\dosetrawgvalue} % obsolete
-
-\def\splitskip#1%
- {\scratchskip#1\relax
- \dimen0\scratchskip
- \dimen2\gluestretch\scratchskip
- \dimen4\glueshrink\scratchskip}
+\unexpanded\def\globalgetrawparameters{\dogetparameters\dosetrawgvalue} % obsolete
-\newcount\modcounter
+%D Sort of obsolete:
-\def\dosetmodulo#1#2#3%
- {\modcounter#1\divide\modcounter#2\multiply\modcounter#2%
- #3#1\advance#3-\modcounter}
+\newcount\c_syst_helpers_mod
-\def\dosetdivision#1#2#3%
+\unexpanded\def\dosetmodulo#1#2#3%
+ {\c_syst_helpers_mod#1\divide\c_syst_helpers_mod#2\multiply\c_syst_helpers_mod#2%
+ #3#1\advance#3-\c_syst_helpers_mod}
+
+\unexpanded\def\dosetdivision#1#2#3%
{#3#1\divide#3 #2\relax}
-\def\DoMod#1by#2to#3{\dosetmodulo {#1}{#2}{#3}}
-\def\DoDiv#1by#2to#3{\dosetdivision{#1}{#2}{#3}}
+\unexpanded\def\DoMod#1by#2to#3{\dosetmodulo {#1}{#2}{#3}}
+\unexpanded\def\DoDiv#1by#2to#3{\dosetdivision{#1}{#2}{#3}}
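%D For example (\type {\myremainder} and \type {\myquotient} are just demo
%D counters; the division truncates):
%D
%D \starttyping
%D \newcount\myremainder \DoMod 17 by 5 to \myremainder % \myremainder becomes 2
%D \newcount\myquotient  \DoDiv 17 by 5 to \myquotient  % \myquotient  becomes 3
%D \stoptyping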
-\def\dounprotected#1\par
+\def\syst_helpers_unprotected#1\par
{#1\protect}
-\def\unprotected
- {\unprotect\dounprotected}
+\unexpanded\def\unprotected
+ {\unprotect
+ \syst_helpers_unprotected}
% awaiting the definitive implementation
@@ -6683,20 +6575,21 @@
\def\elapsedtime {\ctxcommand{elapsedtime()}}
\let\elapsedseconds \elapsedtime
-\newcount\featuretest
+\newcount\c_syst_helpers_test_feature_n
\unexpanded\def\testfeature#1#2%
- {\def\dotestfeature
- {\advance\featuretest \plusone
- \ifnum\featuretest>#1\else#2\expandafter\dotestfeature\fi}%
+ {\def\syst_helpers_test_feature_step
+ {\advance\c_syst_helpers_test_feature_n\plusone
+ \ifnum\c_syst_helpers_test_feature_n>#1\else#2\expandafter\syst_helpers_test_feature_step\fi}%
\retestfeature}
-\def\retestfeature % timer support is new per 10/5/2005
+\unexpanded\def\retestfeature % timer support is new per 10/5/2005
{\bgroup
\ifcase\interactionmode\let\wait\relax\fi
\writestatus\m!system{starting feature test}\wait
\resettimer
- \featuretest\zerocount \dotestfeature
+ \c_syst_helpers_test_feature_n\zerocount
+ \syst_helpers_test_feature_step
\writestatus\m!system{feature test done (\elapsedseconds s)}%
\wait
\egroup}
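%D A typical test run, here with a dummy body (\type {\scratchbox} is one of
%D the system scratch boxes); the body is repeated the given number of times
%D and the elapsed time is reported:
%D
%D \starttyping
%D \testfeature{1000}{\setbox\scratchbox\hbox{some stuff}}
%D \stoptyping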
@@ -6719,7 +6612,7 @@
%D \freezedimenmacro\leftmargindistance
%D \stoptyping
-\def\freezedimenmacro#1%
+\unexpanded\def\freezedimenmacro#1%
{\edef#1{\the\dimexpr#1}}
%D The next macro negates a macro (dimension or number, or actually, whatever).
@@ -6735,191 +6628,19 @@
\def\gobbleassigndimen#1\\{}
\def\assigndimen#1#2%
- {\afterassignment\gobbleassigndimen#1=#2\!!zeropoint\\}
-
-\def\setusage#1%
- {\expandafter\let\csname#1\endcsname\iftrue}
-
-\def\resetusage#1%
- {\expandafter\let\csname#1\endcsname\iffalse}
-
-\def\ifusage#1%
- {\ifcsname#1\endcsname\else
- \resetusage{#1}%
- \fi
- \csname#1\endcsname}
-
-%D Very handy, more efficient than \type{{}}, and more readable
-%D than \type {\empty}.
-
-\let\donothing\empty
+ {\afterassignment\gobbleassigndimen#1=#2\zeropoint\\}
-% The following macros are used in XML handling.
+\unexpanded\def\appended#1#2#3{\expandafter#1\expandafter#2\expandafter{#2#3}}
+\unexpanded\def\appendvalue #1{\expandafter\appended\expandafter \def\csname#1\endcsname}
+\unexpanded\def\appendgvalue#1{\expandafter\appended\expandafter\gdef\csname#1\endcsname}
-\setvalue{@u@s@"}#1#2"{#2} \setvalue{@g@s@"}#1#2"{\scratchtoks{#2}}
-\setvalue{@u@s@'}#1#2'{#2} \setvalue{@g@s@'}#1#2'{\scratchtoks{#2}}
-\setvalue{@u@s@ }#1#2 {#2} \setvalue{@g@s@ }#1#2 {\scratchtoks{#2}}
-
-\def\unstringed#1{\csname\ifcsname @u@s@#1\endcsname @u@s@#1\else\s!empty\fi\endcsname#1}
-\def\grabstring#1{\csname\ifcsname @g@s@#1\endcsname @g@s@#1\else\s!empty\fi\endcsname#1}
-
-\def\dowithgrabbedstring#1%
- {\def\@@dowithgrabbedstring{#1}%
- \afterassignment\@@dowithgrabbedstring\grabstring}
-
-\def\expifequalelse#1#2%
- {\@@ifequal#1\relax\relax\@@and#2\relax\relax\@@then}
-
-\def\@@ifequal#1#2\@@and#3%
- {\ifx#1\relax
- \ifx#3\relax
- \doubleexpandafter\@@if@@equal@@true
- \else
- \doubleexpandafter\@@if@@equal@@false
- \fi
- \else
- \ifx#3\relax
- \tripleexpandafter\@@if@@equal@@false
- \else\ifx#1#3%
- % go on
- \else
- \tripleexpandafter\@@if@@equal@@false
- \fi\fi
- \fi
- \@@ifequal#2\@@and}
-
-\def\@@if@@equal@@true #1\@@then#2#3{#2}
-\def\@@if@@equal@@false#1\@@then#2#3{#3}
-
-\def\appended#1#2#3{\expandafter#1\expandafter#2\expandafter{#2#3}}
-\def\appendvalue #1{\expandafter\appended\expandafter \def\csname#1\endcsname}
-\def\appendgvalue#1{\expandafter\appended\expandafter\gdef\csname#1\endcsname}
-
-\def\prepended#1#2#3%
- {\scratchtoks{#3}%
+\unexpanded\def\prepended#1#2#3%
+ {\t_syst_helpers_scratch{#3}%
\expandafter\expandafter\expandafter#1\expandafter\expandafter\expandafter#2\expandafter\expandafter\expandafter
- {\expandafter\the\expandafter\scratchtoks#2}}
+ {\expandafter\the\expandafter\t_syst_helpers_scratch#2}}
-\def\prependvalue #1{\expandafter\prepended\expandafter \def\csname#1\endcsname}
-\def\prependgvalue#1{\expandafter\prepended\expandafter\gdef\csname#1\endcsname}
-
-%D \macros
-%D {compresscommacommandnrs,compresscommalistnrs,compressedcommalistnrs,
-%D compresscommacommand,compresscommalist,compressedcommalist,
-%D reversecommacommand,reversecommalist,reversedcommalist}
-%D
-%D The following two list processing macros are needed by Taco's
-%D bibliography module. The numbers compressor converts the
-%D list in a list of ranges. The normal compressor remove duplicate
-%D and empty entries.
-%D
-%D This is now obsolete (and more a \LUA\ thing anyway).
-
-\def\compresscommalistnrs[#1]%
- {\let\compressedlist\empty
- \!!counta\maxdimen
- \!!countb\maxdimen
- \processcommalist[#1]\docompresslistnrs
- \ifnum\!!counta=\maxdimen\else\dodocompresslistnrs\fi}
-
-\def\compresscommacommandnrs[#1]%
- {\normalexpanded{\noexpand\compresscommalistnrs[#1]}}
-
-\def\docompresslistnrs#1%
- {\edef\commalistelement{#1}%
- \ifx\commalistelement\empty\else
- \ifnum\!!counta=\maxdimen
- \!!counta\commalistelement\relax
- \!!countb\!!counta
- \else
- \advance\!!countb\plusone
- \ifnum\commalistelement>\!!countb
- \advance\!!countb\minusone
- \dodocompresslistnrs
- \!!counta\commalistelement\relax
- \!!countb\!!counta
- \fi
- \fi
- \fi}
-
-\def\dodocompresslistnrs
- {\edef\compressedlist
- {\ifx\compressedlist\empty\else\compressedlist,\fi
- {\the\!!counta}{\ifnum\!!countb>\!!counta\the\!!countb\fi}}}
-
-%D \def\test#1{{\tttf#1->\compresscommalistnrs[#1]\defconvertedcommand\ascii\compressedlist\ascii}}
-%D \startlines
-%D \test{}
-%D \test{1}
-%D \test{1,3}
-%D \test{1,3,4}
-%D \test{1,3,3,4,5}
-%D \test{1,3,3,4,5,8}
-%D \test{1,3,3,4,5,5,8,10}
-%D \test{1,3,4,5,8,10,11}
-%D \test{1,,3,,4,,5,,8,,10,,11,}
-%D \stoplines
-
-\def\compresscommalist[#1]%
- {\let\compressedlist\empty
- \let\!!stringa\empty
- \processcommalist[#1]\docompresslist}
-
-\def\compresscommacommand[#1]%
- {\normalexpanded{\noexpand\compresscommalist[#1]}}
-
-\def\docompresslist#1%
- {\edef\commalistelement{#1}%
- \ifx\commalistelement\empty \else
- \ifx\!!stringa\commalistelement \else
- \ifx\compressedlist\empty
- \def\compressedlist{#1}%
- \else
- \appended\def\compressedlist{,#1}%
- \fi
- \let\!!stringa\commalistelement
- \fi
- \fi}
-
-%D \def\test#1{{\tttf#1->\compresscommalist[#1]\defconvertedcommand\ascii\compressedlist\ascii}}
-%D \startlines
-%D \test{}
-%D \test{1}
-%D \test{1,3}
-%D \test{1,3,4}
-%D \test{1,3,3,4,5}
-%D \test{1,3,3,4,5,8}
-%D \test{1,3,3,4,5,5,8,10}
-%D \test{1,3,4,5,8,10,11}
-%D \test{1,,3,,4,,5,,8,,10,,11,}
-%D \stoplines
-
-\def\reversecommalist[#1]%
- {\let\reversedlist\empty
- \processcommalist[#1]\doreverselist}
-
-\def\doreverselist#1%
- {\ifx\reversedlist\empty
- \def\reversedlist{#1}%
- \else
- \prepended\def\reversedlist{#1,}%
- \fi}
-
-\def\reversecommacommand[#1]%
- {\normalexpanded{\noexpand\reversecommalist[#1]}}
-
-%D \def\test#1{{\tttf#1->\reversecommalist[#1]\defconvertedcommand\ascii\reversedlist\ascii}}
-%D \startlines
-%D \test{}
-%D \test{1}
-%D \test{1,3}
-%D \test{1,3,4}
-%D \test{1,3,3,4,5}
-%D \test{1,3,3,4,5,8}
-%D \test{1,3,3,4,5,5,8,10}
-%D \test{1,3,4,5,8,10,11}
-%D \test{1,,3,,4,,5,,8,,10,,11,}
-%D \stoplines
+\unexpanded\def\prependvalue #1{\expandafter\prepended\expandafter \def\csname#1\endcsname}
+\unexpanded\def\prependgvalue#1{\expandafter\prepended\expandafter\gdef\csname#1\endcsname}
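%D For instance (\type {mytext} is just a demo value name):
%D
%D \starttyping
%D \setvalue   {mytext}{Hans}
%D \appendvalue{mytext}{ Hagen} % the value becomes: Hans Hagen
%D \stoptyping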
%D \macros
%D {dowithrange}
@@ -6927,44 +6648,11 @@
%D This one is for Mojca Miklavec, who made me aware of the fact that
%D \type {page-imp.tex} was not the best place to hide it.
-\def\dowithrange#1#2% #2 takes number
+\unexpanded\def\dowithrange#1#2% #2 takes number
{\splitstring#1\at:\to\fromrange\and\torange
\ifx\torange\empty\let\torange\fromrange\fi
\dostepwiserecurse\fromrange\torange1{#2{\recurselevel}}}
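%D For instance (\type {\showpage} is just a demo command):
%D
%D \starttyping
%D \def\showpage#1{[#1]}
%D
%D \dowithrange{3:6}\showpage % gives [3][4][5][6]
%D \dowithrange{8}\showpage   % gives [8]
%D \stoptyping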
-
-%D \macros {uncompresslist}
-%D
-%D When given a list like \type{1,4-7,9} as argument, this macro
-%D will store the expanded commalist in \type{\uncompressedlist}.
-%D
-%D \startbuffer
-%D \def\MojcaHasToDoTheTasks[#1]#2%
-%D {{\uncompresslist[#1]%
-%D \def\processitem##1{I have to do ##1 #2\par}%
-%D \processcommacommand[\uncompressedlist]\processitem}}
-%D
-%D \MojcaHasToDoTheTasks [1-4,7,9-11] {until tomorrow}
-%D \stopbuffer
-%D
-%D Here is an example of how to use \type {\uncompresslist}:
-%D \typebuffer
-%D
-%D The output of this is:
-%D
-%D \getbuffer
-\def\uncompresslist[#1]% by TH
- {\let\uncompressedlist\empty
- \def\docompressedlistitem##1-##2-%
- {\expandafter\dorecurse\expandafter
- {\the\numexpr1+##2-##1\relax}%
- {\expandafter\appendtocommalist\expandafter{\the\numexpr##1-1+####1\relax}\uncompressedlist}}%
- \def\douncompresslist##1%
- {\doifinstringelse{-}{##1}
- {\docompressedlistitem##1-}
- {\appendtocommalist{##1}\uncompressedlist}}%
- \processcommalist[#1]\douncompresslist}
-
%D \macros
%D {ignoreimplicitspaces}
%D
@@ -6977,7 +6665,7 @@
%D
%D \typebuffer \getbuffer
-\def\ignoreimplicitspaces
+\unexpanded\def\ignoreimplicitspaces
{\doifnextcharelse\relax\relax\relax}
%D \macros
@@ -6985,74 +6673,39 @@
%D
%D Not that sophisticated, but sometimes used (like in metafun).
-\def\doprocesswords#1 #2\od
- {\doifsomething{#1}{\processword{#1} \doprocesswords#2 \od}}
-% {\doifsomething{\detokenize{#1}}{\processword{#1} \doprocesswords#2 \od}} % to be tested
+\def\syst_helpers_process_word#1 #2\_e_o_w_
+ {\doifsomething{#1}{\processword{#1} \syst_helpers_process_word#2 \_e_o_w_}}
\def\processwords#1%
- {\doprocesswords#1 \od}% no \unskip
+ {\syst_helpers_process_word#1 \_e_o_w_}% no \unskip
\let\processword\relax
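%D A usage sketch; one redefines \type {\processword} and then feeds a
%D string:
%D
%D \starttyping
%D \def\processword#1{(#1)}
%D
%D \processwords{one two three} % gives (one) (two) (three)
%D \stoptyping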
-% new
-%
-% \startnointerference
-% all kind of code
-% \stopnointerference
+%D \macros
+%D {startnointerference}
+%D
+%D \starttyping
+%D \startnointerference
+%D all kind of code
+%D \stopnointerference
+%D \stoptyping
-\newbox\nointerferencebox
+\newbox\b_syst_helpers_no_interference
\unexpanded\def\startnointerference % not even grouped !
- {\setbox\nointerferencebox\vbox
+ {\setbox\b_syst_helpers_no_interference\vbox
\bgroup}
\unexpanded\def\stopnointerference
{\egroup
- \setbox\nointerferencebox\emptybox}
-
-% \def\appendtovaluelist#1#2%
-% {\ifcsname#1\endcsname
-% \expandafter\ifx\csname#1\endcsname\empty
-% \expandafter\def\csname#1\endcsname{#2}%
-% \else
-% \expandafter\def\csname#1\expandafter\expandafter\expandafter\endcsname
-% \expandafter\expandafter\expandafter{\csname#1\endcsname,#2}%
-% \fi
-% \else
-% \expandafter\def\csname#1\endcsname{#2}%
-% \fi}
-%
-% or
-%
-% \def\appendtovaluelist#1%
-% {\ifcsname#1\endcsname
-% \expandafter\ifx\csname#1\endcsname\empty
-% \expandafter\noappendtovaluelist\csname#1\expandafter\expandafter\expandafter\endcsname
-% \else
-% \expandafter\doappendtovaluelist\csname#1\expandafter\expandafter\expandafter\endcsname
-% \fi
-% \else
-% \expandafter\noappendtovaluelist\csname#1\expandafter\endcsname
-% \fi}
-
-% \def\doappendtovaluelist#1#2{\expandafter\def\expandafter#1\expandafter{#1,#2}}
-% \def\noappendtovaluelist#1#2{\def#1{#2}}
-
-% \appendtovaluelist{mylist}{aap}
-% \appendtovaluelist{mylist}{noot}
-% \appendtovaluelist{mylist}{mies}
-
-% \showvalue{mylist}
+ \setbox\b_syst_helpers_no_interference\emptybox}
%D A variant for \type {\executeifdefined}:
-% \def\expandcheckedcsname#1#2#3%
-% {\csname#1\ifcsname#1#2\endcsname#2\else#3\fi\endcsname}
-
\def\expandcheckedcsname#1#2% #2 is often a \xxxparameter so let's expand it once
- {\normalexpanded{\noexpand\doexpandcheckedcsname{#1}{#2}}}
+ {\normalexpanded{\noexpand\syst_helpers_expand_checked_csname{#1}{#2}}}
-\def\doexpandcheckedcsname#1#2#3%
+\def\syst_helpers_expand_checked_csname#1#2#3%
{\csname#1\ifcsname#1#2\endcsname#2\else#3\fi\endcsname}
%D Signal. Some fonts have a char0 rendering so we need to make sure that it
@@ -7060,7 +6713,41 @@
\unexpanded\def\signalcharacter{\char\zerocount} % \zwj
-%D Here are some nasty helpers:
+%D A few special variants of commands are defined here. Some more will be moved here (e.g.
+%D from the table modules).
+
+\def\dodirectdoubleempty#1#2% used in math (lookahead issues)
+ {\ifx#2[%
+ \expandafter\syst_helpers_direct_double_empty_one_yes
+ \else
+ \expandafter\syst_helpers_direct_double_empty_one_nop
+ \fi#1#2}
+
+\def\syst_helpers_direct_double_empty_one_yes#1[#2]#3%
+ {\ifx#3[\else\expandafter\syst_helpers_direct_double_empty_two_nop\fi#1[#2]#3}
+
+\def\syst_helpers_direct_double_empty_one_nop#1{#1[][]}
+\def\syst_helpers_direct_double_empty_two_nop#1[#2]{#1[#2][]}
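%D A possible usage sketch (\type {\MyCommand} is just a demo name):
%D
%D \starttyping
%D \def\MyCommand[#1][#2]{[#1][#2]}
%D
%D \dodirectdoubleempty\MyCommand[one][two] % like \MyCommand[one][two]
%D \dodirectdoubleempty\MyCommand[one]      % like \MyCommand[one][]
%D \stoptyping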
+
+%D Used in math definitions (in an \type {\edef}):
+
+%D \startbuffer
+%D [\docheckedpair{}]
+%D [\docheckedpair{a}]
+%D [\docheckedpair{a,b}]
+%D [\docheckedpair{a,b,c}]
+%D \stopbuffer
+%D
+%D \typebuffer \startlines \getbuffer \stoplines
+
+\def\docheckedpair#1%
+ {\syst_helpers_checked_pair#1,,\_o_e_p_}
+
+\def\syst_helpers_checked_pair#1,#2,#3\_o_e_p_
+ {#1,#2}
+
+%D Here are some nasty helpers. They can be used to fill often expanded token
+%D lists efficiently (see tabulate for an example).
\def\constantnumber#1%
{\ifcase#1\zerocount
@@ -7111,8 +6798,53 @@
{#1}%
\fi}
-%D These can be used when constructing often reused token lists,
-%D as we do with tabulates.
+% %D Maybe some day (moved from cont-new):
+% %D
+% %D \starttyping
+% %D \the\dimexpr(\dimchoice {7pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
+% %D \the\dimexpr(\dimchoice{11pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
+% %D \the\dimexpr(\dimchoice{14pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
+% %D \stoptyping
+%
+% \def\syst_helpers_choice_finish#1\empty{}
+%
+% \def\syst_helpers_choice_dim#1#2#3%
+% {\ifdim#1#2%
+% #3\expandafter\syst_helpers_choice_finish
+% \else
+% \expandafter\syst_helpers_choice_dim
+% \fi{#1}}
+%
+% \def\syst_helpers_choice_num#1#2#3%
+% {\ifnum#1#2%
+% #3\expandafter\syst_helpers_choice_finish
+% \else
+% \expandafter\syst_helpers_choice_num
+% \fi{#1}}
+%
+% \def\dimchoice#1#2{\syst_helpers_choice_dim{#1}#2{=#1}{#1}\empty}
+% \def\numchoice#1#2{\syst_helpers_choice_num{#1}#2{=#1}{#1}\empty}
+
+%D \macros
+%D {getsubstring}
+%D \startbuffer
+%D
+%D \getsubstring{4}{}{Who Wants This}
+%D \getsubstring{4}{9}{Who Wants This}
+%D \getsubstring{9}{-2}{Who Wants This}
+%D \getsubstring{1}{5}{Who Wants This}
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D \startlines
+%D \getbuffer
+%D \stoplines
+
+% expandable:
+
+%def\getsubstring#1#2#3{\cldcontext{utf.sub([[#3]],tonumber("#1"),tonumber("#2"))}}
+\def\getsubstring#1#2#3{\ctxcommand{getsubstring(\!!bs#3\!!es,"#1","#2")}}
\protect \endinput
@@ -7174,3 +6906,37 @@
% nothing
% }
% \stopchoice
+
+% \def\appendtovaluelist#1#2%
+% {\ifcsname#1\endcsname
+% \expandafter\ifx\csname#1\endcsname\empty
+% \expandafter\def\csname#1\endcsname{#2}%
+% \else
+% \expandafter\def\csname#1\expandafter\expandafter\expandafter\endcsname
+% \expandafter\expandafter\expandafter{\csname#1\endcsname,#2}%
+% \fi
+% \else
+% \expandafter\def\csname#1\endcsname{#2}%
+% \fi}
+%
+% or
+%
+% \def\appendtovaluelist#1%
+% {\ifcsname#1\endcsname
+% \expandafter\ifx\csname#1\endcsname\empty
+% \expandafter\noappendtovaluelist\csname#1\expandafter\expandafter\expandafter\endcsname
+% \else
+% \expandafter\doappendtovaluelist\csname#1\expandafter\expandafter\expandafter\endcsname
+% \fi
+% \else
+% \expandafter\noappendtovaluelist\csname#1\expandafter\endcsname
+% \fi}
+%
+% \def\doappendtovaluelist#1#2{\expandafter\def\expandafter#1\expandafter{#1,#2}}
+% \def\noappendtovaluelist#1#2{\def#1{#2}}
+%
+% \appendtovaluelist{mylist}{aap}
+% \appendtovaluelist{mylist}{noot}
+% \appendtovaluelist{mylist}{mies}
+%
+% \showvalue{mylist}
diff --git a/Master/texmf-dist/tex/context/base/syst-con.lua b/Master/texmf-dist/tex/context/base/syst-con.lua
index 2eaf98fd730..48f02da3a5d 100644
--- a/Master/texmf-dist/tex/context/base/syst-con.lua
+++ b/Master/texmf-dist/tex/context/base/syst-con.lua
@@ -14,7 +14,7 @@ the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
--ldx]]--
local tonumber = tonumber
-local utfchar = unicode.utf8.char
+local utfchar = utf.char
local gsub, format = string.gsub, string.format
function converters.hexstringtonumber(n) tonumber(n,16) end
@@ -39,3 +39,24 @@ function commands.format(fmt,...) -- used ?
fmt = gsub(fmt,"@","%%")
context(fmt,...)
end
+
+local cosd, sind, tand = math.cosd, math.sind, math.tand
+local cos, sin, tan = math.cos, math.sin, math.tan
+
+-- unfortunately %s spits out: 6.1230317691119e-017
+--
+-- function commands.sind(n) context(sind(n)) end
+-- function commands.cosd(n) context(cosd(n)) end
+-- function commands.tand(n) context(tand(n)) end
+--
+-- function commands.sin (n) context(sin (n)) end
+-- function commands.cos (n) context(cos (n)) end
+-- function commands.tan (n) context(tan (n)) end
+
+function commands.sind(n) context("%0.6f",sind(n)) end
+function commands.cosd(n) context("%0.6f",cosd(n)) end
+function commands.tand(n) context("%0.6f",tand(n)) end
+
+function commands.sin (n) context("%0.6f",sin (n)) end
+function commands.cos (n) context("%0.6f",cos (n)) end
+function commands.tan (n) context("%0.6f",tan (n)) end
diff --git a/Master/texmf-dist/tex/context/base/syst-con.mkiv b/Master/texmf-dist/tex/context/base/syst-con.mkiv
index 6ef734c8fcf..de8ed597e1a 100644
--- a/Master/texmf-dist/tex/context/base/syst-con.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-con.mkiv
@@ -134,9 +134,13 @@
% \let\calculatecos\gobbleoneargument
% \let\calculatetan\gobbleoneargument
-\def\setcalculatedsin#1#2{\edef#1{\cldcontext{math.sind(#2)}}}
-\def\setcalculatedcos#1#2{\edef#1{\cldcontext{math.cosd(#2)}}}
-\def\setcalculatedtan#1#2{\edef#1{\cldcontext{math.tand(#2)}}}
+% \def\setcalculatedsin#1#2{\edef#1{\cldcontext{math.sind(#2)}}} % jit-unsafe
+% \def\setcalculatedcos#1#2{\edef#1{\cldcontext{math.cosd(#2)}}} % jit-unsafe
+% \def\setcalculatedtan#1#2{\edef#1{\cldcontext{math.tand(#2)}}} % jit-unsafe
+
+\def\setcalculatedsin#1#2{\edef#1{\ctxcommand{sind(#2)}}}
+\def\setcalculatedcos#1#2{\edef#1{\ctxcommand{cosd(#2)}}}
+\def\setcalculatedtan#1#2{\edef#1{\ctxcommand{tand(#2)}}}
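% A usage sketch (\MySine is just a demo macro):
%
% \setcalculatedsin\MySine{30} % \MySine becomes 0.500000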
\def\formatted#1{\ctxcommand{format(#1)}}
\unexpanded\def\format #1{\ctxcommand{format(#1)}}
diff --git a/Master/texmf-dist/tex/context/base/syst-fnt.mkiv b/Master/texmf-dist/tex/context/base/syst-fnt.mkiv
index f5c9ea34979..625b952b7e4 100644
--- a/Master/texmf-dist/tex/context/base/syst-fnt.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-fnt.mkiv
@@ -38,7 +38,7 @@
\def\mathsubcombined {\fontdimen17 } % to be remapped
\def\mathaxisheight {\fontdimen22 } % to be remapped
-\def\currentspaceskip {\interwordspace\!!plus\interwordstretch\!!minus\interwordshrink\relax}
+\def\currentspaceskip {\interwordspace\s!plus\interwordstretch\s!minus\interwordshrink\relax}
\def\mathstacktotal {\dimexpr\Umathstacknumup\scriptstyle+\Umathstackdenomdown\scriptstyle\relax}
\def\mathstackvgap {\Umathstackvgap\scriptstyle}
diff --git a/Master/texmf-dist/tex/context/base/syst-gen.mkii b/Master/texmf-dist/tex/context/base/syst-gen.mkii
index bb0f54d11fa..8b71b77d5fc 100644
--- a/Master/texmf-dist/tex/context/base/syst-gen.mkii
+++ b/Master/texmf-dist/tex/context/base/syst-gen.mkii
@@ -657,6 +657,11 @@
\def\resetvalue #1{\expandafter\let\csname#1\endcsname\empty}
\def\ignorevalue#1#2{\expandafter\let\csname#1\endcsname\empty}
+\def\setuvalue #1{\normalprotected\expandafter \def\csname#1\endcsname}
+\def\setuevalue #1{\normalprotected\expandafter\edef\csname#1\endcsname}
+\def\setugvalue #1{\normalprotected\expandafter\gdef\csname#1\endcsname}
+\def\setuxvalue #1{\normalprotected\expandafter\xdef\csname#1\endcsname}
+
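For illustration, a hedged sketch of the new definers (the MyTag name is hypothetical): they behave like the unprotected \setvalue family defined nearby, but wrap the definition in \normalprotected.

    \setuvalue {MyTag}{one}   % protected variant of \setvalue  (\def  via \csname)
    \setuevalue{MyTag}{two}   % protected variant of \setevalue (\edef via \csname)
    \setugvalue{MyTag}{three} % protected variant of \setgvalue (global \gdef)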
%D \macros
%D {globallet,glet}
%D
diff --git a/Master/texmf-dist/tex/context/base/syst-ini.mkiv b/Master/texmf-dist/tex/context/base/syst-ini.mkiv
index 983bedc52a7..59313ba494c 100644
--- a/Master/texmf-dist/tex/context/base/syst-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-ini.mkiv
@@ -11,16 +11,14 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D We used to load plain \TEX\ in a special way, but redefining
-%D a couple of primitives so that for instance font loading was
-%D ignored. For those interested, this loader is found in
-%D \type {syst-tex.tex}. Some of the comment's are Don Knuth's
-%D and more of it can be found in the plain \TEX\ format.
-
-%D Characters can have special states, that can be triggered
-%D by setting their category coded. Some are preset, others
-%D are to be set as soon as possible, otherwise we cannot
-%D define any useful macros.
+%D We used to load plain \TEX\ in a special way, redefining a couple of
+%D primitives so that for instance font loading was ignored. For those interested,
+%D this loader is found in \type {syst-tex.tex}. Some of the comments are Don
+%D Knuth's and more of them can be found in the plain \TEX\ format.
+%D
+%D Characters can have special states that can be triggered by setting their
+%D category code. Some are preset, others are to be set as soon as possible,
+%D otherwise we cannot define any useful macros.
%catcode`\^^@ = 9 % ascii null is ignored
%catcode`\\ = 0 % backslash is TeX escape character
@@ -58,8 +56,7 @@
\catcode`\^^Z=9
-%D It makes sense to know what engine we're running so let's
-%D try to deduce it.
+%D It makes sense to know what engine we're running so let's try to deduce it.
\chardef\unknownengine = 0
\chardef\pdftexengine = 1
@@ -117,16 +114,15 @@
}
\fi
-%D \ETEX\ has a not so handy way of telling you the version number,
-%D i.e. the revision number has a period in it:
+%D \ETEX\ has a not so handy way of telling you the version number, i.e. the revision
+%D number has a period in it:
\long\def\gobbleoneargument#1{} % will be defined later on anyway
\mathchardef\etexversion = \numexpr\eTeXversion*100+\expandafter\gobbleoneargument\eTeXrevision\relax
-%D First we define a simplified version of the \CONTEXT\
-%D protection mechanism. Later we will implement a better
-%D variant.
+%D First we define a simplified version of the \CONTEXT\ protection mechanism.
+%D Later we will implement a better variant.
\def\unprotect
{\edef\protect
@@ -149,18 +145,21 @@
\let\bgroup={
\let\egroup=}
-%D Allocation of registers is done slightly different than in plain
-%D \TEX. First of all we use different reserved counters. We also
-%D don't implement a family handler because users are not supposed
-%D to implement their own math. We reserve the lowest 31 registers
-%D for scratch purposes. Keep in mind that in the core engine
-%D some registers are reserved: counters 0 upto 9, and counter 255.
+%D \macros
+%D {normalbgroup,normalgroup}
%D
-%D As with plain \TEX\ we recommend that macro designers always use
-%D \type {\global} assignments with respect to registers numbered 1,
-%D 3, 5 \unknown\ 31, and always non||\type {\global} assignments
-%D with respect to registers 0, 2, 4, \unknown\ 30. This will prevent
-%D \quote {save stack buildup} that might otherwise occur.
+%D No comment.
+
+%D Allocation of registers is done slightly differently than in plain \TEX. First of
+%D all we use different reserved counters. We also don't implement a family handler
+%D because users are not supposed to implement their own math. We reserve the lowest
+%D 31 registers for scratch purposes. Keep in mind that in the core engine some
+%D registers are reserved: counters 0 upto 9, and counter 255.
+%D
+%D As with plain \TEX\ we recommend that macro designers always use \type {\global}
+%D assignments with respect to registers numbered 1, 3, 5 \unknown\ 31, and always
+%D non||\type {\global} assignments with respect to registers 0, 2, 4, \unknown\ 30.
+%D This will prevent \quote {save stack buildup} that might otherwise occur.
%D
%D We reserve some registers for special (management) purposes:
@@ -212,11 +211,10 @@
\dimendef \dimen@i = 1 % global only
\dimendef \dimen@ii = 2
-%D So, effectively we start allocating from 256 and upwards. The
-%D inserts sit in the range 128 upto 254. Page numbers use the
-%D counters 0 upto 9 and the pagebox is 255. Users can use the
-%D scratch registers upto 31 without problem but all others are
-%D reserved.
+%D So, effectively we start allocating from 256 and upwards. The inserts sit in the
+%D range 128 upto 254. Page numbers use the counters 0 upto 9 and the pagebox is
+%D 255. Users can use the scratch registers upto 31 without problem but all others
+%D are reserved.
\let\wlog\gobbleoneargument % Let's get rid of this one.
@@ -254,12 +252,11 @@
%newlinechar=10 \def\outputnewlinechar{\rawcharacter{10}}
\newlinechar=10 \edef\outputnewlinechar{^^J}
-%D One reason to start high with allocation is that it permits us to
-%D allocate consecutive ranges more easily, for instance if for \MPLIB\
-%D we want to allocate a continuous range of boxes. It also permits us
-%D to do a proper upward allocation for inserts. The current code
-%D evolved from code that dealt with older engines but as all engines
-%D now provide many registers we removed all traces.
+%D One reason to start high with allocation is that it permits us to allocate
+%D consecutive ranges more easily, for instance if for \MPLIB\ we want to allocate a
+%D continuous range of boxes. It also permits us to do a proper upward allocation
+%D for inserts. The current code evolved from code that dealt with older engines but
+%D as all engines now provide many registers we removed all traces.
\ifdefined\writestatus \else
\normalprotected\def\writestatus#1#2{\immediate\write16{#1: #2}}
@@ -286,29 +283,27 @@
\fi\fi
#1#2#3#4#5}
-%D Since the number of chars exceed 256 now, we can use \type
-%D {\chardef} instead of the more limited \type {\mathchardef}.
+%D Since the number of chars exceeds 256 now, we can use \type {\chardef} instead of
+%D the more limited \type {\mathchardef}.
\ifnum\texengine>\pdftexengine
\normalprotected\def\newbox {\syst_basics_allocate\c_syst_last_allocated_box \box \chardef\c_syst_max_allocated_register}
\normalprotected\def\newmarks{\syst_basics_allocate\c_syst_last_allocated_marks\marks\chardef\c_syst_max_allocated_register}
\fi
-%D Attributes are something very \LUATEX. In \CONTEXT\ you are not
-%D supposed to use the attributes directly but always allocate then
-%D first. For instance attribute~0 is reserved for special purposes
-%D (this might change). Attributes in the range 128-1023 are private
-%D and should not be touched.
+%D Attributes are something very \LUATEX. In \CONTEXT\ you are not supposed to use
+%D the attributes directly but always allocate them first. For instance attribute~0
+%D is reserved for special purposes (this might change). Attributes in the range
+%D 128-1023 are private and should not be touched.
\ifnum\texengine=\luatexengine
\let\attributeunsetvalue\c_syst_min_counter_value % used to be \minusone
\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_min_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
\fi
-%D Not used by \CONTEXT\ but for instance \PICTEX\ needs it. It's a
-%D trick to force strings instead of tokens that take more memory.
-%D It's a trick to trick to force strings. This macro is never used
-%D in \CONTEXT.
+%D Not used by \CONTEXT\ but for instance \PICTEX\ needs it. It's a trick to force
+%D strings instead of tokens, which take more memory. This macro is never used in
+%D \CONTEXT\ itself.
\normalprotected\def\newhelp#1#2{\newtoks#1#1\expandafter{\csname#2\endcsname}}
@@ -318,8 +313,8 @@
%D scratchbox,
%D scratchtoks}
%D
-%D We now define a few scratch registers, so that successive
-%D loads at least have some available.
+%D We now define a few scratch registers, so that successive loads at least have
+%D some available.
\newcount \scratchcounter \newcount \globalscratchcounter
\newdimen \scratchdimen \newdimen \globalscratchdimen
@@ -343,21 +338,51 @@
\newdimen\scratchwidth
\newdimen\scratchheight
\newdimen\scratchdepth
+
\newdimen\scratchoffset
+\newdimen\scratchleftoffset
+\newdimen\scratchrightoffset
+\newdimen\scratchtopoffset
+\newdimen\scratchbottomoffset
+
\newdimen\scratchdistance
+
\newdimen\scratchhsize
\newdimen\scratchvsize
+\newdimen\scratchxoffset
+\newdimen\scratchyoffset
+\newdimen\scratchhoffset
+\newdimen\scratchvoffset
+
+\newdimen\scratchxposition
+\newdimen\scratchyposition
+
+\newcount\scratchnx
+\newcount\scratchny
+
+\newcount\scratchmx
+\newcount\scratchmy
+
+\newcount\scratchunicode
+
+\newdimen\scratchleftskip
+\newdimen\scratchrightskip
+\newdimen\scratchtopskip
+\newdimen\scratchbottomskip
+
%D More allocations:
-\newskip \zeroskip \zeroskip = 0pt plus 0pt minus 0pt
-\newdimen\zeropoint \zeropoint = 0pt
-\newdimen\onepoint \onepoint = 1pt
-\newdimen\halfapoint \halfapoint = 0.5pt
-\newdimen\maxdimen \maxdimen = 16383.99999pt
-\newdimen\onebasepoint \onebasepoint = 1bp
-\newdimen\scaledpoint \scaledpoint = 1sp
-\newdimen\thousandpoint \thousandpoint = 1000pt
+\newskip \zeroskip \zeroskip = 0pt plus 0pt minus 0pt
+\newdimen \zeropoint \zeropoint = 0pt
+\newdimen \onepoint \onepoint = 1pt
+\newdimen \halfapoint \halfapoint = 0.5pt
+\newdimen \maxdimen \maxdimen = 16383.99999pt
+\newdimen \onebasepoint \onebasepoint = 1bp
+\newdimen \scaledpoint \scaledpoint = 1sp
+\newdimen \thousandpoint \thousandpoint = 1000pt
+\newmuskip\zeromuskip \zeromuskip = 0mu
+\newmuskip\onemuskip \onemuskip = 1mu
\let\points \onepoint
\let\halfpoint\halfapoint
@@ -410,8 +435,8 @@
\def\doubleexpandafter{\expandafter\expandafter\expandafter}
\def\tripleexpandafter{\expandafter\doubleexpandafter\expandafter}
-%D We prefer the more readable variant than in plain
-%D \TEX. User should only use \type {\emptybox}:
+%D We prefer a more readable variant than the one in plain \TEX. Users should
+%D only use \type {\emptybox}:
\newbox\voidbox % public
@@ -425,8 +450,19 @@
\let\leavevmode\unvoidbox % we prefer to use \dontleavehmode
-%D Some expected plain variants follow. We don't reuse registers
-%D because we don't want clashes.
+%D \macros
+%D {dontcomplain}
+%D
+%D We need this one soon:
+
+\normalprotected\def\dontcomplain
+ {\hbadness\plustenthousand
+ \vbadness\plustenthousand
+ \hfuzz \maxdimen
+ \vfuzz \maxdimen}
+
+%D Some expected plain variants follow. We don't reuse registers because we
+%D don't want clashes.
\newdimen\p@ \p@ \onepoint
\newcount\m@ne \m@ne \minusone
@@ -440,8 +476,8 @@
\newbox \voidb@x
\newtoks \toks@
-%D We define \type {\newif} a la plain \TEX, but will
-%D redefine it later. As Knuth says:
+%D We define \type {\newif} a la plain \TEX, but will redefine it later. As
+%D Knuth says:
%D
%D \startnarrower
%D And here's a different sort of allocation: for example,
@@ -450,8 +486,7 @@
%D \newif\iffoo
%D \stoptyping
%D
-%D creates \type {\footrue}, \type {\foofalse} to go
-%D with \type {\iffoo}.
+%D creates \type {\footrue}, \type {\foofalse} to go with \type {\iffoo}.
%D \stopnarrower
\normalprotected\def\newif#1%
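In use, the plain-compatible definer behaves as sketched here:

    \newif\iffoo          % also provides \footrue and \foofalse
    \footrue
    \iffoo yes\else no\fi % typesets: yes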
@@ -479,8 +514,16 @@
\ifdefined\htdp \else \def\htdp#1{\dimexpr\ht#1+\dp#1\relax} \fi
-%D The catcode constants will be redefined in later catcode
-%D related modules but they can be used in the same way.
+%D A few shortcuts:
+
+\normalprotected\def\glet {\global \let }
+\normalprotected\def\udef {\normalprotected\def }
+\normalprotected\def\ugdef{\normalprotected\gdef}
+\normalprotected\def\uedef{\normalprotected\edef}
+\normalprotected\def\uxdef{\normalprotected\xdef}
+
+%D The catcode constants will be redefined in later catcode related modules
+%D but they can be used in the same way.
\chardef\escapecatcode = 0
\chardef\begingroupcatcode = 1
@@ -499,6 +542,11 @@
\chardef\commentcatcode = 14
\chardef\invalidcatcode = 15
+%D For a while we keep the following, as systems like tikz need it. Best
+%D not to use that one in \CONTEXT.
+
+\let\active\activecatcode
+
%D Constants to be used with \type {\currentgrouptype}.
\chardef\bottomlevelgroupcode = 0
@@ -527,13 +575,12 @@
\chardef\scrollmodecode = 2
\chardef\errorstopmodecode = 3
-%D Constants to be used with \type {\lastnodetype}. The \type
-%D {\lastnodetype} primitive is \ETEX\ compliant. The valid range is
-%D still -1 .. 15 and glyph nodes have number 0 (used to be char
-%D node) and ligature nodes are mapped to 7. That way macro packages
-%D can use the same symbolic names as in traditional \ETEX. Keep in
-%D mind that the internal node numbers are different and that there
-%D are more node types that 15.
+%D Constants to be used with \type {\lastnodetype}. The \type {\lastnodetype}
+%D primitive is \ETEX\ compliant. The valid range is still -1 .. 15 and glyph nodes
+%D have number 0 (used to be char node) and ligature nodes are mapped to 7. That way
+%D macro packages can use the same symbolic names as in traditional \ETEX. Keep in
+%D mind that the internal node numbers are different and that there are more node
+%D types than 15.
\chardef\charnodecode = 0
\chardef\hlistnodecode = 1
@@ -552,8 +599,8 @@
\chardef\unsetnodecode = 14
\chardef\mathsnodecode = 15
-%D Constants to be used with \type {\currentiftype}. I wonder if
-%D we will ever use these in \CONTEXT.
+%D Constants to be used with \type {\currentiftype}. I wonder if we will ever
+%D use these in \CONTEXT.
\chardef\charifcode = 1
\chardef\catifcode = 2
@@ -576,11 +623,11 @@
\chardef\csnameifcode = 19
\chardef\fontcharifcode = 20
-%D Of course we want even bigger log files, so we copied this
-%D from the \ETEX\ source files.
+%D Of course we want even bigger log files, so we copied this from the \ETEX\
+%D source files.
%D
-%D When watching such logs, beware of nasty side effects of
-%D \type {\scantokens}, as in:
+%D When watching such logs, beware of nasty side effects of \type {\scantokens},
+%D as in:
%D
%D \starttyping
%D \bgroup
@@ -641,14 +688,13 @@
\ifdefined\normalinput \else \let\normalinput\input \fi
-%D We don't like outer commands, and we always want access
-%D to the original \type {\input} primitive.
+%D We don't like outer commands, and we always want access to the original
+%D \type {\input} primitive.
\let\normalouter\outer \def\outer{} % no longer \relax
-%D To circumvent dependencies, we can postpone certain
-%D initializations to dumping time, by appending them to the
-%D \type {\everydump} token register.
+%D To circumvent dependencies, we can postpone certain initializations to
+%D dumping time, by appending them to the \type {\everydump} token register.
\ifdefined\normaldump \else \let\normaldump\dump \fi
@@ -669,11 +715,10 @@
%D settrue, setfalse,
%D ifconditional,then}
%D
-%D \TEX's lacks boolean variables, although the \PLAIN\ format
-%D implements \type{\newif}. The main disadvantage of this
-%D scheme is that it takes three hash table entries. A more
-%D memory saving alternative is presented here. A conditional
-%D is defined by:
+%D \TEX\ lacks boolean variables, although the \PLAIN\ format implements \type
+%D {\newif}. The main disadvantage of this scheme is that it takes three hash table
+%D entries. A more memory saving alternative is presented here. A conditional is
+%D defined by:
%D
%D \starttyping
%D \newconditional\doublesided
@@ -699,9 +744,8 @@
%D \def\setfalse#1{\let#1=\iffalse}
%D \stoptyping
%D
-%D Such an implementation gives problems with nested
-%D conditionals. The next implementation is about as fast
-%D and just as straightforward:
+%D Such an implementation gives problems with nested conditionals. The next
+%D implementation is about as fast and just as straightforward:
\let\conditionalfalse\plusone % maybe we will have a dedicated count/chardef
\let\conditionaltrue \zerocount % maybe we will have a dedicated count/chardef
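A short sketch of the resulting interface (macro names taken from the \macros list above; \doublesided is just the example name used there):

    \newconditional\doublesided   % one control sequence instead of three
    \settrue       \doublesided
    \ifconditional \doublesided
      two sided
    \else
      single sided
    \fi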
@@ -717,8 +761,8 @@
%D \macros
%D {newmacro,setnewmacro,newfraction}
%D
-%D Let's be complete and also introduce some definers. These are
-%D not mandate but handy for grepping.
+%D Let's be complete and also introduce some definers. These are not mandatory
+%D but handy for grepping.
\normalprotected\def\newmacro #1{\let#1\empty}
\normalprotected\def\setnewmacro#1{\let#1}
@@ -727,17 +771,15 @@
\normalprotected\def\newfraction#1{\let#1\!!plusone}
-%D It would be handy to have a primitive \unless\ifcase because
-%D then we could use nicer values. Anyhow, this conditional code
-%D used to be in the \type {syst-aux} module but is now promoted
-%D to here.
+%D It would be handy to have a primitive \unless\ifcase because then we could
+%D use nicer values. Anyhow, this conditional code used to be in the \type
+%D {syst-aux} module but is now promoted to here.
%D \macros
%D {ifzeropt}
%D
-%D The next macro is both cosmetic and byte saving. It is
-%D pretty \type{\if}||safe too. It can be used in cases
-%D like:
+%D The next macro is both cosmetic and byte saving. It is pretty \type
+%D {\if}||safe too. It can be used in cases like:
%D
%D \starttyping
%D \ifzeropt \somedimen ... \else ... \fi
@@ -785,7 +827,23 @@
\def\begcsname#1\endcsname{\ifcsname#1\endcsname\csname#1\endcsname\fi}
-%D Now come a few macros that might be needed in successive loading:
+%D Now come a few macros that might be needed in successive loading. We redefine the
+%D \type {\par} primitive pretty soon so that we get the equivalents right.
+
+% too tricky: \par is used more often than a par actually starts so we have too much
+% chance that we get asymmetrical behaviour
+%
+% \newtoks\everyendpar
+%
+% \normalprotected\def\endpar{\the\everyendpar\normalpar}
+% \normalprotected\def\par {\endpar}
+%
+% \normalprotected\def\reseteverypar
+% {\everypar \emptytoks
+% \everyendpar\emptytoks}
+
+\normalprotected\def\reseteverypar
+ {\everypar\emptytoks}
\let\endgraf\par
\let\endline\cr
@@ -794,9 +852,8 @@
\def\empty{}
\def\null {\hbox{}}
-%D The following two might be overloaded later on but some modules need
-%D then earlier. These functionality is reflected in the name and will not
-%D change.
+%D The following two might be overloaded later on but some modules need them
+%D earlier. Their functionality is reflected in the name and will not change.
\bgroup
\catcode`\^^M=\activecatcode%
@@ -813,16 +870,15 @@
\let\endoflinetoken=^^M
-%D Also needed might be a simple loop structure and we borrow
-%D plain \TEX's one as it is often expected to be present and
-%D it is about the fastest you can get. Beware: this macro
-%D does not support nested loops. We use a namespace prefix
-%D \type {@@pln}.
+%D Also needed might be a simple loop structure and we borrow plain \TEX's one
+%D as it is often expected to be present and it is about the fastest you can
+%D get. Beware: this macro does not support nested loops. We use a namespace
+%D prefix \type {@@pln}.
\long\def\loop#1\repeat{\long\def\@@plnbody{#1}\@@plniterate} % might go
-%D The following makes \type {\loop} \unknown\ \type {\if}
-%D \unknown\ \type {\repeat} skippable (clever trick):
+%D The following makes \type {\loop} \unknown\ \type {\if} \unknown\ \type
+%D {\repeat} skippable (clever trick):
\let\repeat\fi % so both \loop and \repeat are reserved words!
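For completeness, the borrowed loop in action (remember: no nesting):

    \scratchcounter\zerocount
    \loop
      \advance\scratchcounter\plusone
      [\the\scratchcounter]
    \ifnum\scratchcounter<5\relax
    \repeat   % typesets [1] [2] [3] [4] [5]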
@@ -842,23 +898,22 @@
\def\@@plniterate{\@@plnbody\expandafter\@@plniterate\else\fi}
-%D We don't define a real output routine yet but at least get rid
-%D of pages:
+%D We don't define a real output routine yet but at least get rid of pages:
\output{\shipout\box\normalpagebox}
-%D Although we don't add pagenumbers yet we alias the default
-%D register used for counting pages:
+%D Although we don't add pagenumbers yet we alias the default register used
+%D for counting pages:
\countdef\pageno=0 \pageno=1 % first page is number 1
-%D Beside the raw counter \type {\pageno} the \type {\folio}
-%D macro provides the value.
+%D Beside the raw counter \type {\pageno} the \type {\folio} macro provides
+%D the value.
\def\folio{\the\pageno} % kind of expected and therefore reserved
-%D The following registers are kind of standard and (for the moment)
-%D we define them here. This might change.
+%D The following registers are kind of standard and (for the moment) we define
+%D them here. This might change.
\newskip \bigskipamount \bigskipamount = 12pt plus 4pt minus 4pt
\newskip \medskipamount \medskipamount = 6pt plus 2pt minus 2pt
@@ -893,9 +948,8 @@
\spanomit \advance\mscount\minusone
\repeat}
-%D The next section deals with selective definitions in
-%D later modules. One can of course use the \type {\texengine}
-%D number that we defined earlier instead.
+%D The next section deals with selective definitions in later modules. One can of
+%D course use the \type {\texengine} number that we defined earlier instead.
\bgroup \obeylines
\gdef\pickupSOMETEX#1%
@@ -927,19 +981,19 @@
\fi
%D \macros
-%D {bindprimitive}
+%D {bindprimitive}
%D
-%D We can remap primitives (which is needed because of changes in
-%D for instance \PDFTEX).
+%D We can remap primitives (which is needed because of changes in for instance
+%D \PDFTEX).
\def\bindprimitive#1 #2 % new old
{\ifcsname#1\endcsname \else \ifcsname#2\endcsname
\expandafter\let\csname#1\expandafter\endcsname\csname#2\endcsname
\fi \fi}
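A hedged sketch of how \bindprimitive is meant to be used (the names below are placeholders, not actual bindings from this file): the first name becomes an alias for the second when the first is undefined and the second exists.

    \bindprimitive mynewname myoldname % let \mynewname = \myoldname, but only when
                                       % \mynewname is missing and \myoldname exists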
-%D Because \XETEX\ also implements some \PDFTEX\ functionality, we take
-%D care of this here instead of a dedicated module. Later modules need
-%D to handle the undefined cases.
+%D Because \XETEX\ also implements some \PDFTEX\ functionality, we take care of this
+%D here instead of a dedicated module. Later modules need to handle the undefined
+%D cases.
%D These messy checks will disappear.
@@ -997,8 +1051,8 @@
\newif\ifproductionrun
-%D We need to make sure that we start up in \DVI\ mode, so,
-%D after testing for running \PDFTEX, we default to \DVI.
+%D We need to make sure that we start up in \DVI\ mode, so, after testing for running
+%D \PDFTEX, we default to \DVI.
\ifx\pdftexversion\undefined \newcount\pdfoutput \fi \pdfoutput=0
@@ -1044,15 +1098,14 @@
\let\synctex\undefined \newcount\synctex
-%D We get rid of the funny \TEX\ offset defaults of one
-%D inch by setting them to zero.
+%D We get rid of the funny \TEX\ offset defaults of one inch by setting them to zero.
-\voffset \zeropoint \newdimen\voffset % prevent messing up
-\hoffset \zeropoint \newdimen\hoffset % prevent messing up
+\voffset\zeropoint \let\voffset\relax \newdimen\voffset % prevent messing up
+\hoffset\zeropoint \let\hoffset\relax \newdimen\hoffset % prevent messing up
-%D While cleaning this code up a bit I was listening to Heather
-%D Nova's \CD\ Redbird. The first song on that \CD\ ends with
-%D a few lines suitable for ending this initialization module:
+%D While cleaning this code up a bit I was listening to Heather Nova's \CD\ Redbird.
+%D The first song on that \CD\ ends with a few lines suitable for ending this
+%D initialization module:
%D
%D \startlines
%D And there's so much I can do for you
@@ -1065,7 +1118,6 @@
%D Come inside
%D \stoplines
%D
-%D So let's see what \TEX\ can do now that we've opened up
-%D the basic machinery.
+%D So let's see what \TEX\ can do now that we've opened up the basic machinery.
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/syst-lua.lua b/Master/texmf-dist/tex/context/base/syst-lua.lua
index f735b1962e6..ef524c339fa 100644
--- a/Master/texmf-dist/tex/context/base/syst-lua.lua
+++ b/Master/texmf-dist/tex/context/base/syst-lua.lua
@@ -16,14 +16,17 @@ commands = commands or { }
function commands.writestatus(...) logs.status(...) end -- overloaded later
--- todo: use shorter names i.e. less tokenization, like prtcatcodes + f_o_t_a
-
local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
-local function testcase(b)
+-- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
+-- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
+-- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
+
+function commands.doifelse(b)
if b then
firstoftwoarguments()
else
@@ -47,33 +50,45 @@ function commands.doifnot(b)
end
end
-commands.testcase = testcase
-commands.doifelse = testcase
+commands.testcase = commands.doifelse -- obsolete
function commands.boolcase(b)
context(b and 1 or 0)
end
function commands.doifelsespaces(str)
- return testcase(find(str,"^ +$"))
+ if find(str,"^ +$") then
+ firstoftwoarguments()
+ else
+ secondoftwoarguments()
+ end
end
local s = lpegtsplitat(",")
local h = { }
-function commands.doifcommonelse(a,b)
+function commands.doifcommonelse(a,b) -- often the same test
local ha = h[a]
local hb = h[b]
- if not ha then ha = lpegmatch(s,a) h[a] = ha end
- if not hb then hb = lpegmatch(s,b) h[b] = hb end
- for i=1,#ha do
- for j=1,#hb do
+ if not ha then
+ ha = lpegmatch(s,a)
+ h[a] = ha
+ end
+ if not hb then
+ hb = lpegmatch(s,b)
+ h[b] = hb
+ end
+ local na = #ha
+ local nb = #hb
+ for i=1,na do
+ for j=1,nb do
if ha[i] == hb[j] then
- return testcase(true)
+ firstoftwoarguments()
+ return
end
end
end
- return testcase(false)
+ secondoftwoarguments()
end
function commands.doifinsetelse(a,b)
@@ -81,16 +96,21 @@ function commands.doifinsetelse(a,b)
if not hb then hb = lpegmatch(s,b) h[b] = hb end
for i=1,#hb do
if a == hb[i] then
- return testcase(true)
+ firstoftwoarguments()
+ return
end
end
- return testcase(false)
+ secondoftwoarguments()
end
local pattern = lpeg.patterns.validdimen
function commands.doifdimenstringelse(str)
- testcase(lpegmatch(pattern,str))
+ if lpegmatch(pattern,str) then
+ firstoftwoarguments()
+ else
+ secondoftwoarguments()
+ end
end
function commands.firstinset(str)
diff --git a/Master/texmf-dist/tex/context/base/syst-lua.mkiv b/Master/texmf-dist/tex/context/base/syst-lua.mkiv
index 0d72e4a019b..88a8c246e26 100644
--- a/Master/texmf-dist/tex/context/base/syst-lua.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-lua.mkiv
@@ -19,7 +19,7 @@
\def\expdoif #1#2{\ctxcommand{doif (\!!bs#1\!!es==\!!bs#2\!!es)}}
\def\expdoifnot #1#2{\ctxcommand{doifnot (\!!bs#1\!!es==\!!bs#2\!!es)}}
-% \testfeatureonce{100000}{\doifelse{hello world}{here i am}{}} % 0.3
+% \testfeatureonce{100000}{\doifelse{hello world}{here i am}{}} % 0.3
% \testfeatureonce{100000}{\expandabledoifelse{hello world}{here i am}{}} % 1.5
\def\expdoifcommonelse#1#2{\ctxcommand{doifcommonelse("#1","#2")}}
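An expandable usage sketch (the Lua doifcommonelse above picks the first branch as soon as the comma separated lists share an item):

    \expdoifcommonelse{aap,noot}{noot,mies}{common}{not common}   % -> common
    \expdoifcommonelse{aap}{noot,mies}{common}{not common}        % -> not common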
@@ -34,10 +34,20 @@
\unexpanded\def\writestatus#1#2{\ctxcommand{writestatus(\!!bs#1\!!es,\!!bs#2\!!es)}}
-% a handy helper (we can probably omit the tex.ctxcatcodes here as nowadays we seldom
-% change the regime at the tex end
+% A handy helper:
-%def\luaexpr#1{\ctxlua {context(tostring(#1))}}
-\def\luaexpr#1{\directlua\zerocount{context(tostring(#1))}} % wrap in global function ?
+% \def\luaexpr#1{\ctxlua{context(tostring(#1))}} % more efficient:
+
+% We can omit the tex.ctxcatcodes here as nowadays we seldom
+% change the regime at the \TEX\ end:
+
+\def\luaexpr#1{\directlua{tex.print(tostring(#1))}}
+
+% helpers:
+
+\def\ui_fo #1{#1}
+\def\ui_go #1{}
+\def\ui_ft#1#2{#1}
+\def\ui_st#1#2{#2}
\protect \endinput
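And a trivial usage sketch of the simplified \luaexpr helper:

    \luaexpr{1 + 2*3}   % typesets 7
    \luaexpr{math.pi}   % typesets Lua's string representation of pi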
diff --git a/Master/texmf-dist/tex/context/base/tabl-ltb.mkiv b/Master/texmf-dist/tex/context/base/tabl-ltb.mkiv
index 542d256f2fc..f7fbc0390df 100644
--- a/Master/texmf-dist/tex/context/base/tabl-ltb.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-ltb.mkiv
@@ -63,6 +63,8 @@
\newconstant\linetableheadstate
\newconstant\linetablemode
+\definesystemvariable {le} % LinetablE
+
\edef\??ler{\??le:r:}
\edef\??lec{\??le:c:}
\edef\??lew{\??le:w:}
@@ -721,10 +723,10 @@
\startlinetablerun \readfile{#1}\donothing\donothing\stoplinetablerun
\egroup}
-\protect \endinput
+\protect
+
+\continueifinputfile{tabl-ltb.mkiv}
-\doifnotmode{demo}{\endinput}
-
\setuplinetable[n=6,m={2,2,2},lines=25] % m ?
\setuplinetable[c][1] [width=2cm,background=color,backgroundcolor=red]
diff --git a/Master/texmf-dist/tex/context/base/tabl-mis.mkiv b/Master/texmf-dist/tex/context/base/tabl-mis.mkiv
new file mode 100644
index 00000000000..9a0f13853ac
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/tabl-mis.mkiv
@@ -0,0 +1,288 @@
+%D \module
+%D [ file=tabl-mis,
+%D version=2012.06.28,
+%D title=\CONTEXT\ Table Macros,
+%D subtitle=Miscellaneous,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Table Macros / Miscellaneous}
+
+\unprotect
+
+%D \macros
+%D {somekindoftab,kindoftabposition}
+%D
+%D This macro can be used to create tabs:
+%D
+%D \starttyping
+%D \setupheadertexts[{\somekindoftab[alternative=horizontal]{\framed{\kindoftabposition}}}]
+%D \setuptexttexts [{\somekindoftab[alternative=vertical] {\framed{\kindoftabposition}}}]
+%D
+%D \starttext
+%D \showframe \dorecurse{10}{test\page}
+%D \stoptext
+%D \stoptyping
+%D
+%D (This rather old but updated code used to be in \type {core-mis.mkiv}.)
+
+\let\kindoftabposition\!!zerocount
+
+\unexpanded\def\somekindoftab
+ {\dosingleempty\typo_kindoftab}
+
+\def\typo_kindoftab[#1]%
+ {\bgroup
+ \getdummyparameters
+ [\c!alternative=\v!vertical,
+ \c!width=\textwidth,\c!height=\textheight,
+ \c!n=\lastpage,\c!m=\realpageno,
+ #1]%
+ \doifelse{\directdummyparameter\c!alternative}\v!vertical
+ {\typo_kindoftab_indeed\vbox\vskip\c!height}
+ {\typo_kindoftab_indeed\hbox\hskip\c!width }}
+
+\def\typo_kindoftab_indeed#1#2#3#4%
+ {#1 to \directdummyparameter#3 \bgroup
+ \forgetall
+ \scratchnx\directdummyparameter\c!n\relax
+ \scratchmx\directdummyparameter\c!m\relax
+ \edef\kindoftabposition{\the\scratchmx}%
+ \ifnum\scratchmx>\plusone
+ #2\zeropoint \s!plus \the\numexpr\scratchmx-\plusone \relax\s!fill\relax
+ \fi
+ #4% can use \kindoftabposition
+ \ifnum\scratchmx<\scratchnx\relax
+ #2\zeropoint \s!plus \the\numexpr\scratchnx-\scratchmx\relax\s!fill\relax
+ \fi
+ \egroup
+ \egroup}
+
+%D The following paragraphs mechanism is probably one of the oldest of
+%D \CONTEXT\ and mostly served as a table mechanism capable of dealing
+%D with paragraphs. Nowadays one can also use tabulate or natural tables.
+%D
+%D \startbuffer
+%D \defineparagraphs[sample][n=2,rule=on]
+%D
+%D \startsample
+%D first \nextsample
+%D second \nextsample
+%D third
+%D \stopsample
+%D
+%D \startsample
+%D \input tufte \nextsample
+%D \input ward \nextsample
+%D \input davis \nextsample
+%D \input zapf
+%D \stopsample
+%D
+%D \startparagraphs[sample]
+%D first \nextsample
+%D second \nextsample
+%D third
+%D \stopparagraphs
+%D
+%D \startparagraphs[sample]
+%D \startparagraphscell
+%D first
+%D \stopparagraphscell
+%D \startparagraphscell
+%D second
+%D \stopparagraphscell
+%D \startparagraphscell
+%D third
+%D \stopparagraphscell
+%D \stopparagraphs
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\installcorenamespace{paragraphs}
+
+\installcommandhandler \??paragraphs {paragraphs} \??paragraphs
+
+\setupparagraphs
+ [\c!n=3,
+ \c!before=\blank,
+ \c!after=\blank,
+ \c!distance=\emwidth,
+ \c!height=\v!fit,
+ \c!width=\availablehsize,
+ \c!rule=\v!off,
+ \c!command=,
+ \c!align=,
+ \c!tolerance=\v!tolerant, % obsolete
+ \c!rulethickness=\linewidth,
+ \c!rulecolor=,
+ \c!style=,
+ \c!color=,
+ \c!top=\vss,
+ \c!bottom=\vfill]
+
+\let\typo_paragraphs_setup_saved\setupparagraphs
+
+\unexpanded\def\setupparagraphs
+ {\dotripleempty\typo_paragraphs_setup}
+
+\def\typo_paragraphs_setup[#1][#2][#3]% we are downward compatible with [each] and [1,3]
+ {\ifthirdargument
+ \doifelse{#2}\v!each
+ {\typo_paragraphs_setup_saved[#1][#3]}
+ {\def\typo_paragraphs_setup_step##1{\typo_paragraphs_setup_saved[#1:##1][#3]}%
+ \processcommalist[#2]\typo_paragraphs_setup_step}%
+ \else\ifsecondargument
+ \typo_paragraphs_setup_saved[#1][#2]%
+ \else\iffirstargument
+ \typo_paragraphs_setup_saved[#1]%
+ \fi\fi\fi}
+
+\appendtoks
+ \letvalue{\e!next \currentparagraphs}\nextparagraphs
+ \letvalue\currentparagraphs\nextparagraphs
+ \setuevalue{\e!start\currentparagraphs}{\startparagraphs[\currentparagraphs]}%
+ \letvalue{\e!stop \currentparagraphs}\stopparagraphs
+ %setuevalue{\e!setup\currentparagraph\e!endsetup}{\typo_paragraphs_setup_saved[\currentparagraphs]}%
+ \dorecurse{\paragraphsparameter\c!n}
+ {\normalexpanded{\typo_paragraphs_setup_saved[\currentparagraphs:\recurselevel][\c!width=,\s!parent=\??paragraphs\currentparagraphs]}}%
+ \typo_paragraphs_setup_saved[\currentparagraphs:1][\c!distance=\zeropoint]%
+\to \everydefineparagraphs
+
+\newcount\c_typo_paragraphs_n
+\newcount\c_typo_paragraphs_max
+\newdimen\d_typo_paragraphs_width
+\newdimen\d_typo_paragraphs_auto
+
+\unexpanded\def\startparagraphs[#1]% quite slow
+ {\bgroup % (1)
+ \edef\currentparagraphs{#1}%
+ \paragraphsparameter\c!before
+ \edef\p_width{\paragraphsparameter\c!width}%
+ \ifx\p_width\empty
+ \d_typo_paragraphs_width\availablehsize
+ \else
+ \d_typo_paragraphs_width\p_width\relax
+ \fi
+ \c_typo_paragraphs_max\paragraphsparameter\c!n\relax
+ \d_typo_paragraphs_auto\d_typo_paragraphs_width\relax
+ \scratchcounter\zerocount
+ \dorecurse\c_typo_paragraphs_max
+ {\edef\p_width{\namedparagraphsparameter{\currentparagraphs:\recurselevel}\c!width}%
+ \ifx\p_width\empty
+ \advance\scratchcounter\plusone
+ \else
+ \advance\d_typo_paragraphs_auto-\p_width\relax
+ \fi
+ \ifnum\recurselevel>\plusone
+ \advance\d_typo_paragraphs_auto-\namedparagraphsparameter{\currentparagraphs:\recurselevel}\c!distance\relax
+ \fi}%
+ \ifnum\scratchcounter>\zerocount
+ \divide\d_typo_paragraphs_auto\scratchcounter
+ \else
+ \d_typo_paragraphs_auto\zeropoint
+ \fi
+ \parindent\zeropoint
+ \c_typo_paragraphs_n\zerocount
+ \let\\=\typo_paragraphs_next % downward compatible
+ \dontleavehmode\hbox
+ \bgroup % (2)
+ \forgetall
+ \let\typo_paragraphs_start_cell\typo_paragraphs_start_cell_indeed
+ \let\typo_paragraphs_stop_cell \typo_paragraphs_stop_cell_indeed
+ \typo_paragraphs_start_cell_indeed}
+
+\unexpanded\def\stopparagraphs
+ {\stopparagraphscell
+ \egroup % (2)
+ \paragraphsparameter\c!after
+ \egroup} % (1)
+
+\unexpanded\def\nextparagraphs
+ {\stopparagraphscell
+ \startparagraphscell}
+
+\unexpanded\def\startparagraphscell
+ {\typo_paragraphs_start_cell}
+
+\def\typo_paragraphs_start_cell_indeed
+ {\removeunwantedspaces
+ \advance\c_typo_paragraphs_n\plusone
+ \ifnum\c_typo_paragraphs_n>\c_typo_paragraphs_max
+ \expandafter\typo_paragraphs_start_cell_nop
+ \else
+ \expandafter\typo_paragraphs_start_cell_yes
+ \fi}
+
+\def\typo_paragraphs_start_cell_nop
+ {\begingroup
+ % message: too many cells in paragraphs
+ \let\typo_paragraphs_start_cell\relax
+ \let\typo_paragraphs_stop_cell\typo_paragraphs_stop_cell_indeed
+ \setbox\scratchbox\vbox\bgroup}
+
+\def\typo_paragraphs_start_cell_yes
+ {\begingroup
+ \let\typo_paragraphs_start_cell\relax
+ \let\typo_paragraphs_stop_cell\typo_paragraphs_stop_cell_indeed
+ \edef\currentparagraphs{\currentparagraphs:\the\c_typo_paragraphs_n}%
+ \ifnum\c_typo_paragraphs_n>\plusone
+ \typo_paragraphs_separator
+ \fi
+ \edef\p_height{\paragraphsparameter\c!height}%
+ \edef\p_width {\paragraphsparameter\c!width }%
+ \useparagraphsstyleandcolor\c!style\c!color
+ \setbox\scratchbox\vtop \ifx\p_height\empty \else\ifx\p_height\v!fit \else to \p_height \fi\fi
+ \bgroup % (2)
+ \blank[\v!disable]%
+ \paragraphsparameter\c!top
+ \hsize\ifx\p_width\empty \d_typo_paragraphs_auto \else \p_width \fi \relax
+ \usealignparameter\paragraphsparameter
+ \paragraphsparameter\c!inner
+ \everypar{\begstrut\everypar\emptytoks}%
+ \ignorespaces
+ \paragraphsparameter\c!command}
+
+\unexpanded\def\stopparagraphscell
+ {\typo_paragraphs_stop_cell
+ \let\typo_paragraphs_stop_cell\relax}
+
+\def\typo_paragraphs_stop_cell_indeed
+ {\ifnum\c_typo_paragraphs_n>\c_typo_paragraphs_max
+ \expandafter\typo_paragraphs_stop_cell_nop
+ \else
+ \expandafter\typo_paragraphs_stop_cell_yes
+ \fi}
+
+\def\typo_paragraphs_stop_cell_nop
+ {\egroup
+ \endgroup}
+
+\def\typo_paragraphs_stop_cell_yes
+ {\ifvmode
+ \removelastskip
+ \else
+ \removeunwantedspaces
+ \endstrut
+ \endgraf
+ \fi
+ \paragraphsparameter\c!bottom
+ \egroup % (2)
+ \dontleavehmode\hbox{\raise\strutheight\box\scratchbox}%
+ \endgroup}
+
+\def\typo_paragraphs_separator
+ {\scratchdistance\paragraphsparameter\c!distance
+ \doif{\paragraphsparameter\c!rule}\v!on
+ {\scratchwidth\paragraphsparameter\c!rulethickness
+ \scratchdistance\dimexpr(\scratchdistance-\scratchwidth)/2\relax
+ \hskip\scratchdistance
+ \color[\paragraphsparameter\c!rulecolor]{\vrule\s!width\scratchwidth}}%
+ \hskip\scratchdistance}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv b/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
index 99da4b5e8a3..9927256b1bb 100644
--- a/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
@@ -1,5 +1,5 @@
%D \module
-%D [ file=core-ntb,
+%D [ file=tabl-ntb,
%D version=2000.04.18,
%D title=\CONTEXT\ Table Macros,
%D subtitle=Natural Tables,
@@ -11,16 +11,15 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This is an unfinished, preliminary module. At least two
-%D runs are needed to get the table fixed. Ugly code.
+%D This module has a more modern variant in xtables but as we follow a somewhat
+%D different approach with settings there, this mechanism will stay. In fact
+%D each of them has its advantages. This module could be sped up a bit and made
+%D more efficient by delegating some housekeeping to \LUA\ but it's not worth
+%D the effort. The code could be made more readable but again, there is no
+%D real purpose in it. If needed I can squeeze out a few more percent of
+%D runtime.
-% todo: TABLE TBL -> ntb
-% todo: special parsetb for argless variant
-% todo: protect \tbl...
-% todo: tblnx also count
-% todo: get rid of recurse
-% todo: fast if
-% todo: avoid halign (just do it manual) and thereby globals
+\writestatus{loading}{ConTeXt Table Macros / Natural Tables}
% bug: width 3cm is not honored and column becomes too wide
% as given width is added to distributed width
@@ -56,65 +55,14 @@
% \stopcelltable
% \stoptext
-% optie=rek beschrijven
-
-\writestatus{loading}{ConTeXt Table Macros / Natural Tables}
-
-%D As always, this is the nth version. Much time went in
+%D As always, this is the n\high{th} version. Much time went into
%D trying to speed up the many cell calculations, some
%D optimizations were rejected in order not to complicate this
-%D module too much (and in order to prevail extensibility).
+%D module too much (and in order to preserve extensibility). In the
+%D meantime we've sacrificed some speed for readability.
-% shapebox fails here in mkii
-%
-% \setupcolors[state=start]
-% \bTABLE
-% \bTR [align=middle]\bTH Range\eTH{}\bTH Value\eTH{}\eTR
-% \bTR \bTD \type{<} 12\eTD{}\bTD 3\eTD{}\eTR
-% \bTR \bTD 12--16\eTD{}\bTD 2\eTD{}\eTR
-% \bTR \bTD \type{>}16\eTD{}\bTD 1\eTD{}\eTR
-% \eTABLE
-
-% \starttext
-% \placefigure[left]{}{}
-% \startlinecorrection \dontleavehmode \bTABLE
-% \bTR \bTD oeps \eTD \eTR
-% \eTABLE \stoplinecorrection
-% \placefigure[right]{}{}
-% \startlinecorrection \dontleavehmode \bTABLE
-% \bTR \bTD oeps \eTD \eTR
-% \eTABLE \stoplinecorrection
-% \stoptext
-
-%D To Do:
-%D
-%D \starttyping
-%D break over pagina
-%D kop herhalen
-%D reset settings
-%D
-%D \setupTABLE [c|column|x] [nx|odd|even|first|last][a=b]
-%D \setupTABLE [r|row |y] [nx|odd|even|first|last][a=b]
-%D \setupTABLE [nx|odd|even|first|last][ny|odd|even|first|last][a=b]
-%D \setupTABLE [nx|odd|even|first|last] [a=b]
-%D \setupTABLE [a=b]
-%D
-%D \bTH \eTH
-%D \stoptyping
-
-% the section setup does not work yet, data needs to be stored,
-% i.e.each row should know if it's a head/body/foot, and there
-% should be \setupTABLE[head]... and alike
-
\unprotect
-%D A simple way to force equal line spacing is to say:
-%D
-%D \starttyping
-%D \def\bTBLCELL{\begstrut}
-%D \def\eTBLCELL{\endstrut}
-%D \stoptyping
-%D
%D The next alternative also takes care of preceding and following
%D white space.
%D
@@ -126,22 +74,25 @@
%D
%D \typebuffer \getbuffer
-\ifdefined\dotagTABLEcell \else \let\dotagTABLEcell \relax \fi
-\ifdefined\dotagTABLEsignal \else \let\dotagTABLEsignal\relax \fi
+\ifdefined\dotagTABLEcell \else \let\dotagTABLEcell \relax \fi % todo: namespace
+\ifdefined\dotagTABLEsignal \else \let\dotagTABLEsignal\relax \fi % todo: namespace
+
+\let\tabl_tnb_next_level\relax
-\def\bTBLCELL % why not \doinhibitblank
+\unexpanded\def\tabl_tnb_cell_start
{\inhibitblank
\dotagTABLEcell
- \dousestyleparameter\tbltblstyle
- \everypar{\tbltblleft\delayedbegstrut}}
+% \tabl_tnb_next_level
+ \usenaturaltablelocalstyleandcolor\c!style\c!color
+ \everypar{\naturaltablelocalparameter\c!left\delayedbegstrut}}
-\def\eTBLCELL
+\unexpanded\def\tabl_tnb_cell_stop
{\ifhmode
\delayedendstrut
- \tbltblright
+ \naturaltablelocalparameter\c!right
\par % added 13/4/2006
\else
- % not sure yet:\tbltblright
+ % not sure yet:\naturaltablelocalparameter\c!right
\par
\ifdim\prevdepth<\zeropoint % =-1000pt ?
\vskip-\strutdp
@@ -150,92 +101,195 @@
\fi
\fi}
-\newcount\currenttbl
+\newcount\c_tabl_ntb_row
+\newcount\c_tabl_ntb_col
+\newcount\c_tabl_ntb_spn
-\def\@@tbl{tbl} \def\tblcell{1} \def\tblnone{2}
+\newcount\c_tabl_ntb_nx
+\newcount\c_tabl_ntb_ny
-\def\@@tblprefix{tbl:} \let\@@rawtblprefix\@@tblprefix
+\setnewconstant\c_tabl_ntb_cell \plusone
+\setnewconstant\c_tabl_ntb_none \plustwo
-%D This should be done more efficient: soon
+\newcount\c_tabl_ntb_current_row
+\newcount\c_tabl_ntb_current_col
+\newcount\c_tabl_ntb_current_row_one
+\newcount\c_tabl_ntb_current_col_one
+\newcount\c_tabl_ntb_current_row_two
+\newcount\c_tabl_ntb_current_col_two
+\newcount\c_tabl_ntb_current_row_three
+\newcount\c_tabl_ntb_current_col_three
+\newcount\c_tabl_ntb_current_row_four
+\newcount\c_tabl_ntb_current_col_four
-% \let as well as \expandafter\edef's
+\newcount\c_tabl_ntb_running_col
+\newcount\c_tabl_ntb_maximum_row
+\newcount\c_tabl_ntb_maximum_col
+\newcount\c_tabl_ntb_maximum_row_span
+\newcount\c_tabl_ntb_maximum_col_span
-\newcounter\TBLlevel
+\newtoks\t_tabl_ntb
+\newtoks\t_tabl_ntb_row
-\def\@@tblprefix{\@@tbl:\ifnum\TBLlevel>1 :\TBLlevel:\fi}
+\newconstant\c_tabl_tbl_pass
-% \def\tblsetprefix % not yet used, figure out when .. may interfere with setup
-% {\edef\@@tblprefix{\@@tbl:\ifnum\TBLlevel>1 :\TBLlevel:\fi}}
+\newtoks\t_tabl_ntb_head
+\newtoks\t_tabl_ntb_next
+\newtoks\t_tabl_ntb_body
+\newtoks\t_tabl_ntb_foot
-\def\settblnob#1{\expandafter\let\csname\@@tblprefix\number#1:b\endcsname\plusone}
-\def\gettblnob#1{\ifcsname\@@tblprefix\number#1:b\endcsname\plusone\else\zerocount\fi}
+\newcount\c_tabl_ntb_n_of_head_lines
+\newcount\c_tabl_ntb_n_of_next_lines
+\newcount\c_tabl_ntb_n_of_hdnx_lines
-\def\settbltag#1#2{\expandafter\edef\csname\@@tblprefix\number#1:\number#2:s\endcsname}
-\def\settblcol#1#2{\expandafter\edef\csname\@@tblprefix\number#1:\number#2:c\endcsname}
-\def\settblrow#1#2{\expandafter\edef\csname\@@tblprefix\number#1:\number#2:r\endcsname}
+\newdimen\d_tabl_ntb_height
+\newdimen\d_tabl_ntb_width
-\def\lettbltag#1#2{\expandafter\let\csname\@@tblprefix\number#1:\number#2:s\endcsname}
-\def\lettblcol#1#2{\expandafter\let\csname\@@tblprefix\number#1:\number#2:c\endcsname}
-\def\lettblrow#1#2{\expandafter\let\csname\@@tblprefix\number#1:\number#2:r\endcsname}
+\newtoks\everyTABLEpass % public
-\def\settblwd#1#2{\expandafter\xdef\csname\@@tblprefix\number#1:\number#2:wd\endcsname} % global !
-\def\settblht#1#2{\expandafter\xdef\csname\@@tblprefix\number#1:\number#2:ht\endcsname} % global !
-\def\lettblwd#1#2{\global\expandafter\let\csname\@@tblprefix\number#1:\number#2:wd\endcsname} % global !
-\def\lettblht#1#2{\global\expandafter\let\csname\@@tblprefix\number#1:\number#2:ht\endcsname} % global !
+\newcount\tablecellrows % public (needs checking)
+\newcount\tablecellcolumns % public (needs checking)
-\def\gettbltag#1#2{\csname\@@tblprefix\number#1:\number#2:s\endcsname}
-\def\gettblcol#1#2{\csname\@@tblprefix\number#1:\number#2:c\endcsname}
-\def\gettblrow#1#2{\csname\@@tblprefix\number#1:\number#2:r\endcsname}
+\newbox\b_tabl_ntb_final
-\def\gettblwd #1#2{\csname\@@tblprefix\number#1:\number#2:wd\endcsname}
-\def\gettblht #1#2{\csname\@@tblprefix\number#1:\number#2:ht\endcsname}
+%D We have already prepared the previous macros for nesting,
+%D so we only have to pop in the right ones:
-\def\settblwid#1{\expandafter\xdef\csname\@@tblprefix\number#1:w\endcsname} % {#2} global !
-\def\settblhei#1{\expandafter\xdef\csname\@@tblprefix\number#1:h\endcsname} % {#2} global !
-\def\settbldis#1{\expandafter\xdef\csname\@@tblprefix\number#1:d\endcsname} % {#2} global !
-\def\settblaut#1{\expandafter\xdef\csname\@@tblprefix\number#1:a\endcsname} % {#2} global !
+\newcount\c_tabl_level
-\def\lettblwid#1{\global\expandafter\let\csname\@@tblprefix\number#1:w\endcsname} % {#2} global !
-\def\lettblhei#1{\global\expandafter\let\csname\@@tblprefix\number#1:h\endcsname} % {#2} global !
-\def\lettbldis#1{\global\expandafter\let\csname\@@tblprefix\number#1:d\endcsname} % {#2} global !
-\def\lettblaut#1{\global\expandafter\let\csname\@@tblprefix\number#1:a\endcsname} % {#2} global !
+\unexpanded\def\tabl_ntb_table_push
+ {\ifnum\m_tabl_tbl_level>\plusone
+ \tabl_ntb_parameters_reset
+ % we need a proper count push/pop
+ \xdef\m_tabl_ntb_saved_row{\the\c_tabl_ntb_row}\globalpushmacro\m_tabl_ntb_saved_row
+ \xdef\m_tabl_ntb_saved_col{\the\c_tabl_ntb_col}\globalpushmacro\m_tabl_ntb_saved_col
+ \else
+ \global\intabletrue
+ \fi}
-\def\gettblwid#1{\ifcsname\@@tblprefix\number#1:w\endcsname\csname\@@tblprefix\number#1:w\endcsname\else\zeropoint\fi}
-\def\gettblhei#1{\ifcsname\@@tblprefix\number#1:h\endcsname\csname\@@tblprefix\number#1:h\endcsname\else\zeropoint\fi}
-\def\gettbldis#1{\ifcsname\@@tblprefix\number#1:d\endcsname\csname\@@tblprefix\number#1:d\endcsname\else\zeropoint\fi}
-\def\gettblaut#1{\csname \@@tblprefix\number#1:a\endcsname}
+\unexpanded\def\tabl_ntb_table_pop
+ {\ifnum\m_tabl_tbl_level>\plusone
+ \globalpopmacro\m_tabl_ntb_saved_row\global\c_tabl_ntb_row\m_tabl_ntb_saved_row
+ \globalpopmacro\m_tabl_ntb_saved_col\global\c_tabl_ntb_col\m_tabl_ntb_saved_col
+ \else
+ \global\intablefalse
+ \fi}
-\def\doiftbltag #1#2{\ifcsname\@@tblprefix\number#1:\number#2:s\endcsname\@EA\firstofoneargument \else\@EA\gobbleoneargument \fi}
-\def\doifnottbltag #1#2{\ifcsname\@@tblprefix\number#1:\number#2:s\endcsname\@EA\gobbleoneargument \else\@EA\firstofoneargument \fi}
-\def\doifelsetbltag#1#2{\ifcsname\@@tblprefix\number#1:\number#2:s\endcsname\@EA\firstoftwoarguments\else\@EA\secondoftwoarguments\fi}
-\def\doiftblrow #1#2{\ifcsname\@@tblprefix\number#1:\number#2:r\endcsname\@EA\firstofoneargument \else\@EA\gobbleoneargument \fi}
-\def\doiftblcol #1#2{\ifcsname\@@tblprefix\number#1:\number#2:c\endcsname\@EA\firstofoneargument \else\@EA\gobbleoneargument \fi}
-\def\doifnottblcol #1#2{\ifcsname\@@tblprefix\number#1:\number#2:c\endcsname\@EA\gobbleoneargument \else\@EA\firstofoneargument \fi}
+\unexpanded\def\tabl_tnb_next_level
+ {\advance\c_tabl_level\plusone
+ \edef\m_tabl_tbl_level{\the\c_tabl_level}}
+
+\unexpanded\def\tabl_tnb_prev_level
+ {\advance\c_tabl_level\minusone
+ \edef\m_tabl_tbl_level{\the\c_tabl_level}}
+
+\tabl_tnb_next_level % go to level 1
+
+\installcorenamespace{naturaltable} % was tbl
+\installcorenamespace{naturaltablelocal} % was tbltbl
+
+\installdirectcommandhandler \??naturaltable {naturaltable} % \??naturaltable
+\installsimpleframedcommandhandler \??naturaltablelocal {naturaltablelocal} \??naturaltablelocal
+
+\installcorenamespace{naturaltablenob}
+\installcorenamespace{naturaltabletag}
+\installcorenamespace{naturaltablecol}
+\installcorenamespace{naturaltablerow}
+\installcorenamespace{naturaltablewd}
+\installcorenamespace{naturaltableht}
+\installcorenamespace{naturaltabledp}
+\installcorenamespace{naturaltablewid}
+\installcorenamespace{naturaltablehei}
+\installcorenamespace{naturaltabledis}
+\installcorenamespace{naturaltableaut}
+\installcorenamespace{naturaltabletxt}
+\installcorenamespace{naturaltablespn}
+\installcorenamespace{naturaltableref}
+\installcorenamespace{naturaltableset}
+\installcorenamespace{naturaltablecell}
+
+\def\tabl_ntb_set_nob#1{\expandafter\let\csname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone}
+\def\tabl_ntb_get_nob#1{\ifcsname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone\else\zerocount\fi}
+
+\def\tabl_ntb_set_tag#1#2{\expandafter\edef\csname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_set_col#1#2{\expandafter\edef\csname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_set_row#1#2{\expandafter\edef\csname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+
+\def\tabl_ntb_let_tag#1#2{\expandafter\let\csname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_let_col#1#2{\expandafter\let\csname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_let_row#1#2{\expandafter\let\csname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+
+\def\tabl_ntb_set_wd#1#2{\expandafter\xdef\csname\??naturaltablewd\m_tabl_tbl_level:\number#1:\number#2\endcsname} % global !
+\def\tabl_ntb_set_ht#1#2{\expandafter\xdef\csname\??naturaltableht\m_tabl_tbl_level:\number#1:\number#2\endcsname} % global !
+
+\def\tabl_ntb_let_wd#1#2{\global\expandafter\let\csname\??naturaltablewd\m_tabl_tbl_level:\number#1:\number#2\endcsname} % global !
+\def\tabl_ntb_let_ht#1#2{\global\expandafter\let\csname\??naturaltableht\m_tabl_tbl_level:\number#1:\number#2\endcsname} % global !
+
+\def\tabl_ntb_get_tag#1#2{\csname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_col#1#2{\csname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_row#1#2{\csname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+
+\def\tabl_ntb_get_wd#1#2{\csname\??naturaltablewd\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_ht#1#2{\csname\??naturaltableht\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+
+\def\tabl_ntb_set_wid#1{\expandafter\xdef\csname\??naturaltablewid\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_set_hei#1{\expandafter\xdef\csname\??naturaltablehei\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_set_dis#1{\expandafter\xdef\csname\??naturaltabledis\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_set_aut#1{\expandafter\xdef\csname\??naturaltableaut\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+
+\def\tabl_ntb_let_wid#1{\global\expandafter\let\csname\??naturaltablewid\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_let_hei#1{\global\expandafter\let\csname\??naturaltablehei\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_let_dis#1{\global\expandafter\let\csname\??naturaltabledis\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+\def\tabl_ntb_let_aut#1{\global\expandafter\let\csname\??naturaltableaut\m_tabl_tbl_level:\number#1\endcsname} % {#2} global !
+
+\def\tabl_ntb_get_wid#1{\ifcsname\??naturaltablewid\m_tabl_tbl_level:\number#1\endcsname\csname\??naturaltablewid\m_tabl_tbl_level:\number#1\endcsname\else\zeropoint\fi}
+\def\tabl_ntb_get_hei#1{\ifcsname\??naturaltablehei\m_tabl_tbl_level:\number#1\endcsname\csname\??naturaltablehei\m_tabl_tbl_level:\number#1\endcsname\else\zeropoint\fi}
+\def\tabl_ntb_get_dis#1{\ifcsname\??naturaltabledis\m_tabl_tbl_level:\number#1\endcsname\csname\??naturaltabledis\m_tabl_tbl_level:\number#1\endcsname\else\zeropoint\fi}
+\def\tabl_ntb_get_aut#1{\csname \??naturaltableaut\m_tabl_tbl_level:\number#1\endcsname}
+
+\def\tabl_ntb_tag_pattern#1#2{\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2}
+\def\tabl_ntb_row_pattern#1#2{\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2}
+\def\tabl_ntb_col_pattern#1#2{\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2}
+
+\def\tabl_ntb_tag_doif #1#2{\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
+\def\tabl_ntb_tag_doifnot #1#2{\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\gobbleoneargument \else\expandafter\firstofoneargument \fi}
+\def\tabl_ntb_tag_doifelse#1#2{\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\def\tabl_ntb_row_doif #1#2{\ifcsname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
+\def\tabl_ntb_col_doif #1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
+\def\tabl_ntb_col_doifnot #1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\gobbleoneargument \else\expandafter\firstofoneargument \fi}
+
+% not used
+%
+% \def\tabl_ntb_tag_state#1#2{\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
+% \def\tabl_ntb_row_state#1#2{\ifcsname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
+% \def\tabl_ntb_col_state#1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
-\def\tbltagstate#1#2{\ifcsname\@@tblprefix\number#1:\number#2:s\endcsname\zerocount\else\plusone\fi}
-\def\tblrowstate#1#2{\ifcsname\@@tblprefix\number#1:\number#2:r\endcsname\zerocount\else\plusone\fi}
-\def\tblcolstate#1#2{\ifcsname\@@tblprefix\number#1:\number#2:c\endcsname\zerocount\else\plusone\fi}
+\def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
+\def\tabl_ntb_spn_doifelse#1{\doifelse {\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname}\!!plusone}
-\def\settblspn #1{\expandafter\let\csname\@@tblprefix\number#1:s\endcsname \!!plusone}
-\def\doifelsetblspn#1{\doifelse {\csname\@@tblprefix\number#1:s\endcsname}\!!plusone}
-% \def\doifelsetblspn#1{\@EA\ifx\csname\@@tblprefix\number#1:s\endcsname\plusone\@EA\firstoftwoarguments\else\@EA\secondoftwoarguments\fi}
+\def\tabl_ntb_set_spn #1{\setvalue {\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
+\def\tabl_ntb_spn_doifelse#1{\doifelsevalue{\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
-\def\settblspn #1{\setvalue {\@@tblprefix\number#1:s}{1}}
-\def\doifelsetblspn#1{\doifelsevalue{\@@tblprefix\number#1:s}{1}}
+\def\tabl_ntb_let_ref#1#2{\expandafter\glet\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_set_ref#1#2{\expandafter\xdef\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_ref#1#2{\ifcsname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\fi}
-\long\def\settbltxt#1#2#3%
- {\long\@EA\def\csname\@@tblprefix\number#1:\number#2:t\@EA\endcsname\@EA{\@EA\def\@EA\TBLlevel\@EA{\TBLlevel}#3}}
+% keep for a while:
+%
+% \unexpanded\def\tabl_ntb_set_txt_process#1#2#3#4#5#6% nasty: we restore the level
+% {\expandafter\def\csname\??naturaltabletxt\m_tabl_tbl_level:\number#1:\number#2\expandafter\endcsname\expandafter
+% {\expandafter\def\expandafter\m_tabl_tbl_level\expandafter{\m_tabl_tbl_level}\tabl_ntb_cell_process{#3}{#4}[#5]{#6}}}
-\def\gettbltxt#1#2%
- {\csname\@@tblprefix\number#1:\number#2:t\endcsname}
+\unexpanded\def\tabl_ntb_set_txt_process#1#2#3#4#5#6%
+ {\expandafter\def\csname\??naturaltabletxt\m_tabl_tbl_level:\number#1:\number#2\endcsname
+ {\tabl_ntb_cell_process{#3}{#4}[#5]{\tabl_tnb_next_level#6\tabl_tnb_prev_level}}}
-\newtoks\tbltoks
-\newtoks\tblrowtoks
+\def\tabl_ntb_get_txt#1#2%
+ {\csname\??naturaltabletxt\m_tabl_tbl_level:\number#1:\number#2\endcsname}
-\let\pushTBLparameters\relax
-\let\popTBLparameters \relax
+% to be changed:
-\newif\ifsqueezeTBLspan \squeezeTBLspantrue % spans one column cell over multi column par cells
+\newif\ifsqueezeTBLspan \squeezeTBLspantrue % spans one column cell over multi column par cells
\newif\ifautosqueezeTBLspan \autosqueezeTBLspantrue % unless explicit widths are given
\newif\ifautoTBLspread \autoTBLspreadfalse
\newif\ifautoTBLhsize \autoTBLhsizetrue
@@ -249,78 +303,90 @@
\newif\iftraceTABLE \traceTABLEfalse
-\def\noftblheadlines{0}
-\def\noftblnextlines{0}
-\def\noftblhdnxlines{0}
-
-\long\def\handleTBLcell#1#2[#3]{}
+% so far
-\long\def\bTC#1\eTC{\bTD#1\eTD}
-\long\def\bTX#1\eTX{\bTD#1\eTD}
-\long\def\bTY#1\eTY{\bTR#1\eTR}
+\unexpanded\def\tabl_ntb_cell_process#1#2[#3]{}
-\let\getTABLEparameters\getparameters
+\unexpanded\def\bTC#1\eTC{\bTD#1\eTD} \let\eTC\relax
+\unexpanded\def\bTX#1\eTX{\bTD#1\eTD} \let\eTX\relax
+\unexpanded\def\bTY#1\eTY{\bTR#1\eTR} \let\eTY\relax
\unexpanded\def\setupTABLE
- {\dotripleempty\dosetupTABLE}
+ {\dotripleempty\tabl_ntb_setup}
-\def\dosetupTABLE[#1][#2][#3]%
+\let\tabl_ntb_parameters_get\setupcurrentnaturaltablelocal
+
+\def\tabl_ntb_setup
{\ifthirdargument
- \processaction
- [#1]
- [ \v!row=>{\dosetupTABLExy[\c!y][#2][#3]},%
- \v!column=>{\dosetupTABLExy[\c!x][#2][#3]},%
- r=>{\dosetupTABLExy[\c!y][#2][#3]},%
- c=>{\dosetupTABLExy[\c!x][#2][#3]},%
- y=>{\dosetupTABLExy[\c!y][#2][#3]},%
- x=>{\dosetupTABLExy[\c!x][#2][#3]},%
- \v!start=>{\dosetupTABLExy[#1][#2][#3]},%
- \v!header=>{\dosetupTABLExy[#1][#2][#3]},%
- \s!unknown=>{\dosetupTABLEzz[#1][#2][#3]}]%
+ \expandafter\tabl_ntb_setup_three
\else\ifsecondargument
- \processaction
- [#1]
- [ \v!row=>{\dosetupTABLExy[\c!y][\v!each][#2]},%
- \v!column=>{\dosetupTABLExy[\c!x][\v!each][#2]},%
- r=>{\dosetupTABLExy[\c!y][\v!each][#2]},%
- c=>{\dosetupTABLExy[\c!x][\v!each][#2]},%
- y=>{\dosetupTABLExy[\c!y][\v!each][#2]},%
- x=>{\dosetupTABLExy[\c!x][\v!each][#2]},%
- \v!start=>{\dosetupTABLExy[#1][\v!each][#2]},%
- \v!header=>{\dosetupTABLExy[#1][\v!each][#2]},%
- \s!unknown=>{\dosetupTABLEzz[\c!x][#1][#2]}]%
+ \doubleexpandafter\tabl_ntb_setup_two
\else
- \getparameters[\@@tbl\@@tbl][#1]%
+ \doubleexpandafter\tabl_ntb_setup_one
\fi\fi}
-\def\dosetupTABLExy[#1][#2][#3]%
- {\def\dodosetupTABLE##1{\setTABLEparameters[#1##1][#3]}%
- \processcommalist[#2]\dodosetupTABLE}
-
-\def\dosetupTABLEzz[#1][#2][#3]%
- {\def\dodosetupTABLE##1%
- {\def\dododosetupTABLE####1{\setTABLEparameters[\c!x##1\c!y####1][#3]}%
- \processcommalist[#2]\dododosetupTABLE}%
- \processcommalist[#1]\dodosetupTABLE}
-
-\def\nopTABLEparameters[#1][#2]%
- {\letvalueempty{\@@tblprefix#1}} % can be made faster
-
-\def\setTABLEparameters[#1][#2]%
- {\pushTBLparameters
- \ifappendTBLsetups
- \doifdefinedelse{\@@tblprefix#1}
- {\def\getTABLEparameters[##1][##2]%
- {\setvalue{\@@tblprefix#1}{\getTABLEparameters[\@@tbl\@@tbl][##2,#2]}}%
- \getvalue{\@@tblprefix#1}%
- \let\getTABLEparameters\getparameters}
- {\setvalue{\@@tblprefix#1}{\getTABLEparameters[\@@tbl\@@tbl][#2]}}%
+\def\tabl_ntb_setup_one[#1][#2][#3]%
+ {\setupcurrentnaturaltablelocal[#1]}
+
+\def\tabl_ntb_setup_xy[#1][#2][#3]%
+ {\def\tabl_ntb_setup_step##1{\tabl_ntb_parameters_set[#1##1][#3]}%
+ \processcommalist[#2]\tabl_ntb_setup_step}
+
+\def\tabl_ntb_setup_un[#1][#2][#3]%
+ {\def\tabl_ntb_setup_step##1%
+ {\def\tabl_ntb_setup_step_step####1{\tabl_ntb_parameters_set[\c!x##1\c!y####1][#3]}%
+ \processcommalist[#2]\tabl_ntb_setup_step_step}%
+ \processcommalist[#1]\tabl_ntb_setup_step}
+
+\def\tabl_ntb_setup_each[#1][#2][#3]% ignores #3
+ {\tabl_ntb_parameters_set[#1\v!each][#2]}
+
+\def\tabl_ntb_setup_ux[#1][#2][#3]% ignores #3
+ {\def\tabl_ntb_setup_step##1{\tabl_ntb_parameters_set[\c!x##1][#2]}%
+ \processcommalist[#1]\tabl_ntb_setup_step}
+
+\installcorenamespace{naturaltablesetupthree}
+\installcorenamespace{naturaltablesetuptwo}
+
+\def\tabl_ntb_setup_three[#1]{\csname\??naturaltablesetupthree\ifcsname\??naturaltablesetupthree#1\endcsname#1\else\s!unknown\fi\endcsname[#1]}
+\def\tabl_ntb_setup_two [#1]{\csname\??naturaltablesetuptwo \ifcsname\??naturaltablesetuptwo #1\endcsname#1\else\s!unknown\fi\endcsname[#1]}
+
+\setvalue{\??naturaltablesetupthree \v!row}[#1]{\tabl_ntb_setup_xy [\c!y]}
+\setvalue{\??naturaltablesetupthree \v!column}[#1]{\tabl_ntb_setup_xy [\c!x]}
+\setvalue{\??naturaltablesetupthree \v!start}[#1]{\tabl_ntb_setup_xy [\v!start]}
+\setvalue{\??naturaltablesetupthree \v!header}[#1]{\tabl_ntb_setup_xy [\v!header]}
+
+\setvalue{\??naturaltablesetuptwo \v!row}[#1]{\tabl_ntb_setup_each[\c!y]}
+\setvalue{\??naturaltablesetuptwo \v!column}[#1]{\tabl_ntb_setup_each[\c!x]}
+\setvalue{\??naturaltablesetuptwo \v!start}[#1]{\tabl_ntb_setup_each[\v!start]}
+\setvalue{\??naturaltablesetuptwo \v!header}[#1]{\tabl_ntb_setup_each[\v!header]}
+
+\letvalue{\??naturaltablesetupthree\s!unknown}\tabl_ntb_setup_un
+\letvalue{\??naturaltablesetuptwo \s!unknown}\tabl_ntb_setup_ux
+
+\letcsnamecsname\csname\??naturaltablesetupthree r\endcsname\csname\??naturaltablesetupthree \v!row\endcsname
+\letcsnamecsname\csname\??naturaltablesetupthree c\endcsname\csname\??naturaltablesetupthree\v!column\endcsname
+\letcsnamecsname\csname\??naturaltablesetupthree y\endcsname\csname\??naturaltablesetupthree \v!row\endcsname
+\letcsnamecsname\csname\??naturaltablesetupthree x\endcsname\csname\??naturaltablesetupthree\v!column\endcsname
+
+\letcsnamecsname\csname\??naturaltablesetuptwo r\endcsname\csname\??naturaltablesetuptwo \v!row\endcsname
+\letcsnamecsname\csname\??naturaltablesetuptwo c\endcsname\csname\??naturaltablesetuptwo \v!column\endcsname
+\letcsnamecsname\csname\??naturaltablesetuptwo y\endcsname\csname\??naturaltablesetuptwo \v!row\endcsname
+\letcsnamecsname\csname\??naturaltablesetuptwo x\endcsname\csname\??naturaltablesetuptwo \v!column\endcsname
+
+\def\tabl_ntb_parameters_set[#1][#2]%
+ {\ifappendTBLsetups
+ \ifcsname\??naturaltableset\m_tabl_tbl_level:#1\endcsname
+ \def\tabl_ntb_parameters_get[##1]%
+ {\setvalue{\??naturaltableset\m_tabl_tbl_level:#1}{\tabl_ntb_parameters_get[##1,#2]}}%
+ \getvalue{\??naturaltableset\m_tabl_tbl_level:#1}%
+ \let\tabl_ntb_parameters_get\setupcurrentnaturaltablelocal
+ \else
+ \setvalue{\??naturaltableset\m_tabl_tbl_level:#1}{\tabl_ntb_parameters_get[#2]}%
+ \fi
\else
- \setvalue{\@@tblprefix#1}{\getTABLEparameters[\@@tbl\@@tbl][#2]}%
- \fi
- \popTBLparameters}
-
-\let\setupTBLsection\relax
+ \setvalue{\??naturaltableset\m_tabl_tbl_level:#1}{\tabl_ntb_parameters_get[#2]}%
+ \fi}
% % \setupTABLE [y] [first][background=color,backgroundcolor=blue,frame=off,bottomframe=on,topframe=on,framecolor=white]
% \setupTABLE [first][first][backgroundcorner=2,corner=10,frame=on]
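%
% As a rough sketch of what the one, two and three argument dispatch above
% accepts (illustrative values, not taken from the original examples):
%
% \setupTABLE[frame=on]                    % one argument: table defaults
% \setupTABLE[row][background=color]       % two arguments: every row
% \setupTABLE[column][2][width=3cm]        % three arguments: column 2 only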
@@ -382,87 +448,91 @@
%D
%D \start \tightTBLrowspanfalse \getbuffer \stop
-\unexpanded\def\setupTBLcell#1#2% cell over col over row
- {\setupTBLsection % already forgotten
- \edef\positiverow{\number#1}%
- \edef\positivecol{\number#2}%
- \edef\negativerow{\the\numexpr-\maximumrow+#1+\minusone\relax}%
- \edef\negativecol{\the\numexpr-\maximumcol+#2+\minusone\relax}%
+\let\tabl_ntb_setup_section\relax
+
+\unexpanded\def\tabl_ntb_setup_cell#1#2% cell over col over row
+ {\tabl_ntb_setup_section % already forgotten
+ \edef\m_tabl_ntb_positive_row{\number#1}%
+ \edef\m_tabl_ntb_positive_col{\number#2}%
+ \edef\m_tabl_ntb_negative_row{\the\numexpr-\c_tabl_ntb_maximum_row+#1+\minusone\relax}%
+ \edef\m_tabl_ntb_negative_col{\the\numexpr-\c_tabl_ntb_maximum_col+#2+\minusone\relax}%
+ % saves tokens (no speed gain)
+ \edef\m_tabl_ntb_prefix{\??naturaltableset\m_tabl_tbl_level:}%
% each each
- \csname\@@tblprefix\c!x\v!each\c!y\v!each\endcsname
- \csname\@@tblprefix\c!y\v!each\endcsname
- \csname\@@tblprefix\c!x\v!each\endcsname
+ \csname\m_tabl_ntb_prefix\c!x\v!each\c!y\v!each\endcsname
+ \csname\m_tabl_ntb_prefix\c!y\v!each\endcsname
+ \csname\m_tabl_ntb_prefix\c!x\v!each\endcsname
% odd even
- \csname\@@tblprefix\c!y\v!oddeven\positiverow\endcsname
- \csname\@@tblprefix\c!x\v!oddeven\positivecol\endcsname
- \csname\@@tblprefix\c!x\v!oddeven\positivecol\c!y\v!oddeven\positiverow\endcsname
+ \csname\m_tabl_ntb_prefix\c!y\v!oddeven\m_tabl_ntb_positive_row\endcsname
+ \csname\m_tabl_ntb_prefix\c!x\v!oddeven\m_tabl_ntb_positive_col\endcsname
+ \csname\m_tabl_ntb_prefix\c!x\v!oddeven\m_tabl_ntb_positive_col\c!y\v!oddeven\m_tabl_ntb_positive_row\endcsname
% row/col number combinations
- \ifcsname\@@tblprefix\c!y\positiverow\endcsname\csname\@@tblprefix\c!y\positiverow\endcsname\fi
- \ifcsname\@@tblprefix\c!y\negativerow\endcsname\csname\@@tblprefix\c!y\negativerow\endcsname\fi
- \csname\@@tbl\@@tbl\c!extras\endcsname
- \@EA\let\csname\@@tbl\@@tbl\c!extras\endcsname\relax % new, see x-fo
- \ifcsname\@@tblprefix\c!x\positivecol\endcsname\csname\@@tblprefix\c!x\positivecol\endcsname\fi
- \ifcsname\@@tblprefix\c!x\negativecol\endcsname\csname\@@tblprefix\c!x\negativecol\endcsname\fi
- \csname\@@tbl\@@tbl\c!extras\endcsname
- \@EA\let\csname\@@tbl\@@tbl\c!extras\endcsname\relax % new, see x-fo
+ \ifcsname\m_tabl_ntb_prefix\c!y\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!y\m_tabl_ntb_positive_row\endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\c!y\m_tabl_ntb_negative_row\endcsname\csname\m_tabl_ntb_prefix\c!y\m_tabl_ntb_negative_row\endcsname\fi
+ \naturaltablelocalparameter\c!extras
+ \letnaturaltablelocalparameter\c!extras\relax % new, see x-fo
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_negative_col\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_negative_col\endcsname\fi
+ \naturaltablelocalparameter\c!extras
+ \letnaturaltablelocalparameter\c!extras\relax % new, see x-fo
% first/last combinations
- \ifnum\positiverow=\plusone
- \csname\@@tblprefix\c!y\v!first\endcsname
- \ifcsname\@@tblprefix\c!x\positivecol\c!y\v!first\endcsname\csname\@@tblprefix\c!x\positivecol\c!y\v!first\endcsname\fi
+ \ifnum\m_tabl_ntb_positive_row=\plusone
+ \csname\m_tabl_ntb_prefix\c!y\v!first\endcsname
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\v!first\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\v!first\endcsname\fi
\fi
- \ifnum\positivecol=\plusone
- \csname\@@tblprefix\c!x\v!first\endcsname
- \ifcsname\@@tblprefix\c!x\v!first\c!y\positiverow\endcsname\csname\@@tblprefix\c!x\v!first\c!y\positiverow\endcsname\fi
+ \ifnum\m_tabl_ntb_positive_col=\plusone
+ \csname\m_tabl_ntb_prefix\c!x\v!first\endcsname
+ \ifcsname\m_tabl_ntb_prefix\c!x\v!first\c!y\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!x\v!first\c!y\m_tabl_ntb_positive_row\endcsname\fi
\fi
- \ifnum\positiverow=\maximumrow\relax
- \csname\@@tblprefix\c!y\v!last\endcsname
- \ifcsname\@@tblprefix\c!x\positivecol\c!y\v!last\endcsname\csname\@@tblprefix\c!x\positivecol\c!y\v!last\endcsname\fi
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax
+ \csname\m_tabl_ntb_prefix\c!y\v!last\endcsname
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\v!last\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\v!last\endcsname\fi
\fi
- \ifnum\positivecol=\maximumcol\relax
- \csname\@@tblprefix\c!x\v!last\endcsname
- \ifcsname\@@tblprefix\c!x\v!last\c!y\positiverow\endcsname\csname\@@tblprefix\c!x\v!last\c!y\positiverow\endcsname\fi
+ \ifnum\m_tabl_ntb_positive_col=\c_tabl_ntb_maximum_col\relax
+ \csname\m_tabl_ntb_prefix\c!x\v!last\endcsname
+ \ifcsname\m_tabl_ntb_prefix\c!x\v!last\c!y\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!x\v!last\c!y\m_tabl_ntb_positive_row\endcsname\fi
\fi
- \ifnum\positiverow=\maximumrow\relax \ifnum\positivecol=\maximumcol\relax
- \csname\@@tblprefix\c!x\v!last\c!y\v!last\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax \ifnum\m_tabl_ntb_positive_col=\c_tabl_ntb_maximum_col\relax
+ \csname\m_tabl_ntb_prefix\c!x\v!last\c!y\v!last\endcsname
\fi\fi
- \ifnum\positiverow=\plusone \ifnum\positivecol=\plusone
- \csname\@@tblprefix\c!x\v!first\c!y\v!first\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\plusone \ifnum\m_tabl_ntb_positive_col=\plusone
+ \csname\m_tabl_ntb_prefix\c!x\v!first\c!y\v!first\endcsname
\fi\fi
- \ifnum\positiverow=\plusone \ifnum\positivecol=\maximumcol\relax
- \csname\@@tblprefix\c!x\v!last\c!y\v!first\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\plusone \ifnum\m_tabl_ntb_positive_col=\c_tabl_ntb_maximum_col\relax
+ \csname\m_tabl_ntb_prefix\c!x\v!last\c!y\v!first\endcsname
\fi\fi
- \ifnum\positiverow=\maximumrow\relax \ifnum\positivecol=\plusone
- \csname\@@tblprefix\c!x\v!first\c!y\v!last\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax \ifnum\m_tabl_ntb_positive_col=\plusone
+ \csname\m_tabl_ntb_prefix\c!x\v!first\c!y\v!last\endcsname
\fi\fi
% special case: two rows and last row : two&first and two&last (round corners)
- \ifnum\maximumrow=\plustwo\relax
- \ifnum\positiverow=\maximumrow\relax \ifnum\positivecol=\plusone
- \csname\@@tblprefix\c!x\v!first\c!y\v!two\endcsname
+ \ifnum\c_tabl_ntb_maximum_row=\plustwo\relax
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax \ifnum\m_tabl_ntb_positive_col=\plusone
+ \csname\m_tabl_ntb_prefix\c!x\v!first\c!y\v!two\endcsname
\fi\fi
- \ifnum\positiverow=\maximumrow\relax \ifnum\positivecol=\maximumcol\relax
- \csname\@@tblprefix\c!x\v!last\c!y\v!two\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax \ifnum\m_tabl_ntb_positive_col=\c_tabl_ntb_maximum_col\relax
+ \csname\m_tabl_ntb_prefix\c!x\v!last\c!y\v!two\endcsname
\fi\fi
\fi
- \ifnum\gettblcol\positiverow\positivecol=\maximumcol\relax % top span over whole width
- \ifnum\positiverow=\plusone
- \csname\@@tblprefix\c!x\v!one\c!y\v!first\endcsname
+ \ifnum\tabl_ntb_get_col\m_tabl_ntb_positive_row\m_tabl_ntb_positive_col=\c_tabl_ntb_maximum_col\relax % top span over whole width
+ \ifnum\m_tabl_ntb_positive_row=\plusone
+ \csname\m_tabl_ntb_prefix\c!x\v!one\c!y\v!first\endcsname
\fi
- \ifnum\positiverow=\maximumrow\relax
- \csname\@@tblprefix\c!x\v!one\c!y\v!last\endcsname
+ \ifnum\m_tabl_ntb_positive_row=\c_tabl_ntb_maximum_row\relax
+ \csname\m_tabl_ntb_prefix\c!x\v!one\c!y\v!last\endcsname
\fi
\fi
% header things
- \ifnum#1>\noftblhdnxlines\else
- \ifcsname\@@tblprefix\v!header\v!each \endcsname\csname\@@tblprefix\v!header\v!each \endcsname\fi
- \ifcsname\@@tblprefix\v!header\positivecol\endcsname\csname\@@tblprefix\v!header\positivecol\endcsname\fi
+ \ifnum#1>\c_tabl_ntb_n_of_hdnx_lines\else
+ \ifcsname\m_tabl_ntb_prefix\v!header\v!each \endcsname\csname\m_tabl_ntb_prefix\v!header\v!each \endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\v!header\m_tabl_ntb_positive_col\endcsname\csname\m_tabl_ntb_prefix\v!header\m_tabl_ntb_positive_col\endcsname\fi
\fi
% explicit cells
- \ifcsname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\csname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\fi
- \ifcsname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\csname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_positive_col\c!y\m_tabl_ntb_positive_row\endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_negative_col\c!y\m_tabl_ntb_negative_row\endcsname\csname\m_tabl_ntb_prefix\c!x\m_tabl_ntb_negative_col\c!y\m_tabl_ntb_negative_row\endcsname\fi
% local
- \ifcsname\@@tblprefix\c!y++\positiverow\endcsname\csname\@@tblprefix\c!y++\positiverow\endcsname\fi
+ \ifcsname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\fi
% done
- \global\letcscsname\@@tblsplitafter\csname\@@tbl\@@tbl\c!after\endcsname
+ \xdef\m_tabl_ntb_after_split{\naturaltablelocalparameter\c!after}% to be checked
\relax}
% we cannot use +n (checking on number/last/first would slow down too much)
@@ -478,7 +548,7 @@
% \dorecurse{10}{\bTR \dorecurse{6}{\bTD xxx \eTD} \eTR}
% \eTABLE
-\globallet\@@tblsplitafter\relax
+\let\m_tabl_ntb_after_split\relax
% split + page:
%
@@ -488,101 +558,152 @@
% \bTR \bTD left \eTD\bTD right \eTD\eTR
% \eTABLE
-% todo: protect counters
+\unexpanded\def\tabl_ntb_tr
+ {\c_tabl_ntb_running_col\zerocount
+ \advance\c_tabl_ntb_maximum_row\plusone
+ \iffirstargument
+ \expandafter\tabl_ntb_tr_yes
+ \else
+ \expandafter\gobbleoneoptional
+ \fi}
-\newcount\row \newcount\col
-\newcount\xrow \newcount\xcol
-\newcount\xxrow \newcount\xxcol
-\newcount\maximumrow \newcount\maximumcol \newcount\maximumrowspan
- \newcount\currentcol
-\newcount\tblspn
+\def\tabl_ntb_tr_yes[#1]%
+ {\setvalue{\??naturaltableset\m_tabl_tbl_level:\c!y++\the\c_tabl_ntb_maximum_row}{\setupcurrentnaturaltablelocal[#1]}}
-\def\settblref#1#2{\expandafter\xdef\csname\@@tblprefix\number#1:\number#2:x\endcsname}
-\def\gettblref#1#2{\ifcsname\@@tblprefix\number#1:\number#2:x\endcsname\csname\@@tblprefix\number#1:\number#2:x\endcsname\fi}
+\def\m_tabl_ntb_default_nr{\naturaltableparameter\c!nr}
+\def\m_tabl_ntb_default_nc{\naturaltableparameter\c!nc}
-\def\parseTR[#1]%
- {\currentcol\zerocount
- \advance\maximumrow\plusone
- \iffirstargument
- \setvalue{\@@tblprefix\c!y++\number\maximumrow}{\getparameters[\@@tbl\@@tbl][#1]}% maybe also in mkii
+\unexpanded\def\tabl_ntb_td
+ {\iffirstargument
+ \expandafter\tabl_ntb_td_yes
+ \else
+ \expandafter\tabl_ntb_td_nop
\fi}
-\long\def\parseTD[#1]#2\eTD
- {\def\tblny{\tblnr}%
- \def\tblnx{\tblnc}%
- \let\tblnc\plusone
- \let\tblnr\plusone
- \let\tbln\currentcol
- \let\tblm\empty
- \iffirstargument
- \getparameters[\@@tbl][#1]%
- \fi
- % goto first cell % NEW, n/m=cellnumber
- \edef\@@tblnindeed{\csname\@@tbl\c!n\endcsname}%
- \ifx\@@tblnindeed\empty
- \global\advance\tblspn\tblnx\relax
- \else\ifnum\@@tblnindeed=\currentcol\else
- \scratchcounter\numexpr\@@tblnindeed-\currentcol+\minusone-\tblspn\relax
- \ifnum\scratchcounter>\zerocount
- \normalexpanded{\noexpand\parseTD[\c!nx=\the\scratchcounter,\c!n=,\c!m=,*sq=\v!no]}\eTD
- \fi
- % can also be made faster
- \getparameters[\@@tbl][\c!ny=\tblnr,\c!nx=\tblnc,nc=1,nr=1,#1,\c!n=,\c!m=]%
+\def\tabl_ntb_td_yes[#1]#2\eTD
+ {\letnaturaltableparameter\c!ny \m_tabl_ntb_default_nr
+ \letnaturaltableparameter\c!nx \m_tabl_ntb_default_nc
+ \letnaturaltableparameter\c!nc \plusone
+ \letnaturaltableparameter\c!nr \plusone
+ \letnaturaltableparameter\c!n \c_tabl_ntb_running_col
+ \letnaturaltableparameter\c!m \empty
+ \letnaturaltableparameter\c!action\empty % not that important
+ \setupcurrentnaturaltable[#1]%
+ %
+ \c_tabl_ntb_nx\naturaltableparameter\c!nx\relax
+ \c_tabl_ntb_ny\naturaltableparameter\c!ny\relax
+ % goto first cell n/m=cellnumber
+ \edef\m_tabl_ntb_n{\naturaltableparameter\c!n}%
+ \edef\m_tabl_ntb_m{\naturaltableparameter\c!m}%
+ %
+ \ifx\m_tabl_ntb_n\empty
+ \global\advance\c_tabl_ntb_spn\c_tabl_ntb_nx\relax
+ \else\ifnum\m_tabl_ntb_n=\c_tabl_ntb_running_col\else
+ \tabl_ntb_td_pass_n{#1}%
\fi\fi
- \edef\@@tblmindeed{\csname\@@tbl\c!m\endcsname}%
- \ifx\@@tblmindeed\empty \else
- \ifnum\@@tblmindeed=\currentcol \else
- \scratchcounter\numexpr\@@tblmindeed-\currentcol+\minusone-\tblspn\relax
- \dorecurse\scratchcounter{\normalexpanded{\noexpand\parseTD[\c!n=,\c!m=]}\eTD}%
- % can be sped up
- \getparameters[\@@tbl][\c!ny=\tblnr,\c!nx=\tblnc,nc=1,nr=1,#1,\c!n=,\c!m=]% kind of double, see prev
+ \ifx\m_tabl_ntb_m\empty \else
+ \ifnum\m_tabl_ntb_m=\c_tabl_ntb_running_col\else
+ \tabl_ntb_td_pass_m{#1}%
\fi
\fi
\doloop % skip over columns that result from earlier span
- {\advance\currentcol\plusone
- \doifnottbltag\maximumrow\currentcol\exitloop}%
- % == \def\next{\advance\currentcol\plusone\doiftbltag\maximumrow\currentcol\next}\next
+ {\advance\c_tabl_ntb_running_col\plusone
+ \ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\endcsname \else
+ \exitloop
+ \fi}%
% fill r*c cells and set span
- \ifnum\tblnx=\plusone
- \ifnum\tblny=\plusone
- \ifnum\currentcol>\maximumcol\relax
- \maximumcol\currentcol
+ \c_tabl_ntb_nx\naturaltableparameter\c!nx\relax
+ \c_tabl_ntb_ny\naturaltableparameter\c!ny\relax
+ \ifnum\c_tabl_ntb_nx=\plusone
+ \ifnum\c_tabl_ntb_ny=\plusone
+ \ifnum\c_tabl_ntb_running_col>\c_tabl_ntb_maximum_col\relax
+ \c_tabl_ntb_maximum_col\c_tabl_ntb_running_col
\fi
\else
- \presetTBLcell
+ \tabl_ntb_cell_preset
\fi
\else
- \presetTBLcell
+ \tabl_ntb_cell_preset
\fi
% set values
- \lettbltag\maximumrow\currentcol\tblcell
- \settblcol\maximumrow\currentcol{\number\tblnx}%
- \settblrow\maximumrow\currentcol{\number\tblny}%
+ \tabl_ntb_let_tag\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\c_tabl_ntb_cell
+ \tabl_ntb_set_col\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_nx}%
+ \tabl_ntb_set_row\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_ny}%
% the action key will change!
- \settblref\maximumrow\currentcol{\ifcsname\@@tbl\c!action\endcsname\csname\@@tbl\c!action\endcsname\fi}%
+ \tabl_ntb_set_ref\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\naturaltableparameter\c!action}%
% save text
- \edef\celltag{{\number\maximumrow}{\number\currentcol}}%
- \@EA\settbltxt\@EA\maximumrow\@EA\currentcol\@EA{\@EA\handleTBLcell\celltag[#1]{#2}}}
-
-\def\presetTBLcell
- {\row\maximumrow
- \col\currentcol
- \dorecurse\tblny
- {\col\currentcol
- \settblcol\row\col{\number\tblnx}%
- \ifnum\tblnx>\maximumrowspan\relax
- \maximumrowspan\tblnx
- \fi
- \dorecurse\tblnx
- {\lettbltag\row\col\tblnone
- \advance\col\plusone}%
- \advance\row\plusone}%
+ \normalexpanded
+ {\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
+ {#1}{#2}}
+
+\def\tabl_ntb_td_nop[#1]#2\eTD
+ {\global\advance\c_tabl_ntb_spn\plusone\relax
+ \doloop
+ {\advance\c_tabl_ntb_running_col\plusone
+ \ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\endcsname \else
+ \exitloop
+ \fi}%
+ \c_tabl_ntb_nx\plusone
+ \c_tabl_ntb_ny\plusone
+ \ifnum\c_tabl_ntb_running_col>\c_tabl_ntb_maximum_col\relax
+ \c_tabl_ntb_maximum_col\c_tabl_ntb_running_col
+ \fi
+ \tabl_ntb_let_tag\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\c_tabl_ntb_cell
+ \tabl_ntb_set_col\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_nx}%
+ \tabl_ntb_set_row\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_ny}%
+ \tabl_ntb_let_ref\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\empty
+ \normalexpanded
+ {\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
+ {#1}{#2}}
+
+\def\tabl_ntb_td_pass_n#1%
+ {\scratchcounter\numexpr\m_tabl_ntb_n-\c_tabl_ntb_running_col+\minusone-\c_tabl_ntb_spn\relax
+ \ifnum\scratchcounter>\zerocount
+ \normalexpanded{\tabl_ntb_td[\c!nx=\the\scratchcounter,\c!n=,\c!m=,*sq=\v!no]}\eTD
+ \fi
+ \letnaturaltableparameter\c!ny\m_tabl_ntb_default_nr
+ \letnaturaltableparameter\c!nx\m_tabl_ntb_default_nc
+ \letnaturaltableparameter\c!nc\plusone
+ \letnaturaltableparameter\c!nr\plusone
+ \setupcurrentnaturaltable[#1]%
+ \letnaturaltableparameter\c!n \empty
+ \letnaturaltableparameter\c!m \empty}
+
+\def\tabl_ntb_td_pass_m#1%
+ {\scratchcounter\numexpr\m_tabl_ntb_m-\c_tabl_ntb_running_col+\minusone-\c_tabl_ntb_spn\relax
+ \dorecurse\scratchcounter{\normalexpanded{\tabl_ntb_td[\c!n=,\c!m=]}\eTD}%
+ % can be sped up
+ \letnaturaltableparameter\c!ny\m_tabl_ntb_default_nr
+ \letnaturaltableparameter\c!nx\m_tabl_ntb_default_nc
+ \letnaturaltableparameter\c!nc\plusone
+ \letnaturaltableparameter\c!nr\plusone
+ \setupcurrentnaturaltable[#1]%
+ \letnaturaltableparameter\c!n \empty
+ \letnaturaltableparameter\c!m \empty}
+
+\def\tabl_ntb_cell_preset
+ {\c_tabl_ntb_current_row\c_tabl_ntb_maximum_row
+ \c_tabl_ntb_current_col\c_tabl_ntb_running_col
+ \dorecurse\c_tabl_ntb_ny\tabl_ntb_cell_preset_rows
% check max column
- \advance\col\minusone
- \ifnum\col>\maximumcol\relax
- \maximumcol\col
+ \advance\c_tabl_ntb_current_col\minusone
+ \ifnum\c_tabl_ntb_current_col>\c_tabl_ntb_maximum_col\relax
+ \c_tabl_ntb_maximum_col\c_tabl_ntb_current_col
\fi}
+\def\tabl_ntb_cell_preset_rows
+ {\c_tabl_ntb_current_col\c_tabl_ntb_running_col
+ \tabl_ntb_set_col\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_nx}%
+ \ifnum\c_tabl_ntb_nx>\c_tabl_ntb_maximum_row_span\relax
+ \c_tabl_ntb_maximum_row_span\c_tabl_ntb_nx
+ \fi
+ \dorecurse\c_tabl_ntb_nx\tabl_ntb_cell_preset_cells
+ \advance\c_tabl_ntb_current_row\plusone}
+
+\def\tabl_ntb_cell_preset_cells
+ {\tabl_ntb_let_tag\c_tabl_ntb_current_row\c_tabl_ntb_current_col\c_tabl_ntb_none
+ \advance\c_tabl_ntb_current_col\plusone}
+
%D The usage of n and m:
%D
%D \startbuffer
@@ -617,13 +738,13 @@
%D \stopbuffer
%D
%D \typebuffer \getbuffer
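%D
%D A supplementary sketch (hypothetical values, not the original example):
%D with n=3 the cell is moved to column 3 and the gap is bridged by one
%D spanned empty cell, while m=3 fills the skipped columns with separate
%D empty cells:
%D
%D \starttyping
%D \bTABLE
%D   \bTR \bTD 1,1 \eTD \bTD[n=3] 1,3 \eTD \eTR
%D   \bTR \bTD 2,1 \eTD \bTD[m=3] 2,3 \eTD \eTR
%D \eTABLE
%D \stoptyping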
-
-\long\def\parseTH[#1]#2\eTH
- {\parseTD[#1,\c!color=\tbltblheadcolor,\c!style=\tbltblheadstyle,\c!aligncharacter=\v!no]#2\eTD}
-\long\def\parseTN[#1]#2\eTN
- {\parseTD[#1]\digits#2\relax\eTD}
-
+\def\tabl_ntb_th[#1]#2\eTH
+ {\tabl_ntb_td[#1,\c!color=\naturaltablelocalparameter\c!headcolor,\c!style=\naturaltablelocalparameter\c!headstyle,\c!aligncharacter=\v!no]#2\eTD}
+
+\def\tabl_ntb_tn[#1]#2\eTN
+ {\tabl_ntb_td[#1]\digits#2\relax\eTD}
+
%D Vit Zyka needed the option to create a distance between columns, so I
%D added support for individual column distances.
%D
@@ -668,86 +789,94 @@
%D \stopbuffer
%D
%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
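%D
%D A supplementary sketch (hypothetical values; the per column key is assumed
%D to be distance, in line with the dis administration used below):
%D
%D \starttyping
%D \setupTABLE[c][1][distance=2em] % assumed key: extra room after column 1
%D \bTABLE
%D   \bTR \bTD one \eTD \bTD two \eTD \bTD three \eTD \eTR
%D \eTABLE
%D \stoptyping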
-
-\newtoks\TBLhead
-\newtoks\TBLnext
-\newtoks\TBLbody
-\newtoks\TBLfoot
% to be done: head <raw> foot, so a state var
-\unexpanded\def\bTABLEhead{\dosingleempty\doTABLEhead} \let\eTABLEhead\relax
-\unexpanded\def\bTABLEnext{\dosingleempty\doTABLEnext} \let\eTABLEnext\relax
-\unexpanded\def\bTABLEbody{\dosingleempty\doTABLEbody} \let\eTABLEbody\relax
-\unexpanded\def\bTABLEfoot{\dosingleempty\doTABLEfoot} \let\eTABLEfoot\relax
+\unexpanded\def\bTABLEhead{\dosingleempty\tabl_ntb_head} \let\eTABLEhead\relax
+\unexpanded\def\bTABLEnext{\dosingleempty\tabl_ntb_next} \let\eTABLEnext\relax
+\unexpanded\def\bTABLEbody{\dosingleempty\tabl_ntb_body} \let\eTABLEbody\relax
+\unexpanded\def\bTABLEfoot{\dosingleempty\tabl_ntb_foot} \let\eTABLEfoot\relax
-\long\def\doTABLEhead[#1]#2\eTABLEhead{\appendtoks\doTABLEsection[#1]{#2}\to\TBLhead}
-\long\def\doTABLEnext[#1]#2\eTABLEnext{\appendtoks\doTABLEsection[#1]{#2}\to\TBLnext}
-\long\def\doTABLEbody[#1]#2\eTABLEbody{\appendtoks\doTABLEsection[#1]{#2}\to\TBLbody}
-\long\def\doTABLEfoot[#1]#2\eTABLEfoot{\appendtoks\doTABLEsection[#1]{#2}\to\TBLfoot}
+\def\tabl_ntb_head[#1]#2\eTABLEhead{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_head}
+\def\tabl_ntb_next[#1]#2\eTABLEnext{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_next}
+\def\tabl_ntb_body[#1]#2\eTABLEbody{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_body}
+\def\tabl_ntb_foot[#1]#2\eTABLEfoot{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_foot}
-\long\def\doTABLEsection[#1]#2%
- {\unexpanded\def\setupTBLsection{\getparameters[\@@tbl\@@tbl][#1]}%
+\def\tabl_ntb_section[#1]#2% also used in tabl-nte
+ {\unexpanded\def\tabl_ntb_setup_section{\setupcurrentnaturaltablelocal[#1]}%
#2%
- \let\setupTBLsection\relax}
-
-\let\pushTBL\relax
-\let\popTBL \relax
-
-\newconstant\tblpass
+ \let\tabl_ntb_setup_section\relax}
-\def\presetallTABLEparameters% each odd|even level / can be sped up but only once per table
- {\executeifdefined{\@@rawtblprefix\v!start\v!each}\relax
- \executeifdefined{\@@rawtblprefix\v!start\v!oddeven\TBLlevel}\relax
- \executeifdefined{\@@rawtblprefix\v!start\number\TBLlevel}\relax}
+\def\tabl_ntb_preset_parameters% each odd|even level / can be sped up but only once per table
+ {\ifcsname\??naturaltableset\m_tabl_tbl_level:\v!start\v!each\endcsname
+ \csname\??naturaltableset\m_tabl_tbl_level:\v!start\v!each\endcsname
+ \fi
+ \ifcsname\??naturaltableset\m_tabl_tbl_level:\v!start\v!oddeven\m_tabl_tbl_level\endcsname % hm
+ \csname\??naturaltableset\m_tabl_tbl_level:\v!start\v!oddeven\m_tabl_tbl_level\endcsname
+ \fi
+ \ifcsname\??naturaltableset\m_tabl_tbl_level:\v!start\m_tabl_tbl_level\endcsname % hm
+ \csname\??naturaltableset\m_tabl_tbl_level:\v!start\m_tabl_tbl_level\endcsname
+ \fi}
\unexpanded\def\bTABLE
- {\dosingleempty\dobTABLE}
+ {\dosingleempty\tabl_ntb_table}
-\def\dobTABLE[#1]%
- {\pushTBL
+\def\tabl_ntb_table[#1]%
+ {\tabl_ntb_table_push
% box not here
\bgroup
- \TBLhead\emptytoks
- \TBLnext\emptytoks
- \TBLbody\emptytoks
- \TBLfoot\emptytoks
+ \t_tabl_ntb_head\emptytoks
+ \t_tabl_ntb_next\emptytoks
+ \t_tabl_ntb_body\emptytoks
+ \t_tabl_ntb_foot\emptytoks
\ifhmode\kern\zeropoint\fi % blocks \removeunwantedspaces: check this on icare handelingsschema
\resetcharacteralign % new
- \getparameters
- [\@@tbl\@@tbl]
- [\c!align={\v!right,\v!broad,\v!high},#1]%
- \ifx\tbltblsetups\empty\else
- \doprocesslocalsetups\tbltblsetups
- \fi
- \hsize\tbltbltextwidth
+ \setupcurrentnaturaltablelocal[\c!align={\v!right,\v!broad,\v!high},#1]%
+ \usesetupsparameter\naturaltablelocalparameter
+ \doifelse{\naturaltablelocalparameter\c!textwidth}\v!local
+ {\hsize\availablehsize}
+ {\hsize\naturaltablelocalparameter\c!textwidth}%
\processaction
- [\tbltblsplit]
+ [\naturaltablelocalparameter\c!split]
[ \v!yes=>\enableTBLbreaktrue,
\v!repeat=>\enableTBLbreaktrue\multipleTBLheadstrue,
\v!auto=>\ifinsidesplitfloat\enableTBLbreaktrue\fi]
\processaction
- [\tbltblheader]
+ [\naturaltablelocalparameter\c!header]
[\v!repeat=>\multipleTBLheadstrue]%
- \presetallTABLEparameters
+ \tabl_ntb_preset_parameters
\processallactionsinset
- [\tbltbloption]
+ [\naturaltablelocalparameter\c!option]
[\v!stretch=>\autoTBLspreadtrue]%
- \linewidth\tbltblrulethickness % needs to be frozen
+ \linewidth\naturaltablelocalparameter\c!rulethickness % needs to be frozen
\dontcomplain
- \currentcol\zerocount
- \maximumrowspan\plusone
- \maximumcol\zerocount
- \maximumrow\zerocount
+ \c_tabl_ntb_running_col \zerocount
+ \c_tabl_ntb_maximum_col \zerocount
+ \c_tabl_ntb_maximum_row \zerocount
+ \c_tabl_ntb_maximum_row_span\plusone
+ \let\currentTABLErow \tabl_ntb_current_row
+ \let\currentTABLEcolumn\tabl_ntb_current_column
+ \let\nofTABLErows \tabl_ntb_n_of_rows
+ \let\nofTABLEcolumns \tabl_ntb_n_of_columns
\let\bTR\dobTR
\let\bTD\dobTD
\let\bTH\dobTH
\let\bTN\dobTN}
-\unexpanded\def\dobTR{\dosingleempty\parseTR}
-\unexpanded\def\dobTD{\dosingleempty\parseTD}
-\unexpanded\def\dobTH{\dosingleempty\parseTH}
-\unexpanded\def\dobTN{\dosingleempty\parseTN}
+\def\tabl_ntb_current_row {\m_tabl_ntb_positive_row}
+\def\tabl_ntb_current_column{\m_tabl_ntb_positive_col}
+\def\tabl_ntb_n_of_rows {\number\c_tabl_ntb_maximum_row}
+\def\tabl_ntb_n_of_columns {\number\c_tabl_ntb_maximum_col}
+
+\let\currentTABLErow \!!zerocount
+\let\currentTABLEcolumn\!!zerocount
+\let\nofTABLErows \!!zerocount
+\let\nofTABLEcolumns \!!zerocount
+
+\unexpanded\def\dobTR{\dosingleempty\tabl_ntb_tr} % also used in tabl-nte
+\unexpanded\def\dobTD{\dosingleempty\tabl_ntb_td} % also used in tabl-nte
+\unexpanded\def\dobTH{\dosingleempty\tabl_ntb_th} % also used in tabl-nte
+\unexpanded\def\dobTN{\dosingleempty\tabl_ntb_tn} % also used in tabl-nte
% permits \expanded{\bTD ... \eTD}
@@ -758,302 +887,348 @@
\unexpanded\def\eTABLE % beware, we need to get rid of spurious spaces when in hmode
{% tricky and dirty order -)
- \doifsometokselse\TBLhead % slow, better a flag
- {\the\TBLhead
- \edef\noftblheadlines{\number\maximumrow}%
- \doifsometokselse\TBLnext
- {\the\TBLnext
- \edef\noftblnextlines{\number\numexpr\maximumrow-\noftblheadlines\relax}}%
- {\let\noftblnextlines\zerocount}% was 1
- \edef\noftblhdnxlines{\number\maximumrow}}
- {\let\noftblheadlines\zerocount % was 1
- \let\noftblnextlines\zerocount
- \let\noftblhdnxlines\zerocount}%
- \the\TBLbody
- \the\TBLfoot
+ \doifsometokselse\t_tabl_ntb_head % slow, better a flag
+ {\the\t_tabl_ntb_head
+ \c_tabl_ntb_n_of_head_lines\c_tabl_ntb_maximum_row\relax
+ \doifsometokselse\t_tabl_ntb_next
+ {\the\t_tabl_ntb_next
+ \c_tabl_ntb_n_of_next_lines\numexpr\c_tabl_ntb_maximum_row-\c_tabl_ntb_n_of_head_lines\relax}%
+ {\c_tabl_ntb_n_of_next_lines\zerocount}% was 1
+ \c_tabl_ntb_n_of_hdnx_lines\c_tabl_ntb_maximum_row}
+ {\c_tabl_ntb_n_of_head_lines\zerocount % was 1
+ \c_tabl_ntb_n_of_next_lines\zerocount
+ \c_tabl_ntb_n_of_hdnx_lines\zerocount}%
+ \the\t_tabl_ntb_body
+ \the\t_tabl_ntb_foot
\removeunwantedspaces % only if hmode
% finish cells
- \dorecurse\maximumrow
- {\row\recurselevel\relax
- \dorecurse\maximumcol
- {\col\recurselevel\relax
- \doifnottbltag\row\col
- {\xxcol\col
- \xxrow\row
- \xrow\row
- \doloop
- {\xcol\col
- \doloop
- {\doifelsetbltag\xrow\xcol \exitloop
- {\advance\xcol\plusone
- \ifnum\xcol>\maximumcol\relax \exitloop \fi}}%
- \doifelsetbltag\xrow\xcol \exitloop
- {\xxrow\xrow \xxcol\xcol \advance\xrow\plusone
- \ifnum\xrow>\maximumrow \exitloop \fi}}%
- \ifnum\xxrow>\maximumrow\xxrow\maximumrow\fi
- \ifnum\xxcol>\maximumcol\xxcol\maximumcol\fi
- \xxrow\numexpr\xxrow-\row+\plusone\relax
- \xxcol\numexpr\xxcol-\col+\plusone\relax
- \xrow\row
- \dorecurse\xxrow
- {\xcol\col \settblcol\xrow\xcol{\number\xxcol}%
- \dorecurse\xxcol
- {\lettbltag\xrow\xcol\tblnone \advance\xcol\plusone}%
- \advance\xrow\plusone}%
- \lettbltag\row\col\tblcell
- \settblcol\row\col{\the\xxcol}%
- \settblrow\row\col{\the\xxrow}%
- \ifautoTBLemptycell
- \edef\celltag{{\number\row}{\number\col}}%
- \@EA\settbltxt\@EA\row\@EA\col\@EA{\@EA\handleTBLcell\celltag[]{\strut}}%
- \fi}}}%
+ \tabl_ntb_loop_one
% to be sure
- \dorecurse\maximumrow
- {\row\recurselevel\relax
- \dorecurse\maximumcol
- {\col\recurselevel\relax
- \doiftblrow\row\col
- {\scratchcounter\numexpr\maximumrow-\row+\plusone\relax
- \ifnum\gettblrow\row\col>\scratchcounter
- \settblrow\row\col{\the\scratchcounter}%
- \fi}%
- \lettblht\row\col\zeropoint
- \lettblwd\row\col\zeropoint
- \doifnottblcol\row\col{\lettblcol\row\col\zerocount}%
- \doifnottbltag\row\col{\lettbltag\row\col\tblnone}}}%
+ \tabl_ntb_loop_two
% check and do
- \ifcase\maximumcol\else
+ \ifcase\c_tabl_ntb_maximum_col\else
\startTBLprocessing
- \begTBL
- \dorecurse\maximumrow
- {\bTBL
- \row\recurselevel\relax
- \dorecurse\maximumcol
- {\col\recurselevel\relax
- \normalexpanded{\noexpand\doTBL{\number\row}{\number\col}}}%
- \eTBL}%
+ \tabl_ntb_table_start
+ \dorecurse\c_tabl_ntb_maximum_row
+ {\tabl_ntb_row_start
+ \c_tabl_ntb_current_row\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\c_tabl_ntb_current_col\recurselevel\relax
+ \normalexpanded{\tabl_ntb_cell{\the\c_tabl_ntb_current_row}{\the\c_tabl_ntb_current_col}}}%
+ \tabl_ntb_row_stop}%
\removeunwantedspaces % only if hmode
- \endTBL
+ \tabl_ntb_table_stop
\stopTBLprocessing
% wrong ! ! ! better to have an auto-offset-overlay
- % \ifnum\TBLlevel>1
+ % \ifnum\m_tabl_tbl_level>1
% \vskip-\strutdp
% \fi
\fi
% tracing
% \iftrue
% \blank \tttf
- % \dorecurse\maximumrow
- % {\row\recurselevel\relax
- % \dorecurse\maximumcol
- % {\col\recurselevel\relax
- % [r=\the\row,c=\the\col,h=\the\dimexpr\gettblht\row\col,w=\the\dimexpr\gettblwd\row\col]}%
+ % \dorecurse\c_tabl_ntb_maximum_row
+ % {\c_tabl_ntb_current_row\recurselevel\relax
+ % \dorecurse\c_tabl_ntb_maximum_col
+ % {\c_tabl_ntb_current_col\recurselevel\relax
+ % [r=\the\c_tabl_ntb_current_row,c=\the\c_tabl_ntb_current_col,h=\the\dimexpr\tabl_ntb_get_ht\c_tabl_ntb_current_row\c_tabl_ntb_current_col,w=\the\dimexpr\tabl_ntb_get_wd\c_tabl_ntb_current_row\c_tabl_ntb_current_col]}%
% \par}%
% \blank
% \fi
\egroup
- \popTBL}
+ \tabl_ntb_table_pop}
-\let\startTBLprocessing\relax
-\let\stopTBLprocessing \relax
+\def\tabl_ntb_loop_one
+ {\dorecurse\c_tabl_ntb_maximum_row{\tabl_ntb_loop_one_rows}}
-\newcount\prelocatedTBLrows % \prelocateTBLrows{1000} may speed up large tables
+\def\tabl_ntb_loop_one_rows
+ {\c_tabl_ntb_current_row\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_col\tabl_ntb_loop_one_cells}
-\def\bTBL{\tblrowtoks\emptytoks}
-\def\eTBL{\tbltoks\@EA\@EA\@EA{\@EA\the\@EA\tbltoks\@EA\begintblrow\the\tblrowtoks\endtblrow}}%
-
-\def\prelocateTBLerror
- {\writestatus\m!system{fatal error: use \string\prelocateTBLrows\space to increase table memory (now: \number\prelocatedTBLrows)}}
+\def\tabl_ntb_loop_one_cells
+ {\c_tabl_ntb_current_col\recurselevel\relax
+ \ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_current_row\c_tabl_ntb_current_col\endcsname \else
+ \tabl_ntb_loop_one_cells_indeed
+ \fi}
-\def\prelocateTBLrows#1% we start at zero so we have one to much, better play safe anyway
- {\dostepwiserecurse\prelocatedTBLrows{#1}\plusone{\expandafter\newtoks\csname tbl:\recurselevel\endcsname}%
- \def\bTBL
- {\ifnum\tblrow<\prelocatedTBLrows\relax
- \@EA\let\@EA\tblrowtoks\csname tbl:\the\tblrow\endcsname\tblrowtoks\emptytoks
+\def\tabl_ntb_loop_one_cells_indeed
+ {\c_tabl_ntb_current_col_two\c_tabl_ntb_current_col
+ \c_tabl_ntb_current_row_two\c_tabl_ntb_current_row
+ \c_tabl_ntb_current_row_one\c_tabl_ntb_current_row
+ \doloop
+ {\c_tabl_ntb_current_col_one\c_tabl_ntb_current_col
+ \doloop
+ {\ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_current_row_one\c_tabl_ntb_current_col_one\endcsname
+ \exitloop
+ \else
+ \advance\c_tabl_ntb_current_col_one\plusone
+ \ifnum\c_tabl_ntb_current_col_one>\c_tabl_ntb_maximum_col\relax
+ \exitloop
+ \fi
+ \fi}%
+ \ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_current_row_one\c_tabl_ntb_current_col_one\endcsname
+ \exitloop
\else
- \prelocateTBLerror
+ \c_tabl_ntb_current_row_two\c_tabl_ntb_current_row_one
+ \c_tabl_ntb_current_col_two\c_tabl_ntb_current_col_one
+ \advance\c_tabl_ntb_current_row_one\plusone
+ \ifnum\c_tabl_ntb_current_row_one>\c_tabl_ntb_maximum_row
+ \exitloop
+ \fi
\fi}%
- \def\eTBL
- {\tbltoks\@EA\@EA\@EA{\@EA\the\@EA\tbltoks\@EA\begintblrow\@EA\the\csname tbl:\the\tblrow\endcsname\endtblrow}}%
- \global\prelocatedTBLrows#1\relax}
+ \ifnum\c_tabl_ntb_current_row_two>\c_tabl_ntb_maximum_row\c_tabl_ntb_current_row_two\c_tabl_ntb_maximum_row\fi
+ \ifnum\c_tabl_ntb_current_col_two>\c_tabl_ntb_maximum_col\c_tabl_ntb_current_col_two\c_tabl_ntb_maximum_col\fi
+ \c_tabl_ntb_current_row_two\numexpr\c_tabl_ntb_current_row_two-\c_tabl_ntb_current_row+\plusone\relax
+ \c_tabl_ntb_current_col_two\numexpr\c_tabl_ntb_current_col_two-\c_tabl_ntb_current_col+\plusone\relax
+ \c_tabl_ntb_current_row_one\c_tabl_ntb_current_row
+ \dorecurse\c_tabl_ntb_current_row_two
+ {\c_tabl_ntb_current_col_one\c_tabl_ntb_current_col
+ \tabl_ntb_set_col\c_tabl_ntb_current_row_one\c_tabl_ntb_current_col_one{\the\c_tabl_ntb_current_col_two}%
+ \dorecurse\c_tabl_ntb_current_col_two
+ {\tabl_ntb_let_tag\c_tabl_ntb_current_row_one\c_tabl_ntb_current_col_one\c_tabl_ntb_none
+ \advance\c_tabl_ntb_current_col_one\plusone}%
+ \advance\c_tabl_ntb_current_row_one\plusone}%
+ \tabl_ntb_let_tag\c_tabl_ntb_current_row\c_tabl_ntb_current_col\c_tabl_ntb_cell
+ \tabl_ntb_set_col\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_current_col_two}%
+ \tabl_ntb_set_row\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_current_row_two}%
+ \ifautoTBLemptycell
+ \normalexpanded
+ {\tabl_ntb_set_txt_process\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_current_row}{\the\c_tabl_ntb_current_col}}%
+ {}{\strut}%
+ \fi}
+
+\def\tabl_ntb_loop_two
+ {\dorecurse\c_tabl_ntb_maximum_row\tabl_ntb_loop_two_rows}
+
+\def\tabl_ntb_loop_two_rows
+ {\c_tabl_ntb_current_row\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_col\tabl_ntb_loop_two_cells}
+
+\def\tabl_ntb_loop_two_cells
+ {\c_tabl_ntb_current_col\recurselevel\relax
+ \ifcsname\tabl_ntb_row_pattern\c_tabl_ntb_current_row\c_tabl_ntb_current_col\endcsname
+ \scratchcounter\numexpr\c_tabl_ntb_maximum_row-\c_tabl_ntb_current_row+\plusone\relax
+ \ifnum\tabl_ntb_get_row\c_tabl_ntb_current_row\c_tabl_ntb_current_col>\scratchcounter
+ \tabl_ntb_set_row\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\scratchcounter}%
+ \fi
+ \fi
+ \tabl_ntb_let_ht\c_tabl_ntb_current_row\c_tabl_ntb_current_col\zeropoint
+ \tabl_ntb_let_wd\c_tabl_ntb_current_row\c_tabl_ntb_current_col\zeropoint
+ \ifcsname\tabl_ntb_col_pattern\c_tabl_ntb_current_row\c_tabl_ntb_current_col\endcsname \else
+ \tabl_ntb_let_col\c_tabl_ntb_current_row\c_tabl_ntb_current_col\zerocount
+ \fi
+ \ifcsname\tabl_ntb_tag_pattern\c_tabl_ntb_current_row\c_tabl_ntb_current_col\endcsname \else
+ \tabl_ntb_let_tag\c_tabl_ntb_current_row\c_tabl_ntb_current_col\c_tabl_ntb_none
+ \fi}
+
+\let\startTBLprocessing\relax % public
+\let\stopTBLprocessing \relax % public
+
+\newcount\c_tabl_prelocated_rows % \prelocateTBLrows{1000} may speed up large tables
+
+\def\tabl_ntb_row_start{\t_tabl_ntb_row\emptytoks}
+\def\tabl_ntb_row_stop {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\t_tabl_ntb_row\tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_prelocate_error
+ {\writestatus\m!system{fatal error: use \string\prelocateTBLrows\space to increase table memory (now: \the\c_tabl_prelocated_rows)}}
% \prelocateTBLrows{1000} % may speed up large tables
+\installcorenamespace{naturaltabletok}
+
+\def\prelocateTBLrows#1% we start at zero so we have one too many, better play safe anyway
+ {\dostepwiserecurse\c_tabl_prelocated_rows{#1}\plusone{\expandafter\newtoks\csname\??naturaltabletok\recurselevel\endcsname}%
+ \def\tabl_ntb_row_start
+ {\ifnum\c_tabl_ntb_row<\c_tabl_prelocated_rows\relax
+ \expandafter\let\expandafter\t_tabl_ntb_row\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\t_tabl_ntb_row\emptytoks
+ \else
+ \tabl_ntb_prelocate_error
+ \fi}%
+ \def\tabl_ntb_row_stop
+ {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\tabl_ntb_row_align_stop}}}%
+ \global\c_tabl_prelocated_rows#1\relax}
+
% We use alignments to handle the empty (skipped) columns, so
% that we don't have to (re)calculate these.
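%
% A sketch (not literal source) of what the row token list looks like for a
% cell that spans three columns:
%
%   \tabl_ntb_pass <row> <col> \tabl_ntb_column_span \tabl_ntb_column_span
%   \tabl_ntb_column_skip \tabl_ntb_column_skip \tabl_ntb_column_next
%
% so the skipped alignment entries are joined with \span and only the column
% counter is advanced; nothing has to be remeasured for them.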
-\def\skiptblcol
- {\global\advance\tblcol\plusone}
+\def\tabl_ntb_column_skip
+ {\global\advance\c_tabl_ntb_col\plusone}
-\def\nexttblcol
- {\global\advance\tblcol\plusone
- \kern\tbltblcolumndistance
- &}
+\def\tabl_ntb_column_next
+ {\global\advance\c_tabl_ntb_col\plusone
+ \kern\naturaltablelocalparameter\c!columndistance
+ \aligntab}
-\def\spantblcol
+\def\tabl_ntb_column_span
{\span}
-\newcount\tblrow
-\newcount\tblcol
+\let\m_tabl_ntb_saved_row\!!zerocount
+\let\m_tabl_ntb_saved_col\!!zerocount
-\let\savedtblrow\!!zerocount
-\let\savedtblcol\!!zerocount
+\def\tabl_ntb_row_align_start
+ {\noalign{\tabl_ntb_row_align_reset}%
+ \tabl_ntb_column_next
+ \kern\dimexpr\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!columndistance\relax}
-\def\begintblrow
- {\noalign
- {\global\advance\tblrow\plusone
- \global\tblcol\zerocount
- \global\tblspn\zerocount}%
- % \iftrue
- % \bgroup\tbox{\tttf[\number\tblrow]}\egroup
- % \fi
- \nexttblcol
- \kern\dimexpr\tbltblleftmargindistance-\tbltblcolumndistance\relax}
+\unexpanded\def\tabl_ntb_row_align_reset
+ {\global\advance\c_tabl_ntb_row\plusone
+ \global\c_tabl_ntb_col\zerocount
+ \global\c_tabl_ntb_spn\zerocount}
-\def\endtblrow
- {\kern\dimexpr\tbltblrightmargindistance-\tbltblcolumndistance\relax
+\unexpanded\def\tabl_ntb_row_align_stop
+ {\kern\dimexpr\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
\crcr
\noalign
{\nointerlineskip
- \ifnum\tblrow>\noftblheadlines
- \ifnum\gettblnob\tblrow=\zerocount
+ \ifnum\c_tabl_ntb_row>\c_tabl_ntb_n_of_head_lines
+ \ifnum\tabl_ntb_get_nob\c_tabl_ntb_row=\zerocount
\allowbreak
\fi
\else
\allowbreak % else no proper head split off
\fi
\bgroup % protect local vars
- \@@tblsplitafter
+ \m_tabl_ntb_after_split
\egroup
\bgroup % protect local vars
- \scratchcounter\numexpr\tblrow+\plusone\relax
- \ifnum\scratchcounter>\noftblhdnxlines\relax
- \ifnum\scratchcounter<\maximumrow\relax
- \doifsomething\tbltblspaceinbetween{\blank[\tbltblspaceinbetween]}%
+ \scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
+ \ifnum\scratchcounter>\c_tabl_ntb_n_of_hdnx_lines\relax
+ \ifnum\scratchcounter<\c_tabl_ntb_maximum_row\relax
+ \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
+ {\blank[\naturaltablelocalparameter\c!spaceinbetween]}%
\fi
\fi
\egroup}}
-\def\begintbl
- {\global\tblspn\zerocount
- \global\tblcol\zerocount
- \global\tblrow\zerocount
- \global\advance\tblrow\minusone
+\def\tabl_ntb_flush_content
+ {\the\everyTABLEpass
+ \global\c_tabl_ntb_spn\zerocount
+ \global\c_tabl_ntb_col\zerocount
+ \global\c_tabl_ntb_row\zerocount
+ \global\advance\c_tabl_ntb_row\minusone
\tabskip\zeropoint
\dostarttagged\t!table\empty
\dostarttagged\t!tablerow\empty
\appendtoks\dostoptagged\dostarttagged\t!tablerow\empty\to\everycr
\halign\bgroup
- \registerparoptions
- % watch out: tagging the cell happens at the outer level (faster)
-% \ignorespaces##\unskip&&\dostarttagged\t!tablecell\empty\ignorespaces##\unskip\dostoptagged\cr} % one too many
- \ignorespaces##\unskip&&\ignorespaces##\unskip\cr} % one too many
-
-\def\endtbl
- {\dostoptagged
+ \registerparoptions
+ % watch out: tagging the cell happens at the outer level (faster)
+ \ignorespaces\alignmark\alignmark\unskip
+ \aligntab\aligntab
+ \ignorespaces\alignmark\alignmark\unskip
+ \cr % one too many
+ \the\t_tabl_ntb
+ \dostoptagged
\egroup
\dostoptagged}
-\setvalue{\tblnone TBL}#1#2%
- {\spanTBL{#1}{#2}}
-
-\setvalue{\tblcell TBL}#1#2%
- {\tblrowtoks\expandafter{\the\tblrowtoks\makeTBL #1 #2 }% space delimited -> less tokens
- \spanTBL{#1}{#2}}
-
-\def\spanTBL#1#2%
- {\scratchcounter\gettblcol{#1}{#2}\relax
+\setvalue{\??naturaltablecell\the\c_tabl_ntb_none}#1#2%
+ {\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\ifnum\scratchcounter>\zerocount
- \advance\scratchcounter \minusone
- \dorecurse\scratchcounter{\tblrowtoks\expandafter{\the\tblrowtoks\spantblcol}}%
- \dorecurse\scratchcounter{\tblrowtoks\expandafter{\the\tblrowtoks\skiptblcol}}%
- \tblrowtoks\expandafter{\the\tblrowtoks\nexttblcol}%
+ \advance\scratchcounter\minusone
+ \ifnum\scratchcounter>\zerocount
+ \tabl_ntb_span
+ \fi
+ \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
\fi}
-\def\doTBL#1#2%
- {\csname\gettbltag{#1}{#2}TBL\endcsname{#1}{#2}}
+\setvalue{\??naturaltablecell\the\c_tabl_ntb_cell}#1#2%
+ {\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_pass #1 #2 }% space delimited -> less tokens
+ \scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
+ \ifnum\scratchcounter>\zerocount
+ \advance\scratchcounter\minusone
+ \ifnum\scratchcounter>\zerocount
+ \tabl_ntb_span
+ \fi
+ \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
+ \fi}
-\def\begTBL
- {\global\tblspn\zerocount
- \global\tblrow\zerocount
- \global\tblcol\zerocount
- \tblpass\zerocount
- \tbltoks\emptytoks}
+\def\tabl_ntb_span
+ {\dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_span}}%
+ \dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_skip}}}
-\newtoks\everyTABLEpass
+\unexpanded\def\tabl_ntb_cell#1#2%
+ {\csname\??naturaltablecell\the\tabl_ntb_get_tag{#1}{#2}\endcsname{#1}{#2}}
-\def\flushtbltoks
- {\the\everyTABLEpass
- \begintbl
- \the\tbltoks
- \endtbl}
+\unexpanded\def\tabl_ntb_table_start
+ {\global\c_tabl_ntb_spn\zerocount
+ \global\c_tabl_ntb_row\zerocount
+ \global\c_tabl_ntb_col\zerocount
+ \c_tabl_tbl_pass\zerocount
+ \t_tabl_ntb\emptytoks}
-\def\domakeTBLone#1 #2 %
- {\gettbltxt{#1}{#2}}%
+\def\tabl_ntb_pass_one#1 #2 %
+ {\tabl_ntb_get_txt{#1}{#2}}%
-\def\domakeTBLtwo#1 #2 % meer in cellD
- {\scratchdimen\zeropoint
- \scratchcounter\tblcol
- \!!counta\gettblcol{#1}{#2}\relax
+\def\tabl_ntb_pass_two#1 #2 % more in cellD
+ {\d_tabl_ntb_width\zeropoint
+ \scratchcounter\c_tabl_ntb_col
+ \!!counta\tabl_ntb_get_col{#1}{#2}\relax
\dorecurse\!!counta
- {\advance\scratchdimen\dimexpr\gettblwid\scratchcounter+\tbltblcolumndistance\relax
- \ifnum\recurselevel<\!!counta \advance\scratchdimen \gettbldis\scratchcounter\fi
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ +\naturaltablelocalparameter\c!columndistance
+ \ifnum\recurselevel<\!!counta
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
\advance\scratchcounter\plusone}%
- \edef\widthTBL{\the\dimexpr\scratchdimen-\tbltblcolumndistance\relax}%
- \setbox\scratchbox\hbox{\gettbltxt{#1}{#2}}%
- \settblht{#1}{#2}{\the\ht\scratchbox}%
- \settblwd{#1}{#2}{\the\wd\scratchbox}%
- \ifdim\ht\scratchbox>\gettblhei{#1}\relax
- \settblhei{#1}{\the\ht\scratchbox}%
+ \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
+ \setbox\scratchbox\hbox{\tabl_ntb_get_txt{#1}{#2}}%
+ \tabl_ntb_set_ht{#1}{#2}{\the\ht\scratchbox}%
+ \tabl_ntb_set_wd{#1}{#2}{\the\wd\scratchbox}%
+ \ifdim\ht\scratchbox>\tabl_ntb_get_hei{#1}\relax
+ \tabl_ntb_set_hei{#1}{\the\ht\scratchbox}%
\fi}%
-\newcount\tablecellrows
-\newcount\tablecellcolumns
-
-\def\domakeTBLthree#1 #2 %
+\def\tabl_ntb_pass_three#1 #2 %
{% height
\dostarttagged\t!tablecell\empty
- \!!counta \gettblcol{#1}{#2}\relax
- \!!countb \gettblrow{#1}{#2}\relax
- \!!heighta\gettblht {#1}{#2}\relax
+ \!!counta \tabl_ntb_get_col{#1}{#2}\relax
+ \!!countb \tabl_ntb_get_row{#1}{#2}\relax
+ \!!heighta\tabl_ntb_get_ht {#1}{#2}\relax
\tablecellcolumns\!!counta % used later so don't adapt these
\tablecellrows \!!countb % used later so don't adapt these
- \scratchdimen\zeropoint
- \ifnum\!!counta=\maximumcol\relax
+ \d_tabl_ntb_height\zeropoint
+ \ifnum\!!counta=\c_tabl_ntb_maximum_col\relax
% case: nc=maxcolumns
\else
\scratchcounter#1\relax
\dorecurse\!!countb
- {\advance\scratchdimen\gettblhei\scratchcounter
+ {\advance\d_tabl_ntb_height\tabl_ntb_get_hei\scratchcounter
\advance\scratchcounter\plusone}%
- \ifdim\scratchdimen<\!!heighta\relax
- \scratchdimen\!!heighta
+ \ifdim\d_tabl_ntb_height<\!!heighta\relax
+ \d_tabl_ntb_height\!!heighta
\fi
\fi
- \edef\heightTBL{\the\scratchdimen}%
% width
- \scratchdimen\zeropoint
- \scratchcounter\tblcol
+ \d_tabl_ntb_width\zeropoint
+ \scratchcounter\c_tabl_ntb_col
\dorecurse\!!counta
- {\advance\scratchdimen\dimexpr\gettblwid\scratchcounter+\tbltblcolumndistance\relax
- \ifnum\recurselevel<\!!counta \advance\scratchdimen \gettbldis\scratchcounter\fi
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ +\naturaltablelocalparameter\c!columndistance
+ \ifnum\recurselevel<\!!counta
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
\advance\scratchcounter\plusone}%
- \edef\widthTBL{\the\dimexpr\scratchdimen-\tbltblcolumndistance\relax}%
+ \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
% cell
\setbox\scratchbox\hbox attr \taggedattribute \attribute\taggedattribute \bgroup
\dotagTABLEsignal % maybe we need to add some packaging in this case
- \gettbltxt{#1}{#2}%
+ \tabl_ntb_get_txt{#1}{#2}%
\egroup
- \ifnum\!!counta=\maximumcol\relax
+ \ifnum\!!counta=\c_tabl_ntb_maximum_col\relax
% case: nc=maxcolumns
\else
- \scratchdimen\gettblhei{#1}%
+ \scratchdimen\tabl_ntb_get_hei{#1}%
\setbox\scratchbox\hbox
{\lower\ht\scratchbox\hbox{\raise\scratchdimen\box\scratchbox}}%
\ht\scratchbox\scratchdimen
\fi
\dp\scratchbox\zeropoint
- \edef\!!stringa{\gettblref{#1}{#2}}%
+ \edef\!!stringa{\tabl_ntb_get_ref{#1}{#2}}%
\ifx\!!stringa\empty
\box\scratchbox
\else
@@ -1061,108 +1236,103 @@
\fi
\dostoptagged} % right spot
-\def\inTBLcell#1#2% hm, do we need #1 #2 ? we use tblcol anyway
+\def\tabl_tnb_cell_finalize
{\doifnotinset\localwidth{\v!fit,\v!broad}% user set
- {\scratchdimen\gettblaut\tblcol\relax
+ {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
\ifdim\localwidth>\scratchdimen
- \settblaut\tblcol{\the\dimexpr\localwidth\relax}%
- \fi}}%
+ \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
+ \fi}}
-\def\endTBL
+\def\tabl_ntb_table_stop
{\setbox\scratchbox\hbox
- {\localframed
- [\@@tbl\@@tbl]
- [\c!frame=\v!off,\c!background=,\c!align=\v!no]
- {\strut}}%
- \edef\minimalcellheight{\the\ht\scratchbox}%
- \dorecurse\maximumcol
- {\lettblaut\recurselevel\zeropoint
+ {\setupcurrentnaturaltablelocal[\c!frame=\v!off,\c!background=,\c!align=\v!no]%
+ \inheritednaturaltablelocalframed{\strut}}%
+ \edef\minimalcellheight{\the\ht\scratchbox}% not used
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\tabl_ntb_let_aut\recurselevel\zeropoint
% new
- \xcol\recurselevel\relax
- \dorecurse\maximumrow
- {\lettblwd\recurselevel\xcol\zeropoint
- \lettblht\recurselevel\xcol\zeropoint}%
+ \c_tabl_ntb_current_col_one\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_row
+ {\tabl_ntb_let_wd\recurselevel\c_tabl_ntb_current_col_one\zeropoint
+ \tabl_ntb_let_ht\recurselevel\c_tabl_ntb_current_col_one\zeropoint}%
% till here
- \lettblwid\recurselevel\zeropoint
- \lettbldis\recurselevel\zeropoint}%
- \dorecurse\maximumrow
- {\lettblhei\recurselevel\maxdimen}%
- \tblpass\plusone
- \let\makeTBL\domakeTBLone
- \let\handleTBLcell\dohandleTBLcellA
- \setbox0\vbox{\settrialtypesetting \flushtbltoks}%
-% \setbox\scratchbox\vbox{\settrialtypesetting \flushtbltoks}%
- \lettbldis\maximumcol\zeropoint
+ \tabl_ntb_let_wid\recurselevel\zeropoint
+ \tabl_ntb_let_dis\recurselevel\zeropoint}%
+ \dorecurse\c_tabl_ntb_maximum_row
+ {\tabl_ntb_let_hei\recurselevel\maxdimen}%
+ \c_tabl_tbl_pass\plusone
+ \let\tabl_ntb_pass\tabl_ntb_pass_one
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_a
+ \setbox0\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \tabl_ntb_let_dis\c_tabl_ntb_maximum_col\zeropoint
\ifautoTBLspread
% experimental, stretch non fixed cells to \hsize
- \checktblwidthsone % trial run
- \checktblwidthstwo % real run
- \stretchtblwidths
- \let\handleTBLcell\dohandleTBLcellB
- \setbox\scratchbox\vbox{\settrialtypesetting \flushtbltoks}%
+ \tabl_ntb_check_widths_one % trial run
+ \tabl_ntb_check_widths_two % real run
+ \tabl_ntb_stretch_widths
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\else\ifdim\wd0>\hsize
\ifautoTBLhsize
- \checktblwidthsone % trial run
- \checktblwidthstwo % real run
- \let\handleTBLcell\dohandleTBLcellB
- \setbox\scratchbox\vbox{\settrialtypesetting \flushtbltoks}%
+ \tabl_ntb_check_widths_one % trial run
+ \tabl_ntb_check_widths_two % real run
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi
- \else\ifautoTBLrowspan\ifnum\maximumrowspan>1 % max ?
+ \else\ifautoTBLrowspan\ifnum\c_tabl_ntb_maximum_row_span>1 % max ?
% added jan 2002 because nx=* did no longer work
\edef\savedhsize{\the\hsize}%
\hsize\wd0\relax % new per 17/04/2006
- \checktblwidthsone % trial run
- \checktblwidthstwo % real run
+ \tabl_ntb_check_widths_one % trial run
+ \tabl_ntb_check_widths_two % real run
\hsize\savedhsize
%
- \let\handleTBLcell\dohandleTBLcellC
- \setbox\scratchbox\vbox{\settrialtypesetting \flushtbltoks}%
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_c
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi\fi\fi\fi
- \let\handleTBLcell\dohandleTBLcellD
- \tblpass\plustwo
- \let\makeTBL\domakeTBLtwo
- \setbox\scratchbox\vbox{\settrialtypesetting \flushtbltoks}%
- \checktblheightsone
- \checktblheightstwo
- \let\handleTBLcell\dohandleTBLcellE
- \tblpass\plusthree
- \let\makeTBL\domakeTBLthree
- \ifnum\TBLlevel>\plusone
- \@EA\notsplittblbox
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_d
+ \c_tabl_tbl_pass\plustwo
+ \let\tabl_ntb_pass\tabl_ntb_pass_two
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \tabl_ntb_check_heights_one
+ \tabl_ntb_check_heights_two
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_e
+ \c_tabl_tbl_pass\plusthree
+ \let\tabl_ntb_pass\tabl_ntb_pass_three
+ \ifnum\m_tabl_tbl_level>\plusone
+ \expandafter\tabl_tbl_split_nop
\else\ifenableTBLbreak
- \@EAEAEA\splittblbox
+ \doubleexpandafter\tabl_tbl_split_yes
\else
- \@EAEAEA\notsplittblbox
- \fi\fi{\flushtbltoks}}
+ \doubleexpandafter\tabl_tbl_split_nop
+ \fi\fi{\tabl_ntb_flush_content}}
-\def\stretchtblwidths % more variants, e.g. a max to \dimend
- {\ifcase\maximumcol\else % else division by zero
+\def\tabl_ntb_stretch_widths % more variants, e.g. a max to \dimend
+ {\ifcase\c_tabl_ntb_maximum_col\else % else division by zero
\!!dimend\zeropoint
\!!dimene\hsize
- \dorecurse\maximumcol
- {\advance\!!dimend\dimexpr\gettblwid\recurselevel+\tbltblcolumndistance\relax
- \advance\!!dimene-\gettbldis\recurselevel}%
- \advance\!!dimend\dimexpr-\tbltblcolumndistance+\tbltblleftmargindistance+\tbltblrightmargindistance\relax
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\advance\!!dimend\dimexpr\tabl_ntb_get_wid\recurselevel+\naturaltablelocalparameter\c!columndistance\relax
+ \advance\!!dimene-\tabl_ntb_get_dis\recurselevel}%
+ \advance\!!dimend\dimexpr-\naturaltablelocalparameter\c!columndistance+\naturaltablelocalparameter\c!leftmargindistance+\naturaltablelocalparameter\c!rightmargindistance\relax
% distribute width (stretch)
\ifdim\!!dimend<\!!dimene
\advance\!!dimend-\!!dimene
\!!dimend-\!!dimend
- \divide\!!dimend\maximumcol
- \dorecurse\maximumcol
- {\settblwid\recurselevel{\the\dimexpr\gettblwid\recurselevel+\!!dimend\relax}}%
+ \divide\!!dimend\c_tabl_ntb_maximum_col
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\tabl_ntb_set_wid\recurselevel{\the\dimexpr\tabl_ntb_get_wid\recurselevel+\!!dimend\relax}}%
\fi
\fi}
-\newbox\finaltblbox
-
-\def\notsplittblbox#1%
- {\setbox\finaltblbox\vbox{#1}%
- \postprocessTABLEbox\finaltblbox
+\def\tabl_tbl_split_nop#1%
+ {\setbox\b_tabl_ntb_final\vbox{#1}%
+ \postprocessTABLEbox\b_tabl_ntb_final
\beforeTABLEbox
- \box\finaltblbox
+ \box\b_tabl_ntb_final
\afterTABLEbox}
-\def\splittblbox#1%
+\def\tabl_tbl_split_yes % #1
{\ifinsidesplitfloat
\donetrue
\else\ifinsidefloat
@@ -1171,33 +1341,33 @@
\donetrue
\fi\fi
\ifdone
- \executeifdefined{dosplittblbox\tbltblsplitmethod}\dosplittblbox{#1}%
+ \expandafter\tabl_ntb_split_box
\else
- \notsplittblbox{#1}%
+ \expandafter\tabl_tbl_split_nop
\fi}
\newbox\TABLEsplitbox % public, don't change
\let\extratblsplitheight\zeropoint % additional space taken by before/afterTABLEsplitbox
-\def\dosplittblbox#1%
+\def\tabl_ntb_split_box#1%
{\resettsplit
\def\tsplitminimumfreelines{2}%
- \def\tsplitminimumfreespace{\dimexpr\extratblsplitheight+\tbltblsplitoffset\relax}%
+ \def\tsplitminimumfreespace{\dimexpr\extratblsplitheight+\naturaltablelocalparameter\c!splitoffset\relax}%
\def\tsplitbeforeresult {\beforeTABLEsplitbox}%
\def\tsplitafterresult {\afterTABLEsplitbox}%
- \def\tsplitafter {\@@tblsplitafter}%
+ \def\tsplitafter {\m_tabl_ntb_after_split}%
\setbox\tsplitcontent\vbox{#1}%
\ifmultipleTBLheads
- \dorecurse\noftblheadlines
+ \dorecurse\c_tabl_ntb_n_of_head_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
\setbox\tsplithead\vbox{\unvcopy\tsplithead\unvcopy\scratchbox}}%
- \dorecurse\noftblnextlines
+ \dorecurse\c_tabl_ntb_n_of_next_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
\setbox\tsplitnext\vbox{\unvcopy\tsplitnext\unvcopy\scratchbox}}%
\fi
- \doifsomething\tbltblspaceinbetween
- {\def\tsplitinbetween{\blank[\tbltblspaceinbetween]}}%
+ \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
+ {\def\tsplitinbetween{\blank[\naturaltablelocalparameter\c!spaceinbetween]}}%
\def\postprocesstsplit{\postprocessTABLEsplitbox{\box\tsplitresult}}%
\handletsplit}
@@ -1211,22 +1381,22 @@
\let\beforeTABLEbox \relax
\let\afterTABLEbox \relax
-\def\checktblwidthsone{\dochecktblwidths0} % 0 = trial run
-\def\checktblwidthstwo{\dochecktblwidths1} % 1 = real run
+\def\tabl_ntb_check_widths_one{\tabl_ntb_check_widths_indeed0} % 0 = trial run
+\def\tabl_ntb_check_widths_two{\tabl_ntb_check_widths_indeed1} % 1 = real run
-\def\dochecktblwidths#1%
- {\iftraceTABLE\showtblwids{B#1}\fi
+\def\tabl_ntb_check_widths_indeed#1%
+ {\iftraceTABLE\tabl_ntb_show_widths{B#1}\fi
\!!counta\zerocount
- \!!dimena\dimexpr\hsize-\tbltblleftmargindistance-\tbltblrightmargindistance-\tbltblcolumndistance\relax
- \dorecurse\maximumcol
- {\scratchdimen\gettblaut\recurselevel\relax
- \advance\!!dimena-\gettbldis\recurselevel\relax
+ \!!dimena\dimexpr\hsize-\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\scratchdimen\tabl_ntb_get_aut\recurselevel\relax
+ \advance\!!dimena-\tabl_ntb_get_dis\recurselevel\relax
\ifdim\scratchdimen>\zeropoint\relax
\advance\!!dimena -\scratchdimen
\else
- \scratchdimen\gettblwid\recurselevel\relax
- \ifdim\scratchdimen>\tbltblmaxwidth\relax
- \ifcase#1\else\lettblwid\recurselevel\zeropoint\fi
+ \scratchdimen\tabl_ntb_get_wid\recurselevel\relax
+ \ifdim\scratchdimen>\naturaltablelocalparameter\c!maxwidth\relax
+ \ifcase#1\else\tabl_ntb_let_wid\recurselevel\zeropoint\fi
\advance\!!counta \plusone
\else
\ifdim\scratchdimen>\zeropoint\relax
@@ -1239,163 +1409,157 @@
\fi
\fi}%
\ifcase\!!counta \else \divide\!!dimena \!!counta \fi
- \dorecurse\maximumcol
- {\scratchdimen\gettblwid\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
\ifcase#1\relax
\ifdim\scratchdimen<\!!dimena % take natural width
- \settblaut\recurselevel{\the\scratchdimen}%
+ \tabl_ntb_set_aut\recurselevel{\the\scratchdimen}%
\fi
\else
\ifdim\scratchdimen=\zeropoint % auto set width
- \settblwid\recurselevel{\the\!!dimena}%
+ \tabl_ntb_set_wid\recurselevel{\the\!!dimena}%
\fi
\fi}%
- \iftraceTABLE\showtblwids{E#1}\fi}
-
-\newcount\xrowTBL
-\newcount\xcolTBL
-\newcount\xxrowTBL
+ \iftraceTABLE\tabl_ntb_show_widths{E#1}\fi}
-% get rid of the fat argument passed to recurse
-
-\def\dochecktblheightsone
- {\!!countb\gettblrow\xrowTBL\xcolTBL\relax
+\def\tabl_ntb_check_heights_one_indeed
+ {\!!countb\tabl_ntb_get_row\c_tabl_ntb_current_row_three\c_tabl_ntb_current_col_three\relax
% check row span
\ifnum\!!countb>\plusone
% current height in row
- \dimen0=\gettblht\xrowTBL\xcolTBL
+ \dimen0=\tabl_ntb_get_ht\c_tabl_ntb_current_row_three\c_tabl_ntb_current_col_three
% find nearest height in row
\dimen2=\zeropoint
- \dorecurse\maximumcol
- {\ifnum\recurselevel=\xcolTBL\else
- \doiftblrow\xrowTBL\recurselevel
- {\!!countc=\gettblrow\xrowTBL\recurselevel\relax
- \ifnum\!!countc=\plusone
- \dimen4=\gettblht\xrowTBL\recurselevel\relax
- \ifdim\dimen2<\dimen4
- \dimen2=\dimen4
- \fi
- \fi}%
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\ifnum\recurselevel=\c_tabl_ntb_current_col_three\else
+ \ifcsname\tabl_ntb_row_pattern\c_tabl_ntb_current_row_three\recurselevel\endcsname
+ \!!countc=\tabl_ntb_get_row\c_tabl_ntb_current_row_three\recurselevel\relax
+ \ifnum\!!countc=\plusone
+ \dimen4=\tabl_ntb_get_ht\c_tabl_ntb_current_row_three\recurselevel\relax
+ \ifdim\dimen2<\dimen4
+ \dimen2=\dimen4
+ \fi
+ \fi
+ \fi
\fi}%
- \xxrowTBL\xrowTBL
+ \c_tabl_ntb_current_row_four\c_tabl_ntb_current_row_three
     % calculate cumulative height
\dimen4=\dimen2
- \!!countc\xrowTBL
+ \!!countc\c_tabl_ntb_current_row_three
\advance\!!countc\minusone
\dorecurse\!!countb
- {\ifnum\xxrowTBL=\xrowTBL\else
- \advance\dimen4 \gettblhei\xxrowTBL
+ {\ifnum\c_tabl_ntb_current_row_four=\c_tabl_ntb_current_row_three\else
+ \advance\dimen4 \tabl_ntb_get_hei\c_tabl_ntb_current_row_four
\fi
\ifnum\recurselevel=\!!countb\else
- \settblnob\!!countc
+ \tabl_ntb_set_nob\!!countc
\advance\!!countc\plusone
\fi
- \advance\xxrowTBL\plusone}%
+ \advance\c_tabl_ntb_current_row_four\plusone}%
% distribute overshoot equally
\ifdim\dimen2>\zeropoint % new: test on natural-003
\ifdim\dimen4<\dimen0
\advance\dimen0 -\dimen4
\divide\dimen0 \!!countb
- \xxrowTBL\xrowTBL
- \settblhei\xrowTBL{\the\dimen2}%
+ \c_tabl_ntb_current_row_four\c_tabl_ntb_current_row_three
+ \tabl_ntb_set_hei\c_tabl_ntb_current_row_three{\the\dimen2}%
\dorecurse\!!countb
- {\dorecurse\maximumcol
- {\ifnum\recurselevel=\xcolTBL\else
- \scratchdimen\dimexpr\gettblht\xxrowTBL\recurselevel+\dimen0\relax
- \settblht\xxrowTBL\recurselevel{\the\scratchdimen}%
- \ifdim\gettblhei\xxrowTBL<\scratchdimen
- \settblhei\xxrowTBL{\the\scratchdimen}%
+ {\dorecurse\c_tabl_ntb_maximum_col
+ {\ifnum\recurselevel=\c_tabl_ntb_current_col_three\else
+ \scratchdimen\dimexpr\tabl_ntb_get_ht\c_tabl_ntb_current_row_four\recurselevel+\dimen0\relax
+ \tabl_ntb_set_ht\c_tabl_ntb_current_row_four\recurselevel{\the\scratchdimen}%
+ \ifdim\tabl_ntb_get_hei\c_tabl_ntb_current_row_four<\scratchdimen
+ \tabl_ntb_set_hei\c_tabl_ntb_current_row_four{\the\scratchdimen}%
\fi
\fi}%
- \advance\xxrowTBL\plusone}%
+ \advance\c_tabl_ntb_current_row_four\plusone}%
\else\ifdim\dimen4>\dimen0
\iftightTBLrowspan
- \settblhei\xrowTBL{\the\dimen2}%
+ \tabl_ntb_set_hei\c_tabl_ntb_current_row_three{\the\dimen2}%
\fi
\fi\fi
\fi
\fi}
-\def\checktblheightsone
- {\dorecurse\maximumrow
- {\xrowTBL\recurselevel\relax
- \dorecurse\maximumcol
- {\xcolTBL\recurselevel\relax
- \doiftblrow\xrowTBL\xcolTBL\dochecktblheightsone}}}
-\def\checktblheightstwo
+\def\tabl_ntb_check_heights_one
+ {\dorecurse\c_tabl_ntb_maximum_row
+ {\c_tabl_ntb_current_row_three\recurselevel\relax
+ \dorecurse\c_tabl_ntb_maximum_col
+ {\c_tabl_ntb_current_col_three\recurselevel\relax
+ \ifcsname\tabl_ntb_row_pattern\c_tabl_ntb_current_row_three\c_tabl_ntb_current_col_three\endcsname
+ \tabl_ntb_check_heights_one_indeed
+ \fi}}}
+
+\def\tabl_ntb_check_heights_two
{}
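
% The height passes above distribute the surplus height of a row spanning
% cell over the rows it covers. A minimal sketch of input that exercises
% this code (assuming the usual natural table span keys nr and nc):
%
% \bTABLE
%   \bTR \bTD[nr=2] spans two rows \eTD \bTD one \eTD \eTR
%   \bTR                                \bTD two \eTD \eTR
% \eTABLE
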
-\def\showtblwids#1%
+\def\tabl_ntb_show_widths#1%
{\vbox
- {\forgetall\tttf[#1]\dorecurse\maximumcol
- {\scratchdimen\gettblwid\recurselevel\relax
+ {\forgetall\tttf[#1]\dorecurse\c_tabl_ntb_maximum_col
+ {\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
[\recurselevel:\the\scratchdimen]}}}
-\def\TBLcharalign
- {\doifelse\tbltblaligncharacter\v!yes
- \doTBLcharalign\gobbleoneargument}
+\def\tabl_ntb_char_align
+ {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes
+ \tabl_ntb_char_align_indeed\gobbleoneargument}
-\long\def\doTBLcharalign#1#2% column data
+\def\tabl_ntb_char_align_indeed#1#2% column data
{\edef\alignmentclass{#1}%
- \edef\alignmentcharacter{\tbltblalignmentcharacter}%
- \ifcase\tblpass\or
+ \edef\alignmentcharacter{\naturaltablelocalparameter\c!alignmentcharacter}%
+ \ifcase\c_tabl_tbl_pass\or
\setfirstpasscharacteralign\checkalignment{#2}% {\strut#2\unskip}%
\fi % force hsize, so always a second
\setsecondpasscharacteralign \checkalignment{#2}% {\strut#2\unskip}%
\ignorespaces}
-% new, needed for icare first col of 'doeltabel', experimental
-
-\long\def\dohandleTBLcellA#1#2[#3]#4% grouping added ! ! !
+\unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! !
{\bgroup
- \setupTBLcell{#1}{#2}%
+ \tabl_ntb_setup_cell{#1}{#2}%
\setbox\scratchbox\hbox
- {\scratchdimen\tbltbldistance\relax
- \ifdim\scratchdimen>\gettbldis{#2}\relax
- \settbldis{#2}{\the\scratchdimen}%
+ {\scratchdimen\naturaltablelocalparameter\c!distance\relax
+ \ifdim\scratchdimen>\tabl_ntb_get_dis{#2}\relax
+ \tabl_ntb_set_dis{#2}{\the\scratchdimen}%
\fi
- \localframed
- [\@@tbl\@@tbl]
- [#3,\c!background=,\c!frame=\v!off]% 25% faster
- {\bTBLCELL\TBLcharalign{#2}{#4}\eTBLCELL\inTBLcell{#1}{#2}}}%
- \scratchdimen\gettblwid\tblcol\relax
+ \setupcurrentnaturaltablelocal[#3,\c!background=,\c!frame=\v!off]% 25% faster
+ \inheritednaturaltablelocalframed{\tabl_tnb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_tnb_cell_stop\tabl_tnb_cell_finalize}}%
+ \scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax
\ifdim\wd\scratchbox>\scratchdimen
\ifsqueezeTBLspan
\ifautosqueezeTBLspan
- \doifinsetelse\tbltblwidth{\v!fit,\v!fixed,\v!broad,\v!local}
+ \doifinsetelse{\naturaltablelocalparameter\c!width}{\v!fit,\v!fixed,\v!broad,\v!local}
\donetrue \donefalse
\else
\donetrue
\fi
\ifdone % brr, 0
- \ifnum\number\gettblcol{#1}{#2}>\plusone \settblspn\tblcol\fi
+ \ifnum\tabl_ntb_get_col{#1}{#2}>\plusone \tabl_ntb_set_spn\c_tabl_ntb_col\fi
\fi
\fi
- \doifelsetblspn\tblcol
+ \tabl_ntb_spn_doifelse\c_tabl_ntb_col
\donothing
- {\ifdim\gettblwid\tblcol<\wd\scratchbox
- \settblwid\tblcol{\the\wd\scratchbox}%
+ {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+ \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
\fi}% auto set
\fi
- \scratchcounter\numexpr\tblrow+\plusone\relax
- \scratchdimen\gettblhei\scratchcounter\relax
+ \scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
+ \scratchdimen\tabl_ntb_get_hei\scratchcounter\relax
\ifdim\ht\scratchbox<\scratchdimen
- \settblhei\scratchcounter{\the\ht\scratchbox}% auto set
+ \tabl_ntb_set_hei\scratchcounter{\the\ht\scratchbox}% auto set
\fi
- \settblht{#1}{#2}{\the\ht\scratchbox}%
- \settblwd{#1}{#2}{\the\wd\scratchbox}%
+ \tabl_ntb_set_ht{#1}{#2}{\the\ht\scratchbox}%
+ \tabl_ntb_set_wd{#1}{#2}{\the\wd\scratchbox}%
\ifautoTBLcheckwidth
\ifdim\wd\scratchbox<.75\hsize % fuzzy guess
\ifdim\ht\scratchbox>2\openlineheight % honor width since this
- \scratchdimen\gettblaut\tblcol\relax % can be a figure or so
+ \scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax % can be a figure or so
\ifdim\scratchdimen=\zeropoint
% side effect: when width is set to 0pt,
% we can force a span that fits the sum of spans widths
- \settblaut\tblcol{\the\scratchdimen}%
+ \tabl_ntb_set_aut\c_tabl_ntb_col{\the\scratchdimen}%
\else\ifdim\wd\scratchbox>\scratchdimen
% unless span
- \settblaut\tblcol{\the\wd\scratchbox}%
+ \tabl_ntb_set_aut\c_tabl_ntb_col{\the\wd\scratchbox}%
% to be translated
\writestatus\m!TABLE
{no auto width in (\number#1,\number#2)\space\the\wd\scratchbox/\the\hsize}%
@@ -1404,155 +1568,102 @@
\fi
\fi
\setbox2\emptyhbox
- \wd2\wd\scratchbox \ht2\ht\scratchbox \dp2\dp\scratchbox
+ \wd2\wd\scratchbox
+ \ht2\ht\scratchbox
+ \dp2\dp\scratchbox
\box2
\egroup}
-\long\def\dohandleTBLcellBC#1#2#3[#4]#5%
+\unexpanded\def\tabl_ntb_cell_process_b_c#1#2#3[#4]#5%
{\setbox\scratchbox\hbox
- {\setupTBLcell{#2}{#3}%
- \localframed
- [\@@tbl\@@tbl]
- [#4,#1,\c!frame=\v!off,\c!background=]
- {\bTBLCELL#5\eTBLCELL}}%
+ {\tabl_ntb_setup_cell{#2}{#3}%
+ \setupcurrentnaturaltablelocal[#4,#1,\c!frame=\v!off,\c!background=]%
+ \inheritednaturaltablelocalframed{\tabl_tnb_cell_start#5\tabl_tnb_cell_stop}}%
\setbox2\emptyhbox
- \wd2\wd\scratchbox \ht2\ht\scratchbox \dp2\dp\scratchbox
+ \wd2\wd\scratchbox
+ \ht2\ht\scratchbox
+ \dp2\dp\scratchbox
\ifautoTBLrowspan
- \scratchcounter\numexpr\tblrow+\plusone\relax
- \doiftblrow\scratchcounter\tblcol
- {\scratchdimen\gettblhei\scratchcounter\relax % moved inside test
- \ifnum\gettblrow\scratchcounter\tblcol>\plusone \ifdim\ht\scratchbox>\scratchdimen
- \scratchdimen-\scratchdimen \advance\scratchdimen -\ht\scratchbox
- \ht2\scratchdimen
- \fi \fi}%
+ \scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
+ \ifcsname\tabl_ntb_row_pattern\scratchcounter\c_tabl_ntb_col\endcsname
+ \scratchdimen\tabl_ntb_get_hei\scratchcounter\relax
+ \ifnum\tabl_ntb_get_row\scratchcounter\c_tabl_ntb_col>\plusone
+ \ifdim\ht\scratchbox>\scratchdimen
+ \ht2\dimexpr-\scratchdimen-\ht\scratchbox\relax
+ \fi
+ \fi
+ \fi
\fi
\box2 }
-\long\def\dohandleTBLcellB#1#2[#3]#4%
- {\scratchdimen\gettblaut\tblcol\relax
+\unexpanded\def\tabl_ntb_cell_process_b#1#2[#3]#4%
+ {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
\ifdim\scratchdimen>\zeropoint\relax
- \let\tblwidthkey\c!width
- \edef\tblwidth{\the\scratchdimen}%
\else
- \scratchdimen\gettblwid\tblcol\relax
+ \scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax
\ifdim\scratchdimen>\zeropoint\relax
- \ifnum\gettblcol{#1}{#2}=\maximumcol\relax
+ \ifnum\tabl_ntb_get_col{#1}{#2}=\c_tabl_ntb_maximum_col\relax
\scratchdimen\hsize
\fi
- \let\tblwidthkey\c!width
- \edef\tblwidth{\the\scratchdimen}%
- \else
- \let\tblwidthkey\s!unknown
- \let\tblwidth\zeropoint
\fi
\fi
- \dohandleTBLcellBC{\tblwidthkey=\tblwidth}{#1}{#2}[#3]{\TBLcharalign{#2}{#4}}}
+ \normalexpanded{\tabl_ntb_cell_process_b_c{\ifdim\scratchdimen>\zeropoint \c!width=\the\scratchdimen\fi}}%
+ {#1}{#2}[#3]{\tabl_ntb_char_align{#2}{#4}}}
-\long\def\dohandleTBLcellC
- {\dohandleTBLcellBC{}}
+\unexpanded\def\tabl_ntb_cell_process_c
+ {\tabl_ntb_cell_process_b_c{}}
-\long\def\dohandleTBLcellD#1#2[#3]#4%
- {\setupTBLcell{#1}{#2}%
+\unexpanded\def\tabl_ntb_cell_process_d#1#2[#3]#4%
+ {\tabl_ntb_setup_cell{#1}{#2}%
\bgroup
- \localframed
- [\@@tbl\@@tbl]
- [#3,\c!width=\widthTBL,\c!background=,\c!frame=\v!off]% 25% faster
- {\bTBLCELL\TBLcharalign{#2}{#4}\eTBLCELL}%
+ \setupcurrentnaturaltablelocal[#3,\c!width=\d_tabl_ntb_width,\c!background=,\c!frame=\v!off]% 25% faster
+ \inheritednaturaltablelocalframed{\tabl_tnb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_tnb_cell_stop}%
\egroup}
-\long\def\dohandleTBLcellE#1#2[#3]#4%
- {\setupTBLcell{#1}{#2}%
- \getparameters[\@@tbl\@@tbl][#3]% to get the color right, the way we
+\unexpanded\def\tabl_ntb_cell_process_e#1#2[#3]#4%
+ {\tabl_ntb_setup_cell{#1}{#2}%
+ \setupcurrentnaturaltablelocal[#3]% to get the color right, the way we
\color % handle color here prevents interference due to whatsit nodes
- [\tbltblcolor] % as well as permits local colors to take precedence
- {\ifdim\heightTBL=\zeropoint\relax % case: nc=maxcolumns
- \localframed
- [\@@tbl\@@tbl]
- [\c!color=,\c!width=\widthTBL]
- {\bTBLCELL\TBLcharalign{#2}{#4}\eTBLCELL}%
+ [\naturaltablelocalparameter\c!color] % as well as permits local colors to take precedence
+ {\ifdim\d_tabl_ntb_height=\zeropoint\relax % case: nc=maxcolumns
+ \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width]%
\else
- \localframed
- [\@@tbl\@@tbl]
- [\c!color=,\c!width=\widthTBL,\c!height=\heightTBL]
- {\bTBLCELL\TBLcharalign{#2}{#4}\eTBLCELL}%
- \fi}%
- \hskip\gettbldis{#2}}
-
-\presetlocalframed % todo: tableparameter etc
- [\@@tbl\@@tbl]
-
-\setupTABLE [%
- \c!frameoffset=.5\linewidth,
- \c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
- \c!width=\v!fit,
- \c!height=\v!fit,
- \c!autowidth=\v!yes,
- \c!rulethickness=\linewidth,
- \c!strut=\v!yes,
- \c!autostrut=\v!no,
- %
- \c!color=,
- \c!style=,
- \c!headstyle=\v!bold,
- \c!headcolor=,
- \c!aligncharacter=\v!no,
- \c!alignmentcharacter={,},
- \c!option=, % \v!stretch
- \c!header=,
- \c!spaceinbetween=,
- \c!maxwidth=8em,
- \c!textwidth=\hsize,
- \c!split=\v!auto,
- \c!splitoffset=0pt,
- \c!distance=\zeropoint, % individual column
- \c!columndistance=\zeropoint, % each column (whole table)
- \c!leftmargindistance=\zeropoint, % whole table
- \c!rightmargindistance=\zeropoint,% whole table
- \c!left=,
- \c!right=,
- \c!setups=,
- \c!splitmethod=a%
-]
-
-%D We have already prepared the previous macros for nesting,
-%D so we only have to pop in the right ones:
-
-%D New:
-
-\def\pushTBLparameters
- {\globalpushmacro\TBLlevel
- \ifcase\tblpass
- % we're just after \bTABLE
- \else\ifnum\TBLlevel>\zerocount
- \doglobal\increment\TBLlevel\relax
- \fi\fi}
-
-\def\popTBLparameters
- {\globalpopmacro\TBLlevel}
-
-\def\pushTBL
- {\ifnum\TBLlevel=\zerocount
- \global\advance\currenttbl\plusone
- \fi
- \doglobal\increment\TBLlevel\relax
- \ifnum\TBLlevel>\plusone
- \resetallTABLEparameters
- % we need a proper count push/pop
- \xdef\savedtblrow{\the\tblrow}\globalpushmacro\savedtblrow
- \xdef\savedtblcol{\the\tblcol}\globalpushmacro\savedtblcol
- \else
- \global\intabletrue
- \fi}
-
-\def\popTBL
- {\ifnum\TBLlevel>\plusone
- \globalpopmacro\savedtblrow\global\tblrow\savedtblrow
- \globalpopmacro\savedtblcol\global\tblcol\savedtblcol
- \else
- \global\intablefalse
- \fi
- \doglobal\decrement\TBLlevel\relax}
+ \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width,\c!height=\d_tabl_ntb_height]%
+ \fi
+ \inheritednaturaltablelocalframed{\tabl_tnb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_tnb_cell_stop}}%
+ \hskip\tabl_ntb_get_dis{#2}}
+
+\setupTABLE
+ [\c!frameoffset=.5\linewidth,
+ \c!backgroundoffset=\v!frame,
+ \c!framecolor=\s!black,
+ \c!width=\v!fit,
+ \c!height=\v!fit,
+ \c!autowidth=\v!yes,
+ \c!rulethickness=\linewidth,
+ \c!strut=\v!yes,
+ \c!autostrut=\v!no,
+ %
+ \c!color=,
+ \c!style=,
+ \c!headstyle=\v!bold,
+ \c!headcolor=,
+ \c!aligncharacter=\v!no,
+ \c!alignmentcharacter={,},
+ \c!option=, % \v!stretch
+ \c!header=,
+ \c!spaceinbetween=,
+ \c!maxwidth=8\emwidth,
+ \c!textwidth=\v!local, % was \hsize
+ \c!split=\v!auto,
+ \c!splitoffset=\zeropoint,
+ \c!distance=\zeropoint, % individual column
+ \c!columndistance=\zeropoint, % each column (whole table)
+ \c!leftmargindistance=\zeropoint, % whole table
+ \c!rightmargindistance=\zeropoint,% whole table
+ \c!left=,
+ \c!right=,
+ \c!setups=]
% \bgroup
% \setupTABLE[column][1][aligncharacter=yes, alignmentcharacter={,}]
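
% A minimal sketch of overriding a few of the defaults listed above,
% assuming the regular \setupTABLE row/column selectors:
%
% \setupTABLE[c][each][align=flushright,maxwidth=6em]
% \setupTABLE[r][1][style=bold,bottomframe=on]
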
@@ -1570,30 +1681,28 @@
\newconditional\resetTABLEmode \settrue\resetTABLEmode
-\def\resetallTABLEparameters% must also work when nested
- {\ifnum\TBLlevel>\plusone % in ieder geval
+\def\tabl_ntb_parameters_reset
+ {\ifnum\m_tabl_tbl_level>\plusone % in any case
\ifconditional\resetTABLEmode
-% \presetlocalframed % breedte hoogte diepte offset
-% [\@@tbl\@@tbl]% % achtergrond, achtergrondraster, achtergrondkleur
% not ok yet
- \setupTABLE [%
- \c!frameoffset=.5\linewidth,
+ \setupTABLE
+ [\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
\c!framecolor=\s!black,
\c!width=fit,
\c!height=fit,
-\c!autowidth=\v!yes,
-% \c!rulethickness=\linewidth,
+ \c!autowidth=\v!yes,
+ % \c!rulethickness=\linewidth,
\c!strut=\v!no,
-\c!strut=\v!yes, % needed for mathml, but ... maybe we need another resetTABLEmode
-\c!autostrut=\v!no,
+ \c!strut=\v!yes, % needed for mathml, but ... maybe we need another resetTABLEmode
+ \c!autostrut=\v!no,
\c!color=,
\c!style=,
\c!headstyle=,
\c!headcolor=,
\c!aligncharacter=\v!no,
\c!alignmentcharacter={,},
- \c!maxwidth=8em]%
+ \c!maxwidth=8\emwidth]%
\else
\setupTABLE
[\c!width=\v!fit,
@@ -1611,21 +1720,22 @@
% \bTRs[xx] \bTD oeps \eTD \bTD oeps \eTD \eTRs
% \eTABLE
+\installcorenamespace{naturaltablesetup}
+
\unexpanded\def\defineTABLEsetup
- {\dodoubleargument\dodefineTABLEsetup}
+ {\dodoubleargument\tabl_ntb_define_setup}
+
+\def\tabl_ntb_define_setup[#1][#2]%
+ {\setvalue{\??naturaltablesetup#1}{#2}}
-\def\dodefineTABLEsetup[#1][#2]%
- {\setvalue{\@@tbl:set:#1}{#2}}
+\let\eTDs\relax
+\let\eTRs\relax
-\long\def\bTDs[#1]#2\eTDs
- {\doifdefinedelse{\@@tbl:set:#1}
- {\@EA\@EA\@EA\bTD\@EA\@EA\@EA[\csname\@@tbl:set:#1\endcsname]#2\eTD}
- {\bTD[]#2\eTD}}
+\unexpanded\def\bTDs[#1]#2\eTDs
+ {\normalexpanded{\bTD[\ifcsname\??naturaltablesetup#1\endcsname\csname\??naturaltablesetup#1\endcsname\fi]}#2\eTD}
-\long\def\bTRs[#1]#2\eTRs
- {\doifdefinedelse{\@@tbl:set:#1}
- {\@EA\@EA\@EA\bTR\@EA\@EA\@EA[\csname\@@tbl:set:#1\endcsname]#2\eTR}
- {\bTR[]#2\eTR}}
+\unexpanded\def\bTRs[#1]#2\eTRs
+ {\normalexpanded{\bTR[\ifcsname\??naturaltablesetup#1\endcsname\csname\??naturaltablesetup#1\endcsname\fi]}#2\eTR}
\protect \endinput
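
% The \defineTABLEsetup, \bTRs and \bTDs commands above apply a named set
% of cell options by reference, as in the commented "xx" example earlier.
% A minimal sketch:
%
% \defineTABLEsetup[xx][style=bold,align=middle]
%
% \bTABLE
%   \bTRs[xx] \bTD one \eTD \bTD two \eTD \eTRs
%   \bTR      \bTD one \eTD \bTD two \eTD \eTR
% \eTABLE
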
diff --git a/Master/texmf-dist/tex/context/base/tabl-nte.mkiv b/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
index 08ab34f0fa0..4a9774cb0f7 100644
--- a/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
@@ -28,7 +28,7 @@
%D
%D Let us start with the original macros:
%D
-%D \starttyping
+%D \startbuffer
%D \bTABLE
%D \bTR
%D \bTD Text 1 \eTD
@@ -39,65 +39,72 @@
%D \bTD Text 4 \eTD
%D \eTR
%D \eTABLE
-%D \stoptyping
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
%D
%D Watch how the new macros use less code:
%D
-%D \starttyping
+%D \startbuffer
%D \startTABLE
%D \NC Text 1 \NC Text 2 \NC\NR
%D \NC Text 3 \NC Text 4 \NC\NR
%D \stopTABLE
-%D \stoptyping
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
%D
 %D The actual code differs from the prototype in that it does not need
 %D to collect whole rows and parse them, but looks ahead instead.
-\def\startTABLE
- {\dosingleempty\dostartTABLE}
+\newconditional\c_tabl_nte_in_nc
-\def\dostartTABLE[#1]%
+\unexpanded\def\startTABLE
+ {\dosingleempty\tabl_nte_start}
+
+\def\tabl_nte_start[#1]%
{\bgroup
\bTABLE[#1]%
- \let\NC\doTABLENC
- \let\NR\doTABLENR
+ \let\NC\tabl_nte_start_nc
+ \let\NR\tabl_nte_start_nr
\let\bTR\relax
\let\bTD\relax
\let\bTH\relax
\let\bTN\relax}
-\def\stopTABLE
+\unexpanded\def\stopTABLE
{\eTABLE
\egroup}
-\newconditional\inTABLEnc
-
-\unexpanded\def\doTABLENR
+\unexpanded\def\tabl_nte_start_nr
{\eTR
- \setfalse\inTABLEnc}
+ \setfalse\c_tabl_nte_in_nc}
-\unexpanded\def\doTABLENC
- {\futurelet\next\dodoTABLENC}
+\unexpanded\def\tabl_nte_start_nc
+ {\futurelet\next\tabl_nte_start_nc_indeed}
-\def\dodoTABLENC
- {\ifx\next\doTABLENR \else
- \expandafter\dododoTABLENC
+\def\tabl_nte_start_nc_indeed
+ {\ifx\next\tabl_nte_start_nr \else
+ \expandafter\tabl_nte_start_nc_finish
\fi}
-\long\def\dododoTABLENC#1\NC
- {\ifconditional\inTABLEnc\else\settrue\inTABLEnc\dobTR[]\fi
- \dobTD#1\eTD\NC}
+\def\tabl_nte_start_nc_finish#1\NC
+ {\ifconditional\c_tabl_nte_in_nc \else
+ \settrue\c_tabl_nte_in_nc
+ \dobTR[]%
+ \fi
+ \dobTD#1\eTD\NC}
%D The related structure commands are also available:
-\unexpanded\def\startTABLEhead{\dosingleempty\dostartTABLEhead} \let\stopTABLEhead\relax
-\unexpanded\def\startTABLEnext{\dosingleempty\dostartTABLEnext} \let\stopTABLEnext\relax
-\unexpanded\def\startTABLEbody{\dosingleempty\dostartTABLEbody} \let\stopTABLEbody\relax
-\unexpanded\def\startTABLEfoot{\dosingleempty\dostartTABLEfoot} \let\stopTABLEfoot\relax
+\unexpanded\def\startTABLEhead{\dosingleempty\tabl_nte_start_head} \let\stopTABLEhead\relax
+\unexpanded\def\startTABLEnext{\dosingleempty\tabl_nte_start_next} \let\stopTABLEnext\relax
+\unexpanded\def\startTABLEbody{\dosingleempty\tabl_nte_start_body} \let\stopTABLEbody\relax
+\unexpanded\def\startTABLEfoot{\dosingleempty\tabl_nte_start_foot} \let\stopTABLEfoot\relax
-\long\def\dostartTABLEhead[#1]#2\stopTABLEhead{\appendtoks\doTABLEsection[#1]{#2}\to\TBLhead}
-\long\def\dostartTABLEnext[#1]#2\stopTABLEnext{\appendtoks\doTABLEsection[#1]{#2}\to\TBLnext}
-\long\def\dostartTABLEbody[#1]#2\stopTABLEbody{\appendtoks\doTABLEsection[#1]{#2}\to\TBLbody}
-\long\def\dostartTABLEfoot[#1]#2\stopTABLEfoot{\appendtoks\doTABLEsection[#1]{#2}\to\TBLfoot}
+\def\tabl_nte_start_head[#1]#2\stopTABLEhead{\appendtoks\doTABLEsection[#1]{#2}\to\TBLhead}
+\def\tabl_nte_start_next[#1]#2\stopTABLEnext{\appendtoks\doTABLEsection[#1]{#2}\to\TBLnext}
+\def\tabl_nte_start_body[#1]#2\stopTABLEbody{\appendtoks\doTABLEsection[#1]{#2}\to\TBLbody}
+\def\tabl_nte_start_foot[#1]#2\stopTABLEfoot{\appendtoks\doTABLEsection[#1]{#2}\to\TBLfoot}
\protect \endinput
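
% A minimal sketch of the structure commands defined above; whether the
% head section is given inside \startTABLE (as below) or before it is an
% assumption based on the \bTABLE head/body pattern:
%
% \startTABLE[split=yes]
%   \startTABLEhead
%     \NC first \NC second \NC\NR
%   \stopTABLEhead
%   \startTABLEbody
%     \NC one   \NC two    \NC\NR
%   \stopTABLEbody
% \stopTABLE
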
diff --git a/Master/texmf-dist/tex/context/base/tabl-pln.mkiv b/Master/texmf-dist/tex/context/base/tabl-pln.mkiv
index 9e65f7e40d8..3638006c79a 100644
--- a/Master/texmf-dist/tex/context/base/tabl-pln.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-pln.mkiv
@@ -9,83 +9,124 @@
\writestatus{loading}{ConTeXt Table Macros / Plain Tabular}
+%D This code might become a module.
+
\unprotect
-\newif \if@@plnusetab
-\newif \if@@plncr
-\newbox \@@plntabs
-\newbox \@@plntabsyet
-\newbox \@@plntabsdone
-\newdimen \@@plntabdimen
+\newconditional \c_tabl_plain_cr
+\newconditional \c_tabl_plain_use_tab
+\newbox \b_tabl_plain_tabs
+\newbox \b_tabl_plain_tabs_yet
+\newbox \b_tabl_plain_tabs_done
+\newdimen \d_tabl_plain_tab
+
+\let\m_tabl_plain_next\relax
\def\cleartabs % visible
- {\global\setbox\@@plntabsyet\emptyhbox
- \setbox\@@plntabs\emptyhbox}
+ {\global\setbox\b_tabl_plain_tabs_yet\emptyhbox
+ \setbox\b_tabl_plain_tabs\emptyhbox}
\def\settabs % visible
- {\setbox\@@plntabs\emptyhbox
- \futurelet\next\@@plnsettabs}
+ {\setbox\b_tabl_plain_tabs\emptyhbox
+ \futurelet\m_tabl_plain_next\tabl_plain_set_tabs}
\def\tabalign % visible
- {\@@plnusetabtrue\@@plnmaketabbox}
+ {\settrue\c_tabl_plain_use_tab
+ \tabl_plain_make_tab_box}
-\let\+\tabalign % no outer here (can be overloaded)
+\ifdefined\+ \else
+ \let\+\tabalign % no outer here (can be overloaded)
+\fi
-\def\@@plnsettabs
- {\ifx\next\+%
- \def\nxt{\afterassignment\@@plnsettab\let\nxt}%
+\def\tabl_plain_set_tabs
+ {\ifx\m_tabl_plain_next\+%
+ \let\tabl_plain_nxt\tabl_plain_align
+ \else\ifx\m_tabl_plain_next\tabalign % added hh
+ \let\tabl_plain_nxt\tabl_plain_align
\else
- \let\nxt\@@plnsetcols
- \fi
- \let\next\relax
- \nxt}
+ \let\tabl_plain_nxt\tabl_plain_set_cols
+ \fi\fi
+ \let\m_tabl_plain_next\relax
+ \tabl_plain_nxt}
-\def\@@plnsettab
- {\let\nxt\relax
- \@@plnusetabfalse\@@plnmaketabbox}
+\def\tabl_plain_align
+ {\afterassignment\tabl_plain_set_tab
+ \let\tabl_plain_nxt}
-\def\@@plnsetcols#1\columns
- {\scratchcounter#1%
- \@@plntabdimen\hsize
+\def\tabl_plain_set_tab
+ {\let\tabl_plain_nxt\relax
+ \setfalse\c_tabl_plain_use_tab
+ \tabl_plain_make_tab_box}
+
+\def\tabl_plain_set_cols#1\columns
+ {\scratchcounter#1\relax
+ \d_tabl_plain_tab\hsize
\loop
- \ifnum\scratchcounter>\zerocount \@nother
+ \ifnum\scratchcounter>\zerocount
+ \tabl_plain_other
\repeat}
-\def\@nother
- {\scratchdimen\@@plntabdimen
+\def\tabl_plain_other
+ {\scratchdimen\d_tabl_plain_tab
\divide\scratchdimen\scratchcounter
- \setbox\@@plntabs\hbox{\hbox to\scratchdimen{}\unhbox\@@plntabs}%
- \advance\@@plntabdimen-\scratchdimen
+ \setbox\b_tabl_plain_tabs\hbox
+ {\hbox to\scratchdimen{}%
+ \unhbox\b_tabl_plain_tabs}%
+ \advance\d_tabl_plain_tab-\scratchdimen
\advance\scratchcounter\minusone}
-\def\@@plnmaketabbox
+\def\tabl_plain_make_tab_box
{\begingroup
- \global\setbox\@@plntabsyet\copy\@@plntabs
- \global\setbox\@@plntabsdone\emptyhbox
- \def\cr
- {\@@plncrtrue\crcr\egroup\egroup
- \if@@plnusetab\unvbox\zerocount\lastbox\fi\endgroup
- \setbox\@@plntabs\hbox{\unhbox\@@plntabsyet\unhbox\@@plntabsdone}}%
- \setbox\zerocount\vbox\bgroup\@@plncrfalse
- \ialign\bgroup&\@@plnbegintabbox##\@@plnendtabbox\crcr}
-
-\def\@@plnbegintabbox
- {\setbox\zerocount\hbox\bgroup}
-
-\def\@@plnendtabbox
- {\if@@plncr
- \egroup % now \box\zerocount holds the column
+ \let\+\tabalign % added hh
+ \global\setbox\b_tabl_plain_tabs_yet\copy\b_tabl_plain_tabs
+ \global\setbox\b_tabl_plain_tabs_done\emptyhbox
+ \let\cr\tabl_plain_cr
+ \setbox\scratchbox\vbox\bgroup
+ \setfalse\c_tabl_plain_cr
+ \ialign\bgroup
+ \aligntab
+ \tabl_plain_begin_tab_box
+ \alignmark\alignmark
+ \tabl_plain_end_tab_box
+ \crcr}
+
+\def\tabl_plain_cr
+ {\settrue\c_tabl_plain_cr
+ \crcr
+ \egroup
+ \egroup
+ \ifconditional\c_tabl_plain_use_tab
+ \unvbox\scratchbox
+ \lastbox % okay?
+ \fi
+ \endgroup
+ \setbox\b_tabl_plain_tabs\hbox
+ {\unhbox\b_tabl_plain_tabs_yet
+ \unhbox\b_tabl_plain_tabs_done}}
+
+\def\tabl_plain_begin_tab_box
+ {\setbox\scratchbox\hbox\bgroup}
+
+\def\tabl_plain_end_tab_box
+ {\ifconditional\c_tabl_plain_cr
+ \egroup % now \box\scratchbox holds the column
\else
- \hss\egroup
- \global\setbox\@@plntabsyet\hbox
- {\unhbox\@@plntabsyet\global\setbox\plusone\lastbox}% now \box\plusone holds its size
- \ifvoid\plusone
- \global\setbox\plusone\hbox to\wd\zerocount{}%
- \else
- \setbox\zerocount\hbox to\wd\plusone{\unhbox\zerocount}%
- \fi
- \global\setbox\@@plntabsdone\hbox{\box\plusone\unhbox\@@plntabsdone}%
- \fi
- \box\zerocount}
+ \hss
+ \egroup
+ \global\setbox\b_tabl_plain_tabs_yet\hbox
+ {\unhbox\b_tabl_plain_tabs_yet
+ \global\setbox\globalscratchbox\lastbox}% now \box\globalscratchbox holds its size
+ \ifvoid\globalscratchbox
+ \global\setbox\globalscratchbox\hbox to \wd\scratchbox
+ {}%
+ \else
+ \setbox\scratchbox\hbox to \wd\globalscratchbox
+ {\unhbox\scratchbox}%
+ \fi
+ \global\setbox\b_tabl_plain_tabs_done\hbox
+ {\box\globalscratchbox
+ \unhbox\b_tabl_plain_tabs_done}%
+ \fi
+ \box\scratchbox}
\protect \endinput
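
% \settabs and \tabalign (aliased to \+ when that control sequence is still
% free) mirror the plain TeX tabbing macros, and both calling conventions
% handled above still apply. A minimal sketch:
%
% \settabs 3 \columns
% \tabalign alpha & beta & gamma \cr
% \tabalign one   & two  & three \cr
%
% \settabs \tabalign \hskip 8em & \hskip 5em & \cr
% \tabalign wide & narrower & rest \cr
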
diff --git a/Master/texmf-dist/tex/context/base/tabl-tab.mkiv b/Master/texmf-dist/tex/context/base/tabl-tab.mkiv
index b50be8531ef..f9ac27e7011 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tab.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-tab.mkiv
@@ -14,19 +14,17 @@
% Todo: consistent namespace and get rid of not used code
-% In \MKIV\ the old table macros are sort of obsolete. The
-% color extensions have been removed and some code is stripped.
-% For practical reasons the \TABLE\ macros that are used are
-% embedded in this file.
+% In \MKIV\ the old table macros are sort of obsolete. The color extensions
+% have been removed and some code is stripped. For practical reasons the
+% \TABLE\ macros that are used are embedded in this file.
%
-% The following code is based on TABLE 1.0 by Michael J. Wichura
-% (August 1988. We used a patched version with many overloads
-% and extensions. The documented (and larger) source can be found
-% in \type {thrd-tab.tex}.
+% The following code is based on TABLE 1.0 by Michael J. Wichura (August 1988).
+% We used a patched version with many overloads and extensions. The documented
+% (and larger) source can be found in \type {thrd-tab.tex}.
%
-% Some code has been stripped. Some color has been added. Some macros
-% have been renamed. Registers have been replaces. And probably much
-% more can be cleaned up.
+% Some code has been stripped. Some color has been added. Some macros have
+% been renamed. Registers have been replaced. And probably much more can be
+% cleaned up. We also need to use \tabl_tab_ prefixes here.
\unprotect
@@ -80,7 +78,7 @@
\appendtoks
\parindent\zeropoint
\raggedright
- \rightskip\zeropoint \!!plus 4em \relax
+ \rightskip\zeropoint \s!plus 4em \relax
\to \everytableparbox
\newskip \tablelefttabskip
@@ -854,11 +852,8 @@
\lineskiplimit\zeropoint
\lineskip \zeropoint
\tabskip \zeropoint
- \doifelsenothing\@@titextwidth
- {\halign}
- {\doifelse\@@titextwidth\v!max
- {\halign to \hsize}
- {\halign to \@@titextwidth}}%
+ \edef\p_tabl_table_textwidth{\directtablesparameter\c!textwidth}%
+ \halign \ifx\p_tabl_table_textwidth\empty \else to \ifx\p_tabl_table_textwidth\v!max \hsize \else \p_tabl_table_textwidth \fi\fi
% \the\!taTableSpread
\bgroup
\span
@@ -878,9 +873,9 @@
\def\donormaltablelineformat#1#2%
{\vrule
- \!!width \zeropoint
- \!!height\dimexpr\tablestrutheightfactor\tablestrutunit+#1\tablestrutunit\relax
- \!!depth \dimexpr\tablestrutdepthfactor \tablestrutunit+#2\tablestrutunit\relax
+ \s!width \zeropoint
+ \s!height\dimexpr\tablestrutheightfactor\tablestrutunit+#1\tablestrutunit\relax
+ \s!depth \dimexpr\tablestrutdepthfactor \tablestrutunit+#2\tablestrutunit\relax
\relax
\cr}
@@ -977,7 +972,7 @@
\def\donormaltablefullrule
{\starttablenoalign
\!ttGetHalfRuleThickness
- \hrule\!!height\scratchdimen\!!depth\scratchdimen
+ \hrule\s!height\scratchdimen\s!depth\scratchdimen
\stoptablenoalign}
\def\donormaltableshortrule % was: \!ttShortHrule
@@ -986,7 +981,7 @@
\ifx\tablecurrenthrulecolor\empty\else
\switchtocolor[\tablecurrenthrulecolor]% see *DL*
\fi
- \leaders\hrule\!!height\scratchdimen\!!depth\scratchdimen\hfill
+ \leaders\hrule\s!height\scratchdimen\s!depth\scratchdimen\hfill
\emptyhbox
\ignorespaces}
@@ -1022,7 +1017,7 @@
% SetTableToWidth -> textwidth=dimension [to dimension]
% Expand -> textwidth=max [to \hsize]
% WidenTableBy -> [spread #1]
-% \tablelefttabskip\zeropoint\!!plus1fill
+% \tablelefttabskip\zeropoint\s!plus1\s!fill
% \tablerighttabskip\tablelefttabskip
% LongLines -> [spread \hsize]
@@ -1330,26 +1325,38 @@
\newconditional\tablerepeathead
\newconditional\tablerepeattail
-
\unexpanded\def\starttable
{\bgroup
\dodoubleempty\dostarttable}
\unexpanded\def\dostarttable[#1][#2]% preamble optional-settings
{\ifsecondargument
- \getparameters[\??ti][#2]%
+ \setupcurrenttables[#2]%
\fi
\let\stoptable\dostoptable
- \doif\@@tisplit\v!auto
- {\ifinsidesplitfloat\let\@@tisplit\v!yes\fi}%
- \doifinsetelse\@@tisplit{\v!yes,\v!repeat}
- {\unexpanded\def\stoptable{\stoptables\egroup}%
- \starttables}
- {\doifelsenothing\@@tiframe
- {\ifinsidefloat\else\startbaselinecorrection\fi}
- {\startframedcontent[\@@tiframe]}%
- \postponenotes
- \firststagestarttable}%
+ \edef\p_tabl_table_split{\directtablesparameter\c!split}%
+ \edef\p_tabl_table_frame{\directtablesparameter\c!frame}%
+ \ifx\p_tabl_table_split\v!auto
+ \ifinsidesplitfloat
+ \let\p_tabl_table_split\v!yes
+ \lettablesparameter\c!split\v!yes % might be used later, best make a proper mode
+ \fi
+ \fi
+ \ifx\p_tabl_table_split\v!yes
+ \def\stoptable{\dostoptables\egroup}% not \unexpanded as we look ahead
+ \expandafter\starttables
+ \else\ifx\p_tabl_table_split\v!repeat
+ \def\stoptable{\dostoptables\egroup}% not \unexpanded as we look ahead
+ \doubleexpandafter\starttables
+ \else
+ \ifx\p_tabl_table_frame\empty
+ \ifinsidefloat\else\startbaselinecorrection\fi
+ \else
+ \startframedcontent[\p_tabl_table_frame]%
+ \fi
+ \postponenotes
+ \doubleexpandafter\firststagestarttable
+ \fi\fi
[#1]}
 % We cannot define \unexpanded\def\dostoptable as somehow lookahead
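
% A minimal sketch of the split branches above, using the classic template
% plus optional settings arguments of \starttable:
%
% \starttable[|l|p|][split=repeat]
% \HL
% \VL first \VL second \VL\SR
% \HL
% \stoptable
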
@@ -1361,16 +1368,18 @@
{\dochucktableautorow % before the tail, else noalign problem
\doinserttabletail
\starttablenoalign
- \globalletempty\dotablehead
- \globalletempty\dotabletail
+ \global\let\dotablehead\empty
+ \global\let\dotabletail\empty
\stoptablenoalign
\dofinishtable
- \doifelsenothing\@@tiframe
- {\ifinsidefloat\else
+ \ifx\p_tabl_table_frame\empty
+ \ifinsidefloat\else
\stopbaselinecorrection
\goodbreak % compensates all the nobreaks
- \fi}
+ \fi
+ \else
\stopframedcontent
+ \fi
\egroup}
%D Before we can grab the argument, we have to make sure that
@@ -1465,7 +1474,7 @@
% \def\doverysimpletableHL % todo
% {\starttablenoalign
-% \expandafter\donormaltablefullrule\@@tiHLheight
+% \normalexpanded{\noexpand\donormaltablefullrule\m_tabl_table_HLheight}%
% \stoptablenoalign}
\def\dorestarttable#1%
@@ -1565,9 +1574,14 @@
{\bgroup
\let\stoptables\dostoptables
\splittablestrue
- \doifelse\@@tisplit\v!repeat
- {\settrue \tablerepeathead\settrue \tablerepeattail}
- {\setfalse\tablerepeathead\setfalse\tablerepeattail}%
+ \edef\p_tabl_table_split{\directtablesparameter\c!split}%
+ \ifx\p_tabl_table_split\v!repeat
+ \settrue\tablerepeathead
+ \settrue\tablerepeattail
+ \else
+ \setfalse\tablerepeathead
+ \setfalse\tablerepeattail
+ \fi
\flushnotes
\setbox\tablecontentbox\vbox\bgroup
\forgetall
@@ -1575,13 +1589,15 @@
\let\stoptables\relax % needed for \noalign
-\def\dostoptables
+\def\dostoptables % not \unexpanded as we need the lookahead (brrr)
{\dochucktableautorow % AM: before the tail, else noalign problem
\ifconditional\tablerepeattail\else\doinserttabletail\fi
\dofinishtable
\egroup
\dontcomplain
\dosplittablebox\tablecontentbox
+ \global\let\dotablehead\empty % new here
+ \global\let\dotabletail\empty % new here
\flushnotes
\egroup}
@@ -1635,18 +1651,10 @@
\let\dotablehead\empty % needs checking
\let\dotabletail\empty % needs checking
-\letbeundefined{\e!start\v!tablehead}
-\letbeundefined{\e!stop \v!tablehead}
-\letbeundefined{\e!start\v!tabletail}
-\letbeundefined{\e!stop \v!tabletail}
-
-\expanded
- {\long\def\csname\e!start\v!tablehead\endcsname##1\csname\e!stop\v!tablehead\endcsname%
- {\noexpand\settablehead##1\noexpand\end}}
-
-\expanded
- {\long\def\csname\e!start\v!tabletail\endcsname##1\csname\e!stop\v!tabletail\endcsname%
- {\noexpand\settabletail##1\noexpand\end}}
+\letvalue{\e!start\v!tablehead}\relax
+\letvalue{\e!stop \v!tablehead}\relax
+\letvalue{\e!start\v!tabletail}\relax
+\letvalue{\e!stop \v!tabletail}\relax
%D The second argument is a dummy one, by scanning for it, we
%D get rid of interfering spaces.
@@ -1655,11 +1663,27 @@
\newconditional\hassometablehead
\newconditional\hassometabletail
-\def\settablehead{\dodoubleempty\dosettablehead}
-\def\settabletail{\dodoubleempty\dosettabletail}
+\unexpanded\def\settablehead{\dodoubleempty\dosettablehead}
+\unexpanded\def\settabletail{\dodoubleempty\dosettabletail}
+
+% \def\dosettablehead[#1][#2]#3\end{\setvalue{\??tablehead#1}{\tablenoalign{\global\settrue\hassometablehead}#3}}
+% \def\dosettabletail[#1][#2]#3\end{\setvalue{\??tabletail#1}{\tablenoalign{\global\settrue\hassometabletail}#3}}
+
+\def\dosettablehead[#1][#2]#3\end
+ {\gdef\dotablehead{\executeifdefined{\??tablehead#1}\empty}% new
+ \setvalue{\??tablehead#1}{\tablenoalign{\global\settrue\hassometablehead}#3}}
+
+\def\dosettabletail[#1][#2]#3\end
+ {\gdef\dotabletail{\executeifdefined{\??tabletail#1}\empty}% new
+ \setvalue{\??tabletail#1}{\tablenoalign{\global\settrue\hassometabletail}#3}}
-\long\def\dosettablehead[#1][#2]#3\end{\setvalue{\??tablehead#1}{\tablenoalign{\global\settrue\hassometablehead}#3}}
-\long\def\dosettabletail[#1][#2]#3\end{\setvalue{\??tabletail#1}{\tablenoalign{\global\settrue\hassometabletail}#3}}
+\normalexpanded
+ {\def\csname\e!start\v!tablehead\endcsname#1\csname\e!stop\v!tablehead\endcsname%
+ {\settablehead#1\noexpand\end}}
+
+\normalexpanded
+ {\def\csname\e!start\v!tabletail\endcsname#1\csname\e!stop\v!tabletail\endcsname%
+ {\settabletail#1\noexpand\end}}
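
% The wrappers above grab everything between \starttablehead and
% \stoptablehead and feed it to \settablehead, so a head can be repeated
% over split parts. A minimal sketch:
%
% \starttablehead
% \HL
% \VL \bf one \VL \bf two \VL\SR
% \HL
% \stoptablehead
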
%D Redundant \type{\HL}'s are removed automatically, so
%D mid||lines can be used without problems.
@@ -1899,7 +1923,7 @@
\def\dotablevrulecommand#1% global assignments
{\doifnumberelse{#1}
{\global\tablevrulethicknessfactor#1\relax
- \global\multiply\tablevrulethicknessfactor\@@tiVLwidth\relax}
+ \global\multiply\tablevrulethicknessfactor\m_tabl_table_VLwidth\relax}
{\xdef\tablecurrentvrulecolor{#1}}}
\unexpanded\def\dotableVL
@@ -1909,7 +1933,7 @@
\def\dodotableVL[#1]%
{\global\let\tablecurrentvrulecolor\empty
- \global\tablevrulethicknessfactor\@@tiVLwidth\relax
+ \global\tablevrulethicknessfactor\m_tabl_table_VLwidth\relax
\iffirstargument
\rawprocesscommalist[#1]\dotablevrulecommand
\fi
@@ -1933,7 +1957,7 @@
\def\dotablehrulecommand#1% global assignments
{\doifnumberelse{#1}
{\global\tablehrulethicknessfactor#1\relax
- \global\multiply\tablehrulethicknessfactor\@@tiHLheight\relax}
+ \global\multiply\tablehrulethicknessfactor\m_tabl_table_HLheight\relax}
{\xdef\tablecurrenthrulecolor{#1}}}
\unexpanded\def\dotableHL
@@ -1953,7 +1977,7 @@
\writestatus\m!TABLE{change \string\MR\space into \string\SR}%
\fi\fi
\bgroup
- \global\tablehrulethicknessfactor\@@tiHLheight\relax
+ \global\tablehrulethicknessfactor\m_tabl_table_HLheight\relax
\iffirstargument
\global\let\tablecurrenthrulecolor\empty
\rawprocesscommalist[#1]\dotablehrulecommand
@@ -2004,10 +2028,10 @@
{\dochucktableautorow
\dofinishtablerow
\starttablenoalign
- \dosingleempty\dotableTB}
+ \dosingleempty\dodotableTB}
-\def\dotableTB[#1]%
- {\blank[\iffirstargument#1\else\@@tiNL\fi]%
+\def\dodotableTB[#1]%
+ {\blank[\iffirstargument#1\else\directtablesparameter\c!NL\fi]%
\nobreak
\stoptablenoalign}
@@ -2056,7 +2080,7 @@
\global\tabledrulespan#1\relax
\else
\global\tablehrulethicknessfactor#1\relax
- \global\multiply\tablehrulethicknessfactor\@@tiVLwidth\relax
+ \global\multiply\tablehrulethicknessfactor\m_tabl_table_VLwidth\relax
\fi}
{\xdef\tablecurrenthrulecolor{#1}}}
@@ -2074,7 +2098,7 @@
\writestatus\m!TABLE{change \string\MR\space into \string\SR}%
\fi\fi
\dosettableaction\tableunknownstate
- \global\tablehrulethicknessfactor\@@tiHLheight\relax
+ \global\tablehrulethicknessfactor\m_tabl_table_HLheight\relax
\global\tabledrulespan\zerocount
\iffirstargument
\global\let\tablecurrenthrulecolor\empty
@@ -2136,17 +2160,28 @@
\let\REF \dotablereformat
\to \localtabledefinitions
-\setvalue{\??ti:\c!distance:\v!none }{\dotableOpenUp00\def\LOW{\Lower6 }}
-\setvalue{\??ti:\c!distance:\v!small }{\dotableOpenUp00\def\LOW{\Lower6 }} % == baseline
-\setvalue{\??ti:\c!distance:\v!medium}{\dotableOpenUp11\def\LOW{\Lower7 }}
-\setvalue{\??ti:\c!distance:\v!big }{\dotableOpenUp22\def\LOW{\Lower8 }}
+\installcorenamespace{tables}
+\installcorenamespace{tabledistance}
+\installcorenamespace{tablealign}
+
+\installsetuponlycommandhandler \??tables {tables} % some day we can have named tables
+
+\setvalue{\??tabledistance\v!none }{\dotableOpenUp00\def\LOW{\Lower6 }}
+\setvalue{\??tabledistance\v!small }{\dotableOpenUp00\def\LOW{\Lower6 }} % == baseline
+\setvalue{\??tabledistance\v!medium}{\dotableOpenUp11\def\LOW{\Lower7 }}
+\setvalue{\??tabledistance\v!big }{\dotableOpenUp22\def\LOW{\Lower8 }}
\appendtoks
- \getvalue{\??ti:\c!distance:\@@tidistance}%
+ \expandnamespaceparameter\??tabledistance\directtablesparameter\c!distance\v!medium
\to \localtabledefinitions
+\setvalue{\??tablealign\v!right }{\def\dotableparalignment{\raggedright}}
+\setvalue{\??tablealign\v!left }{\def\dotableparalignment{\raggedleft}}
+\setvalue{\??tablealign\v!middle }{\def\dotableparalignment{\raggedcenter}}
+\setvalue{\??tablealign\s!unknown}{\def\dotableparalignment{\notragged}}
+
\appendtoks
- \doifelse\@@tidistance\v!none
+ \doifelse{\directtablesparameter\c!distance}\v!none
{\tablerowfactor\zerocount}
{\tablerowfactor\plustwo }%
\to \localtabledefinitions
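
% A minimal sketch of tuning these mappings through \setuptables, with the
% same keys that the defaults below use:
%
% \setuptables[distance=big,align=middle,HL=medium,VL=small]
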
@@ -2160,31 +2195,28 @@
\@EAEAEA\dotextmodebar
\fi\fi}
-\unexpanded\def\setuptables
- {\dosingleargument\dosetuptables}
-
-\def\dosetuptables[#1]%
- {\getparameters[\??ti][#1]%
- \processaction % we have a command for this
- [\@@tialign]
- [ \v!right=>\def\dotableparalignment{\raggedright},
- \v!left=>\def\dotableparalignment{\raggedleft},
- \v!middle=>\def\dotableparalignment{\raggedcenter},
- \s!default=>\def\dotableparalignment{\notragged},
- \s!unknown=>\def\dotableparalignment{\notragged}]%
- \assignalfadimension\@@tiVL\@@tiVLwidth 246%
- \assignalfadimension\@@tiHL\@@tiHLheight246}
+\appendtoks
+ \expandnamespaceparameter\??tablealign\directtablesparameter\c!align\s!unknown
+ \assignalfadimension{\directtablesparameter\c!VL}\m_tabl_table_VLwidth 246%
+ \assignalfadimension{\directtablesparameter\c!HL}\m_tabl_table_HLheight246%
+\to \everysetuptables
\def\dolocaltablesetup
- {\@@ticommands\relax
- \doifsomething\@@tibodyfont{\switchtobodyfont[\@@tibodyfont]}%
- \tablelinethicknessunit\dimexpr\@@tirulethickness/\tablelinethicknessfactor\relax
- \doifelse\@@tiheight\v!strut
- {\let\tablestrutheightfactor\tablestrutheightfactor}
- {\let\tablestrutheightfactor\@@tiheight}%
- \doifelse\@@tidepth\v!strut
- {\let\tablestrutdepthfactor\tablestrutdepthfactor}
- {\let\tablestrutdepthfactor\@@tidepth}%
+ {\directtablesparameter\c!commands\relax
+ \doifsomething{\directtablesparameter\c!bodyfont}{\switchtobodyfont[\directtablesparameter\c!bodyfont]}%
+ \tablelinethicknessunit\dimexpr\directtablesparameter\c!rulethickness/\tablelinethicknessfactor\relax
+ \edef\p_tabl_table_height{\directtablesparameter\c!height}%
+ \edef\p_tabl_table_depth{\directtablesparameter\c!depth}%
+ \ifx\p_tabl_table_height\v!strut
+ \let\tablestrutheightfactor\tablestrutheightfactor
+ \else
+ \let\tablestrutheightfactor\p_tabl_table_height
+ \fi
+ \ifx\p_tabl_table_depth\v!strut
+ \let\tablestrutdepthfactor\tablestrutdepthfactor
+ \else
+ \let\tablestrutdepthfactor\p_tabl_table_depth
+ \fi
\edef\tablestrutheightfactor{\withoutpt\the\dimexpr10\dimexpr\tablestrutheightfactor\points}%
\edef\tablestrutdepthfactor {\withoutpt\the\dimexpr10\dimexpr\tablestrutdepthfactor \points}%
\tablestrutunit\dimexpr\normalbaselineskip/12\relax % 12 is default bodyfont
@@ -2243,9 +2275,9 @@
%D \stopcombination
\setuptables
- [HL=\v!medium,
- VL=\v!medium,
- NL=\v!small,
+ [\c!HL=\v!medium,
+ \c!VL=\v!medium,
+ \c!NL=\v!small,
\c!frame=,
\c!align=\v!right,
\c!depth=.40, % \v!strut
@@ -2256,8 +2288,8 @@
\c!distance=\v!medium,
\c!bodyfont=,
\c!commands=,
- \c!background=\v!screen,
- \c!backgroundscreen=\@@rsscreen,
+ \c!background=\v!screen, % huh?
+ \c!backgroundscreen=\defaultbackgroundscreen,
\c!backgroundcolor=,
\c!split=\v!auto]
diff --git a/Master/texmf-dist/tex/context/base/tabl-tbl.lua b/Master/texmf-dist/tex/context/base/tabl-tbl.lua
index c48c5100d14..19548e7b3de 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tbl.lua
+++ b/Master/texmf-dist/tex/context/base/tabl-tbl.lua
@@ -6,12 +6,13 @@ if not modules then modules = { } end modules ['tabl-tbl'] = {
license = "see context related readme files"
}
--- A couple of hacks ... easier to do in Lua than in regular
--- TeX. More will follow.
+-- A couple of hacks ... easier to do in Lua than in regular TeX. More will
+-- follow.
+
+local context, commands = context, commands
local tonumber = tonumber
local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
-
local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
local settexcount = tex.setcount
diff --git a/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv b/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
index 20ddac4614a..2fa8c48052a 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
@@ -63,7 +63,7 @@
% k i<n> skip around column
% d digits (~)
%
-% C [C\L\M\R] {color}
+% C [LMRT] {color} % T is text color
%
% | {color,n}
%
@@ -185,7 +185,6 @@
\newconditional \c_tabl_tabulate_handlepbreak \settrue\c_tabl_tabulate_handlepbreak
\newconditional \c_tabl_tabulate_autorulespacing \settrue\c_tabl_tabulate_autorulespacing
\newconditional \c_tabl_tabulate_someamble
-\newconditional \c_tabl_tabulate_has_colors
\newconditional \c_tabl_tabulate_tolerant_break
\newconditional \c_tabl_tabulate_splitoff_whitespace
\newconditional \c_tabl_tabulate_pwidth_set
@@ -221,6 +220,8 @@
\newconstant \c_tabl_tabulate_localcolorspan
\newconstant \c_tabl_tabulate_modus
+\let\tabulatesplitlinemode\c_tabl_tabulate_splitlinemode % temp hack, we need an interface
+
\let \m_tabl_tabulate_separator_factor \empty % fraction
\newif \iftracetabulate % will become a tracker
@@ -305,13 +306,13 @@
\def\tabl_tabulate_nobreak_inject_tracer
{\red % maybe use the fast color switcher here
- \hrule\!!height.5\linewidth\!!depth.5\linewidth
+ \hrule\s!height.5\linewidth\s!depth.5\linewidth
\par
\kern-\linewidth
- \nobreak}
+ \tabl_tabulate_break_no}
\def\tabl_tabulate_nobreak_inject_indeed
- {\nobreak
+ {\tabl_tabulate_break_no
\iftracetabulate
\tabl_tabulate_nobreak_inject_tracer
\fi}
@@ -399,11 +400,11 @@
% \unexpanded % we can expand this one
\def\tabl_tabulate_inject_pre_skip#1%
{\ifdim#1>\zeropoint
- \hskip#1\relax
+ \kern#1\relax % was \hskip
\else\ifnum\c_tabl_tabulate_column=\zerocount
\ifconditional\c_tabl_tabulate_autorulespacing
\ifcase\c_tabl_tabulate_has_rule_spec_first\else
- \hskip\s_tabl_tabulate_first\relax
+ \kern\s_tabl_tabulate_first\relax % was \hskip
\fi
\fi
\fi\fi}
@@ -411,11 +412,11 @@
% \unexpanded % we can expand this one
\def\tabl_tabulate_inject_post_skip#1%
{\ifdim#1>\zeropoint
- \hskip#1\relax
+ \kern#1\relax % was \hskip
\else\ifnum\c_tabl_tabulate_columns=\c_tabl_tabulate_nofcolumns
\ifconditional\c_tabl_tabulate_autorulespacing
\ifcase\c_tabl_tabulate_has_rule_spec_last\else
- \hskip\s_tabl_tabulate_last\relax
+ \kern\s_tabl_tabulate_last\relax % was \hskip
\fi
\fi
\fi\fi}
@@ -455,7 +456,7 @@
\bgroup
\tabl_tabulate_bbskip
\bgroup % we cannot combine the if because a cell may have only one ##
-\tabl_tabulate_hook_b
+ \tabl_tabulate_hook_b
\c_tabl_tabulate_align\constantnumber\c_tabl_tabulate_align % needed in tag passing
\noexpand\dostarttagged\noexpand\t!tabulatecell\noexpand\empty
\noexpand\dotagtabulatecell
@@ -469,6 +470,11 @@
\the\t_tabl_tabulate_font
\the\t_tabl_tabulate_settings
\the\t_tabl_tabulate_before
+ \ifx\m_tabl_tabulate_text_color\empty
+ \expandafter\gobbleoneargument
+ \else
+ \expandafter\dofastcoloractivation
+ \fi\m_tabl_tabulate_text_color
\noexpand\fi
% grouping needs to be outside macros (or expandable), nice test
% example \NC \string \aligntab \NC which will fail otherwise (mk)
@@ -485,7 +491,7 @@
\tabl_tabulate_shaped_par_end
\fi
\noexpand#2%
-\tabl_tabulate_hook_e
+ \tabl_tabulate_hook_e
\egroup
\egroup
\aligntab
@@ -528,7 +534,8 @@
\tabl_tabulate_set_preamble}
\installtabulatepreambleoption{R}{\t_tabl_tabulate_font{\rm}%
\tabl_tabulate_set_preamble}
-\installtabulatepreambleoption{m}{\t_tabl_tabulate_bmath{$}\t_tabl_tabulate_emath{$}%
+\installtabulatepreambleoption{m}{\t_tabl_tabulate_bmath{$}%
+ \t_tabl_tabulate_emath{$}%
\tabl_tabulate_set_preamble}
\installtabulatepreambleoption{M}{\t_tabl_tabulate_bmath{$\displaystyle}\t_tabl_tabulate_emath{$}%
\tabl_tabulate_set_preamble}
@@ -704,9 +711,20 @@
\def\tabl_tabulate_set_width_simple
{\tabl_tabulate_set_preamble_step\tabl_tabulate_xbskip\tabl_tabulate_xeskip}
+% \def\tabl_tabulate_set_color_span#1#2%
+% {\xdef\m_tabl_tabulate_color{#2}%
+% \global\c_tabl_tabulate_colorspan\if#1L\plusone\else\if#1M\plustwo\else\if#1R\plusthree\else\zerocount\fi\fi\fi\relax
+% \tabl_tabulate_set_preamble}
+
+\installcorenamespace{tabulatecolorspec}
+
+\setvalue{\??tabulatecolorspec L}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusone }
+\setvalue{\??tabulatecolorspec M}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plustwo }
+\setvalue{\??tabulatecolorspec R}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusthree}
+\setvalue{\??tabulatecolorspec T}#1{\xdef\m_tabl_tabulate_text_color{#1}}
+
\def\tabl_tabulate_set_color_span#1#2%
- {\xdef\m_tabl_tabulate_color{#2}%
- \global\c_tabl_tabulate_colorspan\if#1L\plusone\else\if#1M\plustwo\else\if#1R\plusthree\else\zerocount\fi\fi\fi\relax
+ {\csname\??tabulatecolorspec#1\endcsname{#2}%
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_vrule_command#1%
@@ -726,6 +744,7 @@
\t_tabl_tabulate_font\emptytoks
\t_tabl_tabulate_settings\emptytoks
\global\let\m_tabl_tabulate_color\empty
+ \global\let\m_tabl_tabulate_text_color\empty
\global\let\m_tabl_tabulate_vrule_color\empty
\global\c_tabl_tabulate_colorspan\zerocount
\global\advance\c_tabl_tabulate_columns\plusone
@@ -803,9 +822,9 @@
\ifconditional\c_tabl_tabulate_splitoff_whitespace
\tabl_tabulate_check_whitespace
\fi
+ \tabl_tabulate_color_repeat % needs to end up in a cell
\setbox\b_tabl_tabulate\hbox to \wd\b_tabl_tabulate
- {\tabl_tabulate_color_repeat
- \hss\tabl_tabulate_hook_yes{\box\b_tabl_tabulate}\hss}%
+ {\hss\tabl_tabulate_hook_yes{\box\b_tabl_tabulate}\hss}%
\tabl_tabulate_normalize_splitline
\box\b_tabl_tabulate}
@@ -1044,17 +1063,48 @@
\def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[]}
\def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[]}
+% \unexpanded\def\tabl_start_defined[#1]%
+% {\bgroup
+% \edef\currenttabulationparent{#1}%
+% \let\currenttabulation\currenttabulationparent
+% \doifnextoptionalelse\tabl_start_defined_yes\tabl_start_defined_nop}
+%
+% \def\tabl_start_defined_yes[#1]%
+% {\edef\currenttabulation{\currenttabulation:#1}%
+% \tabl_tabulate_start_building}
+%
+% \def\tabl_start_defined_nop
+% {\tabl_tabulate_start_building}
+
\unexpanded\def\tabl_start_defined[#1]%
{\bgroup
\edef\currenttabulationparent{#1}%
\let\currenttabulation\currenttabulationparent
- \doifnextoptionalelse\tabl_start_defined_yes\tabl_start_defined_nop}
+ \dodoubleargument\tabl_start_defined_indeed}
+
+\def\tabl_start_defined_indeed
+ {\iffirstargument
+ \ifsecondargument
+ \doubleexpandafter\tabl_start_defined_two
+ \else
+ \doubleexpandafter\tabl_start_defined_one
+ \fi
+ \else
+ \singleexpandafter\tabl_start_defined_zero
+ \fi}
+
+\def\tabl_start_defined_one[#1][#2]%
+ {\doifassignmentelse{#1}
+ {\setuptabulation[\currenttabulation][#1]}%
+ {\edef\currenttabulation{\currenttabulation:#1}}%
+ \tabl_tabulate_start_building}
-\def\tabl_start_defined_yes[#1]%
+\def\tabl_start_defined_two[#1][#2]%
{\edef\currenttabulation{\currenttabulation:#1}%
+ \setuptabulation[\currenttabulation][#2]%
\tabl_tabulate_start_building}
-\def\tabl_start_defined_nop
+\def\tabl_start_defined_zero[#1][#2]%
{\tabl_tabulate_start_building}
% \definetabulate[\v!tabulate][|l|p|] % we need to get rid of this one
@@ -1063,18 +1113,47 @@
{\bgroup % whole thing
\dodoubleempty\tabl_start_regular}
-\def\tabl_start_regular[#1][#2]%
+% \def\tabl_start_regular[#1][#2]%
+% {%\let\currenttabulationparent\v!tabulate
+% \let\currenttabulationparent\empty
+% \let\currenttabulation\currenttabulationparent
+% \def\p_format{#1}%
+% \ifx\p_format\empty
+% \def\p_format{|l|p|}%
+% \fi
+% \lettabulationparameter\c!format\p_format
+% \ifsecondargument
+% \setupcurrenttabulation[#2]%
+% \fi
+% \tabl_tabulate_start_building}
+
+\def\tabl_start_regular
{%\let\currenttabulationparent\v!tabulate
\let\currenttabulationparent\empty
\let\currenttabulation\currenttabulationparent
- \def\p_format{#1}%
+ \ifsecondargument
+ \expandafter\tabl_start_regular_two
+ \else
+ \expandafter\tabl_start_regular_one
+ \fi}
+
+\def\tabl_start_regular_one[#1][#2]%
+ {\doifassignmentelse{#1}
+ {\setupcurrenttabulation[\c!format={|l|p|},#1]}
+ {\def\p_format{#1}%
+ \ifx\p_format\empty
+ \def\p_format{|l|p|}%
+ \fi
+ \lettabulationparameter\c!format\p_format}%
+ \tabl_tabulate_start_building}
+
+\def\tabl_start_regular_two[#1][#2]%
+ {\def\p_format{#1}%
\ifx\p_format\empty
\def\p_format{|l|p|}%
\fi
\lettabulationparameter\c!format\p_format
- \ifsecondargument
- \setupcurrenttabulation[#2]%
- \fi
+ \setupcurrenttabulation[#2]%
\tabl_tabulate_start_building}
\letvalue{\e!stop\v!tabulate }\relax
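
% After the split above, \starttabulate accepts either a bare template or
% an assignment list as its first argument, with an optional second setup
% argument. A minimal sketch of both forms:
%
% \starttabulate[|l|p|]
% \NC one \NC two \NC\NR
% \stoptabulate
%
% \starttabulate[|l|p|][unit=1.2em]
% \NC one \NC two \NC\NR
% \stoptabulate
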
@@ -1150,6 +1229,7 @@
\let\m_tabl_tabulate_color_previous \empty
\let\m_tabl_tabulate_color \empty
+\let\m_tabl_tabulate_text_color \empty
\let\m_tabl_tabulate_color_local \empty
\let\m_tabl_tabulate_vrule_color \empty
\let\m_tabl_tabulate_vrule_color_local \empty
@@ -1157,12 +1237,13 @@
\let\m_tabl_tabulate_hrule_color_default\empty % used local
\appendtoks
- \glet\m_tabl_tabulate_color_previous \empty
- \glet\m_tabl_tabulate_color \empty
- \glet\m_tabl_tabulate_color_local \empty
- \glet\m_tabl_tabulate_vrule_color \empty
- \glet\m_tabl_tabulate_vrule_color_local\empty
- \global\d_tabl_tabulate_vrulethickness_local\zeropoint
+ \global\let\m_tabl_tabulate_color_previous \empty
+ \global\let\m_tabl_tabulate_color \empty
+ \global\let\m_tabl_tabulate_text_color \empty
+ \global\let\m_tabl_tabulate_color_local \empty
+ \global\let\m_tabl_tabulate_vrule_color \empty
+ \global\let\m_tabl_tabulate_vrule_color_local \empty
+ \global \d_tabl_tabulate_vrulethickness_local\zeropoint
\to \t_tabl_tabulate_every_row
\unexpanded\def\tabl_tabulate_color_side_right_second
@@ -1261,11 +1342,11 @@
\tabl_tabulate_column_normal#1}
\def\tabl_tabulate_column_vruled_normal
- {\vrule\!!width\d_tabl_tabulate_vrulethickness\relax}
+ {\vrule\s!width\d_tabl_tabulate_vrulethickness\relax}
\def\tabl_tabulate_column_vruled_colored
{\dousecolorparameter\m_tabl_tabulate_vrule_color
- \vrule\!!width\d_tabl_tabulate_vrulethickness\relax}
+ \vrule\s!width\d_tabl_tabulate_vrulethickness\relax}
\unexpanded\def\tabl_tabulate_column_vrule_inject_first
{\ifcase\d_tabl_tabulate_vrulethickness\else
@@ -1303,7 +1384,7 @@
\expandafter\ignorespaces % interferes with the more tricky hooks
\fi}
-\def\setquicktabulate#1% see \startlegend \startgiven (for the moment still public)
+\unexpanded\def\setquicktabulate#1% see \startlegend \startgiven (for the moment still public)
{\let#1\tabl_tabulate_column_inject_auto
\let\\\tabl_tabulate_column_inject_auto} % brrr, will go
@@ -1330,7 +1411,11 @@
\processcommacommand[\p_distance]\tabl_tabulate_column_rule_separator_step
\s_tabl_tabulate_separator\m_tabl_tabulate_separator_factor\s_tabl_tabulate_separator
\fi
- \ifconditional\c_tabl_tabulate_someamble\kern\else\vskip\fi\s_tabl_tabulate_separator % new
+ % someamble: footer or header: unfortunately a skip can trigger a page break (weird
+ % as we have lots of nobreaks)
+ % \ifconditional\c_tabl_tabulate_someamble\kern\else\vskip\fi\s_tabl_tabulate_separator % new
+ % \directvspacing{\the\s_tabl_tabulate_separator}% new
+ \directvskip\s_tabl_tabulate_separator
\egroup}
\def\tabl_tabulate_hrule_spec_ignore#1%
@@ -1354,8 +1439,8 @@
\def\tabl_tabulate_hrule_inject_normal
{\hrule
- \!!height.5\d_tabl_tabulate_hrulethickness_local
- \!!depth .5\d_tabl_tabulate_hrulethickness_local
+ \s!height.5\d_tabl_tabulate_hrulethickness_local
+ \s!depth .5\d_tabl_tabulate_hrulethickness_local
\relax}
\def\tabl_tabulate_hrule_inject_colored
@@ -1386,87 +1471,60 @@
\let\tabl_tabulate_hrule_inject\tabl_tabulate_hrule_inject_second
\to \t_tabl_tabulate_initializers_second
-% \def\totaltabulatecolumns{0}
-%
-% \def\donormaltabulatehlinerule
-% {\leaders \hrule
-% \!!height\dimexpr.5\lineheight-\strutdepth
-% \!!depth-\dimexpr.5\lineheight-\strutdepth+\d_tabl_tabulate_hrulethickness_local
-% \hfill}
-%
-% \def\docoloredtabulatehlinerule
-% {\dousecolorparameter\currenttabulationlocalhrulecolor
-% \donormaltabulatehlinerule}
-%
-% \def\dotabulatelinerule
-% {\multispan\totaltabulatecolumns % \multispan is a plain macro
-% % for the moment this one
-% \strut\hskip\d_tabl_tabulate_margin
-% % neg values are ok !
-% \hskip\d_tabl_tabulate_indent\relax % new august 2003
-% \ifcase\d_tabl_tabulate_hrulethickness_local\else
-% \iftrialtypesetting % does not happen as we nil the caller
-% \donormaltabulatehlinerule
-% \else\ifx\currenttabulationlocalhrulecolor\empty
-% \donormaltabulatehlinerule
-% \else
-% \docoloredtabulatehlinerule
-% \fi\fi
-% \fi
-% \cr}
-
%D Color:
% \starttabulate[||p||]
% \NC test \NC test \NC test \NC \NR
-% \NC test \NC[green] \input tufte \NC[yellow] test \NC \NR
+% \NC test \CC[green] \input tufte \CC[yellow] test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC[blue] test \NC[red] test \NC test \NC \NR
+% \CC[blue] test \CC[red] test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC test \NC test \NC[gray] test \NC \NR
+% \NC test \NC test \CC[gray] test \NC \NR
% \NC test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC[blue] test \NC test \NC test \NC \NR
+% \CC[blue] test \NC test \NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC test \NC test \NC[magenta] test \NC \NR
+% \NC test \NC test \CC[magenta] test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC test \NC[cyan] \dorecurse{10}{\input ward }\NC test \NC \NR
+% \NC test \CC[cyan] \dorecurse{10}{\input ward }\NC test \NC \NR
% \NC test \NC test \NC test \NC \NR
-% \NC test \NC[yellow] test \NC test \NC \NR
+% \NC test \CC[yellow] test \NC test \NC \NR
% \stoptabulate
\unexpanded\def\tabl_tabulate_color_set#1% we could store the attributes at the cost of a lua call
{\begingroup
- \node_backgrounds_boxes_initialize % name might change
+ \node_backgrounds_align_initialize % name might change
+ \global\let\tabl_tabulate_color_repeat\tabl_tabulate_color_repeat_second
\global\settrue\c_tabl_tabulate_has_colors
\ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_colorcolumn
\global\c_tabl_tabulate_max_colorcolumn\c_tabl_tabulate_column
\fi
\setxvalue{\??tabulatecolor\the\c_tabl_tabulate_column}{#1}%
- \attribute\backgroundattribute\plusone
- \dousecolorparameter{#1}\strut\char\zerocount % hack
+ %\attribute\alignbackgroundattribute\plusone
+ %\dousecolorparameter{#1}\char\zerocount\strut % hack
+ \hbox \thealignbackgroundcolorattr{#1}{}%
\endgroup}
\def\tabl_tabulate_color_repeat_second % for split off lines
{\begingroup
- \ifcsname\??tabulatecolor\the\c_tabl_tabulate_column\endcsname
- \attribute\backgroundattribute\plusone
- \expandafter\dousecolorparameter\csname\??tabulatecolor\the\c_tabl_tabulate_column\endcsname\strut\char\zerocount % hack
+ \scratchcounter\numexpr\c_tabl_tabulate_column-\plusone\relax % ugly !
+ \ifcsname\??tabulatecolor\the\scratchcounter\endcsname
+ %\expandafter\dousecolorparameter\csname\??tabulatecolor\the\scratchcounter\endcsname
+ %\strut\char\zerocount % hack
+ \hbox \thealignbackgroundcolorattr{\csname\??tabulatecolor\the\scratchcounter\endcsname}{}%
\fi
\endgroup}
\let\tabl_tabulate_color_repeat\relax
\appendtoks
- \ifconditional\c_tabl_tabulate_has_colors
- \let\tabl_tabulate_color_repeat\tabl_tabulate_color_repeat_second
- \fi
-\to \t_tabl_tabulate_initializers_second
+ \let\tabl_tabulate_color_repeat\relax
+\to \everytabulate
\def\tabl_tabulate_color_reset
{\ifcase\c_tabl_tabulate_max_colorcolumn\else
@@ -1486,10 +1544,6 @@
\tabl_tabulate_color_reset
\to \t_tabl_tabulate_every_after_row
-\appendtoks
- \global\setfalse\c_tabl_tabulate_has_colors
-\to \everytabulate
-
% \def\tabl_tabulate_register_par_options_indeed
% {\iftrialtypesetting \else
% \registerparoptions
@@ -1585,6 +1639,43 @@
{\def\dobaselinecorrection{\vskip\dimexpr-\prevdepth+\strutdp+\strutdp\relax}% todo: mkiv
\baselinecorrection}
+% some hack to prevent an allowbreak ... actually we could set up a system that is
+% dealt with at the lua end in the skip handler: turn penalties with attribute
+% values into other penalties that get removed
+
+\installcorenamespace{tabulatenobreak}
+
+% \def\tabl_tabulate_break_state_set
+% {%\writestatus{SET}{\the\c_tabl_tabulate_noflines}%
+% \global\expandafter\let\csname\??tabulatenobreak\the\c_tabl_tabulate_noflines\endcsname\conditionaltrue}
+%
+% \def\tabl_tabulate_break_state_reset
+% {\ifcsname\??tabulatenobreak\the\c_tabl_tabulate_noflines\endcsname
+% %\writestatus{RESET}{\the\c_tabl_tabulate_noflines}%
+% \global\expandafter\let\csname\??tabulatenobreak\the\c_tabl_tabulate_noflines\endcsname\undefined
+% \fi}
+%
+% \def\tabl_tabulate_break_state_allowbreak
+% {\ifcsname\??tabulatenobreak\the\c_tabl_tabulate_noflines\endcsname
+% %\writestatus{TRUE}{\the\c_tabl_tabulate_noflines}%
+% \else
+% %\writestatus{FALSE}{\the\c_tabl_tabulate_noflines}%
+% \tabl_tabulate_break_allow % needed with pbreak prevention
+% \fi}
+%
+% \let\tabl_tabulate_break_allow\allowbreak
+% \let\tabl_tabulate_break_no \nobreak
+
+\def\tabl_tabulate_break_allow{\directvpenalty\zerocount}
+\def\tabl_tabulate_break_maybe{\directvpenalty\zerocount}
+\def\tabl_tabulate_break_no {\directvpenalty\plustenthousand} % ,order:2}}
+
+\let\tabl_tabulate_break_state_set \relax
+\let\tabl_tabulate_break_state_reset \relax
+\let\tabl_tabulate_break_state_allowbreak\tabl_tabulate_break_maybe
+
+% so far
+
\unexpanded\def\tabl_tabulate_VL_first{\tabl_tabulate_column_vruled\zerocount}
\unexpanded\def\tabl_tabulate_NC_first{\tabl_tabulate_column_normal\zerocount}
\unexpanded\def\tabl_tabulate_RC_first{\tabl_tabulate_column_normal\plusone}
@@ -1596,8 +1687,12 @@
\unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign}
\unexpanded\def\tabl_tabulate_NN_first{\NC\tabl_tabulate_digits} % new, undocumented, test first
\unexpanded\def\tabl_tabulate_ND_first{\NC\tabl_tabulate_digits} % same, for old times sake
-\unexpanded\def\tabl_tabulate_NR_first{\tabl_tabulate_NR_common\tabl_tabulate_check_penalties} % next row
-\unexpanded\def\tabl_tabulate_NB_first{\tabl_tabulate_NR_common\tabl_tabulate_nobreak_inject} % next row no break
+
+\unexpanded\def\tabl_tabulate_NR_first {\tabl_tabulate_NR_common\conditionaltrue \tabl_tabulate_check_penalties} % next row
+\unexpanded\def\tabl_tabulate_NB_first {\tabl_tabulate_NR_common\conditionaltrue \tabl_tabulate_nobreak_inject } % next row no break
+
+\unexpanded\def\tabl_tabulate_NR_second{\tabl_tabulate_NR_common\conditionalfalse\tabl_tabulate_check_penalties} % next row
+\unexpanded\def\tabl_tabulate_NB_second{\tabl_tabulate_NR_common\conditionalfalse\tabl_tabulate_nobreak_inject } % next row no break
\unexpanded\def\tabl_tabulate_CC_first{\global\c_tabl_tabulate_localcolorspan\zerocount\tabl_tabulate_set_color_column\zerocount}
\unexpanded\def\tabl_tabulate_CL_first{\global\c_tabl_tabulate_localcolorspan\plusone \tabl_tabulate_set_color_column\zerocount}
@@ -1624,6 +1719,11 @@
\to \t_tabl_tabulate_initializers_first
\appendtoks
+ \let\NR\tabl_tabulate_NR_second
+ \let\NB\tabl_tabulate_NB_second
+\to \t_tabl_tabulate_initializers_second
+
+\appendtoks
\let\SR\NR
\let\FR\NR
\let\MR\NR
@@ -1631,37 +1731,41 @@
\let\AR\NR
\to \t_tabl_tabulate_initializers_first
-\unexpanded\def\tabl_tabulate_NR_common#1%
+\unexpanded\def\tabl_tabulate_NR_common#1#2%
{\global\advance\c_tabl_tabulate_noflines\plusone
\global\setfalse\c_tabl_tabulate_firstflushed
\global\setfalse\c_tabl_tabulate_equal
\global\c_tabl_tabulate_column\zerocount
+ \ifconditional#1\relax
+ \tabl_tabulate_break_state_reset
+ \fi
\tabl_tabulate_pheight_reset
\unskip\unskip\crcr\tabl_tabulate_flush_collected
+ % can we omit the next one in the first run? probably
\starttabulatenoalign
\the\t_tabl_tabulate_every_after_row
- #1%
+ #2%
\stoptabulatenoalign}
\def\tabl_tabulate_check_penalties
{\ifconditional\c_tabl_tabulate_tolerant_break\else
\ifnum\c_tabl_tabulate_totalnoflines=\plusone
- % \allowbreak
+ % \tabl_tabulate_break_allow
\else
\ifconditional\c_tabl_tabulate_someamble \ifcase\c_tabl_tabulate_repeathead \else
- \allowbreak
+ \tabl_tabulate_break_allow
\fi \fi
\ifnum\c_tabl_tabulate_noflines=\plusone
\tabl_tabulate_nobreak_inject
- \else\ifnum\c_tabl_tabulate_noflines=\c_tabl_tabulate_minusnoflines
+ \else \ifnum\c_tabl_tabulate_noflines=\c_tabl_tabulate_minusnoflines
\ifnum\c_tabl_tabulate_plines_max<\plustwo
\tabl_tabulate_nobreak_inject
\else
- \allowbreak % needed with pbreak prevention
+ \tabl_tabulate_break_allow % needed with pbreak prevention
\fi
\else
- \allowbreak % needed with pbreak prevention
- \fi\fi
+ \tabl_tabulate_break_state_allowbreak
+ \fi \fi
\fi
\fi
\global\setfalse\c_tabl_tabulate_firstflushed}
@@ -1751,7 +1855,11 @@
% \starttabulate[||] \dorecurse{100}{\NC Eins \NC \NR \HL} \stoptabulate
% \stoptext
-\def\tabl_tabulate_XX_none{\starttabulatenoalign\tabl_tabulate_hrule_spec_ignore\stoptabulatenoalign}
+\def\tabl_tabulate_XX_none
+ {\starttabulatenoalign
+ \tabl_tabulate_break_state_set
+ \tabl_tabulate_hrule_spec_ignore
+ \stoptabulatenoalign}
\def\tabl_tabulate_FL_second{\starttabulatenoalign\tabl_tabulate_hrule_spec_pickup\tabl_tabulate_FL_second_indeed}
\def\tabl_tabulate_ML_second{\starttabulatenoalign\tabl_tabulate_hrule_spec_pickup\tabl_tabulate_ML_second_indeed}
@@ -1775,7 +1883,7 @@
\tabl_tabulate_hrule_inject
\vskip-\p_rulethickness\relax
\tabl_tabulate_hrule_inject
- \nobreak
+ \tabl_tabulate_break_no
\tabl_tabulate_column_rule_separator_inject
\stoptabulatenoalign}
@@ -1846,12 +1954,12 @@
% \stoptabulatenoalign
% \dotabulateautoline
% \starttabulatenoalign
-% \nobreak
+% \tabl_tabulate_break_no
% \ifx\dotabulateautoline\dotabulatelinerule\kern-\lineheight\fi
% \ifnum\noftabulatelines=\totalnoftabulatelines
% \@EA\dotabulatenobreak
% \else
-% \@EA\allowbreak
+% \@EA\tabl_tabulate_break_allow
% \fi
% \stoptabulatenoalign
% \dotabulateautoline
@@ -1904,10 +2012,10 @@
{\c_tabl_tabulate_pass\plusone
\tabl_tabulate_check_full_content
\edef\v_tabl_tabulate_align{\executeifdefined{\??tabulatealigning\p_align}0}%
- \s_tabl_tabulate_post\zeropoint
- \s_tabl_tabulate_pre\zeropoint
\s_tabl_tabulate_first.5\d_tabl_tabulate_unit
\s_tabl_tabulate_last\s_tabl_tabulate_first
+ \s_tabl_tabulate_pre\zeropoint
+ \s_tabl_tabulate_post\s_tabl_tabulate_first % was: \zeropoint
\global\c_tabl_tabulate_columns\zerocount
\global\c_tabl_tabulate_nofauto\zerocount
\global\c_tabl_tabulate_noflines\zerocount
@@ -1968,7 +2076,8 @@
\d_tabl_tabulate_width\zeropoint
\tabl_tabulate_initialize_boxes\c_tabl_tabulate_columns
\t_tabl_tabulate_preamble\expandafter{\the\t_tabl_tabulate_preamble
- \aligntab\alignmark\alignmark\global\advance\c_tabl_tabulate_column\plusone
+ \aligntab\alignmark\alignmark
+ \global\advance\c_tabl_tabulate_column\plusone % maybe just set it already
}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy
\NC\unskip\unskip\crcr\tabl_tabulate_flush_collected % no count
@@ -2029,6 +2138,7 @@
%
\dostarttagged\t!tabulate\empty
\dostarttagged\t!tabulaterow\empty
+ \setfalse\inhibitmargindata % new per 2012.06.13 ... really needed
\everycr\expandafter{\the\everycr\dostoptagged\dostarttagged\t!tabulaterow\empty}%
\expandafter\halign\expandafter{\the\t_tabl_tabulate_preamble\crcr\tabl_tabulate_insert_content\crcr}%
\dostoptagged
@@ -2150,6 +2260,26 @@
\let\TB\tabl_tabulate_TB
\to \everytabulate
+% %D Between alignment lines certain rules apply, and even a simple test can mess
+% %D up a table, which is why we have a special test facility:
+% %D
+% %D \startbuffer
+% %D \starttabulate[|l|p|]
+% %D \NC 1test \NC test \NC \NR
+% %D \tableifelse{\doifelse{a}{a}}{\NC Xtest \NC test \NC \NR}{}%
+% %D \stoptabulate
+% %D \stopbuffer
+% %D
+% %D \typebuffer \getbuffer
+%
+% \def\tableifelse#1% should be tabulatenoalign then
+% {\tablenoalign
+% {#1%
+% {\aftergroup \firstoftwoarguments}%
+% {\aftergroup\secondoftwoarguments}}}
+%
+% \def\tableiftextelse#1{\tableifelse{\doiftextelse{#1}}}
+
%D Some new trickery:
%D
%D \startbuffer
@@ -2237,4 +2367,53 @@
% \NC \digits $@@@.@@1,@@$ \NC\NR
% \stoptabulatie
+%D Predefined categories (moved from core-mis):
+
+\definetabulate
+ [\v!legend]
+ [|emj1|i1|mR|]
+
+\setuptabulate
+ [\v!legend]
+ [\c!unit=.75em,\c!inner=\setquicktabulate\leg,EQ={=}]
+
+\definetabulate
+ [\v!legend][\v!two]
+ [|emj1|emk1|i1|mR|]
+
+\definetabulate
+ [\v!fact]
+ [|R|ecmj1|i1mR|]
+
+\setuptabulate
+ [\v!fact]
+ [\c!unit=.75em,\c!inner=\setquicktabulate\fact,EQ={=}]
+
+%D Another example:
+%D
+%D \starttyping
+%D \definetabulate
+%D [whatever]
+%D [|l|r|]
+%D
+%D \definetabulate
+%D [whatever][else]
+%D [|l|c|r|]
+%D
+%D \startwhatever
+%D \NC l \NC r \NC \NR
+%D \NC left \NC right \NC \NR
+%D \stopwhatever
+%D
+%D \startwhatever[else]
+%D \NC l \NC m \NC r \NC \NR
+%D \NC left \NC middle \NC right \NC \NR
+%D \stopwhatever
+%D
+%D \startwhatever[else][format={|c|c|c|c|}]
+%D \NC l \NC m \NC m \NC r \NC \NR
+%D \NC left \NC middle \NC middle \NC right \NC \NR
+%D \stopwhatever
+%D \stoptyping
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/tabl-tsp.mkiv b/Master/texmf-dist/tex/context/base/tabl-tsp.mkiv
index 21182a98872..0138697af8e 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tsp.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-tsp.mkiv
@@ -13,8 +13,11 @@
\writestatus{loading}{ConTeXt Table Macros / Splitting}
-%D The code in this file is move here from other places and needs
-%D a mkiv cleanup.
+%D The code in this file is moved here from other places and needs
+%D a mkiv cleanup. As it mostly targets tables, the code lives in
+%D the tabl and page namespaces.
+
+% work in progress
\unprotect
@@ -26,8 +29,6 @@
% \splitfloat [settings] {\placetable[optional args]{test}} {content}
-% there is no need for a tracked structure number here
-
%D When \type {inbetween} is made empty instead of the
%D default \type {\page}, we will get delayed flushing
%D and text may continue below the graphic.
@@ -55,61 +56,81 @@
%D \dorecurse{10}{\input tufte }
%D \stoptyping
-\newcount\noffloatssplits
-
-\settrue \onlyonesplitofffloat
-\setfalse\somenextsplitofffloat
+\installcorenamespace{floatsplitting}
-\newconditional\splitfloatdone
-
-\newif\ifinsidesplitfloat % will become chardef
+\installdirectcommandhandler \??floatsplitting {floatsplitting} % \??floatsplitting
-\newtoks\everysplitfloatsetup
+\setupfloatsplitting
+ [\c!conversion=\v!character, % \v!romannumerals
+ \c!lines=3,
+ \c!before=,
+ \c!inbetween=\page,
+ \c!after=]
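These defaults feed the \splitfloat wrapper sketched near the top of this file (\splitfloat [settings] {\placetable...} {content}). A minimal sketch, assuming an xtable with split=yes as the content to be broken over pages; the captions of the parts then get the converted suffix:

\setupfloatsplitting
  [lines=4,
   conversion=romannumerals]

\splitfloat
  {\placetable{A table that may continue on following pages}}
  {\startxtable[split=yes]
     \startxrow \startxcell one \stopxcell \startxcell two \stopxcell \stopxrow
   \stopxtable}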
-\def\extrasplitfloatlines{0}
+\newconditional\splitfloatfirstdone
+\newconditional\somenextsplitofffloat
+\newconditional\splitfloatdone
+\newconditional\onlyonesplitofffloat \settrue\onlyonesplitofffloat
-\let\splitfloatfinalizer\relax
+\newif \ifinsidesplitfloat % will become conditional
-\ifx\floatcaptionsuffix\undefined \else
- \let\floatcaptionsuffix\empty % will become \splitfloatcaptionsuffix
-\fi
+\newcount \noffloatssplits
+\newtoks \everysplitfloatsetup
-\unexpanded\def\setupfloatsplitting
- {\dodoubleargument\getparameters[\??si]}
+\let \extrasplitfloatlines \!!zerocount
+\let \splitfloatfinalizer \relax
+\let \floatcaptionsuffix \empty
\unexpanded\def\splitfloat
- {\dosingleempty\dosplitfloat}
+ {\dosingleempty\page_split_float}
-\def\dosplitfloat[#1]#2% nog dubbele refs
+\def\page_split_float[#1]#2% nog dubbele refs
{\bgroup
\global\setfalse\splitfloatdone
- \aftergroup\checksplitfloat
+ \aftergroup\page_split_float_check
\insidefloattrue
\insidesplitfloattrue
- \getparameters[\??si][#1]%
+ \setupcurrentfloatsplitting[#1]%
\global\noffloatssplits\zerocount
- \def\floatcaptionsuffix{\convertnumber\@@siconversion\noffloatssplits}%
- \let\extrasplitfloatlines\@@silines
+ \let\floatcaptionsuffix\page_split_float_suffix
+ \edef\extrasplitfloatlines{\floatsplittingparameter\c!lines}%
\the\everysplitfloatsetup
\def\splitfloatcommand{#2}%
\global\settrue \onlyonesplitofffloat
\global\setfalse\somenextsplitofffloat
- \dopushsavedfloats
- \@@sibefore
+ \page_floats_push_saved
+ \floatsplittingparameter\c!before
\let\next} % \bgroup
-\unexpanded\def\checksplitfloat
+\unexpanded\def\page_split_float_suffix
+ {\begingroup
+ \usefloatsplittingstyleandcolor\c!style\c!color % only the suffix
+ \convertnumber{\floatsplittingparameter\c!conversion}\noffloatssplits
+ \endgroup}
+
+\unexpanded\def\page_split_float_check
{\ifconditional\splitfloatdone
\splitfloatfinalizer % a weird place (could interfere with flushing)
\else
- \blank{\tttf \getmessage\m!floatblocks{13}\empty}\blank
+ \blank
+ \begingroup
+ \tttf \dontleavehmode \getmessage\m!floatblocks{13}\empty
+ \endgroup
+ \blank
\showmessage\m!floatblocks{13}\empty
\fi}
-\def\dodowithsplitofffloat
- {\dowithnextboxcs\dodowithsplitofffloatfinish\vbox}
+\def\page_split_float_process % nextbox
+ {\ifinsidesplitfloat
+ \expandafter\page_split_float_process_yes
+ \else
+ \expandafter\page_split_float_process_nop
+ \fi}
+
+\def\page_split_float_process_yes
+ {\dowithnextboxcs\page_split_float_process_finish\vbox}
-\def\dodowithsplitofffloatfinish
+\def\page_split_float_process_finish
{\forgetall
\dontcomplain
\global\settrue\splitfloatdone
@@ -127,26 +148,29 @@
\splitfloatcommand{\box\nextbox}%
\egroup
\ifconditional\somenextsplitofffloat
- \doifelsenothing\@@siinbetween
- {\ifconditional\splitfloatfirstdone\else\page\fi}
- \@@siinbetween
+ \edef\p_inbetween{\floatsplittingparameter\c!inbetween}%
+ \ifx\p_inbetween\empty
+ \ifconditional\splitfloatfirstdone\else\page\fi
+ \else
+ \p_inbetween
+ \fi
\else
- \@@siafter
- \dopopsavedfloats
- \doflushsavedfloats
+ \floatsplittingparameter\c!after
+ \page_floats_pop_saved
+ \page_floats_flush_saved
\fi
\global\settrue\splitfloatfirstdone}
-\def\nodowithsplitofffloat
- {\dowithnextboxcs\nodowithsplitofffloatfinish\vbox}
+\def\page_split_float_process_nop
+ {\dowithnextboxcs\page_split_float_process_nop_finish\vbox}
-\def\nodowithsplitofffloatfinish
+\def\page_split_float_process_nop_finish
{\forgetall
\dontcomplain
\box\nextbox % maybe an option to unvbox
\global\settrue\splitfloatfirstdone}
-\def\dochecksplitofffloat#1% box
+\def\page_split_float_check_content#1% box
{\ifinsidesplitfloat
% \ifdim\ht#1=\zeropoint % funny: \ifcase does not check for overflow
\ifcase\ht#1\relax
@@ -157,7 +181,7 @@
\fi
\fi}
-\def\analyzesplitfloatcaption#1% depends on page-flt .. pretty messy
+\def\page_split_float_check_caption#1% depends on page-flt .. pretty messy
{\edef\extrasplitfloatlines{\extrasplitfloatlines}%
\ifx\extrasplitfloatlines\v!auto
\bgroup
@@ -174,27 +198,15 @@
\doifnumberelse\extrasplitfloatlines\donothing{\def\extrasplitfloatlines{1}}%
\fi}
-\def\dowithsplitofffloat % nextbox
+\unexpanded\def\doifnotinsidesplitfloat
{\ifinsidesplitfloat
- \expandafter\dodowithsplitofffloat
- \else
- \expandafter\nodowithsplitofffloat
+ \expandafter\gobbleoneargument
\fi}
-\def\doifnotinsidesplitfloat
- {\ifinsidesplitfloat\expandafter\gobbleoneargument\fi}
-
-%D Some defaults:
-
-\setupfloatsplitting
- [\c!conversion=\v!character, % \v!romannumerals
- \c!lines=3,
- \c!before=,
- \c!inbetween=\page,
- \c!after=]
-
%D Table splitter, on top of previous code:
+% todo: keep tail to rest, so we need a lookahead
+
\newbox\tsplitcontent
\newbox\tsplitresult
\newbox\tsplithead
@@ -203,198 +215,216 @@
\newtoks\everyresettsplit
-\def\resettsplit{\the\everyresettsplit}
-
\appendtoks
- \def\tsplitminimumfreelines{0}%
- \def\tsplitminimumfreespace{0pt}%
- \setbox\tsplitcontent\emptyvbox
- \setbox\tsplitresult \emptyvbox
- \setbox\tsplithead \emptyvbox
- \setbox\tsplitnext \emptyvbox
- \setbox\tsplittail \emptyvbox
- \let\tsplitbeforeresult\donothing
- \let\tsplitafterresult \donothing
- \let\tsplitinbetween \donothing
- \let\tsplitbefore \donothing
- \let\tsplitafter \donothing
- \let\postprocesstsplit \donothing
+ \let \tsplitminimumfreelines\!!zerocount
+ \let \tsplitminimumfreespace\!!zeropoint
+ \setbox\tsplitcontent \emptyvbox
+ \setbox\tsplitresult \emptyvbox
+ \setbox\tsplithead \emptyvbox
+ \setbox\tsplitnext \emptyvbox
+ \setbox\tsplittail \emptyvbox
+ \let \tsplitbeforeresult \donothing
+ \let \tsplitafterresult \donothing
+ \let \tsplitinbetween \donothing
+ \let \tsplitbefore \donothing
+ \let \tsplitafter \donothing
+ \let \postprocesstsplit \donothing
\to \everyresettsplit
+\unexpanded\def\resettsplit
+ {\the\everyresettsplit}
+
\resettsplit
-% todo: keep tail to rest, so we need a lookahead
+\def\tsplitdirectwidth{\hsize}
-\newconditional\splitfloatfirstdone
+\newconditional\c_tabl_split_done
+\newconditional\c_tabl_split_head
+\newconditional\c_tabl_split_full
-\def\handletsplit
- {\analyzesplitfloatcaption{\wd\tsplitcontent}%
+\newdimen \d_tabl_split_available
+
+\unexpanded\def\handletsplit
+ {\page_split_float_check_caption{\wd\tsplitcontent}%
\global\setfalse\splitfloatfirstdone
\testpagesync % new, sync, but still tricky
[\tsplitminimumfreelines]
[\dimexpr\tsplitminimumfreespace+\extrasplitfloatlines\lineheight\relax]%
\setbox\scratchbox\vbox{\tsplitinbetween}%
\edef\tsplitinbetweenheight{\the\htdp\scratchbox}% etex
- \!!doneafalse
- \doloop
- {\ifinsidecolumns
- % brrr, assumes empty columns
- \global\setfalse\splitfloatfirstdone
- \scratchdimen\textheight
- \!!donectrue
- \else
- \ifconditional\splitfloatfirstdone
- \scratchdimen\textheight
- \!!donectrue
- \else\ifdim\pagegoal<\maxdimen
- \scratchdimen\dimexpr\pagegoal-\pagetotal\relax
- \!!donecfalse
- \else
- \scratchdimen\textheight
- \!!donectrue
- \fi\fi
- \fi
- \scratchdimen\dimexpr\scratchdimen-\tsplitinbetweenheight-\tsplitminimumfreespace-\extrasplitfloatlines\lineheight\relax
- \ifdim\htdp\tsplittail>\zeropoint
- \advance\scratchdimen-\htdp\tsplittail
- \fi
- \setbox\tsplitresult\vbox
- {\ifdim\ht\tsplithead>\zeropoint
- \unvcopy\tsplithead
+ \setfalse\c_tabl_split_done
+ \doloop\tabl_split_loop_body
+ \global\setfalse\usesamefloatnumber % new, prevent next increment
+ \global\setfalse\splitfloatfirstdone} % we can use this one for tests
+
+\def\tabl_split_loop_body
+ {\ifinsidecolumns
+ % brrr, assumes empty columns
+ \global\setfalse\splitfloatfirstdone
+ \d_tabl_split_available\textheight
+ \settrue\c_tabl_split_full
+ \else
+ \ifconditional\splitfloatfirstdone
+ \d_tabl_split_available\textheight
+ \settrue\c_tabl_split_full
+ \else\ifdim\pagegoal<\maxdimen
+ \d_tabl_split_available\dimexpr\pagegoal-\pagetotal\relax
+ \setfalse\c_tabl_split_full
+ \else
+ \d_tabl_split_available\textheight
+ \settrue\c_tabl_split_full
+ \fi\fi
+ \fi
+ \d_tabl_split_available \dimexpr
+ \d_tabl_split_available
+ -\tsplitinbetweenheight
+ -\tsplitminimumfreespace
+ -\extrasplitfloatlines\lineheight
+ \relax
+ \ifdim\htdp\tsplittail>\zeropoint
+ \advance\d_tabl_split_available-\htdp\tsplittail
+ \fi
+ \setbox\tsplitresult\vbox
+ {\ifdim\ht\tsplithead>\zeropoint
+ \unvcopy\tsplithead
+ \tsplitinbetween
+ \fi}%
+ \ifconditional\c_tabl_split_done \else
+ \ifdim\ht\tsplitnext>\zeropoint
+ \setbox\tsplithead\box\tsplitnext
+ \fi
+ \fi
+ \settrue\c_tabl_split_done
+ \ifdim\ht\tsplitresult>\zeropoint
+ \settrue\c_tabl_split_head % table head
+ \else
+ \setfalse\c_tabl_split_head % no tablehead
+ \fi
+ \splittopskip\zeropoint
+ \doloop % inner loop
+ {\setbox\scratchbox\vsplit\tsplitcontent to \onepoint % \lineheight
+ \setbox\scratchbox\vbox{\unvbox\scratchbox}%
+ \ifdim\dimexpr\d_tabl_split_available-\htdp\scratchbox-\htdp\tsplitresult\relax>\zeropoint
+ \setbox\tsplitresult\vbox
+ {\unvbox\tsplitresult
\tsplitinbetween
- \fi}%
- \if!!donea\else\ifdim\ht\tsplitnext>\zeropoint
- \setbox\tsplithead\box\tsplitnext
- \fi\fi
- \!!doneatrue
- \ifdim\ht\tsplitresult>\zeropoint
- \!!donedtrue % table head
- \else
- \!!donedfalse % no tablehead
- \fi
- \splittopskip\zeropoint
- \doloop
- {\setbox\scratchbox\vsplit\tsplitcontent to \onepoint % \lineheight
- \setbox\scratchbox\vbox{\unvbox\scratchbox}%
- \ifdim\dimexpr\scratchdimen-\htdp\scratchbox-\htdp\tsplitresult\relax>\zeropoint
- \setbox\tsplitresult\vbox
- {\unvbox\tsplitresult
- \tsplitinbetween
- \unvbox\scratchbox}%
- \ifvoid\tsplitcontent \exitloop \fi
- \else\if!!doned
- % we only have a tablehead so far
- \setbox\tsplitresult\vbox{\unvbox\tsplitresult\unvbox\scratchbox}%
- \exitloop
- \else\if!!donec
- % we have text height available, but the (one) cell is too
- % large to fit, so, in order to avoid loops/deadcycles we do:
- \setbox\tsplitresult\vbox
- {\unvbox\tsplitresult
- \tsplitinbetween
- \unvbox\scratchbox}%
- \exitloop
- \else
- \setbox\tsplitcontent\vbox
- {\unvbox\scratchbox
- \tsplitinbetween
- \ifvoid\tsplitcontent\else\unvbox\tsplitcontent\fi}%
- \exitloop
- \fi\fi\fi
- \!!donedfalse
- \!!donecfalse}%
- \postprocesstsplit
- \dochecksplitofffloat\tsplitcontent
- \ifvoid\tsplitcontent
+ \unvbox\scratchbox}%
+ \ifvoid\tsplitcontent \exitloop \fi
+ \else\ifconditional\c_tabl_split_head
+ % we only have a tablehead so far
+ \setbox\tsplitresult\vbox{\unvbox\tsplitresult\unvbox\scratchbox}%
+ \exitloop
+ \else\ifconditional\c_tabl_split_full
+ % we have text height available, but the (one) cell is too
+ % large to fit, so, in order to avoid loops/deadcycles we do:
\setbox\tsplitresult\vbox
{\unvbox\tsplitresult
\tsplitinbetween
- \unvcopy\tsplittail}%
- \dowithsplitofffloat{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
- \doifnotinsidesplitfloat\tsplitafter
- \endgraf
+ \unvbox\scratchbox}%
\exitloop
\else
- % hack
- \ifdim\pagegoal<\maxdimen
- \global\pagegoal\dimexpr\pagegoal+\lineheight\relax % etex
- \fi
- % brrr
- \ifdim\ht\tsplitresult>\zeropoint
- \setbox\tsplitresult\vbox
- {\unvbox\tsplitresult
- \tsplitinbetween
- \unvcopy\tsplittail}%
- \dowithsplitofffloat{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
- \doifnotinsidesplitfloat\tsplitafter
- \endgraf
- \fi
- \ifinsidecolumns
- \goodbreak % was \doifnotinsidesplitfloat\goodbreak
- \else
- \page % was \doifnotinsidesplitfloat\page
- \fi
- \global\settrue\usesamefloatnumber % new, prevent next increment
- \fi}%
- \global\setfalse\usesamefloatnumber % new, prevent next increment
- \global\setfalse\splitfloatfirstdone} % we can use this one for tests
+ \setbox\tsplitcontent\vbox
+ {\unvbox\scratchbox
+ \tsplitinbetween
+ \ifvoid\tsplitcontent\else\unvbox\tsplitcontent\fi}%
+ \exitloop
+ \fi\fi\fi
+ \setfalse\c_tabl_split_head
+ \setfalse\c_tabl_split_full}%
+ \postprocesstsplit
+ \page_split_float_check_content\tsplitcontent
+ \ifvoid\tsplitcontent
+ \setbox\tsplitresult\vbox
+ {\unvbox\tsplitresult
+ \tsplitinbetween
+ \unvcopy\tsplittail}%
+ \page_split_float_process{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
+ \doifnotinsidesplitfloat\tsplitafter
+ \endgraf
+ \exitloop
+ \else
+ % hack
+ \ifdim\pagegoal<\maxdimen
+ \pagegoal\dimexpr\pagegoal+\lineheight\relax % etex
+ \fi
+ % brrr
+ \ifdim\ht\tsplitresult>\zeropoint
+ \setbox\tsplitresult\vbox
+ {\unvbox\tsplitresult
+ \tsplitinbetween
+ \unvcopy\tsplittail}%
+ \page_split_float_process{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
+ \doifnotinsidesplitfloat\tsplitafter
+ \endgraf
+ \fi
+ \ifinsidecolumns
+ \goodbreak % was \doifnotinsidesplitfloat\goodbreak
+ \else
+ \page % was \doifnotinsidesplitfloat\page
+ \fi
+ \global\settrue\usesamefloatnumber % new, prevent next increment
+ \fi}
%D The next one assumes that the split takes place elsewhere. This is
%D used in xtables.
\let\resetdirecttsplit\resettsplit
-\def\tsplitdirectwidth{\hsize}
-
-\def\handledirecttsplit
- {\analyzesplitfloatcaption{\tsplitdirectwidth}%
+\unexpanded\def\handledirecttsplit
+ {\page_split_float_check_caption{\tsplitdirectwidth}%
\global\setfalse\splitfloatfirstdone
\testpagesync % new, sync, but still tricky
[\tsplitminimumfreelines]
[\dimexpr\tsplitminimumfreespace+\extrasplitfloatlines\lineheight\relax]%
- \doloop
- {\ifinsidecolumns
- \global\setfalse\splitfloatfirstdone
- \scratchdimen\textheight
- \else\ifconditional\splitfloatfirstdone
- \scratchdimen\textheight
- \else\ifdim\pagegoal<\maxdimen
- \scratchdimen\dimexpr\pagegoal-\pagetotal\relax
- \else
- \scratchdimen\textheight
- \fi\fi\fi
- \scratchdimen\dimexpr\scratchdimen-\tsplitminimumfreespace-\extrasplitfloatlines\lineheight\relax
- \tsplitdirectsplitter\scratchdimen % also sets state
- \ifdim\ht\tsplitresult>\zeropoint
- \ifconditional\somenextsplitofffloat
- \global\setfalse\onlyonesplitofffloat
- \fi
- \ifdim\pagegoal<\maxdimen
- \global\pagegoal\dimexpr\pagegoal+\lineheight\relax % etex
- \fi
- \dowithsplitofffloat{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
- \global\settrue\usesamefloatnumber % new, prevent next increment
- \endgraf
- \ifconditional\somenextsplitofffloat
- \ifinsidecolumns
- \goodbreak
- \else
- \page
- \fi
- \fi
- \global\settrue\splitfloatfirstdone
- \else\ifconditional\somenextsplitofffloat
- \ifinsidecolumns
- \goodbreak
- \else
- \page % no room
- \fi
- \else
- \exitloop
- \fi\fi}%
- \global\setfalse\usesamefloatnumber % new, prevent next increment
+ \doloop\tabl_split_direct_loop_body
+ \global\setfalse\usesamefloatnumber % new, prevent next increment
\global\setfalse\splitfloatfirstdone} % we can use this one for tests
+\def\tabl_split_direct_loop_body
+ {\ifinsidecolumns
+ \global\setfalse\splitfloatfirstdone
+ \d_tabl_split_available\textheight
+ \else\ifconditional\splitfloatfirstdone
+ \d_tabl_split_available\textheight
+ \else\ifdim\pagegoal<\maxdimen
+ \d_tabl_split_available\dimexpr\pagegoal-\pagetotal\relax
+ \else
+ \d_tabl_split_available\textheight
+ \fi\fi\fi
+ \d_tabl_split_available\dimexpr
+ \d_tabl_split_available
+ -\tsplitminimumfreespace
+ -\extrasplitfloatlines\lineheight
+ \relax
+ \tsplitdirectsplitter\d_tabl_split_available % also sets state
+ \ifdim\ht\tsplitresult>\zeropoint
+ \ifconditional\somenextsplitofffloat
+ \global\setfalse\onlyonesplitofffloat
+ \fi
+ \ifdim\pagegoal<\maxdimen
+ \pagegoal\dimexpr\pagegoal+\lineheight\relax % etex
+ \fi
+ \page_split_float_process{\tsplitbeforeresult\box\tsplitresult\tsplitafterresult}%
+ \global\settrue\usesamefloatnumber % new, prevent next increment
+ \endgraf
+ \ifconditional\somenextsplitofffloat
+ \ifinsidecolumns
+ \goodbreak
+ \else
+ \page
+ \fi
+ \fi
+ \global\settrue\splitfloatfirstdone
+ \else\ifconditional\somenextsplitofffloat
+ \ifinsidecolumns
+ \goodbreak
+ \else
+ \page % no room
+ \fi
+ \else
+ \exitloop
+ \fi\fi}
+
\protect \endinput
% test cases
diff --git a/Master/texmf-dist/tex/context/base/tabl-xnt.mkvi b/Master/texmf-dist/tex/context/base/tabl-xnt.mkvi
index 35451abe0d5..ffa1f501ea5 100644
--- a/Master/texmf-dist/tex/context/base/tabl-xnt.mkvi
+++ b/Master/texmf-dist/tex/context/base/tabl-xnt.mkvi
@@ -129,7 +129,7 @@
\unexpanded\def\tabl_x_TABLE_start_indeed[#settings]%
{\bgroup
\tabl_x_prepare{#settings}%
- \edef\tabl_x_current_buffer{\x_table_default_buffer}%
+ \edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
\buff_pickup\tabl_x_current_buffer{bTABLE}{eTABLE}\relax\tabl_x_process}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/tabl-xtb.lua b/Master/texmf-dist/tex/context/base/tabl-xtb.lua
index 395d65a032c..cf9a4a0a603 100644
--- a/Master/texmf-dist/tex/context/base/tabl-xtb.lua
+++ b/Master/texmf-dist/tex/context/base/tabl-xtb.lua
@@ -25,6 +25,8 @@ this mechanism will be improved so that it can replace its older cousin.
-- todo: use linked list instead of r/c array
+local commands, context, tex, node = commands, context, tex, node
+
local texdimen = tex.dimen
local texcount = tex.count
local texbox = tex.box
@@ -391,7 +393,7 @@ function xtables.reflow_width()
for c=1,nofcolumns do
local drc = row[c]
if drc.list then
- --- flush_node_list(drc.list)
+ -- flush_node_list(drc.list)
drc.list = false
end
end
@@ -466,13 +468,12 @@ function xtables.reflow_width()
if not options[v_stretch] then
-- not needed
if trace_xtable then
- report_xtable("too wide but no stretch, delta: %s",points(delta))
+ report_xtable("too wide but no stretch, delta %p",delta)
end
elseif options[v_width] then
local factor = delta / width
if trace_xtable then
- report_xtable("proportional stretch, delta: %s, width: %s, factor: %s",
- points(delta),points(width),factor)
+ report_xtable("proportional stretch, delta %p, width %p, factor %a",delta,width,factor)
end
for c=1,nofcolumns do
widths[c] = widths[c] + factor * widths[c]
@@ -481,8 +482,7 @@ function xtables.reflow_width()
-- frozen -> a column with option=fixed will not stretch
local extra = delta / (nofcolumns - noffrozen)
if trace_xtable then
- report_xtable("normal stretch, delta: %s, extra: %s",
- points(delta),points(extra))
+ report_xtable("normal stretch, delta %p, extra %p",delta,extra)
end
for c=1,nofcolumns do
if not frozencolumns[c] then
@@ -495,8 +495,7 @@ function xtables.reflow_width()
done = false
local available = (widetotal + delta) / nofwide
if trace_xtable then
- report_xtable("shrink check, total: %s, delta: %s, columns: %s, fixed: %s",
- points(widetotal),points(delta),nofwide,points(available))
+ report_xtable("shrink check, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
end
for c=1,nofcolumns do
if autowidths[c] and available >= widths[c] then
@@ -512,12 +511,10 @@ function xtables.reflow_width()
end
-- maybe also options[v_width] here but tricky as width does not say
-- much about amount
-
if options[v_width] then -- not that much (we could have a clever vpack loop balancing .. no fun)
local factor = (widetotal + delta) / width
if trace_xtable then
- report_xtable("proportional shrink used, total: %s, delta: %s, columns: %s, factor: %s",
- points(widetotal),points(delta),nofwide,factor)
+ report_xtable("proportional shrink used, total %p, delta %p, columns %s, factor %s",widetotal,delta,nofwide,factor)
end
for c=1,nofcolumns do
if autowidths[c] then
@@ -527,8 +524,7 @@ function xtables.reflow_width()
else
local available = (widetotal + delta) / nofwide
if trace_xtable then
- report_xtable("normal shrink used, total: %s, delta: %s, columns: %s, fixed: %s",
- points(widetotal),points(delta),nofwide,points(available))
+ report_xtable("normal shrink used, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
end
for c=1,nofcolumns do
if autowidths[c] then
@@ -590,7 +586,7 @@ local function showspans(data)
line[#line+1] = "none"
end
end
- report_xtable("%3d : %s : %s",r,namedmodes[modes[r]] or "----",concat(line," "))
+ report_xtable("%3d : %s : % t",r,namedmodes[modes[r]] or "----",line)
end
end
diff --git a/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi b/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
index 2382cae2190..03c68e60332 100644
--- a/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
+++ b/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
@@ -59,8 +59,6 @@
\unprotect
-% \def\v!xtable{xtable}
-
% option=stretch : equal distribution
% option={stretch,width} : proportional distribution
% option={max} : prefer max over forced width/height
@@ -97,6 +95,8 @@
\newcount\c_tabl_x_nesting
\newcount\c_tabl_x_skip_mode % 1 = skip
+\newdimen\d_tabl_x_textwidth
+
% \setupxtable[one][parent][a=b,c=d]
% \setupxtable[one] [a=b,c=d]
% \setupxtable [a=b,c=d]
@@ -111,10 +111,10 @@
\to \everysetupxtable
\setupxtable[%
- \c!nr=1,
- \c!nc=1,
- \c!nx=1, % slow
- \c!ny=1, % slow
+ \c!nr=\plusone,
+ \c!nc=\plusone,
+ \c!nx=\plusone, % slow
+ \c!ny=\plusone, % slow
\c!align=\v!table, % {\v!flushleft,\v!broad,\v!high}, % just as \bTABLE .. \eTABLE
\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
@@ -124,7 +124,7 @@
% \c!bodyfont=,
\c!width=\v!fit,
\c!height=\v!fit,
- \c!maxwidth=8em,
+ \c!maxwidth=8\emwidth,
\c!autowidth=\v!yes, % controls framed
\c!rulethickness=\linewidth,
\c!strut=\v!yes,
@@ -137,8 +137,8 @@
% \c!footer=,
% \c!header=,
\c!spaceinbetween=,
- \c!textwidth=\hsize,
- \c!textheight=\vsize,
+ \c!textwidth=\v!local, % was \hsize,
+ \c!textheight=\vsize, % used for vertical spread
\c!distance=\zeropoint, % individual column
\c!columndistance=\zeropoint, % each column (whole table)
\c!leftmargindistance=\zeropoint, % whole table
@@ -164,7 +164,9 @@
% These direct buffers can be somewhat faster but it's probably neglectable.
% Anyway, no nesting is supported as we then need to catch (e.g.) rows and
% keep track of nesting and have a more complex redefinition of nested
-% instanced \unknown\ it's not worth the trouble.
+% instanced \unknown\ it's not worth the trouble. Only use them when you
+% really need them and use the embeddedxtable command when nesting them.
+% Implementing nesting would be slower than not using direct buffers.
\def\tabl_x_process_buffer_directly[#name]%
{\bgroup
@@ -179,9 +181,10 @@
\def\tabl_x_process_buffer[#settings]%
{\tabl_x_prepare{#settings}%
\let\tabl_x_start_table\tabl_x_start_ignore
- \gobbleuntil\stopxtable}
+ \gobbleuntil\stopxtable} % nested xtables are not supported,
-%D A bonus: you can use the following construct inside a macro.
+%D A bonus: you have to use the following construct inside a macro or
+%D direct buffer.
\unexpanded\def\startembeddedxtable
{\dosingleempty\tabl_x_embedded_start}
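Since a regular \startxtable picks up its body as a buffer, it cannot appear inside a macro body or a direct buffer; the embedded variant grabs its content as an argument instead. A minimal sketch (the macro name is only an illustration):

\unexpanded\def\MySummaryTable
  {\startembeddedxtable
     \startxrow \startxcell one \stopxcell \startxcell two \stopxcell \stopxrow
   \stopembeddedxtable}

\MySummaryTable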
@@ -211,10 +214,11 @@
\unexpanded\def\tabl_x_start_named_indeed[#settings]%
{\advance\c_tabl_x_nesting\plusone
\dostarttagged\t!table\empty
- \forgetall
\iffirstargument
\setupcurrentxtable[#settings]%
\fi
+ \tabl_x_check_textwidth
+ \forgetall
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
\normalexpanded{\buff_pickup{\tabl_x_current_buffer}{\e!start\currentxtable}{\e!stop\currentxtable}\relax\tabl_x_process}}
@@ -223,17 +227,28 @@
%D Now we come to processing:
+\def\tabl_x_check_textwidth
+ {\edef\p_textwidth{\xtableparameter\c!textwidth}%
+ \ifx\p_textwidth\v!local
+ \d_tabl_x_textwidth\availablehsize
+ \else
+ \d_tabl_x_textwidth\p_textwidth
+ \fi}
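With this check, textwidth=local (the default installed earlier in this file) resolves to the available hsize at the moment the table is started, for instance inside a narrowed text block, while an explicit dimension still forces a fixed width. A small sketch:

\setupxtable[textwidth=local]  % adapt to the current available hsize
\setupxtable[textwidth=10cm]   % or force a fixed overall width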
+
\def\tabl_x_prepare#settings% assumes \iffirstargument to be set
{\advance\c_tabl_x_nesting\plusone
\dostarttagged\t!table\empty
- \forgetall
\iffirstargument
\tabl_x_set_checked{#settings}%
- \fi}
+ \fi
+ \tabl_x_check_textwidth
+ \forgetall}
\def\tabl_x_get_buffer
{\ctxcommand{gettexbuffer("\tabl_x_current_buffer")}}
+\let\tabl_x_stop_cell\relax
+
\unexpanded\def\tabl_x_process
{\begingroup % *
\doifsomething{\xtableparameter\c!bodyfont}
@@ -242,7 +257,7 @@
{\xtableparameter\c!spaceinbetween}%
\ctxcommand{x_table_create {
option = "\xtableparameter\c!option",
- textwidth = \number\dimexpr\xtableparameter\c!textwidth,
+ textwidth = \number\d_tabl_x_textwidth,
textheight = \number\dimexpr\xtableparameter\c!textheight,
maxwidth = \number\dimexpr\xtableparameter\c!maxwidth,
lineheight = \number\openlineheight,
@@ -296,15 +311,64 @@
\else\ifinsidefloat
\tabl_x_flush_float_normal
\else
- \doifelse{\xtableparameter\c!split}\v!yes
- \tabl_x_flush_flow_split
- \tabl_x_flush_flow_normal
+ \tabl_x_flush_text_checked
\fi\fi
\ctxcommand{x_table_cleanup()}%
\dostoptagged
\resetbuffer[\tabl_x_current_buffer]%
\egroup}
+% text flow split modes
+
+\installcorenamespace{xtableflushsplit}
+
+\def\tabl_x_flush_text_checked
+ {\expandnamespaceparameter\??xtableflushsplit\xtableparameter\c!split\v!no}
+
+% in text flow: headers and footers only once
+
+\setvalue{\??xtableflushsplit\v!yes}%
+ {\ctxcommand{x_table_flush{ method = "\v!split" }}}
+
+% in text flow: headers and footers only once
+
+\setvalue{\??xtableflushsplit\v!no}%
+ {\dontleavehmode % else no leftskip etc
+ \ctxcommand{x_table_flush{ method = "\v!normal" }}}
+
+% in text flow: headers and footers get repeated
+
+\setvalue{\??xtableflushsplit\v!repeat}%
+ {\doloop
+ {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi }}%
+ \ifcase\c_tabl_x_state
+ \exitloop
+ \else
+ \page
+ \fi}}
+
+% \setvalue{\??xtableflushsplit\v!setups}%
+% {\directsetup{xtable:split:user}}
+%
+% \startsetups[xtable:split:user]
+% \doloop {
+% \xtablesplitflush % uses \xtablesplitvsize (a macro)
+% \ifcase\xtablesplitstate
+% \exitloop
+% \else
+% \page
+% \fi
+% }
+% \stopsetups
+%
+% \unexpanded\def\xtablesplitflush
+% {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr\xtablesplitvsize\relax}}\relax}
+%
+% \def\xtablesplitvsize
+% {\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi}
+%
+% \let\xtablesplitstate\c_tabl_x_state
+
\let\extratxtablesplitheight\zeropoint % might disappear so don't depend on it
\def\tabl_x_flush_float_normal
@@ -319,12 +383,6 @@
\let\tsplitdirectwidth \d_tabl_x_final_width
\handledirecttsplit}
-\def\tabl_x_flush_flow_normal
- {\ctxcommand{x_table_flush{ method = "\v!normal" }}}
-
-\def\tabl_x_flush_flow_split
- {\ctxcommand{x_table_flush{ method = "\v!split" }}}
-
\def\tabl_x_split_splitter#vsize%
{\setbox\tsplitresult\vbox
{\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr#vsize }}}%
diff --git a/Master/texmf-dist/tex/context/base/task-ini.lua b/Master/texmf-dist/tex/context/base/task-ini.lua
index 1ec38e181e6..7f560335331 100644
--- a/Master/texmf-dist/tex/context/base/task-ini.lua
+++ b/Master/texmf-dist/tex/context/base/task-ini.lua
@@ -20,9 +20,10 @@ local freezegroup = tasks.freezegroup
local freezecallbacks = callbacks.freeze
appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
-appendaction("processors", "normalizers", "fonts.collections.process") -- todo
+appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
+appendaction("processors", "characters", "scripts.autofontfeature.handler")
appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled
appendaction("processors", "characters", "typesetters.directions.handler") -- disabled
appendaction("processors", "characters", "typesetters.cases.handler") -- disabled
@@ -36,17 +37,15 @@ appendaction("processors", "fonts", "builders.paragraphs.solutions.split
appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo
appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo
-appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on
-appendaction("processors", "fonts", "builders.kernel.kerning") -- always on
+appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode)
+appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode)
appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move)
+------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled
appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-
--- appendaction("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-
appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
@@ -58,9 +57,10 @@ appendaction("shipouts", "normalizers", "nodes.shifts.handler")
appendaction("shipouts", "normalizers", "structures.tags.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.accessibility") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.backgrounds") -- disabled
+appendaction("shipouts", "normalizers", "nodes.handlers.alignbackgrounds") -- disabled
+------------("shipouts", "normalizers", "nodes.handlers.export") -- disabled
---~ appendaction("shipouts", "normalizers", "nodes.handlers.export") -- disabled
-
+appendaction("shipouts", "finishers", "nodes.visualizers.handler") -- disabled
appendaction("shipouts", "finishers", "attributes.colors.handler") -- disabled
appendaction("shipouts", "finishers", "attributes.transparencies.handler") -- disabled
appendaction("shipouts", "finishers", "attributes.colorintents.handler") -- disabled
@@ -75,12 +75,13 @@ appendaction("math", "normalizers", "noads.handlers.relocate", nil, "noh
appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on
+------------("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled
appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled
appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on
+------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
-- quite experimental (nodes.handlers.graphicvadjust might go away)
@@ -96,8 +97,15 @@ appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler")
appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
+-- experimental too
+
+appendaction("mvlbuilders","normalizers","typesetters.checkers.handler")
+appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
+
-- speedup: only kick in when used
+disableaction("processors", "scripts.autofontfeature.handler")
+disableaction("processors", "fonts.collections.process")
disableaction("processors", "fonts.checkers.missing")
disableaction("processors", "chars.handle_breakpoints")
disableaction("processors", "typesetters.cleaners.handler")
@@ -122,8 +130,10 @@ disableaction("shipouts", "attributes.effects.handler")
disableaction("shipouts", "attributes.negatives.handler")
disableaction("shipouts", "attributes.viewerlayers.handler")
disableaction("shipouts", "structures.tags.handler")
+disableaction("shipouts", "nodes.visualizers.handler")
disableaction("shipouts", "nodes.handlers.accessibility")
disableaction("shipouts", "nodes.handlers.backgrounds")
+disableaction("shipouts", "nodes.handlers.alignbackgrounds")
disableaction("shipouts", "nodes.handlers.cleanuppage")
disableaction("shipouts", "nodes.references.handler")
@@ -143,6 +153,9 @@ disableaction("finalizers", "builders.paragraphs.tag")
disableaction("math", "noads.handlers.tags")
disableaction("math", "noads.handlers.italics")
+disableaction("mvlbuilders", "typesetters.checkers.handler")
+disableaction("vboxbuilders","typesetters.checkers.handler")
+
freezecallbacks("find_.*_file", "find file using resolver")
freezecallbacks("read_.*_file", "read file at once")
freezecallbacks("open_.*_file", "open file for reading")
@@ -165,8 +178,8 @@ freezegroup("shipouts", "finishers")
freezegroup("mvlbuilders", "normalizers")
freezegroup("vboxbuilders", "normalizers")
---~ freezegroup("parbuilders", "lists")
---~ freezegroup("pagebuilders", "lists")
+-----------("parbuilders", "lists")
+-----------("pagebuilders", "lists")
freezegroup("math", "normalizers")
freezegroup("math", "builders")
diff --git a/Master/texmf-dist/tex/context/base/toks-ini.lua b/Master/texmf-dist/tex/context/base/toks-ini.lua
index 1f4d48466b2..ef4b5406b66 100644
--- a/Master/texmf-dist/tex/context/base/toks-ini.lua
+++ b/Master/texmf-dist/tex/context/base/toks-ini.lua
@@ -5,8 +5,7 @@ if not modules then modules = { } end modules ['toks-ini'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
-local utfbyte, utfchar = utf.byte, utf.char
+local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
local format, gsub = string.format, string.gsub
--[[ldx--
@@ -56,7 +55,7 @@ tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end
tokens.letters = function(str)
local t, n = { }, 0
- for chr in string.utfvalues(str) do
+ for chr in utfvalues(str) do
n = n + 1
t[n] = createtoken(chr, 11)
end
@@ -198,7 +197,7 @@ collectors.showtoken = showtoken
function collectors.trace()
local t = get_next()
- texio.write_nl(showtoken(t))
+ logs.report("tokenlist",showtoken(t))
return t
end
diff --git a/Master/texmf-dist/tex/context/base/trac-ctx.lua b/Master/texmf-dist/tex/context/base/trac-ctx.lua
new file mode 100644
index 00000000000..706e7a2447a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-ctx.lua
@@ -0,0 +1,48 @@
+if not modules then modules = { } end modules ['trac-ctx'] = {
+ version = 1.001,
+ comment = "companion to trac-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local commands = commands
+local context = context
+local register = trackers.register
+
+local textrackers = tex.trackers or { }
+local texdirectives = tex.directives or { }
+
+tex.trackers = textrackers
+tex.directives = texdirectives
+
+storage.register("tex/trackers", textrackers, "tex.trackers")
+storage.register("tex/directives",texdirectives,"tex.directives")
+
+local function doit(category,tag,v)
+ local tt = category[tag]
+ if tt then
+ context.unprotect()
+ context(v and tt[1] or tt[2]) -- could be one call
+ context.protect()
+ end
+end
+
+local function initialize(category,register)
+ for tag, commands in next, category do
+ register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
+ end
+end
+
+local function install(category,register,tag,enable,disable)
+ category[tag] = { enable, disable }
+ register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
+end
+
+function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end
+function commands.initializetexdirectives() initialize(texdirectives,directives.register) end
+
+-- commands.install(tag,enable,disable):
+
+function commands.installtextracker (...) install(textrackers ,trackers .register,...) end
+function commands.installtexdirective(...) install(texdirectives,directives.register,...) end
diff --git a/Master/texmf-dist/tex/context/base/trac-ctx.mkiv b/Master/texmf-dist/tex/context/base/trac-ctx.mkiv
new file mode 100644
index 00000000000..3baddede2a8
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-ctx.mkiv
@@ -0,0 +1,60 @@
+
+%D \module
+%D [ file=trac-ctx,
+%D version=2012.07.13,
+%D title=\CONTEXT\ Tracing Macros,
+%D subtitle=TeX Trackers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Tracing Macros / TeX Trackers}
+
+\registerctxluafile{trac-ctx}{1.001}
+
+\unprotect
+
+\unexpanded\def\installtextracker#1#2#3%
+ {\ctxcommand{installtextracker("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+
+\unexpanded\def\installtexdirective#1#2#3%
+ {\ctxcommand{installtexdirective("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+
+\appendtoks
+ \ctxcommand{initializetextrackers ()}%
+ \ctxcommand{initializetexdirectives()}%
+\to \everyjob
+
+\protect \endinput
+
+% this is one option:
+%
+% \newconditional\c_math_virtual_tx
+% \newconditional\c_math_virtual_px
+% \newconditional\c_math_virtual_lm
+%
+% \installtexdirective {fonts.math.virtual.tx} {\settrue \c_math_virtual_tx} {\setfalse\c_math_virtual_tx}
+% \installtexdirective {fonts.math.virtual.px} {\settrue \c_math_virtual_px} {\setfalse\c_math_virtual_px}
+% \installtexdirective {fonts.math.virtual.lm} {\settrue \c_math_virtual_lm} {\setfalse\c_math_virtual_lm}
+%
+% but as it makes the typescript look ugly we will use:
+%
+% \installtexdirective {fonts.math.virtual.tx} {\enablemode[fonts.math.virtual.tx]} {\disablemode[fonts.math.virtual.tx]}
+% \installtexdirective {fonts.math.virtual.px} {\enablemode[fonts.math.virtual.px]} {\disablemode[fonts.math.virtual.px]}
+% \installtexdirective {fonts.math.virtual.lm} {\enablemode[fonts.math.virtual.lm]} {\disablemode[fonts.math.virtual.lm]}
+%
+% which could be:
+%
+% \def\mode_f_m_v_tx{fonts.math.virtual.tx}
+% \def\mode_f_m_v_px{fonts.math.virtual.px}
+% \def\mode_f_m_v_lm{fonts.math.virtual.lm}
+%
+% \installtexdirective \mode_f_m_v_tx {\enablemode[\mode_f_m_v_tx]} {\disablemode[\mode_f_m_v_tx]}
+% \installtexdirective \mode_f_m_v_px {\enablemode[\mode_f_m_v_px]} {\disablemode[\mode_f_m_v_px]}
+% \installtexdirective \mode_f_m_v_lm {\enablemode[\mode_f_m_v_lm]} {\disablemode[\mode_f_m_v_lm]}
+%
+% alas .. we don't want to define this in the core
diff --git a/Master/texmf-dist/tex/context/base/trac-deb.lua b/Master/texmf-dist/tex/context/base/trac-deb.lua
index 87434a13d9d..fe167c343e2 100644
--- a/Master/texmf-dist/tex/context/base/trac-deb.lua
+++ b/Master/texmf-dist/tex/context/base/trac-deb.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['trac-deb'] = {
license = "see context related readme files"
}
-local lpeg = lpeg
+local lpeg, status = lpeg, status
+
local lpegmatch = lpeg.match
local format, concat, match = string.format, table.concat, string.match
local tonumber, tostring = tonumber, tostring
@@ -94,20 +95,42 @@ end
function tracers.showlines(filename,linenumber,offset,errorstr)
local data = io.loaddata(filename)
+ if not data or data == "" then
+ local hash = url.hashed(filename)
+ if not hash.noscheme then
+ local ok, d, n = resolvers.loaders.byscheme(hash.scheme,filename)
+ if ok and n > 0 then
+ data = d
+ end
+ end
+ end
local lines = data and string.splitlines(data)
if lines and #lines > 0 then
- -- this does not work yet as we cannot access the last lua error
- -- table.print(status.list())
- -- this will be a plugin sequence
- local what, where = match(errorstr,"LuaTeX error <main (%a+) instance>:(%d+)")
- if what and where then
+ -- This does not work completely as we cannot access the last Lua error using
+ -- table.print(status.list()). This is on the agenda. Eventually we will
+ -- have a sequence of checks here (tex, lua, mp) at this end.
+ --
+ -- Actually, in 0.75+ the lua error message is even weirder as you can
+ -- get:
+ --
+ -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
+ --
+ -- <inserted text> \endgroup \directlua {
+ --
+ -- So there is some work to be done in the LuaTeX engine.
+ --
+ local what, where = match(errorstr,[[LuaTeX error <main (%a+) instance>:(%d+)]])
+ or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
+ if where then
-- lua error: linenumber points to last line
- local start, stop = "\\start" .. what .. "code", "\\stop" .. what .. "code"
+ local start = "\\startluacode"
+ local stop = "\\stopluacode"
+ local where = tonumber(where)
if lines[linenumber] == start then
local n = linenumber
for i=n,1,-1 do
if lines[i] == start then
- local n = i + tonumber(where)
+ local n = i + where
if n <= linenumber then
linenumber = n
end
@@ -134,7 +157,9 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
function tracers.printerror(offset)
- local filename, linenumber = status.filename, tonumber(status.linenumber) or 0
+ local inputstack = resolvers.inputstack
+ local filename = inputstack[#inputstack] or status.filename
+ local linenumber = tonumber(status.linenumber) or 0
if not filename then
report_system("error not related to input file: %s ...",status.lasterrorstring)
elseif type(filename) == "number" then
@@ -144,6 +169,7 @@ function tracers.printerror(offset)
-- add a bit of spacing around our variant
texio.write_nl("\n")
local errorstr = status.lasterrorstring or "?"
+ -- inspect(status.list())
report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
end
diff --git a/Master/texmf-dist/tex/context/base/trac-deb.mkiv b/Master/texmf-dist/tex/context/base/trac-deb.mkiv
index 4f5f0e931db..fe5dd02dc4a 100644
--- a/Master/texmf-dist/tex/context/base/trac-deb.mkiv
+++ b/Master/texmf-dist/tex/context/base/trac-deb.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Tracing Macros / Debugger}
-\registerctxluafile{trac-lmx}{1.001}
+%registerctxluafile{trac-lmx}{1.001}
\registerctxluafile{trac-deb}{1.001}
\unexpanded\def\breakpoint{\showdebuginfo\wait}
@@ -31,7 +31,7 @@
\unexpanded\def\enableexperiments [#1]{\ctxlua{experiments.enable("#1")}}
\unexpanded\def\disableexperiments[#1]{\ctxlua{experiments.disable("#1")}}
-\unexpanded\def\showdebuginfo{\ctxlua{lmx.showdebuginfo()}}
-\unexpanded\def\overloaderror{\ctxlua{lmx.overloaderror()}} % \enabledirectives[system.showerror]
+\unexpanded\def\showdebuginfo {\ctxlua{lmx.showdebuginfo()}}
+\unexpanded\def\overloaderror {\ctxlua{lmx.overloaderror()}} % \enabledirectives[system.showerror]
\unexpanded\def\showlogcategories {\ctxlua{logs.show()}}
diff --git a/Master/texmf-dist/tex/context/base/trac-exp.lua b/Master/texmf-dist/tex/context/base/trac-exp.lua
new file mode 100644
index 00000000000..5879f1b7bad
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-exp.lua
@@ -0,0 +1,229 @@
+if not modules then modules = { } end modules ['trac-exp'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local formatters = string.formatters
+local reporters = logs.reporters
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmltext = xml.text
+local xmlfirst = xml.first
+local xmlfilter = xml.filter
+
+-- there is no need for a newhandlers { name = "help", parent = "string" }
+
+local function flagdata(flag)
+ local name = flag.at.name or ""
+ local value = flag.at.value or ""
+ -- local short = xmlfirst(s,"/short")
+ -- local short = xmlserialize(short,xs)
+ local short = xmltext(xmlfirst(flag,"/short")) or ""
+ return name, value, short
+end
+
+local function exampledata(example)
+ local command = xmltext(xmlfirst(example,"/command")) or ""
+ local comment = xmltext(xmlfirst(example,"/comment")) or ""
+ return command, comment
+end
+
+local function categorytitle(category)
+ return xmltext(xmlfirst(category,"/title")) or ""
+end
+
+local exporters = logs.exporters
+
+function exporters.man(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
+ local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
+ local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
+ local banner = specification.banner or detail or name
+ --
+ local result = { }
+ --
+ -- .TH "context" "1" "some date" "version" "ConTeXt" -- we use a fake date as I don't want to polute the git repos
+ --
+ local runner = string.match(name,"^mtx%-(.*)")
+ if runner then
+ runner = formatters["mtxrun --script %s"](runner)
+ else
+ runner = name
+ end
+ --
+ result[#result+1] = formatters['.TH "%s" "1" "%s" "version %s" "%s"'](name,os.date("01-01-%Y"),version,detail)
+ result[#result+1] = formatters[".SH NAME\n.B %s"](name)
+ result[#result+1] = formatters[".SH SYNOPSIS\n.B %s [\n.I OPTIONS ...\n.B ] [\n.I FILENAMES\n.B ]"](runner)
+ result[#result+1] = formatters[".SH DESCRIPTION\n.B %s"](detail)
+ --
+ for category in xmlcollected(root,"/application/flags/category") do
+ if nofcategories > 1 then
+ result[#result+1] = formatters['.SH OPTIONS: %s'](string.upper(category.at.name or "all"))
+ else
+ result[#result+1] = ".SH OPTIONS"
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name, value, short = flagdata(flag)
+ if value == "" then
+ result[#result+1] = formatters[".TP\n.B --%s\n%s"](name,short)
+ else
+ result[#result+1] = formatters[".TP\n.B --%s=%s\n%s"](name,value,short)
+ end
+ end
+ end
+ end
+ local moreinfo = specification.moreinfo
+ if moreinfo and moreinfo ~= "" then
+ moreinfo = string.gsub(moreinfo,"[\n\r]([%a]+)%s*:%s*",'\n\n.B "%1:"\n')
+ result[#result+1] = formatters[".SH AUTHOR\n%s"](moreinfo)
+ end
+ return table.concat(result,"\n")
+end
+
+local craptemplate = [[
+<?xml version="1.0"?>
+<application>
+<metadata>
+<entry name="banner">%s</entry>
+</metadata>
+<verbose>
+%s
+</verbose>
+]]
+
+function exporters.xml(specification,...)
+ local helpinfo = specification.helpinfo
+ if type(helpinfo) == "string" then
+ if string.find(helpinfo,"^<%?xml") then
+ return helpinfo
+ end
+ elseif type(helpinfo) == "table" then
+ helpinfo = table.concat(helpinfo,"\n\n")
+ else
+ helpinfo = "no help"
+ end
+ return formatters[craptemplate](specification.banner or "?",helpinfo)
+end
+
+-- the following template is optimized a bit for space
+
+-- local bodytemplate = [[
+-- <h1>Command line options</h1>
+-- <table>
+-- <tr>
+-- <th style="width: 10em">flag</th>
+-- <th style="width: 8em">value</th>
+-- <th>description</th>
+-- </tr>
+-- <?lua
+-- for category in xml.collected(variables.root,"/application/flags/category") do
+-- if variables.nofcategories > 1 then
+-- ?><tr>
+-- <th colspan="3"><?lua inject(category.at.name) ?></th>
+-- </tr><?lua
+-- end
+-- for subcategory in xml.collected(category,"/subcategory") do
+-- ?><tr><th/><td/><td/></tr><?lua
+-- for flag in xml.collected(subcategory,"/flag") do
+-- local name, value, short = variables.flagdata(flag)
+-- ?><tr>
+-- <th>--<?lua inject(name) ?></th>
+-- <td><?lua inject(value) ?></td>
+-- <td><?lua inject(short) ?></td>
+-- </tr><?lua
+-- end
+-- end
+-- end
+-- ?>
+-- </table>
+-- <br/>
+-- <?lua
+-- for category in xml.collected(variables.root,"/application/examples/category") do
+-- local title = variables.categorytitle(category)
+-- if title ~= "" then
+-- ?><h1><?lua inject(title) ?></h1><?lua
+-- end
+-- for subcategory in xml.collected(category,"/subcategory") do
+-- for example in xml.collected(subcategory,"/example") do
+-- local command, comment = variables.exampledata(example)
+-- ?><tt><?lua inject(command) ?></tt><br/><?lua
+-- end
+-- ?><br/><?lua
+-- end
+-- end
+-- for comment in xml.collected(root,"/application/comments/comment") do
+-- ?><br/><?lua inject(xml.text(comment)) ?><br/><?lua
+-- end
+-- ?>
+-- ]]
+
+local bodytemplate = [[
+<h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <?lua for category in xml.collected(variables.root,"/application/flags/category") do if variables.nofcategories > 1 then ?>
+ <tr><th colspan="3"><?lua inject(category.at.name) ?></th></tr>
+ <?lua end for subcategory in xml.collected(category,"/subcategory") do ?>
+ <tr><th/><td/><td/></tr>
+ <?lua for flag in xml.collected(subcategory,"/flag") do local name, value, short = variables.flagdata(flag) ?>
+ <tr><th>--<?lua inject(name) ?></th><td><?lua inject(value) ?></td><td><?lua inject(short) ?></td></tr>
+ <?lua end end end ?>
+</table>
+<br/>
+<?lua for category in xml.collected(variables.root,"/application/examples/category") do local title = variables.categorytitle(category) if title ~= "" then ?>
+<h1><?lua inject(title) ?></h1>
+<?lua end for subcategory in xml.collected(category,"/subcategory") do for example in xml.collected(subcategory,"/example") do local command, comment = variables.exampledata(example) ?>
+<tt><?lua inject(command) ?></tt>
+<br/><?lua end ?><br/><?lua end end for comment in xml.collected(root,"/application/comments/comment") do ?>
+<br/><?lua inject(xml.text(comment)) ?><br/><?lua end ?>
+]]
+
+function exporters.html(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
+ local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
+ local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
+ local banner = specification.banner or detail or name
+ --
+ dofile(resolvers.findfile("trac-lmx.lua","tex"))
+ --
+ local htmltemplate = io.loaddata(resolvers.findfile("context-base.lmx","tex")) or "no template"
+ --
+ local body = lmx.convertstring(bodytemplate, {
+ nofcategories = nofcategories,
+ wantedcategories = wantedcategories,
+ root = root,
+ -- moreinfo = specification.moreinfo,
+ flagdata = flagdata,
+ exampledata = exampledata,
+ categorytitle = categorytitle,
+ })
+ local html = lmx.convertstring(htmltemplate, {
+ maintext = body,
+ title = banner,
+ bottomtext = "wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl",
+ })
+ --
+ return html
+end
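+
+-- A hedged usage sketch (the specification table and its values below are
+-- invented for illustration, they are not part of this module): the exporters
+-- expect an XML helpinfo string plus optional banner and moreinfo fields.
+--
+-- local spec = {
+--     helpinfo = [[<?xml version="1.0"?><application> ... </application>]],
+--     banner   = "mtx-example 1.00",
+--     moreinfo = "webpage : http://www.pragma-ade.nl",
+-- }
+--
+-- print(exporters.man(spec))                           -- man page source
+-- io.savedata("mtx-example.html",exporters.html(spec)) -- html variant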
diff --git a/Master/texmf-dist/tex/context/base/trac-fil.lua b/Master/texmf-dist/tex/context/base/trac-fil.lua
index bf291ff6b88..8cc903e2a62 100644
--- a/Master/texmf-dist/tex/context/base/trac-fil.lua
+++ b/Master/texmf-dist/tex/context/base/trac-fil.lua
@@ -6,18 +6,20 @@ if not modules then modules = { } end modules ['trac-fil'] = {
license = "see context related readme files"
}
+local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall
local format, concat = string.format, table.concat
local openfile = io.open
local date = os.date
-local rawset, tonumber = rawset, tonumber
+local sortedpairs = table.sortedpairs
-local P, C, Cc, Cg, Cf, Ct, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs
+local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg
+local lpegmatch = lpeg.match
local patterns = lpeg.patterns
local cardinal = patterns.cardinal
local whitespace = patterns.whitespace^0
-patterns.timestamp = Cf(Ct("") * (
+local timestamp = Cf(Ct("") * (
Cg (Cc("year") * (cardinal/tonumber)) * P("-")
* Cg (Cc("month") * (cardinal/tonumber)) * P("-")
* Cg (Cc("day") * (cardinal/tonumber)) * P(" ")
@@ -28,53 +30,65 @@ patterns.timestamp = Cf(Ct("") * (
* Cg (Cc("tminute") * (cardinal/tonumber))
)^0, rawset)
-patterns.keysvalues = Cf(Ct("") * (
+local keysvalues = Cf(Ct("") * (
Cg(C(patterns.letter^0) * whitespace * "=" * whitespace * Cs(patterns.unquoted) * whitespace)
)^0, rawset)
-patterns.statusline = Cf(Ct("") * (
- whitespace * P("[") * Cg(Cc("timestamp") * patterns.timestamp ) * P("]")
- * whitespace * Cg(Cc("status" ) * patterns.keysvalues)
+local statusline = Cf(Ct("") * (
+ whitespace * P("[") * Cg(Cc("timestamp") * timestamp ) * P("]")
+ * whitespace * Cg(Cc("status" ) * keysvalues)
),rawset)
+patterns.keysvalues = keysvalues
+patterns.statusline = statusline
+patterns.timestamp = timestamp
loggers = loggers or { }
-local tz = os.timezone(true)
-
-local bugged = { }
-
-function loggers.message(filename,t)
- if not bugged[filename] then
- local f = openfile(filename,"a+")
- if not f then
- dir.mkdirs(file.dirname(filename))
- f = openfile(filename,"a+")
- end
- if f then
- f:write("[",date("!%Y-%m-%d %H:%M:%S"),tz,"]")
- for k, v in table.sortedpairs(t) do
- f:write(" ",k,'="',v,'"')
+local timeformat = format("[%%s%s]",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function loggers.makeline(t)
+ local result = { } -- minimize time that file is open
+ result[#result+1] = format(timeformat,date(dateformat))
+ for k, v in sortedpairs(t) do
+ local tv = type(v)
+ if tv == "string" then
+ if v ~= "password" then
+ result[#result+1] = format(" %s=%q",k,v)
end
- f:write("\n")
- f:close()
- else
- bugged[filename] = true
+ elseif tv == "number" or tv == "boolean" then
+ result[#result+1] = format(" %s=%q",k,tostring(v))
end
end
+ return concat(result," ")
+end
+
+local function append(filename,...)
+ local f = openfile(filename,"a+")
+ if not f then
+ dir.mkdirs(file.dirname(filename))
+ f = openfile(filename,"a+")
+ end
+ if f then
+ f:write(...)
+ f:close()
+ return true
+ else
+ return false
+ end
end
---~ function loggers.collect(filename)
---~ if lfs.isfile(filename) then
---~ return lpeg.match(Ct(patterns.statusline^0),io.loaddata(filename))
---~ else
---~ return { }
---~ end
---~ end
+function loggers.store(filename,data) -- a log service is nicer
+ if type(data) == "table" then
+ data = loggers.makeline(data)
+ end
+ pcall(append,filename,data,"\n")
+end
function loggers.collect(filename,result)
if lfs.isfile(filename) then
- local r = lpeg.match(Ct(patterns.statusline^0),io.loaddata(filename))
+ local r = lpegmatch(Ct(statusline^0),io.loaddata(filename))
if result then -- append
local nofresult = #result
for i=1,#r do
@@ -90,60 +104,78 @@ function loggers.collect(filename,result)
end
end
---~ local template = [[
---~ <table>
---~ <tr>%s</tr>
---~ %s
---~ </table>
---~ ]]
-
---~ function loggers.tohtml(entries,fields)
---~ if not fields or #fields == 0 then
---~ return ""
---~ end
---~ if type(entries) == "string" then
---~ entries = loggers.collect(entries)
---~ end
---~ local scratch, lines = { }, { }
---~ for i=1,#entries do
---~ local entry = entries[i]
---~ local status = entry.status
---~ for i=1,#fields do
---~ local field = fields[i]
---~ local v = status[field.name]
---~ if v ~= nil then
---~ v = tostring(v)
---~ local f = field.format
---~ if f then v = format(f,v) end
---~ scratch[i] = format("<td nowrap='nowrap' align='%s'>%s</td>",field.align or "left",v)
---~ else
---~ scratch[i] = "<td/>"
---~ end
---~ end
---~ lines[i] = "<tr>" .. concat(scratch) .. "</tr>"
---~ end
---~ for i=1,#fields do
---~ local field = fields[i]
---~ scratch[i] = format("<th nowrap='nowrap' align='left'>%s</th>", field.label or field.name)
---~ end
---~ local result = format(template,concat(scratch),concat(lines,"\n"))
---~ return result, entries
---~ end
-
---~ -- loggers.message("test.log","name","whatever","more",123)
-
---~ local fields = {
---~ -- { name = "id", align = "left" },
---~ -- { name = "timestamp", align = "left" },
---~ { name = "assessment", align = "left" },
---~ { name = "assessmentname", align = "left" },
---~ -- { name = "category", align = "left" },
---~ { name = "filesize", align = "right" },
---~ { name = "nofimages", align = "center" },
---~ -- { name = "product", align = "left" },
---~ { name = "resultsize", align = "right" },
---~ { name = "fetchtime", align = "right", format = "%2.3f" },
---~ { name = "runtime", align = "right", format = "%2.3f" },
---~ { name = "organization", align = "left" },
---~ -- { name = "username", align = "left" },
---~ }
+function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance
+ local fields = { }
+ if results then
+ for i=1,#results do
+ local r = results[i]
+ for k, v in next, r do
+ local f = fields[k]
+ if not f then
+ fields[k] = 1
+ else
+ fields[k] = f + 1
+ end
+ end
+ end
+ end
+ return fields
+end
+
+local template = [[<!-- log entries: begin -->
+<table>
+<tr>%s</tr>
+%s
+</table>
+<!-- log entries: end -->
+]]
+
+function loggers.tohtml(entries,fields)
+ if not fields or #fields == 0 then
+ return ""
+ end
+ if type(entries) == "string" then
+ entries = loggers.collect(entries)
+ end
+ local scratch, lines = { }, { }
+ for i=1,#entries do
+ local entry = entries[i]
+ local status = entry.status
+ for i=1,#fields do
+ local field = fields[i]
+ local v = status[field.name]
+ if v ~= nil then
+ v = tostring(v)
+ local f = field.format
+ if f then
+ v = format(f,v)
+ end
+ scratch[i] = format("<td nowrap='nowrap' align='%s'>%s</td>",field.align or "left",v)
+ else
+ scratch[i] = "<td/>"
+ end
+ end
+ lines[i] = format("<tr>%s</tr>",concat(scratch))
+ end
+ for i=1,#fields do
+ local field = fields[i]
+ scratch[i] = format("<th nowrap='nowrap' align='left'>%s</th>", field.label or field.name)
+ end
+ local result = format(template,concat(scratch),concat(lines,"\n"))
+ return result, entries
+end
+
+-- loggers.store("test.log", { name = "whatever", more = math.random(1,100) })
+
+-- local fields = {
+-- { name = "name", align = "left" },
+-- { name = "more", align = "right" },
+-- }
+
+-- local entries = loggers.collect("test.log")
+-- local html = loggers.tohtml(entries,fields)
+
+-- inspect(entries)
+-- inspect(fields)
+-- inspect(html)
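+
+-- A sketch of loggers.fields on the same test data: it maps each key that
+-- occurs in the collected entries to the number of entries carrying it, which
+-- can help decide what to pass on to loggers.tohtml.
+--
+-- inspect(loggers.fields(entries))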
+
diff --git a/Master/texmf-dist/tex/context/base/trac-inf.lua b/Master/texmf-dist/tex/context/base/trac-inf.lua
index cbc9d13f880..fdc07d97d39 100644
--- a/Master/texmf-dist/tex/context/base/trac-inf.lua
+++ b/Master/texmf-dist/tex/context/base/trac-inf.lua
@@ -11,18 +11,25 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
+local type, tonumber = type, tonumber
local format, lower = string.format, string.lower
+local concat = table.concat
local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
statistics = statistics or { }
local statistics = statistics
statistics.enable = true
-statistics.threshold = 0.05
+statistics.threshold = 0.01
local statusinfo, n, registered, timers = { }, 0, { }, { }
+table.setmetatableindex(timers,function(t,k)
+ local v = { timing = 0, loadtime = 0 }
+ t[k] = v
+ return v
+end)
+
local function hastiming(instance)
return instance and timers[instance]
end
@@ -33,14 +40,7 @@ end
local function starttiming(instance)
local timer = timers[instance or "notimer"]
- if not timer then
- timer = { }
- timers[instance or "notimer"] = timer
- end
- local it = timer.timing
- if not it then
- it = 0
- end
+ local it = timer.timing or 0
if it == 0 then
timer.starttime = clock()
if not timer.loadtime then
@@ -72,19 +72,26 @@ local function stoptiming(instance, report)
return 0
end
+local function elapsed(instance)
+ if type(instance) == "number" then
+ return instance or 0
+ else
+ local timer = timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+
local function elapsedtime(instance)
- local timer = timers[instance or "notimer"]
- return format("%0.3f",timer and timer.loadtime or 0)
+ return format("%0.3f",elapsed(instance))
end
local function elapsedindeed(instance)
- local timer = timers[instance or "notimer"]
- return (timer and timer.loadtime or 0) > statistics.threshold
+ return elapsed(instance) > statistics.threshold
end
local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
if elapsedindeed(instance) then
- return format("%s seconds %s", elapsedtime(instance),rest or "")
+ return format("%0.3f seconds %s", elapsed(instance),rest or "")
end
end
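+
+-- A minimal usage sketch of the timing helpers (the instance key is arbitrary,
+-- the metatable above creates a timer on demand):
+--
+-- statistics.starttiming("myjob")
+-- -- ... some work ...
+-- statistics.stoptiming("myjob")
+-- print(statistics.elapsedseconds("myjob","(myjob)") or "below threshold")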
@@ -92,11 +99,12 @@ statistics.hastiming = hastiming
statistics.resettiming = resettiming
statistics.starttiming = starttiming
statistics.stoptiming = stoptiming
+statistics.elapsed = elapsed
statistics.elapsedtime = elapsedtime
statistics.elapsedindeed = elapsedindeed
statistics.elapsedseconds = elapsedseconds
--- general function
+-- general function .. we might split this module
function statistics.register(tag,fnc)
if statistics.enable and type(fnc) == "function" then
@@ -107,9 +115,10 @@ function statistics.register(tag,fnc)
end
end
-function statistics.show(reporter)
+local report = logs.reporter("mkiv lua stats")
+
+function statistics.show()
if statistics.enable then
- if not reporter then reporter = function(tag,data,n) write_nl(tag .. " " .. data) end end
-- this code will move
local register = statistics.register
register("luatex banner", function()
@@ -122,39 +131,31 @@ function statistics.show(reporter)
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
end)
- collectgarbage("collect")
- register("current memory usage", statistics.memused)
+ if jit then
+ local status = { jit.status() }
+ if status[1] then
+ register("luajit status", function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ -- so far
+ -- collectgarbage("collect")
+ register("current memory usage",statistics.memused)
register("runtime",statistics.runtime)
+ logs.newline() -- initial newline
for i=1,#statusinfo do
local s = statusinfo[i]
local r = s[2]()
if r then
- reporter(s[1],r,n)
+ report("%s: %s",s[1],r)
end
end
- write_nl("") -- final newline
+ -- logs.newline() -- final newline
statistics.enable = false
end
end
-local template, report_statistics, nn = nil, nil, 0 -- we only calcute it once
-
-function statistics.showjobstat(tag,data,n)
- if not logs then
- -- sorry
- elseif type(data) == "table" then
- for i=1,#data do
- statistics.showjobstat(tag,data[i],n)
- end
- else
- if not template or n > nn then
- template, n = format("%%-%ss - %%s",n), nn
- report_statistics = logs.reporter("mkiv lua stats")
- end
- report_statistics(format(template,tag,data))
- end
-end
-
function statistics.memused() -- no math.round yet -)
local round = math.round or math.floor
return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
@@ -171,8 +172,9 @@ function statistics.runtime()
return statistics.formatruntime(elapsedtime(statistics))
end
-function statistics.timed(action,report)
- report = report or logs.reporter("system")
+local report = logs.reporter("system")
+
+function statistics.timed(action)
starttiming("run")
action()
stoptiming("run")
diff --git a/Master/texmf-dist/tex/context/base/trac-jus.lua b/Master/texmf-dist/tex/context/base/trac-jus.lua
new file mode 100644
index 00000000000..9d99f059d36
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-jus.lua
@@ -0,0 +1,136 @@
+if not modules then modules = { } end modules ['trac-jus'] = {
+ version = 1.001,
+ comment = "companion to trac-jus.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local checkers = typesetters.checkers or { }
+typesetters.checkers = checkers
+
+----- report_justification = logs.reporter("visualize","justification")
+
+local a_alignstate = attributes.private("alignstate")
+local a_justification = attributes.private("justification")
+
+local tracers = nodes.tracers
+local setcolor = tracers.colors.set
+local settransparency = tracers.transparencies.set
+
+local new_rule = nodes.pool.rule
+local new_glue = nodes.pool.glue
+local new_kern = nodes.pool.kern
+local concat_nodes = nodes.concat
+local hpack_nodes = node.hpack
+local copy_node = node.copy
+local get_list_dimensions = node.dimensions
+local hlist_code = nodes.nodecodes.hlist
+
+local tex_set_attribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local min_threshold = 0
+local max_threshold = 0
+
+local function set(n)
+ nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler")
+ nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler")
+ tex_set_attribute(a_justification,n or 1)
+ function typesetters.checkers.set(n)
+ tex_set_attribute(a_justification,n or 1)
+ end
+end
+
+local function reset()
+ tex_set_attribute(a_justification,unsetvalue)
+end
+
+checkers.set = set
+checkers.reset = reset
+
+function commands.showjustification(n)
+ set(n)
+end
+
+trackers.register("visualizers.justification", function(v)
+ if v then
+ set(1)
+ else
+ reset()
+ end
+end)
+
+function checkers.handler(head)
+ for current in node.traverse_id(hlist_code,head) do
+ if current[a_justification] == 1 then
+ current[a_justification] = 0
+ local width = current.width
+ if width > 0 then
+ local list = current.list
+ if list then
+ local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
+ local delta = naturalwidth - width
+ if naturalwidth == 0 or delta == 0 then
+ -- special box
+ elseif delta >= max_threshold then
+ local rule = new_rule(delta,naturalheight,naturaldepth)
+ list = hpack_nodes(list,width,"exactly")
+ if list.glue_set == 1 then
+ setcolor(rule,"trace:dr")
+ settransparency(rule,"trace:dr")
+ else
+ setcolor(rule,"trace:db")
+ settransparency(rule,"trace:db")
+ end
+ rule = hpack_nodes(rule)
+ rule.width = 0
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { list, rule }
+ -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule }
+ elseif delta <= min_threshold then
+ local alignstate = list[a_alignstate]
+ if alignstate == 1 then
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dc")
+ settransparency(rule,"trace:dc")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ rule.width = 0
+ current.list = nodes.concat { rule, list }
+ elseif alignstate == 2 then
+ local rule = new_rule(-delta/2,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dy")
+ settransparency(rule,"trace:dy")
+ rule = hpack_nodes(rule)
+ rule.width = 0
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule }
+ elseif alignstate == 3 then
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dm")
+ settransparency(rule,"trace:dm")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { list, new_kern(delta), rule }
+ else
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dg")
+ settransparency(rule,"trace:dg")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ rule.width = 0
+ current.list = concat_nodes { list, new_kern(delta), rule }
+ end
+ end
+ end
+ end
+ end
+ end
+ return head
+end
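+
+-- The handler above is triggered via the tracker registered earlier; at the
+-- TeX end that roughly amounts to \enabletrackers[visualizers.justification]
+-- (or the \showjustification macro from trac-jus.mkiv), after which lines
+-- whose natural width differs from the box width get a colored marker rule.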
diff --git a/Master/texmf-dist/tex/context/base/trac-jus.mkiv b/Master/texmf-dist/tex/context/base/trac-jus.mkiv
new file mode 100644
index 00000000000..7a5347da844
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-jus.mkiv
@@ -0,0 +1,25 @@
+%D \module
+%D [ file=trac-jus,
+%D version=2112.11.27,
+%D title=\CONTEXT\ Tracing Macros,
+%D subtitle=Justification,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Tracing Macros / Justification}
+
+\registerctxluafile{trac-jus}{1.001}
+
+\unprotect
+
+\definesystemattribute[justification] [public]
+
+\unexpanded\def\showjustification
+ {\ctxcommand{showjustification()}}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/trac-lmx.lua b/Master/texmf-dist/tex/context/base/trac-lmx.lua
index 452d03002f9..18c7f60204f 100644
--- a/Master/texmf-dist/tex/context/base/trac-lmx.lua
+++ b/Master/texmf-dist/tex/context/base/trac-lmx.lua
@@ -6,42 +6,66 @@ if not modules then modules = { } end modules ['trac-lmx'] = {
license = "see context related readme files"
}
--- todo: use lpeg instead (although not really needed)
+-- this one will be adapted to the latest helpers
-local gsub, format, concat, byte = string.gsub, string.format, table.concat, string.byte
+local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall
+local format, sub, gsub = string.format, string.sub, string.gsub
+local concat = table.concat
+local collapsespaces = string.collapsespaces
+local P, Cc, Cs, C, Carg, lpegmatch = lpeg.P, lpeg.Cc, lpeg.Cs, lpeg.C, lpeg.Carg, lpeg.match
+local joinpath, replacesuffix, pathpart, filesuffix = file.join, file.replacesuffix, file.pathpart, file.suffix
-local allocate = utilities.storage.allocate
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
-lmx = lmx or { }
-local lmx = lmx
+----- trace_templates = false trackers .register("lmx.templates", function(v) trace_templates = v end)
+local trace_variables = false trackers .register("lmx.variables", function(v) trace_variables = v end)
-lmx.variables = allocate()
-local lmxvariables = lmx.variables
+local cache_templates = true directives.register("lmx.cache.templates",function(v) cache_templates = v end)
+local cache_files = true directives.register("lmx.cache.files", function(v) cache_files = v end)
-local escapes = allocate {
- ['&'] = '&amp;',
- ['<'] = '&lt;',
- ['>'] = '&gt;',
- ['"'] = '&quot;'
-}
+local report_lmx = logs.reporter("lmx")
+local report_error = logs.reporter("lmx","error")
--- variables
+lmx = lmx or { }
+local lmx = lmx
-lmxvariables['title-default'] = 'ConTeXt LMX File'
-lmxvariables['title'] = lmx.variables['title-default']
-lmxvariables['color-background-green'] = '#4F6F6F'
-lmxvariables['color-background-blue'] = '#6F6F8F'
-lmxvariables['color-background-yellow'] = '#8F8F6F'
-lmxvariables['color-background-purple'] = '#8F6F8F'
-lmxvariables['color-background-body'] = '#808080'
-lmxvariables['color-background-main'] = '#3F3F3F'
-lmxvariables['color-background-one'] = lmxvariables['color-background-green']
-lmxvariables['color-background-two'] = lmxvariables['color-background-blue']
+-- This will change: we will just pass the global defaults as argument, but then we need
+-- to rewrite some older code or come up with an ugly trick.
-lmxvariables['color-background-three'] = function() return lmxvariables['color-background-one'] end
-lmxvariables['color-background-four'] = function() return lmxvariables['color-background-two'] end
+local lmxvariables = {
+ ['title-default'] = 'ConTeXt LMX File',
+ ['color-background-green'] = '#4F6F6F',
+ ['color-background-blue'] = '#6F6F8F',
+ ['color-background-yellow'] = '#8F8F6F',
+ ['color-background-purple'] = '#8F6F8F',
+ ['color-background-body'] = '#808080',
+ ['color-background-main'] = '#3F3F3F',
+}
-function lmx.set(key, value)
+local lmxinherited = {
+ ['title'] = 'title-default',
+ ['color-background-one'] = 'color-background-green',
+ ['color-background-two'] = 'color-background-blue',
+ ['color-background-three'] = 'color-background-one',
+ ['color-background-four'] = 'color-background-two',
+}
+
+lmx.variables = lmxvariables
+lmx.inherited = lmxinherited
+
+setmetatableindex(lmxvariables,function(t,k)
+ k = lmxinherited[k]
+ while k do
+ local v = rawget(lmxvariables,k)
+ if v then
+ return v
+ end
+ k = lmxinherited[k]
+ end
+end)
+
+function lmx.set(key,value)
lmxvariables[key] = value
end
@@ -49,9 +73,16 @@ function lmx.get(key)
return lmxvariables[key] or ""
end
+lmx.report = report_lmx
+
-- helpers
-local variables, result = { } -- we assume no nesting
+-- the variables table is an empty one that gets linked to a defaults table
+-- that gets passed with a creation (first time only) and that itself links
+-- to one that gets passed to the converter
+
+local variables = { } -- we assume no nesting
+local result = { } -- we assume no nesting
local function do_print(one,two,...)
if two then
@@ -61,173 +92,641 @@ local function do_print(one,two,...)
end
end
-local function do_escape(str)
- str = tostring(str)
- str = gsub(str,'&','&amp;')
- str = gsub(str,'[<>"]',escapes)
- return str
+-- Although it does not make much sense for most elements, we provide a mechanism
+-- to print wrapped content, something that is more efficient when we are constructing
+-- tables.
+
+local html = { }
+lmx.html = html
+
+function html.td(str)
+ if type(str) == "table" then
+ for i=1,#str do -- spoils t !
+ str[i] = format("<td>%s</td>",str[i] or "")
+ end
+ result[#result+1] = concat(str)
+ else
+ result[#result+1] = format("<td>%s</td>",str or "")
+ end
end
-local function do_urlescaped(str)
- return (gsub(str,"[^%a%d]",format("%%0x",byte("%1"))))
+function html.th(str)
+ if type(str) == "table" then
+ for i=1,#str do -- spoils t !
+ str[i] = format("<th>%s</th>",str[i])
+ end
+ result[#result+1] = concat(str)
+ else
+ result[#result+1] = format("<th>%s</th>",str or "")
+ end
end
-local function do_type(str)
- if str then do_print("<tt>" .. do_escape(str) .. "</tt>") end
+function html.a(text,url)
+ result[#result+1] = format("<a href=%q>%s</a>",url,text)
+end
+
+setmetatableindex(html,function(t,k)
+ local f = format("<%s>%%s</%s>",k,k)
+ local v = function(str) result[#result+1] = format(f,str or "") end
+ t[k] = v
+ return v
+end)
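+
+-- A small sketch of these helpers (they are normally used inside the
+-- <?lua ... ?> chunks of a template, where they append to the current result):
+--
+-- html.th { "flag", "value" }       -- <th>flag</th><th>value</th>
+-- html.td { "--help", "show help" } -- <td>--help</td><td>show help</td>
+-- html.a("contextgarden","http://contextgarden.net")
+-- html.h1("Command line options")   -- generated by the metatable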
+
+-- Loading templates:
+
+local function loadedfile(name)
+ name = resolvers and resolvers.findfile and resolvers.findfile(name) or name
+ local data = io.loaddata(name)
+ if not data or data == "" then
+ report_lmx("file %a is empty",name)
+ end
+ return data
+end
+
+local function loadedsubfile(name)
+ return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name)
+end
+
+lmx.loadedfile = loadedfile
+
+-- A few helpers (the next one could end up in l-lpeg):
+
+local usedpaths = { }
+local givenpath = nil
+
+local do_nested_include = nil
+
+local pattern = lpeg.replacer {
+ ["&"] = "&amp;",
+ [">"] = "&gt;",
+ ["<"] = "&lt;",
+ ['"'] = "&quot;",
+}
+
+local function do_escape(str)
+ return lpegmatch(pattern,str) or str
end
local function do_variable(str)
- local value = variables[str] or lmxvariables[str] -- or format("<!-- unset lmx instance variable: %s -->",str or "?")
- if type(value) == "function" then
+ local value = variables[str]
+ if not trace_variables then
+ -- nothing
+ elseif type(value) == "string" then
+ if #value > 80 then
+ report_lmx("variable %a is set to: %s ...",str,collapsespaces(sub(value,1,80)))
+ else
+ report_lmx("variable %a is set to: %s",str,collapsespaces(value))
+ end
+ elseif type(value) == "nil" then
+ report_lmx("variable %a is set to: %s",str,"<!-- unset -->")
+ else
+ report_lmx("variable %a is set to: %S",str,value)
+ end
+ if type(value) == "function" then -- obsolete ... will go away
return value(str)
else
return value
end
end
-function lmx.loadedfile(name)
- name = (resolvers and resolvers.findfile and resolvers.findfile(name)) or name
- return io.loaddata(name)
+local function do_type(str)
+ if str and str ~= "" then
+ result[#result+1] = format("<tt>%s</tt>",do_escape(str))
+ end
+end
+
+local function do_fprint(str,...)
+ if str and str ~= "" then
+ result[#result+1] = format(str,...)
+ end
+end
+
+local function do_eprint(str,...)
+ if str and str ~= "" then
+ result[#result+1] = lpegmatch(pattern,format(str,...))
+ end
+end
+
+local function do_print_variable(str)
+ local str = do_variable(str) -- variables[str]
+ if str and str ~= "" then
+ result[#result+1] = str
+ end
end
-local function do_include(filename)
- local stylepath = do_variable('includepath') -- todo: store paths of loaded files
- local data = lmx.loadedfile(filename)
- if (not data or data == "") and stylepath and stylepath ~= "" then
- data = lmx.loadedfile(file.join(stylepath,filename))
+local function do_type_variable(str)
+ local str = do_variable(str) -- variables[str]
+ if str and str ~= "" then
+ result[#result+1] = format("<tt>%s</tt>",do_escape(str))
+ end
+end
+
+local function do_include(filename,option)
+ local data = loadedsubfile(filename)
+ if (not data or data == "") and givenpath then
+ data = loadedsubfile(joinpath(givenpath,filename))
+ end
+ if (not data or data == "") and type(usedpaths) == "table" then
+ for i=1,#usedpaths do
+ data = loadedsubfile(joinpath(usedpaths[i],filename))
+ if data and data ~= "" then
+ break
+ end
+ end
end
if not data or data == "" then
data = format("<!-- unknown lmx include file: %s -->",filename)
+ report_lmx("include file %a is empty",filename)
+ else
+ -- report_lmx("included file: %s",filename)
+ data = do_nested_include(data)
+ end
+ if filesuffix(filename,"css") and option == "strip" then -- new
+ data = lmx.stripcss(data)
end
return data
end
+-- Flushers:
+
lmx.print = do_print
lmx.type = do_type
+lmx.eprint = do_eprint
+lmx.fprint = do_fprint
+
lmx.escape = do_escape
-lmx.urlescape = do_escape
+lmx.urlescape = url.escape
lmx.variable = do_variable
lmx.include = do_include
-function lmx.pv(str)
- do_print(do_variable(str) or "")
+lmx.inject = do_print
+lmx.finject = do_fprint
+lmx.einject = do_eprint
+
+lmx.pv = do_print_variable
+lmx.tv = do_type_variable
+
+-- The next functions set up the closure.
+
+function lmx.initialize(d,v)
+ if not v then
+ setmetatableindex(d,lmxvariables)
+ if variables ~= d then
+ setmetatableindex(variables,d)
+ if trace_variables then
+ report_lmx("using chain: variables => given defaults => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given defaults => lmx variables")
+ end
+ elseif d ~= v then
+ setmetatableindex(v,d)
+ if d ~= lmxvariables then
+ setmetatableindex(d,lmxvariables)
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variables => given variables => given defaults => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => given defaults => lmx variables")
+ end
+ else
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variabes => given variables => given defaults")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => given defaults")
+ end
+ end
+ else
+ setmetatableindex(v,lmxvariables)
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variables => given variables => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => lmx variables")
+ end
+ end
+ result = { }
end
-function lmx.tv(str)
- lmx.type(do_variable(str) or "")
+function lmx.finalized()
+ local collapsed = concat(result)
+ result = { } -- free memory
+ return collapsed
end
+function lmx.getvariables()
+ return variables
+end
+
+function lmx.reset()
+ -- obsolete
+end
+
+-- Creation: (todo: strip <!-- -->)
+
+-- local template = [[
+-- return function(defaults,variables)
+--
+-- -- initialize
+--
+-- lmx.initialize(defaults,variables)
+--
+-- -- interface
+--
+-- local definitions = { }
+-- local variables = lmx.getvariables()
+-- local html = lmx.html
+-- local inject = lmx.print
+-- local finject = lmx.fprint
+-- local einject = lmx.eprint
+-- local escape = lmx.escape
+-- local verbose = lmx.type
+--
+-- -- shortcuts (sort of obsolete as there is no gain)
+--
+-- local p = lmx.print
+-- local f = lmx.fprint
+-- local v = lmx.variable
+-- local e = lmx.escape
+-- local t = lmx.type
+-- local pv = lmx.pv
+-- local tv = lmx.tv
+--
+-- -- generator
+--
+-- %s
+--
+-- -- finalize
+--
+-- return lmx.finalized()
+--
+-- end
+-- ]]
+
local template = [[
+-- interface
+
+local html = lmx.html
+local inject = lmx.print
+local finject = lmx.fprint -- better use the following
+local einject = lmx.eprint -- better use the following
+local injectf = lmx.fprint
+local injecte = lmx.eprint
+local injectfmt = lmx.fprint
+local injectesc = lmx.eprint
+local escape = lmx.escape
+local verbose = lmx.type
+
+local i_n_j_e_c_t = lmx.print
+
+-- shortcuts (sort of obsolete as there is no gain)
+
+local p = lmx.print
+local f = lmx.fprint
+local v = lmx.variable
+local e = lmx.escape
+local t = lmx.type
+local pv = lmx.pv
+local tv = lmx.tv
+
+local lmx_initialize = lmx.initialize
+local lmx_finalized = lmx.finalized
+local lmx_getvariables = lmx.getvariables
+
+-- generator
+
+return function(defaults,variables)
+
+ lmx_initialize(defaults,variables)
+
local definitions = { }
- local p, v, e, t, pv, tv = lmx.print, lmx.variable, lmx.escape, lmx.type, lmx.pv, lmx.tv
- %s
+ local variables = lmx_getvariables()
+
+ %s -- the action: appends to result
+
+ return lmx_finalized()
+
+end
]]
+local function savedefinition(definitions,tag,content)
+ definitions[tag] = content
+ return ""
+end
+
+local function getdefinition(definitions,tag)
+ return definitions[tag] or ""
+end
+
+local whitespace = lpeg.patterns.whitespace
+local optionalspaces = whitespace^0
+
+local dquote = P('"')
+
+local begincomment = P("<!--")
+local endcomment = P("-->")
+
+local beginembedxml = P("<?")
+local endembedxml = P("?>")
+
+local beginembedcss = P("/*")
+local endembedcss = P("*/")
+
+local gobbledendxml = (optionalspaces * endembedxml) / ""
+----- argumentxml = (1-gobbledendxml)^0
+local argumentxml = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendxml-whitespace)^1))^0
+
+local gobbledendcss = (optionalspaces * endembedcss) / ""
+----- argumentcss = (1-gobbledendcss)^0
+local argumentcss = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendcss-whitespace)^1))^0
+
+local commentxml = (begincomment * (1-endcomment)^0 * endcomment) / ""
+
+local beginluaxml = (beginembedxml * P("lua")) / ""
+local endluaxml = endembedxml / ""
+
+local luacodexml = beginluaxml
+ * (1-endluaxml)^1
+ * endluaxml
+
+local beginluacss = (beginembedcss * P("lua")) / ""
+local endluacss = endembedcss / ""
+
+local luacodecss = beginluacss
+ * (1-endluacss)^1
+ * endluacss
+
+local othercode = (1-beginluaxml-beginluacss)^1 / " i_n_j_e_c_t[==[%0]==] "
+
+local includexml = ((beginembedxml * P("lmx-include") * optionalspaces) / "")
+ * (argumentxml / do_include)
+ * gobbledendxml
+
+local includecss = ((beginembedcss * P("lmx-include") * optionalspaces) / "")
+ * (argumentcss / do_include)
+ * gobbledendcss
+
+local definexml_b = ((beginembedxml * P("lmx-define-begin") * optionalspaces) / "")
+ * argumentxml
+ * gobbledendxml
+
+local definexml_e = ((beginembedxml * P("lmx-define-end") * optionalspaces) / "")
+ * argumentxml
+ * gobbledendxml
+
+local definexml_c = C((1-definexml_e)^0)
+
+local definexml = (Carg(1) * C(definexml_b) * definexml_c * definexml_e) / savedefinition
+
+local resolvexml = ((beginembedxml * P("lmx-resolve") * optionalspaces) / "")
+ * ((Carg(1) * C(argumentxml)) / getdefinition)
+ * gobbledendxml
+
+local definecss_b = ((beginembedcss * P("lmx-define-begin") * optionalspaces) / "")
+ * argumentcss
+ * gobbledendcss
+
+local definecss_e = ((beginembedcss * P("lmx-define-end") * optionalspaces) / "")
+ * argumentcss
+ * gobbledendcss
+
+local definecss_c = C((1-definecss_e)^0)
+
+local definecss = (Carg(1) * C(definecss_b) * definecss_c * definecss_e) / savedefinition
+
+local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / "")
+ * ((Carg(1) * C(argumentcss)) / getdefinition)
+ * gobbledendcss
+
+local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap
+local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0)
+local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0)
+
local cache = { }
-local trace = false
+local function lmxerror(str)
+ report_error(str)
+ return html.tt(str)
+end
+
+local function wrapper(converter,defaults,variables)
+ local outcome, message = pcall(converter,defaults,variables)
+ if not outcome then
+ return lmxerror(format("error in conversion: %s",message))
+ else
+ return message
+ end
+end
-function lmx.new(data,variables)
+do_nested_include = function(data) -- also used in include
+ return lpegmatch(pattern_1,data)
+end
+
+function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
data = data or ""
local known = cache[data]
if not known then
- local definitions = { }
- data = gsub(data,"<%?lmx%-include%s+(.-)%s-%?>", function(filename)
- return lmx.include(filename)
- end)
- local definitions = { }
- data = gsub(data,"<%?lmx%-define%-begin%s+(%S-)%s-%?>(.-)<%?lmx%-define%-end%s-%?>", function(tag,content)
- definitions[tag] = content
- return ""
- end)
- data = gsub(data,"<%?lmx%-resolve%s+(%S-)%s-%?>", function(tag)
- return definitions[tag] or ""
- end)
- data = gsub(data .. "<?lua ?>","(.-)<%?lua%s+(.-)%s*%?>", function(txt,lua)
- txt = gsub(txt,"%c+","\n")
- return format("p(%q)%s ",txt,lua) -- nb! space
- end)
- data = format(template,data)
+ givenpath = path
+ usedpaths = lmxvariables.includepath or { }
+ if type(usedpaths) == "string" then
+ usedpaths = { usedpaths }
+ end
+ data = lpegmatch(pattern_1,data)
+ data = lpegmatch(pattern_2,data,1,{})
+ data = lpegmatch(pattern_3,data)
+ local converted = loadstring(format(template,data))
+ if converted then
+ converted = converted()
+ end
+ defaults = defaults or { }
+ local converter
+ if converted then
+ converter = function(variables)
+ return wrapper(converted,defaults,variables)
+ end
+ else
+ report_error("error in:\n%s\n:",data)
+ converter = function() lmxerror("error in template") end
+ end
known = {
- data = trace and data,
- variables = variables or { },
- converter = loadstring(data),
+ data = defaults.trace and data or "",
+ variables = defaults,
+ converter = converter,
}
+ if cache_templates and nocache ~= false then
+ cache[data] = known
+ end
elseif variables then
known.variables = variables
end
return known, known.variables
end
-function lmx.reset(self)
- self.variables = { }
-end
-
-function lmx.result(self)
- if trace then
- return self.data
+local function lmxresult(self,variables)
+ if self then
+ local converter = self.converter
+ if converter then
+ local converted = converter(variables)
+ if trace_variables then -- will become templates
+ report_lmx("converted size: %s",#converted)
+ end
+ return converted or lmxerror("no result from converter")
+ else
+ return lmxerror("invalid converter")
+ end
else
- variables, result = self.variables, { }
- self.converter()
- return concat(result)
+ return lmxerror("invalid specification")
end
end
--- file converter
+lmx.new = lmxnew
+lmx.result = lmxresult
-local loaded = { }
+local loadedfiles = { }
-function lmx.convert(templatefile,resultfile,variables)
- local data = loaded[templatefile]
- if not data then
- data = lmx.new(lmx.loadedfile(templatefile),variables)
- loaded[template] = data
- elseif variables then
- data.variables = variables
+function lmx.convertstring(templatestring,variables,nocache,path)
+ return lmxresult(lmxnew(templatestring,nil,nocache,path),variables)
+end
+
+function lmx.convertfile(templatefile,variables,nocache)
+ if trace_variables then -- will become templates
+ report_lmx("converting file %a",templatefile)
+ end
+ local converter = loadedfiles[templatefile]
+ if not converter then
+ converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
+ loadedfiles[templatefile] = converter
+ end
+ return lmxresult(converter,variables)
+end
+
+function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
+ if trace_variables then -- will become templates
+ report_lmx("converting file %a",templatefile)
+ end
+ if not variables and type(resultfile) == "table" then
+ variables = resultfile
+ end
+ local converter = loadedfiles[templatefile]
+ if not converter then
+ converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
+ if cache_files then
+ loadedfiles[templatefile] = converter
+ end
end
- local result = lmx.result(data)
+ local result = lmxresult(converter,variables)
if resultfile then
io.savedata(resultfile,result)
else
- return lmx.result(data,result)
+ return result
end
end
--- these can be overloaded; we assume that the os handles filename associations
+lmx.convert = lmxconvert
-lmx.lmxfile = function(filename) return filename end -- beware, these can be set!
-lmx.htmfile = function(filename) return filename end -- beware, these can be set!
+-- helpers
+
+local nocomment = (beginembedcss * (1 - endembedcss)^1 * endembedcss) / ""
+local nowhitespace = whitespace^1 / " " -- ""
+local semistripped = whitespace^1 / "" * P(";")
+local stripper = Cs((nocomment + semistripped + nowhitespace + 1)^1)
-if os.type == "windows" then
- lmx.popupfile = function(filename) os.execute("start " .. filename) end
-else
- lmx.popupfile = function(filename) os.execute(filename) end
+function lmx.stripcss(str)
+ return lpegmatch(stripper,str)
end
-function lmx.make(name,variables)
+function lmx.color(r,g,b,a)
+ if r > 1 then
+ r = 1
+ end
+ if g > 1 then
+ g = 1
+ end
+ if b > 1 then
+ b = 1
+ end
+ if not a then
+ a = 0
+ elseif a > 1 then
+ a = 1
+ end
+ if a > 0 then
+ return format("rgba(%s%%,%s%%,%s%%,%s)",r*100,g*100,b*100,a)
+ else
+ return format("rgb(%s%%,%s%%,%s%%)",r*100,g*100,b*100)
+ end
+end
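+
+-- For instance (components are clamped to 1, the result is a CSS color):
+--
+-- lmx.color(0.5,0.25,0) -- "rgb(50%,25%,0%)"
+-- lmx.color(1,0,0,0.5)  -- "rgba(100%,0%,0%,0.5)"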
+
+
+-- these can be overloaded
+
+lmx.lmxfile = string.itself
+lmx.htmfile = string.itself
+lmx.popupfile = os.launch
+
+function lmxmake(name,variables)
local lmxfile = lmx.lmxfile(name)
local htmfile = lmx.htmfile(name)
if lmxfile == htmfile then
- htmfile = gsub(lmxfile, "%.%a+$", "html")
+ htmfile = replacesuffix(lmxfile,"html")
end
- lmx.convert(lmxfile,htmfile,variables)
+ lmxconvert(lmxfile,htmfile,variables)
return htmfile
end
+lmx.make = lmxmake
+
function lmx.show(name,variables)
- local htmfile = lmx.make(name,variables)
+ local htmfile = lmxmake(name,variables)
lmx.popupfile(htmfile)
return htmfile
end
--- test
-
---~ print(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx"))))
-
--- command line
+-- Command line (will become mtx-lmx):
if arg then
if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end
elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end
end
end
+
+-- Test 1:
+
+-- inspect(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx"))))
+
+-- Test 2:
+
+-- local str = [[
+-- <?lmx-include context.css strip ?>
+-- <test>
+-- <?lmx-define-begin whatever?>some content a<?lmx-define-end ?>
+-- <?lmx-define-begin somemore?>some content b<?lmx-define-end ?>
+-- <more>
+-- <?lmx-resolve whatever ?>
+-- <?lua
+-- for i=1,10 do end
+-- ?>
+-- <?lmx-resolve somemore ?>
+-- </more>
+-- <td><?lua p(100) ?></td>
+-- <td><?lua p(variables.a) ?></td>
+-- <td><?lua p(variables.b) ?></td>
+-- <td><?lua p(variables.c) ?></td>
+-- <td><?lua pv('title-default') ?></td>
+-- </test>
+-- ]]
+
+-- local defaults = { trace = true, a = 3, b = 3 }
+-- local result = lmx.new(str,defaults)
+-- inspect(result.data)
+-- inspect(result.converter(defaults))
+-- inspect(result.converter { a = 1 })
+-- inspect(lmx.result(result, { b = 2 }))
+-- inspect(lmx.result(result, { a = 20000, b = 40000 }))
diff --git a/Master/texmf-dist/tex/context/base/trac-log.lua b/Master/texmf-dist/tex/context/base/trac-log.lua
index 44de8ebe6d0..0dadb855ec2 100644
--- a/Master/texmf-dist/tex/context/base/trac-log.lua
+++ b/Master/texmf-dist/tex/context/base/trac-log.lua
@@ -6,23 +6,78 @@ if not modules then modules = { } end modules ['trac-log'] = {
license = "see context related readme files"
}
--- todo: less categories, more subcategories (e.g. nodes)
+-- if tex and (tex.jobname or tex.formatname) then
+--
+-- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
+-- -- still needed for luajittex
+--
+-- local texio_write_nl = texio.write_nl
+-- local texio_write = texio.write
+-- local io_write = io.write
+--
+-- local write_nl = function(target,...)
+-- if not io_write then
+-- io_write = io.write
+-- end
+-- if target == "term and log" then
+-- texio_write_nl("log",...)
+-- texio_write_nl("term","")
+-- io_write(...)
+-- elseif target == "log" then
+-- texio_write_nl("log",...)
+-- elseif target == "term" then
+-- texio_write_nl("term","")
+-- io_write(...)
+-- else
+-- texio_write_nl("log",target,...)
+-- texio_write_nl("term","")
+-- io_write(target,...)
+-- end
+-- end
+--
+-- local write = function(target,...)
+-- if not io_write then
+-- io_write = io.write
+-- end
+-- if target == "term and log" then
+-- texio_write("log",...)
+-- io_write(...)
+-- elseif target == "log" then
+-- texio_write("log",...)
+-- elseif target == "term" then
+-- io_write(...)
+-- else
+-- texio_write("log",target,...)
+-- io_write(target,...)
+-- end
+-- end
+--
+-- texio.write = write
+-- texio.write_nl = write_nl
+--
+-- else
+--
+-- -- texlua or just lua
+--
+-- end
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
+-- todo: less categories, more subcategories (e.g. nodes)
+-- todo: split into basics and ctx specific
local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
local format, gmatch, find = string.format, string.gmatch, string.find
local concat, insert, remove = table.concat, table.insert, table.remove
-local escapedpattern = string.escapedpattern
+local topattern = string.topattern
local texcount = tex and tex.count
-local next, type = next, type
+local next, type, select = next, type, select
+local utfchar = utf.char
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
--[[ldx--
<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging a sparsing is relatively easy anyway.</p>
+provide <l n='xml'/> based logging as parsing is relatively easy anyway.</p>
--ldx]]--
logs = logs or { }
@@ -30,12 +85,42 @@ local logs = logs
local moreinfo = [[
More information about ConTeXt and the tools that come with it can be found at:
-
+]] .. "\n" .. [[
maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
+-- -- we extend the formatters:
+--
+-- function utilities.strings.unichr(s) return "U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")" end
+-- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end
+--
+-- utilities.strings.formatters.add (
+-- string.formatters, "uni",
+-- [[unichr(%s)]],
+-- [[local unichr = utilities.strings.unichr]]
+-- )
+--
+-- utilities.strings.formatters.add (
+-- string.formatters, "chr",
+-- [[chruni(%s)]],
+-- [[local chruni = utilities.strings.chruni]]
+-- )
+
+utilities.strings.formatters.add (
+ formatters, "unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+
+utilities.strings.formatters.add (
+ formatters, "chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
+
+-- print(formatters["Missing character %!chruni! in font."](234))
+-- print(formatters["Missing character %!unichr! in font."](234))
+
-- basic loggers
local function ignore() end
@@ -48,6 +133,8 @@ local direct, subdirect, writer, pushtarget, poptarget
if tex and (tex.jobname or tex.formatname) then
+ -- local format = string.formatter
+
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
local target = "term and log"
@@ -65,61 +152,80 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
+ local f_one = formatters["%-15s > %s\n"]
+ local f_two = formatters["%-15s >\n"]
+
+ -- we can use formatters but best check for % first because for simple
+ -- messages we don't want this overhead (not that there are that many; we
+ -- could have a special weak table)
+
report = function(a,b,c,...)
if c then
- write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
elseif b then
- write_nl(target,format("%-15s > %s\n",translations[a],formats[b]))
+ write_nl(target,f_one(translations[a],formats[b]))
elseif a then
- write_nl(target,format("%-15s >\n", translations[a]))
+ write_nl(target,f_two(translations[a]))
else
write_nl(target,"\n")
end
end
+ local f_one = formatters["%-15s > %s"]
+ local f_two = formatters["%-15s >"]
+
direct = function(a,b,c,...)
if c then
- return format("%-15s > %s",translations[a],format(formats[b],c,...))
+ return f_one(translations[a],formatters[formats[b]](c,...))
elseif b then
- return format("%-15s > %s",translations[a],formats[b])
+ return f_one(translations[a],formats[b])
elseif a then
- return format("%-15s >", translations[a])
+ return f_two(translations[a])
else
return ""
end
end
+ local f_one = formatters["%-15s > %s > %s\n"]
+ local f_two = formatters["%-15s > %s >\n"]
+
subreport = function(a,s,b,c,...)
if c then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],format(formats[b],c,...)))
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
elseif b then
- write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],formats[b]))
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
elseif a then
- write_nl(target,format("%-15s > %s >\n", translations[a],translations[s]))
+ write_nl(target,f_two(translations[a],translations[s]))
else
write_nl(target,"\n")
end
end
+ local f_one = formatters["%-15s > %s > %s"]
+ local f_two = formatters["%-15s > %s >"]
+
subdirect = function(a,s,b,c,...)
if c then
- return format("%-15s > %s > %s",translations[a],translations[s],format(formats[b],c,...))
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
elseif b then
- return format("%-15s > %s > %s",translations[a],translations[s],formats[b])
+ return f_one(translations[a],translations[s],formats[b])
elseif a then
- return format("%-15s > %s >", translations[a],translations[s])
+ return f_two(translations[a],translations[s])
else
return ""
end
end
+ local f_one = formatters["%-15s : %s\n"]
+ local f_two = formatters["%-15s :\n"]
+
status = function(a,b,c,...)
if c then
- write_nl(target,format("%-15s : %s\n",translations[a],format(formats[b],c,...)))
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
elseif b then
- write_nl(target,format("%-15s : %s\n",translations[a],formats[b]))
+ write_nl(target,f_one(translations[a],formats[b]))
elseif a then
- write_nl(target,format("%-15s :\n", translations[a]))
+ write_nl(target,f_two(translations[a]))
else
write_nl(target,"\n")
end
@@ -174,37 +280,46 @@ else
write_nl("\n")
end
+ local f_one = formatters["%-15s | %s"]
+ local f_two = formatters["%-15s |"]
+
report = function(a,b,c,...)
if c then
- write_nl(format("%-15s | %s",a,format(b,c,...)))
+ write_nl(f_one(a,formatters[b](c,...)))
elseif b then
- write_nl(format("%-15s | %s",a,b))
+ write_nl(f_one(a,b))
elseif a then
- write_nl(format("%-15s |", a))
+ write_nl(f_two(a))
else
write_nl("")
end
end
+ local f_one = formatters["%-15s | %s | %s"]
+ local f_two = formatters["%-15s | %s |"]
+
subreport = function(a,sub,b,c,...)
if c then
- write_nl(format("%-15s | %s | %s",a,sub,format(b,c,...)))
+ write_nl(f_one(a,sub,formatters[b](c,...)))
elseif b then
- write_nl(format("%-15s | %s | %s",a,sub,b))
+ write_nl(f_one(a,sub,b))
elseif a then
- write_nl(format("%-15s | %s |", a,sub))
+ write_nl(f_two(a,sub))
else
write_nl("")
end
end
+ local f_one = formatters["%-15s : %s\n"]
+ local f_two = formatters["%-15s :\n"]
+
status = function(a,b,c,...) -- not to be used in lua anyway
if c then
- write_nl(format("%-15s : %s\n",a,format(b,c,...)))
+ write_nl(f_one(a,formatters[b](c,...)))
elseif b then
- write_nl(format("%-15s : %s\n",a,b)) -- b can have %'s
+ write_nl(f_one(a,b)) -- b can have %'s
elseif a then
- write_nl(format("%-15s :\n", a))
+ write_nl(f_two(a))
else
write_nl("\n")
end
@@ -330,7 +445,7 @@ local function setblocked(category,value)
if data[c] then
v.state = value
else
- c = escapedpattern(c,true)
+ c = topattern(c,true,true)
for k, v in next, data do
if find(k,c) then
v.state = value
@@ -382,11 +497,23 @@ function logs.show()
state = "unknown"
end
-- no new here
- report("logging","category: '%s', subcategories: '%s', state: '%s'",category,subcategories,state)
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
end
report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
end
+local delayed_reporters = { }
+
+setmetatableindex(delayed_reporters,function(t,k)
+ local v = logs.reporter(k.name)
+ t[k] = v
+ return v
+end)
+
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+
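-- Sketch (illustrative, not part of the patch) of the delayed indirection just
-- defined: the table key is the setter object itself, the category its .name:
local fake_setter = { name = "demo" }              -- stand-in for trackers etc.
utilities.setters.report(fake_setter,"enabling %a","something")
-- the first call creates logs.reporter("demo"); later calls reuse it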
directives.register("logs.blocked", function(v)
setblocked(v,true)
end)
@@ -428,46 +555,35 @@ function logs.stop_page_number() -- the first page can includes the initializati
average = (stoptime - starttime) / (real - 1)
end
lasttime = stoptime
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- end
- else
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- end
- else
+ if real <= 0 then
report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user <= 0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub <= 0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
end
else
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- report_pages("flushing realpage %s, userpage %s",real,user)
- end
- else
- report_pages("flushing realpage %s",real)
- end
- else
+ if real <= 0 then
report_pages("flushing page")
+ elseif user <= 0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub <= 0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
end
end
logs.flush()
end
-logs.report_job_stat = statistics and statistics.showjobstat
+-- we don't have show_open and show_close callbacks yet
local report_files = logs.reporter("files")
-
-local nesting = 0
-local verbose = false
-local hasscheme = url.hasscheme
-
--- we don't have show_open and show_close callbacks yet
+local nesting = 0
+local verbose = false
+local hasscheme = url.hasscheme
function logs.show_open(name)
-- if hasscheme(name) ~= "virtual" then
@@ -475,7 +591,7 @@ function logs.show_open(name)
-- nesting = nesting + 1
-- report_files("level %s, opening %s",nesting,name)
-- else
- -- write(format("(%s",name)) -- tex adds a space
+ -- write(formatters["(%s"](name)) -- tex adds a space
-- end
-- end
end
@@ -496,7 +612,7 @@ function logs.show_load(name)
-- if verbose then
-- report_files("level %s, loading %s",nesting+1,name)
-- else
- -- write(format("(%s)",name))
+ -- write(formatters["(%s)"](name))
-- end
-- end
end
@@ -520,11 +636,26 @@ function logs.help () end -- obsolete
-- applications
+-- local function reportlines(t,str)
+-- if str then
+-- for line in gmatch(str,"([^\n\r]*)[\n\r]") do
+-- t.report(line)
+-- end
+-- end
+-- end
+
+local Carg, C, lpegmatch = lpeg.Carg, lpeg.C, lpeg.match
+local p_newline = lpeg.patterns.newline
+
+local linewise = (
+ Carg(1) * C((1-p_newline)^1) / function(t,s) t.report(s) end
+ + Carg(1) * p_newline^2 / function(t) t.report() end
+ + p_newline
+)^1
+
local function reportlines(t,str)
if str then
- for line in gmatch(str,"(.-)[\n\r]") do
- t.report(line)
- end
+ lpegmatch(linewise,str,1,t)
end
end
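-- Minimal illustration of the lpeg splitter above: Carg(1) threads the reporter
-- table through the match, so every line lands in t.report (t is a fake here):
local t = { report = print }
lpeg.match(linewise,"first line\nsecond line\n\n\nthird line\n",1,t)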
@@ -548,10 +679,9 @@ local function reporthelp(t,...)
if type(helpinfo) == "string" then
reportlines(t,helpinfo)
elseif type(helpinfo) == "table" then
- local tags = { ... }
- for i=1,#tags do
- reportlines(t,t.helpinfo[tags[i]])
- if i < #tags then
+ for i=1,select("#",...) do
+ reportlines(t,t.helpinfo[select(i,...)])
+            if i < select("#",...) then
t.report()
end
end
@@ -560,33 +690,65 @@ end
local function reportinfo(t)
t.report()
- reportlines(t,moreinfo)
+ reportlines(t,t.moreinfo)
end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+
+local reporters = {
+ lines = reportlines, -- not to be overloaded
+ banner = reportbanner,
+ version = reportversion,
+ help = reporthelp,
+ info = reportinfo,
+ export = reportexport,
+}
+
+local exporters = {
+ -- empty
+}
+
+logs.reporters = reporters
+logs.exporters = exporters
+
function logs.application(t)
t.name = t.name or "unknown"
t.banner = t.banner
+ t.moreinfo = moreinfo
t.report = logs.reporter(t.name)
- t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
- t.identify = function() reportbanner(t) end
- t.version = function() reportversion(t) end
+ t.help = function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export = function(...)
+ reporters.export(t,...)
+ end
+ t.identify = function()
+ reporters.banner(t)
+ end
+ t.version = function()
+ reporters.version(t)
+ end
return t
end
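-- Minimal sketch of how a script typically hooks into logs.application; the
-- name, banner and helpinfo text below are purely illustrative:
local application = logs.application {
    name     = "mtx-demo",
    banner   = "Demo Script 1.00",
    helpinfo = [[
--run         process the given file
--list        list known files
]],
}
application.help() -- banner, the option lines above, then the moreinfo block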
--- somewhat special
+-- somewhat special .. will be redone (already a better solution in place in lmx)
-- logging to a file
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
+-- local syslogname = "oeps.xxx"
+--
+-- for i=1,10 do
+-- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
+-- end
function logs.system(whereto,process,jobname,category,...)
- local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+ local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keepint the file open
+ local f = io.open(whereto,"a") -- we can consider keeping the file open
if f then
f:write(message)
f:close()
@@ -603,18 +765,18 @@ function logs.obsolete(old,new)
local o = loadstring("return " .. new)()
if type(o) == "function" then
return function(...)
- report_system("function %s is obsolete, use %s",old,new)
+ report_system("function %a is obsolete, use %a",old,new)
loadstring(old .. "=" .. new .. " return ".. old)()(...)
end
elseif type(o) == "table" then
local t, m = { }, { }
m.__index = function(t,k)
- report_system("table %s is obsolete, use %s",old,new)
+ report_system("table %a is obsolete, use %a",old,new)
m.__index, m.__newindex = o, o
return o[k]
end
m.__newindex = function(t,k,v)
- report_system("table %s is obsolete, use %s",old,new)
+ report_system("table %a is obsolete, use %a",old,new)
m.__index, m.__newindex = o, o
o[k] = v
end
@@ -640,7 +802,11 @@ else
end
end
--- do we still need io.flush then?
+-- this is somewhat slower but prevents out-of-order messages when print is mixed
+-- with texio.write
io.stdout:setvbuf('no')
io.stderr:setvbuf('no')
+
+-- windows: > nul 2>&1
+-- unix    : > /dev/null 2>&1
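-- Usage sketch (not part of the patch): a category reporter as produced by
-- logs.reporter, using the %a directive introduced throughout this change
-- (strings typically come out quoted):
local report_demo = logs.reporter("demo")
report_demo("loading %a","somefile.tex") -- roughly: demo            > loading 'somefile.tex'
report_demo()                            -- just the category prefix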
diff --git a/Master/texmf-dist/tex/context/base/trac-pro.lua b/Master/texmf-dist/tex/context/base/trac-pro.lua
index 43c5ef7db74..d6e0d03396f 100644
--- a/Master/texmf-dist/tex/context/base/trac-pro.lua
+++ b/Master/texmf-dist/tex/context/base/trac-pro.lua
@@ -26,17 +26,17 @@ local registered = { }
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
- report_system("reference to '%s' in protected namespace '%s'",k,name)
+ report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
- report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ report_system("assignment to %a in protected namespace %a",k,name)
end
end
diff --git a/Master/texmf-dist/tex/context/base/trac-set.lua b/Master/texmf-dist/tex/context/base/trac-set.lua
index bc0070eb42b..95fdc43b30f 100644
--- a/Master/texmf-dist/tex/context/base/trac-set.lua
+++ b/Master/texmf-dist/tex/context/base/trac-set.lua
@@ -6,19 +6,22 @@ if not modules then modules = { } end modules ['trac-set'] = { -- might become u
license = "see context related readme files"
}
+-- maybe this should be util-set.lua
+
local type, next, tostring = type, next, tostring
local concat = table.concat
-local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
+local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern
local is_boolean = string.is_boolean
local settings_to_hash = utilities.parsers.settings_to_hash
local allocate = utilities.storage.allocate
utilities = utilities or { }
local utilities = utilities
-utilities.setters = utilities.setters or { }
-local setters = utilities.setters
-local data = { } -- maybe just local
+local setters = utilities.setters or { }
+utilities.setters = setters
+
+local data = { }
-- We can initialize from the cnf file. This is sort of tricky as
-- later defined setters also need to be initialized then. If set
@@ -29,33 +32,41 @@ local trace_initialize = false -- only for testing during development
function setters.initialize(filename,name,values) -- filename only for diagnostics
local setter = data[name]
if setter then
+        frozen = true -- don't permit overload
+ -- trace_initialize = true
local data = setter.data
if data then
- for key, value in next, values do
- -- key = gsub(key,"_",".")
- value = is_boolean(value,value)
+ for key, newvalue in next, values do
+ local newvalue = is_boolean(newvalue,newvalue)
local functions = data[key]
if functions then
- if #functions > 0 and not functions.value then
+ local oldvalue = functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
+ end
+ elseif #functions > 0 and not oldvalue then
+-- elseif #functions > 0 and oldvalue == nil then
if trace_initialize then
- setter.report("executing %s (%s -> %s)",key,filename,tostring(value))
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
end
for i=1,#functions do
- functions[i](value)
+ functions[i](newvalue)
end
- functions.value = value
+ functions.value = newvalue
+ functions.frozen = functions.frozen or frozen
else
if trace_initialize then
- setter.report("skipping %s (%s -> %s)",key,filename,tostring(value))
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
end
end
else
-- we do a simple preregistration i.e. not in the
-- list as it might be an obsolete entry
- functions = { default = value }
+ functions = { default = newvalue, frozen = frozen }
data[key] = functions
if trace_initialize then
- setter.report("storing %s (%s -> %s)",key,filename,tostring(value))
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
end
end
end
@@ -67,46 +78,52 @@ end
-- user interface code
local function set(t,what,newvalue)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = settings_to_hash(what) -- inefficient but ok
- end
- if type(what) ~= "table" then
- return
- end
- if not done then -- catch ... why not set?
- done = { }
- t.done = done
- end
- for w, value in next, what do
- if value == "" then
- value = newvalue
- elseif not value then
- value = false -- catch nil
- else
- value = is_boolean(value,value)
+ local data = t.data
+ if not data.frozen then
+ local done = t.done
+ if type(what) == "string" then
+ what = settings_to_hash(what) -- inefficient but ok
end
- w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
- for name, functions in next, data do
- if done[name] then
- -- prevent recursion due to wildcards
- elseif find(name,w) then
- done[name] = true
- for i=1,#functions do
- functions[i](value)
+ if type(what) ~= "table" then
+ return
+ end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
+ else
+ value = is_boolean(value,value)
+ end
+ w = topattern(w,true,true)
+ for name, functions in next, data do
+ if done[name] then
+ -- prevent recursion due to wildcards
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
end
- functions.value = value
end
end
end
end
local function reset(t)
- for name, functions in next, t.data do
- for i=1,#functions do
- functions[i](false)
+ local data = t.data
+ if not data.frozen then
+ for name, functions in next, data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value = false
end
- functions.value = false
end
end
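-- Illustration of the wildcard handling in set() above: the key is turned into
-- an anchored pattern by topattern, so one call can switch a whole group:
trackers.enable("fonts.*")     -- every registered tracker whose name starts with "fonts."
directives.disable("system.*") -- same mechanism for directives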
@@ -132,15 +149,16 @@ function setters.register(t,what,...)
functions = { }
data[what] = functions
if trace_initialize then
- t.report("defining %s",what)
+ t.report("defining %a",what)
end
end
local default = functions.default -- can be set from cnf file
- for _, fnc in next, { ... } do
+ for i=1,select("#",...) do
+ local fnc = select(i,...)
local typ = type(fnc)
if typ == "string" then
if trace_initialize then
- t.report("coupling %s to %s",what,fnc)
+ t.report("coupling %a to %a",what,fnc)
end
local s = fnc -- else wrong reference
fnc = function(value) set(t,s,value) end
@@ -205,7 +223,7 @@ function setters.show(t)
local value, default, modules = functions.value, functions.default, #functions
value = value == nil and "unset" or tostring(value)
default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
end
end
t.report()
@@ -218,26 +236,33 @@ end
local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
-local function report(setter,...)
- local report = logs and logs.report
- if report then
- report(setter.name,...)
- else -- fallback, as this module is loaded before the logger
- write_nl(format("%-15s : %s\n",setter.name,format(...)))
- end
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+
+local function default(setter,name)
+ local d = setter.data[name]
+ return d and d.default
end
-function setters.new(name)
+local function value(setter,name)
+ local d = setter.data[name]
+ return d and (d.value or d.default)
+end
+
+function setters.new(name) -- we could use foo:bar syntax (but not used that often)
local setter -- we need to access it in setter itself
setter = {
data = allocate(), -- indexed, but also default and value fields
name = name,
- report = function(...) report (setter,...) end,
- enable = function(...) enable (setter,...) end,
- disable = function(...) disable (setter,...) end,
- register = function(...) register(setter,...) end,
- list = function(...) list (setter,...) end,
- show = function(...) show (setter,...) end,
+ report = function(...) setters.report (setter,...) end,
+ enable = function(...) enable (setter,...) end,
+ disable = function(...) disable (setter,...) end,
+ register = function(...) register(setter,...) end,
+ list = function(...) list (setter,...) end,
+ show = function(...) show (setter,...) end,
+ default = function(...) return default (setter,...) end,
+ value = function(...) return value (setter,...) end,
}
data[name] = setter
return setter
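-- Sketch of the two new per-setter accessors returned by setters.new; the key
-- "system.nostatistics" is just an example of a directive registered elsewhere:
local v = directives.value  ("system.nostatistics") -- current value, else the default
local d = directives.default("system.nostatistics") -- default (e.g. set from the cnf file)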
@@ -247,9 +272,9 @@ trackers = setters.new("trackers")
directives = setters.new("directives")
experiments = setters.new("experiments")
-local t_enable, t_disable, t_report = trackers .enable, trackers .disable, trackers .report
-local d_enable, d_disable, d_report = directives .enable, directives .disable, directives .report
-local e_enable, e_disable, e_report = experiments.enable, experiments.disable, experiments.report
+local t_enable, t_disable = trackers .enable, trackers .disable
+local d_enable, d_disable = directives .enable, directives .disable
+local e_enable, e_disable = experiments.enable, experiments.disable
-- nice trick: we overload two of the directives related functions with variants that
-- do tracing (itself using a tracker) .. proof of concept
@@ -259,28 +284,28 @@ local trace_experiments = false local trace_experiments = false trackers.regist
function directives.enable(...)
if trace_directives then
- d_report("enabling: %s",concat({...}," "))
+ directives.report("enabling: % t",{...})
end
d_enable(...)
end
function directives.disable(...)
if trace_directives then
- d_report("disabling: %s",concat({...}," "))
+ directives.report("disabling: % t",{...})
end
d_disable(...)
end
function experiments.enable(...)
if trace_experiments then
- e_report("enabling: %s",concat({...}," "))
+ experiments.report("enabling: % t",{...})
end
e_enable(...)
end
function experiments.disable(...)
if trace_experiments then
- e_report("disabling: %s",concat({...}," "))
+ experiments.report("disabling: % t",{...})
end
e_disable(...)
end
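-- Small check of the "% t" directive used above; as the replaced concat calls
-- suggest, it renders a table as a space separated list:
print(string.formatters["enabling: % t"] { "fonts.loading", "metapost.graphics" })
-- enabling: fonts.loading metapost.graphics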
@@ -288,26 +313,44 @@ end
-- a useful example
directives.register("system.nostatistics", function(v)
- statistics.enable = not v
+ if statistics then
+ statistics.enable = not v
+ else
+ -- forget about it
+ end
end)
directives.register("system.nolibraries", function(v)
- libraries = nil -- we discard this tracing for security
+ if libraries then
+ libraries = nil -- we discard this tracing for security
+ else
+ -- no libraries defined
+ end
end)
-- experiment
-local flags = environment and environment.engineflags
+if environment then
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
- end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ -- The engineflags are known earlier than environment.arguments but maybe we
+    -- need to handle them both as the latter are parsed differently. The c: prefix
+ -- is used by mtx-context to isolate the flags from those that concern luatex.
+
+ local engineflags = environment.engineflags
+
+ if engineflags then
+ local list = engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list) == "string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
+ -- t_enable(list)
+ end
+ local list = engineflags["c:directives"] or engineflags["directives"]
+ if type(list) == "string" then
+ setters.initialize("commandline flags","directives", settings_to_hash(list))
+ -- d_enable(list)
+ end
end
+
end
-- here
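-- Illustrative only (keys and values are assumptions, not part of the patch):
-- the shape of the table this block expects when mtx-context forwards flags.
local engineflags = {
    ["c:trackers"]   = "fonts.loading,metapost.*",
    ["c:directives"] = "system.nostatistics",
}
setters.initialize("commandline flags","trackers",
    utilities.parsers.settings_to_hash(engineflags["c:trackers"]))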
diff --git a/Master/texmf-dist/tex/context/base/trac-tex.lua b/Master/texmf-dist/tex/context/base/trac-tex.lua
index 7920bd1cb72..7e340607313 100644
--- a/Master/texmf-dist/tex/context/base/trac-tex.lua
+++ b/Master/texmf-dist/tex/context/base/trac-tex.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['trac-hsh'] = {
+if not modules then modules = { } end modules ['trac-tex'] = {
version = 1.001,
comment = "companion to trac-deb.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -58,6 +58,7 @@ local function saveusedfilesintrees(format)
local data = {
jobname = environment.jobname or "?",
version = environment.version or "?",
+ kind = environment.kind or "?",
files = resolvers.instance.foundintrees
}
local filename = file.replacesuffix(environment.jobname or "context-job",'jlg')
diff --git a/Master/texmf-dist/tex/context/base/trac-tim.lua b/Master/texmf-dist/tex/context/base/trac-tim.lua
index de7ceca4610..15ac9bf1bcf 100644
--- a/Master/texmf-dist/tex/context/base/trac-tim.lua
+++ b/Master/texmf-dist/tex/context/base/trac-tim.lua
@@ -11,60 +11,39 @@ local concat, sort = table.concat, table.sort
local next, tonumber = next, tonumber
moduledata = moduledata or { }
-moduledata.progress = moduledata.progress or { }
-local progress = moduledata.progress
+local progress = moduledata.progress or { }
+moduledata.progress = progress
-progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress"
+local report_timing = logs.reporter("timing")
-local params = {
- "cs_count",
- "dyn_used",
- "elapsed_time",
- "luabytecode_bytes",
- "luastate_bytes",
- "max_buf_stack",
- "obj_ptr",
- "pdf_mem_ptr",
- "pdf_mem_size",
- "pdf_os_cntr",
--- "pool_ptr", -- obsolete
- "str_ptr",
-}
+if not nodes then nodes = { } end -- when loaded in mtxrun
--- storage
+progress.parameters = nodes and nodes.snapshots.getparameters
+progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress"
-local last = os.clock()
-local data = { }
+-- storage
-function progress.save(name)
- io.savedata((name or progress.defaultfilename) .. ".lut",table.serialize(data,true))
- data = { }
+function progress.store()
+ nodes.snapshots.takesample()
end
-function progress.store()
- local c = os.clock()
- local t = {
- elapsed_time = c - last,
- node_memory = nodes.pool.usage(),
- }
- for k, v in next, params do
- if status[v] then t[v] = status[v] end
- end
- data[#data+1] = t
- last = c
+function progress.save(name)
+ local filename = (name or progress.defaultfilename) .. ".lut"
+ report_timing("saving data in %a",filename)
+ table.save(filename,nodes.snapshots.getsamples())
+ nodes.snapshots.resetsamples()
end
-- conversion
-local processed = { }
+local processed = { }
+local parameters = progress.parameters()
local function convert(name)
- name = ((name ~= "") and name) or progress.defaultfilename
+ name = name ~= "" and name or progress.defaultfilename
if not processed[name] then
local names, top, bot, pages, paths, keys = { }, { }, { }, 0, { }, { }
- local data = io.loaddata(name .. ".lut")
- if data then data = loadstring(data) end
- if data then data = data() end
+ local data = table.load(name .. ".lut")
if data then
pages = #data
if pages > 1 then
@@ -109,12 +88,12 @@ local function convert(name)
delta = factor/delta
end
for k=1,#s do
- s[k] = "(" .. k .. "," .. (s[k]-b)*delta .. ")"
+ s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
end
paths[tagname] = concat(s,"--")
end
- for _, tag in next, params do
- path(tag)
+ for i=1,#parameters do
+ path(parameters[i])
end
for tag, _ in next, keys do
path("node_memory",tag)
@@ -157,6 +136,3 @@ function progress.nodes(name)
return convert(name).names or { }
end
-function progress.parameters(name)
- return params -- shared
-end
diff --git a/Master/texmf-dist/tex/context/base/trac-vis.lua b/Master/texmf-dist/tex/context/base/trac-vis.lua
new file mode 100644
index 00000000000..88d2c886e47
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-vis.lua
@@ -0,0 +1,913 @@
+if not modules then modules = { } end modules ['trac-vis'] = {
+ version = 1.001,
+ comment = "companion to trac-vis.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local string, number, table = string, number, table
+local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex
+local type = type
+local format = string.format
+local formatters = string.formatters
+
+-- This module started out in the early days of mkiv and luatex with
+-- visualizing kerns related to fonts. In the process of cleaning up the
+-- visual debugger code it made sense to integrate some other code that
+-- I had lying around and to replace the old supp-vis debugging code. As
+-- only a subset of the old visual debugger makes sense it has become a
+-- different implementation. Some of the m-visual functionality will also
+-- be ported. The code is rather trivial. The caching is not really needed
+-- but saves up to 50% of the time needed to add visualization. Of course
+-- the overall runtime is larger because of color and layer processing in
+-- the backend (which can take several times as much), so the runtime with
+-- full visualization enabled is somewhat larger. In practice this will
+-- never happen unless one is demoing.
+
+-- We could use pdf literals and re stream codes but it's not worth the
+-- trouble because we would end up in a color etc mess. Maybe one day I'll
+-- make a nodeinjection variant.
+
+-- todo: global switch (so no attributes)
+-- todo: maybe also xoffset, yoffset of glyph
+-- todo: inline concat (more efficient)
+
+local nodecodes = nodes.nodecodes
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local whatsit_code = nodecodes.whatsit
+local user_code = nodecodes.user
+
+local kerncodes = nodes.kerncodes
+local font_kern_code = kerncodes.fontkern
+local user_kern_code = kerncodes.userkern
+
+local gluecodes = nodes.gluecodes
+local cleaders_code = gluecodes.cleaders
+local userskip_code = gluecodes.userskip
+local space_code = gluecodes.space
+local xspace_code = gluecodes.xspace
+local leftskip_code = gluecodes.leftskip
+local rightskip_code = gluecodes.rightskip
+
+local whatsitcodes = nodes.whatsitcodes
+
+local concat_nodes = nodes.concat
+local hpack_nodes = node.hpack
+local vpack_nodes = node.vpack
+local hpack_string = typesetters.hpack
+local fast_hpack_string = typesetters.fast_hpack
+local copy_node = node.copy
+local copy_list = node.copy_list
+local free_node = node.free
+local free_node_list = node.flush_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local fast_hpack = nodes.fasthpack
+local traverse_nodes = node.traverse
+
+local tex_attribute = tex.attribute
+local tex_box = tex.box
+local unsetvalue = attributes.unsetvalue
+
+local current_font = font.current
+
+local exheights = fonts.hashes.exheights
+local emwidths = fonts.hashes.emwidths
+local pt_factor = number.dimenfactors.pt
+
+local nodepool = nodes.pool
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+local new_penalty = nodepool.penalty
+
+local tracers = nodes.tracers
+local visualizers = nodes.visualizers
+
+local setcolor = tracers.colors.set
+local setlistcolor = tracers.colors.setlist
+local settransparency = tracers.transparencies.set
+local setlisttransparency = tracers.transparencies.setlist
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local a_visual = attributes.private("visual")
+local a_fontkern = attributes.private("fontkern")
+local a_layer = attributes.private("viewerlayer")
+
+local hasbit = number.hasbit
+local bit = number.bit
+local setbit = number.setbit
+local clearbit = number.clearbit
+
+local trace_hbox
+local trace_vbox
+local trace_vtop
+local trace_kern
+local trace_glue
+local trace_penalty
+local trace_fontkern
+local trace_strut
+local trace_whatsit
+local trace_user
+
+local report_visualize = logs.reporter("visualize")
+
+local modes = {
+ hbox = 1,
+ vbox = 2,
+ vtop = 4,
+ kern = 8,
+ glue = 16,
+ penalty = 32,
+ fontkern = 64,
+ strut = 128,
+ whatsit = 256,
+ glyph = 512,
+ simple = 1024,
+ simplehbox = 1024 + 1,
+ simplevbox = 1024 + 2,
+ simplevtop = 1024 + 4,
+ user = 2048,
+}
+
+local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" }
+local modes_boxes = { "hbox", "vbox" }
+local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" }
+
+local usedfont, exheight, emwidth
+local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user
+
+local enabled = false
+local layers = { }
+
+local preset_boxes = modes.hbox + modes.vbox
+local preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty
+local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user
+
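-- Sketch of the bit arithmetic used below; setbit/hasbit are the ConTeXt number
-- helpers imported above and the values come from the modes table:
local a = 0
a = setbit(a,modes.hbox + modes.vbox) -- the same combination as preset_boxes
print(hasbit(a,modes.glue))           -- false: glue tracing not requested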
+function visualizers.setfont(id)
+ usedfont = id or current_font()
+ exheight = exheights[usedfont]
+ emwidth = emwidths[usedfont]
+end
+
+-- we can preset a bunch of bits
+
+local function enable()
+ if not usedfont then
+ -- we use a narrow monospaced font
+ visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") })
+ end
+ for mode, value in next, modes do
+ local tag = formatters["v_%s"](mode)
+ attributes.viewerlayers.define {
+ tag = tag,
+ title = formatters["visualizer %s"](mode),
+ visible = "start",
+ editable = "yes",
+ printable = "yes"
+ }
+ layers[mode] = attributes.viewerlayers.register(tag,true)
+ end
+ l_hbox = layers.hbox
+ l_vbox = layers.vbox
+ l_vtop = layers.vtop
+ l_glue = layers.glue
+ l_kern = layers.kern
+ l_penalty = layers.penalty
+ l_fontkern = layers.fontkern
+ l_strut = layers.strut
+ l_whatsit = layers.whatsit
+ l_glyph = layers.glyph
+ l_user = layers.user
+ nodes.tasks.enableaction("shipouts","nodes.visualizers.handler")
+ report_visualize("enabled")
+ enabled = true
+ tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end
+end
+
+local function setvisual(n,a,what) -- this will become more efficient when we have the bit lib linked in
+ if not n or n == "reset" then
+ return unsetvalue
+ elseif n == "makeup" then
+ if not a or a == 0 or a == unsetvalue then
+ a = preset_makeup
+ else
+ a = setbit(a,preset_makeup)
+ -- for i=1,#modes_makeup do
+ -- a = setvisual(modes_makeup[i],a)
+ -- end
+ end
+ elseif n == "boxes" then
+ if not a or a == 0 or a == unsetvalue then
+ a = preset_boxes
+ else
+ a = setbit(a,preset_boxes)
+ -- for i=1,#modes_boxes do
+ -- a = setvisual(modes_boxes[i],a)
+ -- end
+ end
+ elseif n == "all" then
+ if what == false then
+ return unsetvalue
+ elseif not a or a == 0 or a == unsetvalue then
+ a = preset_all
+ else
+ a = setbit(a,preset_all)
+ -- for i=1,#modes_all do
+ -- a = setvisual(modes_all[i],a)
+ -- end
+ end
+ else
+ local m = modes[n]
+ if not m then
+ -- go on
+ elseif a == unsetvalue then
+ if what == false then
+ return unsetvalue
+ else
+ -- a = setbit(0,m)
+ a = m
+ end
+ elseif what == false then
+ a = clearbit(a,m)
+ elseif not a or a == 0 then
+ a = m
+ else
+ a = setbit(a,m)
+ end
+ end
+ if not a or a == 0 or a == unsetvalue then
+ return unsetvalue
+ elseif not enabled then -- must happen at runtime (as we don't store layers yet)
+ enable()
+ end
+ return a
+end
+
+function visualizers.setvisual(n)
+ tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual])
+end
+
+function visualizers.setlayer(n)
+ tex_attribute[a_layer] = layers[n] or unsetvalue
+end
+
+commands.setvisual = visualizers.setvisual
+commands.setlayer = visualizers.setlayer
+
+function commands.visual(n)
+ context(setvisual(n))
+end
+
+local function set(mode,v)
+ tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v)
+end
+
+for mode, value in next, modes do
+ trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
+end
+
+trackers.register("visualizers.reset", function(v) set("reset", v) end)
+trackers.register("visualizers.all", function(v) set("all", v) end)
+trackers.register("visualizers.makeup",function(v) set("makeup",v) end)
+trackers.register("visualizers.boxes", function(v) set("boxes", v) end)
+
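-- Usage sketch: once registered above, visualizers are switched on like any
-- other tracker, from Lua or with \enabletrackers at the TeX end:
trackers.enable("visualizers.fontkern")
trackers.enable("visualizers.glue")
-- trackers.enable("visualizers.all") -- everything, including glyphs and whatsits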
+local c_positive = "trace:b"
+local c_negative = "trace:r"
+local c_zero = "trace:g"
+local c_text = "trace:s"
+local c_space = "trace:y"
+local c_skip_a = "trace:c"
+local c_skip_b = "trace:m"
+local c_glyph = "trace:o"
+
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+local c_zero_d = "trace:dg"
+local c_text_d = "trace:ds"
+local c_space_d = "trace:dy"
+local c_skip_a_d = "trace:dc"
+local c_skip_b_d = "trace:dm"
+local c_glyph_d = "trace:do"
+
+local function sometext(str,layer,color)
+ local text = fast_hpack_string(str,usedfont)
+ local size = text.width
+ local rule = new_rule(size,2*exheight,exheight/2)
+ local kern = new_kern(-size)
+ setcolor(rule,color)
+ local info = concat_nodes {
+ rule,
+ kern,
+ text,
+ }
+ setlisttransparency(info,c_zero)
+ info = fast_hpack(info)
+ if layer then
+ info[a_layer] = layer
+ end
+ local width = info.width
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ return info, width
+end
+
+local f_cache = { }
+
+local function fontkern(head,current)
+ local kern = current.kern
+ local info = f_cache[kern]
+ if info then
+ -- print("hit fontkern")
+ else
+ local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
+ local rule = new_rule(emwidth/10,6*exheight,2*exheight)
+ local list = text.list
+ if kern > 0 then
+ setlistcolor(list,c_positive_d)
+ elseif kern < 0 then
+ setlistcolor(list,c_negative_d)
+ else
+ setlistcolor(list,c_zero_d)
+ end
+ setlisttransparency(list,c_text_d)
+ settransparency(rule,c_text_d)
+ text.shift = -5 * exheight
+ info = concat_nodes {
+ rule,
+ text,
+ }
+ info = fast_hpack(info)
+ info[a_layer] = l_fontkern
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ f_cache[kern] = info
+ end
+ head = insert_node_before(head,current,copy_list(info))
+ return head, current
+end
+
+local w_cache = { }
+
+local tags = {
+ open = "FIC",
+ write = "FIW",
+ close = "FIC",
+ special = "SPE",
+ localpar = "PAR",
+ dir = "DIR",
+ pdfliteral = "PDF",
+ pdfrefobj = "PDF",
+ pdfrefxform = "PDF",
+ pdfrefximage = "PDF",
+ pdfannot = "PDF",
+ pdfstartlink = "PDF",
+ pdfendlink = "PDF",
+ pdfdest = "PDF",
+ pdfthread = "PDF",
+ pdfstartthread = "PDF",
+ pdfendthread = "PDF",
+ pdfsavepos = "PDF",
+ pdfthreaddata = "PDF",
+ pdflinkdata = "PDF",
+ pdfcolorstack = "PDF",
+ pdfsetmatrix = "PDF",
+ pdfsave = "PDF",
+ pdfrestore = "PDF",
+ latelua = "LUA",
+ closelua = "LUA",
+ cancelboundary = "CBD",
+ userdefined = "USR",
+}
+
+local function whatsit(head,current)
+ local what = current.subtype
+ local info = w_cache[what]
+ if info then
+ -- print("hit whatsit")
+ else
+ local tag = whatsitcodes[what]
+ info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont)
+ info[a_layer] = l_whatsit
+ w_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local function user(head,current)
+ local what = current.subtype
+ local info = w_cache[what]
+ if info then
+ -- print("hit user")
+ else
+ info = sometext(formatters["U:%s"](what),usedfont)
+ info[a_layer] = l_user
+ w_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local b_cache = { }
+
+local function ruledbox(head,current,vertical,layer,what,simple)
+ local wd = current.width
+ if wd ~= 0 then
+ local ht, dp = current.height, current.depth
+ local next, prev = current.next, current.prev
+ current.next, current.prev = nil, nil
+ local linewidth = emwidth/10
+ local baseline, baseskip
+ if dp ~= 0 and ht ~= 0 then
+ if wd > 20*linewidth then
+ baseline = b_cache.baseline
+ if not baseline then
+ -- due to an optimized leader color/transparency we need to set the glue node in order
+ -- to trigger this mechanism
+ local leader = concat_nodes {
+ new_glue(2*linewidth), -- 2.5
+ new_rule(6*linewidth,linewidth,0), -- 5.0
+ new_glue(2*linewidth), -- 2.5
+ }
+ -- setlisttransparency(leader,c_text)
+ leader = fast_hpack(leader)
+ -- setlisttransparency(leader,c_text)
+ baseline = new_glue(0)
+ baseline.leader = leader
+ baseline.subtype = cleaders_code
+ baseline.spec.stretch = 65536
+ baseline.spec.stretch_order = 2
+ setlisttransparency(baseline,c_text)
+ b_cache.baseline = baseline
+ end
+ baseline = copy_list(baseline)
+ baseline = fast_hpack(baseline,wd-2*linewidth)
+ -- or new hpack node, set head and also:
+ -- baseline.width = wd
+ -- baseline.glue_set = wd/65536
+ -- baseline.glue_order = 2
+ -- baseline.glue_sign = 1
+ baseskip = new_kern(-wd+linewidth)
+ else
+ baseline = new_rule(wd-2*linewidth,linewidth,0)
+ baseskip = new_kern(-wd+2*linewidth)
+ end
+ end
+ local this
+ if not simple then
+ this = b_cache[what]
+ if not this then
+ local text = fast_hpack_string(what,usedfont)
+ this = concat_nodes {
+ new_kern(-text.width),
+ text,
+ }
+ setlisttransparency(this,c_text)
+ this = fast_hpack(this)
+ this.width = 0
+ this.height = 0
+ this.depth = 0
+ b_cache[what] = this
+ end
+ end
+ local info = concat_nodes {
+ this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatits)
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-2*linewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-2*linewidth,ht,-ht+linewidth),
+ baseskip,
+ baseline,
+ }
+ setlisttransparency(info,c_text)
+ info = fast_hpack(info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = layer
+ local info = concat_nodes {
+ current,
+ new_kern(-wd),
+ info,
+ }
+ info = fast_hpack(info,wd)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ if next then
+ info.next = next
+ next.prev = info
+ end
+ if prev then
+ info.prev = prev
+ prev.next = info
+ end
+ if head == current then
+ return info, info
+ else
+ return head, info
+ end
+ else
+ return head, current
+ end
+end
+
+local function ruledglyph(head,current)
+ local wd = current.width
+ if wd ~= 0 then
+ local ht, dp = current.height, current.depth
+ local next, prev = current.next, current.prev
+ current.next, current.prev = nil, nil
+ local linewidth = emwidth/20
+ local baseline
+ if dp ~= 0 and ht ~= 0 then
+ baseline = new_rule(wd-2*linewidth,linewidth,0)
+ end
+ local doublelinewidth = 2*linewidth
+ local info = concat_nodes {
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth),
+ new_kern(-wd+doublelinewidth),
+ baseline,
+ }
+ setlistcolor(info,c_glyph)
+ setlisttransparency(info,c_glyph_d)
+ info = fast_hpack(info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = l_glyph
+ local info = concat_nodes {
+ current,
+ new_kern(-wd),
+ info,
+ }
+ info = fast_hpack(info)
+ info.width = wd
+ if next then
+ info.next = next
+ next.prev = info
+ end
+ if prev then
+ info.prev = prev
+ prev.next = info
+ end
+ if head == current then
+ return info, info
+ else
+ return head, info
+ end
+ else
+ return head, current
+ end
+end
+
+local g_cache = { }
+
+local tags = {
+ -- userskip = "US",
+ lineskip = "LS",
+ baselineskip = "BS",
+ parskip = "PS",
+ abovedisplayskip = "DA",
+ belowdisplayskip = "DB",
+ abovedisplayshortskip = "SA",
+ belowdisplayshortskip = "SB",
+ leftskip = "LS",
+ rightskip = "RS",
+ topskip = "TS",
+ splittopskip = "ST",
+ tabskip = "AS",
+ spaceskip = "SS",
+ xspaceskip = "XS",
+ parfillskip = "PF",
+ thinmuskip = "MS",
+ medmuskip = "MM",
+ thickmuskip = "ML",
+ leaders = "NL",
+ cleaders = "CL",
+ xleaders = "XL",
+ gleaders = "GL",
+ -- true = "VS",
+ -- false = "HS",
+}
+
+local function ruledglue(head,current,vertical)
+ local spec = current.spec
+ local width = spec.width
+ local subtype = current.subtype
+ local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
+ local info = g_cache[amount]
+ if info then
+ -- print("glue hit")
+ else
+ if subtype == space_code or subtype == xspace_code then -- not yet all space
+ info = sometext(amount,l_glue,c_space)
+ elseif subtype == leftskip_code or subtype == rightskip_code then
+ info = sometext(amount,l_glue,c_skip_a)
+ elseif subtype == userskip_code then
+ if width > 0 then
+ info = sometext(amount,l_glue,c_positive)
+ elseif width < 0 then
+ info = sometext(amount,l_glue,c_negative)
+ else
+ info = sometext(amount,l_glue,c_zero)
+ end
+ else
+ info = sometext(amount,l_glue,c_skip_b)
+ end
+ g_cache[amount] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local k_cache = { }
+
+local function ruledkern(head,current,vertical)
+ local kern = current.kern
+ local info = k_cache[kern]
+ if info then
+ -- print("kern hit")
+ else
+ local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor)
+ if kern > 0 then
+ info = sometext(amount,l_kern,c_positive)
+ elseif kern < 0 then
+ info = sometext(amount,l_kern,c_negative)
+ else
+ info = sometext(amount,l_kern,c_zero)
+ end
+ k_cache[kern] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local p_cache = { }
+
+local function ruledpenalty(head,current,vertical)
+ local penalty = current.penalty
+ local info = p_cache[penalty]
+ if info then
+ -- print("penalty hit")
+ else
+ local amount = formatters["%s:%s"](vertical and "VP" or "HP",penalty)
+ if penalty > 0 then
+ info = sometext(amount,l_penalty,c_positive)
+ elseif penalty < 0 then
+ info = sometext(amount,l_penalty,c_negative)
+ else
+ info = sometext(amount,l_penalty,c_zero)
+ end
+ p_cache[penalty] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local function visualize(head,vertical)
+ local trace_hbox = false
+ local trace_vbox = false
+ local trace_vtop = false
+ local trace_kern = false
+ local trace_glue = false
+ local trace_penalty = false
+ local trace_fontkern = false
+ local trace_strut = false
+ local trace_whatsit = false
+ local trace_glyph = false
+ local trace_simple = false
+ local trace_user = false
+ local current = head
+ local prev_trace_fontkern = nil
+ local attr = unsetvalue
+ while current do
+ local id = current.id
+ local a = current[a_visual] or unsetvalue
+ if a ~= attr then
+ prev_trace_fontkern = trace_fontkern
+ if a == unsetvalue then
+ trace_hbox = false
+ trace_vbox = false
+ trace_vtop = false
+ trace_kern = false
+ trace_glue = false
+ trace_penalty = false
+ trace_fontkern = false
+ trace_strut = false
+ trace_whatsit = false
+ trace_glyph = false
+ trace_simple = false
+ trace_user = false
+ else -- dead slow:
+ trace_hbox = hasbit(a, 1)
+ trace_vbox = hasbit(a, 2)
+ trace_vtop = hasbit(a, 4)
+ trace_kern = hasbit(a, 8)
+ trace_glue = hasbit(a, 16)
+ trace_penalty = hasbit(a, 32)
+ trace_fontkern = hasbit(a, 64)
+ trace_strut = hasbit(a, 128)
+ trace_whatsit = hasbit(a, 256)
+ trace_glyph = hasbit(a, 512)
+ trace_simple = hasbit(a,1024)
+ trace_user = hasbit(a,2048)
+ end
+ attr = a
+ end
+ if trace_strut then
+ current[a_layer] = l_strut
+ elseif id == glyph_code then
+ if trace_glyph then
+ head, current = ruledglyph(head,current)
+ end
+ elseif id == disc_code then
+ if trace_glyph then
+ local pre = current.pre
+ if pre then
+ current.pre = ruledglyph(pre,pre)
+ end
+ local post = current.post
+ if post then
+ current.post = ruledglyph(post,post)
+ end
+ local replace = current.replace
+ if replace then
+ current.replace = ruledglyph(replace,replace)
+ end
+ end
+ elseif id == kern_code then
+ local subtype = current.subtype
+ -- tricky ... we don't copy the trace attribute in node-inj (yet)
+ if subtype == font_kern_code or current[a_fontkern] then
+ if trace_fontkern or prev_trace_fontkern then
+ head, current = fontkern(head,current)
+ end
+ elseif subtype == user_kern_code then
+ if trace_kern then
+ head, current = ruledkern(head,current,vertical)
+ end
+ end
+ elseif id == glue_code then
+ local content = current.leader
+ if content then
+ current.leader = visualize(content,false)
+ elseif trace_glue then
+ head, current = ruledglue(head,current,vertical)
+ end
+ elseif id == penalty_code then
+ if trace_penalty then
+ head, current = ruledpenalty(head,current,vertical)
+ end
+ elseif id == disc_code then
+ current.pre = visualize(current.pre)
+ current.post = visualize(current.post)
+ current.replace = visualize(current.replace)
+ elseif id == hlist_code then
+ local content = current.list
+ if content then
+ current.list = visualize(content,false)
+ end
+ if trace_hbox then
+ head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple)
+ end
+ elseif id == vlist_code then
+ local content = current.list
+ if content then
+ current.list = visualize(content,true)
+ end
+ if trace_vtop then
+ head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple)
+ elseif trace_vbox then
+ head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple)
+ end
+ elseif id == whatsit_code then
+ if trace_whatsit then
+ head, current = whatsit(head,current)
+ end
+ elseif id == user_code then
+ if trace_whatsit then
+ head, current = user(head,current)
+ end
+ end
+ current = current.next
+ end
+ return head
+end
+
+local function freed(cache)
+ local n = 0
+ for k, v in next, cache do
+ free_node_list(v)
+ n = n + 1
+ end
+ if n == 0 then
+ return 0, cache
+ else
+ return n, { }
+ end
+end
+
+local function cleanup()
+ local hf, ng, np, nk, nw
+    local nf, ng, np, nk, nw, nb
+ ng, g_cache = freed(g_cache)
+ np, p_cache = freed(p_cache)
+ nk, k_cache = freed(k_cache)
+ nw, w_cache = freed(w_cache)
+ nb, b_cache = freed(b_cache)
+ -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
+end
+
+function visualizers.handler(head)
+ if usedfont then
+ starttiming(visualizers)
+ -- local l = tex_attribute[a_layer]
+ -- local v = tex_attribute[a_visual]
+ -- tex_attribute[a_layer] = unsetvalue
+ -- tex_attribute[a_visual] = unsetvalue
+ head = visualize(head)
+ -- tex_attribute[a_layer] = l
+ -- tex_attribute[a_visual] = v
+ -- -- cleanup()
+ stoptiming(visualizers)
+ end
+ return head, false
+end
+
+function visualizers.box(n)
+ tex_box[n].list = visualizers.handler(tex_box[n].list)
+end
+
+local last = nil
+local used = nil
+
+local mark = {
+ "trace:1", "trace:2", "trace:3",
+ "trace:4", "trace:5", "trace:6",
+ "trace:7",
+}
+
+local function markfonts(list)
+ for n in traverse_nodes(list) do
+ local id = n.id
+ if id == glyph_code then
+ local font = n.font
+ local okay = used[font]
+ if not okay then
+ last = last + 1
+ okay = mark[last]
+ used[font] = okay
+ end
+ setcolor(n,okay)
+ elseif id == hlist_code or id == vlist_code then
+ markfonts(n.list)
+ end
+ end
+end
+
+function visualizers.markfonts(list)
+ last, used = 0, { }
+    markfonts(type(list) == "number" and tex_box[list].list or list)
+end
+
+function commands.markfonts(n)
+ visualizers.markfonts(n)
+end
+
+statistics.register("visualization time",function()
+ if enabled then
+        cleanup() -- in case we don't do it each time
+ return format("%s seconds",statistics.elapsedtime(visualizers))
+ end
+end)
diff --git a/Master/texmf-dist/tex/context/base/trac-vis.mkiv b/Master/texmf-dist/tex/context/base/trac-vis.mkiv
index e906bb50dbb..fbc6ad6c452 100644
--- a/Master/texmf-dist/tex/context/base/trac-vis.mkiv
+++ b/Master/texmf-dist/tex/context/base/trac-vis.mkiv
@@ -1,7 +1,7 @@
%D \module
-%D [ file=trac-vis, % was core-vis,
-%D version=1996.06.01,
-%D title=\CONTEXT\ Tracking Macros,
+%D [ file=trac-vis, % replaces supp-vis plus some s-* modules
+%D version=2112.06.23, % 1996.10.21,
+%D title=\CONTEXT\ Tracing Macros,
%D subtitle=Visualization,
%D author=Hans Hagen,
%D date=\currentdate,
@@ -11,711 +11,191 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module adds some more visualization cues to the ones
-%D supplied in the support module.
+%D The old visual debugger dates from 1996 and started out as a joke. In practice
+%D it's not used that often although the \type {\ruledhbox} cum suis macros come
+%D in handy when writing code. In the process of luafication some additional
+%D tracing options were tested, for instance showing font kerns. As part of the
+%D \MKIV\ cleanup the decision was made to reimplement, reintegrate and reconsider
+%D all these features. The old code is gone and the new code will get extended
+%D when needed. We still provide placeholders for some old visualization commands
+%D but they might go away.
%D
-%D %\everypar dual character, \the\everypar and \everypar=
-%D %\hrule cannot be grabbed in advance, switches mode
-%D %\vrule cannot be grabbed in advance, switches mode
-%D %
-%D %\indent only explicit ones
-%D %\noindent only explicit ones
-%D %\par only explicit ones
+%D Control over what gets visualized happens with the tracker command:
%D
-%D %\leftskip only if explicit one
-%D %\rightskip only if explicit one
+%D \enabletrackers[visualizers.*]
+%D
+%D Possible values are: \type {fontkern}, \type {kern}, \type {glue}, \type
+%D {penalty}, \type {hbox}, \type {vbox}, \type {all}, \type {reset}, \type
+%D {makeup}, \type {whatsit}, \type {glyph}.
+%D
+%D In due time some special visualization of math will move here as well.
-\writestatus{loading}{ConTeXt Tracking Macros / Visualization}
+\writestatus{loading}{ConTeXt Tracing Macros / Visualization}
+
+\registerctxluafile{trac-vis}{1.001}
\unprotect
-%D \macros
-%D {indent, noindent, par}
-%D
-%D \TeX\ acts upon paragraphs. In mosts documents paragraphs
-%D are separated by empty lines, which internally are handled as
-%D \type{\par}. Paragraphs can be indented or not, depending on
-%D the setting of \type{\parindent}, the first token of a
-%D paragraph and/or user suppressed or forced indentation.
-%D
-%D Because the actual typesetting is based on both explicit
-%D user and implicit system actions, visualization is only
-%D possible for the user supplied \type{\indent},
-%D \type{\noindent}, and \type{\par}. Other
-%D 'clever' tricks will quite certainly lead to more failures
-%D than successes, so we only support these three explicit
-%D primitives and one macro:
-
-\def\showparagraphcue#1#2#3#4#5%
- {\bgroup
- \scratchdimen#1\relax
- \dontinterfere
- \dontcomplain
- \boxrulewidth5\testrulewidth
- #3#4\relax
- \setbox\scratchbox\normalhbox to \scratchdimen
- {#2{\ruledhbox to \scratchdimen
- {\vrule #5 20\testrulewidth \!!width \zeropoint
- \normalhss}}}%
- \smashbox\scratchbox
- \normalpenalty\plustenthousand
- \box\scratchbox
- \egroup}
-
-\def\ruledhanging
- {\ifdim\hangindent>\zeropoint
- \ifnum\hangafter<\zerocount
- \normalhbox
- {\boxrulewidth5\testrulewidth
- \setbox\scratchbox\ruledhbox to \hangindent
- {\scratchdimen\strutht
- \advance\scratchdimen \strutdp
- \vrule
- \!!width \zeropoint
- \!!height \zeropoint
- \!!depth -\hangafter\scratchdimen}%
- \normalhskip-\hangindent
- \smashbox\scratchbox
- \raise\strutht\box\scratchbox}%
- \fi
- \fi}
-
-\def\ruledparagraphcues
- {\bgroup
- \dontcomplain
- \normalhbox to \zeropoint
- {\ifdim\leftskip>\zeropoint\relax
- \showparagraphcue\leftskip\llap\relax\relax\!!depth
- \normalhskip-\leftskip
- \fi
- \ruledhanging
- \normalhskip\hsize
- \ifdim\rightskip>\zeropoint\relax
- \normalhskip-\rightskip
- \showparagraphcue\rightskip\relax\relax\relax\!!depth
- \fi}%
- \egroup}
-
-\def\ruledpar
- {\relax
- \ifhmode
- \showparagraphcue{40\testrulewidth}\relax\rightrulefalse\relax\!!height
- \fi
- \normalpar}
-
-\def\rulednoindent
- {\relax
- \normalnoindent
- \ruledparagraphcues
- \showparagraphcue{40\testrulewidth}\llap\leftrulefalse\relax\!!height}
-
-\def\ruledindent
- {\relax
- \normalnoindent
- \ruledparagraphcues
- \ifdim\parindent>\zeropoint
- \showparagraphcue\parindent\relax\relax\relax\!!height
- \else
- \showparagraphcue{40\testrulewidth}\llap\relax\relax\!!height
- \fi
- \normalhskip\parindent}
-
-\def\dontshowimplicits
- {\let\noindent \normalnoindent
- \let\indent \normalindent
- \let\par \normalpar}
-
-\def\showimplicits
- {\testrulewidth \defaulttestrulewidth
- \let\noindent \rulednoindent
- \let\indent \ruledindent
- \let\par \ruledpar}
-
-%D The next few||line examples show the four cues. Keep in
-%D mind that we only see them when we explicitly open or close
-%D a paragraph.
-%D
-%D \bgroup
-%D \def\voorbeeld#1%
-%D {#1Visualizing some \TeX\ primitives and Plain \TeX\
-%D macros can be very instructive, at least it is to me.
-%D Here we see {\tt\string#1} and {\tt\string\ruledpar} in
-%D action, while {\tt\string\parindent} equals
-%D {\tt\the\parindent}.\ruledpar}
-%D
-%D \showimplicits
-%D
-%D \voorbeeld \indent
-%D \voorbeeld \noindent
-%D
-%D \parindent=60pt
-%D
-%D \voorbeeld \indent
-%D \voorbeeld \noindent
-%D
-%D \startnarrower
-%D \voorbeeld \indent
-%D \voorbeeld \noindent
-%D \stopnarrower
-%D \egroup
-%D
-%D These examples also demonstrate the visualization of
-%D \type {\leftskip} and \type {\rightskip}. The macro
-%D \type {\nofruledbaselines} determines the number of lines
-%D shown.
-
-\newcounter\ruledbaselines
-
-\def\nofruledbaselines{3}
-
-\def\debuggertext#1%
- {\ifx\ttxx\undefined
- $\scriptscriptstyle#1$%
- \else
- {\ttxx#1}%
- \fi}
-
-\def\ruledbaseline
- {\vrule \!!width \zeropoint
- \bgroup
- \dontinterfere
- \doglobal\increment\ruledbaselines
- \scratchdimen\nofruledbaselines\baselineskip
- \setbox\scratchbox\normalvbox to 2\scratchdimen
- {\leaders
- \normalhbox
- {\strut
- \vrule
- \!!height \testrulewidth
- \!!depth \testrulewidth
- \!!width 120\points}
- \normalvfill}%
- \smashbox\scratchbox
- \advance\scratchdimen \strutheightfactor\baselineskip
- \setbox\scratchbox\normalhbox
- {\normalhskip -48\points
- \normalhbox to 24\points
- {\normalhss\debuggertext\ruledbaselines\normalhskip6\points}%
- \raise\scratchdimen\box\scratchbox}%
- \smashbox\scratchbox
- \box\scratchbox
- \egroup}
-
-\def\showbaselines
- {\testrulewidth\defaulttestrulewidth
- \EveryPar{\ruledbaseline}}
-
-%D \macros
-%D {showpagebuilder}
-%D
-%D The next tracing option probably is only of use to me and a
-%D few \CONTEXT\ hackers.
-
-\def\showpagebuilder
- {\EveryPar{\doshowpagebuilder}}
-
-\def\doshowpagebuilder
- {\strut\llap
- {\startcolor[blue]\vl
- \high{\infofont v:\the\vsize }\vl
- \high{\infofont g:\the\pagegoal }\vl
- \high{\infofont t:\the\pagetotal}\vl
- \stopcolor}}
-
-%D \macros
-%D {makecutbox, cuthbox, cutvbox, cutvtop}
-%D
-%D Although mainly used for marking the page, these macros can
-%D also serve local use.
-%D
-%D \startbuffer
-%D \setbox0=\vbox{a real \crlf vertical box} \makecutbox0
-%D \stopbuffer
-%D
-%D \typebuffer
-%D
-%D This marked \type{\vbox} shows up as:
-%D
-%D \startlinecorrection
-%D \getbuffer
-%D \stoplinecorrection
-%D
-%D The alternative macros are used as:
-%D
-%D \startbuffer
-%D \cuthbox{a made cut box}
-%D \stopbuffer
-%D
-%D \typebuffer
-%D
-%D This is typeset as:
-%D
-%D \startlinecorrection
-%D \getbuffer
-%D \stoplinecorrection
-%D
-%D By setting the next macros one can influence the length of
-%D the marks as well as the horizontal and vertical divisions.
-
-\newdimen\tractempwidth
-\newdimen\tractempheight
-\newdimen\tractempdepth
-
-\def \cutmarklength {2\bodyfontsize}
-
-\newcount\horizontalcutmarks \horizontalcutmarks = 2
-\newcount\verticalcutmarks \verticalcutmarks = 2
-\newcount\cutmarkoffset \cutmarkoffset = 1
-
-\let \cutmarksymbol \relax
-\let \cutmarktoptext \empty
-\let \cutmarkbottomtext \empty
-\let \cutmarkhoffset \empty
-\let \cutmarkvoffset \empty
-
-\def\horizontalcuts
- {\normalhbox to \tractempwidth
- {\dorecurse\horizontalcutmarks{\vrule\!!width\boxrulewidth\!!height\cutmarklength\normalhfill}%
- \unskip}}
-
-\def\verticalcuts
- {\normalvbox to \dimexpr\tractempheight+\tractempdepth\relax
- {\hsize\cutmarklength
- \dorecurse\verticalcutmarks{\vrule\!!height\boxrulewidth\!!width\hsize\normalvfill}%
- \unskip}}
-
-\def\baselinecuts
- {\ifdim\tractempdepth>\zeropoint
- \normalvbox to \dimexpr\tractempheight+\tractempdepth\relax
- {\hsize\dimexpr\cutmarklength/2\relax
- \normalvskip\zeropoint\!!plus\tractempheight
- \vrule\!!height\boxrulewidth\!!width\hsize
- \normalvskip\zeropoint\!!plus\tractempdepth}%
- \fi}
-
-\def\cutmarksymbols#1%
- {\normalhbox to \tractempwidth
- {\setbox\scratchbox\normalhbox to \cutmarklength
- {\normalhss\infofont\cutmarksymbol\normalhss}%
- \normalhss
- \normalvbox to \cutmarklength
- {\scratchdimen\dimexpr\cutmarklength/2\relax
- \scratchskip \ifx\cutmarkhoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkhoffset\fi
- \normalvss
- \hbox to \tractempwidth
- {\llap{\copy\scratchbox\normalhskip\scratchskip}%
- \normalhskip\scratchdimen\hss\infofont#1\hss\normalhskip\scratchdimen
- \rlap{\normalhskip\scratchskip\copy\scratchbox}}%
- \normalvss}%
- \normalhss}}
-
-\def\makecutbox#1% simpler with layers, todo
- {\tractempheight\ht#1%
- \tractempdepth \dp#1%
- \tractempwidth \wd#1%
- \setbox#1\normalhbox
- {\dontcomplain
- \forgetall
- \boxmaxdepth\maxdimen
- \offinterlineskip
- \scratchdimen\dimexpr\cutmarklength/2\relax
- \hsize\tractempwidth
- \setbox\scratchbox\normalvbox
- {\setbox\scratchbox\normalhbox{\horizontalcuts}%
- \scratchskip\ifx\cutmarkvoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkvoffset\fi
-% \normalvskip\dimexpr-\scratchskip-2\scratchdimen\relax
-% \copy\scratchbox
-% \normalvskip\scratchskip
- \tlap{\copy\scratchbox\normalvskip\scratchskip}%
- \hbox to \tractempwidth
- {\scratchskip\ifx\cutmarkhoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkhoffset\fi
- \setbox\scratchbox\normalhbox{\verticalcuts}%
- \llap{\copy\scratchbox\normalhskip\scratchskip}%
- \ifdim\tractempdepth=\zeropoint
- \normalhfill
- \else
- \bgroup
- \setbox\scratchbox\normalhbox{\baselinecuts}%
- \llap{\copy\scratchbox\normalhskip\scratchskip}%
- \normalhfill
- \rlap{\normalhskip\scratchskip\copy\scratchbox}%
- \egroup
- \fi
- \rlap{\normalhskip\scratchskip\copy\scratchbox}}%
-% \normalvskip\scratchskip
-% \copy\scratchbox}%
- \blap{\normalvskip\scratchskip\copy\scratchbox}}%
- \ht\scratchbox\tractempheight
- \dp\scratchbox\tractempdepth
- \wd\scratchbox\zeropoint
- \startcolor[\defaulttextcolor]%
- \box\scratchbox
- \ifx\cutmarksymbol\relax \else
- \setbox\scratchbox\normalvbox
- {\scratchskip\ifx\cutmarkvoffset\empty\cutmarkoffset\scratchdimen\else\cutmarkvoffset\fi
- \vskip-\scratchskip
- \vskip-\cutmarklength
- \normalhbox{\cutmarksymbols\cutmarktoptext}%
- \vskip\scratchskip
- \vskip\tractempheight
- \vskip\tractempdepth
- \vskip\scratchskip
- \normalhbox{\cutmarksymbols\cutmarkbottomtext}}%
- \ht\scratchbox\tractempheight
- \dp\scratchbox\tractempdepth
- \wd\scratchbox\zeropoint
- \box\scratchbox
- \fi
- \stopcolor
- \box#1}%
- \wd#1\tractempwidth
- \ht#1\tractempheight
- \dp#1\tractempdepth}
-
-\def\cuthbox{\normalhbox\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalhbox}
-\def\cutvbox{\normalvbox\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalvbox}
-\def\cutvtop{\normalvtop\bgroup\dowithnextbox{\makecutbox\nextbox\flushnextbox\egroup}\normalvtop}
-
-%D \macros
-%D {colormarkbox,rastermarkbox}
-%D
-%D This macro is used in the pagebody routine. No other use
-%D is advocated here.
-%D
-%D \starttyping
-%D \colormarkbox0
-%D \stoptyping
-
-\def\colormarkoffset{\cutmarkoffset}
-\def\colormarklength{\cutmarklength}
-
-\def\dodocolorrangeA#1%
- {\fastcolored[#1]{\hrule\!!width3em\!!height\scratchdimen\!!depth\zeropoint}}
-
-\def\docolorrangeA#1 #2 %
- {\vbox
- {\hsize3em % \scratchdimen
- \ifcase#1\or
- \dodocolorrangeA{c=#2}\or
- \dodocolorrangeA{m=#2}\or
- \dodocolorrangeA{y=#2}\or
- \dodocolorrangeA{m=#2,y=#2}\or
- \dodocolorrangeA{c=#2,y=#2}\or
- \dodocolorrangeA{c=#2,m=#2}\fi
- \ifdim\scratchdimen>1ex
- \vskip-\scratchdimen
- \vbox to \scratchdimen
- {\vss\hbox to 3em{\hss#2\hss}\vss}%
- \fi}}
-
-\def\colorrangeA#1%
- {\vbox
- {\startcolor[\s!white]%
- \scratchdimen\dimexpr(-\colormarklength*4+\tractempheight+\tractempdepth)/21\relax
- \offinterlineskip
- \docolorrangeA #1 1.00 \docolorrangeA #1 0.95
- \docolorrangeA #1 0.75
- \docolorrangeA #1 0.50
- \docolorrangeA #1 0.25 \docolorrangeA #1 0.05
- \docolorrangeA #1 0.00
- \stopcolor}}
-
-\def\docolorrangeB #1 #2 #3 #4 #5 %
- {\fastcolored
- [\c!c=#2,\c!m=#3,\c!y=#4,\c!k=#5]
- {\vrule\!!width\scratchdimen\!!height\colormarklength\!!depth\zeropoint}%
- \ifdim\scratchdimen>2em
- \hskip-\scratchdimen
- \vbox to \colormarklength
- {\vss\hbox to \scratchdimen{\hss#1\hss}\vss}%
- \fi}
-
-\def\colorrangeB
- {\hbox
- {\startcolor[\s!white]%
- \scratchdimen\dimexpr(-\colormarklength*\plustwo+\tractempwidth)/11\relax
- \docolorrangeB .5~C .5 0 0 0
- \docolorrangeB .5~M 0 .5 0 0
- \docolorrangeB .5~Y 0 0 .5 0
- \docolorrangeB .5~K 0 0 0 .5
- \docolorrangeB C 1 0 0 0
- \docolorrangeB G 1 0 1 0
- \docolorrangeB Y 0 0 1 0
- \docolorrangeB R 0 1 1 0
- \docolorrangeB M 0 1 0 0
- \docolorrangeB B 1 1 0 0
- \docolorrangeB K 0 0 0 1
- \stopcolor}}
-
-\def\docolorrangeC#1 %
- {\fastcolored
- [\c!s=#1]%
- {\vrule\!!width\scratchdimen\!!height\colormarklength\!!depth\zeropoint}%
- \ifdim\scratchdimen>2em
- \hskip-\scratchdimen
- \vbox to \colormarklength
- {\vss\hbox to \scratchdimen{\hss#1\hss}\vss}%
- \fi}
-
-\def\colorrangeC
- {\hbox
- {\startcolor[\s!white]%
- \scratchdimen\dimexpr(-\colormarklength*2+\tractempwidth)/14\relax
- \docolorrangeC 1 \docolorrangeC .95
- \docolorrangeC .9 \docolorrangeC .85
- \docolorrangeC .8 \docolorrangeC .75
- \docolorrangeC .7
- \docolorrangeC .6
- \docolorrangeC .5
- \docolorrangeC .4
- \docolorrangeC .3
- \docolorrangeC .2
- \docolorrangeC .1
- \docolorrangeC 0
- \stopcolor}}
-
-\def\docolormarkbox#1#2%
- {\tractempheight\ht#2%
- \tractempdepth \dp#2%
- \tractempwidth \wd#2%
- \setbox#2\hbox
- {\scratchdimen\dimexpr\colormarklength/2\relax
- \forgetall
- \ssxx
- \setbox\scratchbox\vbox
- {\offinterlineskip
- \vskip\dimexpr-\colormarkoffset\scratchdimen-2\scratchdimen\relax
- \ifcase#1\relax
- \vskip\dimexpr\colormarklength+\scratchdimen+\tractempheight\relax
- \else
- \hbox to \tractempwidth{\hss\hbox{\colorrangeB}\hss}%
- \vskip\colormarkoffset\scratchdimen
- \vbox to \tractempheight
- {\vss
- \hbox to \tractempwidth
- {\llap{\colorrangeA1\hskip\colormarkoffset\scratchdimen}\hfill
- \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA4}}%
- \vss
- \hbox to \tractempwidth
- {\llap{\colorrangeA2\hskip\colormarkoffset\scratchdimen}\hfill
- \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA5}}%
- \vss
- \hbox to \tractempwidth
- {\llap{\colorrangeA3\hskip\colormarkoffset\scratchdimen}\hfill
- \rlap{\hskip\colormarkoffset\scratchdimen\colorrangeA6}}%
- \vss}%
- \fi
- \vskip\colormarkoffset\scratchdimen
- \hbox to \tractempwidth
- {\hss\lower\tractempdepth\hbox{\colorrangeC}\hss}}%
- \ht\scratchbox\tractempheight
- \dp\scratchbox\tractempdepth
- \wd\scratchbox\zeropoint
- \box\scratchbox
- \box#2}%
- \wd#2\tractempwidth
- \ht#2\tractempheight
- \dp#2\tractempdepth}
-
-\def\colormarkbox {\docolormarkbox\plusone } % #1
-\def\rastermarkbox{\docolormarkbox\zerocount} % #1
-
-%D \macros
-%D {showwhatsits, dontshowwhatsits}
-%D
-%D \TEX\ has three so||called whatsits: \type {\mark}, \type
-%D {\write} and \type {\special}. The first one keeps track of
-%D the current state at page boundaries, the last two are used
-%D to communicate to the outside world. Due to the fact that
-%D especially \type {\write} is often used in conjunction with
-%D \type {\edef}, we can only safely support that one in \ETEX.
-%D
-%D \bgroup \showwhatsits \setupcolors[state=start]
-%D
-%D Whatsits show up \color[blue]{in color} and are
-%D characterized by their first character.\footnote [some note]
-%D {So we may encounter \type {w}, \type {m} and \type{s}.}
-%D They are \writestatus{dummy}{demo}\color[yellow]{stacked}.
-%D
-%D \egroup
-
-\newif\ifimmediatewrite
-
-\ifx\eTeXversion\undefined
-
- \let\showwhatsits \relax
- \let\dontshowwhatsits\relax
-
-\else
-
- \let\supernormalmark \normalmark % mark may already have been superseded
- \let\supernormalmarks \normalmarks % mark may already have been superseded
-
- \def\showwhatsits
- {\protected\def\normalmark {\visualwhatsit100+m\supernormalmark }%
- \protected\def\normalmarks{\visualwhatsit100+m\supernormalmarks}%
- \protected\def\special {\visualwhatsit0100s\normalspecial }%
- \protected\def\write {\visualwhatsit001-w\normalwrite }%
- \let\immediate\immediatewhatsit
- \appendtoks\dontshowwhatsits\to\everystoptext}
-
- \def\immediatewhatsit
- {\bgroup\futurelet\next\doimmediatewhatsit}
-
- \def\doimmediatewhatsit
- {\ifx\next\write
- \egroup\immediatewritetrue
- \else
- \egroup\expandafter\normalimmediate
- \fi}
-
- \def\dontshowwhatsits
- {\let\immediate \normalimmediate
- \let\normalmark\supernormalmark
- \let\special \normalspecial
- \let\write \normalwrite}
-
- \def\visualwhatsit#1#2#3#4#5%
- {\bgroup
- \pushwhatsit
- \dontinterfere
- \dontcomplain
- \dontshowcomposition
- \dontshowwhatsits
- \ttx
- \ifvmode\donetrue\else\donefalse\fi
- \setbox\scratchbox\hbox
- {\ifdone
- \colored[r=#1,g=#2,b=#3]{#5}% temp hack
- \else
- \colored[s=0]{#5}% temp hack
- \fi}%
- \setbox\scratchbox\hbox
- {\ifdone
- \colored[r=#1,g=#2,b=#3]{\vrule\!!width\wd\scratchbox}% temp hack
- \else
- \colored[s=0]{\vrule\!!width\wd\scratchbox}% temp hack
- \fi
- \hskip-\wd\scratchbox\box\scratchbox}%
- \scratchdimen1ex
- \setbox\scratchbox\hbox
- {\ifdone\hskip\else\raise#4\fi\scratchdimen\box\scratchbox}%
- \smashbox\scratchbox
- \ifdone\nointerlineskip\fi
- \box\scratchbox
- \ifvmode\nointerlineskip\fi
- \popwhatsit
- \egroup
- \ifimmediatewrite
- \immediatewritefalse
- \expandafter\normalimmediate
- \fi}
-
- \def\pushwhatsit
- {\ifzeropt\lastskip
- \ifcase\lastpenalty
- \ifzeropt\lastkern
- \ifhmode
- \let\popwhatsit\relax
- \else
- \edef\popwhatsit{\prevdepth\the\prevdepth}%
- \fi
- \else
- \ifhmode
- \edef\popwhatsit{\kern\the\lastkern}\unkern
- \else
- \edef\popwhatsit{\kern\the\lastkern\prevdepth\the\prevdepth}%
- \kern-\lastkern
- \fi
- \fi
- \else
- \ifhmode
- \edef\popwhatsit{\the\lastpenalty}%
- \unpenalty
- \else
- \edef\popwhatsit{\penalty\the\lastpenalty\prevdepth\the\prevdepth}%
- %\nobreak
- \fi
- \fi
- \else
- \ifhmode
- \edef\popwhatsit{\hskip\the\lastskip}\unskip
- \else
- \edef\popwhatsit{\vskip\the\lastskip\prevdepth\the\prevdepth}%
- \vskip-\lastskip
- \fi
- \fi}
-
-\fi
-
-%D The next macro can be used to keep track of classes of
-%D boxes (handy for development cq.\ tracing).
-
-\def\dodotagbox#1#2#3% can be reimplemented
- {\def\next##1##2##3##4%
- {\vbox to \ht#2{##3\hbox to \wd#2{##1#3##2}##4}}%
- \processaction
- [#1]
- [ l=>\next\relax\hfill\vfill\vfill,
- r=>\next\hfill\relax\vfill\vfill,
- t=>\next\hfill\hfill\relax\vfill,
- b=>\next\hfill\hfill\vfill\relax,
- lt=>\next\relax\hfill\relax\vfill,
- lb=>\next\relax\hfill\vfill\relax,
- rt=>\next\hfill\relax\relax\vfill,
- rb=>\next\hfill\relax\vfill\relax,
- tl=>\next\relax\hfill\relax\vfill,
- bl=>\next\relax\hfill\vfill\relax,
- tr=>\next\hfill\relax\relax\vfill,
- br=>\next\hfill\relax\vfill\relax,
- \s!default=>\next\hfill\hfill\vfill\vfill,
- \s!unknown=>\next\hfill\hfill\vfill\vfill]}
-
-\def\dotagbox[#1]#2%
- {\bgroup
- \dowithnextbox
- {\setbox\scratchbox\flushnextbox
- \setbox\nextbox\ifhbox\nextbox\hbox\else\vbox\fi
- \bgroup
- \startoverlay
- {\copy\scratchbox}
- {\dodotagbox{#1}\scratchbox{\framed
- [\c!background=\v!screen,\c!backgroundscreen=1]{#2}}}
- \stopoverlay
- \egroup
- \nextboxwd\the\wd\scratchbox
- \nextboxht\the\ht\scratchbox
- \nextboxdp\the\dp\scratchbox
- \flushnextbox
- \egroup}}
-
-\def\tagbox
- {\dosingleempty\dotagbox}
-
-%D \macros
-%D {coloredhbox,coloredvbox,coloredvtop,
-%D coloredstrut}
-%D
-%D The following visualizations are used in some of the manuals:
-
-\definecolor[boxcolor:ht][r=.5,g=.75,b=.5]
-\definecolor[boxcolor:dp][r=.5,g=.5,b=.75]
-\definecolor[boxcolor:wd][r=.75,g=.5,b=.5]
-\definecolor[strutcolor] [r=.5,g=.25,b=.25]
-
-\def\coloredbox#1%
- {\dowithnextbox{#1{\hbox
- {\blackrule[\c!width=\nextboxwd,\c!height=\nextboxht,\c!depth=\zeropoint,\c!color=boxcolor:ht]%
- \hskip-\nextboxwd
- \blackrule[\c!width=\nextboxwd,\c!height=\zeropoint,\c!depth=\nextboxdp,\c!color=boxcolor:dp]%
- \hskip-\nextboxwd
- \box\nextbox}}}#1}
-
-\def\coloredhbox{\coloredbox\hbox}
-\def\coloredvbox{\coloredbox\vbox}
-\def\coloredvtop{\coloredbox\vtop}
-
-\def\coloredstrut
- {\color[strutcolor]{\def\strutwidth{2\points}\setstrut\strut}}
+\newconstant\c_syst_visualizers_state
+\newtoks \t_syst_visualizers_optimize
+
+\definesystemattribute[visual][public,global] % global ?
+
+% no, but can become an option:
+%
+% \appendtoks
+% \attribute\visualattribute\attributeunsetvalue
+% \to \everybeforepagebody
+
+%D We only provide box visualizers as they can come in handy for testing
+%D macros. In due time we will move some of the m-visual code here too.
+
+\let\syst_visualizers_hbox\hbox
+\let\syst_visualizers_vbox\vbox
+\let\syst_visualizers_vtop\vtop
+
+\unexpanded\def\ruledhbox{\syst_visualizers_hbox attr \visualattribute \ctxcommand{visual("simplehbox")} }
+\unexpanded\def\ruledvbox{\syst_visualizers_vbox attr \visualattribute \ctxcommand{visual("simplevbox")} }
+\unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("simplevtop")} } % special case
+\unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("vtop")} }
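+
+%D A small usage sketch (the content below is made up for illustration and is
+%D not part of the module): wrapping material in one of these commands shows
+%D the box outline while the content is typeset as usual.
+%D
+%D \starttyping
+%D \ruledhbox{just some words}
+%D \ruledvbox{\hsize 4cm \input tufte }
+%D \ruledvtop{\hsize 4cm \input tufte }
+%D \stoptyping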
+
+\appendtoks
+ \ifcase\c_syst_visualizers_state\else
+ \syst_visualizers_speedup
+ \fi
+\to \everyshipout
+
+\appendtoks
+ \global\let\syst_visualizers_speedup\relax
+\to \t_syst_visualizers_optimize
+
+\def\syst_visualizers_speedup{\the\t_syst_visualizers_optimize}
+
+\appendtoks
+ \normalexpanded {%
+ \unexpanded\xdef\ruledhbox\expandafter{\ruledhbox}%
+ \unexpanded\xdef\ruledvbox\expandafter{\ruledvbox}%
+ \unexpanded\xdef\ruledvtop\expandafter{\ruledvtop}%
+ \unexpanded\xdef\ruledtopv\expandafter{\ruledtopv}%
+ }%
+\to \t_syst_visualizers_optimize
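+
+% The two token registers above implement a small optimization: at the first
+% shipout with a nonzero state the optimize toks are flushed once; they freeze
+% the \ruled... commands into their fully expanded form and relax the speedup
+% macro so this only happens once.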
+
+\unexpanded\def\showmakeup
+ {\ctxcommand{setvisual("makeup")}%
+ \let\normalvtop\ruledtopv
+ \let\vtop \ruledtopv}
+
+\unexpanded\def\showallmakeup
+ {\ctxcommand{setvisual("all")}%
+ \let\normalvtop\ruledtopv
+ \let\vtop \ruledtopv
+ \showstruts}
+
+\unexpanded\def\showboxes
+ {\ctxcommand{setvisual("boxes")}%
+ \let\normalvtop\ruledtopv
+ \let\vtop \ruledtopv}
+
+\unexpanded\def\showglyphs
+ {\ctxcommand{setvisual("glyph")}}
+
+\unexpanded\def\showfontkerns
+ {\ctxcommand{setvisual("fontkern")}}
+
+\unexpanded\def\setvisualizerfont
+ {\dosingleempty\syst_visualizers_setfont}
+
+\def\syst_visualizers_setfont[#1]% somename at 4pt
+ {\begingroup
+ \doifelsenothing{#1}{\definedfont[Mono at 4pt]}{\definedfont[#1]}%
+ \ctxlua{nodes.visualizers.setfont()}%
+ \endgroup}
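+
+%D For instance (an illustrative call, not taken from this module):
+%D
+%D \starttyping
+%D \setvisualizerfont[Mono at 6pt]
+%D \stoptyping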
+
+%D New (these might also be visualizers):
+
+\definecolor[f:r:t][a=1,t=.25,r=1]
+\definecolor[f:g:t][a=1,t=.25,g=1]
+\definecolor[f:b:t][a=1,t=.25,b=1]
+\definecolor[f:c:t][a=1,t=.25,c=1]
+\definecolor[f:m:t][a=1,t=.25,m=1]
+\definecolor[f:y:t][a=1,t=.25,y=1]
+\definecolor[f:k:t][a=1,t=.25,s=0]
+
+% \def\node_backgrounds_boxes_add#1[#2]%
+% {\node_backgrounds_boxes_initialize
+% #1\backgroundcolorattr{#2}}
+
+\def\syst_visualizers_filled_hbox#1%
+ {\node_backgrounds_boxes_initialize\hbox\backgroundcolorattr{#1}}
+
+\unexpanded\def\filledhboxr{\syst_visualizers_filled_hbox{f:r:t}}
+\unexpanded\def\filledhboxg{\syst_visualizers_filled_hbox{f:g:t}}
+\unexpanded\def\filledhboxb{\syst_visualizers_filled_hbox{f:b:t}}
+\unexpanded\def\filledhboxc{\syst_visualizers_filled_hbox{f:c:t}}
+\unexpanded\def\filledhboxm{\syst_visualizers_filled_hbox{f:m:t}}
+\unexpanded\def\filledhboxy{\syst_visualizers_filled_hbox{f:y:t}}
+\unexpanded\def\filledhboxk{\syst_visualizers_filled_hbox{f:k:t}}
+
+%D Overload:
+
+% \def\spac_struts_vide_hbox
+% {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+%
+% \def\spac_struts_vide_hbox
+% {\xdef\spac_struts_vide_hbox{\hbox attr \visualattribute \ctxcommand{visual("strut")} }%
+% \spac_struts_vide_hbox}
+
+\unexpanded\def\spac_struts_vide_hbox
+ {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+
+\appendtoks
+ \normalexpanded{\unexpanded\xdef\spac_struts_vide_hbox\expandafter{\spac_struts_vide_hbox}}%
+\to \t_syst_visualizers_optimize
+
+%D For manuals:
+
+\unexpanded\def\markfonts#1%
+ {\dontleavehmode
+ \begingroup
+ \setbox\scratchbox\hbox{\getbuffer}%
+ \ctxcommand{markfonts(\number\scratchbox)}%
+ \unhbox\scratchbox
+ \endgroup}
+
+%D We keep this one:
+
+\unexpanded\def\dontinterfere
+ {\reseteverypar
+ \parindent\zeropoint
+ \parskip \zeropoint
+ \leftskip \zeropoint
+ \rightskip\zeropoint
+ \relax}
+
+%D We keep these for a while:
+
+\let\ruledvcenter \vcenter
+\let\ruledhss \hss
+\let\ruledhfil \hfil
+\let\ruledhfill \hfill
+\let\ruledhfilll \hfilll
+\let\ruledhfilneg \hfilneg
+\let\ruledhfillneg \hfillneg
+\let\ruledhfilllneg \hfilllneg
+\let\ruledvss \vss
+\let\ruledvfil \vfil
+\let\ruledvfill \vfill
+\let\ruledvfilll \vfilll
+\let\ruledvfilneg \vfilneg
+\let\ruledvfillneg \vfillneg
+\let\ruledvfilllneg \vfilllneg
+\let\ruledhskip \hskip
+\let\ruledvskip \vskip
+\let\ruledkern \kern
+\let\ruledhglue \hglue
+\let\ruledvglue \vglue
+\let\ruledmkern \mkern
+\let\ruledmskip \mskip
+\let\ruledpenalty \penalty
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/trac-xml.lua b/Master/texmf-dist/tex/context/base/trac-xml.lua
new file mode 100644
index 00000000000..cd8b8c0a5f5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/trac-xml.lua
@@ -0,0 +1,183 @@
+if not modules then modules = { } end modules ['trac-xml'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Application helpinfo can be defined in several ways:
+--
+-- helpinfo = "big blob of help"
+--
+-- helpinfo = { basic = "blob of basic help", extra = "blob of extra help" }
+--
+-- helpinfo = "<?xml version=1.0?><application>...</application/>"
+--
+-- helpinfo = "somefile.xml"
+--
+-- In the case of an xml file, the file should either be present on the same path
+-- as the script, or we should be able to locate it using the resolver.
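+
+-- A hedged sketch of how a script might feed such helpinfo to the logger; the
+-- name and banner below are invented for illustration:
+--
+--   local helpinfo = [[
+--   <?xml version="1.0"?>
+--   <application>
+--    <flags>
+--     <category name="basic">
+--      <subcategory>
+--       <flag name="process" value="name"><short>process the given file</short></flag>
+--      </subcategory>
+--     </category>
+--    </flags>
+--   </application>
+--   ]]
+--
+--   local application = logs.application {
+--       name     = "mtx-example",
+--       banner   = "Example Script 1.00",
+--       helpinfo = helpinfo,
+--   }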
+
+local formatters = string.formatters
+local reporters = logs.reporters
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmltext = xml.text
+local xmlfirst = xml.first
+
+-- there is no need for a newhandlers { name = "help", parent = "string" }
+
+local function showhelp(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local report = specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname = category.at.name or ""
+ if wantedcategories == true or wantedcategories[categoryname] then
+ if nofcategories > 1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name = flag.at.name
+ local value = flag.at.value
+ -- local short = xmlfirst(s,"/short")
+ -- local short = xmlserialize(short,xs)
+ local short = xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title = xmltext(xmlfirst(category,"/title"))
+ if title and title ~= "" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command = xmltext(xmlfirst(example,"/command"))
+ local comment = xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment = xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+
+local reporthelp = reporters.help
+local exporthelp = reporters.export
+
+local function xmlfound(t)
+ local helpinfo = t.helpinfo
+ if type(helpinfo) == "table" then
+ return false
+ end
+ if type(helpinfo) ~= "string" then
+ helpinfo = "Warning: no helpinfo found."
+ t.helpinfo = helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript = environment.ownscript
+ local helpdata = false
+ if ownscript then
+ local helpfile = file.join(file.pathpart(ownscript),helpinfo)
+ helpdata = io.loaddata(helpfile)
+ if helpdata == "" then
+ helpdata = false
+ end
+ end
+ if not helpdata then
+ local helpfile = resolvers.findfile(helpinfo,"tex")
+ helpdata = helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata ~= "" then
+ helpinfo = helpdata
+ else
+ helpinfo = formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo = helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods == "" then
+ methods = environment.arguments["exporthelp"]
+ end
+ if not filename or filename == "" then
+ filename = environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters = logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods == "all" then
+ methods = table.keys(exporters)
+ elseif type(methods) == "string" then
+ methods = utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename) ~= "string" or filename == "" then
+ filename = false
+ elseif file.pathpart(filename) == "" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method = methods[i]
+ local exporter = exporters[method]
+ if exporter then
+ local result = exporter(t,method)
+ if result and result ~= "" then
+ if filename then
+ local fullname = file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
diff --git a/Master/texmf-dist/tex/context/base/type-imp-husayni.mkiv b/Master/texmf-dist/tex/context/base/type-imp-husayni.mkiv
index b3c1af5b708..b81996c3379 100644
--- a/Master/texmf-dist/tex/context/base/type-imp-husayni.mkiv
+++ b/Master/texmf-dist/tex/context/base/type-imp-husayni.mkiv
@@ -1,137 +1,144 @@
-\definefontfeature
+%D \module
+%D [ file=type-imp-husayni,
+%D version=2008.08.08, % or whatever ... onwards
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=Husayni,
+%D author=Idris Samawi Hamid \& Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D For a detailed list of features and names, see husayni.lfg. In fact, the following
+%D sets will go there.
+
+\definefontfeature
[husayni-default]
- [analyze=yes,mode=node,
- language=dflt,script=arab,
- ccmp=yes, % adds identity dots to unicode letters; includes dynamic dot placement
- init=yes,medi=yes,fina=yes, % contextual analysis
- % dlig=yes, % Hamzahbelow under YaaHamzahabove
- rlig=yes, % indispensable
- calt=yes, % ayah+numerals
- salt=yes, % includes manual justification with Tatwiil
- % fchr=yes % formatting chars
- anum=yes, % replace latin numerals with arabic-script ones
- ss01=yes, % Allah, Muhammad,
- % ss02=yes, % ss01 + Allah_final
- ss03=yes, % level-1 stack over Jiim, initial entry only
- % ss04=yes, % level-1 stack over Jiim, initial/medial entry
- % ss05=yes, % multi-level Jiim stacking, initial/medial entry
- % ss06=yes, % aesthetic Faa/Qaaf for FJ_mm, FJ_mf connection
- ss07=yes, % initial-entry stacking over Haa
- % ss08=yes, % initial/medial stacking over Haa, minus HM_mf strings
- % ss09=yes, % initial/medial Haa stacking plus HM_mf strings
- ss10=yes, % basic dipped Miim, initial-entry B_S-stack over Miim
- % ss11=yes, % full dipped Miim, initial-entry B_S-stack over Miim
- ss12=yes, % XBM_im initial-medial entry B_S-stack over Miim,
- % ss13=yes, % full initial-medial entry B_S-stacked Miim
- % ss14=yes, initial entry, stacked Laam on Miim
- ss15=yes, % full stacked Laam-on-Miim
- ss16=yes, % initial entry, stacked Ayn-on-Miim
- % ss17=yes, % full stacked Ayn-on-Miim
- % ss18=yes, % LMJ_im % already contained in ss03--05, may remove
- ss19=yes, % LM_im
- % ss20=yes, % KLM_m, sloped Miim
- % ss21=yes, % KLM_i_mm/LM_mm, sloped Miim
- % ss22=yes, % filled sloped Miim
- % ss23=yes, % LM_mm, non-sloped Miim
- ss24=yes, % BR_i_mf, BN_i_mf
- ss25=yes, % basic LH_im % might merge with ss24
- ss26=yes, % full Yaa.final special strings: BY_if, BY_mf, LY_mf
- ss27=yes, % basic thin Miim.final
- % ss28=yes, % full thin Miim.final % to be moved to jsnn
- % ss29=yes, % basic short Miim.final
- % ss30=yes, % full short Miim.final % to be moved to jsnn
- ss31=yes, % basic Raa.final strings: JR and SR
- % ss32=yes, % basic Raa.final strings: JR, SR, and BR
- % ss33=yes, % TtR % to be moved to jsnn
- ss34=yes, % AyR style % also available in jsnn
- ss35=yes, % full Kaaf contexts
- ss36=yes, % full Laam contexts
- ss37=yes, % Miim-Miim contexts
- ss38=yes, % basic dipped Haa, B_SH_mm
- % ss39=yes, % full dipped Haa, B_S_LH_i_mm_Mf
- % ss40=yes, % aesthetic dipped medial Haa
- ss41=yes, % high and low Baa strings
- ss42=yes, % diagonal entry
- ss43=yes, % initial alternates
- % ss44=yes, % hooked final alif
- % ss45=yes, % BMA_f
- % ss46=yes, % BM_mm_alt, for JBM combinations
- % ss47=yes, % Shaddah-<kasrah> combo
- % ss48=yes, % Auto-sukuun
- % ss49=yes, % No vowels
- % ss50=yes, % Shaddah/MaaddahHamzah only
- % ss51=yes, % No Skuun
- % ss52=yes, % No Waslah
- % ss53=yes, % No Waslah
- % ss54=yes, % chopped finals
- % ss55=yes, % idgham-tanwin
- ss60=yes, %
- % js01=yes, % Raawide
- % js02=yes, % Yaawide
- % js03=yes, % Kaafwide
- % js04=yes, % Nuunwide
- % js05=yes, % Kaafwide Nuunwide Siinwide Baawide
- % js06=yes, % final Haa wide
- % js07=yes, % thin Miim
- % js08=yes, % short Miim
- % js09=yes, % wide Siin
- % js10=yes, % thuluth-style initial Haa, final Miim, MRw_mf
- % js11=yes, % level-1 stretching
- % js12=yes, % level-2 stretching
- % js13=yes, % level-3 stretching
- % js14=yes, % final Alif
- % js15=yes, % hooked final Alif
- js16=yes, % aesthetic medial Faa/Qaaf
- % js17=yes, % fancy isol Haa after Daal, Raa, and Waaw
- % js18=yes, % Laamwide, alternate substitution
- % js19=yes, % level-4 stretching, only siin and Hhaa for basmalah
- % js20=yes, % level-5 stretching, only siin and Hhaa for basmalah
- % js21=yes, % Haa.final_alt2
- % calt=yes, % to be used for vowel/dot-based dynamics
- % ttwl=yes, % for simple, horizontal stretching, not yet implemented in luatex/mkiv
- % flts=yes, % Final Glyph On Line Alternates Short: Baa-like
- % fltw=yes, % Final Glyph On Line Alternates Wide: Baa-like
- % gclr=yes, % Geometric Cursive LR : for testing GPOS only!
- % gcrl=yes, % Geometric Cursive RL
- % gklr=yes, % Geometric Kerning LR
- % gkrl=yes, % Geometric Kerning RL
- kern=yes, % includes kerning of dot-base pairs, and final pairs in continuous strings
- curs=yes, % we don't curs final pairs, padj for that
- mark=yes, % vowels
- mkmk=yes, % stacked vowels
+ [analyze=yes,
+ mode=node,
+ language=dflt,
+ script=arab,
+ ccmp=yes, % adds identity dots to unicode letters; includes dynamic dot placement
+ init=yes, % contextual analysis
+ medi=yes, % contextual analysis
+ fina=yes, % contextual analysis
+ rlig=yes, % indispensable
+ calt=yes, % ayah+numerals
+ salt=yes, % includes manual justification with Tatwiil
+ anum=yes, % replace latin numerals with arabic-script ones
+ ss01=yes, % Allah, Muhammad,
+ ss03=yes, % level-1 stack over Jiim, initial entry only
+ ss10=yes, % basic dipped Miim, initial-entry B_S-stack over Miim
+ ss12=yes, % XBM_im initial-medial entry B_S-stack over Miim,
+ ss15=yes, % full stacked Laam-on-Miim
+ ss16=yes, % initial entry, stacked Ayn-on-Miim
+ ss19=yes, % LM_im
+ ss24=yes, % BR_i_mf, BN_i_mf
+ ss25=yes, % basic LH_im % might merge with ss24
+ ss26=yes, % full Yaa.final special strings: BY_if, BY_mf, LY_mf
+ ss27=yes, % basic thin Miim.final
+ ss31=yes, % basic Raa.final strings: JR and SR
+ ss34=yes, % AyR style % also available in jsnn
+ ss35=yes, % full Kaaf contexts
+ ss36=yes, % full Laam contexts
+ ss37=yes, % Miim-Miim contexts
+ ss38=yes, % basic dipped Haa, B_SH_mm
+ ss41=yes, % high and low Baa strings
+ ss42=yes, % diagonal entry
+ ss43=yes, % initial alternates
+ ss60=yes, %
+ js16=yes, % aesthetic medial Faa/Qaaf
+ kern=yes, % includes kerning of dot-base pairs, and final pairs in continuous strings
+ curs=yes, % we don't curs final pairs, padj for that
+ mark=yes, % vowels
+ mkmk=yes, % stacked vowels
tlig=yes,
- colorscheme=husayni:default]
+ goodies=husayni,
+ colorscheme=default]
+
+\definefontfeature
+ [husayni-alternative] % was husayni-default2
+ [analyze=yes,
+ mode=node,
+ language=dflt,
+ script=arab,
+ ccmp=yes, % adds identity dots to unicode letters; includes dynamic dot placement
+ init=yes, % contextual analysis
+ medi=yes, % contextual analysis
+ fina=yes, % contextual analysis
+ rlig=yes, % indispensable
+ calt=yes, % ayah+numerals
+ salt=yes, % includes manual justification with Tatwiil
+ anum=yes, % replace latin numerals with arabic-script ones
+ ss01=yes, % Allah, Muhammad,
+ ss03=yes, % level-1 stack over Jiim, initial entry only
+ ss10=yes, % basic dipped Miim, initial-entry B_S-stack over Miim
+ ss12=yes, % XBM_im initial-medial entry B_S-stack over Miim,
+ ss15=yes, % full stacked Laam-on-Miim
+ ss16=yes, % initial entry, stacked Ayn-on-Miim
+ ss19=yes, % LM_im
+ ss24=yes, % BR_i_mf, BN_i_mf
+ ss25=yes, % basic LH_im % might merge with ss24
+ ss26=yes, % full Yaa.final special strings: BY_if, BY_mf, LY_mf
+ ss27=yes, % basic thin Miim.final
+ ss31=yes, % basic Raa.final strings: JR and SR
+ ss34=yes, % AyR style % also available in jsnn
+ ss35=yes, % full Kaaf contexts
+ ss36=yes, % full Laam contexts
+ ss37=yes, % Miim-Miim contexts
+ ss38=yes, % basic dipped Haa, B_SH_mm
+ ss41=yes, % high and low Baa strings
+ ss42=yes, % diagonal entry
+ ss43=yes, % initial alternates
+ ss60=yes, %
+ js16=yes, % aesthetic medial Faa/Qaaf
+ kern=yes, % includes kerning of dot-base pairs, and final pairs in continuous strings
+ curs=yes, % we don't curs final pairs, padj for that
+ mark=yes, % vowels
+ mkmk=yes, % stacked vowels
+ tlig=yes]
\definefontfeature
[husayni-tt]
- [analyze=yes,mode=node,
- language=dflt,script=arab,
- ccmp=yes, % adds identity dots to unicode letters; includes dynamic dot placement
- init=yes,medi=yes,fina=yes, % contextual analysis
- rlig=yes, % indispensable
+ [analyze=yes,
+ mode=node,
+ language=dflt,
+ script=arab,
+ ccmp=yes,
+ init=yes,
+ medi=yes,
+ fina=yes,
+ rlig=yes,
fchr=yes,
- kern=yes, % includes kerning of dot-base pairs, and final pairs in continuous strings
- curs=yes, % we don't curs final pairs, padj for that
- mark=yes, % vowels
- mkmk=yes] % stacked vowels
+ kern=yes,
+ curs=yes,
+ mark=yes,
+ mkmk=yes]
\definefontfeature
[husayni-mono]
- [analyze=yes,mode=node,
- language=dflt,script=arab,ccmp=no,
- init=yes,medi=yes,fina=yes,isol=yes,
- rlig=yes,liga=yes,
+ [analyze=yes,
+ mode=node,
+ language=dflt,
+ script=arab,
+ ccmp=no,
+ init=yes,
+ medi=yes,
+ fina=yes,
+ isol=yes,
mset=yes]
-
+
\definedelimitedtext
[ornatequote]
[left={‏\char"FD3F\penalty10000},
- right={‏\char"FD3E\penalty10000}]
-
+ right={‏\char"FD3E\penalty10000}]
+
\definedelimitedtext
[arparenthesis]
[left={‏\char"0028},
- right={‏\char"0029}]
+ right={‏\char"0029}]
\definedelimitedtext
[arquotation]
@@ -143,4 +150,49 @@
[arquote]
[left={‏\symbol[rightquote]},
right={‏\symbol[leftquote]},
- leftmargin=standard]
\ No newline at end of file
+ leftmargin=standard]
+
+\definebodyfontenvironment
+ [husayni-default]
+ [default]
+ [interlinespace=4.4ex]
+
+\definefontfallback
+ [husayni-latin]
+ [file:lmroman12-regular*default]
+ [basiclatin,latin-1supplement,latinextended-A,latinextendedadditional]
+ [force=no,
+ rscale=.7]
+
+\definefontfallback
+ [husayni-mono]
+ [file:cour*husayni-mono]
+ [arabic,arabicpresentationformsa,arabicpresentationformsb,arabicsupplement,0x200C,0x200D,0x200E,0x200F]
+ % [force=no]
+
+\definebodyfontenvironment[20.7pt]
+\definebodyfontenvironment[24.8pt]
+\definebodyfontenvironment[29.8pt]
+\definebodyfontenvironment[35.8pt]
+
+\starttypescriptcollection[husayni]
+
+ \starttypescript [naskh] [husayni] [name]
+ \definefontsynonym [Arabic-Light] [name:husayni] [goodies=husayni,features=husayni-default]
+ \definefontsynonym [Arabic-Bold] [name:husayni] [goodies=husayni,features=husayni-default]
+ \definefontsynonym [Arabic-Italic] [name:husayni] [goodies=husayni,features=husayni-default]
+ \definefontsynonym [Arabic-Bold-Italic] [name:husayni] [goodies=husayni,features=husayni-default]
+
+ \definefontsynonym [Serif] [Arabic-Light]
+ \definefontsynonym [SerifItalic] [Arabic-Italic]
+ \definefontsynonym [SerifBold] [Arabic-Bold]
+ \definefontsynonym [SerifBoldItalic] [Arabic-Bold-Italic]
+ \stoptypescript
+
+ \starttypescript [husayni-default]
+ \definetypeface [husayni-default] [rm] [naskh] [husayni] [default] [fallbacks=husayni-latin]
+ \definetypeface [husayni-default] [tt] [mono] [modern] [default] [fallbacks=husayni-latin]
+ \definetypeface [husayni-default] [mm] [math] [xitsbidi] [default]
+ \stoptypescript
+
+\stoptypescriptcollection
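+
+% A usage sketch (hedged; the size and setup below are only an illustration):
+%
+% \usetypescript[husayni-default]
+% \setupbodyfont[husayni-default,20.7pt]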
diff --git a/Master/texmf-dist/tex/context/base/type-imp-latinmodern.mkiv b/Master/texmf-dist/tex/context/base/type-imp-latinmodern.mkiv
index b851763b8ea..b0e25e82ca1 100644
--- a/Master/texmf-dist/tex/context/base/type-imp-latinmodern.mkiv
+++ b/Master/texmf-dist/tex/context/base/type-imp-latinmodern.mkiv
@@ -149,7 +149,6 @@
\stoptypescript
\starttypescript [\s!mono] [modern,latin-modern,modern-variable,latin-modern-variable,modern-condensed,latin-modern-condensed]
- \loadfontgoodies[lm]
\definefontsynonym [LMTypewriter-Regular] [\s!file:lmmono10-regular] [\s!features=\s!none]
\definefontsynonym [LMTypewriter-Italic] [\s!file:lmmono10-italic] [\s!features=\s!none]
\definefontsynonym [LMTypewriter-Oblique] [\s!file:lmmonoslant10-regular] [\s!features=\s!none]
@@ -169,13 +168,19 @@
\definefontsynonym [LMTypewriterVarWd-DarkOblique] [\s!file:lmmonoproplt10-boldoblique] [\s!features=\s!default]
\stoptypescript
+% \starttypescript [\s!math] [modern,latin-modern]
+% \loadfontgoodies[lm]
+% \definefontsynonym [LMMathRoman-Regular] [\v!file:latinmodern-math-regular.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lm]
+% \definefontsynonym [LMMathRoman-Bold] [\v!file:latinmodern-math-regular.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lm]
+% \stoptypescript
+
\starttypescript [\s!math] [modern,latin-modern]
\loadfontgoodies[lm]
- \definefontsynonym [LMMathRoman-Regular] [\v!file:latinmodernmath-regular.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lm]
- \definefontsynonym [LMMathRoman-Bold] [\v!file:latinmodernmath-regular.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lm]
+ \definefontsynonym [LMMathRoman-Regular] [\v!file:latinmodern-math-regular.otf] [\s!features={\s!math\mathsizesuffix,lm-math},\s!goodies=lm]
+ \definefontsynonym [LMMathRoman-Bold] [\v!file:latinmodern-math-regular.otf] [\s!features={\s!math\mathsizesuffix,lm-math},\s!goodies=lm]
\stoptypescript
- \starttypescript [modern-designsize]
+ \starttypescript [modern-designsize-virtual]
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
@@ -183,6 +188,14 @@
\quittypescriptscanning
\stoptypescript
+ \starttypescript [modern-designsize]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [latin-modern-designsize] [\s!default] [\s!designsize=\s!auto]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [modern] [\s!default]
+ \quittypescriptscanning
+ \stoptypescript
+
\starttypescript [modern,modern-base]
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [modern] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [modern] [\s!default]
diff --git a/Master/texmf-dist/tex/context/base/type-imp-lucida-opentype.mkiv b/Master/texmf-dist/tex/context/base/type-imp-lucida-opentype.mkiv
index 8c4ff3bbae5..63a331cad6b 100644
--- a/Master/texmf-dist/tex/context/base/type-imp-lucida-opentype.mkiv
+++ b/Master/texmf-dist/tex/context/base/type-imp-lucida-opentype.mkiv
@@ -56,8 +56,8 @@
\stoptypescript
\starttypescript [\s!math] [lucidanova,lucidaot,otlucida]
- \definefontsynonym [\s!MathRoman] [\s!file:LucidaBrightMathOT.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lucidanova-math]
- \definefontsynonym [\s!MathRomanBold] [\s!file:LucidaBrightMathOT-Demi.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lucidanova-math]
+ \definefontsynonym [\s!MathRoman] [\s!file:LucidaBrightMathOT.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lucida-opentype-math]
+ \definefontsynonym [\s!MathRomanBold] [\s!file:LucidaBrightMathOT-Demi.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=lucida-opentype-math]
\stoptypescript
\starttypescript [\s!handwriting] [lucidanova,lucidaot,otlucida]
diff --git a/Master/texmf-dist/tex/context/base/type-imp-texgyre.mkiv b/Master/texmf-dist/tex/context/base/type-imp-texgyre.mkiv
index 823f6b4a9dc..c404729a246 100644
--- a/Master/texmf-dist/tex/context/base/type-imp-texgyre.mkiv
+++ b/Master/texmf-dist/tex/context/base/type-imp-texgyre.mkiv
@@ -197,26 +197,56 @@
\stoptypescriptcollection
-\starttypescriptcollection[xmath]
+\startmode[txmath]
- % Times Math (tx)
+ \starttypescriptcollection[texgyre-math-times]
- \starttypescript [\s!math][times,termes][\s!all]
- \loadfontgoodies[tx-math]
- \definefontsynonym[\s!MathRoman][txmath@tx-math]
- % \definefontsynonym[\s!MathRomanBold][MathRoman]
- \stoptypescript
+ \starttypescript [\s!math][times,termes][\s!all]
+ \loadfontgoodies[tx-math]
+ \definefontsynonym[\s!MathRoman][txmath@tx-math]
+ % \definefontsynonym[\s!MathRomanBold][MathRoman]
+ \stoptypescript
- % Palatino Math (px)
+ \stoptypescriptcollection
- \starttypescript [\s!math][palatino,pagella][\s!all]
- \loadfontgoodies[px-math]
- \definefontsynonym[\s!MathRoman][pxmath@px-math]
- % \definefontsynonym[\s!MathRomanBold][MathRoman]
- \stoptypescript
+\stopmode
-% \starttypescript [\s!math][palatino,pagella][\s!all]
-% \definefontsynonym[\s!MathRoman][file:texgyrepagella-math.otf][\s!features=\s!math\mathsizesuffix]
-% \stoptypescript
+\startnotmode[txmath]
-\stoptypescriptcollection
+ \starttypescriptcollection[texgyre-math-times]
+
+ \starttypescript [\s!math][times,termes][\s!all]
+ \loadfontgoodies[texgyre]
+ \definefontsynonym[\s!MathRoman][file:texgyre-termes-math-regular.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
+ \stoptypescriptcollection
+
+\stopnotmode
+
+\startmode[pxmath]
+
+ \starttypescriptcollection[texgyre-math]
+
+ \starttypescript [\s!math][palatino,pagella][\s!all]
+ \loadfontgoodies[px-math]
+ \definefontsynonym[\s!MathRoman][pxmath@px-math]
+ % \definefontsynonym[\s!MathRomanBold][MathRoman]
+ \stoptypescript
+
+ \stoptypescriptcollection
+
+\stopmode
+
+\startnotmode[pxmath]
+
+ \starttypescriptcollection[texgyre-math]
+
+ \starttypescript [\s!math][palatino,pagella][\s!all]
+ \loadfontgoodies[texgyre]
+ \definefontsynonym[\s!MathRoman][file:texgyre-pagella-math-regular.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
+ \stoptypescriptcollection
+
+\stopnotmode
diff --git a/Master/texmf-dist/tex/context/base/type-imp-xits.mkiv b/Master/texmf-dist/tex/context/base/type-imp-xits.mkiv
index cf3856eaeb8..f83050e14ac 100644
--- a/Master/texmf-dist/tex/context/base/type-imp-xits.mkiv
+++ b/Master/texmf-dist/tex/context/base/type-imp-xits.mkiv
@@ -23,12 +23,12 @@
\starttypescript [\s!math] [xits,xitsbidi] [\s!name]
\loadfontgoodies[xits-math]
- \definefontsynonym[\s!MathRoman ][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
- \definefontsynonym[\s!MathRoman L2R][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
- \definefontsynonym[\s!MathRoman R2L][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
- \definefontsynonym[\s!MathBold ][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
- \definefontsynonym[\s!MathBold L2R][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
- \definefontsynonym[\s!MathBold R2L][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman ][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman L2R][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman R2L][\s!file:xits-math.otf] [\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold ][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold L2R][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold R2L][\s!file:xits-mathbold.otf][\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
\stoptypescript
\starttypescript [\s!serif] [xits] [\s!name]
@@ -52,7 +52,7 @@
\definetypeface [xitsbidi] [\s!rm] [\s!serif] [xits] [\s!default]
\definetypeface [xitsbidi] [\s!ss] [\s!sans] [heros] [\s!default] [\s!rscale=0.9]
\definetypeface [xitsbidi] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.05]
- %definetypeface [xitsbidi] [\s!mm] [\s!math] [xitsbidi] [bidi] [\s!direction=\s!both]
+ %definetypeface [xitsbidi] [\s!mm] [\s!math] [xitsbidi] [bidi] [\s!direction=\s!both]
\definetypeface [xitsbidi] [\s!mm] [\s!math] [xitsbidi] [\s!default] [\s!direction=\s!both]
\stoptypescript
diff --git a/Master/texmf-dist/tex/context/base/type-ini.lua b/Master/texmf-dist/tex/context/base/type-ini.lua
index 4ce953168f5..03cdb0fe571 100644
--- a/Master/texmf-dist/tex/context/base/type-ini.lua
+++ b/Master/texmf-dist/tex/context/base/type-ini.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['type-ini'] = {
-- more code will move here
+local commands, context = commands, context
+
local gsub = string.gsub
local report_typescripts = logs.reporter("fonts","typescripts")
@@ -15,25 +17,26 @@ local report_typescripts = logs.reporter("fonts","typescripts")
local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only
local function action(name,foundname)
- context.startreadingfile()
- context.pushendofline()
- context.unprotect()
- context.input(foundname)
- context.protect()
- context.popendofline()
- context.stopreadingfile()
+ -- context.startreadingfile()
+ -- context.pushendofline()
+ -- context.unprotect()
+ -- context.input(foundname)
+ -- context.protect()
+ -- context.popendofline()
+ -- context.stopreadingfile()
+ context.loadfoundtypescriptfile(foundname)
end
local name_one, name_two
local function failure_two(name)
- report_typescripts("unknown: library '%s' or '%s'",name_one,name_two)
+ report_typescripts("unknown library %a or %a",name_one,name_two)
end
local function failure_one(name)
name_two = gsub(name,"%-.*$","")
if name_two == name then
- report_typescripts("unknown: library '%s'",name_one)
+ report_typescripts("unknown library %a",name_one)
else
commands.uselibrary {
name = name_two,
@@ -59,7 +62,7 @@ end
local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" }
local function failure(name)
- report_typescripts("unknown: library '%s'",name)
+ report_typescripts("unknown library %a",name)
end
function commands.loadtypescriptfile(name) -- a more specific name
diff --git a/Master/texmf-dist/tex/context/base/type-ini.mkvi b/Master/texmf-dist/tex/context/base/type-ini.mkvi
index 8ca42854f39..ddf7cad8f6f 100644
--- a/Master/texmf-dist/tex/context/base/type-ini.mkvi
+++ b/Master/texmf-dist/tex/context/base/type-ini.mkvi
@@ -27,6 +27,8 @@
\unprotect
+\definesystemvariable {ts} % TypeScript / for the moment we keep this one
+
\newcount \c_font_typescripts_n_of_preloaded
\newconditional\c_font_typescripts_quit
\newtoks \c_font_typescripts_document
@@ -134,15 +136,21 @@
% The next will change .. we can load a file inside a typescript, but as the state
% is then 1, it doesn't get stored unless we do that explicitly
-% \unexpanded\def\loadtypescriptfile[#1]%
-% {\ctxcommand{loadtypescriptfile("#1")}}
-
\unexpanded\def\loadtypescriptfile[#1]%
{\pushmacro\typescriptstate
\let\typescriptstate\plustwo % assumes 2 at the outer level
\ctxcommand{loadtypescriptfile("#1")}%
\popmacro\typescriptstate}
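+
+% For instance (an illustrative name; the patterns map it onto the file
+% type-imp-myfonts.mkiv):
+%
+% \loadtypescriptfile[myfonts]
+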
+\unexpanded\def\loadfoundtypescriptfile#1%
+ {\startreadingfile
+ \pushendofline
+ \unprotect
+ \input{#1}%
+ \protect
+ \popendofline
+ \stopreadingfile}
+
\unexpanded\def\quittypescriptscanning
{\settrue\c_font_typescripts_quit} % public
@@ -452,10 +460,6 @@
\def\font_typefaces_define_b[#name][#style][#fontshape][#fontname][#dummya][#dummyb]%
{\font_typefaces_define_a[#name][#style][#fontshape][#fontname][\s!default][#dummyb]}
-% \def\font_typefaces_define_c[#name][#style][#settings][#dummya][#dummyb][#dummyc]% misuse for settings
-% {\font_typefaces_define_indeed[#name][#style]%
-% \getparameters[\??tf#name#style][#settings]} % not used
-
\def\font_typefaces_define_c[#name][#style][#dummya][#dummyb][#dummyc][#dummyd]%
{\font_typefaces_define_indeed[#name][#style]}
@@ -553,7 +557,7 @@
%D \inherittypeface[palatino] % == [rm,ss,tt,mm]
%D \stoptyping
-\def\inherittypeface
+\unexpanded\def\inherittypeface
{\dotripleempty\font_typescripts_inherit_indeed}
\def\font_typescripts_inherit_indeed[#name][#styles][#parentclass]%
diff --git a/Master/texmf-dist/tex/context/base/typo-bld.lua b/Master/texmf-dist/tex/context/base/typo-bld.lua
new file mode 100644
index 00000000000..ed700add7d5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/typo-bld.lua
@@ -0,0 +1,185 @@
+if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
+ version = 1.001,
+ comment = "companion to typo-bld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local insert, remove = table.insert, table.remove
+
+local builders, nodes, node = builders, nodes, node
+
+builders.paragraphs = builders.paragraphs or { }
+local parbuilders = builders.paragraphs
+
+parbuilders.constructors = parbuilders.constructors or { }
+local constructors = parbuilders.constructors
+
+constructors.names = constructors.names or { }
+local names = constructors.names
+
+constructors.numbers = constructors.numbers or { }
+local numbers = constructors.numbers
+
+constructors.methods = constructors.methods or { }
+local methods = constructors.methods
+
+local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999
+constructors.attribute = a_parbuilder
+
+local unsetvalue = attributes.unsetvalue
+local texsetattribute = tex.setattribute
+local texnest = tex.nest
+
+local nodepool = nodes.pool
+local new_baselineskip = nodepool.baselineskip
+local new_lineskip = nodepool.lineskip
+local insert_node_before = node.insert_before
+local hpack_node = node.hpack
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names")
+storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers")
+
+local report_parbuilders = logs.reporter("parbuilders")
+
+local mainconstructor = nil -- not stored in format
+local nofconstructors = 0
+local stack = { }
+
+function constructors.define(name)
+ nofconstructors = nofconstructors + 1
+ names[nofconstructors] = name
+ numbers[name] = nofconstructors
+end
+
+function constructors.set(name) --- will go
+ if name then
+ mainconstructor = numbers[name] or unsetvalue
+ else
+ mainconstructor = stack[#stack] or unsetvalue
+ end
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+end
+
+function constructors.start(name)
+ local number = numbers[name]
+ insert(stack,number)
+ mainconstructor = number or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+ -- report_parbuilders("start %a",name)
+end
+
+function constructors.stop()
+ remove(stack)
+ mainconstructor = stack[#stack] or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor == unsetvalue then
+ constructors.disable()
+ end
+ -- report_parbuilders("stop")
+end
+
+-- return values:
+--
+-- true : tex will break itself
+-- false : idem but dangerous
+-- head : list of valid vmode nodes with last being hlist
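+
+-- A hedged sketch (the name "demo" is invented) of how a builder can be hooked
+-- in at this end; the tex end then selects it with \defineparbuilder[demo] and
+-- \startparbuilder[demo] ... \stopparbuilder:
+--
+--   constructors.define("demo")
+--
+--   constructors.methods.demo = function(head,followed_by_display)
+--       -- simply hand the paragraph back to tex's own line breaker
+--       return true
+--   end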
+
+function constructors.handler(head,followed_by_display)
+ if type(head) == "boolean" then
+ return head
+ else
+ local attribute = head[a_parbuilder] -- or mainconstructor
+ if attribute then
+ local method = names[attribute]
+ if method then
+ local handler = methods[method]
+ if handler then
+ return handler(head,followed_by_display)
+ else
+ report_parbuilders("contructor method %a is not defined",tostring(method))
+ return true -- let tex break
+ end
+ end
+ end
+ return true -- let tex break
+ end
+end
+
+-- just for testing
+
+function constructors.methods.default(head,followed_by_display)
+ return true -- let tex break
+end
+
+-- also for testing (now the surrounding spacing is handled as well)
+
+function builders.paragraphs.constructors.methods.oneline(head,followed_by_display)
+ -- when needed we will turn this into a helper
+ local t = texnest[texnest.ptr]
+ local h = hpack_node(head)
+ local d = tex.baselineskip.width - t.prevdepth - h.height
+ t.prevdepth = h.depth
+ t.prevgraf = 1
+ if d < tex.lineskiplimit then
+ return insert_node_before(h,h,new_lineskip(tex.lineskip))
+ else
+ return insert_node_before(h,h,new_baselineskip(d))
+ end
+end
+
+-- It makes no sense to have a sequence here: we already have pre and post
+-- hooks, and only one parbuilder makes sense, so no:
+--
+-- local actions = nodes.tasks.actions("parbuilders")
+--
+-- yet ... maybe some day.
+
+local actions = constructors.handler
+local enabled = false
+
+local function processor(head,followed_by_display)
+ -- todo: not again in otr so we need to flag
+ if enabled then
+ starttiming(parbuilders)
+ local head = actions(head,followed_by_display)
+ stoptiming(parbuilders)
+ return head
+ else
+ return true -- let tex do the work
+ end
+end
+
+function constructors.enable()
+ enabled = true
+end
+
+function constructors.disable()
+ enabled = false
+end
+
+
+callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
+
+statistics.register("linebreak processing time", function()
+ return statistics.elapsedseconds(parbuilders)
+end)
+
+-- interface
+
+commands.defineparbuilder = constructors.define
+commands.startparbuilder = constructors.start
+commands.stopparbuilder = constructors.stop
+commands.setparbuilder = constructors.set
+commands.enableparbuilder = constructors.enable
+commands.disableparbuilder = constructors.disable
diff --git a/Master/texmf-dist/tex/context/base/typo-bld.mkiv b/Master/texmf-dist/tex/context/base/typo-bld.mkiv
new file mode 100644
index 00000000000..10502005b9a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/typo-bld.mkiv
@@ -0,0 +1,64 @@
+%D \module
+%D [ file=typo-bld, % was node-par,
+%D version=2008.09.30,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Paragraph Building,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
+
+%D This is very experimental, undocumented, and subject to change, just like the
+%D underlying interfaces. But at least it has been cleaned up as part of the
+%D status-mkiv cleanup.
+
+% \startparbuilder[default]
+% \input tufte \par
+% \startparbuilder[oneline]
+% \input tufte \par
+% \stopparbuilder
+% \input tufte \par
+% \stopparbuilder
+%
+% \startparbuilder[oneline]
+% \dorecurse{100}{\input ward \par}
+% \stopparbuilder
+
+\unprotect
+
+\registerctxluafile{typo-bld}{1.001}
+
+\definesystemattribute[parbuilder][public]
+
+\installcorenamespace {parbuilder}
+
+\unexpanded\def\defineparbuilder[#1]%
+ {\ctxcommand{defineparbuilder("#1")}}
+
+\unexpanded\def\startparbuilder[#1]%
+ {\ifhmode\par\fi
+ \ctxcommand{startparbuilder("#1")}}
+
+\unexpanded\def\stopparbuilder
+ {\ifhmode\par\fi
+ \ctxcommand{stopparbuilder()}}
+
+\unexpanded\def\setmainparbuilder[#1]%
+ {\ctxcommand{setparbuilder("#1")}}
+
+% no high level interface; after all, implementing a linebreaker is not something
+% that the average user will do
+
+\defineparbuilder[default] % just for testing
+\defineparbuilder[oneline] % just for testing
+\defineparbuilder[basic] % just for testing
+
+\def\enableparbuilders {\ctxcommand{enableparbuilder()}} % hooks in otr so we need to pick up
+\def\disableparbuilders{\ctxcommand{disableparbuilder()}} % hooks in otr so we need to pick up
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/typo-brk.lua b/Master/texmf-dist/tex/context/base/typo-brk.lua
index 14f0b8b4744..d6326ebebc8 100644
--- a/Master/texmf-dist/tex/context/base/typo-brk.lua
+++ b/Master/texmf-dist/tex/context/base/typo-brk.lua
@@ -20,16 +20,14 @@ local report_breakpoints = logs.reporter("typesetting","breakpoints")
local nodes, node = nodes, node
local settings_to_array = utilities.parsers.settings_to_array
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
local copy_node = node.copy
local copy_nodelist = node.copy_list
local free_node = node.free
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
local remove_node = nodes.remove -- ! nodes
-local link_nodes = nodes.link
+
+local tonodes = nodes.tonodes
local texattribute = tex.attribute
local unsetvalue = attributes.unsetvalue
@@ -139,10 +137,18 @@ methods[5] = function(head,start,settings) -- x => p q r
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
local attr = tmp.attr
+ local font = tmp.font
start.attr = copy_nodelist(attr) -- todo: critical only
- start.pre = link_nodes(settings.right,tmp,attr)
- start.post = link_nodes(settings.left,tmp,attr)
- start.replace = link_nodes(settings.middle,tmp,attr)
+ local left, right, middle = settings.left, settings.right, settings.middle
+ if left then
+ start.pre = tonodes(tostring(left),font,attr) -- was right
+ end
+ if right then
+ start.post = tonodes(tostring(right),font,attr) -- was left
+ end
+ if middle then
+ start.replace = tonodes(tostring(middle),font,attr)
+ end
free_node(tmp)
insert_break(head,start,10000,10000)
end
@@ -155,9 +161,9 @@ local function process(namespace,attribute,head)
while start do
local id = start.id
if id == glyph_code then
- local attr = has_attribute(start,a_breakpoints)
+ local attr = start[a_breakpoints]
if attr and attr > 0 then
- unset_attribute(start,a_breakpoints) -- maybe test for subtype > 256 (faster)
+ start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local data = mapping[attr]
if data then
diff --git a/Master/texmf-dist/tex/context/base/typo-brk.mkiv b/Master/texmf-dist/tex/context/base/typo-brk.mkiv
index 029f4402f00..af498bfecb5 100644
--- a/Master/texmf-dist/tex/context/base/typo-brk.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-brk.mkiv
@@ -38,14 +38,14 @@
\def\typo_breakpoints_define_character[#1][#2][#3]% name char settings
{\begingroup
- \getparameters[\??bp][\c!type=1,\c!nleft=3,\c!nright=3,\s!language=,\c!left=,\c!right=,\c!middle=,#3]%
- \ctxcommand{definebreakpoint("#1", "#2", "\reallanguagetag\@@bplanguage", {
- type = \@@bptype,
- nleft = "\@@bpnleft",
- nright = "\@@bpnright",
- right = "\@@bpright",
- left = "\@@bpleft",
- middle = "\@@bpmiddle"
+ \getdummyparameters[\c!type=1,\c!nleft=3,\c!nright=3,\s!language=,\c!left=,\c!right=,\c!middle=,#3]%
+ \ctxcommand{definebreakpoint("#1", "#2", "\reallanguagetag{\directdummyparameter\s!language}", { % maybe deal with #3 at the lua end
+ type = \directdummyparameter\c!type,
+ nleft = "\directdummyparameter\c!nleft",
+ nright = "\directdummyparameter\c!nright",
+ right = "\directdummyparameter\c!right",
+ left = "\directdummyparameter\c!left",
+ middle = "\directdummyparameter\c!middle"
} )}%
\endgroup}
@@ -77,6 +77,8 @@
% \setbreakpoints[compound]
% \start \hsize 1mm test-test \par \stop
+%D Maybe some day default:
+%
% \setbreakpoints[compound]
\protect \endinput
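For orientation, here is a sketch of the settings table that the rewritten \typo_breakpoints_define_character above passes to the Lua definebreakpoint handler. The field names are the ones visible in the \ctxcommand call; the concrete values are hypothetical:

-- hypothetical example only: roughly what reaches the Lua end for a
-- type 5 breakpoint ("x => p q r", see methods[5] in typo-brk.lua above)
local settings = {
  type   = 5,     -- breakpoint method
  nleft  = "3",   -- characters required before the breakpoint
  nright = "3",   -- characters required after the breakpoint
  left   = "(",   -- becomes the pre part of the discretionary
  right  = ")",   -- becomes the post part of the discretionary
  middle = "",    -- becomes the replace part (taken when no break occurs)
}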
diff --git a/Master/texmf-dist/tex/context/base/typo-cap.lua b/Master/texmf-dist/tex/context/base/typo-cap.lua
index 3233a6ba222..fdbf2e35338 100644
--- a/Master/texmf-dist/tex/context/base/typo-cap.lua
+++ b/Master/texmf-dist/tex/context/base/typo-cap.lua
@@ -16,11 +16,9 @@ local report_casing = logs.reporter("typesetting","casing")
local nodes, node = nodes, node
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
local traverse_id = node.traverse_id
local copy_node = node.copy
+local end_of_math = node.end_of_math
local texattribute = tex.attribute
local unsetvalue = attributes.unsetvalue
@@ -31,6 +29,7 @@ local kerncodes = nodes.kerncodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
+local math_code = nodecodes.math
local kerning_code = kerncodes.kerning
local userskip_code = skipcodes.userskip
@@ -86,7 +85,6 @@ local function helper(start, codes, special, attribute, once)
if next then
next.prev = prev
end
---~ node.free(start)
return prev, true
elseif lastfont and start.prev.id ~= glyph_code then
fnt = lastfont
@@ -171,8 +169,8 @@ local function Word(start,attribute,attr)
if not prev or prev.id ~= glyph_code then
--- only the first character is treated
for n in traverse_id(glyph_code,start.next) do
- if has_attribute(n,attribute) == attr then
- unset_attribute(n,attribute)
+ if n[attribute] == attr then
+ n[attribute] = unsetvalue
else
-- break -- we can have nested mess
end
@@ -264,26 +262,28 @@ local function process(namespace,attribute,head) -- not real fast but also not u
while start do -- while because start can jump ahead
local id = start.id
if id == glyph_code then
- local attr = has_attribute(start,attribute)
+ local attr = start[attribute]
if attr and attr > 0 then
if attr ~= lastattr then
lastfont = nil
lastattr = attr
end
- unset_attribute(start,attribute)
+ start[attribute] = unsetvalue
local action = actions[attr%100] -- map back to low number
if action then
start, ok = action(start,attribute,attr)
done = done and ok
if trace_casing then
- report_casing("case trigger %s, instance %s, result %s",attr%100,div(attr,100),tostring(ok))
+ report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok)
end
elseif trace_casing then
- report_casing("unknown case trigger %s",attr)
+ report_casing("unknown case trigger %a",attr)
end
end
+ elseif id == math_code then
+ start = end_of_math(start)
end
- if start then
+ if start then -- why test
start = start.next
end
end
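The added math_code branch above is an instance of a pattern that recurs in several of these processors (typo-itc.lua and typo-krn.lua below gain the same branch): when the traversal reaches a begin-math node it jumps straight to the matching end-math node, so the contents of inline math are never touched. A bare sketch of the loop shape, with the actual casing work elided:

-- sketch only, not a literal excerpt: the skip-math traversal idiom as used
-- above; meant to run inside LuaTeX where the node library is available
local end_of_math = node.end_of_math
local nodecodes   = nodes.nodecodes      -- ConTeXt's code tables, as above
local glyph_code  = nodecodes.glyph
local math_code   = nodecodes.math

local function traverse(start)
  while start do
    local id = start.id
    if id == glyph_code then
      -- process the glyph here (casing, kerning, italic correction, ...)
    elseif id == math_code then
      start = end_of_math(start)         -- jump to the matching end-math node
    end
    if start then
      start = start.next
    end
  end
end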
diff --git a/Master/texmf-dist/tex/context/base/typo-cap.mkiv b/Master/texmf-dist/tex/context/base/typo-cap.mkiv
index 28ced7c42cf..25a0ff9ee46 100644
--- a/Master/texmf-dist/tex/context/base/typo-cap.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-cap.mkiv
@@ -15,6 +15,9 @@
\unprotect
+%D Maybe we need a more clever system: either command or style mode etc. so
+%D that we can avoid the grouped mess in a simple style switch.
+
\registerctxluafile{typo-cap}{1.001}
\definesystemattribute[case][public]
@@ -146,6 +149,14 @@
\unexpanded\def\realSmallcapped {\groupedcommand{\sc\setcharactercasing[\v!Word ]}{}} % one upper + font
\unexpanded\def\realSmallCapped {\groupedcommand{\sc\setcharactercasing[\v!Words ]}{}} % some upper
+\unexpanded\def\font_style_pseudosmallcapped{\setcharactercasing [\v!WORD ]\signalcharacter\tx} % all upper
+\unexpanded\def\font_style_pseudoSmallcapped{\setcharactercasing [\v!capital]\signalcharacter\tx} % one upper + font
+\unexpanded\def\font_style_pseudoSmallCapped{\setcharactercasing [\v!Capital]\signalcharacter\tx} % some upper + font
+
+\unexpanded\def\font_style_realsmallcapped {\sc\setcharactercasing[\v!WORD ]} % all lower
+\unexpanded\def\font_style_realSmallcapped {\sc\setcharactercasing[\v!Word ]} % one upper + font
+\unexpanded\def\font_style_realSmallCapped {\sc\setcharactercasing[\v!Words ]} % some upper
+
\unexpanded\def\typo_capitals_smallcaps
{\ifconditional\c_typo_capitals_pseudo
\expandafter\firstoftwoarguments
@@ -157,6 +168,10 @@
\unexpanded\def\Smallcapped{\typo_capitals_smallcaps\pseudoSmallcapped\realSmallcapped}
\unexpanded\def\SmallCapped{\typo_capitals_smallcaps\pseudoSmallCapped\realSmallCapped}
+\unexpanded\def\font_style_smallcapped{\typo_capitals_smallcaps\font_style_pseudosmallcapped\font_style_realsmallcapped}
+\unexpanded\def\font_style_Smallcapped{\typo_capitals_smallcaps\font_style_pseudoSmallcapped\font_style_realSmallcapped}
+\unexpanded\def\font_style_SmallCapped{\typo_capitals_smallcaps\font_style_pseudoSmallCapped\font_style_realSmallCapped}
+
\unexpanded\def\autocap{\ifmmode\expandafter\normalcap\else\expandafter\smallcapped\fi}
\appendtoks
@@ -171,6 +186,10 @@
\let\normalWORD \WORD
\let\normalword \word
+\let\font_style_normalsmallcapped\font_style_smallcapped
+\let\font_style_normalWORD \WORD
+\let\font_style_normalword \word
+
%D As suggested by WS:
\unexpanded\def\notsmallcapped{\groupedcommand{\setcharactercasing[\v!word]\signalcharacter}{}}
@@ -183,21 +202,22 @@
%D
%D \showsetup{setupcapitals}
-\let\normalsmallcapped\smallcapped
+\installcorenamespace{capitals}
-\definesystemvariable{kk}
+\installsetuponlycommandhandler \??capitals {capitals}
-\unexpanded\def\setupcapitals
- {\dosingleempty\typo_capitals_setup}
+\let\normalsmallcapped\smallcapped
-\def\typo_capitals_setup[#1]% todo: don't use grouping just a switch
- {\getparameters[\??kk][#1]%
- \doifelse\@@kktitle\v!yes
- {\definealternativestyle[\v!capital ][\normalsmallcapped][\normalsmallcapped]%
- \definealternativestyle[\v!smallcaps][\sc ][\sc ]}
- {\definealternativestyle[\v!capital ][\normalsmallcapped][\normalWORD ]%
- \definealternativestyle[\v!smallcaps][\sc ][\normalWORD ]}%
- \doifelse\@@kksc\v!yes\userealcaps\usepseudocaps}
+\appendtoks
+ \doifelse{\directcapitalsparameter\c!title}\v!yes
+ {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalsmallcapped]%
+ \definealternativestyle[\v!smallcaps][\sc][\sc]}
+ {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalWORD]%
+ \definealternativestyle[\v!smallcaps][\sc][\font_style_normalWORD]}%
+ \doifelse{\directcapitalsparameter\s!sc}\v!yes
+ \userealcaps
+ \usepseudocaps
+\to \everysetupcapitals
\let\uppercased\normalWORD
\let\lowercased\normalword
diff --git a/Master/texmf-dist/tex/context/base/typo-cln.lua b/Master/texmf-dist/tex/context/base/typo-cln.lua
index c6a2707910d..be00ac10df1 100644
--- a/Master/texmf-dist/tex/context/base/typo-cln.lua
+++ b/Master/texmf-dist/tex/context/base/typo-cln.lua
@@ -28,9 +28,10 @@ local tasks = nodes.tasks
local texattribute = tex.attribute
-local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
+local unsetvalue = attributes.unsetvalue
+
local glyph_code = nodecodes.glyph
local uccodes = characters.uccodes
@@ -52,7 +53,7 @@ local function process(namespace,attribute,head)
if resetter[char] then
inline = false
elseif not inline then
- local a = has_attribute(n,attribute)
+ local a = n[attribute]
if a == 1 then -- currently only one cleaner so no need to be fancy
local upper = uccodes[char]
if type(upper) == "table" then
diff --git a/Master/texmf-dist/tex/context/base/typo-del.mkiv b/Master/texmf-dist/tex/context/base/typo-del.mkiv
index b2213db65bb..82cc7472d44 100644
--- a/Master/texmf-dist/tex/context/base/typo-del.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-del.mkiv
@@ -17,14 +17,19 @@
% THIS IS OBSOLETE:
-\unexpanded\def\setuphyphenmark
- {\dodoubleargument\getparameters[\??kp]}
+\installcorenamespace{hyphenmarksign} % let's not waste a setuphandler (yet)
\unexpanded\def\setuphyphenmark[#1]% sign=normal|wide
- {\dodoubleargument\getparameters[\??kp][#1]%
- \doifelse\@@kpsign {\v!normal}% was inset?
- {\let\textmodehyphen\normalhyphen \let\textmodehyphendiscretionary\normalhyphendiscretionary}
- {\let\textmodehyphen\composedhyphen\let\textmodehyphendiscretionary\composedhyphendiscretionary}}
+ {\getdummyparameters[#1]%
+ \expandnamespaceparameter\??hyphenmarksign\dummyparameter\c!sign\v!normal}
+
+\setvalue{\??hyphenmarksign\v!normal}%
+ {\let\textmodehyphen\normalhyphen
+ \let\textmodehyphendiscretionary\normalhyphendiscretionary}
+
+\setvalue{\??hyphenmarksign\v!wide}%
+ {\let\textmodehyphen\composedhyphen
+ \let\textmodehyphendiscretionary\composedhyphendiscretionary}
\setuphyphenmark[\c!sign=\v!wide]
@@ -32,19 +37,19 @@
\definesymbol[\c!righthyphen] [\languageparameter\c!righthyphen]
\definesymbol[\c!hyphen] [\languageparameter\c!hyphen]
-\def\normalhyphen
+\unexpanded\def\normalhyphen
{\hbox{\directsymbol\empty\c!hyphen}}
-\def\composedhyphen
+\unexpanded\def\composedhyphen
{\hbox{\directsymbol\empty\c!compoundhyphen}}
-\def\normalhyphendiscretionary
+\unexpanded\def\normalhyphendiscretionary
{\discretionary
{\hbox{\directsymbol\empty\c!righthyphen}}
{\hbox{\directsymbol\empty\c!lefthyphen}}
{\hbox{\directsymbol\empty\c!hyphen}}}
-\def\composedhyphendiscretionary
+\unexpanded\def\composedhyphendiscretionary
{\discretionary
{\hbox{\directsymbol\empty\c!rightcompoundhyphen}}
{\hbox{\directsymbol\empty\c!leftcompoundhyphen}}
@@ -219,8 +224,10 @@
[\c!middlespeech]
[\leftboundarycharacter\c!middlespeech{speech}]
-\appendtoks\def\quotation#1{"#1"}\to\simplifiedcommands
-\appendtoks\def\quote #1{'#1'}\to\simplifiedcommands
+\appendtoks
+ \def\quotation#1{"#1"}%
+ \def\quote #1{'#1'}%
+\to \everysimplifycommands
%D The next feature was so desperately needed by Giuseppe
%D Bilotta that he made a module for it. Since this is a
@@ -352,17 +359,17 @@
\ignorespaces}
\def\typo_delimited_stop_par
- {\removeunwantedspaces
- \removelastskip
- \rightdelimitedtextmark
- \endgroup
- \popmacro\checkindentation
- \typo_delimited_stop_par_indeed
- \delimitedtextparameter\c!after
- \doifsomething{\delimitedtextparameter\c!spaceafter}
- {\blank[\delimitedtextparameter\c!spaceafter]}%
- \useindentnextparameter\delimitedtextparameter
- \dorechecknextindentation}% AM: This was missing!
+ {\removeunwantedspaces
+ \removelastskip
+ \rightdelimitedtextmark
+ \carryoverpar\endgroup % new per 2013-01-21 ... please left floats
+ \popmacro\checkindentation
+ \typo_delimited_stop_par_indeed
+ \delimitedtextparameter\c!after
+ \doifsomething{\delimitedtextparameter\c!spaceafter}
+ {\blank[\delimitedtextparameter\c!spaceafter]}%
+ \useindentnextparameter\delimitedtextparameter
+ \dorechecknextindentation}% AM: This was missing!
\def\typo_delimited_start_txt
{\let\typo_delimited_stop\typo_delimited_stop_txt
@@ -469,6 +476,7 @@
\fi
\strut % new, needed below
\delimitedtextparameter#1% unhbox\scratchbox
+ \penalty\plustenthousand % new per 2013-03-09 WS mailing list
\hskip\d_typo_delimited_signal % +- \prewordbreak
\fi
\endgroup}
@@ -530,10 +538,10 @@
\def\typo_delimited_fontdriven_b
{\dostarttagged\t!delimited\currentdelimitedtext
- \languageparameter{\c!left\currentdelimitedtext}}
+ \languageparameter{\c!left\currentparentdelimitedtext}}% was: \currentdelimitedtext
\def\typo_delimited_fontdriven_e
- {\languageparameter{\c!right\currentdelimitedtext}%
+ {\languageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
\dostoptagged
\typo_delimited_pop}
diff --git a/Master/texmf-dist/tex/context/base/typo-dig.lua b/Master/texmf-dist/tex/context/base/typo-dig.lua
index f11b3b0e378..e6c65399d19 100644
--- a/Master/texmf-dist/tex/context/base/typo-dig.lua
+++ b/Master/texmf-dist/tex/context/base/typo-dig.lua
@@ -19,56 +19,53 @@ local report_digits = logs.reporter("typesetting","digits")
local nodes, node = nodes, node
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
-local hpack_node = node.hpack
-local traverse_id = node.traverse_id
-local insert_before = node.insert_before
-local insert_after = node.insert_after
+local hpack_node = node.hpack
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
-local tasks = nodes.tasks
+local nodepool = nodes.pool
+local tasks = nodes.tasks
-local new_glue = nodepool.glue
+local new_glue = nodepool.glue
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local chardata = fonthashes.characters
-local quaddata = fonthashes.quads
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
-local v_reset = interfaces.variables.reset
+local v_reset = interfaces.variables.reset
-local charbase = characters.data
-local getdigitwidth = fonts.helpers.getdigitwidth
+local charbase = characters.data
+local getdigitwidth = fonts.helpers.getdigitwidth
-typesetters = typesetters or { }
-local typesetters = typesetters
+typesetters = typesetters or { }
+local typesetters = typesetters
-typesetters.digits = typesetters.digits or { }
-local digits = typesetters.digits
+typesetters.digits = typesetters.digits or { }
+local digits = typesetters.digits
-digits.actions = { }
-local actions = digits.actions
+digits.actions = { }
+local actions = digits.actions
-local a_digits = attributes.private("digits")
-digits.attribute = a_digits
+local a_digits = attributes.private("digits")
+digits.attribute = a_digits
-- at some point we can manipulate the glyph node directly and then i need
-- to rewrite this
function nodes.aligned(head,start,stop,width,how)
if how == "flushright" or how == "middle" then
- head, start = insert_before(head,start,new_glue(0,65536,65536))
+ head, start = insert_node_before(head,start,new_glue(0,65536,65536))
end
if how == "flushleft" or how == "middle" then
- head, stop = insert_after(head,stop,new_glue(0,65536,65536))
+ head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
end
local prv, nxt = start.prev, stop.next
start.prev, stop.next = nil, nil
@@ -95,7 +92,7 @@ actions[1] = function(head,start,attribute,attr)
local oldwidth, newwidth = start.width, getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
- report_digits("digit trigger %s, instance %s, char 0x%05X, unicode 0x%05X, delta %s",
+ report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
attr%100,div(attr,100),char,what,newwidth-oldwidth)
end
head, start = nodes.aligned(head,start,start,newwidth,"middle")
@@ -109,15 +106,15 @@ local function process(namespace,attribute,head)
local done, current, ok = false, head, false
while current do
if current.id == glyph_code then
- local attr = has_attribute(current,attribute)
+ local attr = current[attribute]
if attr and attr > 0 then
- unset_attribute(current,attribute)
+ current[attribute] = unsetvalue
local action = actions[attr%100] -- map back to low number
if action then
head, current, ok = action(head,current,attribute,attr)
done = done and ok
elseif trace_digits then
- report_digits("unknown digit trigger %s",attr)
+ report_digits("unknown digit trigger %a",attr)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/typo-dir.lua b/Master/texmf-dist/tex/context/base/typo-dir.lua
index 620862e85a8..7e5f8c2d3a8 100644
--- a/Master/texmf-dist/tex/context/base/typo-dir.lua
+++ b/Master/texmf-dist/tex/context/base/typo-dir.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['typo-dir'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
+-- todo: also use end_of_math here?
local next, type = next, type
local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match
@@ -20,9 +20,6 @@ local trace_directions = false trackers.register("typesetters.directions", func
local report_directions = logs.reporter("typesetting","directions")
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
local traverse_id = node.traverse_id
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
@@ -170,6 +167,8 @@ end
-- todo: use new dir functions
+local s_isol = fonts.analyzers.states.isol
+
function directions.process(namespace,attribute,start) -- todo: make faster
if not start.next then
return start, false
@@ -182,7 +181,6 @@ function directions.process(namespace,attribute,start) -- todo: make faster
local lro, rlo, prevattr, inmath = false, false, 0, false
while current do
local id = current.id
---~ print(id,attribute,has_attribute(current,attribute))
if skipmath and id == math_code then
local subtype = current.subtype
if subtype == beginmath_code then
@@ -196,9 +194,9 @@ function directions.process(namespace,attribute,start) -- todo: make faster
elseif inmath then
current = current.next
else
- local attr = has_attribute(current,attribute)
+ local attr = current[attribute]
if attr and attr > 0 then
- -- unset_attribute(current,attribute) -- slow, needed?
+ -- current[attribute] = unsetvalue -- slow, needed?
if attr == 1 then
-- bidi parsing mode
elseif attr ~= prevattr then
@@ -242,7 +240,7 @@ function directions.process(namespace,attribute,start) -- todo: make faster
end
elseif lro or override < 0 then
if d == "r" or d == "al" then
- set_attribute(current,a_state,4) -- maybe better have a special bidi attr value -> override (9) -> todo
+ current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
if trace_directions then
list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr)
end
@@ -276,7 +274,7 @@ function directions.process(namespace,attribute,start) -- todo: make faster
end
end
elseif d == "l" or d == "en" then -- european number
- if autodir <= 0 then
+ if autodir <= 0 then -- could be option
force_auto_left_before()
end
elseif d == "r" or d == "al" then -- arabic number
@@ -285,9 +283,11 @@ function directions.process(namespace,attribute,start) -- todo: make faster
end
elseif d == "an" then -- arabic number
-- actually this is language dependent ...
- if autodir <= 0 then
---~ force_auto_right_before()
- force_auto_left_before()
+-- if autodir <= 0 then
+-- force_auto_left_before()
+-- end
+ if autodir >= 0 then
+ force_auto_right_before()
end
elseif d == "lro" then -- Left-to-Right Override -> right becomes left
if trace_directions then
@@ -421,7 +421,7 @@ end
--~ if n then
--~ local id = n.id
--~ if id == glyph_code then
---~ local attr = has_attribute(n,attribute)
+--~ local attr = n[attribute]
--~ if attr and attr > 0 then
--~ local d = chardirs[n.char]
--~ if d == "r" or d == "al" then -- override
diff --git a/Master/texmf-dist/tex/context/base/typo-dir.mkiv b/Master/texmf-dist/tex/context/base/typo-dir.mkiv
index b46be47631a..d35dfeb669b 100644
--- a/Master/texmf-dist/tex/context/base/typo-dir.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-dir.mkiv
@@ -64,11 +64,11 @@
\setupdirections % maybe start/stop
[\c!bidi=\v!off]
-\unexpanded\def\bidilre{\utfchar{"0x202A}} % maybe \edef's
-\unexpanded\def\bidirle{\utfchar{"0x202B}}
-\unexpanded\def\bidipop{\utfchar{"0x202C}}
-\unexpanded\def\bidilro{\utfchar{"0x202D}}
-\unexpanded\def\bidirlo{\utfchar{"0x202E}}
+\unexpanded\edef\bidilre{\normalUchar"202A} % maybe \edef's
+\unexpanded\edef\bidirle{\normalUchar"202B}
+\unexpanded\edef\bidipop{\normalUchar"202C}
+\unexpanded\edef\bidilro{\normalUchar"202D}
+\unexpanded\edef\bidirlo{\normalUchar"202E}
\unexpanded\def\dirlre{\ifcase\directionsbidimode\or\bidilre\or\textdir TLT\fi}
\unexpanded\def\dirrle{\ifcase\directionsbidimode\or\bidirle\or\textdir TRT\fi}
diff --git a/Master/texmf-dist/tex/context/base/typo-itc.lua b/Master/texmf-dist/tex/context/base/typo-itc.lua
index d294dd60a2e..b39ea2f23cd 100644
--- a/Master/texmf-dist/tex/context/base/typo-itc.lua
+++ b/Master/texmf-dist/tex/context/base/typo-itc.lua
@@ -20,12 +20,13 @@ local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
local glue_code = nodecodes.glue
local disc_code = nodecodes.disc
+local math_code = nodecodes.math
local tasks = nodes.tasks
local insert_node_after = node.insert_after
local delete_node = nodes.delete
-local has_attribute = node.has_attribute
+local end_of_math = node.end_of_math
local texattribute = tex.attribute
local a_italics = attributes.private("italics")
@@ -34,8 +35,6 @@ local unsetvalue = attributes.unsetvalue
local new_correction_kern = nodes.pool.fontkern
local new_correction_glue = nodes.pool.glue
-local points = number.points
-
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local italicsdata = fonthashes.italics
@@ -70,7 +69,7 @@ local function setitalicinfont(font,char)
end
end
if trace_italics then
- report_italics("setting italic correction of %s (U+%05X) of font %s to %s",utfchar(char),char,font,points(italic))
+ report_italics("setting italic correction of %C of font %a to %p",char,font,italic)
end
character.italic_correction = italic or 0
end
@@ -101,18 +100,18 @@ local function process(namespace,attribute,head)
if italic ~= 0 then
if data then
if trace_italics then
- report_italics("ignoring %s between italic %s and italic %s",points(italic),utfchar(prevchar),utfchar(char))
+ report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
end
else
if trace_italics then
- report_italics("inserting %s between italic %s and regular %s",points(italic),utfchar(prevchar),utfchar(char))
+ report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
end
insert_node_after(head,previous,new_correction_kern(italic))
done = true
end
elseif inserted and data then
if trace_italics then
- report_italics("deleting last correction before %s",utfchar(char))
+ report_italics("deleting last correction before %C",char)
end
delete_node(head,inserted)
else
@@ -121,7 +120,7 @@ local function process(namespace,attribute,head)
lastfont = font
end
if data then
- local attr = forcedvariant or has_attribute(current,attribute)
+ local attr = forcedvariant or current[attribute]
if attr and attr > 0 then
local cd = data[char]
if not cd then
@@ -155,16 +154,18 @@ local function process(namespace,attribute,head)
elseif id == glue_code then
if italic ~= 0 then
if trace_italics then
- report_italics("inserting %s between italic %s and glue",points(italic),utfchar(prevchar))
+ report_italics("inserting %p between italic %C and glue",italic,prevchar)
end
inserted = new_correction_glue(italic) -- maybe just add ? else problem with penalties
insert_node_after(head,previous,inserted)
italic = 0
done = true
end
+ elseif id == math_code then
+ current = end_of_math(current)
elseif italic ~= 0 then
if trace_italics then
- report_italics("inserting %s between italic %s and whatever",points(italic),utfchar(prevchar))
+ report_italics("inserting %p between italic %C and whatever",italic,prevchar)
end
inserted = nil
insert_node_after(head,previous,new_correction_kern(italic))
@@ -175,7 +176,7 @@ local function process(namespace,attribute,head)
end
if italic ~= 0 and lastattr > 1 then -- more control is needed here
if trace_italics then
- report_italics("inserting %s between italic %s and end of list",points(italic),utfchar(prevchar))
+ report_italics("inserting %p between italic %C and end of list",italic,prevchar)
end
insert_node_after(head,previous,new_correction_kern(italic))
done = true
@@ -236,7 +237,7 @@ function commands.setupitaliccorrection(option) -- no grouping !
texattribute[a_italics] = variant
end
if trace_italics then
- report_italics("force: %s, variant: %s",tostring(forcedvariant),tostring(variant ~= unsetvalue and variant))
+ report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant)
end
end
diff --git a/Master/texmf-dist/tex/context/base/typo-itc.mkvi b/Master/texmf-dist/tex/context/base/typo-itc.mkvi
index 809c7a25245..4a3bba51850 100644
--- a/Master/texmf-dist/tex/context/base/typo-itc.mkvi
+++ b/Master/texmf-dist/tex/context/base/typo-itc.mkvi
@@ -18,7 +18,7 @@
%D The brave might try:
%D
%D \starttyping
-%D \definefontfeature[default][default][itlc=yes,notextitalics=yes]
+%D \definefontfeature[default][default][itlc=yes,textitalics=yes]
%D \setupitaliccorrection[global,always]
%D \stoptyping
diff --git a/Master/texmf-dist/tex/context/base/typo-itm.mkiv b/Master/texmf-dist/tex/context/base/typo-itm.mkiv
new file mode 100644
index 00000000000..eb47e407616
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/typo-itm.mkiv
@@ -0,0 +1,273 @@
+%D \module
+%D [ file=typo-itm, % comes from core-mis
+%D version=2012.06.28,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Item Lists,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Node Macros / Item Lists}
+
+\unprotect
+
+%D This is a really old mechanism that we once used for questionnaires. As it is documented
+%D we keep it around. A more modern implementation would probably use another approach
+%D but I don't want to spend too much time on it now. There are a couple of changes:
+%D
+%D \startitemize
+%D \startitem textwidth sets the combined width \stopitem
+%D \startitem width sets the symbolwidth \stopitem
+%D \startitem alternative is used instead of location \stopitem
+%D \stopitemize
+%D
+%D \startbuffer
+%D \items[alternative=left]{a,b,c}
+%D \items[alternative=left,align=middle,textalign=flushright,distance=1em]{a,b,c}
+%D \items[alternative=right]{a,b,c}
+%D \items[alternative=inmargin]{a,b,c}
+%D \items[alternative=top]{a,b,c}
+%D \items[alternative=bottom]{a,b,c}
+%D \items[alternative=bottom,align=flushleft,textstyle=bold,color=red,textcolor=green]{a,b,c}
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D This renders as:
+%D
+%D \getbuffer
+
+\installcorenamespace{items}
+\installcorenamespace{itemsalternatives}
+\installcorenamespace{itemshorizontal}
+\installcorenamespace{itemsvertical}
+
+\installcommandhandler \??items {items} \??items
+
+\setupitems
+ [\c!alternative=\v!left,
+ \c!symbol=5,
+ \c!textwidth=\availablehsize,
+ \c!align=\v!middle,
+ \c!textalign=\v!flushleft,
+ \c!distance=\zeropoint,
+ %\c!n=,
+ \c!before=\blank,
+ \c!inbetween={\blank[\v!medium]},
+ \c!after=\blank]
+
+\unexpanded\def\items
+ {\dosingleempty\typo_items_process}
+
+\newcount\c_typo_items_n
+\newcount\c_typo_items_m
+\newdimen\d_typo_items_text_width
+\newdimen\d_typo_items_symbol_width
+\newdimen\d_typo_items_distance
+\newbox \b_typo_items_symbols
+\newbox \b_typo_items_texts
+
+\def\typo_items_process[#1]#2%
+ {\bgroup
+ \setupitems[#1]%
+ \edef\p_typo_items_alternative{\itemsparameter\c!alternative}%
+ \ifcsname\??itemsalternatives\p_typo_items_alternative\endcsname \else
+ \let\p_typo_items_alternative\v!left
+ \fi
+ \let\currentitems\p_typo_items_alternative
+ \setupcurrentitems[#1]%
+ %
+ \edef\p_typo_items_textwidth{\itemsparameter\c!textwidth}%
+ \ifx\p_typo_items_textwidth\empty
+ \d_typo_items_text_width\availablehsize
+ \else
+ \d_typo_items_text_width\p_typo_items_textwidth\relax
+ \fi
+ %
+ \edef\p_typo_items_width{\itemsparameter\c!width}%
+ \ifx\p_typo_items_width\empty
+ \d_typo_items_symbol_width1.5\emwidth
+ \else
+ \d_typo_items_symbol_width\p_typo_items_width\relax
+ \fi
+ %
+ \edef\p_typo_items_distance{\itemsparameter\c!distance}%
+ \ifx\p_typo_items_distance\empty
+ \d_typo_items_distance\zeropoint
+ \else
+ \d_typo_items_distance\p_typo_items_distance\relax
+ \fi
+ %
+ \edef\p_typo_items_symbol{\itemsparameter\c!symbol}%
+ \ifx\p_typo_items_symbol\empty
+ \let\m_typo_items_symbol\firstofoneargument
+ \else\ifx\p_typo_items_symbol\v!none
+ \let\p_typo_items_symbol\empty
+ \let\m_typo_items_symbol\firstofoneargument
+ \else
+ \doifconversiondefinedelse\p_typo_items_symbol
+ {\def\m_typo_items_symbol{\convertnumber\p_typo_items_symbol}}
+ {\doifsymboldefinedelse\p_typo_items_symbol
+ {\def\m_typo_items_symbol{\symbol[\p_typo_items_symbol]\gobbleoneargument}}
+ {\let\m_typo_items_symbol\firstofoneargument}}%
+ \fi\fi
+ %
+ \edef\p_typo_items_align{\itemsparameter\c!align}%
+ \edef\p_typo_items_textalign{\itemsparameter\c!textalign}%
+ %
+ \edef\p_typo_items_n{\itemsparameter\c!n}%
+ \ifx\p_typo_items_n\empty
+ \getcommalistsize[#2]%
+ \c_typo_items_n\commalistsize\relax
+ \else
+ \c_typo_items_n\p_typo_items_n\relax
+ \fi
+ %
+ \parindent\zeropoint
+ \dontcomplain
+ %
+ \itemsparameter\c!before
+ \csname\??itemsalternatives\p_typo_items_alternative\endcsname{#2}%
+ \itemsparameter\c!after
+ \egroup}
+
+% rendering
+
+\setvalue{\??itemshorizontal\v!margin}#1%
+ {\ifnum\c_typo_items_m=\plusone\hss\else\hfill\fi
+ \strut#1%
+ \ifnum\c_typo_items_m=\c_typo_items_n\hss\else\hfill\fi}
+
+\setvalue{\??itemshorizontal\s!unknown}%
+ {\simplealignedbox\scratchwidth\m_typo_items_align}
+
+\def\typo_items_item_horizontal
+ {\advance\c_typo_items_m\plusone
+ \csname\??itemshorizontal
+ \ifcsname\??itemshorizontal\p_typo_items_align\endcsname
+ \p_typo_items_align
+ \else
+ \s!unknown
+ \fi
+ \endcsname}
+
+\setvalue{\??itemsvertical\s!unknown}%
+ {\simplealignedbox\scratchwidth\m_typo_items_align}
+
+\def\typo_items_item_vertical
+ {\advance\c_typo_items_m\plusone
+ \csname\??itemsvertical
+ \ifcsname\??itemsvertical\p_typo_items_align\endcsname
+ \p_typo_items_align
+ \else
+ \s!unknown
+ \fi
+ \endcsname}
+
+\def\typo_items_make_horizontal#1%
+ {\divide\scratchwidth\c_typo_items_n
+ \hbox{#1}}
+
+\def\typo_items_make_vertical#1%
+ {\vbox{#1}}
+
+\def\typo_items_construct_items_boxes#1%
+ {\setbox\b_typo_items_texts\hbox
+ {\c_typo_items_m\zerocount
+ \let\m_typo_items_align\p_typo_items_textalign
+ \scratchwidth\d_typo_items_text_width
+ \useitemsstyleandcolor\c!textstyle\c!textcolor
+ \typo_items_make{\processcommalist[#1]\typo_items_item}}%
+ \ifx\p_typo_items_symbol\empty
+ \setbox\b_typo_items_symbols\emptyhbox
+ \else
+ \setbox\b_typo_items_symbols\hbox
+ {\c_typo_items_m\zerocount
+ \let\m_typo_items_align\p_typo_items_align
+ \scratchwidth\d_typo_items_symbol_width
+ \useitemsstyleandcolor\c!style\c!color
+ \typo_items_make{\dorecurse\c_typo_items_n{\typo_items_item{\strut\m_typo_items_symbol\recurselevel}}}}%
+ \fi}
+
+% alternatives:
+
+\defineitems[\v!top][\c!width=\d_typo_items_text_width,\c!textalign=\itemsparameter\c!align]
+
+\setvalue{\??itemsalternatives\v!top}#1%
+ {\let\typo_items_make\typo_items_make_horizontal
+ \let\typo_items_item\typo_items_item_horizontal
+ \typo_items_construct_items_boxes{#1}%
+ \noindent\vbox\bgroup
+ \forgetall
+ \ifvoid\b_typo_items_symbols \else
+ \box\b_typo_items_symbols
+ \itemsparameter\c!inbetween
+ \nointerlineskip
+ \fi
+ \box\b_typo_items_texts\
+ \egroup}
+
+\defineitems[\v!bottom][\c!width=\d_typo_items_text_width,\c!textalign=\itemsparameter\c!align]
+
+\setvalue{\??itemsalternatives\v!bottom}#1%
+ {\let\typo_items_make\typo_items_make_horizontal
+ \let\typo_items_item\typo_items_item_horizontal
+ \typo_items_construct_items_boxes{#1}%
+ \noindent\vbox\bgroup
+ \forgetall
+ \box\b_typo_items_texts
+ \ifvoid\b_typo_items_symbols \else
+ \itemsparameter\c!inbetween
+ \nointerlineskip
+ \box\b_typo_items_symbols
+ \fi
+ \egroup}
+
+\defineitems[\v!inmargin][\c!width=1.5\emwidth,\c!align=\v!flushright,\c!distance=\leftmargindistance]
+
+\setvalue{\??itemsalternatives\v!inmargin}#1%
+ {\let\typo_items_make\typo_items_make_vertical
+ \let\typo_items_item\typo_items_item_vertical
+ \typo_items_construct_items_boxes{#1}%
+ \noindent\hbox\bgroup
+ \ifvoid\b_typo_items_symbols \else
+ \llap{\box\b_typo_items_symbols\hskip\d_typo_items_distance}%
+ \fi
+ \box\b_typo_items_texts
+ \egroup}
+
+\defineitems[\v!left][\c!width=1.5\emwidth,\c!align=\v!flushleft]
+
+\setvalue{\??itemsalternatives\v!left}#1%
+ {\let\typo_items_make\typo_items_make_vertical
+ \let\typo_items_item\typo_items_item_vertical
+ \advance\d_typo_items_text_width-\dimexpr\d_typo_items_symbol_width+\d_typo_items_distance\relax
+ \typo_items_construct_items_boxes{#1}%
+ \noindent\hbox\bgroup
+ \ifvoid\b_typo_items_symbols \else
+ \box\b_typo_items_symbols
+ \kern\d_typo_items_distance
+ \fi
+ \box\b_typo_items_texts
+ \egroup}
+
+\defineitems[\v!right][\c!width=1.5\emwidth,\c!align=\v!flushright]
+
+\setvalue{\??itemsalternatives\v!right}#1%
+ {\let\typo_items_make\typo_items_make_vertical
+ \let\typo_items_item\typo_items_item_vertical
+ \advance\d_typo_items_text_width-\dimexpr\d_typo_items_symbol_width+\d_typo_items_distance\relax
+ \typo_items_construct_items_boxes{#1}%
+ \noindent\hbox\bgroup
+ \box\b_typo_items_texts
+ \ifvoid\b_typo_items_symbols \else
+ \kern\d_typo_items_distance
+ \box\b_typo_items_symbols
+ \fi
+ \egroup}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/typo-krn.lua b/Master/texmf-dist/tex/context/base/typo-krn.lua
index 54b1fd2d3cf..fb28d3b2de4 100644
--- a/Master/texmf-dist/tex/context/base/typo-krn.lua
+++ b/Master/texmf-dist/tex/context/base/typo-krn.lua
@@ -6,15 +6,11 @@ if not modules then modules = { } end modules ['typo-krn'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
-
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local utfchar = utf.char
local nodes, node, fonts = nodes, node, fonts
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
local find_node_tail = node.tail or node.slide
local free_node = node.free
local free_nodelist = node.flush_list
@@ -22,6 +18,7 @@ local copy_node = node.copy
local copy_nodelist = node.copy_list
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
+local end_of_math = node.end_of_math
local texattribute = tex.attribute
local unsetvalue = attributes.unsetvalue
@@ -31,6 +28,7 @@ local tasks = nodes.tasks
local new_gluespec = nodepool.gluespec
local new_kern = nodepool.kern
+local new_glue = nodepool.glue
local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
@@ -42,6 +40,7 @@ local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
+local math_code = nodecodes.math
local kerning_code = kerncodes.kerning
local userkern_code = kerncodes.userkern
@@ -55,6 +54,8 @@ local chardata = fonthashes.characters
local quaddata = fonthashes.quads
local markdata = fonthashes.marks
+local v_max = interfaces.variables.max
+
typesetters = typesetters or { }
local typesetters = typesetters
@@ -86,16 +87,47 @@ kerns.keeptogether = false -- just for fun (todo: control setting with key/value
-- can be optimized .. the prev thing .. but hardly worth the effort
+local function kern_injector(fillup,kern)
+ if fillup then
+ local g = new_glue(kern)
+ local s = g.spec
+ s.stretch = kern
+ s.stretch_order = 1
+ return g
+ else
+ return new_kern(kern)
+ end
+end
+
+local function spec_injector(fillup,width,stretch,shrink)
+ if fillup then
+ local s = new_gluespec(width,2*stretch,2*shrink)
+ s.stretch_order = 1
+ return s
+ else
+ return new_gluespec(width,stretch,shrink)
+ end
+end
+
+-- needs checking ... base mode / node mode
+
local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch
local start, done, lastfont = head, false, nil
local keepligature = kerns.keepligature
local keeptogether = kerns.keeptogether
+ local fillup = false
while start do
-- faster to test for attr first
- local attr = force or has_attribute(start,attribute)
+ local attr = force or start[attribute]
if attr and attr > 0 then
- unset_attribute(start,attribute)
+ start[attribute] = unsetvalue
local krn = mapping[attr]
+ if krn == v_max then
+ krn = .25
+ fillup = true
+ else
+ fillup = false
+ end
if krn and krn ~= 0 then
local id = start.id
if id == glyph_code then
@@ -136,7 +168,7 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that
if not pid then
-- nothing
elseif pid == kern_code then
- if prev.subtype == kerning_code or has_attribute(prev,a_fontkern) then
+ if prev.subtype == kerning_code or prev[a_fontkern] then
if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
-- keep 'm
else
@@ -155,12 +187,12 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that
local kerns = chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
krn = kern + quaddata[lastfont]*krn -- here
- insert_node_before(head,start,new_kern(krn))
+ insert_node_before(head,start,kern_injector(fillup,krn))
done = true
end
else
krn = quaddata[lastfont]*krn -- here
- insert_node_before(head,start,new_kern(krn))
+ insert_node_before(head,start,kern_injector(fillup,krn))
done = true
end
elseif pid == disc_code then
@@ -218,7 +250,7 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that
else
krn = quaddata[lastfont]*krn -- here
end
- disc.replace = new_kern(krn)
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
end
end
end
@@ -229,7 +261,7 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that
local w = s.width
if w > 0 then
local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
- start.spec = new_gluespec(width,stretch*width/w,shrink*width/w)
+ start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
done = true
end
end
@@ -244,14 +276,16 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that
elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
local p = start.prev
if p and p.id ~= glue_code then
- insert_node_before(head,start,new_kern(quaddata[lastfont]*krn))
+ insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
local n = start.next
if n and n.id ~= glue_code then
- insert_node_after(head,start,new_kern(quaddata[lastfont]*krn))
+ insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
+ elseif id == math_code then
+ start = end_of_math(start)
end
end
end
@@ -265,7 +299,10 @@ end
local enabled = false
function kerns.set(factor)
- if factor ~= 0 then
+ if factor ~= v_max then
+ factor = tonumber(factor) or 0
+ end
+ if factor == v_max or factor ~= 0 then
if not enabled then
tasks.enableaction("processors","typesetters.kerns.handler")
enabled = true
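The v_max handling above is what the \stretched macro in typo-krn.mkiv below relies on: a factor of max keeps a modest base kern of a quarter quad but flags it as fillup, so kern_injector and spec_injector emit glue with first order stretch instead of fixed kerns. A standalone sketch of just that factor decision (names chosen for illustration):

-- sketch only: how the character kerning factor is interpreted above
local v_max = "max"                     -- interfaces.variables.max

local function interpretfactor(factor)
  if factor == v_max then
    return 0.25, true                   -- quarter quad base kern, stretchable
  else
    return tonumber(factor) or 0, false -- plain numeric factor, fixed kerns
  end
end

print(interpretfactor("max"))           --> 0.25   true
print(interpretfactor("0.125"))         --> 0.125  false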
diff --git a/Master/texmf-dist/tex/context/base/typo-krn.mkiv b/Master/texmf-dist/tex/context/base/typo-krn.mkiv
index e1e404a7bef..a47bd2ac578 100644
--- a/Master/texmf-dist/tex/context/base/typo-krn.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-krn.mkiv
@@ -40,7 +40,7 @@
\def\typo_kerning_set
{\usecharacterkerningstyleandcolor\c!style\c!color % goodie, maybe also strut
- \ctxcommand{setcharacterkerning(\characterkerningparameter\c!factor)}}
+ \ctxcommand{setcharacterkerning("\characterkerningparameter\c!factor")}}
\unexpanded\def\resetcharacterkerning % fast one
{\attribute\kernattribute\attributeunsetvalue}
@@ -62,8 +62,6 @@
%D then we always would get a command defined which is not beforehand
%D a good idea.
-\def\v!kerncharacters{kerncharacters} % no time now for translations
-
\definecharacterkerning [\v!kerncharacters] [\c!factor=.125]
% Here we need to keep the groupedcommand solution as it is
@@ -87,4 +85,57 @@
{\let\currentcharacterkerning\v!kerncharacters
\typo_kerning_set}
+%D \macros
+%D {stretched}
+%D
+%D Stretching characters in a word is a sort of typographical
+%D murder. Nevertheless we support this manipulation for use in,
+%D for instance, titles.
+%D
+%D \starttyping
+%D \hbox to 5cm{\stretched{to the limit}}
+%D \stretched{to the limit}
+%D \stretched[width=10cm]{to the limit}
+%D \stoptyping
+%D
+%D \typebuffer
+%D
+%D or
+%D
+%D \startexample
+%D \getbuffer
+%D \stopexample
+%D
+%D \showsetup{stretched}
+%D
+%D This command replaces the old \MKII\ variant.
+
+\definecharacterkerning
+ [\v!stretched]
+ [\c!factor=\v!max,
+ \c!width=\availablehsize]
+
+\unexpanded\def\setupstretched
+ {\setupcharacterkerning[\v!stretched]}
+
+\unexpanded\def\stretched
+ {\dosingleempty\typo_kerning_stretched}
+
+\unexpanded\def\typo_kerning_stretched[#1]#2%
+ {\begingroup
+ \let\currentcharacterkerning\v!stretched
+ \iffirstargument
+ \setupcurrentcharacterkerning[#1]%
+ \fi
+ \edef\p_width{\characterkerningparameter\c!width}%
+ \ifx\p_width\empty \else
+ \hbox to \p_width
+ \fi
+ \bgroup
+ \usecharacterkerningstyleandcolor\c!style\c!color
+ \typo_kerning_set
+ #2%
+ \egroup
+ \endgroup}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/typo-lan.lua b/Master/texmf-dist/tex/context/base/typo-lan.lua
new file mode 100644
index 00000000000..50927f74493
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/typo-lan.lua
@@ -0,0 +1,72 @@
+if not modules then modules = { } end modules ['typo-lan'] = {
+ version = 1.001,
+ comment = "companion to typo-lan.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next = type, next
+
+local currentfont = font.current
+local setmetatableindex = table.setmetatableindex
+local utfbyte = utf.byte
+
+local hashes = fonts.hashes
+local fontdata = hashes.characters
+local emwidths = hashes.emwidths
+
+local frequencies = languages.frequencies or { }
+languages.frequencies = frequencies
+
+local frequencydata = { }
+local frequencyfile = string.formatters["lang-frq-%s.lua"]
+local frequencycache = { }
+
+setmetatableindex(frequencydata, function(t,language)
+ local fullname = resolvers.findfile(frequencyfile(language))
+ local v = fullname ~= "" and dofile(fullname)
+ if not v or not v.frequencies then
+ v = t.en
+ end
+ t[language] = v
+ return v
+end)
+
+setmetatableindex(frequencycache, function(t,language)
+ local dataset = frequencydata[language]
+ local frequencies = dataset.frequencies
+ if not frequencies then
+ return t.en
+ end
+ local v = { }
+ setmetatableindex(v, function(t,font)
+ local average = emwidths[font] / 2
+ if frequencies then
+ local characters = fontdata[font]
+ local sum, tot = 0, 0
+ for k, v in next, frequencies do
+ local character = characters[k] -- characters[type(k) == "number" and k or utfbyte(k)]
+ tot = tot + v
+ sum = sum + v * (character and character.width or average)
+ end
+ average = sum / tot -- widths
+ end
+ t[font] = average
+ return average
+ end)
+ t[language] = v
+ return v
+end)
+
+function frequencies.getdata(language)
+ return frequencydata[language]
+end
+
+function frequencies.averagecharwidth(language,font)
+ return frequencycache[language or "en"][font or currentfont()]
+end
+
+function commands.averagecharwidth(language,font)
+ context(frequencycache[language or "en"][font or currentfont()])
+end
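The inner index function above computes a frequency weighted average: the sum of frequency times character width divided by the sum of the frequencies, with half an em as the fallback width for characters the font does not provide. A standalone sketch of that arithmetic with made up numbers (widths in scaled points):

-- sketch only: the weighted average used in typo-lan.lua above, fake data
local frequencies = { e = 12.7, t = 9.1, a = 8.2 }         -- relative frequencies
local widths      = { e = 280000, t = 250000, a = 300000 } -- glyph widths in sp
local emwidth     = 655360                                 -- a 10pt em, say
local fallback    = emwidth / 2

local sum, tot = 0, 0
for char, freq in pairs(frequencies) do
  tot = tot + freq
  sum = sum + freq * (widths[char] or fallback)
end
print(("average character width: %.0f sp"):format(sum / tot))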
diff --git a/Master/texmf-dist/tex/context/base/typo-lan.mkiv b/Master/texmf-dist/tex/context/base/typo-lan.mkiv
new file mode 100644
index 00000000000..bb4ed204205
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/typo-lan.mkiv
@@ -0,0 +1,63 @@
+%D \module
+%D [ file=typo-lan,
+%D version=2013.03.22,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Language Goodies,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typography Macros / Languages}
+
+\unprotect
+
+\registerctxluafile{typo-lan}{1.001}
+
+%D \macros
+%D {averagecharwidth, charwidthlanguage}
+%D
+%D This is a more \MKIV-ish variant of lang-frq.mkiv. The methods are
+%D gone as one doesn't need the tables for them. The main macro is
+%D \type {\averagecharwidth} which behaves like a dimension register.
+%D
+%D I finally decided to reimplement this as I needed it for a manual
+%D (which is often a reason for such a rewrite). With some inspiring
+%D Porcupine Tree in the background it's not the worst thing to do.
+
+\def\charwidthlanguage{\currentmainlanguage}
+
+\def\averagecharwidth{\dimexpr\ctxcommand{averagecharwidth("\charwidthlanguage")}\scaledpoint\relax}
+
+\protect
+
+\continueifinputfile{typo-lan.mkiv}
+
+\setuplayout[backspace=4cm]
+
+\showframe
+
+\starttext
+
+\startbuffer
+
+\mainlanguage[en] \hsize65\averagecharwidth \normalexpanded{\inleft{\the\hsize}} \input ward \par
+\mainlanguage[de] \hsize65\averagecharwidth \normalexpanded{\inleft{\the\hsize}} \input ward \par
+\mainlanguage[nl] \hsize65\averagecharwidth \normalexpanded{\inleft{\the\hsize}} \input ward \par
+
+\stopbuffer
+
+\getbuffer \blank
+
+\switchtobodyfont[pagella]
+
+\getbuffer \blank
+
+\switchtobodyfont[tt,8pt]
+
+\getbuffer
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/typo-mar.lua b/Master/texmf-dist/tex/context/base/typo-mar.lua
index a21d9b70a1b..ec827883db1 100644
--- a/Master/texmf-dist/tex/context/base/typo-mar.lua
+++ b/Master/texmf-dist/tex/context/base/typo-mar.lua
@@ -36,11 +36,11 @@ if not modules then modules = { } end modules ['typo-mar'] = {
-- if not w then
-- -- error
-- elseif how == "horizontal" or how == "h" then
--- pdfprint("page",format(" q 1 0 0 1 %s 0 cm ", (w[1] - pdf.h) * factor))
+-- pdfprint("page",format(" q 1 0 0 1 %f 0 cm ", (w[1] - pdf.h) * factor))
-- elseif how == "vertical" or how == "v" then
--- pdfprint("page",format(" q 1 0 0 1 0 %s cm ", (w[2] - pdf.v) * factor))
+-- pdfprint("page",format(" q 1 0 0 1 0 %f cm ", (w[2] - pdf.v) * factor))
-- else
--- pdfprint("page",format(" q 1 0 0 1 %s %s cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor))
+-- pdfprint("page",format(" q 1 0 0 1 %f %f cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor))
-- end
-- end
--
@@ -56,24 +56,24 @@ if not modules then modules = { } end modules ['typo-mar'] = {
-- local latelua = nodes.pool.latelua
--
-- function anchors.node_set(tag)
--- return latelua(format("anchors.set(%q)",tag))
+-- return latelua(formatters["anchors.set(%q)"](tag))
-- end
--
-- function anchors.node_reset(tag)
--- return latelua(format("anchors.reset(%q)",tag))
+-- return latelua(formatters["anchors.reset(%q)"](tag))
-- end
--
-- function anchors.node_start_move(tag,how)
--- return latelua(format("anchors.startmove(%q,%q)",tag,how))
+-- return latelua(formatters["anchors.startmove(%q,%q)"](tag,how))
-- end
--
-- function anchors.node_stop_move(tag)
--- return latelua(format("anchors.stopmove(%q)",tag))
+-- return latelua(formatters["anchors.stopmove(%q)"](tag))
-- end
-- so far
-local format = string.format
+local format, validstring = string.format, string.valid
local insert, remove = table.insert, table.remove
local setmetatable, next = setmetatable, next
@@ -81,6 +81,7 @@ local attributes, nodes, node, variables = attributes, nodes, node, variables
local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end)
local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end)
+local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end)
local report_margindata = logs.reporter("typesetters","margindata")
@@ -92,6 +93,7 @@ local enableaction = tasks.enableaction
local variables = interfaces.variables
local conditionals = tex.conditionals
+local systemmodes = tex.systemmodes
local v_top = variables.top
local v_depth = variables.depth
@@ -113,9 +115,6 @@ local v_first = variables.first
local v_text = variables.text
local v_column = variables.column
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local unset_attribute = node.unset_attribute
local copy_node_list = node.copy_list
local slide_nodes = node.slide
local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
@@ -124,7 +123,7 @@ local free_node_list = node.flush_list
local insert_node_after = node.insert_after
local insert_node_before = node.insert_before
-local link_nodes = nodes.link
+local concat_nodes = nodes.concat
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -138,6 +137,8 @@ local kern_code = nodecodes.kern
local penalty_code = nodecodes.penalty
local whatsit_code = nodecodes.whatsit
local line_code = listcodes.line
+local cell_code = listcodes.cell
+local alignment_code = listcodes.alignment
local leftskip_code = gluecodes.leftskip
local rightskip_code = gluecodes.rightskip
local userdefined_code = whatsitcodes.userdefined
@@ -229,49 +230,56 @@ local defaults = {
local enablelocal, enableglobal -- forward reference (delayed initialization)
-local function showstore(store,banner)
+local function showstore(store,banner,location)
if next(store) then
for i, si in table.sortedpairs(store) do
local si =store[i]
- report_margindata("%s: stored at %s: %s => %s",banner,i,si.name or "no name",nodes.toutf(si.box.list))
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
end
else
- report_margindata("%s: nothing stored",banner)
+ report_margindata("%s: nothing stored in location %a",banner,location)
end
end
function margins.save(t)
setmetatable(t,defaults)
- local inline = t.inline
+ local content = texbox[t.number]
local location = t.location
local category = t.category
- local scope = t.scope
+ local inline = t.inline
+ local scope = t.scope or v_global
+ if not content then
+ report_margindata("ignoring empty margin data %a",location or "unknown")
+ return
+ end
local store
if inline then
store = inlinestore
else
store = displaystore[category][location]
if not store then
- report_margindata("invalid location: %s",location)
+ report_margindata("invalid location %a",location)
return
end
store = store[scope]
end
if not store then
- report_margindata("invalid scope: %s",scope)
+ report_margindata("invalid scope %a",scope)
return
end
if enablelocal and scope == v_local then
enablelocal()
- end
- if enableglobal and scope == v_global then
+ if enableglobal then
+ enableglobal() -- is the fallback
+ end
+ elseif enableglobal and scope == v_global then
enableglobal()
end
nofsaved = nofsaved + 1
nofstored = nofstored + 1
local name = t.name
if trace_marginstack then
- showstore(store,"before ")
+ showstore(store,"before",location)
end
if name and name ~= "" then
if inlinestore then -- todo: inline store has to be done differently (not sparse)
@@ -292,12 +300,12 @@ function margins.save(t)
end
end
if trace_marginstack then
- showstore(store,"between")
+ showstore(store,"between",location)
end
end
if t.number then
-- better make a new table and make t entry in t
- t.box = copy_node_list(texbox[t.number])
+ t.box = copy_node_list(content)
t.n = nofsaved
-- used later (we will clean up this natural mess later)
-- nice is to make a special status table mechanism
@@ -327,10 +335,10 @@ function margins.save(t)
end
end
if trace_marginstack then
- showstore(store,"after ")
+ showstore(store,"after",location)
end
if trace_margindata then
- report_margindata("saved: %s, location: %s, scope: %s, inline: %s",nofsaved,location,scope,tostring(inline))
+ report_margindata("saved %a, location %a, scope %a, inline %a",nofsaved,location,scope,inline)
end
end
@@ -409,7 +417,8 @@ local function realign(current,candidate)
if not anchor or anchor == "" then
anchor = v_text
end
- if inline or anchor ~= v_text then
+ if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
+        -- the alignment_code check catches margintexts set before a tabulate
h_anchors = h_anchors + 1
anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors))
local blob = jobpositions.get('md:h', h_anchors)
@@ -430,15 +439,15 @@ local function realign(current,candidate)
if move_x then
delta = delta - move_x
if trace_margindata then
- report_margindata("realigned: %s, location: %s, margin: %s, move: %s",candidate.n,location,margin,points(move_x))
+ report_margindata("realigned %a, location %a, margin %a, move %p",candidate.n,location,margin,move_x)
end
else
if trace_margindata then
- report_margindata("realigned: %s, location: %s, margin: %s",candidate.n,location,margin)
+ report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
end
end
- current.list = hpack_nodes(link_nodes(anchornode,new_kern(-delta),current.list,new_kern(delta))) -- anchor == nil is ok in link_nodes
+ current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)})
current.width = 0
end
@@ -481,7 +490,7 @@ local function markovershoot(current)
v_anchors = v_anchors + 1
cache[v_anchors] = stacked
local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- current.list = hpack_nodes(link_nodes(anchor,current.list))
+ current.list = hpack_nodes(concat_nodes{anchor,current.list})
end
local function getovershoot(location)
@@ -492,8 +501,7 @@ local function getovershoot(location)
local offset = p[location] or 0
local overshoot = offset - distance
if trace_marginstack then
- report_margindata("location: %s, distance: %s, offset: %s, overshoot: %s",
- location,points(distance),points(offset),points(overshoot))
+ report_margindata("location %a, distance %p, offset %p, overshoot %p",location,distance,offset,overshoot)
end
if overshoot > 0 then
return overshoot
@@ -516,6 +524,7 @@ local function inject(parent,head,candidate)
local baseline = candidate.baseline
local strutheight = candidate.strutheight
local strutdepth = candidate.strutdepth
+ local psubtype = parent.subtype
local offset = stacked[location]
local firstonstack = offset == false or offset == nil
nofstatus = nofstatus + 1
@@ -538,8 +547,9 @@ local function inject(parent,head,candidate)
end
candidate.width = width
candidate.hsize = parent.width -- we can also pass textwidth
+ candidate.psubtype = psubtype
if trace_margindata then
- report_margindata("processing, index %s, height: %s, depth: %s",candidate.n,height,depth)
+ report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
end
if firstonstack then
offset = 0
@@ -565,7 +575,7 @@ local function inject(parent,head,candidate)
if method == v_top then
local delta = height - parent.height
if trace_margindata then
- report_margindata("top aligned, amount: %s",delta)
+ report_margindata("top aligned by %p",delta)
end
if delta < candidate.threshold then
shift = shift + voffset + delta
@@ -582,18 +592,18 @@ local function inject(parent,head,candidate)
elseif method == v_depth then
local delta = strutdepth
if trace_margindata then
- report_margindata("depth aligned, amount: %s",delta)
+ report_margindata("depth aligned by %p",delta)
end
shift = shift + voffset + delta
elseif method == v_height then
local delta = - strutheight
if trace_margindata then
- report_margindata("height aligned, amount: %s",delta)
+ report_margindata("height aligned by %p",delta)
end
shift = shift + voffset + delta
elseif voffset ~= 0 then
if trace_margindata then
- report_margindata("voffset applied: %s",voffset)
+ report_margindata("voffset %p applied",voffset)
end
shift = shift + voffset
end
@@ -601,7 +611,7 @@ local function inject(parent,head,candidate)
if line ~= 0 then
local delta = line * candidate.lineheight
if trace_margindata then
- report_margindata("line offset applied: %s (%s)",line,delta)
+ report_margindata("offset %p applied to line %s",delta,line)
end
shift = shift + delta
offset = offset + delta
@@ -613,7 +623,7 @@ local function inject(parent,head,candidate)
elseif head.id == whatsit_code and head.subtype == localpar_code then
-- experimental
if head.dir == "TRT" then
- box.list = hpack_nodes(link_nodes(new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)))
+ box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)})
end
insert_node_after(head,head,box)
else
@@ -621,9 +631,9 @@ local function inject(parent,head,candidate)
box.next = head
head = box
end
- set_attribute(box,a_margindata,nofstatus)
+ box[a_margindata] = nofstatus
if trace_margindata then
- report_margindata("injected, location: %s, shift: %s",location,shift)
+ report_margindata("injected, location %a, shift %p",location,shift)
end
-- we need to add line etc to offset as well
offset = offset + depth
@@ -637,7 +647,7 @@ local function inject(parent,head,candidate)
stacked[location] = offset -- weird, no table ?
-- todo: if no real depth then zero
if trace_margindata then
- report_margindata("status, offset: %s",offset)
+ report_margindata("status, offset %s",offset)
end
return head, room, stack == v_continue
end
@@ -708,11 +718,11 @@ local function flushed(scope,parent) -- current is hlist
done = done or don
end
if done then
-local a = has_attribute(head,a_linenumber) -- hack .. we need a more decent critical attribute inheritance mechanism
+ local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
parent.list = hpack_nodes(head,parent.width,"exactly")
-if a then
- set_attribute(parent.list,a_linenumber,a)
-end
+ if a then
+ parent.list[a_linenumber] = a
+ end
-- resetstacked()
end
return done, continue
@@ -724,16 +734,16 @@ end
local function handler(scope,head,group)
if nofstored > 0 then
if trace_margindata then
- report_margindata("flushing stage one, stored: %s, scope: %s, delayed: %s, group: %s",nofstored,scope,nofdelayed,group)
+ report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
end
local current = head
local done = false
while current do
local id = current.id
- if (id == vlist_code or id == hlist_code) and not has_attribute(current,a_margindata) then
+ if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
local don, continue = flushed(scope,current)
if don then
- set_attribute(current,a_margindata,0) -- signal to prevent duplicate processing
+ current[a_margindata] = 0 -- signal to prevent duplicate processing
if continue then
markovershoot(current)
end
@@ -754,29 +764,44 @@ local function handler(scope,head,group)
end
end
-function margins.localhandler(head,group)
- if conditionals.inhibitmargindata then
+function margins.localhandler(head,group) -- sometimes group is "" which is weird
+ local inhibit = conditionals.inhibitmargindata
+ if inhibit then
+ if trace_margingroup then
+ report_margindata("ignored 3, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
return head, false
elseif nofstored > 0 then
return handler(v_local,head,group)
else
+ if trace_margingroup then
+ report_margindata("ignored 4, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
return head, false
end
end
function margins.globalhandler(head,group) -- check group
--- print(group)
- if conditionals.inhibitmargindata or nofstored == 0 then
+ local inhibit = conditionals.inhibitmargindata
+ if inhibit or nofstored == 0 then
+ if trace_margingroup then
+ report_margindata("ignored 1, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
return head, false
elseif group == "hmode_par" then
return handler("global",head,group)
elseif group == "vmode_par" then -- experiment (for alignments)
return handler("global",head,group)
- -- this needs checking as we then get quite some one liners to process and
- -- we cannot look ahead then:
+ -- this needs checking as we then get quite some one liners to process and
+ -- we cannot look ahead then:
elseif group == "box" then -- experiment (for alignments)
return handler("global",head,group)
+ elseif group == "alignment" then -- experiment (for alignments)
+ return handler("global",head,group)
else
+ if trace_margingroup then
+ report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
return head, false
end
end
@@ -788,7 +813,7 @@ local function finalhandler(head)
while current do
local id = current.id
if id == hlist_code then
- local a = has_attribute(current,a_margindata)
+ local a = current[a_margindata]
if not a or a == 0 then
finalhandler(current.list)
elseif realigned(current,a) then
@@ -823,12 +848,12 @@ end
-- go horizontal. So this needs more testing.
prependaction("finalizers", "lists", "typesetters.margins.localhandler")
--- prependaction("vboxbuilders", "normalizers", "typesetters.margins.localhandler")
+-- ("vboxbuilders", "normalizers", "typesetters.margins.localhandler")
prependaction("mvlbuilders", "normalizers", "typesetters.margins.globalhandler")
prependaction("shipouts", "normalizers", "typesetters.margins.finalhandler")
disableaction("finalizers", "typesetters.margins.localhandler")
--- disableaction("vboxbuilders", "typesetters.margins.localhandler")
+-- ("vboxbuilders", "typesetters.margins.localhandler")
disableaction("mvlbuilders", "typesetters.margins.globalhandler")
disableaction("shipouts", "typesetters.margins.finalhandler")
diff --git a/Master/texmf-dist/tex/context/base/typo-mar.mkiv b/Master/texmf-dist/tex/context/base/typo-mar.mkiv
index a393fc25098..595cf37560f 100644
--- a/Master/texmf-dist/tex/context/base/typo-mar.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-mar.mkiv
@@ -110,7 +110,7 @@
% \c!align=,
% \c!method=,
\c!style=\v!bold,
- \c!color=, % maybe textcolor
+ \c!color=, % maybe \maintextcolor
% \c!name=,
% \c!category=,
\c!threshold=.25ex,
@@ -148,7 +148,7 @@
\appendtoks
\forgetall
\tf
- \deactivatecolor
+ \resetallattributes % \deactivatecolor % needed, but maybe we should switch to maintextcolor: \onlyinheritmaintextcolor
\to \everymargindatacontent
% trialtypesetting: no need for margin stuff while trialing as
@@ -243,7 +243,7 @@
\hsize\currentmargindatawidth
\raggedcommand
\ifx\currentmargindatastrut\empty \else
- \dosetupstrut[\currentmargindatastrut]%
+ \synchronizestrut\currentmargindatastrut
\fi
\begstrut
\strc_references_flush_destination_nodes
@@ -299,6 +299,7 @@
category = "\margindataparameter\c!category",
name = "\margindataparameter\c!name",
scope = "\margindataparameter\c!scope",
+ number = \number\nextbox,
}}%
\fi
\endgroup}
diff --git a/Master/texmf-dist/tex/context/base/typo-pag.lua b/Master/texmf-dist/tex/context/base/typo-pag.lua
index 482a3a9f866..0dd75ddf97d 100644
--- a/Master/texmf-dist/tex/context/base/typo-pag.lua
+++ b/Master/texmf-dist/tex/context/base/typo-pag.lua
@@ -16,11 +16,8 @@ local penalty_code = nodecodes.penalty
local insert_node_after = node.insert_after
local new_penalty = nodes.pool.penalty
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
-local points = number.points
+local unsetvalue = attributes.unsetvalue
local a_keeptogether = attributes.private("keeptogether")
@@ -40,7 +37,7 @@ function builders.paragraphs.registertogether(line,specification) -- might chang
if not enabled then
nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
end
- local a = has_attribute(line,a_keeptogether)
+ local a = line[a_keeptogether]
local c = a and cache[a]
if c then
local height = specification.height
@@ -67,7 +64,7 @@ function builders.paragraphs.registertogether(line,specification) -- might chang
if not specification.slack then
specification.slack = 0
end
- set_attribute(line,a_keeptogether,last)
+ line[a_keeptogether] = last
end
if trace_keeptogether then
local a = a or last
@@ -82,8 +79,7 @@ function builders.paragraphs.registertogether(line,specification) -- might chang
else
noflines = math.round((height + depth - slack) / noflines)
end
- report_keeptogether("registered, index: %s, height: %s, depth: %s, slack: %s, noflines: %s",
- a,points(height),points(depth),points(slack),noflines)
+ report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
end
end
end
@@ -98,14 +94,14 @@ local function keeptogether(start,a)
local slack = specification.slack
local threshold = specification.depth - slack
if trace_keeptogether then
- report_keeptogether("list, index: %s, total: %s, threshold: %s, slack: %s",a,points(total),points(threshold),points(slack))
+ report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
end
while current do
local id = current.id
if id == vlist_code or id == hlist_code then
total = total + current.height + current.depth
if trace_keeptogether then
- report_keeptogether("list, index: %s, total: %s, threshold: %s",a,points(total),points(threshold))
+ report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
end
if total <= threshold then
if previous.id == penalty_code then
@@ -120,7 +116,7 @@ local function keeptogether(start,a)
-- hm, breakpoint, maybe turn this into kern
total = total + current.spec.width
if trace_keeptogether then
- report_keeptogether("glue, index: %s, total: %s, threshold: %s",a,points(total),points(threshold))
+ report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
end
if total <= threshold then
if previous.id == penalty_code then
@@ -134,7 +130,7 @@ local function keeptogether(start,a)
elseif id == kern_code then
total = total + current.kern
if trace_keeptogether then
- report_keeptogether("kern, index: %s, total: %s, threshold: %s",a,points(total),points(threshold))
+ report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
end
if total <= threshold then
if previous.id == penalty_code then
@@ -169,10 +165,10 @@ function builders.paragraphs.keeptogether(head)
local current = head
while current do
if current.id == hlist_code then
- local a = has_attribute(current,a_keeptogether)
+ local a = current[a_keeptogether]
if a and a > 0 then
keeptogether(current,a)
- unset_attribute(current,a_keeptogether)
+ current[a_keeptogether] = unsetvalue
cache[a] = nil
done = true
end
diff --git a/Master/texmf-dist/tex/context/base/typo-par.lua b/Master/texmf-dist/tex/context/base/typo-par.lua
index 0261d2e4e3f..b25ae4a5bcc 100644
--- a/Master/texmf-dist/tex/context/base/typo-par.lua
+++ b/Master/texmf-dist/tex/context/base/typo-par.lua
@@ -29,9 +29,6 @@ local variables = interfaces.variables
local texattribute = tex.attribute
local unsetvalue = attributes.unsetvalue
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local kern_node = nodecodes.kern
@@ -84,7 +81,7 @@ local function process(namespace,attribute,head)
local done = false
if head.id == whatsit_code and head.subtype == localpar_code then
-- begin of par
- local a = has_attribute(head,attribute)
+ local a = head[attribute]
if a and a > 0 then
if dropper.enabled then
dropper.enabled = false -- dangerous for e.g. nested || in tufte
@@ -112,11 +109,11 @@ local function process(namespace,attribute,head)
local ca = dropper.ca
local ta = dropper.ta
if ca and ca > 0 then
- set_attribute(first,a_colorspace,ma == 0 and 1 or ma)
- set_attribute(first,a_color,ca)
+ first[a_colorspace] = ma == 0 and 1 or ma
+ first[a_color] = ca
end
if ta and ta > 0 then
- set_attribute(first,a_transparency,ta)
+ first[a_transparency] = ta
end
--
local width = first.width
diff --git a/Master/texmf-dist/tex/context/base/typo-prc.lua b/Master/texmf-dist/tex/context/base/typo-prc.lua
index bb965ff66dd..5b74abd0b4c 100644
--- a/Master/texmf-dist/tex/context/base/typo-prc.lua
+++ b/Master/texmf-dist/tex/context/base/typo-prc.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['typo-prc'] = {
-- moved from strc-ini.lua
-local format = string.format
+local formatters = string.formatters
local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs
-- processors: syntax: processor->data ... not ok yet
@@ -53,17 +53,17 @@ function processors.apply(p,s)
end
if p and registered[p] then
if trace_processors then
- report_processors("known: %s, argument: %s",p,s or "")
+ report_processors("applying %s processor %a, argument: %s","known",p,s)
end
context.applyprocessor(p,s)
elseif s then
if trace_processors then
- report_processors("unknown: %s, argument: %s",p or "?",s)
+ report_processors("applying %s processor %a, argument: %s","unknown",p,s)
end
context(s)
elseif str then
if trace_processors then
- report_processors("direct: %s",str)
+ report_processors("applying %s processor, data: %s","ignored",str)
end
context(str)
end
@@ -76,21 +76,21 @@ function processors.startapply(p,s)
end
if p and registered[p] then
if trace_processors then
- report_processors("start: %s",p or "?")
+ report_processors("start applying %s processor %a","known",p)
end
context.applyprocessor(p)
context("{")
return s
elseif p then
if trace_processors then
- report_processors("start: %s (unknown)",p)
+ report_processors("start applying %s processor %a","unknown",p)
end
context.firstofoneargument()
context("{")
return s
else
if trace_processors then
- report_processors("start: ? (unset)")
+ report_processors("start applying %s processor","ignored")
end
context.firstofoneargument()
context("{")
@@ -101,14 +101,14 @@ end
function processors.stopapply()
context("}")
if trace_processors then
- report_processors("stop")
+ report_processors("stop applying processor")
end
end
function processors.tostring(str)
local p, s = lpegmatch(splitter,str)
if registered[p] then
- return format("\\applyprocessor{%s}{%s}",p,s)
+ return formatters["\\applyprocessor{%s}{%s}"](p,s)
else
return str
end
@@ -123,4 +123,3 @@ end
commands.registerstructureprocessor = processors.register
commands.resetstructureprocessor = processors.reset
-
diff --git a/Master/texmf-dist/tex/context/base/typo-rep.lua b/Master/texmf-dist/tex/context/base/typo-rep.lua
index 0d9bf8cc1b7..8451ce52b4b 100644
--- a/Master/texmf-dist/tex/context/base/typo-rep.lua
+++ b/Master/texmf-dist/tex/context/base/typo-rep.lua
@@ -20,7 +20,6 @@ local nodes, node = nodes, node
local delete_node = nodes.delete
local replace_node = nodes.replace
local copy_node = node.copy
-local has_attribute = node.has_attribute
local chardata = characters.data
local collected = false
@@ -55,20 +54,20 @@ end
local function process(what,head,current,char)
if what == true then
if trace_stripping then
- report_stripping("deleting 0x%05X from text",char)
+ report_stripping("deleting %C from text",char)
end
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
current = current.next
if trace_stripping then
- report_stripping("processing 0x%05X in text",char)
+ report_stripping("processing %C in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
current = current.next
if trace_stripping then
- report_stripping("replacing 0x%05X in text",char)
+ report_stripping("replacing %C in text",char)
end
end
return head, current
@@ -79,7 +78,7 @@ function nodes.handlers.stripping(head)
while current do
if current.id == glyph_code then
-- it's more efficient to keep track of what needs to be kept
- local todo = has_attribute(current,a_stripping)
+ local todo = current[a_stripping]
if todo == 1 then
local char = current.char
local what = glyphs[char]
diff --git a/Master/texmf-dist/tex/context/base/typo-scr.mkiv b/Master/texmf-dist/tex/context/base/typo-scr.mkiv
index 4b448752227..6249c390a93 100644
--- a/Master/texmf-dist/tex/context/base/typo-scr.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-scr.mkiv
@@ -19,7 +19,7 @@
%D \type {shiftup} and \type {shiftdown} that can work across paragraphs.
%D \macros
-%D {low, high, lohi, hilo}
+%D {low, high, definelow, definehigh, setuplow, setuphigh}
%D
%D Although \TEX\ is pretty well aware of super- and subscripts, its mechanism
%D is mainly tuned for math mode. The next few commands take care of script
@@ -36,69 +36,233 @@
%D space. The implementation looks a bit fuzzy, since some \type {\fontdimen}'s
%D are involved to determine the optimal placement.
-% These might become parameters: \setupscripts but we need fo come up with
-% nice keys.
+\installcorenamespace {low}
+\installcorenamespace {high}
-\def\highvfraction {0}
-\def\lowvfraction {0}
-\def\highlowvfraction{.1}
-\def\highlowhfraction{.1}
+\installcommandhandler \??low {low} \??low
+\installcommandhandler \??high {high} \??high
-\unexpanded\def\low {\typo_scripts_high_low\lower\mathsubnormal{.48}\lowvfraction \t!sub}
-\unexpanded\def\high{\typo_scripts_high_low\raise\mathsupnormal{.86}\highvfraction\t!sup}
+\setuplow [\c!style=\tx,\c!distance=\zeropoint,\c!down=.48\exheight] % historical
+\setuphigh[\c!style=\tx,\c!distance=\zeropoint,\c!up =.86\exheight] % values
-\def\typo_scripts_high_low#1#2#3#4#5#6% textscript mathscript fraction extra tag
+\appendtoks \setuevalue\currentlow {\typo_scripts_low {\currentlow }}\to \everydefinelow
+\appendtoks \setuevalue\currenthigh{\typo_scripts_high{\currenthigh}}\to \everydefinehigh
+
+\unexpanded\def\typo_scripts_low#1#2%
+ {\dontleavehmode
+ \begingroup
+ \edef\currentlow{#1}%
+ \kern\lowparameter\c!distance\relax
+ \setbox\scratchbox\hbox\bgroup
+ \lower\lowparameter\c!down\hbox\bgroup
+ \ifx\fontsize\empty
+ \ifmmode
+ \mr % no color yet
+ \else
+ \uselowstyleandcolor\c!style\c!color
+ \fi
+ \else
+ \uselowstyleandcolor\c!style\c!color
+ \fi
+ \dostarttagged\t!sub\currentlow
+ #2%
+ \dostoptagged
+ \egroup
+ \egroup
+ \ht\scratchbox\strutht
+ \dp\scratchbox\strutdp
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\typo_scripts_high#1#2%
{\dontleavehmode
\begingroup
- \scratchdimen\dimexpr#3\exheight+#4\exheight\relax
- \kern\highlowhfraction\exheight
- \setbox\scratchbox\hbox{#1\scratchdimen\hbox
- {\ifx\fontsize\empty\ifmmode\mr\else\tx\fi\else\tx\fi
- \dostarttagged
- #5\empty#6%
- \dostoptagged}}%
- \ht\scratchbox\strutheight
- \dp\scratchbox\strutdepth
+ \edef\currenthigh{#1}%
+ \kern\highparameter\c!distance\relax
+ \setbox\scratchbox\hbox\bgroup
+ \raise\highparameter\c!up\hbox\bgroup
+ \ifx\fontsize\empty
+ \ifmmode
+ \mr % no color yet
+ \else
+ \usehighstyleandcolor\c!style\c!color
+ \fi
+ \else
+ \usehighstyleandcolor\c!style\c!color
+ \fi
+ \dostarttagged\t!sup\currenthigh
+ #2%
+ \dostoptagged
+ \egroup
+ \egroup
+ \ht\scratchbox\strutht
+ \dp\scratchbox\strutdp
\box\scratchbox
\endgroup}
+\unexpanded\def\low {\typo_scripts_low \empty}
+\unexpanded\def\high{\typo_scripts_high\empty}
+
+%D \macros
+%D {lohi, hilo, definelohi, setuplohi}
+%D
%D You can provide an optional keyword \type {left}, in which case the super and
%D subscripts will be aligned in a way that permits placement at the left of a word
%D (which means that it will be right aligned).
%D
%D \startbuffer
-%D \lohi{aha}{ah} test \lohi{aha}{ah} test
-%D \lohi[left]{aha}{ah} test \lohi[left]{aha}{ah} test
-%D \lohi{aha}{ah} test\lohi{aha}{ah} test
-%D \lohi[left]{aha}{ah}test \lohi[left]{aha}{ah}test
+%D \lohi {aha} {ah} test \lohi {aha} {ah} test
+%D \lohi [left] {aha} {ah} test \lohi [left] {aha} {ah} test
+%D \lohi {aha} {ah} test \lohi {aha} {ah} test
+%D \lohi [left] {aha} {ah}test \lohi [left] {aha} {ah} test
%D \stopbuffer
%D
%D \typebuffer
%D \getbuffer
-\unexpanded\def\lohi{\dosingleempty\typo_scripts_lohi}
-\unexpanded\def\hilo{\dosingleempty\typo_scripts_hilo}
+\installcorenamespace {lowhigh}
+
+\installcommandhandler \??lowhigh {lowhigh} \??lowhigh
+
+\setuplowhigh
+ [\c!style=\tx,
+ \c!distance=.1\exheight, % these are
+ \c!up=.96\exheight, % historical
+ \c!down=.58\exheight] % values
-\def\typo_scripts_lohi[#1]#2#3%
+\appendtoks
+ \setuevalue\currentlowhigh{\typo_scripts_lowhigh{\currentlowhigh}\typo_scripts_lowhigh_indeed}%
+\to \everydefinelowhigh
+
+\unexpanded\def\typo_scripts_lowhigh#1% #2
{\dontleavehmode
- \hbox
- {\dostarttagged\t!subsup\empty
- \setbox4\hbox{\typo_scripts_high_low\lower\mathsubnormal{.48}\highlowvfraction\t!sub{#2}}%
- \setbox6\hbox{\typo_scripts_high_low\raise\mathsupnormal{.86}\highlowvfraction\t!sup{#3}}%
- \doif{#1}{\v!left}
- {\ifdim\wd4<\wd6
- \setbox4\hbox to \wd6{\hss\box4}%
- \else
- \setbox6\hbox to \wd4{\hss\box6}%
- \fi}%
- \ifdim\wd4<\wd6
- \wd4=\zeropoint\box4\box6
+ \hbox\bgroup
+ \edef\currentlowhigh{#1}%
+ \dosingleempty} % #2
+
+\unexpanded\def\typo_scripts_lowhigh_indeed[#1]#2#3% todo: align .. [#1] is compatible hack
+ {\dostarttagged\t!subsup\currentlowhigh
+ \setbox\plusfour\hbox{\typo_scripts_lowhigh_low_high\lower\c!down\t!sub{#2}}%
+ \setbox\plussix \hbox{\typo_scripts_lowhigh_low_high\raise\c!up \t!sup{#3}}%
+ \doif{#1}{\v!left}
+ {\ifdim\wd\plusfour<\wd\plussix
+ \setbox\plusfour\hbox to \wd\plussix {\hss\box\plusfour}%
\else
- \wd6=\zeropoint\box6\box4
- \fi
- \dostoptagged}}
+ \setbox\plussix \hbox to \wd\plusfour{\hss\box\plussix }%
+ \fi}%
+ \ifdim\wd\plusfour<\wd\plussix
+ \wd\plusfour\zeropoint
+ \box\plusfour
+ \box\plussix
+ \else
+ \wd\plussix\zeropoint
+ \box\plussix
+ \box\plusfour
+ \fi
+ \dostoptagged
+ \egroup}
+
+\def\typo_scripts_lowhigh_low_high#1#2#3#4%
+ {\dontleavehmode
+ \begingroup
+ \kern\lowhighparameter\c!distance\relax
+ \setbox\scratchbox\hbox\bgroup
+ #1\lowhighparameter#2\hbox\bgroup
+ \ifx\fontsize\empty
+ \ifmmode
+ \mr % no color yet
+ \else
+ \uselowhighstyleandcolor\c!style\c!color
+ \fi
+ \else
+ \uselowhighstyleandcolor\c!style\c!color
+ \fi
+ \dostarttagged#3\empty
+ #4%
+ \dostoptagged
+ \egroup
+ \egroup
+ \ht\scratchbox\strutht
+ \dp\scratchbox\strutdp
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\typo_scripts_highlow_indeed[#1]#2#3%
+ {\typo_scripts_lowhigh_indeed[#1]{#3}{#2}}
+
+\unexpanded\def\lohi{\typo_scripts_lowhigh\empty\typo_scripts_lowhigh_indeed}
+\unexpanded\def\hilo{\typo_scripts_lowhigh\empty\typo_scripts_highlow_indeed}
+
+%D \macros
+%D {lowmidhigh, definelowmidhigh, setuplowmidhigh}
+%D
+%D The previous command originally didn't have definers. These were introduced when
+%D the next showed up:
+%D
+%D \startbuffer
+%D \definelow [MyLow] [style=\txx]
+%D \definehigh [MyHigh] [style=\txx]
+%D \definelowhigh [MyLoHi] [style=\txx]
+%D \definelowmidhigh[MyLoMiHi][style=\txx]
+%D
+%D We have
+%D \ruledhbox{\low {L}} and \ruledhbox{\MyLow {L}} and
+%D \ruledhbox{\high {H}} and \ruledhbox{\MyHigh {H}} and
+%D \ruledhbox{\lohi {L}{H}} and \ruledhbox{\MyLoHi {L}{H}} and
+%D \ruledhbox{\lomihi{L}{M}{H}} and \ruledhbox{\MyLoMiHi{L}{M}{H}}.
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\installcorenamespace {lowmidhigh}
+
+\installcommandhandler \??lowmidhigh {lowmidhigh} \??lowmidhigh
+
+\setuplowmidhigh
+ [\c!style=\tx,
+ \c!up=.8\struthtdp,
+ \c!down=.8\struthtdp]
+
+\appendtoks
+ \setuevalue\currentlowmidhigh{\typo_scripts_lowmidhigh{\currentlowmidhigh}}%
+\to \everydefinelowmidhigh
+
+\unexpanded\def\typo_scripts_lowmidhigh#1#2#3#4%
+ {\dontleavehmode \hbox \bgroup
+ \edef\currentlowmidhigh{#1}%
+ \dostarttagged\t!subsup\currentlowmidhigh
+ \uselowmidhighstyleandcolor\c!style\c!color
+ \setstrut
+ \setbox\plustwo \hbox{\strut\dostarttagged\t!sub\empty#2\dostoptagged}%
+ \setbox\plusfour\hbox{\strut\dostarttagged\t!mid\empty#3\dostoptagged}% inefficient
+ \setbox\plussix \hbox{\strut\dostarttagged\t!sup\empty#4\dostoptagged}%
+ \scratchdimen \wd
+ \ifdim\wd\plustwo>\wd\plusfour
+ \ifdim\wd\plustwo>\wd\plussix
+ \plustwo
+ \else
+ \plussix
+ \fi
+ \else
+ \ifdim\wd\plusfour>\wd\plussix
+ \plusfour
+ \else
+ \plussix
+ \fi
+ \fi
+ \relax
+ \setbox\plustwo \hbox to \scratchdimen{\hss\lower\lowmidhighparameter\c!down\box\plustwo \hss}%
+ \setbox\plusfour\hbox to \scratchdimen{\hss \box\plusfour\hss}%
+ \setbox\plussix \hbox to \scratchdimen{\hss\raise\lowmidhighparameter\c!up \box\plussix \hss}%
+ \wd\plustwo \zeropoint
+ \wd\plusfour\zeropoint
+ \box\plusfour
+ \box\plustwo
+ \box\plussix
+ \dostoptagged
+ \egroup}
-\def\typo_scripts_hilo[#1]#2#3%
- {\typo_scripts_lohi[#1]{#3}{#2}}
+\unexpanded\def\lomihi {\typo_scripts_lowmidhigh\empty}
+\unexpanded\def\himilo#1#2#3{\typo_scripts_lowmidhigh\empty{#3}{#2}{#1}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/typo-spa.lua b/Master/texmf-dist/tex/context/base/typo-spa.lua
index 9e653ad5563..5eba22889a8 100644
--- a/Master/texmf-dist/tex/context/base/typo-spa.lua
+++ b/Master/texmf-dist/tex/context/base/typo-spa.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['typo-spa'] = {
license = "see context related readme files"
}
-local utf = unicode.utf8
-
local next, type = next, type
local utfchar = utf.char
@@ -17,11 +15,10 @@ local report_spacing = logs.reporter("typesetting","spacing")
local nodes, fonts, node = nodes, fonts, node
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
local remove_node = nodes.remove
+local end_of_math = node.end_of_math
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -34,6 +31,7 @@ local v_reset = interfaces.variables.reset
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
+local math_code = nodecodes.math
local somespace = nodes.somespace
local somepenalty = nodes.somepenalty
@@ -74,13 +72,15 @@ local function process(namespace,attribute,head)
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- if start.id == glyph_code then
- local attr = has_attribute(start,attribute)
+ local id = start.id
+ if id == glyph_code then
+ local attr = start[attribute]
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local map = data.characters[start.char]
- unset_attribute(start,attribute) -- needed?
+ local char = start.char
+ local map = data.characters[char]
+ start[attribute] = unsetvalue -- needed?
if map then
local left = map.left
local right = map.right
@@ -89,31 +89,31 @@ local function process(namespace,attribute,head)
local prev = start.prev
if left and left ~= 0 and prev then
local ok = false
+ local prevprev = prev.prev
if alternative == 1 then
local somespace = somespace(prev,true)
if somespace then
- local prevprev = prev.prev
local somepenalty = somepenalty(prevprev,10000)
if somepenalty then
if trace_spacing then
- report_spacing("removing penalty and space before %s (left)", utfchar(start.char))
+ report_spacing("removing penalty and space before %C (left)",char)
end
head = remove_node(head,prev,true)
head = remove_node(head,prevprev,true)
else
if trace_spacing then
- report_spacing("removing space before %s (left)", utfchar(start.char))
+ report_spacing("removing space before %C (left)",char)
end
head = remove_node(head,prev,true)
end
end
ok = true
else
- ok = not (somespace(prev,true) and somepenalty(prev.prev,true)) or somespace(prev,true)
+ ok = not (somespace(prev,true) and somepenalty(prevprev,true)) or somespace(prev,true)
end
if ok then
if trace_spacing then
- report_spacing("inserting penalty and space before %s (left)", utfchar(start.char))
+ report_spacing("inserting penalty and space before %C (left)",char)
end
insert_node_before(head,start,new_penalty(10000))
insert_node_before(head,start,new_glue(left*quad))
@@ -123,14 +123,14 @@ local function process(namespace,attribute,head)
local next = start.next
if right and right ~= 0 and next then
local ok = false
+ local nextnext = next.next
if alternative == 1 then
local somepenalty = somepenalty(next,10000)
if somepenalty then
- local nextnext = next.next
local somespace = somespace(nextnext,true)
if somespace then
if trace_spacing then
- report_spacing("removing penalty and space after %s (right)", utfchar(start.char))
+ report_spacing("removing penalty and space after %C right",char)
end
head = remove_node(head,next,true)
head = remove_node(head,nextnext,true)
@@ -139,18 +139,18 @@ local function process(namespace,attribute,head)
local somespace = somespace(next,true)
if somespace then
if trace_spacing then
- report_spacing("removing space after %s (right)", utfchar(start.char))
+ report_spacing("removing space after %C (right)", char)
end
head = remove_node(head,next,true)
end
end
ok = true
else
- ok = not (somepenalty(next,10000) and somespace(next.next,true)) or somespace(next,true)
+ ok = not (somepenalty(next,10000) and somespace(nextnext,true)) or somespace(next,true)
end
if ok then
if trace_spacing then
- report_spacing("inserting penalty and space after %s (right)", utfchar(start.char))
+ report_spacing("inserting penalty and space after %C (right)",char)
end
insert_node_after(head,start,new_glue(right*quad))
insert_node_after(head,start,new_penalty(10000))
@@ -160,8 +160,12 @@ local function process(namespace,attribute,head)
end
end
end
+ elseif id == math_code then
+ start = end_of_math(start) -- weird, can return nil .. no math end?
+ end
+ if start then
+ start = start.next
end
- start = start.next
end
return head, done
end
diff --git a/Master/texmf-dist/tex/context/base/typo-spa.mkiv b/Master/texmf-dist/tex/context/base/typo-spa.mkiv
index 0cfe446102b..d783353d67d 100644
--- a/Master/texmf-dist/tex/context/base/typo-spa.mkiv
+++ b/Master/texmf-dist/tex/context/base/typo-spa.mkiv
@@ -22,10 +22,17 @@
% experimental spacing
%
% test: oeps {\setcharacterspacing[frenchpunctuation]x: xx \bfd x: xx} oeps: test
+%
+% todo: page | text => pagebody or text only
\installcorenamespace{characterspacing}
-\definesystemvariable{cs} % maybe a dummy namespace
+\installsetuponlycommandhandler \??characterspacing {_p_characterspacing} % private
+
+\setup_p_characterspacing
+ [\c!left=0,
+ \c!right=0,
+ \c!alternative=0]
\unexpanded\def\definecharacterspacing[#1]%
{\ctxcommand{definecharacterspacing("#1")}}
@@ -35,11 +42,11 @@
\def\typo_characterspacing_setup[#1][#2][#3]% todo: #2 list
{\begingroup
- \getparameters[\??cs][\c!left=0,\c!right=0,\c!alternative=0,#3]%
- \ctxcommand{setupcharacterspacing("#1",\number#2, {
- left = \@@csleft,
- right = \@@csright,
- alternative = \@@csalternative
+ \setupcurrent_p_characterspacing[#3]%
+ \ctxcommand{setupcharacterspacing("#1",\number#2, { % todo: just pass #3 to the lua end
+ left = \direct_p_characterspacingparameter\c!left,
+ right = \direct_p_characterspacingparameter\c!right,
+ alternative = \direct_p_characterspacingparameter\c!alternative
})}%
\endgroup}
diff --git a/Master/texmf-dist/tex/context/base/typo-txt.mkvi b/Master/texmf-dist/tex/context/base/typo-txt.mkvi
index f2859b58fe9..f1c80c1bd18 100644
--- a/Master/texmf-dist/tex/context/base/typo-txt.mkvi
+++ b/Master/texmf-dist/tex/context/base/typo-txt.mkvi
@@ -2,7 +2,7 @@
%D [ file=typo-txt,
%D version=2011.10.27,
%D title=\CONTEXT\ Typesetting Macros,
-%D subtitle=Texts,
+%D subtitle=Text Hacks,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -13,12 +13,63 @@
%D This module replaces the by now rather old supp-fun module.
+\writestatus{loading}{ConTeXt Typesetting Macros / Text Hacks}
+
\unprotect
-%D \NormalizeFontHeight \name {sample text} {height} {font}
-%D \NormalizeFontWidth \name {sample text} {width} {font}
-%D \NormalizeTextHeight {font} {height} {text}
-%D \NormalizeTextWidth {font} {width} {text}
+\registerctxluafile{typo-txt}{1.001}
+
+%D \macros
+%D {normalizefontheight,normalizefontwidth,normalizedfontsize}
+%D
+%D Next we introduce some font manipulation macros. When we
+%D want to typeset some text spread in a well defined area, it
+%D can be considered bad practice to manipulate character and
+%D word spacing. In such situations the next few macros can be
+%D of help:
+%D
+%D \starttyping
+%D \normalizefontheight \name {sample text} {height} {font}
+%D \normalizefontwidth \name {sample text} {width} {font}
+%D \stoptyping
+%D
+%D Consider for instance:
+%D
+%D \startbuffer[a]
+%D \NormalizeFontHeight \TempFont {X} {2\baselineskip} {Serif}
+%D \stopbuffer
+%D
+%D \startbuffer[b]
+%D \ruledhbox{\TempFont To Be Or Not To Be}
+%D \stopbuffer
+%D
+%D \typebuffer[a,b] \getbuffer[a]
+%D
+%D This shows up as:
+%D
+%D \startlinecorrection
+%D \ruledhbox{\getbuffer[b]}
+%D \stoplinecorrection
+%D
+%D The horizontal counterpart is:
+%D
+%D \startbuffer[a]
+%D \NormalizeFontWidth \TempFont {This Line Fits} {\hsize} {Serif}
+%D \stopbuffer
+%D
+%D \startbuffer[b]
+%D \ruledhbox{\TempFont This Line Fits}
+%D \stopbuffer
+%D
+%D \typebuffer[a,b] \getbuffer[a]
+%D
+%D This gives:
+%D
+%D \startlinecorrection
+%D \ruledhbox{\getbuffer[b]}
+%D \stoplinecorrection
+%D
+%D The calculated font scale is available in \type {\normalizedfontsize}.
\newbox\b_typo_normalizers
@@ -26,14 +77,14 @@
{\dimexpr\ifdim#1\b_typo_normalizers=\zeropoint
\bodyfontsize
\else
- \cldcontext{\number\dimexpr10pt\relax*\number\dimexpr#size\relax/\number#axis\b_typo_normalizers}\scaledpoint
+ \luaexpr{\number\dimexpr10pt\relax*\number\dimexpr#size\relax/\number#axis\b_typo_normalizers}\scaledpoint
\fi\relax}
\def\typo_normalizers_font_at_size#axis#cs#text#size#font% avoid overflow by using lua
{\begingroup
\setbox\b_typo_normalizers\hbox{\definedfont[#font at 10pt]\settrialtypesetting#text}%
- \normalexpanded{\endgroup\edef\noexpand\TheNormalizedFontSize{\the\typo_normalizers_size{#axis}{#size}}}%
- \edef#cs{\noexpand\definedfont[#font at \TheNormalizedFontSize]}}
+ \normalexpanded{\endgroup\edef\noexpand\normalizedfontsize{\the\typo_normalizers_size{#axis}{#size}}}%
+ \definefont[\strippedcsname#cs][#font at \normalizedfontsize]}
\unexpanded\def\typo_normalizers_text_at_size#axis#font#size#text%
{\dontleavehmode
@@ -42,30 +93,105 @@
\definedfont[#font at \the\typo_normalizers_size{#axis}{#size}]#text%
\endgroup}
-\let\TheNormalizedFontSize\!!zeropoint
+\def\normalizedfontsize{\bodyfontsize}
+
+\unexpanded\def\normalizetextwidth {\typo_normalizers_text_at_size\wd}
+\unexpanded\def\normalizetextheight{\typo_normalizers_text_at_size\ht}
+\unexpanded\def\normalizetextdepth {\typo_normalizers_text_at_size\dp}
+\unexpanded\def\normalizetextline {\typo_normalizers_text_at_size\htdp}
+
+\unexpanded\def\normalizefontwidth {\typo_normalizers_font_at_size\wd}
+\unexpanded\def\normalizefontheight{\typo_normalizers_font_at_size\ht}
+\unexpanded\def\normalizefontdepth {\typo_normalizers_font_at_size\dp}
+\unexpanded\def\normalizefontline {\typo_normalizers_font_at_size\htdp}
-%D Traditionally we use UpperCasedNames for this kind of
-%D functionality.
+\unexpanded\def\widthspanningtext #text#size#specification{\hbox{\normalizefontwidth \temp{#text}{#size}{#specification}\temp#text}}
+\unexpanded\def\heightspanningtext#text#size#specification{\hbox{\normalizefontheight\temp{#text}{#size}{#specification}\temp#text}}
+\unexpanded\def\depthspanningtext #text#size#specification{\hbox{\normalizefontdepth \temp{#text}{#size}{#specification}\temp#text}}
+\unexpanded\def\linespanningtext #text#size#specification{\hbox{\normalizefontline \temp{#text}{#size}{#specification}\temp#text}}
-\unexpanded\def\NormalizeFontHeight{\typo_normalizers_font_at_size\ht}
-\unexpanded\def\NormalizeFontWidth {\typo_normalizers_font_at_size\wd}
+%D Traditionally we use UpperCasedNames for this kind of functionality.
-\unexpanded\def\NormalizeTextHeight{\typo_normalizers_text_at_size\ht}
-\unexpanded\def\NormalizeTextWidth {\typo_normalizers_text_at_size\wd}
+\let\NormalizeFontHeight \normalizefontheight
+\let\NormalizeFontWidth \normalizefontwidth
+\let\NormalizeTextHeight \normalizetextheight
+\let\NormalizeTextWidth \normalizetextwidth
-\unexpanded\def\WidthSpanningText#text#width#font% compatibility macro
- {\hbox{\NormalizeFontWidth\temp{#text}{#width}{#font}\temp\the\everydefinedfont#1}}
+\let\WidthSpanningText \widthspanningtext
+\def\TheNormalizedFontSize{\normalizedfontsize}
+
+%D \macros
+%D {vulgarfraction}
+%D
+%D This code is moved from \type {cor-mis.mkiv}. We show three versions. First
+%D the simple one using \type {\low} and \type {\high}:
+%D
%D \startbuffer
-%D \NormalizeFontWidth \MyFontName {sample text} {10cm} {Serif*default}
+%D \def\vfrac#1#2%
+%D {\hbox{\high{\tx#1\kern-.25em}/\low{\kern-.25em\tx#2}}}
+%D
+%D test \vfrac{1}{2} test \vfrac{123}{456} test
+%D \stopbuffer
+%D
+%D \typebuffer {\showmakeup\getbuffer}
+%D
+%D A better way to handle the kerning is the following; here we kind of assume
+%D that the slash is symmetrical and has nearly zero width.
%D
-%D \ruledhbox{\MyFontName sample text}
-%D \blank
-%D \blackrule[width=10cm]
-%D \blank
-%D \ruledhbox{\NormalizeTextWidth {Serif*default} {10cm} {sample text}}
+%D \startbuffer
+%D \def\vfract#1#2%
+%D {\hbox{\high{\tx#1}\hbox to \zeropoint{\hss/\hss}\low{\tx#2}}}
%D \stopbuffer
%D
-%D \typebuffer \getbuffer
+%D \typebuffer {\showmakeup\getbuffer}
+%D
+%D The third and best alternative is the following:
+%D
+%D {\showmakeup\getbuffer}\crlf\getbuffer
+%D
+%D This time we measure the height of the \type {/} and shift over the maximum
+%D height and depth of this character and the fractional digits (we use 57 as
+%D sample). Here we combine all methods in one macro.
+
+\setnewconstant\vulgarfractionmethod\plusthree
+
+\definehspace[\v!vulgarfraction][.25em] % [.15em]
+\definesymbol[\v!vulgarfraction][/] % [\raise.2ex\hbox{/}]
+
+\unexpanded\def\vulgarfraction#1#2%
+ {\dontleavehmode
+ \hbox
+ {\def\vulgarfraction{vulgarfraction}%
+ \ifcase\vulgarfractionmethod
+ #1\symbol[\v!vulgarfraction]#2%
+ \or
+ \high{\tx#1\kern-\hspaceamount\empty\v!vulgarfraction}%
+ \symbol[\vulgarfraction]%
+ \low {\kern-\hspaceamount\empty\v!vulgarfraction\tx#2}%
+ \or
+ \high{\tx#1}%
+ \hbox to \zeropoint{\hss\symbol[\v!vulgarfraction]\hss}%
+ \low{\tx#2}%
+ \or
+ \setbox0\hbox{\symbol[\vulgarfraction]}%
+ \setbox2\hbox{\txx57}%
+ \raise\ht0\hbox{\lower\ht2\hbox{\txx#1}}%
+ \hbox to \zeropoint{\hss\symbol[\v!vulgarfraction]\hss}%
+ \lower\dp0\hbox{\raise\dp2\hbox{\txx#2}}%
+ \fi}}
+
+\ifdefined\vfrac \else \let\vfrac\vulgarfraction \fi
+
+%D \starttabulate[|l|l|]
+%D \HL
+%D \NC \bf method \NC \bf visualization \NC\NR
+%D \HL
+%D \NC 0 \NC \vulgarfractionmethod0 \vulgarfraction{1}{2} \NC\NR
+%D \NC 1 \NC \vulgarfractionmethod1 \vulgarfraction{1}{2} \NC\NR
+%D \NC 2 \NC \vulgarfractionmethod2 \vulgarfraction{1}{2} \NC\NR
+%D \NC 3 \NC \vulgarfractionmethod3 \vulgarfraction{1}{2} \NC\NR
+%D \HL
+%D \stoptabulate
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/unic-ini.mkiv b/Master/texmf-dist/tex/context/base/unic-ini.mkiv
index 55764dbdc88..ece0da28355 100644
--- a/Master/texmf-dist/tex/context/base/unic-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/unic-ini.mkiv
@@ -30,7 +30,7 @@
\def\unicodechar#1{\ctxcommand{unicodechar("#1")}}
\unexpanded\def\unknownchar
- {\dontleavehmode\hbox{\vrule\!!width.5em\!!height1ex\!!depth\zeropoint}}
+ {\dontleavehmode\hbox{\vrule\s!width.5\emwidth\s!height\exheight\s!depth\zeropoint}}
\ifdefined\zwnbsp\else \let\zwnbsp\relax \fi % zerowidthnonbreakablespace
diff --git a/Master/texmf-dist/tex/context/base/util-deb.lua b/Master/texmf-dist/tex/context/base/util-deb.lua
index d82c1a114f5..785373f862a 100644
--- a/Master/texmf-dist/tex/context/base/util-deb.lua
+++ b/Master/texmf-dist/tex/context/base/util-deb.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['util.deb'] = {
+if not modules then modules = { } end modules ['util-deb'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -18,11 +18,13 @@ local format, find = string.format, string.find
local is_boolean = string.is_boolean
utilities = utilities or { }
-utilities.debugger = utilities.debugger or { }
-local debugger = utilities.debugger
+local debugger = utilities.debugger or { }
+utilities.debugger = debugger
-local counters = { }
-local names = { }
+local counters = { }
+local names = { }
+
+local report = logs.reporter("debugger")
-- one
@@ -50,7 +52,7 @@ local function hook()
end
function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
- printer = printer or texio.write or print
+ printer = printer or report
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
local dataset = { }
@@ -108,22 +110,6 @@ end
--~ print("")
--~ debugger.showstats(print,3)
-local is_node = node and node.is_node
-local is_lpeg = lpeg and lpeg.type
-
-function inspect(i) -- global function
- local ti = type(i)
- if ti == "table" then
- table.print(i,"table")
- elseif is_node and is_node(i) then
- table.print(nodes.astable(i),tostring(i))
- elseif is_lpeg and is_lpeg(i) then
- lpeg.print(i)
- else
- print(tostring(i))
- end
-end
-
-- from the lua book:
function traceback()
diff --git a/Master/texmf-dist/tex/context/base/util-dim.lua b/Master/texmf-dist/tex/context/base/util-dim.lua
index 47e43c386e4..47b2706b736 100644
--- a/Master/texmf-dist/tex/context/base/util-dim.lua
+++ b/Master/texmf-dist/tex/context/base/util-dim.lua
@@ -16,10 +16,11 @@ table.</p>
--ldx]]--
local format, match, gsub, type, setmetatable = string.format, string.match, string.gsub, type, setmetatable
-local P, S, R, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.match
+local P, S, R, Cc, C, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.C, lpeg.match
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
--this might become another namespace
@@ -86,6 +87,7 @@ local dimenfactors = allocate {
format (string) is implemented using this table.</p>
--ldx]]--
+
local function numbertodimen(n,unit,fmt)
if type(n) == 'string' then
return n
@@ -141,6 +143,12 @@ local dimenpair = amount/tonumber * (unit^1/dimenfactors + Cc(1)) -- tonumber is
lpeg.patterns.dimenpair = dimenpair
+local splitter = amount/tonumber * C(unit^1)
+
+function number.splitdimen(str)
+ return lpegmatch(splitter,str)
+end
+
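+-- A minimal usage sketch of the new splitter (illustrative values only, assuming
+-- a well formed dimension string):
+--
+-- local n, u = number.splitdimen("10.5pt") -- n == 10.5, u == "pt"
+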
--[[ldx--
<p>We use a metatable to intercept errors. When no key is found in
the table with factors, the metatable will be consulted for an
@@ -430,12 +438,12 @@ probably use a hash instead of a one-element table.</p>
<p>Goodie:s</p>
--ldx]]--
-function number.percent(n) -- will be cleaned up once luatex 0.30 is out
- local hsize = tex.hsize
- if type(hsize) == "string" then
- hsize = stringtodimen(hsize)
+function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out
+ d = d or tex.hsize
+ if type(d) == "string" then
+ d = stringtodimen(d)
end
- return (n/100) * hsize
+ return (n/100) * d
end
number["%"] = number.percent
diff --git a/Master/texmf-dist/tex/context/base/util-env.lua b/Master/texmf-dist/tex/context/base/util-env.lua
new file mode 100644
index 00000000000..283b91c0a11
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-env.lua
@@ -0,0 +1,258 @@
+if not modules then modules = { } end modules ['util-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquoted, quoted = string.unquoted, string.quoted
+local concat, insert, remove = table.concat, table.insert, table.remove
+
+environment = environment or { }
+local environment = environment
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks (we will replace the texlua call by luatex --luaonly)
+
+local validengines = allocate {
+ ["luatex"] = true,
+ ["luajittex"] = true,
+ -- ["luatex.exe"] = true,
+ -- ["luajittex.exe"] = true,
+}
+
+local basicengines = allocate {
+ ["luatex"] = "luatex",
+ ["texlua"] = "luatex",
+ ["texluac"] = "luatex",
+ ["luajittex"] = "luajittex",
+ ["texluajit"] = "luajittex",
+ -- ["texlua.exe"] = "luatex",
+ -- ["texluajit.exe"] = "luajittex",
+}
+
+local luaengines=allocate {
+ ["lua"] = true,
+ ["luajit"] = true,
+}
+
+environment.validengines = validengines
+environment.basicengines = basicengines
+
+-- [-1] = binary
+-- [ 0] = self
+-- [ 1] = argument 1 ...
+
+-- instead we could set ranges
+
+if not arg then
+ -- used as library
+elseif luaengines[file.removesuffix(arg[-1])] then
+-- arg[-1] = arg[0]
+-- arg[ 0] = arg[1]
+-- for k=2,#arg do
+-- arg[k-1] = arg[k]
+-- end
+-- remove(arg) -- last
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ remove(arg) -- last
+ remove(arg) -- pre-last
+ else
+ -- tex run
+ end
+
+ -- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
+ --
+ -- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
+ --
+ -- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
+ -- but it's unlikely that there will be more of this
+
+ local originalzero = file.basename(arg[0])
+ local specialmapping = { luatools = "base" }
+
+ if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
+ arg[0] = specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+
+end
+
+-- environment
+
+environment.arguments = allocate()
+environment.files = allocate()
+environment.sortedflags = nil
+
+-- context specific arguments (in order not to confuse the engine)
+
+function environment.initializearguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag = gsub(flag,"^c:","")
+ arguments[flag] = unquoted(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ flag = gsub(flag,"^c:","")
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua')
+end
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g on type (boolean versus string)
+--
+-- tricky: too many hits when we support partials unless we add
+-- a registration of arguments so from now on we have 'partial'
+
+function environment.getargument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+
+environment.argument = environment.getargument
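+
+-- An illustrative sketch of the resulting mapping (hypothetical call, values
+-- chosen for the example only):
+--
+-- environment.initializearguments { "--inter=en", "--verbose", "myfile.tex" }
+--
+-- environment.arguments.inter               -- "en"
+-- environment.arguments.verbose             -- true
+-- environment.files[1]                      -- "myfile.tex"
+-- environment.getargument("interface",true) -- "en" (the given partial flag matches)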
+
+function environment.splitarguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local originalarguments = environment.originalarguments
+ for k=1,#originalarguments do
+ local v = originalarguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
+ end
+ return before, after
+end
+
+function environment.reconstructcommandline(arg,noquote)
+ arg = arg or environment.originalarguments
+ if noquote and #arg == 1 then
+ -- we could just do: return unquoted(resolvers.resolve(arg[i]))
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quoted(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
+end
+
+-- -- to be tested:
+--
+-- function environment.reconstructcommandline(arg,noquote)
+-- arg = arg or environment.originalarguments
+-- if noquote and #arg == 1 then
+-- return unquoted(resolvers.resolve(arg[1]))
+-- elseif #arg > 0 then
+-- local result = { }
+-- for i=1,#arg do
+-- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
+-- end
+-- return concat(result," ")
+-- else
+-- return ""
+-- end
+-- end
+
+if arg then
+
+ -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+ local newarg, instring = { }, false
+
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=0,-5,-1 do
+ newarg[i] = arg[i]
+ end
+
+ environment.initializearguments(newarg)
+
+ environment.originalarguments = mark(newarg)
+ environment.rawarguments = mark(arg)
+
+ arg = { } -- prevent duplicate handling
+
+end
diff --git a/Master/texmf-dist/tex/context/base/util-fmt.lua b/Master/texmf-dist/tex/context/base/util-fmt.lua
index e049d0b94b3..371a5dfcece 100644
--- a/Master/texmf-dist/tex/context/base/util-fmt.lua
+++ b/Master/texmf-dist/tex/context/base/util-fmt.lua
@@ -14,28 +14,8 @@ local concat, format = table.concat, string.format
local tostring, type = tostring, type
local strip = string.strip
-local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
local lpegmatch = lpeg.match
-
--- temporary here
-
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = Cs((number + 1)^0)
-
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
-
-lpeg.patterns.stripzeros = stripper
+local stripper = lpeg.patterns.stripzeros
function formatters.stripzeros(str)
return lpegmatch(stripper,str)
diff --git a/Master/texmf-dist/tex/context/base/util-jsn.lua b/Master/texmf-dist/tex/context/base/util-jsn.lua
new file mode 100644
index 00000000000..7493f108d22
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-jsn.lua
@@ -0,0 +1,145 @@
+if not modules then modules = { } end modules ['util-jsn'] = {
+ version = 1.001,
+ comment = "companion to m-json.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Of course we could make a nice complete parser with proper error messages but
+-- as json is generated programmatically errors are systematic and we can assume
+-- a correct stream. If not, we have some fatal error anyway. So, we can just rely
+-- on strings being strings (apart from the unicode escape which is not in 5.1) and
+-- as we first catch known types we just assume that anything else is a number.
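+
+-- A small illustration of the intended round trip (the values are just an example):
+--
+-- local t = json.tolua('{ "a" : true, "b" : [ 123, 456 ] }') -- t.a == true, t.b[2] == 456
+-- local s = json.tostring(t)                                 -- back to a json string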
+
+local P, V, R, S, C, Cc, Cs, Ct, Cf, Cg = lpeg.P, lpeg.V, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cg
+local lpegmatch = lpeg.match
+local format = string.format
+local utfchar = utf.char
+local concat = table.concat
+
+local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type
+
+local json = utilities.json or { }
+utilities.json = json
+
+-- moduledata = moduledata or { }
+-- moduledata.json = json
+
+-- \\ \/ \b \f \n \r \t \uHHHH
+
+local lbrace = P("{")
+local rbrace = P("}")
+local lparent = P("[")
+local rparent = P("]")
+local comma = P(",")
+local colon = P(":")
+local dquote = P('"')
+
+local whitespace = lpeg.patterns.whitespace
+local optionalws = whitespace^0
+
+local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end
+local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote
+local jtrue = P("true") * Cc(true)
+local jfalse = P("false") * Cc(false)
+local jnull = P("null") * Cc(nil)
+local jnumber = (1-whitespace-rparent-rbrace-comma)^1 / tonumber
+
+local key = jstring
+
+local jsonconverter = { "value",
+ object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
+ pair = Cg(optionalws * key * optionalws * colon * V("value")),
+ array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
+ value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws,
+}
+
+-- local jsonconverter = { "value",
+-- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
+-- pair = Cg(optionalws * V("string") * optionalws * colon * V("value")),
+-- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
+-- string = jstring,
+-- value = optionalws * (V("string") + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws,
+-- }
+
+-- lpeg.print(jsonconverter) -- size 181
+
+function json.tolua(str)
+ return lpegmatch(jsonconverter,str)
+end
+
+local function tojson(value,t) -- we could optimize #t
+ local kind = type(value)
+ if kind == "table" then
+ local done = false
+ local size = #value
+ if size == 0 then
+ for k, v in next, value do
+ if done then
+ t[#t+1] = ","
+ else
+ t[#t+1] = "{"
+ done = true
+ end
+ t[#t+1] = format("%q:",k)
+ tojson(v,t)
+ end
+ if done then
+ t[#t+1] = "}"
+ else
+ t[#t+1] = "{}"
+ end
+ elseif size == 1 then
+ -- we can optimize for non tables
+ t[#t+1] = "["
+ tojson(value[1],t)
+ t[#t+1] = "]"
+ else
+ for i=1,size do
+ if done then
+ t[#t+1] = ","
+ else
+ t[#t+1] = "["
+ done = true
+ end
+ tojson(value[i],t)
+ end
+ t[#t+1] = "]"
+ end
+ elseif kind == "string" then
+ t[#t+1] = format("%q",value)
+ elseif kind == "number" then
+ t[#t+1] = value
+ elseif kind == "boolean" then
+ t[#t+1] = tostring(value)
+ end
+ return t
+end
+
+function json.tostring(value)
+ -- todo optimize for non table
+ local kind = type(value)
+ if kind == "table" then
+ return concat(tojson(value,{}),"")
+ elseif kind == "string" or kind == "number" then
+ return value
+ else
+ return tostring(value)
+ end
+end
+
+-- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]]
+
+-- tmp = json.tolua(tmp)
+-- inspect(tmp)
+-- tmp = json.tostring(tmp)
+-- inspect(tmp)
+-- tmp = json.tolua(tmp)
+-- inspect(tmp)
+-- tmp = json.tostring(tmp)
+-- inspect(tmp)
+
+-- inspect(json.tostring(true))
+
+return json
diff --git a/Master/texmf-dist/tex/context/base/util-lib.lua b/Master/texmf-dist/tex/context/base/util-lib.lua
new file mode 100644
index 00000000000..fde1f987e7f
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-lib.lua
@@ -0,0 +1,233 @@
+if not modules then modules = { } end modules ['util-lib'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is experimental code for Hans and Luigi. Don't depend on it! There
+-- will be a plain variant.
+
+--[[
+
+The problem with library bindings is manifold. They are of course platform
+dependent and while a binary with its directly related libraries is often
+easy to maintain and load, additional libraries can each have their own demands.
+
+One important aspect is that loading additional libraries from within the
+loaded one is also operating system dependent. There can be shared libraries
+elsewhere on the system, and as there can be multiple libraries with the same
+name but different usage and versioning, there can be clashes. So there has to
+be some logic in where to look for these sublibraries.
+
+We found out that, for instance on windows, libraries are by default sought on
+the parent's path and then on the binary paths, and these of course can be in
+and out of our control, thereby enlarging the chances of a clash. A rather
+safe solution for that is to load the library from the path where it sits.
+
+Another aspect is initialization. When you ask for a library t.e.x it will
+try to initialize luaopen_t_e_x no matter whether such an initializer is present.
+However, because loading is configurable and in the case of luatex is already
+partly under our control, this is easy to deal with. We only have to make
+sure that we inform the loader that the library has been loaded so that
+it won't load it twice.
+
+In swiglib we have chosen a clear organization and although one can use
+variants, normally in the tex directory structure predictability is more or
+less the standard. For instance:
+
+.../tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/5.6/core.dll
+
+The lookups are determined via an entry in texmfcnf.lua:
+
+CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine,luatex}/lua//",
+
+A request for t.e.x is converted to t/e/x.dll or t/e/x.so depending on the
+platform. Then we use the regular finder to locate the file in the tex
+directory structure. Once located we go to the path where it sits, load the
+file and return to the original path. We register it as t.e.x in order to
+prevent reloading and also because the base name is seldom unique.
+
+The main function is a big one and evolved out of experiments that Luigi
+Scarso and I conducted when playing with variants of SwigLib. The function
+locates the library using the context mkiv resolver that operates on the
+tds tree and if that doesn't work out well, the normal clib path is used.
+
+The lookup is somewhat clever in the sense that it can deal with (optional)
+versions and can fall back on non versioned alternatives if needed, either
+with or without a wildcard lookup.
+
+This code is experimental and by providing a special abstract loader (called
+swiglib) we can start using the libraries.
+
+]]--
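+
+-- So, assuming the cnf entry above, a lookup roughly boils down to (a sketch):
+--
+-- requireswiglib("swiglib.mysql.core","5.6")
+--
+-- which first checks swiglib/mysql/5.6/core.so (or .dll), then
+-- swiglib/mysql/core.so, then the wildcard swiglib/mysql/.*/core.so in the
+-- tds tree, and finally falls back on the clib paths.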
+
+-- seems to be clua in recent texlive
+
+local gsub, find = string.gsub, string.find
+local pathpart, nameonly, joinfile = file.pathpart, file.nameonly, file.join
+local findfile, findfiles = resolvers and resolvers.findfile, resolvers and resolvers.findfiles
+
+local loaded = package.loaded
+
+local report_swiglib = logs.reporter("swiglib")
+local trace_swiglib = false trackers.register("resolvers.swiglib", function(v) trace_swiglib = v end)
+
+-- We can check if there is more than one component, and if not, we can
+-- append 'core'.
+
+local function requireswiglib(required,version)
+ local library = loaded[required]
+ if library == nil then
+ -- initialize a few variables
+ local required_full = gsub(required,"%.","/")
+ local required_path = pathpart(required_full)
+ local required_base = nameonly(required_full)
+ local required_name = required_base .. "." .. os.libsuffix
+ local version = type(version) == "string" and version ~= "" and version or false
+ -- helper
+ local function check(locate,...)
+ local found_library = nil
+ if version then
+ local asked_library = joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found_library = locate(asked_library,...)
+ if not found_library or found_library == ""then
+ asked_library = joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library = locate(asked_library,...)
+ end
+ else
+ local asked_library = joinfile(required_path,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found_library = locate(asked_library,...)
+ end
+ return found_library and found_library ~= "" and found_library or false
+ end
+ -- check cnf spec using name and version
+ local found_library = findfile and check(findfile,"lib")
+ -- check cnf spec using wildcard
+ if findfiles and not found_library then
+ local asked_library = joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list = findfiles(asked_library,"lib",true)
+ if list and #list > 0 then
+ table.sort(list)
+ found_library = list[#list]
+ end
+ end
+ -- check clib paths using name and version
+ if not found_library then
+ package.extraclibpath(environment.ownpath)
+ local paths = package.clibpaths()
+ for i=1,#paths do
+ local found_library = check(lfs.isfile)
+ if found_library then
+ break
+ end
+ end
+ end
+ -- load and initialize when found
+ if not found_library then
+ if trace_swiglib then
+ report_swiglib("not found: %a",asked_library)
+ end
+ library = false
+ else
+ local path = pathpart(found_library)
+ local base = nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ library = package.loadlib(found_library,"luaopen_" .. required_base)
+ if type(library) == "function" then
+ library = library()
+ else
+ library = false
+ end
+ dir.pop()
+ end
+ -- cache result
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required] = library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+
+--[[
+
+For convenience we make the require loader function swiglib aware. Alternatively
+we could put the specific loader in the global namespace.
+
+]]--
+
+local savedrequire = require
+
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
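+
+-- For instance (a sketch, the library names are just examples):
+--
+-- local core = require("swiglib.mysql.core") -- handled by requireswiglib
+-- local core = require("swiglib.mysql.core","5.6") -- prefers the 5.6 subpath
+-- local lfs = require("lfs") -- anything else goes to the regular loader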
+
+--[[
+
+At the cost of some overhead we provide a specific loader so that we can keep
+track of swiglib usage which is handy for development. In context this is the
+recommended loader.
+
+]]--
+
+local swiglibs = { }
+
+function swiglib(name,version)
+ local library = swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ report_swiglib("loading %a",name)
+ library = requireswiglib("swiglib." .. name,version)
+ swiglibs[name] = library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+
+statistics.register("used swiglibs", function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
+
+--[[
+
+So, we now have:
+
+local gm = require("swiglib.gmwand.core")
+local gm = swiglib("gmwand.core")
+local sq = swiglib("mysql.core")
+local sq = swiglib("mysql.core","5.6")
+
+Watch out, the last one is less explicit and lacks the swiglib prefix.
+
+]]--
diff --git a/Master/texmf-dist/tex/context/base/util-lua.lua b/Master/texmf-dist/tex/context/base/util-lua.lua
index b49e93beb38..f3be9dcd261 100644
--- a/Master/texmf-dist/tex/context/base/util-lua.lua
+++ b/Master/texmf-dist/tex/context/base/util-lua.lua
@@ -2,62 +2,350 @@ if not modules then modules = { } end modules ['util-lua'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment = "the strip code is written by Peter Cawley",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
+-- we will remove the 5.1 code some day soon
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
+local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
+local load, loadfile, type = load, loadfile, type
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+local luautilities = utilities.lua
+
+local report_lua = logs.reporter("system","lua")
+
+local tracestripping = false
+local forcestupidcompile = true -- use internal bytecode compiler
+luautilities.stripcode = true -- support stripping when asked for
+luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
+luautilities.nofstrippedchunks = 0
+luautilities.nofstrippedbytes = 0
+local strippedchunks = { } -- allocate()
+luautilities.strippedchunks = strippedchunks
+
+luautilities.suffixes = {
+ tma = "tma",
+ tmc = jit and "tmb" or "tmc",
+ lua = "lua",
+ luc = jit and "lub" or "luc",
+ lui = "lui",
+ luv = "luv",
+ luj = "luj",
+ tua = "tua",
+ tuc = "tuc",
+}
+
+-- environment.loadpreprocessedfile can be set to a preprocessor
+
+if jit or status.luatex_version >= 74 then
+
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
end
+ strippedchunks[#strippedchunks+1] = name
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
end
-end
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- utilities.report("lua: compiling %s into %s",luafile,lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
- if strip ~= false then
- command = "-s " .. command
+ local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ if code and code ~= "" then
+ code = load(code)
+ if code then
+ code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code ~= "" then
+ register(name)
+ io.savedata(lucfile,code)
+ return true, 0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false, 0
end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
- if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
+
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ name = name or fullname
+ local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)), 0
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)), 0
+ else
+ return code, 0
+ end
end
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- utilities.report("lua: removing %s",luafile)
- os.remove(luafile)
+
+ function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code = load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code = dump(code,true)
+ end
+ return load(code), 0
end
- return done
-end
---~ local getmetatable, type = getmetatable, type
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done = stupidcompile(luafile,lucfile,strip ~= false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+
+ function luautilities.loadstripped(...)
+ local l = load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
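+
+ -- for instance (a sketch):
+ --
+ -- local f = luautilities.loadstripped("return 1 + 1")
+ -- print(f and f()) -- 2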
+
+else
+
+ -- The next function was posted by Peter Cawley on the lua list and strips line
+ -- number information etc. from the bytecode data blob. We only apply this trick
+ -- when we store data tables. Stripping makes the compressed format file about
+ -- 1MB smaller (and uncompressed we save at least 6MB).
+ --
+ -- You can consider this feature an experiment, so it might disappear. There is
+ -- no noticeable gain in runtime although the memory footprint should be somewhat
+ -- smaller (and the file system has a bit less to deal with).
+ --
+ -- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ...
+
+ local function register(name,before,after)
+ local delta = before - after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1] = name
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
+ return delta
+ end
+
+ local strip_code_pc
+
+ if _MAJORVERSION == 5 and _MINORVERSION == 1 then
+
+ strip_code_pc = function(dump,name)
+ local before = #dump
+ local version, format, endian, int, size, ins, num = byte(dump,5,11)
+ local subint
+ if endian == 1 then
+ subint = function(dump, i, l)
+ local val = 0
+ for n = l, 1, -1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ else
+ subint = function(dump, i, l)
+ local val = 0
+ for n = 1, l, 1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ end
+ local strip_function
+ strip_function = function(dump)
+ local count, offset = subint(dump, 1, size)
+ local stripped, dirty = rep("\0", size), offset + count
+ offset = offset + count + int * 2 + 4
+ offset = offset + int + subint(dump, offset, int) * ins
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ local t
+ t, offset = subint(dump, offset, 1)
+ if t == 1 then
+ offset = offset + 1
+ elseif t == 4 then
+ offset = offset + size + subint(dump, offset, size)
+ elseif t == 3 then
+ offset = offset + num
+ end
+ end
+ count, offset = subint(dump, offset, int)
+ stripped = stripped .. sub(dump,dirty, offset - 1)
+ for n = 1, count do
+ local proto, off = strip_function(sub(dump,offset, -1))
+ stripped, offset = stripped .. proto, offset + off - 1
+ end
+ offset = offset + subint(dump, offset, int) * int + int
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size + int * 2
+ end
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size
+ end
+ stripped = stripped .. rep("\0", int * 3)
+ return stripped, offset
+ end
+ dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
+ local after = #dump
+ local delta = register(name,before,after)
+ return dump, delta
+ end
+
+ else
+
+ strip_code_pc = function(dump,name)
+ return dump, 0
+ end
+
+ end
---~ local types = { }
+ -- ... end of borrowed code.
---~ function utilities.lua.registerdatatype(d,name)
---~ types[getmetatable(d)] = name
---~ end
+ -- quite subtle ... doing this wrong incidentally can give more bytes
---~ function utilities.lua.datatype(d)
---~ local t = type(d)
---~ if t == "userdata" then
---~ local m = getmetatable(d)
---~ return m and types[m] or "userdata"
---~ else
---~ return t
---~ end
---~ end
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip then
+ local code, n = strip_code_pc(dump(code),name)
+ return load(code), n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ end
---~ utilities.lua.registerdatatype(lpeg.P("!"),"lpeg")
+ function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ local n = 0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code = load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code, n = strip_code_pc(dump(code),name)
+ end
+ return load(code), n
+ end
+
+ local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ local n = 0
+ if code and code ~= "" then
+ code = load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code = dump(code)
+ if strip then
+ code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported
+ end
+ if code and code ~= "" then
+ io.savedata(lucfile,code)
+ end
+ end
+ return n
+ end
+
+ local luac_normal = "texluac -o %q %q"
+ local luac_strip = "texluac -s -o %q %q"
+
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done = false
+ if strip ~= false then
+ strip = true
+ end
+ if forcestupidcompile then
+ fallback = true
+ elseif strip then
+ done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
+ else
+ done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
+ end
+ if not done and fallback then
+ local n = stupidcompile(luafile,lucfile,strip)
+ if n > 0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup = false -- better see how bad it is
+ done = true -- hm
+ end
+ if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
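+
+ -- a typical call (a sketch, file names made up):
+ --
+ -- luautilities.compile("foo.lua","foo.luc",false,true,true)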
+
+ luautilities.loadstripped = loadstring
+
+end
---~ print(utilities.lua.datatype(lpeg.P("oeps")))
+-- local getmetatable, type = getmetatable, type
+--
+-- local types = { }
+--
+-- function luautilities.registerdatatype(d,name)
+-- types[getmetatable(d)] = name
+-- end
+--
+-- function luautilities.datatype(d)
+-- local t = type(d)
+-- if t == "userdata" then
+-- local m = getmetatable(d)
+-- return m and types[m] or "userdata"
+-- else
+-- return t
+-- end
+-- end
+--
+-- luautilities.registerdatatype(lpeg.P("!"),"lpeg")
+--
+-- print(luautilities.datatype(lpeg.P("oeps")))
diff --git a/Master/texmf-dist/tex/context/base/util-mrg.lua b/Master/texmf-dist/tex/context/base/util-mrg.lua
index acf04fead70..78b23dcbbff 100644
--- a/Master/texmf-dist/tex/context/base/util-mrg.lua
+++ b/Master/texmf-dist/tex/context/base/util-mrg.lua
@@ -12,14 +12,17 @@ local gsub, format = string.gsub, string.format
local concat = table.concat
local type, next = type, next
-utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
-utilities.report = logs and logs.reporter("system") or print
-
-local merger = utilities.merger
+local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.Cb, lpeg.Cg
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+utilities = utilities or { }
+local merger = utilities.merger or { }
+utilities.merger = merger
merger.strip_comment = true
+local report = logs.reporter("system","merge")
+utilities.report = report
+
local m_begin_merge = "begin library merge"
local m_end_merge = "end library merge"
local m_begin_closure = "do -- create closure to overcome 200 locals limit"
@@ -42,6 +45,15 @@ local m_faked =
"-- " .. m_begin_merge .. "\n\n" ..
"-- " .. m_end_merge .. "\n\n"
+local m_report = [[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+
+local m_preloaded = [[package.loaded[%q] = package.loaded[%q] or true]]
+
local function self_fake()
return m_faked
end
@@ -53,23 +65,87 @@ end
local function self_load(name)
local data = io.loaddata(name) or ""
if data == "" then
- utilities.report("merge: unknown file %s",name)
+ report("unknown file %a",name)
else
- utilities.report("merge: inserting %s",name)
+ report("inserting file %a",name)
end
return data or ""
end
+-- -- saves some 20K .. scite comments
+-- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+-- -- saves some 20K .. ldx comments
+-- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
+
+local space = patterns.space
+local eol = patterns.newline
+local equals = P("=")^0
+local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1
+local close = P("]") * C(equals) * P("]")
+local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end)
+local longstring = open * (1 - closeeq)^0 * close
+
+local quoted = patterns.quoted
+local emptyline = space^0 * eol
+local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%")
+local operator2 = S("*+/")
+local operator3 = S("-")
+local separator = S(",;")
+
+local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" +
+ (P("=") * space^1 * P("{")) / "={" +
+ (P("(") * space^1) / "(" +
+ (P("{") * (space+eol)^1 * P("}")) / "{}"
+local strings = quoted -- / function (s) print("<<"..s..">>") return s end
+local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / ""
+local longstr = longstring
+local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n"
+local pack = ((eol+space)^0 / "") * operator1 * ((eol+space)^0 / "") +
+ ((eol+space)^0 / "") * operator2 * ((space)^0 / "") +
+ ((eol+space)^1 / "") * operator3 * ((space)^1 / "") +
+ ((space)^0 / "") * separator * ((space)^0 / "")
+local lines = emptyline^2 / "\n"
+local spaces = (space * space) / " "
+----- spaces = ((space+eol)^1 ) / " "
+
+local compact = Cs ( (
+ ignore +
+ strings +
+ longcmt +
+ longstr +
+ comment +
+ pack +
+ lines +
+ spaces +
+ 1
+)^1 )
+
+local strip = Cs((emptyline^2/"\n" + 1)^0)
+local stripreturn = Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1)
+
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
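+
+-- for instance (a sketch):
+--
+-- print(merger.compact("local a  =  1\n\n\n-- a comment\n\nlocal b = 2\n"))
+--
+-- which collapses the extra spaces, the empty lines and the comment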
+
+local function self_compact(data)
+ local delta = 0
+ if merger.strip_comment then
+ local before = #data
+ data = lpegmatch(compact,data)
+ data = lpegmatch(strip,data) -- also strips in longstrings ... alas
+ -- data = string.strip(data)
+ local after = #data
+ delta = before - after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data, delta
+end
+
local function self_save(name, data)
if data ~= "" then
- if merger.strip_comment then
- -- saves some 20K
- local n = #data
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
- utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
- end
io.savedata(name,data)
- utilities.report("merge: saving %s",name)
+ report("saving %s with size %s",name,#data)
end
end
@@ -86,7 +162,7 @@ local function self_libs(libs,list)
local lib = libs[i]
for j=1,#list do
local pth = gsub(list[j],"\\","/") -- file.clean_path
- utilities.report("merge: checking library path %s",pth)
+ report("checking library path %a",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
foundpath = pth
@@ -95,30 +171,37 @@ local function self_libs(libs,list)
if foundpath then break end
end
if foundpath then
- utilities.report("merge: using library path %s",foundpath)
- local right, wrong = { }, { }
+ report("using library path %a",foundpath)
+ local right, wrong, original, stripped = { }, { }, 0, 0
for i=1,#libs do
local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
- utilities.report("merge: using library %s",fullname)
+ report("using library %a",fullname)
+ local preloaded = file.nameonly(lib)
+ local data = io.loaddata(fullname,true)
+ original = original + #data
+ local data, delta = self_compact(data)
right[#right+1] = lib
result[#result+1] = m_begin_closure
- result[#result+1] = io.loaddata(fullname,true)
+ result[#result+1] = format(m_preloaded,preloaded,preloaded)
+ result[#result+1] = data
result[#result+1] = m_end_closure
+ stripped = stripped + delta
else
- utilities.report("merge: skipping library %s",fullname)
+ report("skipping library %a",fullname)
wrong[#wrong+1] = lib
end
end
- if #right > 0 then
- utilities.report("merge: used libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utilities.report("merge: skipped libraries: %s",concat(wrong," "))
- end
+ right = #right > 0 and concat(right," ") or "-"
+ wrong = #wrong > 0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1] = format(m_report,right,wrong,original,stripped)
else
- utilities.report("merge: no valid library path found")
+ report("no valid library path found")
end
return concat(result, "\n\n")
end
diff --git a/Master/texmf-dist/tex/context/base/util-pck.lua b/Master/texmf-dist/tex/context/base/util-pck.lua
index d964c7a23a2..cf1445b4036 100644
--- a/Master/texmf-dist/tex/context/base/util-pck.lua
+++ b/Master/texmf-dist/tex/context/base/util-pck.lua
@@ -10,6 +10,7 @@ if not modules then modules = { } end modules ['util-pck'] = {
local next, tostring, type = next, tostring, type
local sort, concat = table.sort, table.concat
+local sortedhashkeys, sortedkeys = table.sortedhashkeys, table.sortedkeys
utilities = utilities or { }
utilities.packers = utilities.packers or { }
@@ -43,27 +44,19 @@ end
packers.hashed = hashed
packers.simplehashed = simplehashed
---~ local function pack(t,keys,hash,index)
---~ for k,v in next, t do
---~ if type(v) == "table" then
---~ pack(v,keys,hash,index)
---~ end
---~ if keys[k] and type(v) == "table" then
---~ local h = hashed(v)
---~ local i = hash[h]
---~ if not i then
---~ i = #index + 1
---~ index[i] = v
---~ hash[h] = i
---~ end
---~ t[k] = i
---~ end
---~ end
---~ end
+-- In luatex < 0.74 (lua 5.1) a next chain was the same for each run, so no sorting was needed,
+-- but in the latest greatest versions (lua 5.2) we really need to sort the keys in order
+-- not to get endless runs due to differences in tuc files.
local function pack(t,keys,hash,index)
if t then
- for k,v in next, t do
+ -- for k, v in next, t do
+ -- local sk = sortedkeys(t)
+ local sk = sortedhashkeys(t)
+ for i=1,#sk do
+ local k = sk[i]
+ local v = t[k]
+ --
if type(v) == "table" then
pack(v,keys,hash,index)
if keys[k] then
@@ -134,11 +127,12 @@ function packers.unpack(t,p,shared)
if tp.version == (p and p.version or packers.version) then
unpack(t,tp.keys,tp.index)
else
- -- fatal error, wrong version
+ return false
end
t.packer = nil
end
end
+ return true
end
function packers.strip(p)
diff --git a/Master/texmf-dist/tex/context/base/util-prs.lua b/Master/texmf-dist/tex/context/base/util-prs.lua
index ab1bfb8eb26..31e7ffa535b 100644
--- a/Master/texmf-dist/tex/context/base/util-prs.lua
+++ b/Master/texmf-dist/tex/context/base/util-prs.lua
@@ -6,44 +6,64 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
+local lpeg, table, string = lpeg, table, string
+local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
-local tostring, type, next = tostring, type, next
+local tostring, type, next, rawset = tostring, type, next, rawset
utilities = utilities or {}
-utilities.parsers = utilities.parsers or { }
-local parsers = utilities.parsers
-parsers.patterns = parsers.patterns or { }
+local parsers = utilities.parsers or { }
+utilities.parsers = parsers
+local patterns = parsers.patterns or { }
+parsers.patterns = patterns
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+-- we share some patterns
+
+local digit = R("09")
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local lparent = P("(")
+local rparent = P(")")
+local period = S(".")
+local punctuation = S(".,:;")
+local spacer = lpegpatterns.spacer
+local whitespace = lpegpatterns.whitespace
+local newline = lpegpatterns.newline
+local anything = lpegpatterns.anything
+local endofstring = lpegpatterns.endofstring
+
+local nobrace = 1 - ( lbrace + rbrace )
+local noparent = 1 - ( lparent + rparent)
+
-- we could use a Cf Cg construct
local escape, left, right = P("\\"), P('{'), P('}')
-lpeg.patterns.balanced = P {
+lpegpatterns.balanced = P {
[1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
[2] = left * V(1) * right
}
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
-local spaces = space^0
-local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
+local nestedbraces = P { lbrace * (nobrace + V(1))^0 * rbrace }
+local nestedparents = P { lparent * (noparent + V(1))^0 * rparent }
+local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nestedbraces)^0) * (rbrace/""))
+local content = (1-endofstring)^0
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
+lpegpatterns.nestedbraces = nestedbraces -- no capture
+lpegpatterns.nestedparents = nestedparents -- no capture
+lpegpatterns.nested = nestedbraces -- no capture
+lpegpatterns.argument = argument -- argument after e.g. =
+lpegpatterns.content = content -- rest after e.g =
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + C((nestedbraces + (1-comma))^0)
local key = C((1-equal-comma)^1)
local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
@@ -54,11 +74,11 @@ local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces *
-- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-local hash = { }
+-- todo: rewrite to fold etc
+--
+-- parse = lpeg.Cf(lpeg.Carg(1) * lpeg.Cg(key * equal * value) * separator^0,rawset)^0 -- lpeg.match(parse,"...",1,hash)
-local function set(key,value)
- hash[key] = value
-end
+local hash = { }
local function set(key,value)
hash[key] = value
@@ -68,9 +88,9 @@ local pattern_a_s = (pattern_a/set)^1
local pattern_b_s = (pattern_b/set)^1
local pattern_c_s = (pattern_c/set)^1
-parsers.patterns.settings_to_hash_a = pattern_a_s
-parsers.patterns.settings_to_hash_b = pattern_b_s
-parsers.patterns.settings_to_hash_c = pattern_c_s
+patterns.settings_to_hash_a = pattern_a_s
+patterns.settings_to_hash_b = pattern_b_s
+patterns.settings_to_hash_c = pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
if how == "strict" then
@@ -113,12 +133,13 @@ function parsers.settings_to_hash_strict(str,existing)
end
local separator = comma * space^0
-local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C((nestedbraces + (1-comma))^0)
+local pattern = spaces * Ct(value*(separator*value)^0)
-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-parsers.patterns.settings_to_array = pattern
+patterns.settings_to_array = pattern
-- we could use a weak table as cache
@@ -208,7 +229,49 @@ function parsers.simple_hash_to_string(h, separator)
return concat(t,separator or ",")
end
-local value = lbrace * C((nobrace + nested)^0) * rbrace
+-- for chem (currently one level)
+
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C(digit^1 * lparent * (noparent + nestedparents)^1 * rparent)
+ + C((nestedbraces + (1-comma))^1)
+local pattern_a = spaces * Ct(value*(separator*value)^0)
+
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s = lpegmatch(pattern_a,str)
+ if n == 1 then
+ return unpack(s)
+ else
+ local t, tn = { }, 0
+ for i=1,n do
+ for j=1,#s do
+ tn = tn + 1
+ t[tn] = s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + (C(digit^1)/tonumber * lparent * Cs((noparent + nestedparents)^1) * rparent) / repeater
+ + C((nestedbraces + (1-comma))^1)
+local pattern_b = spaces * Ct(value*(separator*value)^0)
+
+function parsers.settings_to_array_with_repeat(str,expand) -- beware: "" => { }
+ if expand then
+ return lpegmatch(pattern_b,str) or { }
+ else
+ return lpegmatch(pattern_a,str) or { }
+ end
+end
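+
+-- for instance (a sketch):
+--
+-- inspect(parsers.settings_to_array_with_repeat("a,3(b,c),d",true))
+--
+-- gives: { "a", "b", "c", "b", "c", "b", "c", "d" }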
+
+--
+
+local value = lbrace * C((nobrace + nestedbraces)^0) * rbrace
local pattern = Ct((space + value)^0)
function parsers.arguments_to_table(str)
@@ -237,3 +300,263 @@ end
function parsers.listitem(str)
return gmatch(str,"[^, ]+")
end
+
+--
+
+local pattern = Cs { "start",
+ start = V("one") + V("two") + V("three"),
+ rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
+ thousand = digit * digit * digit,
+ one = digit * V("rest"),
+ two = digit * digit * V("rest"),
+ three = V("thousand") * V("rest"),
+}
+
+lpegpatterns.splitthousands = pattern -- maybe better in the parsers namespace ?
+
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+
+-- print(parsers.splitthousands("11111111111.11"))
+
+local optionalwhitespace = whitespace^0
+
+lpegpatterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
+lpegpatterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
+lpegpatterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
+
+-- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
+-- inspect(lpegmatch(lpegpatterns.paragraphs,str))
+-- inspect(lpegmatch(lpegpatterns.sentences,str))
+-- inspect(lpegmatch(lpegpatterns.words,str))
+
+-- handy for k="v" [, ] k="v"
+
+local dquote = P('"')
+local equal = P('=')
+local escape = P('\\')
+local separator = S(' ,')
+
+local key = C((1-equal)^1)
+local value = dquote * C((1-dquote-escape*dquote)^0) * dquote
+
+local pattern = Cf(Ct("") * Cg(key * equal * value) * separator^0,rawset)^0 * P(-1)
+
+patterns.keq_to_hash_c = pattern
+
+function parsers.keq_to_hash(str)
+ if str and str ~= "" then
+ return lpegmatch(pattern,str)
+ else
+ return { }
+ end
+end
+
+-- inspect(lpeg.match(pattern,[[key="value"]]))
+
+local defaultspecification = { separator = ",", quote = '"' }
+
+-- this version accepts multiple separators and quotes as used in the
+-- database module
+
+function parsers.csvsplitter(specification)
+ specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator = specification.separator
+ local quotechar = specification.quote
+ local separator = S(separator ~= "" and separator or ",")
+ local whatever = C((1 - separator - newline)^0)
+ if quotechar and quotechar ~= "" then
+ local quotedata = nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar = P(chr)
+ local quoteword = quotechar * C((1 - quotechar)^0) * quotechar
+ if quotedata then
+ quotedata = quotedata + quoteword
+ else
+ quotedata = quoteword
+ end
+ end
+ whatever = quotedata + whatever
+ end
+ local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
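+
+-- for instance (a sketch):
+--
+-- local mysplitter = utilities.parsers.csvsplitter { separator = ",", quote = '"' }
+--
+-- inspect(mysplitter('"a","b"\n"c","d"\n')) -- { { "a", "b" }, { "c", "d" } }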
+
+-- and this is a slightly patched version of a version posted by Philipp Gesang
+
+-- local mycsvsplitter = utilities.parsers.rfc4180splitter()
+--
+-- local crap = [[
+-- first,second,third,fourth
+-- "1","2","3","4"
+-- "a","b","c","d"
+-- "foo","bar""baz","boogie","xyzzy"
+-- ]]
+--
+-- local list, names = mycsvsplitter(crap,true) inspect(list) inspect(names)
+-- local list, names = mycsvsplitter(crap) inspect(list) inspect(names)
+
+function parsers.rfc4180splitter(specification)
+ specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator = specification.separator --> rfc: COMMA
+ local quotechar = P(specification.quote) --> DQUOTE
+ local dquotechar = quotechar * quotechar --> 2DQUOTE
+ / specification.quote
+ local separator = S(separator ~= "" and separator or ",")
+ local escaped = quotechar
+ * Cs((dquotechar + (1 - quotechar))^0)
+ * quotechar
+ local non_escaped = C((1 - quotechar - newline - separator)^1)
+ local field = escaped + non_escaped
+ local record = Ct((field * separator^-1)^1)
+ local headerline = record * Cp()
+ local wholeblob = Ct((newline^-1 * record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header, position = lpegmatch(headerline,data)
+ local data = lpegmatch(wholeblob,data,position)
+ return data, header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
+
+-- utilities.parsers.stepper("1,7-",9,function(i) print(">>>",i) end)
+-- utilities.parsers.stepper("1-3,7,8,9")
+-- utilities.parsers.stepper("1-3,6,7",function(i) print(">>>",i) end)
+-- utilities.parsers.stepper(" 1 : 3, ,7 ")
+-- utilities.parsers.stepper("1:4,9:13,24:*",30)
+
+local function ranger(first,last,n,action)
+ if not first then
+ -- forget about it
+ elseif last == true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+
+local cardinal = lpegpatterns.cardinal / tonumber
+local spacers = lpegpatterns.spacer^0
+local endofstring = lpegpatterns.endofstring
+
+local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) )
+ * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1
+
+local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) )
+ * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring)
+
+function parsers.stepper(str,n,action)
+ if type(n) == "function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+
+--
+
+local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+
+patterns.unittotex = pattern
+
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+
+local pattern = Cs((P("^") / "<sup>" * lpegpatterns.integer * Cc("</sup>") + P(1))^0)
+
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+
+-- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %"))
+
+local cache = { }
+local spaces = lpeg.patterns.space^0
+local dummy = function() end
+
+table.setmetatableindex(cache,function(t,k)
+ local separator = P(k)
+ local value = (1-separator)^0
+ local pattern = spaces * C(value) * separator^0 * Cp()
+ t[k] = pattern
+ return pattern
+end)
+
+local commalistiterator = cache[","]
+
+function utilities.parsers.iterator(str,separator)
+ local n = #str
+ if n == 0 then
+ return dummy
+ else
+ local pattern = separator and cache[separator] or commalistiterator
+ local p = 1
+ return function()
+ if p <= n then
+ local s, e = lpegmatch(pattern,str,p)
+ if e then
+ p = e
+ return s
+ end
+ end
+ end
+ end
+end
+
+-- for s in utilities.parsers.iterator("a b c,b,c") do
+-- print(s)
+-- end
+
+local function initialize(t,name)
+ local source = t[name]
+ if source then
+ local result = { }
+ for k, v in next, t[name] do
+ result[k] = v
+ end
+ return result
+ else
+ return { }
+ end
+end
+
+local function fetch(t,name)
+ return t[name] or { }
+end
+
+local function process(result,more)
+ for k, v in next, more do
+ result[k] = v
+ end
+ return result
+end
+
+local name = C((1-S(", "))^1)
+local parser = (Carg(1) * name / initialize) * (S(", ")^1 * (Carg(1) * name / fetch))^0
+local merge = Cf(parser,process)
+
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
+end
+
+-- local t = {
+-- aa = { alpha = 1, beta = 2, gamma = 3, },
+-- bb = { alpha = 4, beta = 5, delta = 6, },
+-- cc = { epsilon = 3 },
+-- }
+--
+-- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc"))
diff --git a/Master/texmf-dist/tex/context/base/util-ran.lua b/Master/texmf-dist/tex/context/base/util-ran.lua
new file mode 100644
index 00000000000..50d0a7082ac
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-ran.lua
@@ -0,0 +1,107 @@
+if not modules then modules = { } end modules ['util-ran'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local random = math.random
+local concat = table.concat
+local sub, upper = string.sub, string.upper
+
+local randomizers = utilities.randomizers or { }
+utilities.randomizers = randomizers
+
+local l_one = "bcdfghjklmnpqrstvwxz"
+local l_two = "aeiouy"
+
+local u_one = upper(l_one)
+local u_two = upper(l_two)
+
+local n_one = #l_one
+local n_two = #l_two
+
+function randomizers.word(min,max,separator)
+ local t = { }
+ for i=1,random(min,max) do
+ if i % 2 == 0 then
+ local r = random(1,n_one)
+ t[i] = sub(l_one,r,r)
+ else
+ local r = random(1,n_two)
+ t[i] = sub(l_two,r,r)
+ end
+ end
+ return concat(t,separator)
+end
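+
+-- for instance (a sketch, output is random):
+--
+-- print(randomizers.word(5,10)) -- something like "ybekimo"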
+
+function randomizers.initials(min,max)
+ if not min then
+ if not max then
+ min, max = 1, 3
+ else
+ min, max = 1, min
+ end
+ elseif not max then
+ max = min
+ end
+ local t = { }
+ local n = random(min or 1,max or 3)
+ local m = 0
+ for i=1,n do
+ m = m + 1
+ if i % 2 == 0 then
+ local r = random(1,n_one)
+ t[m] = sub(u_one,r,r)
+ else
+ local r = random(1,n_two)
+ t[m] = sub(u_two,r,r)
+ end
+ m = m + 1
+ t[m] = "."
+ end
+ return concat(t)
+end
+
+function randomizers.firstname(min,max)
+ if not min then
+ if not max then
+ min, max = 3, 10
+ else
+ min, max = 1, min
+ end
+ elseif not max then
+ max = min
+ end
+ local t = { }
+ local n = random(min,max)
+ local b = true
+ if n % 2 == 0 then
+ local r = random(1,n_two)
+ t[1] = sub(u_two,r,r)
+ b = true
+ else
+ local r = random(1,n_one)
+ t[1] = sub(u_one,r,r)
+ b = false
+ end
+ for i=2,n do
+ if b then
+ local r = random(1,n_one)
+ t[i] = sub(l_one,r,r)
+ b = false
+ else
+ local r = random(1,n_two)
+ t[i] = sub(l_two,r,r)
+ b = true
+ end
+ end
+ return concat(t,separator)
+end
+
+randomizers.surname = randomizers.firstname
+
+-- for i=1,10 do
+-- print(randomizers.initials(1,3),randomizers.firstname(5,10),randomizers.surname(5,15))
+-- end
diff --git a/Master/texmf-dist/tex/context/base/util-seq.lua b/Master/texmf-dist/tex/context/base/util-seq.lua
index c3361b7be9e..27f95f0eeed 100644
--- a/Master/texmf-dist/tex/context/base/util-seq.lua
+++ b/Master/texmf-dist/tex/context/base/util-seq.lua
@@ -18,7 +18,7 @@ use locals to refer to them when compiling the chain.</p>
-- todo: protect groups (as in tasks)
local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
-local type, loadstring = type, loadstring
+local type, load = type, load
utilities = utilities or { }
local tables = utilities.tables
@@ -235,6 +235,7 @@ local function construct(t)
t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
end
end
+-- print(t.compiled)
return t.compiled -- also stored so that we can trace
end
@@ -257,7 +258,7 @@ compile = function(t,compiler,n) -- already referred to in sequencers.new
if compiled == "" then
runner = false
else
- runner = compiled and loadstring(compiled)()
+ runner = compiled and load(compiled)() -- we can use loadstripped here
end
t.runner = runner
return runner
@@ -273,7 +274,7 @@ sequencers.compile = compile
-- todo: use sequencer (can have arguments and returnvalues etc now)
-local template = [[
+local template_yes = [[
%s
return function(head%s)
local ok, done = false, false
@@ -281,6 +282,11 @@ return function(head%s)
return head, done
end]]
+local template_nop = [[
+return function()
+ return false, false
+end]]
+
function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring
local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip
local vars, calls, args, n = { }, { }, nil, 0
@@ -319,7 +325,6 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
end
end
end
- local processor = format(template,concat(vars,"\n"),args,concat(calls,"\n"))
- -- print(processor)
+ local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
return processor
end
diff --git a/Master/texmf-dist/tex/context/base/util-sql-imp-client.lua b/Master/texmf-dist/tex/context/base/util-sql-imp-client.lua
new file mode 100644
index 00000000000..e09dfde945a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-imp-client.lua
@@ -0,0 +1,256 @@
+if not modules then modules = { } end modules ['util-sql-client'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: make a converter
+
+local rawset, setmetatable = rawset, setmetatable
+local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
+local osclock = os.clock
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","client")
+
+local sql = utilities.sql
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local preparetemplate = helpers.preparetemplate
+local splitdata = helpers.splitdata
+local replacetemplate = utilities.templates.replace
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+
+-- Experiments with a pattern/action approach demonstrated that there is not much gain. We could do
+-- a runtime capture but creating all the small tables is not faster and it doesn't work well anyway.
+
+local separator = P("\t")
+local newline = patterns.newline
+local empty = Cc("")
+
+local entry = C((1-separator-newline)^0) -- C 10% faster than Cs
+
+local unescaped = P("\\n") / "\n"
+ + P("\\t") / "\t"
+ + P("\\0") / "\000"
+ + P("\\\\") / "\\"
+
+local entry = Cs((unescaped + (1-separator-newline))^0) -- C 10% faster than Cs but Cs needed due to nesting
+
+local getfirst = Ct( entry * (separator * (entry+empty))^0) + newline
+local skipfirst = (1-newline)^1 * newline
+local getfirstline = C((1-newline)^0)
+
+local cache = { }
+
+local function splitdata(data) -- todo: hash on first line ... maybe move to client module
+ if data == "" then
+ if trace_sql then
+ report_state("no data")
+ end
+ return { }, { }
+ end
+ local first = lpegmatch(getfirstline,data)
+ if not first then
+ if trace_sql then
+ report_state("no data")
+ end
+ return { }, { }
+ end
+ local p = cache[first]
+ if p then
+ -- report_state("reusing: %s",first)
+ local entries = lpegmatch(p.parser,data)
+ return entries or { }, p.keys
+ elseif p == false then
+ return { }, { }
+ elseif p == nil then
+ local keys = lpegmatch(getfirst,first) or { }
+ if #keys == 0 then
+ if trace_sql then
+ report_state("no banner")
+ end
+ cache[first] = false
+ return { }, { }
+ end
+ -- quite generic, could be a helper
+ local n = #keys
+ if n == 0 then
+ report_state("no fields")
+ cache[first] = false
+ return { }, { }
+ end
+ if n == 1 then
+ local key = keys[1]
+ if trace_sql then
+ report_state("one field with name %a",key)
+ end
+ p = Cg(Cc(key) * entry)
+ else
+ for i=1,n do
+ local key = keys[i]
+ if trace_sql then
+ report_state("field %s has name %a",i,key)
+ end
+ local s = Cg(Cc(key) * entry)
+ if p then
+ p = p * separator * s
+ else
+ p = s
+ end
+ end
+ end
+ p = Cf(Ct("") * p,rawset) * newline^1
+ p = skipfirst * Ct(p^0)
+ cache[first] = { parser = p, keys = keys }
+ local entries = lpegmatch(p,data)
+ return entries or { }, keys
+ end
+end
+
+local splitter = skipfirst * Ct((Ct(entry * (separator * entry)^0) * newline^1)^0)
+
+local function getdata(data)
+ return lpegmatch(splitter,data)
+end
+
+helpers.splitdata = splitdata
+helpers.getdata = getdata
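+
+-- for instance (a sketch of what the splitter returns):
+--
+-- local entries, keys = splitdata("id\tname\n1\tfoo\n2\tbar\n")
+--
+-- entries : { { id = "1", name = "foo" }, { id = "2", name = "bar" } }
+-- keys    : { "id", "name" }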
+
+local function dataprepared(specification)
+ local query = preparetemplate(specification)
+ if query then
+ io.savedata(specification.queryfile,query)
+ os.remove(specification.resultfile)
+ if trace_queries then
+ report_state("query: %s",query)
+ end
+ return true
+ else
+ -- maybe push an error
+ os.remove(specification.queryfile)
+ os.remove(specification.resultfile)
+ end
+end
+
+local function datafetched(specification,runner)
+ local command = replacetemplate(runner,specification)
+ if trace_sql then
+ local t = osclock()
+ report_state("command: %s",command)
+ local okay = os.execute(command)
+ report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux
+ return okay == 0
+ else
+ return os.execute(command) == 0
+ end
+end
+
+local function dataloaded(specification)
+ if trace_sql then
+ local t = osclock()
+ local data = io.loaddata(specification.resultfile) or ""
+ report_state("datasize: %.3f MB",#data/1024/1024)
+ report_state("loadtime: %.3f sec",osclock()-t)
+ return data
+ else
+ return io.loaddata(specification.resultfile) or ""
+ end
+end
+
+local function dataconverted(data,converter)
+ if converter then
+ local data = getdata(data)
+ if data then
+ data = converter.client(data)
+ end
+ return data
+ elseif trace_sql then
+ local t = osclock()
+ local data, keys = splitdata(data,target)
+ report_state("converttime: %.3f",osclock()-t)
+ report_state("keys: %s ",#keys)
+ report_state("entries: %s ",#data)
+ return data, keys
+ else
+ return splitdata(data)
+ end
+end
+
+-- todo: new, etc
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing client")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ if not dataprepared(specification) then
+ report_state("error in preparation")
+ return
+ end
+ if not datafetched(specification,methods.client.runner) then
+ report_state("error in fetching, query: %s",string.collapsespaces(io.loaddata(specification.queryfile)))
+ return
+ end
+ local data = dataloaded(specification)
+ if not data then
+ report_state("error in loading")
+ return
+ end
+ local data, keys = dataconverted(data,specification.converter)
+ if not data then
+ report_state("error in converting or no data")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+-- The following is not that (memory) efficient but normally we will use
+-- the lib anyway. Of course we could make a dedicated converter and/or
+-- hook into the splitter code but ... it doesn't make much sense because then
+-- we might as well move the builder to the library modules.
+--
+-- Here we reuse data as the indexes are the same, unless we hash.
+
+local wraptemplate = [[
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(data)
+ local target = %s -- data or { }
+ for i=1,#data do
+ local cells = data[i]
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.client = {
+ runner = [[mysql --batch --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" --default-character-set=utf8 < "%queryfile%" > "%resultfile%"]],
+ execute = execute,
+ usesfiles = true,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
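+
+-- A rough usage sketch; the field names follow the runner template above, the
+-- query template itself is handled in util-sql.lua:
+--
+-- local rows, keys = methods.client.execute {
+--     host       = "localhost",
+--     port       = 3306,
+--     username   = "guest",
+--     password   = "guest",
+--     database   = "test",
+--     queryfile  = "t-query.sql",
+--     resultfile = "t-result.dat",
+-- }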
diff --git a/Master/texmf-dist/tex/context/base/util-sql-imp-library.lua b/Master/texmf-dist/tex/context/base/util-sql-imp-library.lua
new file mode 100644
index 00000000000..15754e26ab1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-imp-library.lua
@@ -0,0 +1,289 @@
+if not modules then modules = { } end modules ['util-sql-library'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local function pcall(f,...) return true, f(...) end
+
+-- For some reason the sql lib partially fails in luatex when creating a hashed row. So far
+-- we couldn't figure it out (some issue with adapting the table that is passed as first
+-- argument in the fetch routine). Apart from this it looks like the mysql binding has some
+-- efficiency issues (like creating a keys and types table for each row) but that could be
+-- optimized. Anyhow, fetching results can be done as follows:
+
+-- local function collect_1(r)
+-- local t = { }
+-- for i=1,r:numrows() do
+-- t[#t+1] = r:fetch({},"a")
+-- end
+-- return t
+-- end
+--
+-- local function collect_2(r)
+-- local keys = r:getcolnames()
+-- local n = #keys
+-- local t = { }
+-- for i=1,r:numrows() do
+-- local v = { r:fetch() }
+-- local r = { }
+-- for i=1,n do
+-- r[keys[i]] = v[i]
+-- end
+-- t[#t+1] = r
+-- end
+-- return t
+-- end
+--
+-- local function collect_3(r)
+-- local keys = r:getcolnames()
+-- local n = #keys
+-- local t = { }
+-- for i=1,r:numrows() do
+-- local v = r:fetch({},"n")
+-- local r = { }
+-- for i=1,n do
+-- r[keys[i]] = v[i]
+-- end
+-- t[#t+1] = r
+-- end
+-- return t
+-- end
+--
+-- On a large table with some 8 columns (mixed text and numbers) we get the following
+-- timings (the 'a' alternative is already using the more efficient variant in the
+-- binding).
+--
+-- collect_1 : 1.31
+-- collect_2 : 1.39
+-- collect_3 : 1.75
+--
+-- So, as a workaround for this 'bug', the second alternative can be used.
+
+local format = string.format
+local lpegmatch = lpeg.match
+local setmetatable, type = setmetatable, type
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","library")
+
+local sql = utilities.sql
+local mysql = require("luasql.mysql")
+local cache = { }
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local querysplitter = helpers.querysplitter
+local dataprepared = helpers.preparetemplate
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+local formatters = string.formatters
+
+local initialize = mysql.mysql
+
+local function connect(session,specification)
+ return session:connect(
+ specification.database or "",
+ specification.username or "",
+ specification.password or "",
+ specification.host or "",
+ specification.port
+ )
+end
+
+local function fetched(specification,query,converter)
+ if not query or query == "" then
+ report_state("no valid query")
+ return false
+ end
+ local id = specification.id
+ local session, connection
+ if id then
+ local c = cache[id]
+ if c then
+ session = c.session
+ connection = c.connection
+ end
+ if not connection then
+ session = initialize()
+ if not session then
+ return formatters["no session for %a"](id)
+ end
+ connection = connect(session,specification)
+ if not connection then
+ return formatters["no connection for %a"](id)
+ end
+ cache[id] = { session = session, connection = connection }
+ end
+ else
+ session = initialize()
+ if not session then
+ return "no session"
+ end
+ connection = connect(session,specification)
+ if not connection then
+ return "no connection"
+ end
+ end
+ if not connection then
+ report_state("error in connection: %s@%s to %s:%s",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ return "no connection"
+ end
+ query = lpegmatch(querysplitter,query)
+ local result, okay
+ for i=1,#query do
+ local q = query[i]
+ local r, m = connection:execute(q)
+ if m then
+ report_state("error in query to host %a: %s",specification.host,string.collapsespaces(q))
+ if m then
+ report_state("message: %s",m)
+ end
+ end
+ local t = type(r)
+ if t == "userdata" then
+ result = r
+ okay = true
+ elseif t == "number" then
+ okay = true
+ end
+ end
+ if not okay then -- can go
+ if session then
+ session:close()
+ end
+ if connection then
+ connection:close()
+ end
+ if id then
+ cache[id] = nil
+ end
+ return "execution error"
+ end
+ local data, keys
+ if result then
+ if converter then
+ data = converter.library(result)
+ else
+ keys = result:getcolnames()
+ if keys then
+ data = { }
+ local n = result:numrows() or 0
+ if n > 0 then
+ local k = #keys
+ for i=1,n do
+ local v = { result:fetch() }
+ local d = { }
+ for i=1,k do
+ d[keys[i]] = v[i]
+ end
+ data[#data+1] = d
+ end
+ end
+ end
+ end
+ result:close()
+ end
+ if not id then
+ if connection then
+ connection:close()
+ end
+ if session then
+ session:close()
+ end
+ end
+ return false, data, keys
+end
+
+local function datafetched(specification,query,converter)
+ local callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ if not callokay then
+ report_state("call error, retrying")
+ callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ elseif connectionerror then
+ report_state("error: %s, retrying",connectionerror)
+ callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ end
+ if not callokay then
+ report_state("persistent call error")
+ elseif connectionerror then
+ report_state("persistent error: %s",connectionerror)
+ end
+ return data or { }, keys or { }
+end
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing library")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ local query = dataprepared(specification)
+ if not query then
+ report_state("error in preparation")
+ return
+ end
+ local data, keys = datafetched(specification,query,specification.converter)
+ if not data then
+ report_state("error in fetching")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+-- Here we build the dataset stepwise so we don't use the data hack that
+-- is used in the client variant.
+
+local wraptemplate = [[
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(result)
+ if not result then
+ return { }
+ end
+ local nofrows = result:numrows() or 0
+ if nofrows == 0 then
+ return { }
+ end
+ local target = { } -- no %s needed here
+ for i=1,nofrows do
+ local cells = { result:fetch() }
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.library = {
+ runner = function() end, -- never called
+ execute = execute,
+ initialize = initialize, -- returns session
+ usesfiles = false,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
diff --git a/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua b/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua
new file mode 100644
index 00000000000..9a22477a9b1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua
@@ -0,0 +1,490 @@
+if not modules then modules = { } end modules ['util-sql-swiglib'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- As the regular library is flawed (i.e. there are crashes in the table
+-- construction code) and also not that efficient, Luigi Scarso looked into
+-- a swig binding. This is a somewhat lower level approach but as we stay
+-- closer to the original library it's also less dependent.
+
+local concat = table.concat
+local format = string.format
+local lpegmatch = lpeg.match
+local setmetatable, type = setmetatable, type
+local sleep = os.sleep
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","swiglib")
+
+local sql = utilities.sql
+local mysql = require("swiglib.mysql.core") -- "5.6"
+
+-- inspect(table.sortedkeys(mysql))
+
+local nofretries = 5
+local retrydelay = 1
+
+local cache = { }
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local querysplitter = helpers.querysplitter
+local dataprepared = helpers.preparetemplate
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+
+local mysql_initialize = mysql.mysql_init
+
+local mysql_open_connection = mysql.mysql_real_connect
+local mysql_execute_query = mysql.mysql_real_query
+local mysql_close_connection = mysql.mysql_close
+
+local mysql_field_seek = mysql.mysql_field_seek
+local mysql_num_fields = mysql.mysql_num_fields
+local mysql_fetch_field = mysql.mysql_fetch_field
+local mysql_num_rows = mysql.mysql_num_rows
+local mysql_fetch_row = mysql.mysql_fetch_row
+local mysql_fetch_lengths = mysql.mysql_fetch_lengths
+local mysql_init = mysql.mysql_init
+local mysql_store_result = mysql.mysql_store_result
+local mysql_free_result = mysql.mysql_free_result
+local mysql_use_result = mysql.mysql_use_result
+
+local mysql_error_message = mysql.mysql_error
+local mysql_options_argument = mysql.mysql_options_argument
+
+local instance = mysql.MYSQL()
+
+local mysql_constant_false = false
+local mysql_constant_true = true
+
+-- if mysql_options_argument then
+--
+-- mysql_constant_false = mysql_options_argument(false) -- 0 "\0"
+-- mysql_constant_true = mysql_options_argument(true) -- 1 "\1"
+--
+-- -- print(swig_type(mysql_constant_false))
+-- -- print(swig_type(mysql_constant_true))
+--
+-- mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,mysql_constant_true);
+--
+-- else
+--
+-- print("")
+-- print("incomplete swiglib.mysql interface")
+-- print("")
+--
+-- end
+
+local typemap = mysql.MYSQL_TYPE_VAR_STRING and {
+ [mysql.MYSQL_TYPE_VAR_STRING ] = "string",
+ [mysql.MYSQL_TYPE_STRING ] = "string",
+ [mysql.MYSQL_TYPE_DECIMAL ] = "number",
+ [mysql.MYSQL_TYPE_SHORT ] = "number",
+ [mysql.MYSQL_TYPE_LONG ] = "number",
+ [mysql.MYSQL_TYPE_FLOAT ] = "number",
+ [mysql.MYSQL_TYPE_DOUBLE ] = "number",
+ [mysql.MYSQL_TYPE_LONGLONG ] = "number",
+ [mysql.MYSQL_TYPE_INT24 ] = "number",
+ [mysql.MYSQL_TYPE_YEAR ] = "number",
+ [mysql.MYSQL_TYPE_TINY ] = "number",
+ [mysql.MYSQL_TYPE_TINY_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_MEDIUM_BLOB] = "binary",
+ [mysql.MYSQL_TYPE_LONG_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_DATE ] = "date",
+ [mysql.MYSQL_TYPE_NEWDATE ] = "date",
+ [mysql.MYSQL_TYPE_DATETIME ] = "datetime",
+ [mysql.MYSQL_TYPE_TIME ] = "time",
+ [mysql.MYSQL_TYPE_TIMESTAMP ] = "time",
+ [mysql.MYSQL_TYPE_ENUM ] = "set",
+ [mysql.MYSQL_TYPE_SET ] = "set",
+ [mysql.MYSQL_TYPE_NULL ] = "null",
+}
+
+-- real_escape_string
+
+local function finish(t)
+ mysql_free_result(t._result_)
+end
+
+-- will become metatable magic
+
+-- local function analyze(result)
+-- mysql_field_seek(result,0)
+-- local nofrows = mysql_num_rows(result) or 0
+-- local noffields = mysql_num_fields(result)
+-- local names = { }
+-- local types = { }
+-- for i=1,noffields do
+-- local field = mysql_fetch_field(result)
+-- names[i] = field.name
+-- types[i] = field.type
+-- end
+-- return names, types, noffields, nofrows
+-- end
+
+local function getcolnames(t)
+ return t.names
+end
+
+local function getcoltypes(t)
+ return t.types
+end
+
+local function numrows(t)
+ return t.nofrows
+end
+
+-- swig_type
+
+-- local ulongArray_getitem = mysql.ulongArray_getitem
+-- local util_getbytearray = mysql.util_getbytearray
+
+-- local function list(t)
+-- local result = t._result_
+-- local row = mysql_fetch_row(result)
+-- local len = mysql_fetch_lengths(result)
+-- local result = { }
+-- for i=1,t.noffields do
+-- local r = i - 1 -- zero offset
+-- result[i] = util_getbytearray(row,r,ulongArray_getitem(len,r))
+-- end
+-- return result
+-- end
+
+-- local function hash(t)
+-- local list = util_mysql_fetch_fields_from_current_row(t._result_)
+-- local result = t._result_
+-- local fields = t.names
+-- local row = mysql_fetch_row(result)
+-- local len = mysql_fetch_lengths(result)
+-- local result = { }
+-- for i=1,t.noffields do
+-- local r = i - 1 -- zero offset
+-- result[fields[i]] = util_getbytearray(row,r,ulongArray_getitem(len,r))
+-- end
+-- return result
+-- end
+
+local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row
+local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows
+
+local function list(t)
+ return util_mysql_fetch_fields_from_current_row(t._result_)
+end
+
+local function hash(t)
+ local list = util_mysql_fetch_fields_from_current_row(t._result_)
+ local fields = t.names
+ local data = { }
+ for i=1,t.noffields do
+ data[fields[i]] = list[i]
+ end
+ return data
+end
+
+local function wholelist(t)
+ return util_mysql_fetch_all_rows(t._result_)
+end
+
+local mt = { __index = {
+ -- regular
+ finish = finish,
+ list = list,
+ hash = hash,
+ wholelist = wholelist,
+ -- compatibility
+ numrows = numrows,
+ getcolnames = getcolnames,
+ getcoltypes = getcoltypes,
+ }
+}
+
+-- session
+
+local function close(t)
+ mysql_close_connection(t._connection_)
+end
+
+local function execute(t,query)
+ if query and query ~= "" then
+ local connection = t._connection_
+ local result = mysql_execute_query(connection,query,#query)
+ if result == 0 then
+ local result = mysql_store_result(connection)
+ mysql_field_seek(result,0)
+ local nofrows = mysql_num_rows(result) or 0
+ local noffields = mysql_num_fields(result)
+ local names = { }
+ local types = { }
+ for i=1,noffields do
+ local field = mysql_fetch_field(result)
+ names[i] = field.name
+ types[i] = field.type
+ end
+ local t = {
+ _result_ = result,
+ names = names,
+ types = types,
+ noffields = noffields,
+ nofrows = nofrows,
+ }
+ return setmetatable(t,mt)
+ end
+ end
+ return false
+end
+
+local mt = { __index = {
+ close = close,
+ execute = execute,
+ }
+}
+
+local function open(t,database,username,password,host,port)
+ local connection = mysql_open_connection(t._session_,host or "localhost",username or "",password or "",database or "",port or 0,0,0)
+ if connection then
+ local t = {
+ _connection_ = connection,
+ }
+ return setmetatable(t,mt)
+ end
+end
+
+local function message(t)
+ return mysql_error_message(t._session_)
+end
+
+local function close(t)
+ -- dummy, as we have a global session
+end
+
+local mt = {
+ __index = {
+ connect = open,
+ close = close,
+ message = message,
+ }
+}
+
+local function initialize()
+ local session = {
+ _session_ = mysql_initialize(instance) -- maybe share, single thread anyway
+ }
+ return setmetatable(session,mt)
+end
+
+-- -- -- --
+
+local function connect(session,specification)
+ return session:connect(
+ specification.database or "",
+ specification.username or "",
+ specification.password or "",
+ specification.host or "",
+ specification.port
+ )
+end
+
+local function error_in_connection(specification,action)
+ report_state("error in connection: [%s] %s@%s to %s:%s",
+ action or "unknown",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+end
+
+local function datafetched(specification,query,converter)
+ if not query or query == "" then
+ report_state("no valid query")
+ return { }, { }
+ end
+ local id = specification.id
+ local session, connection
+ if id then
+ local c = cache[id]
+ if c then
+ session = c.session
+ connection = c.connection
+ end
+ if not connection then
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s.%s] %s@%s to %s:%s",
+ id,i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ if connection then
+ cache[id] = { session = session, connection = connection }
+ end
+ end
+ else
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s] %s@%s to %s:%s",
+ i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ end
+ if not connection then
+ report_state("error in connection: %s@%s to %s:%s",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ return { }, { }
+ end
+ query = lpegmatch(querysplitter,query)
+ local result, message, okay
+ for i=1,#query do
+ local q = query[i]
+ local r, m = connection:execute(q)
+ if m then
+ report_state("error in query, stage: %s",string.collapsespaces(q))
+ message = message and format("%s\n%s",message,m) or m
+ end
+ if type(r) == "table" then
+ result = r
+ okay = true
+ elseif not m then
+ okay = true
+ end
+ end
+ local data, keys
+ if result then
+ if converter then
+ data = converter.swiglib(result)
+ else
+ keys = result.names
+ data = { }
+ for i=1,result.nofrows do
+ data[i] = result:hash()
+ end
+ end
+ result:finish() -- result:close()
+ elseif message then
+ report_state("message %s",message)
+ end
+ if not keys then
+ keys = { }
+ end
+ if not data then
+ data = { }
+ end
+ if not id then
+ connection:close()
+ session:close()
+ end
+ return data, keys
+end
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing library")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ local query = dataprepared(specification)
+ if not query then
+ report_state("error in preparation")
+ return
+ end
+ local data, keys = datafetched(specification,query,specification.converter)
+ if not data then
+ report_state("error in fetching")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+local wraptemplate = [[
+local mysql = require("swigluamysql") -- will be stored in method
+
+----- mysql_fetch_row = mysql.mysql_fetch_row
+----- mysql_fetch_lengths = mysql.mysql_fetch_lengths
+----- util_unpackbytearray = mysql.util_unpackbytearray
+local util_mysql_fetch_fields_from_current_row
+ = mysql.util_mysql_fetch_fields_from_current_row
+
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(result)
+ if not result then
+ return { }
+ end
+ local nofrows = result.nofrows or 0
+ if nofrows == 0 then
+ return { }
+ end
+ local noffields = result.noffields or 0
+ local target = { } -- no %s needed here
+ result = result._result_
+ for i=1,nofrows do
+ -- local row = mysql_fetch_row(result)
+ -- local len = mysql_fetch_lengths(result)
+ -- local cells = util_unpackbytearray(row,noffields,len)
+ local cells = util_mysql_fetch_fields_from_current_row(result)
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.swiglib = {
+ runner = function() end, -- never called
+ execute = execute,
+ initialize = initialize, -- returns session
+ usesfiles = false,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
diff --git a/Master/texmf-dist/tex/context/base/util-sql-loggers.lua b/Master/texmf-dist/tex/context/base/util-sql-loggers.lua
new file mode 100644
index 00000000000..7fceb8032c0
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-loggers.lua
@@ -0,0 +1,277 @@
+if not modules then modules = { } end modules ['util-sql-loggers'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+
+local sql = utilities.sql
+local loggers = { }
+sql.loggers = loggers
+
+local trace_sql = false trackers.register("sql.loggers.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","loggers")
+
+loggers.newtoken = sql.tokens.new
+local makeconverter = sql.makeconverter
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "loggers")
+end
+
+loggers.usedb = checkeddb
+
+local totype = {
+ ["error"] = 1, [1] = 1, ["1"] = 1,
+ ["warning"] = 2, [2] = 2, ["2"] = 2,
+ ["debug"] = 3, [3] = 3, ["3"] = 3,
+ ["info"] = 4, [4] = 4, ["4"] = 4,
+}
+
+local fromtype = {
+ ["error"] = "error", [1] = "error", ["1"] = "error",
+ ["warning"] = "warning", [2] = "warning", ["2"] = "warning",
+ ["debug"] = "debug", [3] = "debug", ["3"] = "debug",
+ ["info"] = "info", [4] = "info", ["4"] = "info",
+}
+
+table.setmetatableindex(totype, function() return 4 end)
+table.setmetatableindex(fromtype,function() return "info" end)
+
+loggers.totype = totype
+loggers.fromtype = fromtype
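+
+-- Thanks to the metatable defaults above, unknown values simply fall back to
+-- the "info" level, for instance (illustration only):
+--
+--   totype["whatever"] -- 4
+--   fromtype[99]       -- "info"
+--   totype["error"]    -- 1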
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `time` int(11) NOT NULL,
+ `type` int(11) NOT NULL,
+ `action` varchar(15) NOT NULL,
+ `data` longtext,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id_unique_key` (`id`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function loggers.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function loggers.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template =[[
+ INSERT INTO %basename% (
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ ) VALUES (
+ %time%,
+ %type%,
+ '%action%',
+ '%[data]%'
+ ) ;
+]]
+
+function loggers.save(db,data) -- beware, we pass type and action in the data (saves a table)
+
+ if data then
+
+ local time = ostime()
+ local kind = totype[data.type]
+ local action = data.action or "unknown"
+
+ data.type = nil
+ data.action = nil
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ time = ostime(),
+ type = kind,
+ action = action,
+ data = data and db.serialize(data,"return") or "",
+ },
+ }
+
+ end
+
+end
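+
+-- A minimal usage sketch (illustration only; the 'user' field is made up and,
+-- like everything but type and action, simply ends up in the serialized data
+-- blob):
+--
+--   loggers.save(db, {
+--       type   = "warning",
+--       action = "login",
+--       user   = "someone",
+--   })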
+
+-- local template =[[
+-- REMOVE FROM
+-- %basename%
+-- WHERE
+-- `token` = '%token%' ;
+-- ]]
+--
+-- function loggers.remove(db,token)
+--
+-- db.execute {
+-- template = template,
+-- variables = {
+-- basename = db.basename,
+-- token = token,
+-- },
+-- }
+--
+-- if trace_sql then
+-- report("removed: %s",token)
+-- end
+--
+-- end
+
+local template_nop =[[
+ SELECT
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ FROM
+ %basename%
+ ORDER BY
+ `time`, `type`, `action`
+ DESC LIMIT
+ %limit% ;
+]]
+
+local template_yes =[[
+ SELECT
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ FROM
+ %basename%
+ %WHERE%
+ ORDER BY
+ `time`, `type`, `action`
+ DESC LIMIT
+ %limit% ;
+]]
+
+local converter = makeconverter {
+ -- { name = "time", type = os.localtime },
+ { name = "time", type = "number" },
+ { name = "type", type = fromtype },
+ { name = "action", type = "string" },
+ { name = "data", type = "deserialize" },
+}
+
+function loggers.collect(db,specification)
+
+ specification = specification or { }
+
+ local start = specification.start
+ local stop = specification.stop
+ local limit = specification.limit or 100
+ local kind = specification.type
+ local action = specification.action
+
+ local filtered = start or stop
+
+ local where = { }
+
+ if filtered then
+ local today = os.date("*t")
+
+ if type(start) ~= "table" then
+ start = { }
+ end
+ start = os.time {
+ day = start.day or today.day,
+ month = start.month or today.month,
+ year = start.year or today.year,
+ hour = start.hour or 0,
+ minute = start.minute or 0,
+ second = start.second or 0,
+ isdst = true,
+ }
+
+ if type(stop) ~= "table" then
+ stop = { }
+ end
+ stop = os.time {
+ day = stop.day or today.day,
+ month = stop.month or today.month,
+ year = stop.year or today.year,
+ hour = stop.hour or 24,
+ minute = stop.minute or 0,
+ second = stop.second or 0,
+ isdst = true,
+ }
+
+ -- report("filter: %s => %s",start,stop)
+
+ where[#where+1] = format("`time` BETWEEN %s AND %s",start,stop)
+
+ end
+
+ if kind then
+ where[#where+1] = format("`type` = %s",totype[kind])
+ end
+
+ if action then
+ where[#where+1] = format("`action` = '%s'",action)
+ end
+
+    local records, keys = db.execute {
+ template = filtered and template_yes or template_nop,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ limit = limit,
+ WHERE = #where > 0 and format("WHERE\n%s",concat(where," AND ")) or "",
+ },
+ }
+
+ if trace_sql then
+ report("collected: %s loggers",#records)
+ end
+
+ return records, keys
+
+end
diff --git a/Master/texmf-dist/tex/context/base/util-sql-sessions.lua b/Master/texmf-dist/tex/context/base/util-sql-sessions.lua
new file mode 100644
index 00000000000..76bb91962fa
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-sessions.lua
@@ -0,0 +1,349 @@
+if not modules then modules = { } end modules ['util-sql-sessions'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+-- maybe store threshold in session (in seconds)
+
+local tonumber = tonumber
+local format = string.format
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+
+-- In older frameworks we kept a session table in memory. This time we
+-- follow a route where we store session data in a sql table. Each session
+-- has a token (similar to what we do on q2p and pod services), a data
+-- blob which is just a serialized lua table (we could consider a dump instead)
+-- and two times: the creation and last accessed time. The first one is handy
+-- for statistics and the second one for cleanup. Both are just numbers so that
+-- we don't have to waste code on conversions. Anyhow, we provide variants so that
+-- we can always choose what is best.
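+
+-- A minimal usage sketch of the functions defined below (illustration only,
+-- assuming a valid presets table with host, username, password and database):
+--
+--   local db      = sessions.usedb(presets)
+--   local session = sessions.create(db, { count = 1 })
+--   session.data.count = session.data.count + 1
+--   sessions.save(db, session)
+--   local restored = sessions.restore(db, session.token)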
+
+local sql = utilities.sql
+local sessions = { }
+sql.sessions = sessions
+
+local trace_sql = false trackers.register("sql.sessions.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","sessions")
+
+sessions.newtoken = sql.tokens.new
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "sessions")
+end
+
+sessions.usedb = checkeddb
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `token` varchar(50) NOT NULL,
+ `data` longtext NOT NULL,
+ `created` int(11) NOT NULL,
+ `accessed` int(11) NOT NULL,
+ UNIQUE KEY `token_unique_key` (`token`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function sessions.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function sessions.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template =[[
+ INSERT INTO %basename% (
+ `token`,
+ `created`,
+ `accessed`,
+ `data`
+ ) VALUES (
+ '%token%',
+ %time%,
+ %time%,
+ '%[data]%'
+ ) ;
+]]
+
+function sessions.create(db,data)
+
+ local token = sessions.newtoken()
+ local time = ostime()
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = time,
+ data = db.serialize(data or { },"return")
+ },
+ }
+
+ if trace_sql then
+ report("created: %s at %s",token,osfulltime(time))
+ end
+
+ return {
+ token = token,
+ created = time,
+ accessed = time,
+ data = data,
+ }
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `data` = '%[data]%',
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.save(db,session)
+
+ local time = ostime()
+ local data = db.serialize(session.data or { },"return")
+ local token = session.token
+
+ session.accessed = time
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ data = data,
+ },
+ }
+
+ if trace_sql then
+ report("saved: %s at %s",token,osfulltime(time))
+ end
+
+ return session
+end
+
+local template = [[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.touch(db,token)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ },
+ }
+
+end
+
+local template = [[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.restore(db,token)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ },
+ }
+
+ local record = records and records[1]
+
+ if record then
+ if trace_sql then
+ report("restored: %s",token)
+ end
+ record.data = db.deserialize(record.data or "")
+ return record, keys
+ elseif trace_sql then
+ report("unknown: %s",token)
+ end
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.remove(db,token)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ if trace_sql then
+ report("removed: %s",token)
+ end
+
+end
+
+local template_collect_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ ORDER BY
+ `created` ;
+]]
+
+local template_collect_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+ `token`
+ FROM
+ %basename%
+ ORDER BY
+ `created` ;
+]]
+
+function sessions.collect(db,nodata)
+
+ local records, keys = db.execute {
+ template = nodata and template_collect_nop or template_collect_yes,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("collected: %s sessions",#records)
+ end
+
+ return records, keys
+
+end
+
+local template_cleanup_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `created` ;
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% ;
+]]
+
+local template_cleanup_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+ `token`
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `created` ;
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% ;
+]]
+
+function sessions.cleanupdb(db,delta,nodata)
+
+ local time = ostime()
+
+ local records, keys = db.execute {
+ template = nodata and template_cleanup_nop or template_cleanup_yes,
+ variables = {
+ basename = db.basename,
+ time = time - delta
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("cleaned: %s seconds before %s",delta,osfulltime(time))
+ end
+
+ return records, keys
+
+end
diff --git a/Master/texmf-dist/tex/context/base/util-sql-tickets.lua b/Master/texmf-dist/tex/context/base/util-sql-tickets.lua
new file mode 100644
index 00000000000..5e958299deb
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-tickets.lua
@@ -0,0 +1,772 @@
+if not modules then modules = { } end modules ['util-sql-tickets'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- TODO: MAKE SOME INTO STORED PROCEDURES
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+local tonumber = tonumber
+local format = string.format
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+local concat = table.concat
+
+local sql = utilities.sql
+local tickets = { }
+sql.tickets = tickets
+
+local trace_sql = false trackers.register("sql.tickets.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","tickets")
+
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+local execute = sql.execute
+
+tickets.newtoken = sql.tokens.new
+
+-- Beware: as an index can be a string or a number, we will create
+-- a combination of hash and index.
+
+local statustags = { [0] =
+ "unknown",
+ "pending",
+ "busy",
+ "finished",
+ "dependent", -- same token but different subtoken (so we only need to find the first)
+ "reserved-1",
+ "reserved-2",
+ "error",
+ "deleted",
+}
+
+local status = table.swapped(statustags)
+tickets.status = status
+tickets.statustags = statustags
+
+local s_unknown = status.unknown
+local s_pending = status.pending
+local s_busy = status.busy
+local s_finished = status.finished
+local s_dependent = status.dependent
+local s_error = status.error
+local s_deleted = status.deleted
+
+local s_rubish = s_error -- and higher
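+
+-- So, for instance (illustration only):
+--
+--   status.pending     -- 1
+--   statustags[s_busy] -- "busy"
+--   s_rubish           -- 7, i.e. 'error' and everything above counts as rubish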
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "tickets")
+end
+
+tickets.usedb = checkeddb
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `token` varchar(50) NOT NULL,
+ `subtoken` INT(11) NOT NULL,
+ `created` int(11) NOT NULL,
+ `accessed` int(11) NOT NULL,
+ `category` int(11) NOT NULL,
+ `status` int(11) NOT NULL,
+ `usertoken` varchar(50) NOT NULL,
+ `data` longtext NOT NULL,
+ `comment` longtext NOT NULL,
+
+ PRIMARY KEY (`id`),
+ UNIQUE INDEX `id_unique_index` (`id` ASC),
+ KEY `token_unique_key` (`token`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function tickets.createdb(presets,datatable)
+ local db = checkeddb(presets,datatable)
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function tickets.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template_push =[[
+ INSERT INTO %basename% (
+ `token`,
+ `subtoken`,
+ `created`,
+ `accessed`,
+ `status`,
+ `category`,
+ `usertoken`,
+ `data`,
+ `comment`
+ ) VALUES (
+ '%token%',
+ %subtoken%,
+ %time%,
+ %time%,
+ %status%,
+ %category%,
+ '%usertoken%',
+ '%[data]%',
+ '%[comment]%'
+ ) ;
+]]
+
+local template_fetch =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ AND
+ `subtoken` = '%subtoken%'
+ ;
+]]
+
+function tickets.create(db,ticket)
+
+ -- We assume a unique token .. if not we're toast anyway. We used to lock and
+ -- get the last id etc etc but there is no real need for that.
+
+ -- we could check for dependent here but we don't want the lookup
+
+ local token = ticket.token or tickets.newtoken()
+ local time = ostime()
+ local status = ticket.status
+ local category = ticket.category or 0
+ local subtoken = ticket.subtoken or 0
+ local usertoken = ticket.usertoken or ""
+ local comment = ticket.comment or ""
+
+ status = not status and subtoken > 1 and s_dependent or s_pending
+
+ local result, message = db.execute {
+ template = template_push,
+ variables = {
+ basename = db.basename,
+ token = token,
+ subtoken = subtoken,
+ time = time,
+ status = status,
+ category = category,
+ usertoken = usertoken,
+ data = db.serialize(ticket.data or { },"return"),
+ comment = comment,
+ },
+ }
+
+ -- We could stick to only fetching the id and make the table here
+ -- but we're not pushing that many tickets so we can as well follow
+ -- the lazy approach and fetch the whole.
+
+ local result, message = db.execute {
+ template = template_fetch,
+ variables = {
+ basename = db.basename,
+ token = token,
+ subtoken = subtoken,
+ },
+ }
+
+ if result and #result > 0 then
+ if trace_sql then
+ report("created: %s at %s",token,osfulltime(time))
+ end
+ return result[1]
+ else
+ report("failed: %s at %s",token,osfulltime(time))
+ end
+
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `data` = '%[data]%',
+ `status` = %status%,
+ `accessed` = %time%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.save(db,ticket)
+
+ local time = ostime()
+ local data = db.serialize(ticket.data or { },"return")
+ local status = ticket.status or s_error
+
+-- print("SETTING")
+-- inspect(data)
+
+ ticket.status = status
+ ticket.accessed = time
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = ticket.id,
+ time = ostime(),
+ status = status,
+ data = data,
+ },
+ }
+
+ if trace_sql then
+ report("saved: id %s, time %s",id,osfulltime(time))
+ end
+
+ return ticket
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.restore(db,id)
+
+ local record, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ time = ostime(),
+ },
+ }
+
+ local record = record and record[1]
+
+ if record then
+ if trace_sql then
+ report("restored: id %s",id)
+ end
+ record.data = db.deserialize(record.data or "")
+ return record
+ elseif trace_sql then
+ report("unknown: id %s",id)
+ end
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.remove(db,id)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ },
+ }
+
+ if trace_sql then
+ report("removed: id %s",id)
+ end
+
+end
+
+local template_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ ORDER BY
+ `id` ;
+]]
+
+local template_nop =[[
+ SELECT
+ `created`,
+ `usertoken`,
+ `accessed`,
+ `status`
+ FROM
+ %basename%
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.collect(db,nodata)
+
+ local records, keys = db.execute {
+ template = nodata and template_nop or template_yes,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("collected: %s tickets",#records)
+ end
+
+ return records, keys
+
+end
+
+-- We always keep the last select in the execute so one can have
+-- an update afterwards.
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% OR `status` >= %rubish% ;
+]]
+
+local template_cleanup_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `id` ;
+]] .. template
+
+local template_cleanup_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+        `token`,
+        `usertoken`
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `id` ;
+]] .. template
+
+function tickets.cleanupdb(db,delta,nodata) -- maybe delta in db
+
+ local time = delta and (ostime() - delta) or 0
+
+ local records, keys = db.execute {
+ template = nodata and template_cleanup_nop or template_cleanup_yes,
+ variables = {
+ basename = db.basename,
+ time = time,
+ rubish = s_rubish,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("cleaned: %s seconds before %s",delta,osfulltime(time))
+ end
+
+ return records, keys
+
+end
+
+-- status related functions
+
+local template =[[
+ SELECT
+ `status`
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ ORDER BY
+ `id`
+ ;
+]]
+
+function tickets.getstatus(db,token)
+
+ local record, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ local record = record and record[1]
+
+ return record and record.status or s_unknown
+
+end
+
+local template =[[
+ SELECT
+ `status`
+ FROM
+ %basename%
+ WHERE
+ `status` >= %rubish% OR `accessed` < %time%
+ ORDER BY
+ `id`
+ ;
+]]
+
+function tickets.getobsolete(db,delta)
+
+ local time = delta and (ostime() - delta) or 0
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ time = time,
+ rubish = s_rubish,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ SELECT
+ `id`
+ FROM
+ %basename%
+ WHERE
+ `status` = %status%
+ LIMIT
+ 1 ;
+]]
+
+function tickets.hasstatus(db,status)
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_unknown,
+ },
+ }
+
+ return records and #records > 0 or false
+
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `status` = %status%,
+ `accessed` = %time%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.setstatus(db,id,status)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ time = ostime(),
+ status = status or s_error,
+ },
+ }
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `status` IN (%status%) ;
+]]
+
+function tickets.prunedb(db,status)
+
+ if type(status) == "table" then
+ status = concat(status,",")
+ end
+
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_unknown,
+ },
+ }
+
+ if trace_sql then
+ report("pruned: status %s removed",status)
+ end
+
+end
+
+-- START TRANSACTION ; ... COMMIT ;
+-- LOCK TABLES %basename% WRITE ; ... UNLOCK TABLES ;
+
+local template_a = [[
+ SET
+ @last_ticket_token = '' ;
+ UPDATE
+ %basename%
+ SET
+ `token` = (@last_ticket_token := `token`),
+ `status` = %newstatus%,
+ `accessed` = %time%
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = @last_ticket_token
+ ORDER BY
+ `id`
+ ;
+]]
+
+local template_b = [[
+ SELECT
+ *
+ FROM
+ tickets
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+]]
+
+function tickets.getfirstwithstatus(db,status,newstatus)
+
+ local records
+
+ if type(newstatus) == "number" then -- todo: also accept string
+
+ records = db.execute {
+ template = template_a,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ newstatus = newstatus,
+ time = ostime(),
+ },
+ }
+
+
+ else
+
+ records = db.execute {
+ template = template_b,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ },
+ }
+
+ end
+
+ if type(records) == "table" and #records > 0 then
+
+ for i=1,#records do
+ local record = records[i]
+ record.data = db.deserialize(record.data or "")
+ record.status = newstatus or s_busy
+ end
+
+ return records
+
+ end
+end
+
+-- The next getter assumes that we have a scheduler running so that there is
+-- one process in charge of changing the status.
+
+local template = [[
+ SET
+ @last_ticket_token = '' ;
+ UPDATE
+ %basename%
+ SET
+ `token` = (@last_ticket_token := `token`),
+ `status` = %newstatus%,
+ `accessed` = %time%
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+ SELECT
+ @last_ticket_token AS `token`
+ ;
+]]
+
+function tickets.getfirstinqueue(db,status,newstatus)
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ newstatus = newstatus or s_busy,
+ time = ostime(),
+ },
+ }
+
+ local token = type(records) == "table" and #records > 0 and records[1].token
+
+ return token ~= "" and token
+
+end
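+
+-- A scheduler loop built on this getter could look roughly like this
+-- (illustration only; 'process' is a hypothetical callback):
+--
+--   while true do
+--       local token = tickets.getfirstinqueue(db)
+--       if token then
+--           process(tickets.getticketsbytoken(db,token))
+--       else
+--           os.sleep(1)
+--       end
+--   end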
+
+local template =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.getticketsbytoken(db,token)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `usertoken` = '%usertoken%' AND `status` < %rubish%
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.getusertickets(db,usertoken)
+
+ -- todo: update accessed
+ -- todo: get less fields
+ -- maybe only data for status changed (hard to check)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ usertoken = usertoken,
+ rubish = s_rubish,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `status` = %deleted%
+ WHERE
+ `usertoken` = '%usertoken%' ;
+]]
+
+function tickets.removeusertickets(db,usertoken)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ usertoken = usertoken,
+ deleted = s_deleted,
+ },
+ }
+
+ if trace_sql then
+ report("removed: usertoken %s",usertoken)
+ end
+
+end
diff --git a/Master/texmf-dist/tex/context/base/util-sql-users.lua b/Master/texmf-dist/tex/context/base/util-sql-users.lua
new file mode 100644
index 00000000000..ea8fb4e07f5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql-users.lua
@@ -0,0 +1,410 @@
+if not modules then modules = { } end modules ['util-sql-users'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+-- local sql = sql or (utilities and utilities.sql) or require("util-sql")
+-- local md5 = md5 or require("md5")
+
+local sql = utilities.sql
+
+local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern
+local sumhexa = md5.sumhexa
+local booleanstring = string.booleanstring
+
+local sql = utilities.sql
+local users = { }
+sql.users = users
+
+local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","users")
+
+local function encryptpassword(str)
+ if not str or str == "" then
+ return ""
+ elseif find(str,"^MD5:") then
+ return str
+ else
+ return upper(format("MD5:%s",sumhexa(str)))
+ end
+end
+
+local function cleanuppassword(str)
+ return (gsub(str,"^MD5:",""))
+end
+
+local function samepasswords(one,two)
+ if not one or not two then
+ return false
+ end
+ if not find(one,"^MD5:") then
+ one = encryptpassword(one)
+ end
+ if not find(two,"^MD5:") then
+ two = encryptpassword(two)
+ end
+ return one == two
+end
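+
+-- For example (illustration only, using the MD5 sum of "test" also mentioned
+-- further down):
+--
+--   samepasswords("test","MD5:098F6BCD4621D373CADE4E832627B4F6") -- true
+--   samepasswords("test","test")                                 -- true
+--   samepasswords(nil,"test")                                    -- false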
+
+local function validaddress(address,addresses)
+ if address and addresses and address ~= "" and addresses ~= "" then
+ if find(address,topattern(addresses,true,true)) then
+ return true, "valid remote address"
+ end
+ return false, "invalid remote address"
+ else
+ return true, "no remote address check"
+ end
+end
+
+
+users.encryptpassword = encryptpassword
+users.cleanuppassword = cleanuppassword
+users.samepasswords = samepasswords
+users.validaddress = validaddress
+
+-- print(users.encryptpassword("test")) -- MD5:098F6BCD4621D373CADE4E832627B4F6
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "users")
+end
+
+users.usedb = checkeddb
+
+local groupnames = { }
+local groupnumbers = { }
+
+local function registergroup(name)
+ local n = #groupnames + 1
+ groupnames [n] = name
+ groupnames [tostring(n)] = name
+ groupnames [name] = name
+ groupnumbers[n] = n
+ groupnumbers[tostring(n)] = n
+ groupnumbers[name] = n
+ return n
+end
+
+registergroup("superuser")
+registergroup("administrator")
+registergroup("user")
+registergroup("guest")
+
+users.groupnames = groupnames
+users.groupnumbers = groupnumbers
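+
+-- So, after the four registrations above (illustration only):
+--
+--   groupnumbers["administrator"] -- 2
+--   groupnumbers["2"]             -- 2
+--   groupnames[3]                 -- "user"
+--   groupnumbers["nobody"]        -- nil, so users.add falls back to 'guest'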
+
+-- password 'test':
+--
+-- INSERT insert into users (`name`,`password`,`group`,`enabled`) values ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ;
+
+local template =[[
+ CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(80) NOT NULL,
+ `fullname` varchar(80) NOT NULL,
+ `password` varchar(50) DEFAULT NULL,
+ `group` int(11) NOT NULL,
+ `enabled` int(11) DEFAULT '1',
+ `email` varchar(80) DEFAULT NULL,
+ `address` varchar(256) DEFAULT NULL,
+ `theme` varchar(50) DEFAULT NULL,
+ `data` longtext,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name_unique` (`name`)
+ ) DEFAULT CHARSET = utf8 ;
+]]
+
+local converter, fields = sql.makeconverter {
+ { name = "id", type = "number" },
+ { name = "name", type = "string" },
+ { name = "fullname", type = "string" },
+ { name = "password", type = "string" },
+ { name = "group", type = groupnames },
+ { name = "enabled", type = "boolean" },
+ { name = "email", type = "string" },
+ { name = "address", type = "string" },
+ { name = "theme", type = "string" },
+ { name = "data", type = "deserialize" },
+}
+
+function users.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%'
+ AND
+ `password` = '%[password]%'
+ ;
+]]
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%'
+ ;
+]]
+
+function users.valid(db,username,password,address)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ name = username,
+ },
+ }
+
+ local data = data and data[1]
+
+ if not data then
+ return false, "unknown user"
+ elseif not data.enabled then
+ return false, "disabled user"
+ elseif data.password ~= encryptpassword(password) then
+ return false, "wrong password"
+ elseif not validaddress(address,data.address) then
+ return false, "invalid address"
+ else
+ data.password = nil
+ return data, "okay"
+ end
+
+end
+
+local template =[[
+ INSERT INTO %basename% (
+ `name`,
+ `fullname`,
+ `password`,
+ `group`,
+ `enabled`,
+ `email`,
+ `address`,
+ `theme`,
+ `data`
+ ) VALUES (
+ '%[name]%',
+ '%[fullname]%',
+ '%[password]%',
+ '%[group]%',
+ '%[enabled]%',
+ '%[email]%',
+ '%[address]%',
+ '%[theme]%',
+ '%[data]%'
+ ) ;
+]]
+
+function users.add(db,specification)
+
+ local name = specification.username or specification.name
+
+ if not name or name == "" then
+ return
+ end
+
+ local data = specification.data
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ name = name,
+            fullname = specification.fullname or name,
+ password = encryptpassword(specification.password or ""),
+ group = groupnumbers[specification.group] or groupnumbers.guest,
+ enabled = booleanstring(specification.enabled) and "1" or "0",
+ email = specification.email,
+ address = specification.address,
+ theme = specification.theme,
+ data = type(data) == "table" and db.serialize(data,"return") or "",
+ },
+ }
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%' ;
+]]
+
+function users.getbyname(db,name)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ name = name,
+ },
+ }
+
+ return data and data[1] or nil
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `id` = '%id%' ;
+]]
+
+local function getbyid(db,id)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ id = id,
+ },
+ }
+
+ return data and data[1] or nil
+
+end
+
+users.getbyid = getbyid
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `fullname` = '%[fullname]%',
+ `password` = '%[password]%',
+ `group` = '%[group]%',
+ `enabled` = '%[enabled]%',
+ `email` = '%[email]%',
+ `address` = '%[address]%',
+ `theme` = '%[theme]%',
+ `data` = '%[data]%'
+ WHERE
+ `id` = '%id%'
+ ;
+]]
+
+function users.save(db,id,specification)
+
+ id = tonumber(id)
+
+ if not id then
+ return
+ end
+
+ local user = getbyid(db,id)
+
+ if tonumber(user.id) ~= id then
+ return
+ end
+
+    local fullname = specification.fullname == nil and user.fullname or specification.fullname
+ local password = specification.password == nil and user.password or specification.password
+ local group = specification.group == nil and user.group or specification.group
+ local enabled = specification.enabled == nil and user.enabled or specification.enabled
+ local email = specification.email == nil and user.email or specification.email
+ local address = specification.address == nil and user.address or specification.address
+ local theme = specification.theme == nil and user.theme or specification.theme
+ local data = specification.data == nil and user.data or specification.data
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ fullname = fullname,
+ password = encryptpassword(password),
+ group = groupnumbers[group],
+ enabled = booleanstring(enabled) and "1" or "0",
+ email = email,
+ address = address,
+ theme = theme,
+ data = type(data) == "table" and db.serialize(data,"return") or "",
+ },
+ }
+
+ return getbyid(db,id)
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `id` = '%id%' ;
+]]
+
+function users.remove(db,id)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ },
+ }
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ ORDER BY
+ `name` ;
+]]
+
+function users.collect(db) -- maybe also an id/name only variant
+
+ local records, keys = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ },
+ }
+
+ return records, keys
+
+end
diff --git a/Master/texmf-dist/tex/context/base/util-sql.lua b/Master/texmf-dist/tex/context/base/util-sql.lua
new file mode 100644
index 00000000000..1c1766edf93
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sql.lua
@@ -0,0 +1,443 @@
+if not modules then modules = { } end modules ['util-sql'] = {
+ version = 1.001,
+ comment = "companion to m-sql.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: templates as table (saves splitting)
+
+-- Of course we could use a library but we don't want another dependency and there is
+-- a bit of flux in these libraries. Also, we want the data back in a way that we
+-- like.
+--
+-- This is the first of a set of sql related modules that provide functionality
+-- for a web based framework that we use for typesetting (related) services. We're
+-- talking of session management, job ticket processing, storage, (xml) file processing
+-- and dealing with data from databases (often ambitiously called database publishing).
+--
+-- There is no generic solution for such services, but from our perspective, as we use
+-- context in a regular tds tree (the standard distribution) it makes sense to put shared
+-- code in the context distribution. That way we don't need to reinvent wheels every time.
+
+-- We use the template mechanism from util-tpl which in turn is just using the dos cq
+-- windows convention of %whatever% variables that I've used for ages.
+
+-- util-sql-imp-client.lua
+-- util-sql-imp-library.lua
+-- util-sql-imp-swiglib.lua
+-- util-sql-imp-lmxsql.lua
+
+-- local sql = require("util-sql")
+--
+-- local converter = sql.makeconverter {
+-- { name = "id", type = "number" },
+-- { name = "data",type = "string" },
+-- }
+--
+-- local execute = sql.methods.swiglib.execute
+-- -- local execute = sql.methods.library.execute
+-- -- local execute = sql.methods.client.execute
+-- -- local execute = sql.methods.lmxsql.execute
+--
+-- result = execute {
+-- presets = {
+-- host = "localhost",
+-- username = "root",
+-- password = "test",
+-- database = "test",
+-- id = "test", -- forces persistent session
+-- },
+-- template = "select * from `test` where `id` > %criterium% ;",
+-- variables = {
+-- criterium = 2,
+-- },
+-- converter = converter
+-- }
+--
+-- inspect(result)
+
+local format, match = string.format, string.match
+local random = math.random
+local rawset, setmetatable, getmetatable, load, type = rawset, setmetatable, getmetatable, load, type
+local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
+local concat = table.concat
+
+local osuuid = os.uuid
+local osclock = os.clock or os.time
+local ostime = os.time
+local setmetatableindex = table.setmetatableindex
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql")
+
+-- trace_sql = true
+-- trace_queries = true
+
+utilities.sql = utilities.sql or { }
+local sql = utilities.sql
+
+local replacetemplate = utilities.templates.replace
+local loadtemplate = utilities.templates.load
+
+local methods = { }
+sql.methods = methods
+
+local helpers = { }
+sql.helpers = helpers
+
+local serialize = table.fastserialize
+local deserialize = table.deserialize
+
+sql.serialize = serialize
+sql.deserialize = deserialize
+
+helpers.serialize = serialize -- bonus
+helpers.deserialize = deserialize -- bonus
+
+local defaults = { __index =
+ {
+ resultfile = "result.dat",
+ templatefile = "template.sql",
+ queryfile = "query.sql",
+ variables = { },
+ username = "default",
+ password = "default",
+ host = "localhost",
+ port = 3306,
+ database = "default",
+ },
+}
+
+setmetatableindex(sql.methods,function(t,k)
+ report_state("start loading method %a",k)
+ require("util-sql-imp-"..k)
+ report_state("loading method %a done",k)
+ return rawget(t,k)
+end)
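+
+-- A small sketch (not part of the original code): merely indexing sql.methods loads
+-- the matching implementation file, assuming it is present in the tree:
+--
+-- local client = sql.methods.client -- require("util-sql-imp-client") on first access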
+
+-- converters
+
+local converters = { }
+sql.converters = converters
+
+local function makeconverter(entries,celltemplate,wraptemplate)
+ local shortcuts = { }
+ local assignments = { }
+ local key = false
+ for i=1,#entries do
+ local entry = entries[i]
+ local name = entry.name
+ local kind = entry.type or entry.kind
+ local value = format(celltemplate,i,i)
+ if kind == "boolean" then
+ assignments[#assignments+1] = format("[%q] = booleanstring(%s),",name,value)
+ elseif kind == "number" then
+ assignments[#assignments+1] = format("[%q] = tonumber(%s),",name,value)
+ elseif type(kind) == "function" then
+ local c = #converters + 1
+ converters[c] = kind
+ shortcuts[#shortcuts+1] = format("local fun_%s = converters[%s]",c,c)
+ assignments[#assignments+1] = format("[%q] = fun_%s(%s),",name,c,value)
+ elseif type(kind) == "table" then
+ local c = #converters + 1
+ converters[c] = kind
+ shortcuts[#shortcuts+1] = format("local tab_%s = converters[%s]",c,c)
+ assignments[#assignments+1] = format("[%q] = tab_%s[%s],",name,#converters,value)
+ elseif kind == "deserialize" then
+ assignments[#assignments+1] = format("[%q] = deserialize(%s),",name,value)
+ elseif kind == "key" then
+ -- hashed instead of indexed
+ key = value
+ elseif kind == "entry" then
+ -- so we can (efficiently) extend the hashed table
+ local default = entry.default or ""
+ if type(default) == "string" then
+ assignments[#assignments+1] = format("[%q] = %q,",name,default)
+ else
+ assignments[#assignments+1] = format("[%q] = %s,",name,tostring(default))
+ end
+ else
+ assignments[#assignments+1] = format("[%q] = %s,",name,value)
+ end
+ end
+ local code = format(wraptemplate,concat(shortcuts,"\n"),key and "{ }" or "data",key or "i",concat(assignments,"\n "))
+ -- print(code)
+ local func = load(code)
+ return func and func()
+end
+
+function sql.makeconverter(entries)
+ local fields = { }
+ for i=1,#entries do
+ fields[i] = format("`%s`",entries[i].name)
+ end
+ fields = concat(fields, ", ")
+ local converter = {
+ fields = fields
+ }
+ setmetatableindex(converter, function(t,k)
+ local sqlmethod = methods[k]
+ local v = makeconverter(entries,sqlmethod.celltemplate,sqlmethod.wraptemplate)
+ t[k] = v
+ return v
+ end)
+ return converter, fields
+end
+
+-- helper for libraries:
+
+local function validspecification(specification)
+ local presets = specification.presets
+ if type(presets) == "string" then
+ presets = dofile(presets)
+ end
+ if type(presets) == "table" then
+ setmetatable(presets,defaults)
+ setmetatable(specification,{ __index = presets })
+ else
+ setmetatable(specification,defaults)
+ end
+ return true
+end
+
+helpers.validspecification = validspecification
+
+local whitespace = patterns.whitespace^0
+local eol = patterns.eol
+local separator = P(";")
+local escaped = patterns.escaped
+local dquote = patterns.dquote
+local squote = patterns.squote
+local dsquote = squote * squote
+---- quoted = patterns.quoted
+local quoted = dquote * (escaped + (1-dquote))^0 * dquote
+ + squote * (escaped + dsquote + (1-squote))^0 * squote
+local comment    = P("--") * (1-eol)^0 / ""
+local query = whitespace
+ * Cs((quoted + comment + 1 - separator)^1 * Cc(";"))
+ * whitespace
+local splitter = Ct(query * (separator * query)^0)
+
+helpers.querysplitter = splitter
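+
+-- A small usage sketch (not part of the original code): the splitter cuts a string
+-- with multiple statements into separate queries, each with the separator put back:
+--
+-- local queries = lpegmatch(splitter,"select * from a ; select * from b ;")
+-- -- queries[1] : "select * from a ;"
+-- -- queries[2] : "select * from b ;"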
+
+-- I will add a bit more checking.
+
+local function validspecification(specification)
+ local presets = specification.presets
+ if type(presets) == "string" then
+ presets = dofile(presets)
+ end
+ if type(presets) == "table" then
+ local m = getmetatable(presets)
+ if m then
+ setmetatable(m,defaults)
+ else
+ setmetatable(presets,defaults)
+ end
+ setmetatable(specification,{ __index = presets })
+ else
+ setmetatable(specification,defaults)
+ end
+ local templatefile = specification.templatefile or "query"
+ local queryfile = specification.queryfile or presets.queryfile or file.nameonly(templatefile) .. "-temp.sql"
+ local resultfile = specification.resultfile or presets.resultfile or file.nameonly(templatefile) .. "-temp.dat"
+ specification.queryfile = queryfile
+ specification.resultfile = resultfile
+ if trace_sql then
+ report_state("template file: %s",templatefile or "<none>")
+ report_state("query file: %s",queryfile)
+ report_state("result file: %s",resultfile)
+ end
+ return true
+end
+
+local function preparetemplate(specification)
+ local template = specification.template
+ if template then
+ local query = replacetemplate(template,specification.variables,'sql')
+ if not query then
+ report_state("error in template: %s",template)
+ elseif trace_queries then
+ report_state("query from template: %s",query)
+ end
+ return query
+ end
+ local templatefile = specification.templatefile
+ if templatefile then
+ local query = loadtemplate(templatefile,specification.variables,'sql')
+ if not query then
+ report_state("error in template file %a",templatefile)
+ elseif trace_queries then
+ report_state("query from template file %a: %s",templatefile,query)
+ end
+ return query
+ end
+ report_state("no query template or templatefile")
+end
+
+helpers.preparetemplate = preparetemplate
+
+-- -- -- we delay setting this -- -- --
+
+local currentmethod
+
+local function firstexecute(...)
+ local execute = methods[currentmethod].execute
+ sql.execute = execute
+ return execute(...)
+end
+
+function sql.setmethod(method)
+ currentmethod = method
+ sql.execute = firstexecute
+end
+
+sql.setmethod("library")
+
+-- helper:
+
+function sql.usedatabase(presets,datatable)
+ local name = datatable or presets.datatable
+ if name then
+ local method = presets.method and sql.methods[presets.method] or sql.methods.client
+ local base = presets.database or "test"
+ local basename = format("`%s`.`%s`",base,name)
+ local execute = nil
+ local m_execute = method.execute
+ if method.usesfiles then
+ local queryfile = presets.queryfile or format("%s-temp.sql",name)
+ local resultfile = presets.resultfile or format("%s-temp.dat",name)
+ execute = function(specification) -- variables template
+ if not specification.presets then specification.presets = presets end
+ if not specification.queryfile then specification.queryfile = queryfile end
+            if not specification.resultfile then specification.resultfile = resultfile end
+ return m_execute(specification)
+ end
+ else
+ execute = function(specification) -- variables template
+ if not specification.presets then specification.presets = presets end
+ return m_execute(specification)
+ end
+ end
+ local function unpackdata(records,name)
+ if records then
+ name = name or "data"
+ for i=1,#records do
+ local record = records[i]
+ local data = record[name]
+ if data then
+ record[name] = deserialize(data)
+ end
+ end
+ end
+ end
+ return {
+            presets     = presets,
+ base = base,
+ name = name,
+ basename = basename,
+ execute = execute,
+ serialize = serialize,
+ deserialize = deserialize,
+ unpackdata = unpackdata,
+ }
+ else
+ report_state("missing name in usedatabase specification")
+ end
+end
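+
+-- A minimal usage sketch (not part of the original code, the presets are hypothetical):
+--
+-- local db = sql.usedatabase(
+--     { host = "localhost", username = "root", password = "test", database = "test" },
+--     "users"
+-- )
+-- local records = db.execute {
+--     template  = "SELECT * FROM %basename% ;",
+--     variables = { basename = db.basename },
+-- }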
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- host = "...",
+-- username = "...",
+-- password = "...",
+-- database = "...",
+-- }
+
+-- local presets = {
+-- host = "...",
+-- username = "...",
+-- password = "...",
+-- database = "...",
+-- }
+--
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = presets,
+-- }
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = dofile(...),
+-- }
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = "...",
+-- }
+
+-- for i=1,10 do
+-- local dummy = uuid() -- else same every time, don't ask
+-- end
+
+sql.tokens = {
+ length = 42, -- but in practice we will reserve some 50 characters
+ new = function()
+ return format("%s-%x06",osuuid(),random(0xFFFFF)) -- 36 + 1 + 6 = 42
+ end,
+}
+
+-- -- --
+
+-- local func, code = sql.makeconverter {
+-- { name = "a", type = "number" },
+-- { name = "b", type = "string" },
+-- { name = "c", type = "boolean" },
+-- { name = "d", type = { x = "1" } },
+-- { name = "e", type = os.fulltime },
+-- }
+--
+-- print(code)
+
+-- -- --
+
+if tex and tex.systemmodes then
+
+ local droptable = table.drop
+ local threshold = 16 * 1024 -- use slower but less memory hungry variant
+
+ function sql.prepare(specification,tag)
+ -- could go into tuc if needed
+ -- todo: serialize per column
+ local tag = tag or specification.tag or "last"
+ local filename = format("%s-sql-result-%s.tuc",tex.jobname,tag)
+ if tex.systemmodes["first"] then
+ local data, keys = sql.execute(specification)
+ if not data then
+ data = { }
+ end
+ if not keys then
+ keys = { }
+ end
+ io.savedata(filename,droptable({ data = data, keys = keys },#keys*#data>threshold))
+ return data, keys
+ else
+ local result = table.load(filename)
+ return result.data, result.keys
+ end
+ end
+
+else
+
+ sql.prepare = sql.execute
+
+end
+
+return sql
diff --git a/Master/texmf-dist/tex/context/base/util-sta.lua b/Master/texmf-dist/tex/context/base/util-sta.lua
new file mode 100644
index 00000000000..1a61ec4e6c4
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-sta.lua
@@ -0,0 +1,342 @@
+if not modules then modules = { } end modules ['util-sta'] = {
+ version = 1.001,
+ comment = "companion to util-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local format = string.format
+local select, tostring = select, tostring
+
+local trace_stacker = false trackers.register("stacker.resolve", function(v) trace_stacker = v end)
+
+local stacker = stacker or { }
+
+utilities.stacker = stacker
+
+local function start(s,t,first,last)
+ if s.mode == "switch" then
+ local n = tostring(t[last])
+ if trace_stacker then
+ s.report("start: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=first,last do
+ r[#r+1] = tostring(t[i])
+ end
+ local n = concat(r," ")
+ if trace_stacker then
+ s.report("start: %s",n)
+ end
+ return n
+ end
+end
+
+local function stop(s,t,first,last)
+ if s.mode == "switch" then
+ local n = tostring(false)
+ if trace_stacker then
+ s.report("stop: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=last,first,-1 do
+ r[#r+1] = tostring(false)
+ end
+ local n = concat(r," ")
+ if trace_stacker then
+ s.report("stop: %s",n)
+ end
+ return n
+ end
+end
+
+local function change(s,t1,first1,last1,t2,first2,last2)
+ if s.mode == "switch" then
+ local n = tostring(t2[last2])
+ if trace_stacker then
+ s.report("change: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=last1,first1,-1 do
+ r[#r+1] = tostring(false)
+ end
+ local n = concat(r," ")
+ for i=first2,last2 do
+ r[#r+1] = tostring(t2[i])
+ end
+ if trace_stacker then
+ s.report("change: %s",n)
+ end
+ return n
+ end
+end
+
+function stacker.new(name)
+
+ local s
+
+ local stack = { }
+ local list = { }
+ local ids = { }
+ local hash = { }
+
+ local hashing = true
+
+ local function push(...)
+ for i=1,select("#",...) do
+ insert(stack,(select(i,...))) -- watch the ()
+ end
+ if hashing then
+ local c = concat(stack,"|")
+ local n = hash[c]
+ if not n then
+ n = #list+1
+ hash[c] = n
+ list[n] = fastcopy(stack)
+ end
+ insert(ids,n)
+ return n
+ else
+ local n = #list+1
+ list[n] = fastcopy(stack)
+ insert(ids,n)
+ return n
+ end
+ end
+
+ local function pop()
+ remove(stack)
+ remove(ids)
+ return ids[#ids] or s.unset or -1
+ end
+
+ local function clean()
+ if #stack == 0 then
+ if trace_stacker then
+ s.report("%s list entries, %s stack entries",#list,#stack)
+ end
+ end
+ end
+
+ local tops = { }
+ local top, switch
+
+ local function resolve_begin(mode)
+ if mode then
+ switch = mode == "switch"
+ else
+ switch = s.mode == "switch"
+ end
+ top = { switch = switch }
+ insert(tops,top)
+ end
+
+ local function resolve_step(ti) -- keep track of changes outside function !
+ -- todo: optimize for n=1 etc
+ local result = nil
+ local noftop = #top
+ if ti > 0 then
+ local current = list[ti]
+ if current then
+ local noflist = #current
+ local nofsame = 0
+ if noflist > noftop then
+ for i=1,noflist do
+ if current[i] == top[i] then
+ nofsame = i
+ else
+ break
+ end
+ end
+ else
+ for i=1,noflist do
+ if current[i] == top[i] then
+ nofsame = i
+ else
+ break
+ end
+ end
+ end
+ local plus = nofsame + 1
+ if plus <= noftop then
+ if plus <= noflist then
+ if switch then
+ result = s.change(s,top,plus,noftop,current,nofsame,noflist)
+ else
+ result = s.change(s,top,plus,noftop,current,plus,noflist)
+ end
+ else
+ if switch then
+ result = s.change(s,top,plus,noftop,current,nofsame,noflist)
+ else
+ result = s.stop(s,top,plus,noftop)
+ end
+ end
+ elseif plus <= noflist then
+ if switch then
+ result = s.start(s,current,nofsame,noflist)
+ else
+ result = s.start(s,current,plus,noflist)
+ end
+ end
+ top = current
+ else
+ if 1 <= noftop then
+ result = s.stop(s,top,1,noftop)
+ end
+ top = { }
+ end
+ return result
+ else
+ if 1 <= noftop then
+ result = s.stop(s,top,1,noftop)
+ end
+ top = { }
+ return result
+ end
+ end
+
+ local function resolve_end()
+ -- resolve_step(s.unset)
+ local noftop = #top
+ if noftop > 0 then
+ local result = s.stop(s,top,1,#top)
+ remove(tops)
+ top = tops[#tops]
+ switch = top and top.switch
+ return result
+ end
+ end
+
+ local function resolve(t)
+ resolve_begin()
+ for i=1,#t do
+ resolve_step(t[i])
+ end
+ resolve_end()
+ end
+
+ local report = logs.reporter("stacker",name or nil)
+
+ s = {
+ name = name or "unknown",
+ unset = -1,
+ report = report,
+ start = start,
+ stop = stop,
+ change = change,
+ push = push,
+ pop = pop,
+ clean = clean,
+ resolve = resolve,
+ resolve_begin = resolve_begin,
+ resolve_step = resolve_step,
+ resolve_end = resolve_end,
+ }
+
+ return s -- we can overload functions
+
+end
+
+-- local s = utilities.stacker.new("demo")
+--
+-- local unset = s.unset
+-- local push = s.push
+-- local pop = s.pop
+--
+-- local t = {
+-- unset,
+-- unset,
+-- push("a"), -- a
+-- push("b","c"), -- a b c
+-- pop(), -- a b
+-- push("d"), -- a b d
+-- pop(), -- a b
+-- unset,
+-- pop(), -- a
+-- pop(), -- b
+-- unset,
+-- unset,
+-- }
+--
+-- s.resolve(t)
+
+-- demostacker = utilities.stacker.new("demos")
+--
+-- local whatever = {
+-- one = "1 0 0 RG 1 0 0 rg",
+-- two = "1 1 0 RG 1 1 0 rg",
+-- [false] = "0 G 0 g",
+-- }
+--
+-- local concat = table.concat
+--
+-- local pdfliteral = nodes.pool.pdfliteral
+--
+-- function demostacker.start(s,t,first,last)
+-- local n = whatever[t[last]]
+-- -- s.report("start: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- function demostacker.stop(s,t,first,last)
+-- local n = whatever[false]
+-- -- s.report("stop: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
+-- local n = whatever[t2[last2]]
+-- -- s.report("change: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- demostacker.mode = "switch"
+--
+-- local whatever = {
+-- one = "/OC /test1 BDC",
+-- two = "/OC /test2 BDC",
+-- [false] = "EMC",
+-- }
+--
+-- demostacker = utilities.stacker.new("demos")
+--
+-- function demostacker.start(s,t,first,last)
+-- local r = { }
+-- for i=first,last do
+-- r[#r+1] = whatever[t[i]]
+-- end
+-- -- s.report("start: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- function demostacker.stop(s,t,first,last)
+-- local r = { }
+-- for i=last,first,-1 do
+-- r[#r+1] = whatever[false]
+-- end
+-- -- s.report("stop: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
+-- local r = { }
+-- for i=last1,first1,-1 do
+-- r[#r+1] = whatever[false]
+-- end
+-- for i=first2,last2 do
+-- r[#r+1] = whatever[t2[i]]
+-- end
+-- -- s.report("change: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- demostacker.mode = "stack"
diff --git a/Master/texmf-dist/tex/context/base/util-sto.lua b/Master/texmf-dist/tex/context/base/util-sto.lua
index 42ee6cf003b..191d6cd73b2 100644
--- a/Master/texmf-dist/tex/context/base/util-sto.lua
+++ b/Master/texmf-dist/tex/context/base/util-sto.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['util-sto'] = {
license = "see context related readme files"
}
-local setmetatable, getmetatable = setmetatable, getmetatable
+local setmetatable, getmetatable, type = setmetatable, getmetatable, type
utilities = utilities or { }
utilities.storage = utilities.storage or { }
@@ -14,8 +14,9 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage cannot be marked")
- return -- os.exit()
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
end
local m = getmetatable(t)
if not m then
@@ -44,36 +45,37 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage has not been allocated")
- return -- os.exit()
+        print("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
end
return t
end
---~ function utilities.storage.delay(parent,name,filename)
---~ local m = getmetatable(parent)
---~ m.__list[name] = filename
---~ end
---~
---~ function utilities.storage.predefine(parent)
---~ local list = { }
---~ local m = getmetatable(parent) or {
---~ __list = list,
---~ __index = function(t,k)
---~ local l = require(list[k])
---~ t[k] = l
---~ return l
---~ end
---~ }
---~ setmetatable(parent,m)
---~ end
---~
---~ bla = { }
---~ utilities.storage.predefine(bla)
---~ utilities.storage.delay(bla,"test","oepsoeps")
---~ local t = bla.test
---~ table.print(t)
---~ print(t.a)
+-- function utilities.storage.delay(parent,name,filename)
+-- local m = getmetatable(parent)
+-- m.__list[name] = filename
+-- end
+--
+-- function utilities.storage.predefine(parent)
+-- local list = { }
+-- local m = getmetatable(parent) or {
+-- __list = list,
+-- __index = function(t,k)
+-- local l = require(list[k])
+-- t[k] = l
+-- return l
+-- end
+-- }
+-- setmetatable(parent,m)
+-- end
+--
+-- bla = { }
+-- utilities.storage.predefine(bla)
+-- utilities.storage.delay(bla,"test","oepsoeps")
+-- local t = bla.test
+-- table.print(t)
+-- print(t.a)
function storage.setinitializer(data,initialize)
local m = getmetatable(data) or { }
@@ -98,21 +100,28 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_table (t,k) local v = { } t[k] = v return v end
+local function f_ignore() end -- t,k,v
local t_empty = { __index = f_empty }
local t_self = { __index = f_self }
+local t_table = { __index = f_table }
local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
local m = getmetatable(t)
if m then
if f == "empty" then
m.__index = f_empty
elseif f == "key" then
m.__index = f_self
+ elseif f == "table" then
+ m.__index = f_table
else
m.__index = f
end
@@ -121,6 +130,8 @@ function table.setmetatableindex(t,f)
setmetatable(t, t_empty)
elseif f == "key" then
setmetatable(t, t_self)
+ elseif f == "table" then
+ setmetatable(t, t_table)
else
setmetatable(t,{ __index = f })
end
@@ -129,6 +140,9 @@ function table.setmetatableindex(t,f)
end
function table.setmetatablenewindex(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
local m = getmetatable(t)
if m then
if f == "ignore" then
@@ -147,6 +161,9 @@ function table.setmetatablenewindex(t,f)
end
function table.setmetatablecall(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
local m = getmetatable(t)
if m then
m.__call = f
diff --git a/Master/texmf-dist/tex/context/base/util-str.lua b/Master/texmf-dist/tex/context/base/util-str.lua
index 75cf443fbde..4890a11d606 100644
--- a/Master/texmf-dist/tex/context/base/util-str.lua
+++ b/Master/texmf-dist/tex/context/base/util-str.lua
@@ -10,9 +10,36 @@ utilities = utilities or {}
utilities.strings = utilities.strings or { }
local strings = utilities.strings
-local gsub, rep = string.gsub, string.rep
-local Cs, C, Cp, P, Carg = lpeg.Cs, lpeg.C, lpeg.Cp, lpeg.P, lpeg.Carg
+local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub
+local load, dump = load, string.dump
+local tonumber, type, tostring = tonumber, type, tostring
+local unpack, concat = table.unpack, table.concat
+local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc
local patterns, lpegmatch = lpeg.patterns, lpeg.match
+local utfchar, utfbyte = utf.char, utf.byte
+----- loadstripped = utilities.lua.loadstripped
+----- setmetatableindex = table.setmetatableindex
+
+local loadstripped = _LUAVERSION < 5.2 and load or function(str)
+    return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stripped load
+end
+
+-- todo: make a special namespace for the formatter
+
+if not number then number = { } end -- temp hack for luatex-fonts
+
+local stripper = patterns.stripzeros
+
+local function points(n)
+ return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+
+local function basepoints(n)
+ return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
+end
+
+number.points = points
+number.basepoints = basepoints
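+
+-- A small sketch (not part of the original code): scaled points become a printable
+-- dimension with trailing zeros stripped:
+--
+-- print(points(65536))     -- 1pt
+-- print(basepoints(65536)) -- 0.99626bp (roughly)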
-- str = " \n \ntest \n test\ntest "
-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
@@ -47,31 +74,28 @@ function strings.newrepeater(str,offset)
return t
end
t = { }
- setmetatable(t, {
- __index = function(t,k)
- if not k then
- return ""
- end
- local n = k + offset
- local s = n > 0 and rep(str,n) or ""
- t[k] = s
- return s
+ setmetatable(t, { __index = function(t,k)
+ if not k then
+ return ""
end
- } )
+ local n = k + offset
+ local s = n > 0 and rep(str,n) or ""
+ t[k] = s
+ return s
+ end })
s[offset] = t
return t
end
---~ local dashes = strings.newrepeater("--",-1)
-
---~ print(dashes[2])
---~ print(dashes[3])
---~ print(dashes[1])
+-- local dashes = strings.newrepeater("--",-1)
+-- print(dashes[2],dashes[3],dashes[1])
local extra, tab, start = 0, 0, 4, 0
local nspaces = strings.newrepeater(" ")
+string.nspaces = nspaces
+
local pattern =
Carg(1) / function(t)
extra, tab, start = 0, t or 7, 1
@@ -97,20 +121,20 @@ function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
---~ local t = {
---~ "1234567123456712345671234567",
---~ "\tb\tc",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc",
---~ "one\n two\nxxx three\nxx four\nx five\nsix",
---~ }
---~ for k=1,#t do
---~ print(strings.tabtospace(t[k]))
---~ end
+-- local t = {
+-- "1234567123456712345671234567",
+-- "\tb\tc",
+-- "a\tb\tc",
+-- "aa\tbb\tcc",
+-- "aaa\tbbb\tccc",
+-- "aaaa\tbbbb\tcccc",
+-- "aaaaa\tbbbbb\tccccc",
+-- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc",
+-- "one\n two\nxxx three\nxx four\nx five\nsix",
+-- }
+-- for k=1,#t do
+-- print(strings.tabtospace(t[k]))
+-- end
function strings.striplong(str) -- strips all leading spaces
str = gsub(str,"^%s*","")
@@ -118,13 +142,625 @@ function strings.striplong(str) -- strips all leading spaces
return str
end
---~ local template = string.striplong([[
---~ aaaa
---~ bb
---~ cccccc
---~ ]])
+-- local template = string.striplong([[
+-- aaaa
+-- bb
+-- cccccc
+-- ]])
function strings.nice(str)
str = gsub(str,"[:%-+_]+"," ") -- maybe more
return str
end
+
+-- Work in progress. It is interesting that compared to the built-in format this is
+-- faster in luatex than in luajittex, where the speed is comparable. It only makes
+-- sense to use the formatter when a (somewhat) complex format is used a lot. Each
+-- formatter is a function so there is some overhead and not all formatted output is
+-- worth that overhead. Keep in mind that there is an extra function call involved.
+-- In principle we end up with a string concatenation so one could inline such a
+-- sequence but often at the cost of less readability. So, it's a sort of (visual)
+-- compromise. Of course there is the benefit of more variants. (Concerning the
+-- speed: a simple format like %05fpt is better off with format than with a
+-- formatter, but as soon as you put something in front formatters become faster.
+-- Passing the pt as extra argument makes formatters behave better. Of course this is
+-- rather implementation dependent. Also, when a specific format is only used a few
+-- times the overhead in creating it is not compensated by speed.)
+--
+-- More info can be found in cld-mkiv.pdf so here I stick to a simple list.
+--
+-- integer %...i number
+-- integer %...d number
+-- unsigned %...u number
+-- character %...c number
+-- hexadecimal %...x number
+-- HEXADECIMAL %...X number
+-- octal %...o number
+-- string %...s string number
+-- float %...f number
+-- exponential %...e number
+-- exponential %...E number
+-- autofloat %...g number
+-- autofloat %...G number
+-- utf character %...c number
+-- force tostring %...S any
+-- force tostring %Q any
+-- force tonumber %N number (strip leading zeros)
+-- signed number %I number
+-- rounded number %r number
+-- 0xhexadecimal %...h character number
+-- 0xHEXADECIMAL %...H character number
+-- U+hexadecimal %...u character number
+-- U+HEXADECIMAL %...U character number
+-- points %p number (scaled points)
+-- basepoints %b number (scaled points)
+-- table concat %...t table
+-- serialize %...T sequenced (no nested tables)
+-- boolean (logic) %l boolean
+-- BOOLEAN %L boolean
+-- whitespace %...w
+-- automatic %...a 'whatever' (string, table, ...)
+-- automatic %...a "whatever" (string, table, ...)
+
+local n = 0
+
+-- we are somewhat sloppy in parsing prefixes as it's not that critical
+
+-- hard to avoid but we can collect them in a private namespace if needed
+
+-- inlining the next two makes no sense as we only use them in logging
+
+local sequenced = table.sequenced
+
+function string.autodouble(s,sep)
+ if s == nil then
+ return '""'
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ('"' .. sequenced(s,sep or ",") .. '"')
+ end
+ return ('"' .. tostring(s) .. '"')
+end
+
+function string.autosingle(s,sep)
+ if s == nil then
+ return "''"
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ("'" .. sequenced(s,sep or ",") .. "'")
+ end
+ return ("'" .. tostring(s) .. "'")
+end
+
+local tracedchars = { }
+string.tracedchars = tracedchars
+strings.tracers = tracedchars
+
+function string.tracedchar(b)
+ -- todo: table
+ if type(b) == "number" then
+ return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")")
+ else
+ local c = utfbyte(b)
+ return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")")
+ end
+end
+
+function number.signed(i)
+ if i > 0 then
+ return "+", i
+ else
+ return "-", -i
+ end
+end
+
+local preamble = [[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+
+local template = [[
+%s
+%s
+return function(%s) return %s end
+]]
+
+local arguments = { "a1" } -- faster than previously used (select(n,...))
+
+setmetatable(arguments, { __index =
+ function(t,k)
+ local v = t[k-1] .. ",a" .. k
+ t[k] = v
+ return v
+ end
+})
+
+local prefix_any = C((S("+- .") + R("09"))^0)
+local prefix_tab = C((1-R("az","AZ","09","%%"))^0)
+
+-- we've split all cases so that we can optimize them (let's omit the fuzzy u)
+
+-- todo: replace outer formats in next by ..
+
+local format_s = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',a%s)",f,n)
+ else -- best no tostring in order to stay compatible (.. does a selective tostring too)
+ return format("(a%s or '')",n) -- goodie: nil check
+ end
+end
+
+local format_S = function(f) -- can be optimized
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+
+local format_q = function()
+ n = n + 1
+ return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster)
+end
+
+local format_Q = function() -- can be optimized
+ n = n + 1
+ return format("format('%%q',tostring(a%s))",n)
+end
+
+local format_i = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+
+local format_d = format_i
+
+local format_I = function(f)
+ n = n + 1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+
+local format_f = function(f)
+ n = n + 1
+ return format("format('%%%sf',a%s)",f,n)
+end
+
+local format_g = function(f)
+ n = n + 1
+ return format("format('%%%sg',a%s)",f,n)
+end
+
+local format_G = function(f)
+ n = n + 1
+ return format("format('%%%sG',a%s)",f,n)
+end
+
+local format_e = function(f)
+ n = n + 1
+ return format("format('%%%se',a%s)",f,n)
+end
+
+local format_E = function(f)
+ n = n + 1
+ return format("format('%%%sE',a%s)",f,n)
+end
+
+local format_x = function(f)
+ n = n + 1
+ return format("format('%%%sx',a%s)",f,n)
+end
+
+local format_X = function(f)
+ n = n + 1
+ return format("format('%%%sX',a%s)",f,n)
+end
+
+local format_o = function(f)
+ n = n + 1
+ return format("format('%%%so',a%s)",f,n)
+end
+
+local format_c = function()
+ n = n + 1
+ return format("utfchar(a%s)",n)
+end
+
+local format_C = function()
+ n = n + 1
+ return format("tracedchar(a%s)",n)
+end
+
+local format_r = function(f)
+ n = n + 1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+
+local format_h = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_H = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_u = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_U = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_p = function()
+ n = n + 1
+ return format("points(a%s)",n)
+end
+
+local format_b = function()
+ n = n + 1
+ return format("basepoints(a%s)",n)
+end
+
+local format_t = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+
+local format_T = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+
+local format_l = function()
+ n = n + 1
+ return format("(a%s and 'true' or 'false')",n)
+end
+
+local format_L = function()
+ n = n + 1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+
+local format_N = function() -- strips leading zeros
+ n = n + 1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+
+local format_a = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+
+local format_A = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+
+local format_w = function(f) -- handy when doing depth related indent
+ n = n + 1
+ f = tonumber(f)
+ if f then -- not that useful
+ return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber
+ else
+ return format("nspaces[a%s]",n) -- no real need for tonumber
+ end
+end
+
+local format_W = function(f) -- handy when doing depth related indent
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+
+local format_rest = function(s)
+ return format("%q",s) -- catches " and \n and such
+end
+
+local format_extension = function(extensions,f,name)
+ local extension = extensions[name] or "tostring(%s)"
+ local f = tonumber(f) or 1
+ if f == 0 then
+ return extension
+ elseif f == 1 then
+ n = n + 1
+ local a = "a" .. n
+ return format(extension,a,a) -- maybe more times?
+ elseif f < 0 then
+ local a = "a" .. (n + f + 1)
+ return format(extension,a,a)
+ else
+ local t = { }
+ for i=1,f do
+ n = n + 1
+ t[#t+1] = "a" .. n
+ end
+ return format(extension,unpack(t))
+ end
+end
+
+local builder = Cs { "start",
+ start = (
+ (
+ P("%") / ""
+ * (
+ V("!") -- new
+ + V("s") + V("q")
+ + V("i") + V("d")
+ + V("f") + V("g") + V("G") + V("e") + V("E")
+ + V("x") + V("X") + V("o")
+ --
+ + V("c")
+ + V("C")
+ + V("S") -- new
+ + V("Q") -- new
+ + V("N") -- new
+ --
+ + V("r")
+ + V("h") + V("H") + V("u") + V("U")
+ + V("p") + V("b")
+ + V("t") + V("T")
+ + V("l") + V("L")
+ + V("I")
+ + V("h") -- new
+ + V("w") -- new
+ + V("W") -- new
+ + V("a") -- new
+ + V("A") -- new
+ --
+ + V("*") -- ignores probably messed up %
+ )
+ + V("*")
+ )
+ * (P(-1) + Carg(1))
+ )^0,
+ --
+ ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string)
+ ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string)
+ ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
+ ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
+ ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
+ ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
+ ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
+ ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float)
+ ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal)
+ ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL)
+ ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal)
+ --
+ ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring)
+    ["Q"] = (prefix_any * P("Q")) / format_Q, -- %Q => %q (tostring)
+ ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros)
+ ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular)
+ ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character
+ --
+ ["r"] = (prefix_any * P("r")) / format_r, -- %r => round
+ ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v
+ ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V
+ ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+)
+ ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+)
+ ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units)
+ ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units)
+ ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat
+ ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced
+ ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean
+ ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN
+ ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer
+ --
+ ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added)
+ ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier
+ --
+ ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
+ ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
+ --
+ ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%)
+ --
+ ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
+}
+
+-- we can be clever and only alias what is needed
+
+local direct = Cs (
+ P("%")/""
+ * Cc([[local format = string.format return function(str) return format("%]])
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * Cc([[",str) end]])
+ * P(-1)
+ )
+
+local function make(t,str)
+ local f
+ local p
+ local p = lpegmatch(direct,str)
+ if p then
+ f = loadstripped(p)()
+ else
+ n = 0
+ p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ if n > 0 then
+ p = format(template,preamble,t._preamble_,arguments[n],p)
+-- print("builder>",p)
+ f = loadstripped(p)()
+ else
+ f = function() return str end
+ end
+ end
+ t[str] = f
+ return f
+end
+
+-- -- collect periodically
+--
+-- local threshold = 1000 -- max nof cached formats
+--
+-- local function make(t,str)
+-- local f = rawget(t,str)
+-- if f then
+-- return f
+-- end
+-- local parent = t._t_
+-- if parent._n_ > threshold then
+-- local m = { _t_ = parent }
+-- getmetatable(parent).__index = m
+-- setmetatable(m, { __index = make })
+-- else
+-- parent._n_ = parent._n_ + 1
+-- end
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n
+-- if n > 0 then
+-- p = format(template,preamble,parent._preamble_,arguments[n],p)
+-- -- print("builder>",p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+
+strings.formatters = { }
+
+-- we cannot make these tables weak, unless we start using an indirect
+-- table (metatable) in which case we could better keep a count and
+-- clear that table when a threshold is reached
+
+function strings.formatters.new()
+ local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+end
+
+-- function strings.formatters.new()
+-- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 }
+-- local m = { _t_ = t }
+-- setmetatable(t, { __index = m, __call = use })
+-- setmetatable(m, { __index = make })
+-- return t
+-- end
+
+local formatters = strings.formatters.new() -- the default instance
+
+string.formatters = formatters -- in the main string namespace
+string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name
+
+local function add(t,name,template,preamble)
+ if type(t) == "table" and t._type_ == "formatter" then
+ t._extensions_[name] = template or "%s"
+ if preamble then
+ t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ end
+ end
+end
+
+strings.formatters.add = add
+
+-- registered in the default instance (should we fall back on this one?)
+
+lpeg.patterns.xmlescape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P('"')/"&quot;" + P(1))^0)
+lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
+
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+
+-- -- yes or no:
+--
+-- local function make(t,str)
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,",") -- after this we know n
+-- if n > 0 then
+-- p = format(template,template_shortcuts,arguments[n],p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+--
+-- local formatteds = string.formatteds or { }
+-- string.formatteds = formatteds
+--
+-- setmetatable(formatteds, { __index = make, __call = use })
diff --git a/Master/texmf-dist/tex/context/base/util-tab.lua b/Master/texmf-dist/tex/context/base/util-tab.lua
index 28a6b8cc531..ecf36b13726 100644
--- a/Master/texmf-dist/tex/context/base/util-tab.lua
+++ b/Master/texmf-dist/tex/context/base/util-tab.lua
@@ -10,23 +10,53 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch, rep = string.format, string.gmatch, string.rep
+local format, gmatch, gsub = string.format, string.gmatch, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
+local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
+local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
+local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs
+local formatters = string.formatters
-function tables.definetable(target) -- defines undefined tables
- local composed, t, n = nil, { }, 0
- for name in gmatch(target,"([^%.]+)") do
- n = n + 1
+local splitter = lpeg.tsplitat(".")
+
+function tables.definetable(target,nofirst,nolast) -- defines undefined tables
+ local composed, shortcut, t = nil, nil, { }
+ local snippets = lpegmatch(splitter,target)
+ for i=1,#snippets - (nolast and 1 or 0) do
+ local name = snippets[i]
if composed then
- composed = composed .. "." .. name
+ composed = shortcut .. "." .. name
+ shortcut = shortcut .. "_" .. name
+ t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
else
composed = name
+ shortcut = name
+ if not nofirst then
+ t[#t+1] = formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed = shortcut .. "." .. snippets[#snippets]
+ end
+ return concat(t,"\n"), composed
+end
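+
+-- A small sketch (not part of the original code): the returned string can be loaded
+-- and executed to make sure a nested table path exists:
+--
+-- local code, composed = tables.definetable("a.b.c")
+-- -- code     : a = a or { }
+-- --            local a_b = a.b if not a_b then a_b = { } a.b = a_b end
+-- --            local a_b_c = a_b.c if not a_b_c then a_b_c = { } a_b.c = a_b_c end
+-- -- composed : a_b.c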
+
+-- local t = tables.definedtable("a","b","c","d")
+
+function tables.definedtable(...)
+ local t = _G
+ for i=1,select("#",...) do
+ local li = select(i,...)
+ local tl = t[li]
+ if not tl then
+ tl = { }
+ t[li] = tl
end
- t[n] = format("%s = %s or { }",composed,composed)
+ t = tl
end
- return concat(t,"\n")
+ return t
end
function tables.accesstable(target,root)
@@ -97,35 +127,131 @@ end
-- experimental
+local escape = Cs(Cc('"') * ((P('"')/'""' + P(1))^0) * Cc('"'))
+
+function table.tocsv(t,specification)
+ if t and #t > 0 then
+ local result = { }
+ local r = { }
+ specification = specification or { }
+ local fields = specification.fields
+ if type(fields) ~= "string" then
+ fields = sortedkeys(t[1])
+ end
+ local separator = specification.separator or ","
+ if specification.preamble == true then
+ for f=1,#fields do
+ r[f] = lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1] = concat(r,separator)
+ end
+ for i=1,#t do
+ local ti = t[i]
+ for f=1,#fields do
+ local field = ti[fields[f]]
+ if type(field) == "string" then
+ r[f] = lpegmatch(escape,field)
+ else
+ r[f] = tostring(field)
+ end
+ end
+ result[#result+1] = concat(r,separator)
+ end
+ return concat(result,"\n")
+ else
+ return ""
+ end
+end
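+
+-- A small sketch (not part of the original code, the data is hypothetical):
+--
+-- print(table.tocsv( {
+--     { name = "foo", value = 1 },
+--     { name = "bar", value = 2 },
+-- }, { preamble = true } ))
+--
+-- -- "name","value"
+-- -- "foo",1
+-- -- "bar",2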
+
+-- local nspaces = utilities.strings.newrepeater(" ")
+-- local escape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P(1))^0)
+--
+-- local function toxml(t,d,result,step)
+-- for k, v in sortedpairs(t) do
+-- local s = nspaces[d]
+-- local tk = type(k)
+-- local tv = type(v)
+-- if tv == "table" then
+-- if tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>",s,k)
+-- toxml(v,d+step,result,step)
+-- result[#result+1] = format("%s</entry>",s,k)
+-- else
+-- result[#result+1] = format("%s<%s>",s,k)
+-- toxml(v,d+step,result,step)
+-- result[#result+1] = format("%s</%s>",s,k)
+-- end
+-- elseif tv == "string" then
+-- if tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>%s</entry>",s,k,lpegmatch(escape,v),k)
+-- else
+-- result[#result+1] = format("%s<%s>%s</%s>",s,k,lpegmatch(escape,v),k)
+-- end
+-- elseif tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>%s</entry>",s,k,tostring(v),k)
+-- else
+-- result[#result+1] = format("%s<%s>%s</%s>",s,k,tostring(v),k)
+-- end
+-- end
+-- end
+--
+-- much faster
+
+local nspaces = utilities.strings.newrepeater(" ")
+
local function toxml(t,d,result,step)
- for k, v in table.sortedpairs(t) do
- if type(v) == "table" then
- if type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</entry>",d,k)
+ for k, v in sortedpairs(t) do
+ local s = nspaces[d] -- inlining this is somewhat faster but gives more formatters
+ local tk = type(k)
+ local tv = type(v)
+ if tv == "table" then
+ if tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1] = formatters["%s</entry>"](s,k)
else
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d..step,result,step)
- result[#result+1] = format("%s</%s>",d,k)
+ result[#result+1] = formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1] = formatters["%s</%s>"](s,k)
end
- elseif type(k) == "number" then
- result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
+ elseif tv == "string" then
+ if tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1] = formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
else
- result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
+ result[#result+1] = formatters["%s<%s>%S</%s>"](s,k,v,k)
end
end
end
-function table.toxml(t,name,nobanner,indent,spaces)
+-- function table.toxml(t,name,nobanner,indent,spaces)
+-- local noroot = name == false
+-- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+-- local indent = rep(" ",indent or 0)
+-- local spaces = rep(" ",spaces or 1)
+-- if noroot then
+--         toxml( t, indent, result, spaces)
+-- else
+-- toxml( { [name or "root"] = t }, indent, result, spaces)
+-- end
+-- return concat(result,"\n")
+-- end
+
+function table.toxml(t,specification)
+ specification = specification or { }
+ local name = specification.name
local noroot = name == false
- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
- local indent = rep(" ",indent or 0)
- local spaces = rep(" ",spaces or 1)
+ local result = (specification.nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = specification.indent or 0
+ local spaces = specification.spaces or 1
if noroot then
- toxml( t, inndent, result, spaces)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, indent, result, spaces)
+ toxml( { [name or "data"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
@@ -143,7 +269,7 @@ function tables.encapsulate(core,capsule,protect)
end
for key, value in next, core do
if capsule[key] then
- print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
os.exit()
else
capsule[key] = value
@@ -157,7 +283,7 @@ function tables.encapsulate(core,capsule,protect)
__index = capsule,
__newindex = function(t,key,value)
if capsule[key] then
- print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+            print(formatters["\ninvalid %s %a in %a"]("overload",key,core))
os.exit()
else
rawset(t,key,value)
@@ -166,3 +292,202 @@ function tables.encapsulate(core,capsule,protect)
} )
end
end
+
+local function fastserialize(t,r,outer) -- no mixes
+ r[#r+1] = "{"
+ local n = #t
+ if n > 0 then
+ for i=1,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = formatters["%q,"](v)
+ elseif tv == "number" then
+ r[#r+1] = formatters["%s,"](v)
+ elseif tv == "table" then
+ fastserialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = formatters["%S,"](v)
+ end
+ end
+ else
+ for k, v in next, t do
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = formatters["[%q]=%q,"](k,v)
+ elseif tv == "number" then
+ r[#r+1] = formatters["[%q]=%s,"](k,v)
+ elseif tv == "table" then
+ r[#r+1] = formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
+ return r
+end
+
+-- local f_hashed_string = formatters["[%q]=%q,"]
+-- local f_hashed_number = formatters["[%q]=%s,"]
+-- local f_hashed_table = formatters["[%q]="]
+-- local f_hashed_true = formatters["[%q]=true,"]
+-- local f_hashed_false = formatters["[%q]=false,"]
+--
+-- local f_indexed_string = formatters["%q,"]
+-- local f_indexed_number = formatters["%s,"]
+-- ----- f_indexed_true = formatters["true,"]
+-- ----- f_indexed_false = formatters["false,"]
+--
+-- local function fastserialize(t,r,outer) -- no mixes
+-- r[#r+1] = "{"
+-- local n = #t
+-- if n > 0 then
+-- for i=1,n do
+-- local v = t[i]
+-- local tv = type(v)
+-- if tv == "string" then
+-- r[#r+1] = f_indexed_string(v)
+-- elseif tv == "number" then
+-- r[#r+1] = f_indexed_number(v)
+-- elseif tv == "table" then
+-- fastserialize(v,r)
+-- elseif tv == "boolean" then
+-- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k)
+-- r[#r+1] = v and "true," or "false,"
+-- end
+-- end
+-- else
+-- for k, v in next, t do
+-- local tv = type(v)
+-- if tv == "string" then
+-- r[#r+1] = f_hashed_string(k,v)
+-- elseif tv == "number" then
+-- r[#r+1] = f_hashed_number(k,v)
+-- elseif tv == "table" then
+-- r[#r+1] = f_hashed_table(k)
+-- fastserialize(v,r)
+-- elseif tv == "boolean" then
+-- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k)
+-- end
+-- end
+-- end
+-- if outer then
+-- r[#r+1] = "}"
+-- else
+-- r[#r+1] = "},"
+-- end
+-- return r
+-- end
+
+function table.fastserialize(t,prefix) -- so prefix should contain the =
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+
+function table.deserialize(str)
+ if not str or str == "" then
+ return
+ end
+ local code = load(str)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = load(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
+
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n == nil and true or n,...))
+end
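+
+-- A small sketch (not part of the original code, the filename is hypothetical):
+--
+-- table.save("demo-data.lua", { a = 1, b = "two" })
+-- local t = table.load("demo-data.lua") -- t.a == 1, t.b == "two"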
+
+local function slowdrop(t)
+ local r = { }
+ local l = { }
+ for i=1,#t do
+ local ti = t[i]
+ local j = 0
+ for k, v in next, ti do
+ j = j + 1
+ l[j] = formatters["%s=%q"](k,v)
+ end
+ r[i] = formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+
+local function fastdrop(t)
+ local r = { "return {\n" }
+ for i=1,#t do
+ local ti = t[i]
+ r[#r+1] = " {"
+ for k, v in next, ti do
+ r[#r+1] = formatters["%s=%q"](k,v)
+ end
+ r[#r+1] = "},\n"
+ end
+ r[#r+1] = "}"
+ return concat(r)
+end
+
+function table.drop(t,slow) -- only { { a=2 }, {a=3} }
+ if #t == 0 then
+ return "return { }"
+ elseif slow == true then
+ return slowdrop(t) -- less memory
+ else
+ return fastdrop(t) -- some 15% faster
+ end
+end
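+
+-- A small sketch (not part of the original code): the result is a string that can be
+-- saved to a file and read back with table.load:
+--
+-- local str = table.drop { { a = 2 }, { a = 3 } } -- 'return { {...}, {...} }'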
+
+function table.autokey(t,k)
+ local v = { }
+ t[k] = v
+ return v
+end
+
+local selfmapper = { __index = function(t,k) t[k] = k return k end }
+
+function table.twowaymapper(t)
+ if not t then
+ t = { }
+ else
+ for i=0,#t do
+ local ti = t[i] -- t[1] = "one"
+ if ti then
+ local i = tostring(i)
+ t[i] = ti -- t["1"] = "one"
+ t[ti] = i -- t["one"] = "1"
+ end
+ end
+ t[""] = t[0] or ""
+ end
+ -- setmetatableindex(t,"key")
+ setmetatable(t,selfmapper)
+ return t
+end
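+
+-- A small sketch (not part of the original code):
+--
+-- local m = table.twowaymapper { "one", "two" }
+-- -- m["1"]   : "one"
+-- -- m["one"] : "1"
+-- -- m["xxx"] : "xxx" (unknown keys map onto themselves)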
+
diff --git a/Master/texmf-dist/tex/context/base/util-tpl.lua b/Master/texmf-dist/tex/context/base/util-tpl.lua
new file mode 100644
index 00000000000..7a6abefd692
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-tpl.lua
@@ -0,0 +1,174 @@
+if not modules then modules = { } end modules ['util-tpl'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code. Coming from dos and windows, I've always used %whatever%
+-- as template variables so let's stick to it. After all, it's easy to parse and stands
+-- out well. A double %% is turned into a regular %.
+
+utilities.templates = utilities.templates or { }
+local templates = utilities.templates
+
+local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
+local report_template = logs.reporter("template")
+
+local tostring = tostring
+local format, sub = string.format, string.sub
+local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+
+-- todo: make installable template.new
+
+local replacer
+
+local function replacekey(k,t,how,recursive)
+ local v = t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v = tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+
+local sqlescape = lpeg.replacer {
+ { "'", "''" },
+ { "\\", "\\\\" },
+ { "\r\n", "\\n" },
+ { "\r", "\\n" },
+ -- { "\t", "\\t" },
+}
+
+local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+
+-- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127
+-- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23"
+--
+-- slow:
+--
+-- local luaescape = lpeg.replacer {
+-- { '"', [[\"]] },
+-- { '\\', [[\\]] },
+-- { R("\0\9") * #R("09"), function(s) return "\\00" .. byte(s) end },
+-- { R("\10\31") * #R("09"), function(s) return "\\0" .. byte(s) end },
+-- { R("\0\31") , function(s) return "\\" .. byte(s) end },
+-- }
+--
+-- slightly faster:
+--
+-- local luaescape = Cs ((
+-- P('"' ) / [[\"]] +
+-- P('\\') / [[\\]] +
+-- Cc("\\00") * (R("\0\9") / byte) * #R("09") +
+-- Cc("\\0") * (R("\10\31") / byte) * #R("09") +
+-- Cc("\\") * (R("\0\31") / byte) +
+-- P(1)
+-- )^0)
+
+local escapers = {
+ lua = function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+
+local quotedescapers = {
+ lua = function(s)
+ return format("%q",s)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
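+
+-- For instance (an illustration, not part of the original file): given the
+-- string  a "x"  the lua escaper yields  a \"x\"  and the quoted variant
+-- yields  "a \"x\"" , while the sql escapers double single quotes instead.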
+
+lpeg.patterns.sqlescape = sqlescape
+lpeg.patterns.sqlquotedescape = sqlquotedescape
+
+local luaescaper = escapers.lua
+local quotedluaescaper = quotedescapers.lua
+
+local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+
+local function replacekeyquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+
+local single = P("%") -- test %test% test : resolves test
+local double = P("%%") -- test 10%% test : %% becomes %
+local lquoted = P("%[") -- test '%[test]%' test : resolves to test with escaped "'s
+local rquoted = P("]%") --
+local lquotedq = P("%(") -- test %(test)% test : resolves to 'test' with escaped "'s
+local rquotedq = P(")%") --
+
+local escape = double / '%%'
+local nosingle = single / ''
+local nodouble = double / ''
+local nolquoted = lquoted / ''
+local norquoted = rquoted / ''
+local nolquotedq = lquotedq / ''
+local norquotedq = rquotedq / ''
+
+local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle
+local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq
+local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted
+local any = P(1)
+
+ replacer = Cs((unquoted + quoted + escape + key + any)^0)
+
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+
+-- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]] }))
+-- print(replace("test '%[x]%' test",{ x = true }))
+-- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]], y = "oeps" },'sql'))
+-- print(replace("test '%[x]%' test",{ x = [[a '%y%'  a]], y = "oeps" },'sql',true))
+-- print(replace([[test %[x]% test]],{ x = [[a "x"  a]]}))
+-- print(replace([[test %(x)% test]],{ x = [[a "x"  a]]}))
+
+templates.replace = replace
+
+function templates.load(filename,mapping,how,recurse)
+ local data = io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
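+
+-- A hypothetical call (the filename is made up): templates.load("query.sql",
+-- { name = "foo" }, "sql") reads the file and replaces its %...% placeholders
+-- from the mapping, using the sql escaper for the %[...]% and %(...)% forms.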
+
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping = t
+ end
+ for k, v in next, t do
+ t[k] = replace(v,mapping,how,recurse)
+ end
+ return t
+end
+
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/Master/texmf-dist/tex/context/base/x-asciimath.mkiv b/Master/texmf-dist/tex/context/base/x-asciimath.mkiv
index 23caf13aa3c..b555115ffc3 100644
--- a/Master/texmf-dist/tex/context/base/x-asciimath.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-asciimath.mkiv
@@ -15,7 +15,7 @@
\registerctxluafile{x-asciimath}{}
-\def\ctxmoduleasciimath#1{\directlua\zerocount{moduledata.asciimath.#1}}
+\def\ctxmoduleasciimath#1{\ctxlua{moduledata.asciimath.#1}}
%D The following code is not officially supported and is only meant
%D for the Math4All project.
@@ -68,7 +68,7 @@
\protect
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{x-asciimath.mkiv}
\enabletrackers[modules.asciimath.mapping]
diff --git a/Master/texmf-dist/tex/context/base/x-calcmath.lua b/Master/texmf-dist/tex/context/base/x-calcmath.lua
index 707abe82a22..1394f34504c 100644
--- a/Master/texmf-dist/tex/context/base/x-calcmath.lua
+++ b/Master/texmf-dist/tex/context/base/x-calcmath.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['x-calcmath'] = {
license = "see context related readme files"
}
+-- this really needs to be redone
+
local format, lower, upper, gsub, sub = string.format, string.lower, string.upper, string.gsub, string.sub
local concat = table.concat
local lpegmatch = lpeg.match
@@ -156,8 +158,8 @@ local function totex(str,mode)
end
-- parenthesis (optional)
if mode == 2 then
- str = gsub(str,"%(", "\\left\(")
- str = gsub(str,"%)", "\\right\)")
+ str = gsub(str,"%(", "\\left(")
+ str = gsub(str,"%)", "\\right)")
end
-- csnames
str = gsub(str,"(\\[A-Z]+)", lower)
@@ -223,11 +225,8 @@ if false then
),
}
-
local parser = space * grammar * -1
- local texprint = function(...) texio.write(concat{ ... }) end
-
local function has_factor(t)
for i=1,#t do
if t[i] == "factor" then
@@ -236,112 +235,114 @@ if false then
end
end
+ -- can be sped up if needed ...
+
function totex(t)
if t then
local one = t[1]
if type(one) == "string" then
local two, three = t[2], t[3]
if one == "number" then
- texprint(two)
+ context(two)
elseif one == "real" then
- texprint(two)
+ context(two)
elseif one == "float" then
- texprint("\\scinot{",two,"}{",three,"}")
+ context("\\scinot{",two,"}{",three,"}")
elseif one == "identifier" then
- texprint(two)
+ context(two)
elseif one == "constant" then
- texprint("\\"..two)
+ context("\\"..two)
elseif one == "function" then
if two == "sqrt" then
- texprint("\\sqrt{")
+ context("\\sqrt{")
totex(three)
- texprint("}")
+ context("}")
elseif two == "exp" then
- texprint(" e^{")
+ context(" e^{")
totex(three)
- texprint("}")
+ context("}")
elseif two == "abs" then
- texprint("\\left|")
+ context("\\left|")
totex(three)
- texprint("\\right|")
+ context("\\right|")
elseif two == "mean" then
- texprint("\\overline{")
+ context("\\overline{")
totex(three)
- texprint("}")
+ context("}")
elseif two == "int" or two == "prod" or two == "sum" then
local four, five = t[4], t[5]
if five then
- texprint("\\"..two.."^{")
+ context("\\"..two.."^{") -- context[two]("{")
totex(three)
- texprint("}_{")
+ context("}_{")
totex(four)
- texprint("}")
+ context("}")
totex(five)
elseif four then
- texprint("\\"..two.."^{")
+ context("\\"..two.."^{")
totex(three)
- texprint("}")
+ context("}")
totex(four)
elseif three then
- texprint("\\"..two.." ") -- " " not needed
+ context("\\"..two.." ") -- " " not needed
totex(three)
else
- texprint("\\"..two)
+ context("\\"..two)
end
else
- texprint("\\"..two.."(")
+ context("\\"..two.."(")
totex(three)
- texprint(")")
+ context(")")
end
end
else
local nt = #t
local hasfactor = has_factor(t)
if hasfactor then
- texprint("\\left(")
+ context("\\left(")
end
totex(one)
for i=2,nt,3 do
local what, how, rest = t[i], t[i+1], t[i+2]
if what == "factor" then
if how == '^' or how == "_" then
- texprint(how)
- texprint("{")
+ context(how)
+ context("{")
totex(rest)
- texprint("}")
+ context("}")
else
- texprint(how)
+ context(how)
totex(rest)
end
elseif what == "term" then
if how == '/' then
- texprint("\\frac{")
+ context("\\frac{")
totex(rest)
- texprint("}{")
+ context("}{")
totex(t[i+3] or "")
- texprint("}")
+ context("}")
elseif how == '*' then
- texprint("\\times")
+ context("\\times")
totex(rest)
else
- texprint(how)
+ context(how)
totex(three)
end
elseif what == "compare" then
if two == ">=" then
- texprint("\\ge")
+ context("\\ge")
elseif two == "<=" then
- texprint("\\le")
+ context("\\le")
elseif two == "&gt;" then
- texprint(">")
+ context(">")
elseif two == "&lt;" then
- texprint("<")
+ context("<")
end
totex(three)
end
end
if hasfactor then
- texprint("\\right)")
+ context("\\right)")
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/x-calcmath.mkiv b/Master/texmf-dist/tex/context/base/x-calcmath.mkiv
index ce4f95ad233..dda88bb3ee5 100644
--- a/Master/texmf-dist/tex/context/base/x-calcmath.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-calcmath.mkiv
@@ -15,7 +15,7 @@
\registerctxluafile{x-calcmath}{}
-\def\ctxmodulecalcmath#1{\directlua\zerocount{moduledata.calcmath.#1}}
+\def\ctxmodulecalcmath#1{\ctxlua{moduledata.calcmath.#1}}
%D Interface:
diff --git a/Master/texmf-dist/tex/context/base/x-chemml.lua b/Master/texmf-dist/tex/context/base/x-chemml.lua
index 387935c8ba5..79c1d9525ce 100644
--- a/Master/texmf-dist/tex/context/base/x-chemml.lua
+++ b/Master/texmf-dist/tex/context/base/x-chemml.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['x-chemml'] = {
-- not yet acceptable cld
-local format, lower, upper, gsub, sub = string.format, string.lower, string.upper, string.gsub, string.sub
+local format, lower, upper, gsub, sub, match = string.format, string.lower, string.upper, string.gsub, string.sub, string.match
local concat = table.concat
local chemml = { }
@@ -17,7 +17,7 @@ moduledata.chemml = chemml
function chemml.pi(id)
local str = xml.content(lxml.id(id))
- local _, class, key, value = str:match("^(%S+)%s+(%S+)%s+(%S+)%s+(%S+)%s*$")
+ local _, class, key, value = match(str,"^(%S+)%s+(%S+)%s+(%S+)%s+(%S+)%s*$")
if key and value then
context("\\setupCMLappearance[%s][%s=%s]",class, key, value)
end
@@ -28,7 +28,7 @@ function chemml.do_graphic(id)
for r, d, k in xml.elements(lxml.id(id),"cml:graphic") do
t[#t+1] = xml.tostring(d[k].dt)
end
- concat(concat(t,","))
+ context(concat(t,","))
end
function chemml.no_graphic(id)
diff --git a/Master/texmf-dist/tex/context/base/x-chemml.mkiv b/Master/texmf-dist/tex/context/base/x-chemml.mkiv
index 34aba3011eb..bb9065921c7 100644
--- a/Master/texmf-dist/tex/context/base/x-chemml.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-chemml.mkiv
@@ -11,34 +11,34 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% This needs an update!
+
\writestatus{loading}{ConTeXt XML Macros / Chemistry}
\registerctxluafile{x-chemml}{}
-\def\ctxmodulechemml#1{\directlua\zerocount{moduledata.chemml.#1}}
-
-\usemodule[pictex,chemic] % someday we will do structural fomulas in mp
+\def\ctxmodulechemml#1{\ctxlua{moduledata.chemml.#1}}
%D The following code assumes a load||flush approach to \XML.
\unprotect
\startxmlsetups xml:cml:process
- \xmlstrip {\xmldocument} {cml:chem|cml:ichem|cml:dchem|cml:reaction|cml:molecule|cml:ion|cml:structure}
+ \xmlstrip {#1} {cml:chem|cml:ichem|cml:dchem|cml:reaction|cml:molecule|cml:ion|cml:structure}
- \xmlgrab {\xmldocument} {cml:*} {*}
- \xmlgrab {\xmldocument} {cml:gives|cml:equilibrium|cml:mesomeric} {cml:arrow}
- \xmlgrab {\xmldocument} {cml:plus|cml:minus|cml:equal} {cml:operator}
- \xmlgrab {\xmldocument} {cml:bond|cml:singlebond|cml:doublebound|cml:triplebond} {cml:bond}
+ \xmlgrab {#1} {cml:*} {cml:*}
+ \xmlgrab {#1} {cml:gives|cml:equilibrium|cml:mesomeric} {cml:arrow}
+ \xmlgrab {#1} {cml:plus|cml:minus|cml:equal} {cml:operator}
+ \xmlgrab {#1} {cml:bond|cml:singlebond|cml:doublebound|cml:triplebond} {cml:bond}
- \xmlgrab {\xmldocument} {pi::chemml} {cml:pi}
+ \xmlgrab {#1} {pi::chemml} {cml:pi}
\stopxmlsetups
\xmlregistersetup{xml:cml:process}
\xmlregisterns{cml}{chemml}
-\unexpanded\def\setupCMLappearance[#1]{\dodoubleargument\getparameters[@@CML#1]}
+\unexpanded\def\setupCMLappearance[#1]{\dodoubleargument\getparameters[@@CML#1]} % old stuff
\setupCMLappearance [ion] [\c!alternative=\v!a]
@@ -67,7 +67,7 @@
\xmlflush{#1}
\stopxmlsetups
-\def\doCMLtext#1#2#3% main top bot
+\unexpanded\def\doCMLtext#1#2#3% main top bot
{\setbox0\hbox{\doifsomething{#2}{\txx\setstrut\strut\ignorespaces#2\unskip}}%
\setbox2\hbox{\ignorespaces\strut#1\unskip}%
\setbox4\hbox{\doifsomething{#3}{\txx\setstrut\strut\ignorespaces#3\unskip}}%
@@ -163,7 +163,7 @@
\stopxmlsetups
\def\doCMLbond
- {\hrule\!!width\hsize\!!height.1ex\relax}
+ {\hrule\s!width\hsize\s!height.1ex\relax}
\def\dodoCMLbond#1#2#3%
{\begingroup
diff --git a/Master/texmf-dist/tex/context/base/x-dir-05.mkiv b/Master/texmf-dist/tex/context/base/x-dir-05.mkiv
index de1d3fa5f26..379b3220a0b 100644
--- a/Master/texmf-dist/tex/context/base/x-dir-05.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-dir-05.mkiv
@@ -52,13 +52,13 @@
\stopluacode
\def\getfilestatevariable#1#2%
- {\ctxlua{commands.getfilestatevariable("#1","#2")}}
+ {\ctxcommand{getfilestatevariable("#1","#2")}}
\def\savefilestate
{\dodoubleargument\dosavefilestate}
\def\dosavefilestate[#1][#2]%
- {\ctxlua{commands.savefilestate("#1","#2")}%
+ {\ctxcommand{savefilestate("#1","#2")}%
\setxvariables
[#1]
[name={#2},
diff --git a/Master/texmf-dist/tex/context/base/x-ldx.ctx b/Master/texmf-dist/tex/context/base/x-ldx.ctx
index 0dddc9734fc..edbffc2854c 100644
--- a/Master/texmf-dist/tex/context/base/x-ldx.ctx
+++ b/Master/texmf-dist/tex/context/base/x-ldx.ctx
@@ -4,7 +4,7 @@
<ctx:message>Lua Documentation Generator</ctx:message>
<ctx:preprocess>
<ctx:processors>
- <ctx:processor name='ldx'>mtxrun --internal x-ldx.lua <ctx:value name='old'/> <ctx:value name='new'/></ctx:processor>
+ <ctx:processor name='ldx'>mtxrun --script x-ldx.lua <ctx:value name='old'/> <ctx:value name='new'/></ctx:processor>
</ctx:processors>
<ctx:files>
<ctx:file processor='ldx'><ctx:value name='old'/></ctx:file>
@@ -16,7 +16,7 @@
</ctx:flags>
<ctx:process>
<ctx:resources>
- <ctx:module>ldx</ctx:module>
+ <ctx:environment>x-ldx.mkiv</ctx:environment>
</ctx:resources>
</ctx:process>
</ctx:job>
diff --git a/Master/texmf-dist/tex/context/base/x-ldx.lua b/Master/texmf-dist/tex/context/base/x-ldx.lua
index 2a04d6126a9..31cbebf1392 100644
--- a/Master/texmf-dist/tex/context/base/x-ldx.lua
+++ b/Master/texmf-dist/tex/context/base/x-ldx.lua
@@ -316,16 +316,16 @@ will produce an ldx file that can be processed with <logo label='context'/>
by running:
<typing>
-texexec --use=x-ldx --forcexml somefile.ldx
+context --use=x-ldx --forcexml somefile.ldx
</typing>
You can do this in one step by saying:
<typing>
-texmfstart texexec --ctx=x-ldx somefile.lua
+context --ctx=x-ldx somefile.lua
</typing>
-This will trigger <logo label='texexec'/> into loading the mentioned
+This will trigger <logo label='context'/> into loading the mentioned
<logo label='ctx'/> file. That file describes the conversion as well
as the module to be used.
@@ -334,8 +334,8 @@ The main conversion call is:
-- todo: assume usage of "mtxrun --script x-ldx", maybe make it mtx-ldx
-if arg and arg[1] then
- ldx.convert(arg[1],arg[2])
+if environment.files and environment.files[1] then
+ ldx.convert(environment.files[1],environment.files[2])
end
--~ exit(1)
diff --git a/Master/texmf-dist/tex/context/base/x-ldx.mkiv b/Master/texmf-dist/tex/context/base/x-ldx.mkiv
index 4f4da5acb77..0156f2c5569 100644
--- a/Master/texmf-dist/tex/context/base/x-ldx.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-ldx.mkiv
@@ -11,18 +11,19 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% this will become an extra
+
\setupxml[default=hidden]
\usemodule[x][mathml]
\usemodule[abr-02]
-\xmlregisterdocumentsetup{ldx}{xml:mml:define}
-\xmlregisterdocumentsetup{ldx}{xml:ldx:define}
+\xmlregistersetup{xml:mml:define}
+\xmlregistersetup{xml:ldx:define}
\xmlregisterns{ldx}{ldx}
\startxmlsetups xml:ldx:define
-% \xmlgrab {\xmldocument} {ldx:*} {*}
\xmlsetsetup {#1} {ldx:*} {ldx:*}
\stopxmlsetups
@@ -142,10 +143,6 @@
\dontleavehmode{\tt\xmlflush{#1}}
\stopxmlsetups
-%
-
-\def\xmldocument{ldx}
-
% key -> kw
% dqs -> dq
% sqs -> sq
diff --git a/Master/texmf-dist/tex/context/base/x-mathml.lua b/Master/texmf-dist/tex/context/base/x-mathml.lua
index 30e77019028..31483bbeabf 100644
--- a/Master/texmf-dist/tex/context/base/x-mathml.lua
+++ b/Master/texmf-dist/tex/context/base/x-mathml.lua
@@ -9,13 +9,11 @@ if not modules then modules = { } end modules ['x-mathml'] = {
-- This needs an upgrade to the latest greatest mechanisms.
local type, next = type, next
-local utf = unicode.utf8
local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub
local strip = string.strip
-local utfchar, utffind, utfgmatch, utfgsub = utf.char, utf.find, utf.gmatch, utf.gsub
local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content
local getid = lxml.getid
-local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
+local utfchar, utfcharacters, utfvalues = utf.char, utf.characters, utf.values
local lpegmatch = lpeg.match
local mathml = { }
@@ -87,6 +85,8 @@ local o_replacements = { -- in main table
}
+local simpleoperatorremapper = utf.remapper(o_replacements)
+
--~ languages.data.labels.functions
local i_replacements = {
@@ -104,6 +104,7 @@ local i_replacements = {
["identity"] = "\\mathopnolimits{id}",
["image"] = "\\mathopnolimits{image}",
["lcm"] = "\\mathopnolimits{lcm}",
+ ["lim"] = "\\mathopnolimits{lim}",
["max"] = "\\mathopnolimits{max}",
["median"] = "\\mathopnolimits{median}",
["min"] = "\\mathopnolimits{min}",
@@ -457,18 +458,13 @@ function xml.functions.remapopenmath(e)
end
function mathml.checked_operator(str)
- str = utfgsub(str,".",o_replacements)
- context(str)
+ context(simpleoperatorremapper(str))
end
function mathml.stripped(str)
context(strip(str))
end
-function characters.remapentity(chr,slot) -- Brrrrrr, this will be replaced!
- context("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr)
-end
-
function mathml.mn(id,pattern)
-- maybe at some point we need to interpret the number, but
-- currently we assume an upright font
@@ -481,24 +477,29 @@ end
function mathml.mo(id)
local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","")
- local rep = utfgsub(rep,".",o_replacements)
- context(rep)
- -- context.mo(rep) -- fails with \left etc
+ local rep = gsub(str,"&.-;","") -- todo
+ context(simpleoperatorremapper(rep))
end
function mathml.mi(id)
+ -- we need to strip comments etc .. todo when reading in tree
local e = getid(id)
local str = e.dt
- if type(str) == "string" then -- we need a helper for this in the xml namespace ... xml.type(e)
- -- local str = xmlcontent(e) or ""
- local str = gsub(str,"&.-;","") -- needed?
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
+ if type(str) == "string" then
+ local n = #str
+ if n == 0 then
+ -- nothing to do
+ elseif n == 1 then
+ local str = gsub(str[1],"&.-;","") -- bah
+ local rep = i_replacements[str]
+ if not rep then
+ rep = gsub(str,".",i_replacements)
+ end
+ context(rep)
+ -- context.mi(rep)
+ else
+ context.xmlflush(id) -- xmlsprint or so
end
- context(rep)
- -- context.mi(rep)
else
context.xmlflush(id) -- xmlsprint or so
end
@@ -524,10 +525,7 @@ function mathml.mfenced(id) -- multiple separators
elseif n == 1 then
xmlsprint(collected[1]) -- to be checked
else
- local t = { }
- for s in utfgmatch(separators,"[^%s]") do
- t[#t+1] = s
- end
+ local t = utf.split(separators,true)
for i=1,n do
xmlsprint(collected[i]) -- to be checked
if i < n then
@@ -646,8 +644,8 @@ function mathml.mcolumn(root)
local tag = e.tg
if tag == "mi" or tag == "mn" or tag == "mo" or tag == "mtext" then
local str = xmltext(e)
-str = gsub(str,"&.-;","")
- for s in utfcharacters(str) do -- utf.gmatch(str,".") btw, the gmatch was bugged
+ str = gsub(str,"&.-;","")
+ for s in utfcharacters(str) do
m[#m+1] = { tag, s }
end
if tag == "mn" then
@@ -658,7 +656,7 @@ str = gsub(str,"&.-;","")
end
elseif tag == "mspace" or tag == "mline" then
local str = e.at.spacing or ""
- for s in utfcharacters(str) do -- utf.gmatch(str,".") btw, the gmatch was bugged
+ for s in utfcharacters(str) do
m[#m+1] = { tag, s }
end
-- elseif tag == "mline" then
diff --git a/Master/texmf-dist/tex/context/base/x-mathml.mkiv b/Master/texmf-dist/tex/context/base/x-mathml.mkiv
index f64fdbc74bc..99b9f92afde 100644
--- a/Master/texmf-dist/tex/context/base/x-mathml.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-mathml.mkiv
@@ -2,7 +2,7 @@
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
-%D subtitle=Loading \MATHML\ Filters,
+%D subtitle=\MATHML,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -17,6 +17,9 @@
% xml, tex and lua. I could rewrite the lot but it also shows how context evolves.
%
% no m:text strip (needs checking, maybe nbsp is mandate
+%
+% todo: more will be moved to lua (less hassle)
+% todo: move left/right to the lua end
\writestatus{loading}{ConTeXt XML Macros / MathML Renderer}
@@ -29,16 +32,16 @@
\registerctxluafile{x-mathml}{}
-\def\ctxmodulemathml#1{\directlua\zerocount{moduledata.mathml.#1}}
+\def\ctxmodulemathml#1{\ctxlua{moduledata.mathml.#1}}
\startxmlsetups xml:mml:define
- \xmlsetsetup{\xmldocument} {(formula|subformula)} {mml:formula}
- \xmlfilter {\xmldocument} {omt:*/function(remapopenmath)}
- \xmlfilter {\xmldocument} {mml:bind/function(remapmmlbind)}
- \xmlfilter {\xmldocument} {mml:csymbol/function(remapmmlcsymbol)}
- \xmlsetsetup{\xmldocument} {mml:*} {mml:*}
- \xmlsetsetup{\xmldocument} {mml:apply/mml:apply/mml:inverse/../..} {mml:apply:inverse}
- \xmlstrip {\xmldocument} {(mml:mi|mml:mo|mml:mn|mml:csymbol)}
+ \xmlsetsetup{#1} {(formula|subformula)} {mml:formula}
+ \xmlfilter {#1} {omt:*/function(remapopenmath)}
+ \xmlfilter {#1} {mml:bind/function(remapmmlbind)}
+ \xmlfilter {#1} {mml:csymbol/function(remapmmlcsymbol)}
+ \xmlsetsetup{#1} {mml:*} {mml:*}
+ \xmlsetsetup{#1} {mml:apply/mml:apply/mml:inverse/../..} {mml:apply:inverse}
+ \xmlstrip {#1} {(mml:mi|mml:mo|mml:mn|mml:csymbol)}
\stopxmlsetups
\xmlregisterns{omt}{openmath}
@@ -46,7 +49,10 @@
\xmlregistersetup{xml:mml:define}
-\def\MMLhack{\let\MMLpar\par \let\par\relax \everyvbox{\let\par\MMLpar}}
+\unexpanded\def\MMLhack
+ {\let\MMLpar\par
+ \let\par\relax
+ \everyvbox{\let\par\MMLpar}}
\xmlmapvalue {mml:math:mode} {display} {\displaymathematics} % we had this already
\xmlmapvalue {mml:math:mode} {inline} {\inlinemathematics }
@@ -118,7 +124,6 @@
% \def\mmlmiddledelimiter#1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{%
% \ifcase\delimiternesting\MMLleft\else\MMLmiddle\fi#1}\fi}
-
%D Remark: from now on this is a module and no longer an xtag
%D filter. There is an intermediate cleaner module but it has
%D some namespace limitations. Here we do it the \MKIV\ way.
@@ -1676,7 +1681,7 @@
\ifdefined\asciimath
\expanded{\asciimath{\xmlflush{#1}}}
\else
- \hbox{\tt no am loaded}%
+ \hbox{\tt no am loaded}
\fi
} {
\xmlall{#1}{../!mml:annotation}
@@ -2071,26 +2076,46 @@
\xmlflush{#1}
\stopxmlsetups
+% \startxmlsetups mml:mrow
+% \begingroup
+% \edef\nofmmlrows{\xmlcount{#1}{/mml:mo}}%
+% \ifnum\nofmmlrows=\plustwo
+% \xmldoifelse {#1} {/mml:mo[position()==1 or position()==\nofmmlrows]} {% we need a {}
+% \def\MMLleft {\left }
+% \def\MMLright {\right}
+% \def\MMLmiddle{\middle}
+% \enabledelimiter
+% \checkdelimiters{\xmlall{#1}{/mml:mo}}
+% \fakeleftdelimiter
+% \xmlflush{#1}
+% \fakerightdelimiter
+% \disabledelimiter
+% } {
+% \xmlflush{#1}
+% }
+% \else
+% \xmlflush{#1}
+% \fi
+% \endgroup
+% \stopxmlsetups
+%
+% fails on { ... so we need
+
\startxmlsetups mml:mrow
\begingroup
- \edef\nofmmlrows{\xmlcount{#1}{/mml:mo}}%
- \ifnum\nofmmlrows=\plustwo
- \xmldoifelse {#1} {/mml:mo[position()==1 or position()==\nofmmlrows]} {% we need a {}
- \def\MMLleft {\left }
- \def\MMLright {\right}
- \def\MMLmiddle{\middle}
- \enabledelimiter
- \checkdelimiters{\xmlall{#1}{/mml:mo}}
- \fakeleftdelimiter
- \xmlflush{#1}
- \fakerightdelimiter
- \disabledelimiter
- } {
- \xmlflush{#1}
- }
- \else
+ \xmldoifelse {#1} {/mml:mo[first() or last()]} {% we need a {}
+ \def\MMLleft {\left }
+ \def\MMLright {\right}
+ \def\MMLmiddle{\middle}
+ \enabledelimiter
+ \checkdelimiters{\xmlall{#1}{/mml:mo}}
+ \fakeleftdelimiter
\xmlflush{#1}
- \fi
+ \fakerightdelimiter
+ \disabledelimiter
+ } {
+ \xmlflush{#1}
+ }
\endgroup
\stopxmlsetups
@@ -2104,69 +2129,91 @@
\setupMMLappearance[scripts][\c!alternative=\v!a] % {} rond base
+% brrr no { } when limop .. todo: better in lua
+% speed up with ifx and setups or just in lua
+
\startxmlsetups mml:msub
- \doifelse\MMLscriptsalternative\v!a {
- {\mmlfirst{#1}}\normalsubscript{\mmlsecond{#1}}
- } {
+ \edef\mmlnucleus{\xmlraw{#1}{/mml:*[1]}}
+ \doifelse {\utfmathclass\mmlnucleus} {limop} {
\mmlfirst{#1} \normalsubscript{\mmlsecond{#1}}
+ } {
+ \doifelse\MMLscriptsalternative\v!a {
+ {\mmlfirst{#1}}\normalsubscript{\mmlsecond{#1}}
+ } {
+ \mmlfirst{#1} \normalsubscript{\mmlsecond{#1}}
+ }
}
\stopxmlsetups
\startxmlsetups mml:msup
- \doifelse\MMLscriptsalternative\v!a {
- {\mmlfirst{#1}}\normalsuperscript{\mmlsecond{#1}}
- } {
+ \edef\mmlnucleus{\xmlraw{#1}{/mml:*[1]}}
+ \doifelse {\utfmathclass\mmlnucleus} {limop} {
\mmlfirst{#1} \normalsuperscript{\mmlsecond{#1}}
+ } {
+ \doifelse\MMLscriptsalternative\v!a {
+ {\mmlfirst{#1}}\normalsuperscript{\mmlsecond{#1}}
+ } {
+ \mmlfirst{#1} \normalsuperscript{\mmlsecond{#1}}
+ }
}
\stopxmlsetups
\startxmlsetups mml:msubsup
- \doifelse\MMLscriptsalternative\v!a {
- {\mmlfirst{#1}}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
+ \edef\mmlnucleus{\xmlraw{#1}{/mml:*[1]}}
+ \doifelse {\utfmathclass\mmlnucleus} {limop} {
+ \mmlfirst{#1}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
} {
- \mmlfirst{#1} \normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
+ \doifelse\MMLscriptsalternative\v!a {
+ {\mmlfirst{#1}}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
+ } {
+ \mmlfirst{#1}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
+ }
}
\stopxmlsetups
-\def\mmlexecuteifdefined#1%
+\unexpanded\def\mmlexecuteifdefined#1%
{\ifx#1\empty
\expandafter\secondoftwoarguments
\else\ifcsname#1\endcsname
- \expandafter\expandafter\expandafter\firstoftwoarguments
+ \doubleexpandafter\firstoftwoarguments
\else
- \expandafter\expandafter\expandafter\secondoftwoarguments
+ \doubleexpandafter\secondoftwoarguments
\fi\fi
{\csname#1\endcsname}}
\startxmlsetups mml:mover
-% \mathop {
- \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}
- \doifelse{\utfmathclass\mmlovertoken}{accent} {
- \edef\mmlovercommand{\utfmathcommand\mmlovertoken}
- \mmlexecuteifdefined\mmlovercommand\mathematics{\mmlfirst{#1}}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \vbox {
- \mathsurround\zeropoint \ialign {
- \hss##\hss\crcr
- \noalign{\kern3\onepoint}%
- \mmlexecuteifdefined\mmlovercommand{\mathematics{\mmlsecond{#1}}}\crcr
- \noalign{\kern3\onepoint\nointerlineskip}%
- \mmlexecuteifdefined\mmlbasecommand{\mathematics{\mmlfirst{#1}}}\crcr
- }
+ \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}
+ \doifelseutfmathaccentfiltered\mmlovertoken{topaccent} {% not ok
+ \edef\mmlovercommand{\utfmathcommandfiltered\mmlovertoken{topaccent}}
+ \mmlexecuteifdefined\mmlovercommand\mathematics{\mmlfirst{#1}}
+ } {
+ \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \vbox {
+ \mathsurround\zeropoint
+ \ialign {
+ \hss$\alignmark\alignmark$\hss
+ \crcr
+ \noalign{\kern3\onepoint}%
+ \mmlexecuteifdefined\mmlovercommand{\mmlsecond{#1}}{}% extra {} is safeguard
+ \crcr
+ \noalign{\kern3\onepoint\nointerlineskip}%
+ \mmlexecuteifdefined\mmlbasecommand{\mmlfirst{#1}}{}% extra {} is safeguard
+ \crcr
}
}
-% }
+ }
% \limits % spoils spacing
\stopxmlsetups
+% messy: <munder><mo>(</mo><mo>&UnderBar;</mo></munder>
+
\startxmlsetups mml:munder
% \mathop {
\edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}
- \doifelse{\utfmathclass\mmlundertoken}{accent} {
- \edef\mmlundercommand{\utfmathcommand\mmlundertoken}
+ \doifelseutfmathaccentfiltered\mmlundertoken{botaccent} {
+ \edef\mmlundercommand{\utfmathcommandfiltered\mmlundertoken{botaccent}}
\mmlexecuteifdefined\mmlundercommand\mathematics{\mmlfirst{#1}}
} {
\edef\mmlbasetoken {\xmlraw{#1}{/mml:*[1]}}
@@ -2174,10 +2221,13 @@
\edef\mmlundercommand{\utfmathfiller\mmlundertoken}
\vtop {
\mathsurround\zeropoint \ialign {
- \hss##\hss\crcr
- \mmlexecuteifdefined\mmlbasecommand {\mathematics{\mmlfirst{#1}}}\crcr
+ \hss$##$\hss
+ \crcr
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfirst{#1}}
+ \crcr
\noalign{\kern3\onepoint\nointerlineskip}%
- \mmlexecuteifdefined\mmlundercommand{\mathematics{\mmlsecond{#1}}}\crcr
+ \mmlexecuteifdefined\mmlundercommand{\mmlsecond{#1}}
+ \crcr
\noalign{\kern3\onepoint}
}
}
@@ -2218,10 +2268,10 @@
\!!deptha \xmlattdef{#1}{depth} \!!zeropoint
\ifdim\!!heighta=\zeropoint
\ifdim\!!deptha=\zeropoint\else
- \hbox{\vrule\!!depth\!!deptha\!!height\zeropoint\!!width\zeropoint}%
+ \hbox{\vrule\s!depth\!!deptha\s!height\zeropoint\s!width\zeropoint}%
\fi
\else
- \hbox{\vrule\!!depth\zeropoint\!!height\!!heighta\!!width\zeropoint}%
+ \hbox{\vrule\s!depth\zeropoint\s!height\!!heighta\s!width\zeropoint}%
\fi
\ifdim\!!widtha=\zeropoint\else
\hskip\!!widtha
@@ -2255,7 +2305,7 @@
% \setbox\scratchbox\hbox{\mathematics{\mathstyle{\mmllinetext}}}% not ok
% \!!widtha\wd\scratchbox
% \fi
-% \hbox{\vrule\!!width\!!widtha\!!depth\!!deptha\!!height\!!heighta}
+% \hbox{\vrule\s!width\!!widtha\s!depth\!!deptha\s!height\!!heighta}
% \stopxmlsetups
\startxmlsetups mml:mglyph % probably never ok (hbox is needed in order to switch to normal font)
diff --git a/Master/texmf-dist/tex/context/base/x-res-01.mkiv b/Master/texmf-dist/tex/context/base/x-res-01.mkiv
index 0ebb8933d11..e234e98677a 100644
--- a/Master/texmf-dist/tex/context/base/x-res-01.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-res-01.mkiv
@@ -53,7 +53,6 @@
\defineregister
[figureindex]
- [figureindices]
\setupregister
[figureindex]
@@ -135,26 +134,26 @@
\setupbodyfont
[tt,10pt]
-\definesymbol [attachment] [{\strut\bf\color[darkred]{\jobname.xml}}]
-\setupattachments [symbol=attachment,alternative=]
+\definesymbol [attachment] [{\strut\bf\color[darkred]{\inputfilename}}] % jobname.xml}}]
+\setupattachments [symbol=attachment,alternative=,location=text]
\useattachment [datafile] [\inputfilename]
\xmlloadonly{main}{\inputfilename}{}
-\mainlanguage[\xmlattributedef{main}{/rl:library}{language}{en}]
+\mainlanguage[\xmlattributedef{main}{/rlx:library}{language}{en}]
\startxmlsetups xml:resource:asis
\startTEXpage[pagestate=start]
- \xmldoifelsetext{#1}{/rl:label} {
- \edef\CurrentLabel{\xmltext{#1}{rl:label}}
+ \xmldoifelsetext{#1}{/rlx:label} {
+ \edef\CurrentLabel{\xmltext{#1}{rlx:label}}
} {
- \edef\CurrentLabel{\xmltext{#1}{rl:file}}
+ \edef\CurrentLabel{\xmltext{#1}{rlx:file}}
}
\pagereference [
asis:\CurrentLabel
]
\gotobox {
- \externalfigure[\xmltext{#1}{/rl:file}]
+ \externalfigure[\xmltext{#1}{/rlx:file}]
}[% tricky no space before [
data:\CurrentLabel
]
@@ -196,7 +195,7 @@
]
{
\externalfigure
- [\xmltext{#1}{rl:file}]
+ [\xmltext{#1}{rlx:file}]
[factor=max]
}
[
@@ -217,7 +216,7 @@
]
{
\externalfigure [
- \xmltext{#1}{rl:file}
+ \xmltext{#1}{rlx:file}
] [
factor=max
]
@@ -228,10 +227,10 @@
% using a layer makes more sense but we had this ...
- \xmldoifelsetext{#1}{/rl:label} {
- \edef\CurrentLabel{\xmltext{#1}{rl:label}}
+ \xmldoifelsetext{#1}{/rlx:label} {
+ \edef\CurrentLabel{\xmltext{#1}{rlx:label}}
} {
- \edef\CurrentLabel{\xmltext{#1}{rl:file}}
+ \edef\CurrentLabel{\xmltext{#1}{rlx:file}}
}
\button {
@@ -242,7 +241,7 @@
%\ifnum\CurrentPage=1 \pagereference[begin]\fi
- \expanded{\figureindex{\xmltext{#1}{/rl:label}}}
+ \expanded{\figureindex{\xmltext{#1}{/rlx:label}}}
\vbox to 100pt {
\hsize30pt
@@ -287,7 +286,7 @@
] {
\externalfigure
[
- \xmltext{#1}{rl:file}
+ \xmltext{#1}{rlx:file}
] [
reset=yes
]
@@ -302,12 +301,12 @@
\vbox to 100pt {
\blank[disable]
\starttabulate[|Bel|p|]
- \NC file \NC \xmltext{#1}{/rl:file} \NC \NR
- \xmldoif{#1}{/rl:label} {\NC label \NC \xmltext{#1}{/rl:label} \NC \NR}
+ \NC file \NC \xmltext{#1}{/rlx:file} \NC \NR
+ \xmldoif{#1}{/rlx:label} {\NC label \NC \xmltext{#1}{/rlx:label} \NC \NR}
\NC dimensions \NC \CurrentWidth\ * \CurrentHeight \NC \NR
- \xmldoif{#1}{/rl:copyright} {\NC copyright \NC \xmltext{#1}{/rl:copyright} \NC \NR}
- \xmldoif{#1}{/rl:status} {\NC status \NC \xmltext{#1}{/rl:status} \NC \NR}
- \xmldoif{#1}{/rl:comment} {\NC comment \NC \xmltext{#1}{/rl:comment} \NC \NR}
+ \xmldoif{#1}{/rlx:copyright} {\NC copyright \NC \xmltext{#1}{/rlx:copyright} \NC \NR}
+ \xmldoif{#1}{/rlx:status} {\NC status \NC \xmltext{#1}{/rlx:status} \NC \NR}
+ \xmldoif{#1}{/rlx:comment} {\NC comment \NC \xmltext{#1}{/rlx:comment} \NC \NR}
\stoptabulate
\vfill
}
@@ -323,10 +322,10 @@
\startxmlsetups xml:description
\starttabulate[|lBe|p|]
- \xmldoif{#1}{/rl:organization} {\NC organization \NC \xmltext{#1}{/rl:organization} \NC \NR}
- \xmldoif{#1}{/rl:project} {\NC project \NC \xmltext{#1}{/rl:project} \NC \NR}
- \xmldoif{#1}{/rl:product} {\NC product \NC \xmltext{#1}{/rl:product} \NC \NR}
- \xmldoif{#1}{/rl:comment} {\NC comment \NC \xmltext{#1}{/rl:comment} \NC \NR}
+ \xmldoif{#1}{/rlx:organization} {\NC organization \NC \xmltext{#1}{/rlx:organization} \NC \NR}
+ \xmldoif{#1}{/rlx:project} {\NC project \NC \xmltext{#1}{/rlx:project} \NC \NR}
+ \xmldoif{#1}{/rlx:product} {\NC product \NC \xmltext{#1}{/rlx:product} \NC \NR}
+ \xmldoif{#1}{/rlx:comment} {\NC comment \NC \xmltext{#1}{/rlx:comment} \NC \NR}
\NC specification \NC \attachment[datafile] \NC \NR
\stoptabulate
@@ -334,15 +333,15 @@
\starttext
- \xmlfilter{main}{/rl:library/rl:resource/command(xml:resource:asis)}
+ \xmlfilter{main}{/rlx:library/rlx:resource/command(xml:resource:asis)}
\subject {Figure collection}
- \xmlfilter{main}{/rl:library/rl:description/command(xml:description)}
+ \xmlfilter{main}{/rlx:library/rlx:description/command(xml:description)}
\subject [list] {List of figures}
- \xmlfilter{main}{/rl:library/rl:resource/command(xml:resource:data)}
+ \xmlfilter{main}{/rlx:library/rlx:resource/command(xml:resource:data)}
\page
@@ -407,19 +406,19 @@
% \def\StopFigureD
% {\doglobal\increment\CurrentPage
% \setupbackgrounds[page][background=page]
-% \startpagefigure[\XMLflush{rl:file}][offset=20pt]%
-% \doifelsenothing{\XMLflush{rl:label}}
-% {\expanded{\definereference[Description][about:\XMLflush{rl:file}]}%
-% \expanded{\pagereference[grid:\XMLflush{rl:file}]}}
-% {\expanded{\definereference[Description][about:\XMLflush{rl:label}]}%
-% \expanded{\pagereference[grid:\XMLflush{rl:label}]}}
+% \startpagefigure[\XMLflush{rlx:file}][offset=20pt]%
+% \doifelsenothing{\XMLflush{rlx:label}}
+% {\expanded{\definereference[Description][about:\XMLflush{rlx:file}]}%
+% \expanded{\pagereference[grid:\XMLflush{rlx:file}]}}
+% {\expanded{\definereference[Description][about:\XMLflush{rlx:label}]}%
+% \expanded{\pagereference[grid:\XMLflush{rlx:label}]}}
% \stoppagefigure
-% %\pagefigure[\XMLflush{rl:file}][offset=20pt]
+% %\pagefigure[\XMLflush{rlx:file}][offset=20pt]
% \setupbackgrounds[page][background=]
% \egroup}
-% \defineXMLignore [rl:description]
-% \defineXMLenvironment [rl:figure] \StartFigureD \StopFigureD
+% \defineXMLignore [rlx:description]
+% \defineXMLenvironment [rlx:figure] \StartFigureD \StopFigureD
% \doglobal\newcounter\CurrentPage
diff --git a/Master/texmf-dist/tex/context/base/x-steps.mkiv b/Master/texmf-dist/tex/context/base/x-steps.mkiv
new file mode 100644
index 00000000000..29b3f7eaa6e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/x-steps.mkiv
@@ -0,0 +1,102 @@
+%D \module
+%D [ file=m-steps,
+%D version=2001.05.28,
+%D title=\CONTEXT\ Modules,
+%D subtitle=Step Charts \& Tables,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D The \XML\ interface. This module can be made way more efficient
+%D in \MKIV\ using textext, but it only makes sense to do this when
+%D I really need it in a demanding application. Probably half of the
+%D code in m-steps.tex can go.
+
+\usemodule[m][steps]
+
+\unprotect
+
+\installcorenamespace {xmlstepchart}
+\installcorenamespace {xmlsteptable}
+
+\def\xmlstepchartdirective#1{\executeifdefined{\??xmlstepchart#1}\gobbletwoarguments} % {#2}{#3}
+\def\xmlsteptabledirective#1{\executeifdefined{\??xmlsteptable#1}\gobbletwoarguments} % {#2}{#3}
+
+\setvalue{\??xmlstepchart charts}{\setsomevalue\@@STPC}
+\setvalue{\??xmlstepchart cells}{\setsomevalue\@@STEC}
+\setvalue{\??xmlstepchart texts}{\setsomevalue\@@STET}
+\setvalue{\??xmlstepchart lines}{\setsomevalue\@@STEL}
+
+\setvalue{\??xmlsteptable tables}{\setsomevalue\@@STPT}
+\setvalue{\??xmlsteptable cells}{\setsomevalue\@@STEC}
+\setvalue{\??xmlsteptable texts}{\setsomevalue\@@STET}
+\setvalue{\??xmlsteptable lines}{\setsomevalue\@@STEL}
+
+\startxmlsetups xml:ct:define
+ \xmlsetsetup {#1} {ct:*} {xml:ct:*}
+ % \xmlsetsetup {#1} {ct:stepaligntable/cells} {xml:ct:stepaligntable:cells}
+ % \xmlsetsetup {#1} {ct:stepaligntable/lines} {xml:ct:stepaligntable:lines}
+\stopxmlsetups
+
+\xmlregisterns{ct}{stepcharts}
+
+\xmlregistersetup{xml:ct:define}
+
+\startxmlsetups xml:ct:prep
+ \expanded{\prep[\xmltoparameters{#1}]}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:ct:text
+ \expanded{\text[\xmltoparameters{#1}]}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:ct:texts
+ \expanded{\texts[\xmltoparameters{#1}]}{\xmltext{#1}{/top}} {\xmltext{#1}{/bot}}
+\stopxmlsetups
+
+\startxmlsetups xml:ct:cell
+ \expanded{\cell[\xmltoparameters{#1}]}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:ct:cells
+ \expanded{\cells[\xmltoparameters{#1}]}{\xmltext{#1}{/top}} {\xmltext{#1}{/bot}}
+\stopxmlsetups
+
+\startxmlsetups xml:ct:lines
+ \expanded{\startlines[\xmltoparameters{#1}]}
+ \xmlflush{#1}
+ \stoplines
+\stopxmlsetups
+
+\startxmlsetups xml:ct:steptable
+ \expanded{\startSTEPtable[\xmltoparameters{#1}]}
+ \xmlflush{#1}
+ \stopSTEPtable
+\stopxmlsetups
+
+\startxmlsetups xml:ct:stepchart
+ \expanded{\startSTEPchart[\xmltoparameters{#1}]}
+ \xmlflush{#1}
+ \stopSTEPchart
+\stopxmlsetups
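+
+% A sketch of the kind of input this maps onto the step macros (an
+% illustration; the element names follow the setups above, the content is
+% made up):
+%
+%   <stepchart>
+%     <cell>first</cell>
+%     <cell>second</cell>
+%   </stepchart>
+%
+% which ends up as \startSTEPchart ... \cell{...} ... \stopSTEPchart.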
+
+% \startxmlsetups xml:ct:stepaligntable
+% \expanded{\startSTEPaligntable[\xmltoparameters{#1}]}
+% \xmlflush{#1}
+% \stopSTEPaligntable
+% \stopxmlsetups
+%
+% \startxmlsetups xml:ct:stepaligntable:cells
+% \expanded{\cells[\xmltoparameters{#1}]} {\xmltext{#1}{/ct:c1}} {\xmltext{#1}{/ct:c2}} {\xmltext{#1}{/ct:c3}}
+% \stopxmlsetups
+%
+% \startxmlsetups xml:ct:stepaligntable:lines
+% \expanded{\setupSTEPlines[\xmltoparameters{#1}]}
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/x-udhr.mkiv b/Master/texmf-dist/tex/context/base/x-udhr.mkiv
index 81075ac702b..e081bfd5971 100644
--- a/Master/texmf-dist/tex/context/base/x-udhr.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-udhr.mkiv
@@ -87,7 +87,7 @@
\setuptolerance
[verytolerant]
-\doifnotmode{demo}{\endinput}
+\continueifinputfile{x-udhr.mkiv}
% todo: when argument given then process it
diff --git a/Master/texmf-dist/tex/context/base/x-xfdf.mkiv b/Master/texmf-dist/tex/context/base/x-xfdf.mkiv
new file mode 100644
index 00000000000..460220ed943
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/x-xfdf.mkiv
@@ -0,0 +1,72 @@
+%D \module
+%D [ file=x-xfdf,
+%D version=2011.09.07,
+%D title=\CONTEXT\ XML Modules,
+%D subtitle=\XFDF,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is a revival of using \XFDF, but now in a more \MKIV-ish way. We
+%D already supported it long ago in \MKII\ but never used it on a large
+%D scale (not that much user interest anyway).
+
+\startmodule[xfdf]
+
+% see xfdf-001.xfdf and xfdf-001.tex
+
+% %D Possible speedup but hardly worth the trouble.
+%
+% \startluacode
+%
+% local hashes = { } table.setmetatableindex(hashes,function(t,k) local v = { } t[k] = v return v end)
+%
+% function xml.functions.xfdf_collect_values(root)
+% local hash = hashes[root]
+% for c in xml.collected(root,"/xfdf/fields/field/value") do
+% hash[xml.parent(c).at.name] = c
+% end
+% end
+%
+% function xml.functions.xfdf_get_values(root,name)
+% return hashes[root][name]
+% end
+%
+% function lxml.xfdf_get_values(root,name)
+% xml.sprint(hashes[lxml.id(root)][name])
+% end
+%
+% \stopluacode
+%
+% \def\xfdfvalue#1#2%
+% {\ctxlua{lxml.xfdf_get_values("#1","#2")}}
+
+\startxmlsetups xfdf:define
+ \xmlsetsetup{#1}{*}{xfdf:*}
+ % \xmlfilter {#1}{./function(xfdf_collect_values)}
+\stopxmlsetups
+
+\xmlregisterns{xfdf}{http://ns.adobe.com/xfdf/}
+
+\xmlregisterdocumentsetup{xfdf}{xfdf:define}
+
+\startxmlsetups xfdf:value
+ \xmlflush{#1}
+\stopxmlsetups
+
+\def\xfdfload #1#2{\xmlloadonly{#1}{#2}{xfdf}}
+\def\xfdfvalue#1#2{\xmlfirst{#1}{/xfdf/fields/field[@name='#2']/value}}
+\def\xfdftext #1#2{\xmlfirst{#1}{/xfdf/fields/field[@name='#2']/value/paragraphs()}}
+
+% \startxmlsetups xfdf:b
+% \bold{\xmlflush{#1}}
+% \stopxmlsetups
+
+% \xfdfload {whatever}{xfdf-001.xfdf}
+% \xfdfvalue{whatever}{somefield}
+
+\stopmodule
diff --git a/Master/texmf-dist/tex/context/base/xtag-ini.mkii b/Master/texmf-dist/tex/context/base/xtag-ini.mkii
index 8f10c4f55cd..ce3cbacd0b1 100644
--- a/Master/texmf-dist/tex/context/base/xtag-ini.mkii
+++ b/Master/texmf-dist/tex/context/base/xtag-ini.mkii
@@ -395,22 +395,22 @@
% \def\disableXML
% {\setnormalcatcodes\processingXMLfalse}
-\defcatcodecommand\xmlcatcodese `\& \doXMLentity
-\defcatcodecommand\xmlcatcodese `\< \doXMLelement
-\defcatcodecommand\xmlcatcodesr `\& \doXMLentity
-\defcatcodecommand\xmlcatcodesr `\< \doXMLelement
-\defcatcodecommand\xmlcatcodesn `\& \doXMLentity
-\defcatcodecommand\xmlcatcodesn `\< \doXMLelement
-
-\letcatcodecommand\xmlcatcodesr `\# \letterhash
-\letcatcodecommand\xmlcatcodesr `\$ \letterdollar
-\letcatcodecommand\xmlcatcodesr `\% \letterpercent
-\letcatcodecommand\xmlcatcodesr `\\ \letterbackslash
-\letcatcodecommand\xmlcatcodesr `\^ \letterhat
-\letcatcodecommand\xmlcatcodesr `\_ \letterunderscore
-\letcatcodecommand\xmlcatcodesr `\{ \letterleftbrace
-\letcatcodecommand\xmlcatcodesr `\} \letterrightbrace
-\letcatcodecommand\xmlcatcodesr `\| \letterbar
+\defcatcodecommand\xmlcatcodese 046 \doXMLentity
+\defcatcodecommand\xmlcatcodese 060 \doXMLelement
+\defcatcodecommand\xmlcatcodesr 046 \doXMLentity
+\defcatcodecommand\xmlcatcodesr 060 \doXMLelement
+\defcatcodecommand\xmlcatcodesn 046 \doXMLentity
+\defcatcodecommand\xmlcatcodesn 060 \doXMLelement
+
+\letcatcodecommand\xmlcatcodesr 035 \letterhash
+\letcatcodecommand\xmlcatcodesr 036 \letterdollar
+\letcatcodecommand\xmlcatcodesr 037 \letterpercent
+\letcatcodecommand\xmlcatcodesr 092 \letterbackslash
+\letcatcodecommand\xmlcatcodesr 094 \letterhat
+\letcatcodecommand\xmlcatcodesr 095 \letterunderscore
+\letcatcodecommand\xmlcatcodesr 123 \letterleftbrace
+\letcatcodecommand\xmlcatcodesr 125 \letterrightbrace
+\letcatcodecommand\xmlcatcodesr 124 \letterbar
\bgroup \catcode`\&=13 \let&\relax
@@ -426,15 +426,15 @@
\egroup
-\letcatcodecommand\xmlcatcodese `\# \entityhash
-\letcatcodecommand\xmlcatcodese `\$ \entitydollar
-\letcatcodecommand\xmlcatcodese `\% \entitypercent
-\letcatcodecommand\xmlcatcodese `\\ \entitybackslash
-\letcatcodecommand\xmlcatcodese `\^ \entityhat
-\letcatcodecommand\xmlcatcodese `\_ \entityunderscore
-\letcatcodecommand\xmlcatcodese `\{ \entityleftbrace
-\letcatcodecommand\xmlcatcodese `\} \entityrightbrace
-\letcatcodecommand\xmlcatcodese `\| \entitybar
+\letcatcodecommand\xmlcatcodese 035 \entityhash
+\letcatcodecommand\xmlcatcodese 036 \entitydollar
+\letcatcodecommand\xmlcatcodese 037 \entitypercent
+\letcatcodecommand\xmlcatcodese 092 \entitybackslash
+\letcatcodecommand\xmlcatcodese 094 \entityhat
+\letcatcodecommand\xmlcatcodese 095 \entityunderscore
+\letcatcodecommand\xmlcatcodese 123 \entityleftbrace
+\letcatcodecommand\xmlcatcodese 125 \entityrightbrace
+\letcatcodecommand\xmlcatcodese 124 \entitybar
% we speed things up by explicitly setting the active char's < &
diff --git a/Master/texmf-dist/tex/context/base/xtag-pre.mkii b/Master/texmf-dist/tex/context/base/xtag-pre.mkii
index 63706d64c3a..5e75197b76b 100644
--- a/Master/texmf-dist/tex/context/base/xtag-pre.mkii
+++ b/Master/texmf-dist/tex/context/base/xtag-pre.mkii
@@ -18,6 +18,19 @@
\unprotect
+%D For \MKIV:
+
+\setvalue{@u@s@"}#1#2"{#2} \setvalue{@g@s@"}#1#2"{\scratchtoks{#2}}
+\setvalue{@u@s@'}#1#2'{#2} \setvalue{@g@s@'}#1#2'{\scratchtoks{#2}}
+\setvalue{@u@s@ }#1#2 {#2} \setvalue{@g@s@ }#1#2 {\scratchtoks{#2}}
+
+\def\unstringed#1{\csname\ifcsname @u@s@#1\endcsname @u@s@#1\else\s!empty\fi\endcsname#1}
+\def\grabstring#1{\csname\ifcsname @g@s@#1\endcsname @g@s@#1\else\s!empty\fi\endcsname#1}
+
+\def\dowithgrabbedstring#1%
+ {\def\@@dowithgrabbedstring{#1}%
+ \afterassignment\@@dowithgrabbedstring\grabstring}
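+
+%D A usage sketch (an illustration, the handler name is made up): a call like
+%D   \dowithgrabbedstring{\somehandler}"some value"
+%D stores the quote||delimited text in \scratchtoks and then expands
+%D \somehandler; single quotes and spaces work as delimiters as well.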
+
%D Let's get rid of ligatures:
% \definefonthandling [default] [noligs]
diff --git a/Master/texmf-dist/tex/context/colors/icc/colorprofiles.lua b/Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.lua
index b2ae731a78c..b2ae731a78c 100644
--- a/Master/texmf-dist/tex/context/colors/icc/colorprofiles.lua
+++ b/Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.lua
diff --git a/Master/texmf-dist/tex/context/colors/icc/colorprofiles.xml b/Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.xml
index 96b8b54c590..96b8b54c590 100644
--- a/Master/texmf-dist/tex/context/colors/icc/colorprofiles.xml
+++ b/Master/texmf-dist/tex/context/colors/icc/context/colorprofiles.xml
diff --git a/Master/texmf-dist/tex/context/config/cont-de.ini b/Master/texmf-dist/tex/context/config/cont-de.ini
deleted file mode 100644
index 1d62331ca07..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-de.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-de.mkii
-
diff --git a/Master/texmf-dist/tex/context/config/cont-en.ini b/Master/texmf-dist/tex/context/config/cont-en.ini
deleted file mode 100644
index 1e83879082e..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-en.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-en.mkii
-
diff --git a/Master/texmf-dist/tex/context/config/cont-fr.ini b/Master/texmf-dist/tex/context/config/cont-fr.ini
deleted file mode 100644
index 4f565a33b1a..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-fr.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-fr.mkii
-
diff --git a/Master/texmf-dist/tex/context/config/cont-it.ini b/Master/texmf-dist/tex/context/config/cont-it.ini
deleted file mode 100644
index da6b81f008c..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-it.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-it.mkii
-
diff --git a/Master/texmf-dist/tex/context/config/cont-nl.ini b/Master/texmf-dist/tex/context/config/cont-nl.ini
deleted file mode 100644
index 52079929546..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-nl.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-nl.mkii
-
diff --git a/Master/texmf-dist/tex/context/config/cont-ro.ini b/Master/texmf-dist/tex/context/config/cont-ro.ini
deleted file mode 100644
index 9222d4aca72..00000000000
--- a/Master/texmf-dist/tex/context/config/cont-ro.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-\input cont-ro.mkii
-
diff --git a/Master/texmf-dist/tex/context/fonts/cc-icons.lfg b/Master/texmf-dist/tex/context/fonts/cc-icons.lfg
new file mode 100644
index 00000000000..2739918afee
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/cc-icons.lfg
@@ -0,0 +1,23 @@
+local utfbyte = utf.byte
+
+return {
+ name = "cc-icons",
+ version = "1.00",
+ comment = "Goodies that complement creative commons icons.",
+ author = "Hans Hagen",
+ copyright = "ConTeXt development team",
+ remapping = {
+ tounicode = true,
+ unicodes = {
+ sa = utfbyte("a"),
+ by = utfbyte("b"),
+ cc = utfbyte("c"),
+ nd = utfbyte("d"),
+ nc = utfbyte("n"),
+ sampling = utfbyte("m"),
+ share = utfbyte("s"),
+ remix = utfbyte("r"),
+ logo = utfbyte("C"),
+ },
+ },
+}
diff --git a/Master/texmf-dist/tex/context/fonts/demo.lfg b/Master/texmf-dist/tex/context/fonts/demo.lfg
index 00ca3ee6e73..06c59e64427 100644
--- a/Master/texmf-dist/tex/context/fonts/demo.lfg
+++ b/Master/texmf-dist/tex/context/fonts/demo.lfg
@@ -30,11 +30,36 @@ return {
comment = "An example of goodies.",
author = "Hans Hagen",
featuresets = {
+ simple = {
+ mode = "node",
+ script = "latn"
+ },
default = {
- default,
+ mode = "node",
+ script = "latn",
+ kern = "yes",
+ },
+ ligatures = {
+ mode = "node",
+ script = "latn",
+ kern = "yes",
+ liga = "yes",
},
smallcaps = {
- default, smallcaps,
+ mode = "node",
+ script = "latn",
+ kern = "yes",
+ smcp = "yes",
+ },
+ },
+ solutions = { -- here we have references to featuresets, so we use strings!
+ experimental = {
+ less = {
+ "ligatures", "simple",
+ },
+ more = {
+ "smallcaps",
+ },
},
},
colorschemes = {
diff --git a/Master/texmf-dist/tex/context/fonts/husayni.lfg b/Master/texmf-dist/tex/context/fonts/husayni.lfg
index ff456f63ca3..2f45add21ef 100644
--- a/Master/texmf-dist/tex/context/fonts/husayni.lfg
+++ b/Master/texmf-dist/tex/context/fonts/husayni.lfg
@@ -17,6 +17,7 @@ local regular = {
ss01 = yes, ss03 = yes, ss07 = yes, ss10 = yes, ss12 = yes, ss15 = yes, ss16 = yes,
ss19 = yes, ss24 = yes, ss25 = yes, ss26 = yes, ss27 = yes, ss31 = yes, ss34 = yes,
ss35 = yes, ss36 = yes, ss37 = yes, ss38 = yes, ss41 = yes, ss42 = yes, ss43 = yes,
+ ss55 = yes,
js16 = yes,
}
@@ -31,6 +32,7 @@ local minimal_stretching = {
local medium_stretching = {
js12=yes, js05=yes,
}
+
local maximal_stretching= {
js13 = yes, js05 = yes, js09 = yes,
}
@@ -43,7 +45,7 @@ local shrink = {
flts = yes, js17 = yes, ss05 = yes, ss11 = yes, ss06 = yes, ss09 = yes,
}
-local default = {
+local default = { -- we need to merge the typescript definition in here
basics, analysis, regular, positioning, -- xxxx = yes, yyyy = 2,
}
@@ -75,7 +77,7 @@ return {
solutions = { -- here we have references to featuresets, so we use strings!
experimental = {
less = {
- "shrink"
+ "shrink", -- we need an extra one
},
more = {
"minimal_stretching", "medium_stretching", "maximal_stretching", "wide_all"
diff --git a/Master/texmf-dist/tex/context/fonts/lm-math.lfg b/Master/texmf-dist/tex/context/fonts/lm-math.lfg
index a2c095bc2b5..87c37cd78d9 100644
--- a/Master/texmf-dist/tex/context/fonts/lm-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/lm-math.lfg
@@ -44,19 +44,19 @@
local five = {
{ name = "lmroman5-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr5.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi5.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi5.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy5.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi5.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi5.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy5.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam5.tfm", vector = "tex-ma" },
{ name = "msbm5.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx5.tfm", vector = "tex-bf" } ,
{ name = "lmroman5-bold", vector = "tex-bf" } ,
- { name = "lmmib5.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans8-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono8-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm5.tfm", vector = "tex-fraktur", optional=true },
+ { name = "lmmib5.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans8-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono8-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm5.tfm", vector = "tex-fraktur", optional = true },
}
-- rm-lmr6 : LMMathRoman6-Regular
@@ -67,20 +67,20 @@ local five = {
local six = {
{ name = "lmroman6-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr6.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi6.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi6.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy6.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi6.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi6.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy6.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam5.tfm", vector = "tex-ma" },
{ name = "msbm5.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx6.tfm", vector = "tex-bf" } ,
{ name = "lmroman6-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib5.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans8-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono8-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm5.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb5.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib5.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans8-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono8-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm5.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb5.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr7 : LMMathRoman7-Regular
@@ -93,20 +93,20 @@ local six = {
local seven = {
{ name = "lmroman7-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr7.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi7.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi7.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy7.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi7.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi7.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy7.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam7.tfm", vector = "tex-ma" },
{ name = "msbm7.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx7.tfm", vector = "tex-bf" } ,
{ name = "lmroman7-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib7.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans8-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono8-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm7.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb7.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib7.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans8-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono8-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm7.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb7.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr8 : LMMathRoman8-Regular
@@ -117,20 +117,20 @@ local seven = {
local eight = {
{ name = "lmroman8-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr8.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi8.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi8.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy8.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi8.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi8.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy8.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam7.tfm", vector = "tex-ma" },
{ name = "msbm7.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx8.tfm", vector = "tex-bf" } ,
{ name = "lmroman8-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib7.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans8-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono8-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm7.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb7.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib7.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans8-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono8-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm7.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb7.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr9 : LMMathRoman9-Regular
@@ -141,20 +141,20 @@ local eight = {
local nine = {
{ name = "lmroman9-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr9.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi9.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi9.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy9.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi9.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi9.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy9.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx9.tfm", vector = "tex-bf" } ,
{ name = "lmroman9-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans9-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono9-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm10.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib10.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans9-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono9-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm10.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr10 : LMMathRoman10-Regular
@@ -168,20 +168,20 @@ local nine = {
local ten = {
{ name = "lmroman10-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr10.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi10.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi10.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy10.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi10.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi10.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy10.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx10.tfm", vector = "tex-bf" } ,
{ name = "lmroman10-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans10-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono10-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm10.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib10.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans10-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono10-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm10.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr12 : LMMathRoman12-Regular
@@ -191,20 +191,20 @@ local ten = {
local twelve = {
{ name = "lmroman12-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr12.tfm", vector = "tex-mr-missing" },
- { name = "lmmi12.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi12.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy10.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi12.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi12.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy10.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx12.tfm", vector = "tex-bf" } ,
{ name = "lmroman12-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans12-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono12-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm10.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib10.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans12-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono12-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm10.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional = true },
}
-- rm-lmr17 : LMMathRoman17-Regular
@@ -212,20 +212,20 @@ local twelve = {
local seventeen = {
{ name = "lmroman17-regular.otf", features = "virtualmath", main = true },
{ name = "rm-lmr12.tfm", vector = "tex-mr-missing" } ,
- { name = "lmmi12.tfm", vector = "tex-mi", skewchar=0x7F },
- { name = "lmmi12.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "lmsy10.tfm", vector = "tex-sy", skewchar=0x30, parameters = true } ,
+ { name = "lmmi12.tfm", vector = "tex-mi", skewchar = 0x7F },
+ { name = "lmmi12.tfm", vector = "tex-it", skewchar = 0x7F },
+ { name = "lmsy10.tfm", vector = "tex-sy", skewchar = 0x30, parameters = true } ,
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
{ name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx12.tfm", vector = "tex-bf" } ,
{ name = "lmroman12-bold.otf", vector = "tex-bf" } ,
- { name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
- { name = "lmsans17-regular.otf", vector = "tex-ss", optional=true },
- { name = "lmmono17-regular.otf", vector = "tex-tt", optional=true },
- { name = "eufm10.tfm", vector = "tex-fraktur", optional=true },
- { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional=true },
+ { name = "lmmib10.tfm", vector = "tex-bi", skewchar = 0x7F } ,
+ { name = "lmsans17-regular.otf", vector = "tex-ss", optional = true },
+ { name = "lmmono17-regular.otf", vector = "tex-tt", optional = true },
+ { name = "eufm10.tfm", vector = "tex-fraktur", optional = true },
+ { name = "eufb10.tfm", vector = "tex-fraktur-bold", optional = true },
}
return {
diff --git a/Master/texmf-dist/tex/context/fonts/lm.lfg b/Master/texmf-dist/tex/context/fonts/lm.lfg
index 406902ef2bc..792e723e82b 100644
--- a/Master/texmf-dist/tex/context/fonts/lm.lfg
+++ b/Master/texmf-dist/tex/context/fonts/lm.lfg
@@ -1,3 +1,6 @@
+-- In order to be able to use beta math fonts, we use our own file name and
+-- always remap.
+
return {
name = "latin modern",
version = "1.00",
@@ -10,6 +13,38 @@ return {
mathematics.tweaks.fixbadprime, -- prime is too low
},
},
+ dimensions = {
+ -- always applied
+-- default = {
+-- },
+ -- driven by 'mathdimensions' feature
+ signs = {
+ -- set dimensions
+ -- [0x00B1] = { -- ±
+ -- height = 500, depth = 0,
+ -- },
+ -- [0x2213] = { -- ∓
+ -- height = 500, depth = 0,
+ -- },
+ -- move in boundingbox
+ [0x00B1] = { -- ±
+ yoffset = 100,
+ },
+ [0x2213] = { -- ∓
+ yoffset = -100,
+ },
+ }
+ }
+ },
+ filenames = {
+ ["latinmodern-math-regular.otf"] = {
+ "latinmodern-math.otf", -- the beta
+ "lmmath-regular.otf",
+ "latinmodernmath-regular.otf",
+ "lmmath-regular.otf",
+ "lmodernmath-regular.otf",
+ "lmodern-math.otf",
+ },
},
designsizes = {
["LMMathRoman-Regular"] = {
diff --git a/Master/texmf-dist/tex/context/fonts/lucida-opentype-math.lfg b/Master/texmf-dist/tex/context/fonts/lucida-opentype-math.lfg
index 38d54eee325..c8556ebca5b 100644
--- a/Master/texmf-dist/tex/context/fonts/lucida-opentype-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/lucida-opentype-math.lfg
@@ -6,7 +6,12 @@ return {
copyright = "ConTeXt development team",
mathematics = {
alternates = {
- italic = { feature = 'ss01', value = 1, comment = "Mathematical Alternative Italic" },
+ italic = { feature = 'ss01', value = 1, comment = "Mathematical Alternative Lowercase Italic" },
+ arrow = { feature = 'ss02', value = 1, comment = "Mathematical Alternative Smaller Arrows" },
+ operator = { feature = 'ss03', value = 1, comment = "Mathematical Alternative Smaller Operators" },
+ calligraphic = { feature = 'ss04', value = 1, comment = "Mathematical Alternative Calligraphic Characters" },
+ zero = { feature = 'ss05', value = 1, comment = "Mathematical Alternative Zero" },
+ partial = { feature = 'ss20', value = 1, comment = "Mathematical Alternative Upright Partial Differential" },
}
}
}
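The extended alternates table in lucida-opentype-math.lfg ties symbolic names (arrow, operator, calligraphic, zero, partial) to the OpenType stylistic-set features (ss01..ss20) that Lucida OpenType Math provides. A small Lua sketch of the lookup such a table supports follows; the feature_of helper is a made-up name for illustration only.

-- sketch: map an alternate name to its stylistic-set feature tag
local alternates = {
    italic       = { feature = "ss01", value = 1, comment = "Mathematical Alternative Lowercase Italic" },
    arrow        = { feature = "ss02", value = 1, comment = "Mathematical Alternative Smaller Arrows" },
    calligraphic = { feature = "ss04", value = 1, comment = "Mathematical Alternative Calligraphic Characters" },
    partial      = { feature = "ss20", value = 1, comment = "Mathematical Alternative Upright Partial Differential" },
}

local function feature_of(name)
    local a = alternates[name]
    if a then
        return a.feature, a.value
    end
end

local tag, value = feature_of("calligraphic")
print(tag, value) --> ss04  1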
diff --git a/Master/texmf-dist/tex/context/fonts/texgyre.lfg b/Master/texmf-dist/tex/context/fonts/texgyre.lfg
new file mode 100644
index 00000000000..95369223225
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/texgyre.lfg
@@ -0,0 +1,24 @@
+-- In order to be able to use beta math fonts, we use our own file name and
+-- always remap.
+
+return {
+ name = "tex gyre",
+ version = "1.00",
+ comment = "Goodies that complement tex gyre.",
+ author = "Hans Hagen",
+ copyright = "ConTeXt development team",
+ filenames = {
+ ["texgyre-pagella-math-regular.otf"] = {
+ "texgyrepagella-math.otf", -- beta
+ "texgyrepagellamath-regular.otf",
+ "tgpagellamath-regular.otf",
+ "tgpagella-math.otf",
+ },
+ ["texgyre-termes-math-regular.otf"] = {
+ "texgyretermes-math.otf", -- beta
+ "texgyretermesmath-regular.otf",
+ "tgtermesmath-regular.otf",
+ "tgtermes-math.otf",
+ },
+ },
+}
diff --git a/Master/texmf-dist/tex/context/interface/keys-cs.xml b/Master/texmf-dist/tex/context/interface/keys-cs.xml
index cd95319e02a..ad0cf2dca35 100644
--- a/Master/texmf-dist/tex/context/interface/keys-cs.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-cs.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='kapitola'/>
<cd:variable name='character' value='pismeno'/>
<cd:variable name='characters' value='pismena'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='barevne'/>
<cd:variable name='column' value='column'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='vycet'/>
<cd:variable name='environment' value='prostredi'/>
<cd:variable name='even' value='sude'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='externi'/>
<cd:variable name='fact' value='fakt'/>
<cd:variable name='february' value='unor'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='levastranka'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legenda'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='radek'/>
<cd:variable name='linenote' value='linenote'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='pondeli'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='mesic'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='jmeno'/>
<cd:variable name='narrow' value='uzky'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='podvpred'/>
<cd:variable name='subject' value='tema'/>
<cd:variable name='subpage' value='podstranka'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='podsekce'/>
<cd:variable name='subsubject' value='podtema'/>
<cd:variable name='subsubsection' value='podpodsekce'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='spodek'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='vzdalenostspodku'/>
<cd:constant name='bottomframe' value='ramecekdole'/>
<cd:constant name='bottomoffset' value='offsetspodku'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='prvnistranka'/>
<cd:constant name='focus' value='zaostreni'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='popisek'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='poslednistrana'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='vlevo'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='podvetavlevo'/>
<cd:constant name='lefttext' value='textvlevo'/>
<cd:constant name='leftwidth' value='sirkavlevo'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='uroven'/>
<cd:constant name='levels' value='urovne'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='znaceni'/>
<cd:constant name='marstyle' value='stylsnacky'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='maxvyska'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menu'/>
<cd:constant name='method' value='metoda'/>
<cd:constant name='middle' value='stredni'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='strednitext'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='minvyska'/>
<cd:constant name='minwidth' value='minsirka'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='jmeno'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='oddelovac'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='pocitat'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='znak'/>
<cd:constant name='size' value='velikost'/>
<cd:constant name='small' value='male'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='zdroj'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='tolerance'/>
<cd:constant name='top' value='vrsek'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='vzdalenostvrsku'/>
<cd:constant name='topframe' value='rameceknahore'/>
<cd:constant name='topoffset' value='offsetvrsku'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='switchtorawfont'/>
<cd:command name='sym' value='sym'/>
<cd:command name='symbol' value='symbol'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='synchronizacnilista'/>
<cd:command name='synchronize' value='synchronizovat'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-de.xml b/Master/texmf-dist/tex/context/interface/keys-de.xml
index a28d23bcfb6..5d107ca40c8 100644
--- a/Master/texmf-dist/tex/context/interface/keys-de.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-de.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='kapitel'/>
<cd:variable name='character' value='buchstabe'/>
<cd:variable name='characters' value='buchstaben'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='farbe'/>
<cd:variable name='column' value='column'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='nummerierung'/>
<cd:variable name='environment' value='umgebung'/>
<cd:variable name='even' value='gerade'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='extern'/>
<cd:variable name='fact' value='gegeben'/>
<cd:variable name='february' value='februar'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='linkerseite'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legende'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='zeile'/>
<cd:variable name='linenote' value='linenote'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='montag'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='monat'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='name'/>
<cd:variable name='narrow' value='schmall'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='untervorwaerts'/>
<cd:variable name='subject' value='thema'/>
<cd:variable name='subpage' value='unterseite'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='unterabsatz'/>
<cd:variable name='subsubject' value='unterthema'/>
<cd:variable name='subsubsection' value='unterunterabsatz'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='unten'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='abstandunten'/>
<cd:constant name='bottomframe' value='untenrahmen'/>
<cd:constant name='bottomoffset' value='untenoffset'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='ersteseite'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='letzteseite'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='links'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='linkersubsatz'/>
<cd:constant name='lefttext' value='linkertext'/>
<cd:constant name='leftwidth' value='linkerbreite'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='niveau'/>
<cd:constant name='levels' value='niveaus'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='beschriftung'/>
<cd:constant name='marstyle' value='beschrstil'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='maxhoehe'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menue'/>
<cd:constant name='method' value='methode'/>
<cd:constant name='middle' value='mittig'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='mittigertext'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='minhoehe'/>
<cd:constant name='minwidth' value='minbreite'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='name'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='seperator'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='objektabstand'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='zeichen'/>
<cd:constant name='size' value='groesse'/>
<cd:constant name='small' value='klein'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='quelle'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='toleranz'/>
<cd:constant name='top' value='oben'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='obenabstand'/>
<cd:constant name='topframe' value='obenrahmen'/>
<cd:constant name='topoffset' value='obenoffset'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='switchtorawfont'/>
<cd:command name='sym' value='sym'/>
<cd:command name='symbol' value='symbol'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='synchronisationsbalken'/>
<cd:command name='synchronize' value='synchronisieren'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-en.xml b/Master/texmf-dist/tex/context/interface/keys-en.xml
index 80ff3c2e1a3..d9166d1075d 100644
--- a/Master/texmf-dist/tex/context/interface/keys-en.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-en.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='chapter'/>
<cd:variable name='character' value='character'/>
<cd:variable name='characters' value='characters'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='color'/>
<cd:variable name='column' value='column'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='enumeration'/>
<cd:variable name='environment' value='environment'/>
<cd:variable name='even' value='even'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='external'/>
<cd:variable name='fact' value='fact'/>
<cd:variable name='february' value='february'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='leftpage'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legend'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='line'/>
<cd:variable name='linenote' value='linenote'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='monday'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='month'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='name'/>
<cd:variable name='narrow' value='narrow'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='subforward'/>
<cd:variable name='subject' value='subject'/>
<cd:variable name='subpage' value='subpage'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='subsection'/>
<cd:variable name='subsubject' value='subsubject'/>
<cd:variable name='subsubsection' value='subsubsection'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='bottom'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='bottomdistance'/>
<cd:constant name='bottomframe' value='bottomframe'/>
<cd:constant name='bottomoffset' value='bottomoffset'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='firstpage'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='lastpage'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='left'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='leftsubsentence'/>
<cd:constant name='lefttext' value='lefttext'/>
<cd:constant name='leftwidth' value='leftwidth'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='level'/>
<cd:constant name='levels' value='levels'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='marking'/>
<cd:constant name='marstyle' value='marstyle'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='maxheight'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menu'/>
<cd:constant name='method' value='method'/>
<cd:constant name='middle' value='middle'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='middletext'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='minheight'/>
<cd:constant name='minwidth' value='minwidth'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='name'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='separator'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='side'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='sign'/>
<cd:constant name='size' value='size'/>
<cd:constant name='small' value='small'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='source'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='tolerance'/>
<cd:constant name='top' value='top'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='topdistance'/>
<cd:constant name='topframe' value='topframe'/>
<cd:constant name='topoffset' value='topoffset'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='switchtorawfont'/>
<cd:command name='sym' value='sym'/>
<cd:command name='symbol' value='symbol'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='synchronizationbar'/>
<cd:command name='synchronize' value='synchronize'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-fr.xml b/Master/texmf-dist/tex/context/interface/keys-fr.xml
index d97bf932ae8..c98826cf36d 100644
--- a/Master/texmf-dist/tex/context/interface/keys-fr.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-fr.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='chapitre'/>
<cd:variable name='character' value='caractere'/>
<cd:variable name='characters' value='caracteres'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='couleur'/>
<cd:variable name='column' value='colonne'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='enumeration'/>
<cd:variable name='environment' value='environement'/>
<cd:variable name='even' value='paire'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='external'/>
<cd:variable name='fact' value='fait'/>
<cd:variable name='february' value='fevrier'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='pagegauche'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legende'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='ligne'/>
<cd:variable name='linenote' value='noteligne'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='lundi'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='mois'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='nom'/>
<cd:variable name='narrow' value='etroit'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='sousavance'/>
<cd:variable name='subject' value='sujet'/>
<cd:variable name='subpage' value='souspage'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='soussection'/>
<cd:variable name='subsubject' value='soussujet'/>
<cd:variable name='subsubsection' value='soussoussection'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='inf'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='distanceinf'/>
<cd:constant name='bottomframe' value='cadreinf'/>
<cd:constant name='bottomoffset' value='decalageinf'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='premierepage'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='etiquette'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='dernierepage'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='gauche'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='sousphrasegauche'/>
<cd:constant name='lefttext' value='textegauche'/>
<cd:constant name='leftwidth' value='largeurgauche'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='niveau'/>
<cd:constant name='levels' value='niveaux'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='marquage'/>
<cd:constant name='marstyle' value='stylemarquage'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='hauteurmax'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menu'/>
<cd:constant name='method' value='methode'/>
<cd:constant name='middle' value='milieu'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='textecentre'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='hauteurmin'/>
<cd:constant name='minwidth' value='largeurmin'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='nom'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='separateur'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='reglages'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='cote'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='signe'/>
<cd:constant name='size' value='dimension'/>
<cd:constant name='small' value='petit'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='origine'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='tolerance'/>
<cd:constant name='top' value='sup'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='distancesup'/>
<cd:constant name='topframe' value='cadresup'/>
<cd:constant name='topoffset' value='decalagesup'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='changepolicebrute'/>
<cd:command name='sym' value='sym'/>
<cd:command name='symbol' value='symbole'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='barresynchronisation'/>
<cd:command name='synchronize' value='synchronise'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-it.xml b/Master/texmf-dist/tex/context/interface/keys-it.xml
index a03a88ba391..afe3b8360b6 100644
--- a/Master/texmf-dist/tex/context/interface/keys-it.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-it.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='capitolo'/>
<cd:variable name='character' value='lettera'/>
<cd:variable name='characters' value='lettere'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='colore'/>
<cd:variable name='column' value='colonna'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='enumerazione'/>
<cd:variable name='environment' value='ambiente'/>
<cd:variable name='even' value='pari'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='esterno'/>
<cd:variable name='fact' value='fatto'/>
<cd:variable name='february' value='febbraio'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='paginasinistra'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legenda'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='riga'/>
<cd:variable name='linenote' value='linenote'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='lunedi'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='mese'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='nome'/>
<cd:variable name='narrow' value='stretto'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='sottoavanti'/>
<cd:variable name='subject' value='argomento'/>
<cd:variable name='subpage' value='sottopagina'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='sottocapoverso'/>
<cd:variable name='subsubject' value='sottoargomento'/>
<cd:variable name='subsubsection' value='sottosottocapoverso'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='fondo'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='distanzafondo'/>
<cd:constant name='bottomframe' value='cornicefondo'/>
<cd:constant name='bottomoffset' value='offsetfondo'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='primapagina'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='etichetta'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='ultimapagina'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='sinistra'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='sottofrasesinistra'/>
<cd:constant name='lefttext' value='testosinistro'/>
<cd:constant name='leftwidth' value='ampiezzasinistra'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='livello'/>
<cd:constant name='levels' value='livelli'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='marcatura'/>
<cd:constant name='marstyle' value='stilemarcatura'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='altezzamax'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menu'/>
<cd:constant name='method' value='metodo'/>
<cd:constant name='middle' value='centro'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='testocentro'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='altezzamin'/>
<cd:constant name='minwidth' value='ampiezzamin'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='nome'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='separatore'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='lato'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='segno'/>
<cd:constant name='size' value='dimensione'/>
<cd:constant name='small' value='piccolo'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='origine'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='tolleranza'/>
<cd:constant name='top' value='cima'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='distanzacima'/>
<cd:constant name='topframe' value='cornicecima'/>
<cd:constant name='topoffset' value='offsetcima'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='passaafontgrezzo'/>
<cd:command name='sym' value='sim'/>
<cd:command name='symbol' value='simbolo'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='barrasincronizzazione'/>
<cd:command name='synchronize' value='sincronizza'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-nl.xml b/Master/texmf-dist/tex/context/interface/keys-nl.xml
index 2c9113c39bd..226c9683921 100644
--- a/Master/texmf-dist/tex/context/interface/keys-nl.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-nl.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='hoofdstuk'/>
<cd:variable name='character' value='letter'/>
<cd:variable name='characters' value='letters'/>
+ <cd:variable name='chemistry' value='chemie'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='kleur'/>
<cd:variable name='column' value='kolom'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='doornummering'/>
<cd:variable name='environment' value='omgeving'/>
<cd:variable name='even' value='even'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='extern'/>
<cd:variable name='fact' value='gegeven'/>
<cd:variable name='february' value='februari'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='linkerpagina'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legenda'/>
+ <cd:variable name='less' value='minder'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='regel'/>
<cd:variable name='linenote' value='regelnoot'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='maandag'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='maand'/>
+ <cd:variable name='more' value='meer'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='naam'/>
<cd:variable name='narrow' value='smal'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='subvooruit'/>
<cd:variable name='subject' value='onderwerp'/>
<cd:variable name='subpage' value='subpagina'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='subparagraaf'/>
<cd:variable name='subsubject' value='subonderwerp'/>
<cd:variable name='subsubsection' value='subsubparagraaf'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='onder'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='ondercommando'/>
<cd:constant name='bottomdistance' value='onderafstand'/>
<cd:constant name='bottomframe' value='onderkader'/>
<cd:constant name='bottomoffset' value='onderoffset'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='eerstepagina'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusuit'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='laatstepagina'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='links'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='linkersubzin'/>
<cd:constant name='lefttext' value='linkertekst'/>
<cd:constant name='leftwidth' value='linkerbreedte'/>
+ <cd:constant name='less' value='minder'/>
<cd:constant name='level' value='niveau'/>
<cd:constant name='levels' value='niveaus'/>
<cd:constant name='limittext' value='limiettekst'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='markering'/>
<cd:constant name='marstyle' value='marletter'/>
<cd:constant name='mask' value='masker'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='maxhoogte'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='menu'/>
<cd:constant name='method' value='methode'/>
<cd:constant name='middle' value='midden'/>
+ <cd:constant name='middlecommand' value='middencommando'/>
<cd:constant name='middlespeech' value='middenuitspraak'/>
<cd:constant name='middletext' value='middentekst'/>
<cd:constant name='midsentence' value='middenzin'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='minhoogte'/>
<cd:constant name='minwidth' value='minbreedte'/>
<cd:constant name='monthconversion' value='maandconversie'/>
+ <cd:constant name='more' value='meer'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='naam'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='scheider'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='krimp'/>
<cd:constant name='side' value='zij'/>
<cd:constant name='sidealign' value='zijuitlijnen'/>
<cd:constant name='sidemethod' value='zijmethode'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='teken'/>
<cd:constant name='size' value='formaat'/>
<cd:constant name='small' value='klein'/>
+ <cd:constant name='solution' value='oplossing'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sortering'/>
<cd:constant name='source' value='bron'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='tolerantie'/>
<cd:constant name='top' value='boven'/>
+ <cd:constant name='topcommand' value='bovencommando'/>
<cd:constant name='topdistance' value='bovenafstand'/>
<cd:constant name='topframe' value='bovenkader'/>
<cd:constant name='topoffset' value='bovenoffset'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='switchtorawfont'/>
<cd:command name='sym' value='sym'/>
<cd:command name='symbol' value='symbool'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='synchronisatiebalk'/>
<cd:command name='synchronize' value='synchroniseer'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-pe.xml b/Master/texmf-dist/tex/context/interface/keys-pe.xml
index 5f48acdd00e..9303c29fd94 100644
--- a/Master/texmf-dist/tex/context/interface/keys-pe.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-pe.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='فصل'/>
<cd:variable name='character' value='حرف'/>
<cd:variable name='characters' value='حرفها'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='رنگ'/>
<cd:variable name='column' value='ستون'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='شماره‌بندی'/>
<cd:variable name='environment' value='محیط'/>
<cd:variable name='even' value='زوج'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='خارجی'/>
<cd:variable name='fact' value='fact'/>
<cd:variable name='february' value='فوریه'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='صفحه‌چپ'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='راهنما'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='شکست‌کلمات‌کمتر'/>
<cd:variable name='line' value='خط'/>
<cd:variable name='linenote' value='خط‌نوشت'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='دوشنبه'/>
<cd:variable name='mono' value='مونو'/>
<cd:variable name='month' value='ماه'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='شکست‌کلمات‌بیشتر'/>
<cd:variable name='name' value='نام'/>
<cd:variable name='narrow' value='نازک'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='زیرجلوگرد'/>
<cd:variable name='subject' value='موضوع'/>
<cd:variable name='subpage' value='زیرصفحه'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='زیربخش'/>
<cd:variable name='subsubject' value='زیرموضوع'/>
<cd:variable name='subsubsection' value='زیرزیربخش'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='پایین'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='فاصله‌پایین'/>
<cd:constant name='bottomframe' value='قالب‌پایین'/>
<cd:constant name='bottomoffset' value='آفست‌پایین'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='صفحه‌اول'/>
<cd:constant name='focus' value='تمرکز'/>
<cd:constant name='focusin' value='تمرکزدرون'/>
<cd:constant name='focusout' value='تمرکزبیرون'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='برچسب'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='صفحه‌آخر'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='چپ'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='زیرجمله‌چپ'/>
<cd:constant name='lefttext' value='متن‌چپ'/>
<cd:constant name='leftwidth' value='عرض‌خط'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='مرحله'/>
<cd:constant name='levels' value='مرحله‌ها'/>
<cd:constant name='limittext' value='مرزمتن'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='نشانه‌گذاری'/>
<cd:constant name='marstyle' value='سبک‌حاش'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='بیشترین'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='بیشترین‌ارتفاع'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='منو'/>
<cd:constant name='method' value='روش'/>
<cd:constant name='middle' value='میان'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='سخنرانی‌میانی'/>
<cd:constant name='middletext' value='متن‌میانی'/>
<cd:constant name='midsentence' value='جمله‌میانی'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='کمترین‌ارتفاع'/>
<cd:constant name='minwidth' value='کمترین‌عرض'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='نام'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='جداکننده'/>
<cd:constant name='set' value='قراربده'/>
<cd:constant name='setups' value='بارگذاریها'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='کنار'/>
<cd:constant name='sidealign' value='تنظیم‌کنار'/>
<cd:constant name='sidemethod' value='روش‌کنار'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='علامت'/>
<cd:constant name='size' value='اندازه'/>
<cd:constant name='small' value='کوچک'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='ترتیب‌تایپ'/>
<cd:constant name='source' value='منبع'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='بردباری'/>
<cd:constant name='top' value='بالا'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='فاصله‌بالا'/>
<cd:constant name='topframe' value='قالب‌راست'/>
<cd:constant name='topoffset' value='آفست‌بالا'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='تغییربه‌قلم‌خام'/>
<cd:command name='sym' value='نم'/>
<cd:command name='symbol' value='نماد'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='میله‌تطابق'/>
<cd:command name='synchronize' value='تطابق'/>
<cd:command name='tab' value='تب'/>
diff --git a/Master/texmf-dist/tex/context/interface/keys-ro.xml b/Master/texmf-dist/tex/context/interface/keys-ro.xml
index 22ccf09c056..29368c9bc64 100644
--- a/Master/texmf-dist/tex/context/interface/keys-ro.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-ro.xml
@@ -123,6 +123,7 @@
<cd:variable name='chapter' value='capitol'/>
<cd:variable name='character' value='caracter'/>
<cd:variable name='characters' value='caractere'/>
+ <cd:variable name='chemistry' value='chemistry'/>
<cd:variable name='cite' value='cite'/>
<cd:variable name='color' value='culoare'/>
<cd:variable name='column' value='coloana'/>
@@ -160,6 +161,7 @@
<cd:variable name='enumeration' value='enumerare'/>
<cd:variable name='environment' value='mediu'/>
<cd:variable name='even' value='par'/>
+ <cd:variable name='export' value='export'/>
<cd:variable name='external' value='extern'/>
<cd:variable name='fact' value='fapt'/>
<cd:variable name='february' value='februarie'/>
@@ -261,6 +263,7 @@
<cd:variable name='leftpage' value='paginastanga'/>
<cd:variable name='lefttoright' value='lefttoright'/>
<cd:variable name='legend' value='legenda'/>
+ <cd:variable name='less' value='less'/>
<cd:variable name='lesshyphenation' value='lesshyphenation'/>
<cd:variable name='line' value='linie'/>
<cd:variable name='linenote' value='linenote'/>
@@ -301,6 +304,7 @@
<cd:variable name='monday' value='luni'/>
<cd:variable name='mono' value='mono'/>
<cd:variable name='month' value='luna'/>
+ <cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
<cd:variable name='name' value='nume'/>
<cd:variable name='narrow' value='ingust'/>
@@ -458,6 +462,7 @@
<cd:variable name='subforward' value='subavans'/>
<cd:variable name='subject' value='subiect'/>
<cd:variable name='subpage' value='subpagina'/>
+ <cd:variable name='subs' value='subs'/>
<cd:variable name='subsection' value='subsectiune'/>
<cd:variable name='subsubject' value='subsubiect'/>
<cd:variable name='subsubsection' value='subsubsectiune'/>
@@ -598,6 +603,7 @@
<cd:constant name='bottom' value='jos'/>
<cd:constant name='bottomafter' value='bottomafter'/>
<cd:constant name='bottombefore' value='bottombefore'/>
+ <cd:constant name='bottomcommand' value='bottomcommand'/>
<cd:constant name='bottomdistance' value='distantajos'/>
<cd:constant name='bottomframe' value='framejos'/>
<cd:constant name='bottomoffset' value='offsetjos'/>
@@ -690,6 +696,7 @@
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
+ <cd:constant name='firstpage' value='primapagina'/>
<cd:constant name='focus' value='focus'/>
<cd:constant name='focusin' value='focusin'/>
<cd:constant name='focusout' value='focusout'/>
@@ -756,6 +763,7 @@
<cd:constant name='label' value='eticheta'/>
<cd:constant name='labeloffset' value='labeloffset'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
+ <cd:constant name='lastpage' value='ultimapagina'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
<cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='stanga'/>
@@ -776,6 +784,7 @@
<cd:constant name='leftsubsentence' value='subpropozitiestanga'/>
<cd:constant name='lefttext' value='textstanga'/>
<cd:constant name='leftwidth' value='latimestanga'/>
+ <cd:constant name='less' value='less'/>
<cd:constant name='level' value='nivel'/>
<cd:constant name='levels' value='nivele'/>
<cd:constant name='limittext' value='limittext'/>
@@ -798,6 +807,7 @@
<cd:constant name='marking' value='marcaje'/>
<cd:constant name='marstyle' value='stilmarcaj'/>
<cd:constant name='mask' value='mask'/>
+ <cd:constant name='mathstyle' value='mathstyle'/>
<cd:constant name='max' value='max'/>
<cd:constant name='maxdepth' value='maxdepth'/>
<cd:constant name='maxheight' value='inaltimemaxima'/>
@@ -806,6 +816,7 @@
<cd:constant name='menu' value='meniu'/>
<cd:constant name='method' value='metoda'/>
<cd:constant name='middle' value='mijloc'/>
+ <cd:constant name='middlecommand' value='middlecommand'/>
<cd:constant name='middlespeech' value='middlespeech'/>
<cd:constant name='middletext' value='textmijloc'/>
<cd:constant name='midsentence' value='midsentence'/>
@@ -814,6 +825,7 @@
<cd:constant name='minheight' value='inaltimeminima'/>
<cd:constant name='minwidth' value='latimeminima'/>
<cd:constant name='monthconversion' value='monthconversion'/>
+ <cd:constant name='more' value='more'/>
<cd:constant name='n' value='n'/>
<cd:constant name='name' value='nume'/>
<cd:constant name='namesep' value='namesep'/>
@@ -968,6 +980,7 @@
<cd:constant name='separator' value='separator'/>
<cd:constant name='set' value='set'/>
<cd:constant name='setups' value='setups'/>
+ <cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='parte'/>
<cd:constant name='sidealign' value='sidealign'/>
<cd:constant name='sidemethod' value='sidemethod'/>
@@ -976,6 +989,7 @@
<cd:constant name='sign' value='semn'/>
<cd:constant name='size' value='dimensiune'/>
<cd:constant name='small' value='mic'/>
+ <cd:constant name='solution' value='solution'/>
<cd:constant name='sort' value='sort'/>
<cd:constant name='sorttype' value='sorttype'/>
<cd:constant name='source' value='sursa'/>
@@ -1043,6 +1057,7 @@
<cd:constant name='toffset' value='toffset'/>
<cd:constant name='tolerance' value='toleranta'/>
<cd:constant name='top' value='sus'/>
+ <cd:constant name='topcommand' value='topcommand'/>
<cd:constant name='topdistance' value='distantasus'/>
<cd:constant name='topframe' value='framesus'/>
<cd:constant name='topoffset' value='offsetsus'/>
@@ -1703,6 +1718,7 @@
<cd:command name='switchtorawfont' value='trecilafontraw'/>
<cd:command name='sym' value='sim'/>
<cd:command name='symbol' value='simbol'/>
+ <cd:command name='symoffset' value='symoffset'/>
<cd:command name='synchronizationbar' value='barasincronizare'/>
<cd:command name='synchronize' value='sincronizeaza'/>
<cd:command name='tab' value='tab'/>
diff --git a/Master/texmf-dist/tex/context/sample/d-res-01.xml b/Master/texmf-dist/tex/context/sample/d-res-01.xml
new file mode 100644
index 00000000000..23f22cc3205
--- /dev/null
+++ b/Master/texmf-dist/tex/context/sample/d-res-01.xml
@@ -0,0 +1,50 @@
+<!-- DOCTYPE figurelibrary SYSTEM "x-res-00.xsd" -->
+
+<rl:library xmlns:rl="x-res-00.xsd" language="nl">
+
+ <rl:description>
+ <rl:organization>PRAGMA ADE</rl:organization>
+ <rl:project>context</rl:project>
+ <rl:product>figure demo</rl:product>
+ <rl:comment>This file demonstrates how to use the ConTeXt
+ figure base macros.</rl:comment>
+ </rl:description>
+
+ <rl:instance>
+ <rl:label>part of a dutch cow</rl:label>
+ <rl:original>a simple dutch cow</rl:original>
+ <rl:manipulation>
+ <rl:viewport width="2cm" height="2cm" hoffset="2cm" voffset="2cm"/>
+ <rl:background r=".8"/>
+ </rl:manipulation>
+ </rl:instance>
+
+ <rl:resource>
+ <rl:label>a simple dutch cow</rl:label>
+ <rl:file>cow.pdf</rl:file>
+ <rl:copyright>who knows</rl:copyright>
+ <rl:comment>This is just an example graphic.</rl:comment>
+ <rl:status>unknown</rl:status>
+ </rl:resource>
+
+ <rl:resource>
+ <rl:label>mill in hasselt</rl:label>
+ <rl:file>mill.png</rl:file>
+ <rl:copyright>johan jonker</rl:copyright>
+ <rl:comment>A Dutch mill.</rl:comment>
+ <rl:status>unknown</rl:status>
+ </rl:resource>
+
+ <rl:resource>
+ <rl:label>another simple dutch cow</rl:label>
+ <rl:file>cow.pdf</rl:file>
+    <rl:copyright>Who Knows</rl:copyright>
+ <rl:comment>Again some demo graphic.</rl:comment>
+ <rl:status>obsolete</rl:status>
+ <rl:manipulation>
+ <rl:viewport width="2cm" height="2cm" hoffset="2cm" voffset="2cm"/>
+ <rl:background s=".8"/>
+ </rl:manipulation>
+ </rl:resource>
+
+</rl:library>
diff --git a/Master/texmf-dist/tex/context/sample/lorem.tex b/Master/texmf-dist/tex/context/sample/lorem.tex
new file mode 100644
index 00000000000..0132f659f0a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/sample/lorem.tex
@@ -0,0 +1,11 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur massa turpis,
+semper quis fringilla ut, viverra nec risus. Pellentesque habitant morbi
+tristique senectus et netus et malesuada fames ac turpis egestas. Donec nunc
+lorem, sollicitudin vel sodales eget, vehicula nec mi. Proin ullamcorper rutrum
+nibh, at porttitor nunc euismod et. Donec faucibus nisi faucibus ipsum porttitor
+pharetra. Sed elementum, lectus nec congue imperdiet, ipsum leo viverra nisi, sit
+amet commodo odio odio id nisl. Fusce sagittis lobortis nisi sed consectetur. Nam
+egestas, sem ut fermentum convallis, ipsum tellus venenatis augue, eget
+condimentum risus quam id erat. Sed metus dui, sollicitudin pharetra pellentesque
+sed, placerat eget augue. Mauris sodales pretium tortor vitae rutrum. Proin quam
+sem, lobortis tincidunt pretium vitae, feugiat eu lacus.
diff --git a/Master/texmf-dist/tex/context/test/pdf-x1a-2001.mkiv b/Master/texmf-dist/tex/context/test/pdf-x1a-2001.mkiv
index a304c8963c3..f1c5ef53105 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x1a-2001.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x1a-2001.mkiv
@@ -1,6 +1,6 @@
% PDF/X-1a:2001
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-1a:2001,
diff --git a/Master/texmf-dist/tex/context/test/pdf-x1a-2003.mkiv b/Master/texmf-dist/tex/context/test/pdf-x1a-2003.mkiv
index 89a0f6066e8..7fbec5acbf4 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x1a-2003.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x1a-2003.mkiv
@@ -1,6 +1,6 @@
% PDF/X-1a:2003
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-1a:2003,
diff --git a/Master/texmf-dist/tex/context/test/pdf-x3-2002.mkiv b/Master/texmf-dist/tex/context/test/pdf-x3-2002.mkiv
index 1e2310135ec..be757212e0b 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x3-2002.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x3-2002.mkiv
@@ -1,6 +1,6 @@
% PDF/X-3:2002
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-3:2002,
diff --git a/Master/texmf-dist/tex/context/test/pdf-x3-2003.mkiv b/Master/texmf-dist/tex/context/test/pdf-x3-2003.mkiv
index bedcde527b6..cbebe1033f4 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x3-2003.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x3-2003.mkiv
@@ -1,6 +1,6 @@
% PDF/X-3:2003
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-3:2003,
diff --git a/Master/texmf-dist/tex/context/test/pdf-x4.mkiv b/Master/texmf-dist/tex/context/test/pdf-x4.mkiv
index 0566475b5af..062de3b3420 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x4.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x4.mkiv
@@ -2,7 +2,7 @@
% \nopdfcompression
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-4,
diff --git a/Master/texmf-dist/tex/context/test/pdf-x4p.mkiv b/Master/texmf-dist/tex/context/test/pdf-x4p.mkiv
index 9ec8897f811..bfee0b2383e 100644
--- a/Master/texmf-dist/tex/context/test/pdf-x4p.mkiv
+++ b/Master/texmf-dist/tex/context/test/pdf-x4p.mkiv
@@ -1,10 +1,10 @@
% PDF/X-4p
-\enabletrackers[backend.pdfx]
+\enabletrackers[backend.format,backend.variables]
\setupbackend
[format=PDF/X-4p,
- intent=ISOcoated_v2_eci.icc,
+ intent={ISO Coated v2 (ECI)},
profile=sRGB.icc] % test for default colorspace
\input pdf-x-common.mkiv
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
index bdbc3cf5188..288cfa2e15d 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -12,7 +12,8 @@ if context then
end
local dummyfunction = function() end
-local dummyreporter = function(c) return function(...) texio.write(c .. " : " .. string.format(...)) end end
+----- dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.format(...)) end end
+local dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.formatters(...)) end end
statistics = {
register = dummyfunction,
@@ -74,32 +75,42 @@ texconfig.kpse_init = true
resolvers = resolvers or { } -- no fancy file helpers used
local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- dfont = "truetype fonts", -- "truetype dictionary",
- cid = "cid maps",
- fea = "font feature files",
- pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
- pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ dfont = "truetype fonts", -- "truetype dictionary",
+ cid = "cid maps",
+ cidmap = "cid maps",
+ fea = "font feature files",
+ pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
}
function resolvers.findfile(name,fileformat)
- name = string.gsub(name,"\\","\/")
- fileformat = fileformat and string.lower(fileformat)
- local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.extname(name,"tex"))
+ name = string.gsub(name,"\\","/")
+ if not fileformat or fileformat == "" then
+ fileformat = file.suffix(name)
+ if fileformat == "" then
+ fileformat = "tex"
+ end
+ end
+ fileformat = string.lower(fileformat)
+ fileformat = remapper[fileformat] or fileformat
+ local found = kpse.find_file(name,fileformat)
if not found or found == "" then
found = kpse.find_file(name,"other text files")
end
return found
end
-function resolvers.findbinfile(name,fileformat)
- if not fileformat or fileformat == "" then
- fileformat = file.extname(name) -- string.match(name,"%.([^%.]-)$")
- end
- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
-end
+-- function resolvers.findbinfile(name,fileformat)
+-- if not fileformat or fileformat == "" then
+-- fileformat = file.suffix(name)
+-- end
+-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
+-- end
+
+resolvers.findbinfile = resolvers.findfile
function resolvers.resolve(s)
return s
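-- [editor's note, illustrative only, not part of the patch] the reworked
-- resolvers.findfile above derives the kpse format from the file suffix and
-- remaps it before calling kpse.find_file, e.g. for a hypothetical name:
--
--   resolvers.findfile("lmroman10-regular.otf")
--   -- suffix "otf" -> remapper -> kpse.find_file(name,"opentype fonts")
--   -- an empty suffix defaults to "tex"; anything not found is retried
--   -- with the generic "other text files" format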
@@ -206,15 +217,28 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
- if lucname and lfs.isfile(lucname) then
- texio.write(string.format("(load: %s)",lucname))
+ if lucname and lfs.isfile(lucname) then -- maybe also check for size
+ texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
end
- if not data and luaname and lfs.isfile(luaname) then
- texio.write(string.format("(load: %s)",luaname))
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ end
end
- return data and data()
end
end
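-- [editor's note, illustrative only, not part of the patch] caches.loaddata now
-- executes the loaded chunk and falls back to the plain lua file only when the
-- compiled luc file is missing or fails to load and run, roughly:
--
--   local chunk = lfs.isfile(lucname) and loadfile(lucname)
--   local data  = chunk and chunk()
--   if not data then
--     chunk = lfs.isfile(luaname) and loadfile(luaname)
--     data  = chunk and chunk()
--   end
--   return data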
@@ -241,21 +265,25 @@ end
-- this) in which case one should limit the method to luac and enable support
-- for execution.
-caches.compilemethod = "luac" -- luac dump both
+caches.compilemethod = "both"
function caches.compile(data,luaname,lucname)
local done = false
if caches.compilemethod == "luac" or caches.compilemethod == "both" then
- local command = "-o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)
- done = os.spawn("texluac " .. command) == 0
+ done = os.spawn("texluac -o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)) == 0
end
if not done and (caches.compilemethod == "dump" or caches.compilemethod == "both") then
- local d = table.serialize(data,true)
+ local d = io.loaddata(luaname)
+ if not d or d == "" then
+ d = table.serialize(data,true) -- slow
+ end
if d and d ~= "" then
local f = io.open(lucname,'w')
if f then
local s = loadstring(d)
- f:write(string.dump(s))
+ if s then
+ f:write(string.dump(s,true))
+ end
f:close()
end
end
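For orientation, a minimal usage sketch of the new default (the cache paths below are hypothetical and error handling is omitted): with compilemethod set to "both", texluac is tried first and stripped bytecode is dumped only when that fails.

    caches.compilemethod = "both"
    caches.compile(data, "/cache/fonts/foo.lua", "/cache/fonts/foo.luc")
    -- 1) texluac -o foo.luc -s foo.lua           (preferred)
    -- 2) string.dump(loadstring(source), true)   (fallback: stripped bytecode)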
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
index 151d98a8f31..ec515001e4a 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -63,6 +63,9 @@ nodes.glyphcodes = glyphcodes
local free_node = node.free
local remove_node = node.remove
local new_node = node.new
+local traverse_id = node.traverse_id
+
+local math_code = nodecodes.math
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
@@ -93,3 +96,9 @@ function nodes.pool.kern(k)
n.kern = k
return n
end
+
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+end
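A hedged usage sketch of the new helper (variable names are illustrative): given a math node, nodes.endofmath returns the next math node in the list, i.e. the one that closes the formula, so a handler can skip an inline formula in one step.

    local stop = nodes.endofmath(start)   -- start: a math node in the list
    if stop then
        current = stop.next               -- continue after the formula
    end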
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-ext.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-ext.lua
index d8884ccc74c..b60d0451228 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-ext.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-ext.lua
@@ -18,18 +18,14 @@ local otffeatures = fonts.constructors.newfeatures("otf")
local function initializeitlc(tfmdata,value)
if value then
- -- the magic 40 and it formula come from Dohyun Kim
- local parameters = tfmdata.parameters
+ -- the magic 40 and it formula come from Dohyun Kim but we might need another guess
+ local parameters = tfmdata.parameters
local italicangle = parameters.italicangle
if italicangle and italicangle ~= 0 then
- local uwidth = (parameters.uwidth or 40)/2
- for unicode, d in next, tfmdata.descriptions do
- local it = d.boundingbox[3] - d.width + uwidth
- if it ~= 0 then
- d.italic = it
- end
- end
- tfmdata.properties.hasitalics = true
+ local properties = tfmdata.properties
+ local factor = tonumber(value) or 1
+ properties.hasitalics = true
+ properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
end
end
end
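Net effect of the change above, as a worked example (defaults taken from the hunk): with the fallback uwidth of 40 and an itlc feature value of 1, a font with a nonzero italic angle now gets a single font-wide autoitalicamount of 1 * 40/2 = 20 units, instead of a per-glyph italic field computed from each bounding box.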
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
index ea509c3386b..2cb036df01e 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,13077 +1,11178 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/30/12 11:26:34
+-- merge date : 04/07/13 14:05:24
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
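-- [editor's note, not part of the merged file] inspect(value) runs each
-- registered inspector in turn and falls back to print(tostring(value)); the
-- l-lpeg closure below registers one, so inspect(lpeg.P("x")) prints the
-- pattern structure.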
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+local type=type
+local gsub,format=string.gsub,string.format
+local package=package
+local searchers=package.searchers or package.loaders
+local libpaths=nil
+local clibpaths=nil
+local libhash={}
+local clibhash={}
+local libextras={}
+local clibextras={}
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local helpers=package.helpers or {
+ libpaths=function() return {} end,
+ clibpaths=function() return {} end,
+ cleanpath=cleanpath,
+ trace=false,
+ report=function(...) print(format(...)) end,
+}
+package.helpers=helpers
+local function getlibpaths()
+ return libpaths or helpers.libpaths(libhash)
+end
+local function getclibpaths()
+ return clibpaths or helpers.clibpaths(clibhash)
+end
+package.libpaths=getlibpaths
+package.clibpaths=getclibpaths
+local function addpath(what,paths,extras,hash,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ if trace then
+ report("extra %s path: %s",what,path)
+ end
+ paths [#paths+1]=path
+ extras[#extras+1]=path
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return paths,extras
+end
+function package.extralibpath(...)
+ libpaths,libextras=addpath("lua",getlibpaths(),libextras,libhash,...)
+end
+function package.extraclibpath(...)
+ clibpaths,clibextras=addpath("lib",getclibpaths(),clibextras,clibhash,...)
+end
+if not searchers[-2] then
+ searchers[-2]=searchers[2]
+end
+searchers[2]=function(name)
+ return helpers.loaded(name)
+end
+searchers[3]=nil
+local function loadedaslib(resolved,rawname)
+ local init="luaopen_"..gsub(rawname,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+local function loadedbylua(name)
+ if helpers.trace then
+ helpers.report("locating '%s' using normal loader",name)
+ end
+ return true,searchers[-2](name)
+end
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ if trace then
+ report("locating '%s' as '%s' on '%s' paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ report("checking for '%s' using '%s' path '%s'",name,what,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ report("lib '%s' located on '%s'",name,resolved)
+ end
+ if islib then
+ return true,loadedaslib(resolved,rawname)
+ else
+ return true,loadfile(resolved)
+ end
+ end
+ end
+end
+local function notloaded(name)
+ if helpers.trace then
+ helpers.report("? unable to locate library '%s'",name)
+ end
+end
+helpers.loadedaslib=loadedaslib
+helpers.loadedbylua=loadedbylua
+helpers.loadedbypath=loadedbypath
+helpers.notloaded=notloaded
+function helpers.loaded(name)
+ local thename=gsub(name,"%.","/")
+ local luaname=addsuffix(thename,"lua")
+ local libname=addsuffix(thename,os.libsuffix or "so")
+ local libpaths=getlibpaths()
+ local clibpaths=getclibpaths()
+ local done,result=loadedbypath(luaname,name,libpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(luaname,name,clibpaths,false,"lua")
+ if done then
+ return result
+ end
+ local done,result=loadedbypath(libname,name,clibpaths,true,"lib")
+ if done then
+ return result
+ end
+ local done,result=loadedbylua(name)
+ if done then
+ return result
+ end
+ return notloaded(name)
+end
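-- [editor's note, not part of the merged file] helpers.loaded tries, in order:
-- <name>.lua on the lua paths, <name>.lua on the clib paths, then the shared
-- library <name>.<libsuffix> via package.loadlib with init luaopen_<name>
-- (dots mapped to underscores), and finally the original lua searcher;
-- package.extralibpath(...) and package.extraclibpath(...) extend those paths.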
-local string = string
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
-
--- some functions may disappear as they are not used anywhere
-
-if not string.split then
+end -- closure
- -- this will be overloaded by a faster lpeg variant
+do -- begin closure to overcome local limits and interference
- function string.split(str,pattern)
- local t = { }
- if #str > 0 then
- local n = 1
- for s in gmatch(str..pattern,"(.-)"..pattern) do
- t[n] = s
- n = n + 1
- end
- end
- return t
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
end
-
+ end
+ return splitter
end
-
-function string.unquoted(str)
- return (gsub(str,"^([\"\'])(.*)%1$","%2"))
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
-
---~ function stringunquoted(str)
---~ if find(str,"^[\'\"]") then
---~ return sub(str,2,-2)
---~ else
---~ return str
---~ end
---~ end
-
-function string.quoted(str)
- return format("%q",str) -- always "
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
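-- [editor's note, not part of the merged file] worked example for the decoder
-- above: "é" is the utf-8 byte pair 0xC3 0xA9 (195,169), so f2 returns
-- 195*64 + 169 - 12416 = 233, i.e. U+00E9.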
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
end
- return n
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
else
- return str
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
end
-
-local space = S(" \t\v\n")
-local nospace = 1 - space
-local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
-
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
end
-
-function string.is_empty(str)
- return not find(str,"%S")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old
+ end
+ return new
end
-
-local patterns_escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-function string.escapedpattern(str,simple)
- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
end
-
-function string.topattern(str,lowercase,strict)
- if str == "" then
- return ".*"
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
else
- str = gsub(str,".",simple_escapes)
- if lowercase then
- str = lower(str)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
end
- if strict then
- return "^" .. str .. "$"
+ else
+ if k==v then
+ p=P(k)
else
- return str
+ p=P(k)/v
end
+ end
end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
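-- [editor's note, not part of the merged file] stripzeros removes redundant
-- trailing zeros after a decimal point, e.g.
-- lpegmatch(lpeg.patterns.stripzeros,"margin=1.500pt scale=2.000") gives
-- "margin=1.5pt scale=2".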
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+functions=functions or {}
+function functions.dummy() end
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-
--- Starting with version 5.2 Lua no longer provide ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashion we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
-
-if not ipairs then
-
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
-
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
+end -- closure
- function ipairs(a)
- return iterate, a, 0
- end
+do -- begin closure to overcome local limits and interference
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
end
-
-if not pairs then
-
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
-
- function pairs(t)
- return next, t -- , nil
- end
-
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
end
-
--- Also, unpack has been moved to the table table, and for compatiility
--- reasons we provide both now.
-
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
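-- [editor's note, not part of the merged file] example: string.topattern("foo*.tex",false,true)
-- returns "^foo.*%.tex$": "*" becomes ".*", "." is escaped, and strict anchors the match.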
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
--- extra functions, some might go (when not used)
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
end
- return lst
+ end
+ return lst
end
-
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
end
return keys
+ else
+ return {}
+ end
end
-
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
end
-
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
+ category=3
end
+ end
end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category==0 or category==3 then
+ sort(srt,compare)
else
- sort(srt)
+ sort(srt)
end
return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
end
-
-local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
end
- sort(srt)
- return srt
+ end
+ return sortedkeys(keys)
end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
local function nothing() end
-
-local function sortedhash(t)
- if t then
- local n, s = 0, sortedkeys(t) -- the robust one
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash
-
-function table.append(t, list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
-end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- local lst = { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- t[k] = v
- end
- end
- return t
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
end
-
function table.merged(...)
- local tmp, lst = { }, { ... }
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
- end
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
end
- return tmp
+ end
+ return t
end
-
-function table.imerge(t, ...)
- local lst, nt = { ... }, #t
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
end
- return t
+ end
+ return t
end
-
function table.imerged(...)
- local tmp, ntmp, lst = { }, 0, {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k,v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
else
- return { }
- end
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
end
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent)
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
end
- return h
+ end
+ return h
end
-
function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
- end
- return hsh
-end
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
}
-
local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
end
+ end
+ return tt
end
- return nil
+ end
+ return nil
end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there no good number_to_string converter with the best resolution
-
+local propername=patterns.propername
local function dummy() end
-
local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
else
- local tn = type(name)
- if tn == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
+ handle(format("%s[%s]={",depth,name))
end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%s]=loadstring(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,f))
- else
- handle(format("%s [%q]=loadstring(%q),",depth,k,f))
- end
- end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
else
- if tk == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then -- or find(k,"^%d+$") then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
end
- --~ end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
- else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
+ else
+ do_serialize(v,k,depth,level+1)
+ end
else
- handle(name .. "={")
- end
- elseif tname == "number" then
- if hexify then
- handle(format("[0x%04X]={",name))
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
else
- handle("[" .. name .. "]={")
- end
- elseif tname == "boolean" then
- if name then
- handle("return {")
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
else
- handle("{")
- end
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
+ handle(name.."={")
end
- handle("}")
-end
-
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-
-table.tohandle = serialize
-
--- sometimes tables are real use (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
end
-end
-
-local function flattened(t,f,depth)
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume then only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- else
- f[k] = v
- end
- end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
end
- local n = #f
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- n = #f
- else
- n = n + 1
- f[n] = v
- end
+ if next(root) then
+ do_serialize(root,name,"",0)
end
- return f
+ end
+ handle("}")
end
-
-table.flattened = flattened
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { }
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
- else
- f[#f+1] = v
- end
- else
- f[#f+1] = v
- end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
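-- a usage sketch for the serializer above (the table and flags are just
-- examples): the name argument picks the prefix ("return" gives a loadable
-- chunk, a plain string gives 'name={') and the optional specification table
-- sets the flags read in serialize(): noquotes, hexify, reduce, functions,
-- compact and inline
-- local s = table.serialize({ "a", "b", n = 1 }, "return", { compact = true })
-- print(s) -- something like: return { "a", "b", ["n"]=1, }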
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
end
- return f
+ f:close()
+ io.flush()
+ end
end
-
-function table.unnest(t) -- bad name
- return unnest(t)
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
- else
- return false
- end
- end
- return true
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
+ else
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
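-- a usage sketch (the nested table is just an example): flattened copies hash
-- entries and appends the indexed entries of nested tables into one table; a
-- numeric second argument limits the recursion depth
-- local flat = table.flattened({ 1, { 2, 3 }, { { 4 } } })
-- -- flat now holds 1, 2, 3, 4 in its indexed part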
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
- end
- else
- return false
- end
+ end
end
return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
-
--- maybe also make a combined one
-
-function table.compact(t)
- if t then
- for k,v in next, t do
- if not next(v) then
- t[k] = nil
- end
- end
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
end
+ end
+ return true
end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
end
- return false
+ end
end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
end
- return n
+ end
+ return false
end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
---~ for i=1,#s do
---~ n[i] = s[i]
---~ end
- for k, v in next, s do
- n[k] = v
- end
- end
---~ for i=1,#t do
---~ local ti = t[i] -- don't ask but t[i] can be nil
---~ if ti then
---~ n[ti] = i
---~ end
---~ end
- for k, v in next, t do
- n[v] = k
- end
- return n
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
end
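-- a usage sketch (the tables are just examples): swapped inverts keys and
-- values, optionally seeded with a second table; mirrored keeps both directions
-- local s = table.swapped({ a = 1, b = 2 }) -- { [1] = "a", [2] = "b" }
-- local m = table.mirrored({ a = 1 })       -- { a = 1, [1] = "a" }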
-
function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
end
+ return t
+ end
end
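-- a usage sketch: table.reversed returns a fresh reversed copy, while the new
-- table.reverse swaps entries in place and returns the same table
-- local r = table.reversed({ 1, 2, 3 })        -- { 3, 2, 1 }, argument untouched
-- local t = { "a", "b", "c" } table.reverse(t) -- t is now { "c", "b", "a" }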
-
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- end
- return concat(s, sep or " | ")
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
end
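-- a usage sketch (values and separator are just examples): an indexed table is
-- rendered as its values, a hash as sorted key=value pairs, joined by sep
-- (default " | ")
-- print(table.sequenced({ "a", "b" }, ",")) -- a,b
-- print(table.sequenced({ x = 1, y = 2 }))  -- x=1 | y=2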
-
function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- table.tohandle(print,t,...)
- end
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
end
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
- return { unpack(t,i,j) }
+ return { unpack(t,i,j) }
end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
function table.is_empty(t)
- return not t or not next(t)
+ return not t or not next(t)
end
-
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and not next(t,next(t))
end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
end
-
--- new, might move (maybe duplicate)
-
function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
end
- return new
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
end
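-- a usage sketch: unique keeps the first occurrence of each value, sorted
-- sorts in place but also returns the table so calls can be chained
-- local u = table.unique({ "b", "a", "b" }) -- { "b", "a" }
-- local s = table.sorted({ 3, 1, 2 })       -- { 1, 2, 3 } (same table)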
-
--- function table.sorted(t,...)
--- table.sort(t,...)
--- return t -- still sorts in-place
--- end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
-local lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
-local report = texio and texio.write_nl or print
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-
--- probleem: separator can be lpeg and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
-function string.splitup(str,separator)
- if not separator then
- separator = ","
- end
- return match(splitters_m[separator] or splitat(separator),str)
-end
-
---~ local p = splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = splitat("->",true) print(match(p,"oeps")) -- oeps
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
---~ local linesplitter = Ct(content^0)
---~
---~ function string.splitlines(str)
---~ return match(linesplitter,str)
---~ end
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * tsplitat(newline)
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
-end
-
---~ lpeg.splitters = cache -- no longer public
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
+local function readall(f)
+ return f:read("*all")
end
-
---~ from roberto's site:
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
---~ local str = " a b c d "
-
---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.stripper("ab") print("["..lpeg.match(s,str).."]")
---~ local s = lpeg.keeper("ab") print("["..lpeg.match(s,str).."]")
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
else
- return Cs(((str^1)/"" + 1)^0)
+ step=floor(size/(1024*1024))*1024*1024/8
end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
else
- return Cs((((1-str)^1)/"" + 1)^0)
+ f:write(data or "")
end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
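-- a usage sketch (the file name is just an example): savedata accepts a
-- string, a table that is concatenated with the joiner, or a function that
-- receives the open handle; loaddata returns the whole file, or nil when it
-- is missing or empty
-- io.savedata("demo.txt", { "one", "two" }, "\n")
-- local data = io.loaddata("demo.txt") -- "one\ntwo"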
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
end
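-- a usage sketch (the file name is just an example): both helpers probe the
-- file by opening it, so they also work when lfs is not available
-- if io.exists("demo.txt") then print(io.size("demo.txt")) end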
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
+function io.noflines(f)
+ if type(f)=="string" then
+  -- f is a file name here: open it and count the lines on the handle
+  local f=io.open(f)
+  if f then
+   local n=io.noflines(f)
+   f:close()
+   return n
else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
+ return 0
end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
---~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
---~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
---~ print(3,match(lpeg.firstofsplit(":"),"bc"))
---~ print(4,match(lpeg.secondofsplit(":"),"bc:de"))
---~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty
---~ print(6,match(lpeg.secondofsplit(":",""),"bc"))
---~ print(7,match(lpeg.secondofsplit(":"),"bc"))
---~ print(9,match(lpeg.secondofsplit(":","123"),"bc"))
-
---~ -- slower:
---~
---~ function lpeg.counter(pattern)
---~ local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
---~ return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
---~ end
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
---~ lpeg.print(lpeg.R("ab","cd","gh"))
---~ lpeg.print(lpeg.P("a","b","c"))
---~ lpeg.print(lpeg.S("a","b","c"))
-
---~ print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à")))
---~ print(lpeg.count("äáàa",lpeg.UP("áà")))
---~ print(lpeg.count("äáàa",lpeg.US("àá")))
---~ print(lpeg.count("äáàa",lpeg.UR("aá")))
---~ print(lpeg.count("äáàa",lpeg.UR("àá")))
---~ print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF)))
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
end
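-- a usage sketch (the file name is just an example): both helpers return an
-- iterator plus the handle for a generic for; n selects how many items per
-- step and a negative n swaps the order (little endian data)
-- local f = io.open("demo.bin","rb")
-- if f then
--   for a,b in io.bytes(f,2) do
--     -- a and b are the values of the next two bytes
--   end
--   f:close()
-- end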
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we
--- loop back from the end cq. prepend.
-
-local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
-
-function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = fastcopy(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
- if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
- else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
end
- end
- return p
-end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
--- for k, v in next, t do
- for k, v in table.sortedhash(t) do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
end
+ end
end
- return p
+ end
end
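-- a usage sketch (prompt, default and options are just examples): ask keeps
-- prompting until the answer is empty (returning the default), equals one of
-- the options, or matches an option by prefix
-- local answer = io.ask("continue?", "yes", { "yes", "no" })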
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
end
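-- a usage sketch (the file name and offsets are just examples): readnumber
-- assembles a big endian unsigned integer from n bytes (a negative n reads
-- little endian); with a third argument both helpers first seek to offset n
-- and then read m bytes
-- local f = io.open("demo.bin","rb")
-- if f then
--   local tag = io.readstring(f,0,4) -- 4 bytes at offset 0, zero bytes stripped
--   local num = io.readnumber(f,4,4) -- big endian 32 bit value at offset 4
--   f:close()
-- end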
-
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
-
-function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
-end
-
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
- elseif str == "true" then
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
return true
- elseif str == "false" then
- return false
- else
- return str
- end
-end
-
-string.toboolean = toboolean
-
-function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
- end
- return default
-end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
-end
-
-if not math.div then
- function math.div(n,m) return floor(n/m) end
-end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
-end
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
-end
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
-end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
-
-file = file or { }
-local file = file
-
-local insert, concat = table.insert, table.concat
-local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-
-local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
-
-local function dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
-end
-
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
local function basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+ return name and lpegmatch(pattern,name) or name
end
-
--- local function nameonly(name)
--- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
--- end
-
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
local function nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
-end
-
-local function extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
-end
-
-local function splitname(name)
- local n, s = match(name,"^(.+)%.([^/\\]-)$")
- return n or name, s or ""
-end
-
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
-
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
-end
-
-function file.addsuffix(filename, suffix, criterium)
- if not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = splitname(filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
return filename
+ end
end
- else
- local n, s = splitname(filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
end
- return n .. "." .. suffix
+ end
end
+ return (n or filename).."."..suffix
+ end
end
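-- a usage sketch, restating the examples that used to accompany addsuffix:
-- file.addsuffix("name","new")           -- name.new
-- file.addsuffix("name.old","new")       -- name.old (a suffix is already there)
-- file.addsuffix("name.old","new",true)  -- name.old.new (always append)
-- file.addsuffix("name.old","new","old") -- name.old (kept: it matches the criterium)
-- file.addsuffix("name.old","new","foo") -- name.new (replaced: no match)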
-
---~ print("1 " .. file.addsuffix("name","new") .. " -> name.new")
---~ print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
---~ print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
---~ print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
---~ print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
---~ print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
---~ print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
---~ print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
end
-
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...) -- rather dirty
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if not a or a == "" then -- not a added
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
end
-
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
-
--- We should be able to use:
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(dirname(name,"."))
--- return a and sub(a.permissions,2,2) == "w"
--- end
---
--- But after some testing Taco and I came up with:
-
function file.is_writable(name)
- if lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
- return false
-end
-
-function file.is_readable(name)
- local a = attributes(name)
- return a and sub(a.permissions,1,1) == "r"
-end
-
-file.isreadable = file.is_readable -- depricated
-file.iswritable = file.is_writable -- depricated
-
--- todo: lpeg \\ / .. does not save much
-
-local checkedsplit = string.checkedsplit
-
-function file.splitpath(str,separator) -- string
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return concat(tab,separator or io.pathseparator) -- can have trailing //
-end
-
--- we can hash them weakly
-
---~ function file.collapsepath(str) -- fails on b.c/..
---~ str = gsub(str,"\\","/")
---~ if find(str,"/") then
---~ str = gsub(str,"^%./",(gsub(getcurrentdir(),"\\","/")) .. "/") -- ./xx in qualified
---~ str = gsub(str,"/%./","/")
---~ local n, m = 1, 1
---~ while n > 0 or m > 0 do
---~ str, n = gsub(str,"[^/%.]+/%.%.$","")
---~ str, m = gsub(str,"[^/%.]+/%.%./","")
---~ end
---~ str = gsub(str,"([^/])/$","%1")
---~ -- str = gsub(str,"^%./","") -- ./xx in qualified
---~ str = gsub(str,"/%.$","")
---~ end
---~ if str == "" then str = "." end
---~ return str
---~ end
---~
---~ The previous one fails on "a.b/c" so Taco came up with a split based
---~ variant. After some skyping we got it sort of compatible with the old
---~ one. After that the anchoring to currentdir was added in a better way.
---~ Of course there are some optimizations too. Finally we had to deal with
---~ windows drive prefixes and things like sys://.
-
-function file.collapsepath(str,anchor)
- if anchor and not find(str,"^/") and not find(str,"^%a:") then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif find(str,"^%.%.") then
- str = gsub(str,"\\","/")
- return str
- elseif not find(str,"%.") then
- str = gsub(str,"\\","/")
- return str
- end
- str = gsub(str,"\\","/")
- local starter, rest = match(str,"^(%a+:/*)(.-)$")
- if starter then
- str = rest
- end
- local oldelements = checkedsplit(str,"/")
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif find(str,"^/") then
- return "/" .. concat(newelements,'/')
- else
- return concat(newelements, '/')
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
+ end
+ return false
end
-
---~ local function test(str)
---~ print(string.format("%-20s %-15s %-15s",str,file.collapsepath(str),file.collapsepath(str,true)))
---~ end
---~ test("a/b.c/d") test("b.c/d") test("b.c/..")
---~ test("/") test("c:/..") test("sys://..")
---~ test("") test("./") test(".") test("..") test("./..") test("../..")
---~ test("a") test("./a") test("/a") test("a/../..")
---~ test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
---~ test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
-
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
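Editorial note, not part of the patch: a minimal usage sketch of the reworked splitpath/joinpath pair, assuming the l-file closure above is loaded; the search path string is hypothetical.

--~ local parts = file.splitpath("/usr/share/texmf:/usr/local/texmf",":")
--~ -- parts == { "/usr/share/texmf", "/usr/local/texmf" }
--~ print(file.joinpath(parts,":"))   -- reassembles the path with the same separator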
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
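Editorial note: the new lpeg based file.join special-cases network prefixes (// or a scheme such as http:) and root-based paths. A small sketch mirroring the commented-out tests removed above; the arguments are hypothetical and only the plain case's output is asserted.

--~ print(file.join("a","b","c.tex"))   -- a/b/c.tex
--~ print(file.join("/x/","/y"))        -- duplicate slashes are collapsed
--~ print(file.join("http://a","y"))    -- the scheme prefix is kept as is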
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
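Editorial note: file.collapsepath now splits on slashes instead of iterating gsub; the sketch below restates a few of the removed test() cases, assuming the module is loaded.

--~ print(file.collapsepath("a/b/../c"))      -- a/c
--~ print(file.collapsepath("a/./b/.."))      -- a
--~ print(file.collapsepath("b.c/.."))        -- "."  (the case the commented-out gsub version failed on)
--~ print(file.collapsepath("foo.tex",true))  -- anchored to the current directory first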
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
function file.robustname(str,strict)
- str = gsub(str,"[^%a%d%/%-%.\\]+","-")
+ if str then
+ str=lpegmatch(pattern_a,str) or str
if strict then
- return lower(gsub(str,"^%-*(.-)%-*$","%1"))
+ return lpegmatch(pattern_b,str) or str
else
- return str
+ return str
end
+ end
end
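Editorial note: a hedged example of the lpeg rewrite of file.robustname; the file names are made up.

--~ print(file.robustname("My Fancy File.tex"))      -- My-Fancy-File.tex
--~ print(file.robustname("--weird--name--",true))   -- strict mode also strips leading and trailing dashes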
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
+file.readdata=io.loaddata
+file.savedata=io.savedata
function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
-end
-
--- lpeg variants, slightly faster, not always
-
---~ local period = P(".")
---~ local slashes = S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * Cp() * ((noperiod^1 * period)^1 * Cp() + P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
-local rootbased = P("/") + letter*P(":")
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename)~=nil
end
-
function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
-end
-
--- actually these are schemes
-
-local slash = S("\\/")
-local period = P(".")
-local drive = C(R("az","AZ")) * P(":")
-local path = C(((1-slash)^0 * slash)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
-
-function file.splitname(str,splitdrive)
- if splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
+ return filename and lpegmatch(rootbased,filename)~=nil
end
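Editorial note: the qualified and rootbased patterns classify path strings; a short sketch with invented names, assuming the module is loaded.

--~ print(file.is_qualified_path("./foo.tex"))   -- true  (explicitly relative)
--~ print(file.is_qualified_path("foo.tex"))     -- false (a bare name, still to be looked up)
--~ print(file.is_rootbased_path("c:/texmf"))    -- true  (a drive letter counts as a root)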
-
-function file.nametotable(str,splitdrive) -- returns table
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = getcurrentdir
---~ function getcurrentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
-
--- for myself:
-
function file.strip(name,dir)
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local type = type
-
-if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
-else
- io.fileseparator, io.pathseparator = "/" , ":"
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
end
-
-function io.loaddata(filename,textmode)
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
- local data = f:read('*all')
- f:close()
- return data
- else
- return nil
- end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
- else
- return false
- end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- assert(f:close())
- return true
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
end
+ end
+ return default
end
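Editorial note: a usage sketch of the boolean helpers in the new l-boolean closure, assuming it is loaded as above.

--~ print(toboolean("true"))           -- true
--~ print(toboolean("1"))              -- false: only "true" counts when not tolerant
--~ print(toboolean("1",true))         -- true:  tolerant mode also accepts numbers and yes/on/t
--~ print(string.is_boolean("off"))    -- false
--~ print(string.is_boolean("maybe"))  -- nil, the default when nothing matches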
-function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- assert(f:close())
- return s
- end
-end
+end -- closure
-function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(filename)
- local n = f and io.noflines(f) or 0
- assert(f:close())
- return n
- else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
- end
-end
+do -- begin closure to overcome local limits and interference
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
- end
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
- end
-}
-
-function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
end
-
-function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
- end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
- end
- end
+if not math.mod then
+ function math.mod(n,m) return n%m end
end
-
-local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
end
-
-io.readnumber = readnumber
-
-function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"%z","")
- return str
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
end
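Editorial note: the l-math closure only installs fallbacks when the functions are missing; a quick sketch of what they return.

--~ print(math.round(2.5))   -- 3
--~ print(math.div(7,2))     -- 3
--~ print(math.mod(7,2))     -- 1
--~ print(math.sind(90))     -- 1, the d variants take degrees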
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
-
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luat-basics-gen'] = {
- version = 1.100,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4,0
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
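Editorial note: newrepeater caches padding strings and tabtospace uses them to expand tabs; a small sketch with an invented input, assuming the util-str closure is loaded.

--~ local spaces = strings.newrepeater(" ")
--~ print("["..spaces[3].."]")            -- [   ]  (widths are cached per repeater)
--~ print(strings.tabtospace("a\tb",4))   -- the tab is padded out to the next 4 column stop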
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
+end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
+end -- closure
-local dummyfunction = function() end
-local dummyreporter = function(c) return function(...) texio.write(c .. " : " .. string.format(...)) end end
+do -- begin closure to overcome local limits and interference
-statistics = {
- register = dummyfunction,
- starttiming = dummyfunction,
- stoptiming = dummyfunction,
- elapsedtime = nil,
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-directives = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function() end
+local dummyreporter=function(c) return function(...) texio.write_nl(c.." : "..string.formatters(...)) end end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
}
-
-trackers = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-experiments = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-storage = { -- probably no longer needed
- register = dummyfunction,
- shared = { },
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-logs = {
- new = dummyreporter,
- reporter = dummyreporter,
- messenger = dummyreporter,
- report = dummyfunction,
+storage={
+ register=dummyfunction,
+ shared={},
}
-
-callbacks = {
- register = function(n,f) return callback.register(n,f) end,
-
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
}
-
-utilities = {
- storage = {
- allocate = function(t) return t or { } end,
- mark = function(t) return t or { } end,
- },
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
}
-
-characters = characters or {
- data = { }
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
}
-
--- we need to cheat a bit here
-
-texconfig.kpse_init = true
-
-resolvers = resolvers or { } -- no fancy file helpers used
-
-local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- dfont = "truetype fonts", -- "truetype dictionary",
- cid = "cid maps",
- fea = "font feature files",
- pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
- pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+characters=characters or {
+ data={}
+}
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
}
-
function resolvers.findfile(name,fileformat)
- name = string.gsub(name,"\\","\/")
- fileformat = fileformat and string.lower(fileformat)
- local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.extname(name,"tex"))
- if not found or found == "" then
- found = kpse.find_file(name,"other text files")
- end
- return found
-end
-
-function resolvers.findbinfile(name,fileformat)
- if not fileformat or fileformat == "" then
- fileformat = file.extname(name) -- string.match(name,"%.([^%.]-)$")
- end
- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
-end
-
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
+resolvers.findbinfile=resolvers.findfile
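Editorial note: resolvers.findfile now derives and remaps the format itself, and findbinfile becomes a plain alias. A sketch with hypothetical file names; the results depend on the local TEXMF tree, so no output is asserted.

--~ print(resolvers.findfile("lmroman10-regular.otf"))       -- "otf" is remapped to "opentype fonts"
--~ print(resolvers.findfile("texgyrepagella-regular","otf"))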
function resolvers.resolve(s)
- return s
+ return s
end
-
function resolvers.unresolve(s)
- return s
+ return s
end
-
--- Caches ... I will make a real stupid version some day when I'm in the
--- mood. After all, the generic code does not need the more advanced
--- ConTeXt features. Cached data is not shared between ConTeXt and other
--- usage as I don't want any dependency at all. Also, ConTeXt might have
--- different needs and tricks added.
-
---~ containers.usecache = true
-
-caches = { }
-
-local writable, readables = nil, { }
-
-if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
- caches.namespace = 'generic'
+caches={}
+local writable,readables=nil,{}
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
end
-
do
-
- local cachepaths = kpse.expand_path('$TEXMFCACHE') or ""
-
- if cachepaths == "" then
- cachepaths = kpse.expand_path('$TEXMFVAR')
- end
-
- if cachepaths == "" then
- cachepaths = kpse.expand_path('$VARTEXMF')
- end
-
- if cachepaths == "" then
- cachepaths = "."
- end
-
- cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
-
- for i=1,#cachepaths do
- if file.is_writable(cachepaths[i]) then
- writable = file.join(cachepaths[i],"luatex-cache")
- lfs.mkdir(writable)
- writable = file.join(writable,caches.namespace)
- lfs.mkdir(writable)
- break
- end
- end
-
- for i=1,#cachepaths do
- if file.is_readable(cachepaths[i]) then
- readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
- end
- end
-
- if not writable then
- texio.write_nl("quitting: fix your writable cache path")
- os.exit()
- elseif #readables == 0 then
- texio.write_nl("quitting: fix your readable cache path")
- os.exit()
- elseif #readables == 1 and readables[1] == writable then
- texio.write(string.format("(using cache: %s)",writable))
- else
- texio.write(string.format("(using write cache: %s)",writable))
- texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
- end
-
+ local cachepaths=kpse.expand_path('$TEXMFCACHE') or ""
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$TEXMFVAR')
+ end
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$VARTEXMF')
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ if file.is_writable(cachepaths[i]) then
+ writable=file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+ texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+ texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
end
-
function caches.getwritablepath(category,subcategory)
- local path = file.join(writable,category)
- lfs.mkdir(path)
- path = file.join(path,subcategory)
- lfs.mkdir(path)
- return path
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
end
-
function caches.getreadablepaths(category,subcategory)
- local t = { }
- for i=1,#readables do
- t[i] = file.join(readables[i],category,subcategory)
- end
- return t
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
end
-
local function makefullname(path,name)
- if path and path ~= "" then
- name = "temp-" .. name -- clash prevention
- return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),"luc")
- end
+ if path and path~="" then
+ name="temp-"..name
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),"luc")
+ end
end
-
function caches.is_writable(path,name)
- local fullname = makefullname(path,name)
- return fullname and file.is_writable(fullname)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
end
-
function caches.loaddata(paths,name)
- for i=1,#paths do
- local data = false
- local luaname, lucname = makefullname(paths[i],name)
- if lucname and lfs.isfile(lucname) then
- texio.write(string.format("(load: %s)",lucname))
- data = loadfile(lucname)
- end
- if not data and luaname and lfs.isfile(luaname) then
- texio.write(string.format("(load: %s)",luaname))
- data = loadfile(luaname)
- end
- return data and data()
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load luc: %s)",lucname))
+ data=loadfile(lucname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ end
end
+ end
end
-
function caches.savedata(path,name,data)
- local luaname, lucname = makefullname(path,name)
- if luaname then
- texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce = true })
- if lucname and type(caches.compile) == "function" then
- os.remove(lucname) -- better be safe
- texio.write(string.format("(save: %s)",lucname))
- caches.compile(data,luaname,lucname)
- end
- end
-end
-
--- According to KH os.execute is not permitted in plain/latex so there is
--- no reason to use the normal context way. So the method here is slightly
--- different from the one we have in context. We also use different suffixes
--- as we don't want any clashes (sharing cache files is not that handy as
--- context moves on faster.)
---
--- Beware: serialization might fail on large files (so maybe we should pcall
--- this) in which case one should limit the method to luac and enable support
--- for execution.
-
-caches.compilemethod = "luac" -- luac dump both
-
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true,{ reduce=true })
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+caches.compilemethod="both"
function caches.compile(data,luaname,lucname)
- local done = false
- if caches.compilemethod == "luac" or caches.compilemethod == "both" then
- local command = "-o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)
- done = os.spawn("texluac " .. command) == 0
- end
- if not done and (caches.compilemethod == "dump" or caches.compilemethod == "both") then
- local d = table.serialize(data,true)
- if d and d ~= "" then
- local f = io.open(lucname,'w')
- if f then
- local s = loadstring(d)
- f:write(string.dump(s))
- f:close()
- end
+ local done=false
+ if caches.compilemethod=="luac" or caches.compilemethod=="both" then
+ done=os.spawn("texluac -o "..string.quoted(lucname).." -s "..string.quoted(luaname))==0
+ end
+ if not done and (caches.compilemethod=="dump" or caches.compilemethod=="both") then
+ local d=io.loaddata(luaname)
+ if not d or d=="" then
+ d=table.serialize(data,true)
+ end
+ if d and d~="" then
+ local f=io.open(lucname,'w')
+ if f then
+ local s=loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
end
+ f:close()
+ end
end
+ end
end
-
---
-
function table.setmetatableindex(t,f)
- setmetatable(t,{ __index = f })
+ setmetatable(t,{ __index=f })
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
-
-containers = containers or { }
-local containers = containers
-containers.usecache = true
-
-local report_containers = logs.reporter("resolvers","containers")
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
end
+ return s
+ end
end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
end
-
function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
end
- return data
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
end
-
function containers.content(container,name)
- return container.storage[name]
+ return container.storage[name]
end
-
function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
+ return (gsub(lower(name),"[^%w%d]+","-"))
end
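Editorial note: a minimal sketch of the container workflow (define, read, write) described in the ldx comment removed above; the category, name and payload here are hypothetical.

--~ local cache = containers.define("fonts","demo",1.001,true)
--~ local hash  = containers.cleanname("Some Font Name")   -- "some-font-name"
--~ local data  = containers.read(cache,hash)
--~ if not data then
--~   data = { value = "expensive to compute" }
--~   containers.write(cache,hash,data)                    -- stored with cache_version == 1.001
--~ end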
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-nod'] = {
- version = 1.001,
- comment = "companion to luatex-fonts.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
--- Don't depend on code here as it is only needed to complement the
--- font handler code.
-
--- Attributes:
-
-if tex.attribute[0] ~= 0 then
-
- texio.write_nl("log","!")
- texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
- texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
- texio.write_nl("log","!")
-
- tex.attribute[0] = 0 -- else no features
-
-end
-
-attributes = { }
-attributes.unsetvalue = -0x7FFFFFFF
-
-local numbers, last = { }, 127
-
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes={}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
function attributes.private(name)
- local number = numbers[name]
- if not number then
- if last < 255 then
- last = last + 1
- end
- number = last
- numbers[name] = number
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
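Editorial note: attributes.private hands out one private attribute number per name, counting up from 128; a tiny sketch with a made-up name.

--~ local a = attributes.private("myprivateattribute")
--~ print(a)   -- a number in the 128..255 range, stable for this name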
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+local math_code=nodecodes.math
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
end
- return number
+ end
+ return head,current,t
end
-
--- Nodes:
-
-nodes = { }
-nodes.pool = { }
-nodes.handlers = { }
-
-local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
-local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
-local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
-
-nodes.nodecodes = nodecodes
-nodes.whatcodes = whatcodes
-nodes.whatsitcodes = whatcodes
-nodes.glyphcodes = glyphcodes
-
-local free_node = node.free
-local remove_node = node.remove
-local new_node = node.new
-
-nodes.handlers.protectglyphs = node.protect_glyphs
-nodes.handlers.unprotectglyphs = node.unprotect_glyphs
-
-function nodes.remove(head, current, free_too)
- local t = current
- head, current = remove_node(head,current)
- if t then
- if free_too then
- free_node(t)
- t = nil
- else
- t.next, t.prev = nil, nil
- end
- end
- return head, current, t
-end
-
function nodes.delete(head,current)
- return nodes.remove(head,current,true)
+ return nodes.remove(head,current,true)
end
-
-nodes.before = node.insert_before
-nodes.after = node.insert_after
-
+nodes.before=node.insert_before
+nodes.after=node.insert_after
function nodes.pool.kern(k)
- local n = new_node("kern",1)
- n.kern = k
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
return n
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-ini'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- basemethods -> can also be in list
--- presetcontext -> defaults
--- hashfeatures -> ctx version
-
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
-
-local lower = string.lower
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local report_defining = logs.reporter("fonts","defining")
-
-fontloader.totable = fontloader.to_table
-
-fonts = fonts or { } -- already defined in context
-local fonts = fonts
-
--- some of these might move to where they are used first:
-
-fonts.hashes = { identifiers = allocate() }
-fonts.analyzers = { } -- not needed here
-fonts.readers = { }
-fonts.tables = { }
-fonts.definers = { methods = { } }
-fonts.specifiers = fonts.specifiers or { } -- in format !
-fonts.loggers = { register = function() end }
-fonts.helpers = { }
-
-fonts.tracers = { } -- for the moment till we have move to moduledata
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-con'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-
--- some names of table entries will be changed (no _)
-
-local utf = unicode.utf8
-
-local next, tostring, rawget = next, tostring, rawget
-local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub
-local utfbyte = utf.byte
-local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy
-local derivetable = table.derive
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
-
-local report_defining = logs.reporter("fonts","defining")
-
--- watch out: no negative depths and negative heights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
-
-local fonts = fonts
-local constructors = { }
-fonts.constructors = constructors
-local handlers = { }
-fonts.handlers = handlers
-
-local specifiers = fonts.specifiers
-local contextsetups = specifiers.contextsetups
-local contextnumbers = specifiers.contextnumbers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
--- will be directives
-
-constructors.dontembed = allocate()
-constructors.autocleanup = true
-constructors.namemode = "fullpath" -- will be a function
-
-constructors.version = 1.01
-constructors.cache = containers.define("fonts", "constructors", constructors.version, false)
-
-constructors.privateoffset = 0xF0000 -- 0x10FFFF
-
--- Some experimental helpers (handy for tracing):
---
--- todo: extra:
---
--- extra_space => space.extra
--- space => space.width
--- space_stretch => space.stretch
--- space_shrink => space.shrink
-
--- We do keep the x-height, extra_space, space_shrink and space_stretch
--- around as these are low level official names.
-
-constructors.keys = {
- properties = {
- encodingbytes = "number",
- embedding = "number",
- cidinfo = {
- },
- format = "string",
- fontname = "string",
- fullname = "string",
- filename = "filename",
- psname = "string",
- name = "string",
- virtualized = "boolean",
- hasitalics = "boolean",
- autoitalicamount = "basepoints",
- nostackmath = "boolean",
- noglyphnames = "boolean",
- mode = "string",
- hasmath = "boolean",
- mathitalics = "boolean",
- textitalics = "boolean",
- finalized = "boolean",
- },
- parameters = {
- mathsize = "number",
- scriptpercentage = "float",
- scriptscriptpercentage = "float",
- units = "cardinal",
- designsize = "scaledpoints",
- expansion = {
- stretch = "integerscale", -- might become float
- shrink = "integerscale", -- might become float
- step = "integerscale", -- might become float
- auto = "boolean",
- },
- protrusion = {
- auto = "boolean",
- },
- slantfactor = "float",
- extendfactor = "float",
- factor = "float",
- hfactor = "float",
- vfactor = "float",
- size = "scaledpoints",
- units = "scaledpoints",
- scaledpoints = "scaledpoints",
- slantperpoint = "scaledpoints",
- spacing = {
- width = "scaledpoints",
- stretch = "scaledpoints",
- shrink = "scaledpoints",
- extra = "scaledpoints",
- },
- xheight = "scaledpoints",
- quad = "scaledpoints",
- ascender = "scaledpoints",
- descender = "scaledpoints",
- synonyms = {
- space = "spacing.width",
- spacestretch = "spacing.stretch",
- spaceshrink = "spacing.shrink",
- extraspace = "spacing.extra",
- x_height = "xheight",
- space_stretch = "spacing.stretch",
- space_shrink = "spacing.shrink",
- extra_space = "spacing.extra",
- em = "quad",
- ex = "xheight",
- slant = "slantperpoint",
- },
- },
- description = {
- width = "basepoints",
- height = "basepoints",
- depth = "basepoints",
- boundingbox = { },
- },
- character = {
- width = "scaledpoints",
- height = "scaledpoints",
- depth = "scaledpoints",
- italic = "scaledpoints",
- },
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
}
-
--- This might become an interface:
-
-local designsizes = allocate()
-constructors.designsizes = designsizes
-local loadedfonts = allocate()
-constructors.loadedfonts = loadedfonts
-
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
-
-local factors = {
- pt = 65536.0,
- bp = 65781.8,
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
}
-
function constructors.setfactor(f)
- constructors.factor = factors[f or 'pt'] or factors.pt
+ constructors.factor=factors[f or 'pt'] or factors.pt
end
-
constructors.setfactor()
-
-function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well
- if scaledpoints < 0 then
- if designsize then
- local factor = constructors.factor
- if designsize > factor then -- or just 1000 / when? mp?
- return (- scaledpoints/1000) * designsize -- sp's
- else
- return (- scaledpoints/1000) * designsize * factor
- end
- else
- return (- scaledpoints/1000) * 10 * factor
- end
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ if designsize then
+ local factor=constructors.factor
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
else
- return scaledpoints
+ return (- scaledpoints/1000)*10*factor
end
+ else
+ return scaledpoints
+ end
end
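+-- A negative size is the signal for "scale relative to the design size": -1000
+-- means the design size itself, -1200 means 1.2 times the design size, while a
+-- positive value is already in scaled points and passes through unchanged. A
+-- minimal sketch (the numbers are illustrative only):
+--
+--   constructors.setfactor("pt")
+--   constructors.scaled(12*65536)        -- already sp, returned as-is
+--   constructors.scaled(-1200,10*65536)  -- design size given in sp -> 1.2 * 10pt in sp
+--   constructors.scaled(-1200,10)        -- design size given in pt -> 1.2 * 10pt in sp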
-
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
--- The scaler is only used for otf and afm and virtual fonts. If
--- a virtual font has italic correction make sure to set the
--- hasitalics flag. Some more flags will be added in
--- the future.
-
---[[ldx--
-<p>The reason why the scaler was originally split is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A day
-wasted, but an experience richer.</p>
---ldx]]--
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
--- actually we already have some of that now as virtual keys in glyphs
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
function constructors.cleanuptable(tfmdata)
- if constructors.autocleanup and tfmdata.properties.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
end
+ end
end
-
--- experimental, sharing kerns (unscaled and scaled) saves memory
--- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata)
--- loop over descriptions (afm and otf have descriptions, tfm not)
--- there is no need (yet) to assign a value to chr.tonunicode
-
--- constructors.prepare_base_kerns(tfmdata) -- optimization
-
--- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename
--- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files
--- can have multiple subfonts
-
function constructors.calculatescale(tfmdata,scaledpoints)
- local parameters = tfmdata.parameters
- if scaledpoints < 0 then
- scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp
- end
- return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta
-end
-
-local unscaled = {
- ScriptPercentScaleDown = true,
- ScriptScriptPercentScaleDown = true,
- RadicalDegreeBottomRaisePercent = true
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
}
-
-function constructors.assignmathparameters(target,original) -- simple variant, not used in context
- -- when a tfm file is loaded, it has already been scaled
- -- and it never enters the scaled so this is otf only and
- -- even then we do some extra in the context math plugins
- local mathparameters = original.mathparameters
- if mathparameters and next(mathparameters) then
- local targetparameters = target.parameters
- local targetproperties = target.properties
- local targetmathparameters = { }
- local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor
- for name, value in next, mathparameters do
- if unscaled[name] then
- targetmathparameters[name] = value
- else
- targetmathparameters[name] = value * factor
- end
- end
- if not targetmathparameters.FractionDelimiterSize then
- targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size
- end
- target.mathparameters = targetmathparameters
- end
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
end
-
function constructors.beforecopyingcharacters(target,original)
- -- can be used for additional tweaking
end
-
function constructors.aftercopyingcharacters(target,original)
- -- can be used for additional tweaking
end
-
function constructors.enhanceparameters(parameters)
- local xheight = parameters.x_height
- local quad = parameters.quad
- local space = parameters.space
- local stretch = parameters.space_stretch
- local shrink = parameters.space_shrink
- local extra = parameters.extra_space
- local slant = parameters.slant
- parameters.xheight = xheight
- parameters.spacestretch = stretch
- parameters.spaceshrink = shrink
- parameters.extraspace = extra
- parameters.em = quad
- parameters.ex = xheight
- parameters.slantperpoint = slant
- parameters.spacing = {
- width = space,
- stretch = stretch,
- shrink = shrink,
- extra = extra,
- }
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
end
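+-- enhanceparameters keeps the low level names (x_height, space_stretch and
+-- friends) and adds the friendlier aliases plus a grouped spacing table next
+-- to them, so afterwards (actual values depend on the font, of course):
+--
+--   parameters.ex      -- same value as parameters.x_height
+--   parameters.em      -- same value as parameters.quad
+--   parameters.spacing -- { width=space, stretch=space_stretch, shrink=space_shrink, extra=extra_space }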
-
function constructors.scale(tfmdata,specification)
- local target = { } -- the new table
- --
- if tonumber(specification) then
- specification = { size = specification }
- end
- --
- local scaledpoints = specification.size
- local relativeid = specification.relativeid
- --
- local properties = tfmdata.properties or { }
- local goodies = tfmdata.goodies or { }
- local resources = tfmdata.resources or { }
- local descriptions = tfmdata.descriptions or { } -- bad news if empty
- local characters = tfmdata.characters or { } -- bad news if empty
- local changed = tfmdata.changed or { } -- for base mode
- local shared = tfmdata.shared or { }
- local parameters = tfmdata.parameters or { }
- local mathparameters = tfmdata.mathparameters or { }
- --
- local targetcharacters = { }
- local targetdescriptions = derivetable(descriptions)
- local targetparameters = derivetable(parameters)
- local targetproperties = derivetable(properties)
- local targetgoodies = goodies -- we need to loop so no metatable
- target.characters = targetcharacters
- target.descriptions = targetdescriptions
- target.parameters = targetparameters
- -- target.mathparameters = targetmathparameters -- happens elsewhere
- target.properties = targetproperties
- target.goodies = targetgoodies
- target.shared = shared
- target.resources = resources
- target.unscaled = tfmdata -- the original unscaled one
- --
- -- specification.mathsize : 1=text 2=script 3=scriptscript
- -- specification.textsize : natural (text)size
- -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes)
- --
- local mathsize = tonumber(specification.mathsize) or 0
- local textsize = tonumber(specification.textsize) or scaledpoints
- local forcedsize = tonumber(parameters.mathsize ) or 0
- local extrafactor = tonumber(specification.factor ) or 1
- if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then
- scaledpoints = parameters.scriptpercentage * textsize / 100
- elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then
- scaledpoints = parameters.scriptscriptpercentage * textsize / 100
- elseif forcedsize > 1000 then -- safeguard
- scaledpoints = forcedsize
- end
- --
- local tounicode = resources.tounicode
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local units = parameters.units or 1000
- --
- if target.fonts then
- target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards
- end
- --
- -- boundary keys are no longer needed as we now have a string 'right_boundary'
- -- that can be used in relevant tables (kerns and ligatures) ... not that I ever
- -- used them
- --
- -- boundarychar_label = 0, -- not needed
- -- boundarychar = 65536, -- there is now a string 'right_boundary'
- -- false_boundarychar = 65536, -- produces invalid tfm in luatex
- --
- targetproperties.language = properties.language or "dflt" -- inherited
- targetproperties.script = properties.script or "dflt" -- inherited
- targetproperties.mode = properties.mode or "base" -- inherited
- --
- local askedscaledpoints = scaledpoints
- local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, can be redefined
- --
- local hdelta = delta
- local vdelta = delta
- --
- target.designsize = parameters.designsize -- not really needed so it might become obsolete
- target.units_per_em = units -- just a trigger for the backend (does luatex use this? if not it will go)
- --
- local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all
- target.direction = direction
- properties.direction = direction
- --
- target.size = scaledpoints
- --
- target.encodingbytes = properties.encodingbytes or 1
- target.embedding = properties.embedding or "subset"
- target.tounicode = 1
- target.cidinfo = properties.cidinfo
- target.format = properties.format
- --
- local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on
- local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although
- local filename = properties.filename or tfmdata.filename -- that is not the right place to
- local psname = properties.psname or tfmdata.psname -- pass them
- local name = properties.name or tfmdata.name
- --
- if not psname or psname == "" then
- -- name used in pdf file as well as for selecting subfont in ttc/dfont
- psname = fontname or (fullname and fonts.names.cleanname(fullname))
- end
- target.fontname = fontname
- target.fullname = fullname
- target.filename = filename
- target.psname = psname
- target.name = name
- --
- properties.fontname = fontname
- properties.fullname = fullname
- properties.filename = filename
- properties.psname = psname
- properties.name = name
- -- expansion (hz)
- local expansion = parameters.expansion
- if expansion then
- target.stretch = expansion.stretch
- target.shrink = expansion.shrink
- target.step = expansion.step
- target.auto_expand = expansion.auto
- end
- -- protrusion
- local protrusion = parameters.protrusion
- if protrusion then
- target.auto_protrude = protrusion.auto
- end
- -- widening
- local extendfactor = parameters.extendfactor or 0
- if extendfactor ~= 0 and extendfactor ~= 1 then
- hdelta = hdelta * extendfactor
- target.extend = extendfactor * 1000 -- extent ?
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=resources.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local nodemode=properties.mode=="node"
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ local useitalics=true
+ if hasmath then
+ autoitalicamount=false
+ elseif properties.textitalics then
+ italickey="italic_correction"
+ useitalics=false
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index,touni
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ if tounicode then
+ touni=tounicode[index]
+ if not touni then
+ local d=descriptions[unicode] or characters[unicode]
+ local i=d.index or unicode
+ touni=tounicode[i]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
else
- target.extend = 1000 -- extent ?
- end
- -- slanting
- local slantfactor = parameters.slantfactor or 0
- if slantfactor ~= 0 then
- target.slant = slantfactor * 1000
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
else
- target.slant = 0
- end
- --
- targetparameters.factor = delta
- targetparameters.hfactor = hdelta
- targetparameters.vfactor = vdelta
- targetparameters.size = scaledpoints
- targetparameters.units = units
- targetparameters.scaledpoints = askedscaledpoints
- --
- local isvirtual = properties.virtualized or tfmdata.type == "virtual"
- local hasquality = target.auto_expand or target.auto_protrude
- local hasitalics = properties.hasitalics
- local autoitalicamount = properties.autoitalicamount
- local stackmath = not properties.nostackmath
- local nonames = properties.noglyphnames
- local nodemode = properties.mode == "node"
- --
- if changed and not next(changed) then
- changed = false
- end
- --
- target.type = isvirtual and "virtual" or "real"
- --
- target.postprocessors = tfmdata.postprocessors
- --
- local targetslant = (parameters.slant or parameters[1] or 0)
- local targetspace = (parameters.space or parameters[2] or 0)*hdelta
- local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta
- local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta
- local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta
- local targetquad = (parameters.quad or parameters[6] or 0)*hdelta
- local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta
- --
- targetparameters.slant = targetslant -- slantperpoint
- targetparameters.space = targetspace
- targetparameters.space_stretch = targetspace_stretch
- targetparameters.space_shrink = targetspace_shrink
- targetparameters.x_height = targetx_height
- targetparameters.quad = targetquad
- targetparameters.extra_space = targetextra_space
- --
- local ascender = parameters.ascender
- if ascender then
- targetparameters.ascender = delta * ascender
- end
- local descender = parameters.descender
- if descender then
- targetparameters.descender = delta * descender
- end
- --
- constructors.enhanceparameters(targetparameters) -- official copies for us
- --
- local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0
- local scaledwidth = defaultwidth * hdelta
- local scaledheight = defaultheight * vdelta
- local scaleddepth = defaultdepth * vdelta
- --
- if trace_defining then
- report_defining("scaling by (%s,%s): name '%s', fullname: '%s', filename: '%s'",
- hdelta,vdelta,name or "noname",fullname or "nofullname",filename or "nofilename")
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ if touni then
+ chr.tounicode=touni
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
end
- --
- local hasmath = (properties.hasmath or next(mathparameters)) and true
- if hasmath then
- if trace_defining then
- report_defining("math enabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed
- properties.hasmath = true
- target.nomath = false
- target.MathConstants = target.mathparameters
- else
- if trace_defining then
- report_defining("math disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- properties.hasmath = false
- target.nomath = true
- target.mathparameters = nil -- nop
- end
- --
- local italickey = "italic"
- --
- -- some context specific trickery (this will move to a plugin)
- --
if hasmath then
- if properties.mathitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("math italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- end
- autoitalicamount = false -- new
- else
- if properties.textitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("text italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- if properties.delaytextitalics then
- autoitalicamount = false
- end
- end
- end
- --
- -- end of context specific trickery
- --
- constructors.beforecopyingcharacters(target,tfmdata)
- --
- local sharedkerns = { }
- --
- -- we can have a dumb mode (basemode without math etc) that skips most
- --
- for unicode, character in next, characters do
- local chr, description, index, touni
- if changed then
- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
- local c = changed[unicode]
- if c then
- description = descriptions[c] or descriptions[unicode] or character
- character = characters[c] or character
- index = description.index or c
- if tounicode then
- touni = tounicode[index] -- nb: index!
- if not touni then -- goodie
- local d = descriptions[unicode] or characters[unicode]
- local i = d.index or unicode
- touni = tounicode[i] -- nb: index!
- end
- end
- else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
- local width = description.width
- local height = description.height
- local depth = description.depth
- if width then width = hdelta*width else width = scaledwidth end
- if height then height = vdelta*height else height = scaledheight end
- -- if depth then depth = vdelta*depth else depth = scaleddepth end
- if depth and depth ~= 0 then
- depth = delta*depth
- if nonames then
- chr = {
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- end
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if not nodemode then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
else
- -- this saves a little bit of time and memory, esp for big cjk fonts
- if nonames then
- chr = {
- index = index,
- height = height,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- width = width,
- }
- end
- end
- if touni then
- chr.tounicode = touni
- end
- -- if trace_scaling then
- -- report_defining("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or "",index or 0,description.name or '-',description.class or '-')
- -- end
- if hasquality then
- -- we could move these calculations elsewhere (saves calculations)
- local ve = character.expansion_factor
- if ve then
- chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
- end
- local vl = character.left_protruding
- if vl then
- chr.left_protruding = protrusionfactor*width*vl
- end
- local vr = character.right_protruding
- if vr then
- chr.right_protruding = protrusionfactor*width*vr
- end
- end
- --
- if autoitalicamount then
- local vi = description.italic
- if not vi then
- local vi = description.boundingbox[3] - description.width + autoitalicamount
- if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic
- chr[italickey] = vi*hdelta
- end
- elseif vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- elseif hasitalics then
- local vi = description.italic
- if vi and vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- end
- -- to be tested
- if hasmath then
- -- todo, just operate on descriptions.math
- local vn = character.next
- if vn then
- chr.next = vn
- -- if character.vert_variants or character.horiz_variants then
- -- report_defining("glyph U+%05X has combination of next, vert_variants and horiz_variants",index)
- -- end
- else
- local vv = character.vert_variants
- if vv then
- local t = { }
- for i=1,#vv do
- local vvi = vv[i]
- t[i] = {
- ["start"] = (vvi["start"] or 0)*vdelta,
- ["end"] = (vvi["end"] or 0)*vdelta,
- ["advance"] = (vvi["advance"] or 0)*vdelta,
- ["extender"] = vvi["extender"],
- ["glyph"] = vvi["glyph"],
- }
- end
- chr.vert_variants = t
- else
- local hv = character.horiz_variants
- if hv then
- local t = { }
- for i=1,#hv do
- local hvi = hv[i]
- t[i] = {
- ["start"] = (hvi["start"] or 0)*hdelta,
- ["end"] = (hvi["end"] or 0)*hdelta,
- ["advance"] = (hvi["advance"] or 0)*hdelta,
- ["extender"] = hvi["extender"],
- ["glyph"] = hvi["glyph"],
- }
- end
- chr.horiz_variants = t
- end
- end
- end
- local va = character.top_accent
- if va then
- chr.top_accent = vdelta*va
- end
- if stackmath then
- local mk = character.mathkerns -- not in math ?
- if mk then
- local kerns = { }
- local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_right = k end
- local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_left = k end
- local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_left = k end
- local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_right = k end
- chr.mathkern = kerns -- singular -> should be patched in luatex !
- end
- end
- end
- if not nodemode then
- local vk = character.kerns
- if vk then
- local s = sharedkerns[vk]
- if not s then
- s = { }
- for k,v in next, vk do s[k] = v*hdelta end
- sharedkerns[vk] = s
- end
- chr.kerns = s
- end
- local vl = character.ligatures
- if vl then
- if true then
- chr.ligatures = vl -- shared
- else
- local tt = { }
- for i,l in next, vl do
- tt[i] = l
- end
- chr.ligatures = tt
- end
- end
- end
- if isvirtual then
- local vc = character.commands
- if vc then
- -- we assume non scaled commands here
- -- tricky .. we need to scale pseudo math glyphs too
- -- which is why we deal with rules too
- local ok = false
- for i=1,#vc do
- local key = vc[i][1]
- if key == "right" or key == "down" then
- ok = true
- break
- end
- end
- if ok then
- local tt = { }
- for i=1,#vc do
- local ivc = vc[i]
- local key = ivc[1]
- if key == "right" then
- tt[i] = { key, ivc[2]*hdelta }
- elseif key == "down" then
- tt[i] = { key, ivc[2]*vdelta }
- elseif key == "rule" then
- tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
- else -- not comment
- tt[i] = ivc -- shared since in cache and untouched
- end
- end
- chr.commands = tt
- else
- chr.commands = vc
- end
- chr.index = nil
- end
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
end
- targetcharacters[unicode] = chr
+ chr.index=nil
+ end
end
- --
- constructors.aftercopyingcharacters(target,tfmdata)
- --
- return target
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ return target
end
-
function constructors.finalize(tfmdata)
- if tfmdata.properties and tfmdata.properties.finalized then
- return
- end
- --
- if not tfmdata.characters then
- return nil
- end
- --
- if not tfmdata.goodies then
- tfmdata.goodies = { } -- context specific
- end
- --
- local parameters = tfmdata.parameters
- if not parameters then
- return nil
- end
- --
- if not parameters.expansion then
- parameters.expansion = {
- stretch = tfmdata.stretch or 0,
- shrink = tfmdata.shrink or 0,
- step = tfmdata.step or 0,
- auto = tfmdata.auto_expand or false,
- }
- end
- --
- if not parameters.protrusion then
- parameters.protrusion = {
- auto = auto_protrude
- }
- end
- --
- if not parameters.size then
- parameters.size = tfmdata.size
- end
- --
- if not parameters.extendfactor then
- parameters.extendfactor = tfmdata.extend or 0
- end
- --
- if not parameters.slantfactor then
- parameters.slantfactor = tfmdata.slant or 0
- end
- --
- if not parameters.designsize then
- parameters.designsize = tfmdata.designsize or 655360
- end
- --
- if not parameters.units then
- parameters.units = tfmdata.units_per_em or 1000
- end
- --
- if not tfmdata.descriptions then
- local descriptions = { } -- yes or no
- setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end)
- tfmdata.descriptions = descriptions
- end
- --
- local properties = tfmdata.properties
- if not properties then
- properties = { }
- tfmdata.properties = properties
- end
- --
- if not properties.virtualized then
- properties.virtualized = tfmdata.type == "virtual"
- end
- --
- if not tfmdata.properties then
- tfmdata.properties = {
- fontname = tfmdata.fontname,
- filename = tfmdata.filename,
- fullname = tfmdata.fullname,
- name = tfmdata.name,
- psname = tfmdata.psname,
- --
- encodingbytes = tfmdata.encodingbytes or 1,
- embedding = tfmdata.embedding or "subset",
- tounicode = tfmdata.tounicode or 1,
- cidinfo = tfmdata.cidinfo or nil,
- format = tfmdata.format or "type1",
- direction = tfmdata.direction or 0,
- }
- end
- if not tfmdata.resources then
- tfmdata.resources = { }
- end
- if not tfmdata.shared then
- tfmdata.shared = { }
- end
- --
- -- tfmdata.fonts
- -- tfmdata.unscaled
- --
- if not properties.hasmath then
- properties.hasmath = not tfmdata.nomath
- end
- --
- tfmdata.MathConstants = nil
- tfmdata.postprocessors = nil
- --
- tfmdata.fontname = nil
- tfmdata.filename = nil
- tfmdata.fullname = nil
- tfmdata.name = nil -- most tricky part
- tfmdata.psname = nil
- --
- tfmdata.encodingbytes = nil
- tfmdata.embedding = nil
- tfmdata.tounicode = nil
- tfmdata.cidinfo = nil
- tfmdata.format = nil
- tfmdata.direction = nil
- tfmdata.type = nil
- tfmdata.nomath = nil
- tfmdata.designsize = nil
- --
- tfmdata.size = nil
- tfmdata.stretch = nil
- tfmdata.shrink = nil
- tfmdata.step = nil
- tfmdata.auto_expand = nil
- tfmdata.auto_protrude = nil
- tfmdata.extend = nil
- tfmdata.slant = nil
- tfmdata.units_per_em = nil
- --
- properties.finalized = true
- --
- return tfmdata
-end
-
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
-
-local hashmethods = { }
-constructors.hashmethods = hashmethods
-
-function constructors.hashfeatures(specification) -- will be overloaded
- local features = specification.features
- if features then
- local t, tn = { }, 0
- for category, list in next, features do
- if next(list) then
- local hasher = hashmethods[category]
- if hasher then
- local hash = hasher(list)
- if hash then
- tn = tn + 1
- t[tn] = category .. ":" .. hash
- end
- end
- end
- end
- if tn > 0 then
- return concat(t," & ")
- end
- end
- return "unknown"
-end
-
-hashmethods.normal = function(list)
- local s = { }
- local n = 0
- for k, v in next, list do
- if k ~= "number" and k ~= "features" then -- I need to figure this out, features
- n = n + 1
- s[n] = k
- end
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+ auto=auto_protrude
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or 655360
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ properties.finalized=true
+ return tfmdata
+end
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
end
- if n > 0 then
- sort(s)
- for i=1,n do
- local k = s[i]
- s[i] = k .. '=' .. tostring(list[k])
- end
- return concat(s,"+")
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
end
+ return concat(s,"+")
+ end
end
-
---[[ldx--
-<p>In principle we can share tfm tables when we are in node for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-lose our testcases for <l n='luatex'/>.</p>
---ldx]]--
-
function constructors.hashinstance(specification,force)
- local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
- if force or not hash then
- hash = constructors.hashfeatures(specification)
- specification.hash = hash
- end
- if size < 1000 and designsizes[hash] then
- size = math.round(constructors.scaled(size,designsizes[hash]))
- specification.size = size
- end
- -- local mathsize = specification.mathsize or 0
- -- if mathsize > 0 then
- -- local textsize = specification.textsize
- -- if fallbacks then
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks
- -- else
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]'
- -- end
- -- else
- if fallbacks then
- return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks
- else
- return hash .. ' @ ' .. tostring(size)
- end
- -- end
-end
-
-function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata
- if constructors.namemode == "specification" then
- -- not to be used in context !
- local specname = specification.specification
- if specname then
- tfmdata.properties.name = specname
- if trace_defining then
- report_otf("overloaded fontname: '%s'",specname)
- end
- end
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
end
end
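+-- Feature hashing in short: hashmethods.normal sorts the feature keys and joins
+-- them as key=value pairs, hashfeatures prefixes the category, and hashinstance
+-- appends the size (and the fallbacks when present). A sketch with made up
+-- values:
+--
+--   local spec = { size=655360, features={ normal={ liga=true, kern=true } } }
+--   constructors.hashfeatures(spec)  -- "normal:kern=true+liga=true"
+--   constructors.hashinstance(spec)  -- "normal:kern=true+liga=true @ 655360"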
-
-function constructors.checkedfilename(data)
- local foundfilename = data.foundfilename
- if not foundfilename then
- local askedfilename = data.filename or ""
- if askedfilename ~= "" then
- askedfilename = resolvers.resolve(askedfilename) -- no shortcut
- foundfilename = resolvers.findbinfile(askedfilename,"") or ""
- if foundfilename == "" then
- report_defining("source file '%s' is not found",askedfilename)
- foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
- if foundfilename ~= "" then
- report_defining("using source file '%s' (cache mismatch)",foundfilename)
- end
- end
- end
- data.foundfilename = foundfilename
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
end
- return foundfilename
+ end
end
-
-local formats = allocate()
-fonts.formats = formats
-
-setmetatableindex(formats, function(t,k)
- local l = lower(k)
- if rawget(t,k) then
- t[k] = l
- return l
- end
- return rawget(t,file.extname(l))
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
end)
-
-local locations = { }
-
+local locations={}
local function setindeed(mode,target,group,name,action,position)
- local t = target[mode]
- if not t then
- report_defining("fatal error in setting feature '%s', group '%s', mode '%s'",name or "?",group or "?",mode)
- os.exit()
- elseif position then
- -- todo: remove existing
- insert(t, position, { name = name, action = action })
- else
- for i=1,#t do
- local ti = t[i]
- if ti.name == name then
- ti.action = action
- return
- end
- end
- insert(t, { name = name, action = action })
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
end
+ insert(t,{ name=name,action=action })
+ end
end
-
local function set(group,name,target,source)
- target = target[group]
- if not target then
- report_defining("fatal target error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
end
- local source = source[group]
- if not source then
- report_defining("fatal source error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
+ if description and description~="" then
+ where.descriptions[name]=description
end
- local node = source.node
- local base = source.base
- local position = source.position
- if node then
- setindeed("node",target,group,name,node,position)
+ if initializers then
+ set('initializers',name,where,specification)
end
- if base then
- setindeed("base",target,group,name,base,position)
+ if processors then
+ set('processors',name,where,specification)
end
-end
-
-local function register(where,specification)
- local name = specification.name
- if name and name ~= "" then
- local default = specification.default
- local description = specification.description
- local initializers = specification.initializers
- local processors = specification.processors
- local manipulators = specification.manipulators
- local modechecker = specification.modechecker
- if default then
- where.defaults[name] = default
- end
- if description and description ~= "" then
- where.descriptions[name] = description
- end
- if initializers then
- set('initializers',name,where,specification)
- end
- if processors then
- set('processors', name,where,specification)
- end
- if manipulators then
- set('manipulators',name,where,specification)
- end
- if modechecker then
- where.modechecker = modechecker
- end
+ if manipulators then
+ set('manipulators',name,where,specification)
end
-end
-
-constructors.registerfeature = register
-
-function constructors.getfeatureaction(what,where,mode,name)
- what = handlers[what].features
- if what then
- where = what[where]
- if where then
- mode = where[mode]
- if mode then
- for i=1,#mode do
- local m = mode[i]
- if m.name == name then
- return m.action
- end
- end
- end
- end
+ if modechecker then
+ where.modechecker=modechecker
end
+ end
end
-
-function constructors.newfeatures(what)
- local features = handlers[what].features
- if not features then
- local tables = handlers[what].tables -- can be preloaded
- features = allocate {
- defaults = { },
- descriptions = tables and tables.features or { },
- initializers = { base = { }, node = { } },
- processors = { base = { }, node = { } },
- manipulators = { base = { }, node = { } },
- }
- features.register = function(specification) return register(features,specification) end
- handlers[what].features = features -- will also become hidden
- end
- return features
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
end
-
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
-
function constructors.checkedfeatures(what,features)
- local defaults = handlers[what].features.defaults
- if features and next(features) then
- features = fastcopy(features) -- can be inherited (mt) but then no loops possible
- for key, value in next, defaults do
- if features[key] == nil then
- features[key] = value
- end
- end
- return features
- else
- return fastcopy(defaults) -- we can change features in place
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
end
+ return features
+ else
+ return fastcopy(defaults)
+ end
end
-
--- before scaling
-
function constructors.initializefeatures(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties or { } -- brrr
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatinitializers = whatfeatures.initializers
- local whatmodechecker = whatfeatures.modechecker
- -- properties.mode can be enforces (for instance in font-otd)
- local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
- properties.mode = mode -- also status
- features.mode = mode -- both properties.mode or features.mode can be changed
- --
- local done = { }
- while true do
- local redo = false
- local initializers = whatfeatures.initializers[mode]
- if initializers then
- for i=1,#initializers do
- local step = initializers[i]
- local feature = step.name
--- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way
- local value = features[feature]
- if not value then
- -- disabled
- elseif done[feature] then
- -- already done
- else
- local action = step.action
- if trace then
- report("initializing feature %s to %s for mode %s for font %s",feature,
- tostring(value),mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- action(tfmdata,value,features) -- can set mode (e.g. goodies) so it can trigger a restart
- if mode ~= properties.mode or mode ~= features.mode then
- if whatmodechecker then
- properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking
- features.mode = properties.mode
- end
- if mode ~= properties.mode then
- mode = properties.mode
- redo = true
- end
- end
- done[feature] = true
- end
- if redo then
- break
- end
- end
- if not redo then
- break
- end
- else
- break
- end
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
end
- properties.mode = mode -- to be sure
- return true
- else
- return false
+ if not redo then
+ break
+ end
+ else
+ break
+ end
end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
end
-
--- while typesetting
-
function constructors.collectprocessors(what,tfmdata,features,trace,report)
- local processes, nofprocesses = { }, 0
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatprocessors = whatfeatures.processors
- local processors = whatprocessors[properties.mode]
- if processors then
- for i=1,#processors do
- local step = processors[i]
- local feature = step.name
- if features[feature] then
- local action = step.action
- if trace then
- report("installing feature processor %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- nofprocesses = nofprocesses + 1
- processes[nofprocesses] = action
- end
- end
- end
- elseif trace then
- report("no feature processors for mode %s for font %s",
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- end
- return processes
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local processors=whatprocessors[properties.mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname)
+ end
+ end
+ return processes
end
-
--- after scaling
-
function constructors.applymanipulators(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatmanipulators = whatfeatures.manipulators
- local manipulators = whatmanipulators[properties.mode]
- if manipulators then
- for i=1,#manipulators do
- local step = manipulators[i]
- local feature = step.name
- local value = features[feature]
- if value then
- local action = step.action
- if trace then
- report("applying feature manipulator %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- action(tfmdata,feature,value)
- end
- end
- end
- end
- end
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local manipulators=whatmanipulators[properties.mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
end
end -- closure
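-- Illustrative sketch (not part of the patch): a handler obtains its feature
-- registry via constructors.newfeatures and registers a feature with optional
-- base/node initializers; the "demo" feature name and its empty bodies are
-- hypothetical, the calling pattern mirrors registerotffeature further down.
--~ local myfeatures = fonts.constructors.newfeatures("otf")
--~ myfeatures.register {
--~     name         = "demo",
--~     description  = "illustrative feature",
--~     default      = false,
--~     initializers = {
--~         base = function(tfmdata,value,features) end,
--~         node = function(tfmdata,value,features) end,
--~     },
--~ }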
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-font-enc'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-fonts.encodings = { }
-fonts.encodings.agl = { }
-
-setmetatable(fonts.encodings.agl, { __index = function(t,k)
- if k == "unicodes" then
- texio.write(" <loading (extended) adobe glyph list>")
- local unicodes = dofile(resolvers.findfile("font-age.lua"))
- fonts.encodings.agl = { unicodes = unicodes }
- return unicodes
- else
- return nil
- end
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
end })
-
end -- closure
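-- Illustrative sketch (not part of the patch): the __index metamethod above
-- loads font-age.lua only when the "unicodes" table is first asked for, so a
-- lookup like the following triggers the one-time load; the glyph name used
-- here is just an example.
--~ local agl = fonts.encodings.agl
--~ local u   = agl.unicodes["quotedblleft"] -- first access runs dofile on font-age.lua
--~ print(u)                                 -- a code point, or nil when the name is unknown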
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-cid'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (cidmaps)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local format, match, lower = string.format, string.match, string.lower
-local tonumber = tonumber
-local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-
-local cid = { }
-fonts.cid = cid
-
-local cidmap = { }
-local cidmax = 10
-
--- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
---
--- 18964 18964 (leader)
--- 0 /.notdef
--- 1..95 0020
--- 99 3000
-
-local number = C(R("09","af","AF")^1)
-local space = S(" \n\r\t")
-local spaces = space^0
-local period = P(".")
-local periods = period * period
-local name = P("/") * C((1-space)^1)
-
-local unicodes, names = { }, { } -- we could use Carg now
-
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
local function do_one(a,b)
- unicodes[tonumber(a)] = tonumber(b,16)
+ unicodes[tonumber(a)]=tonumber(b,16)
end
-
local function do_range(a,b,c)
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
end
-
local function do_name(a,b)
- names[tonumber(a)] = b
-end
-
-local grammar = P { "start",
- start = number * spaces * number * V("series"),
- series = (spaces * (V("one") + V("range") + V("named")))^1,
- one = (number * spaces * number) / do_one,
- range = (number * periods * number * spaces * number) / do_range,
- named = (number * spaces * name) / do_name
+ names[tonumber(a)]=b
+end
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
}
-
local function loadcidfile(filename)
- local data = io.loaddata(filename)
- if data then
- unicodes, names = { }, { }
- lpegmatch(grammar,data)
- local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- end
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names
+ }
+ end
end
-
-cid.loadfile = loadcidfile -- we use the frozen variant
-
-local template = "%s-%s-%s.cidmap"
-
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
local function locate(registry,ordering,supplement)
- local filename = format(template,registry,ordering,supplement)
- local hashname = lower(filename)
- local found = cidmap[hashname]
- if not found then
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
if trace_loading then
- report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
- end
- local fullname = resolvers.findfile(filename,'cid') or ""
- if fullname ~= "" then
- found = loadcidfile(fullname)
- if found then
- if trace_loading then
- report_otf("using cidmap file %s",filename)
- end
- cidmap[hashname] = found
- found.usedname = file.basename(filename)
- end
+ report_otf("using cidmap file %a",filename)
end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
end
- return found
+ end
+ return found
end
-
--- cf Arthur R. we can safely scan upwards since cids are downward compatible
-
function cid.getmap(specification)
- if not specification then
- report_otf("invalid cidinfo specification (table expected)")
- return
- end
- local registry = specification.registry
- local ordering = specification.ordering
- local supplement = specification.supplement
- -- check for already loaded file
- local filename = format(registry,ordering,supplement)
- local found = cidmap[lower(filename)]
- if found then
- return found
- end
- if trace_loading then
- report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
- end
- found = locate(registry,ordering,supplement)
- if not found then
- local supnum = tonumber(supplement)
- local cidnum = nil
- -- next highest (alternatively we could start high)
- if supnum < cidmax then
- for s=supnum+1,cidmax do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- next lowest (least worse fit)
- if not found and supnum > 0 then
- for s=supnum-1,0,-1 do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- prevent further lookups -- somewhat tricky
- registry = lower(registry)
- ordering = lower(ordering)
- if found and cidnum > 0 then
- for s=0,cidnum-1 do
- local filename = format(template,registry,ordering,s)
- if not cidmap[filename] then
- cidmap[filename] = found
- end
- end
- end
- end
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(registry,ordering,supplement)
+ local found=cidmap[lower(filename)]
+ if found then
return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
end
end -- closure
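-- Illustrative sketch (not part of the patch): a caller asks for a cid map by
-- registry, ordering and supplement; "Adobe-CNS1-4" is borrowed from the old
-- comments, any installed cidmap file works the same way, and the returned
-- table carries the unicodes/names/usedname fields built in loadcidfile/locate.
--~ local map = fonts.cid.getmap { registry = "Adobe", ordering = "CNS1", supplement = 4 }
--~ if map then
--~     print(map.usedname)                  -- the cidmap file that was found
--~     print(map.unicodes and map.unicodes[100]) -- cid index -> unicode, when present
--~ end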
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
-local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
-local utfbyte = utf.byte
-
-local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
-local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end)
-
-local report_fonts = logs.reporter("fonts","loading") -- not otf only
-
-local fonts = fonts
-local mappings = { }
-fonts.mappings = mappings
-
---[[ldx--
-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>
-<p>The name to unciode related code will stay of course.</p>
---ldx]]--
-
-local function loadlumtable(filename) -- will move to font goodies
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.findfile(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_mapping then
- report_fonts("enhance: loading %s ",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (R("09")^1) / tonumber
-local period = P(".")
-local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
-local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
-local index = P("index") * dec * Cc(false)
-
-local parser = unicode + ucode + index
-
-local parsers = { }
-
+local tonumber=tonumber
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+ lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex^1)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
local function makenameparser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = P(str) * period * dec * Cc(false)
- parsers[str] = p
- end
- return p
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
end
+ return p
+ end
end
-
---~ local parser = mappings.makenameparser("Japan1")
---~ local parser = mappings.makenameparser()
---~ local function test(str)
---~ local b, a = lpegmatch(parser,str)
---~ print((a and table.serialize(b)) or b)
---~ end
---~ test("a.sc")
---~ test("a")
---~ test("uni1234")
---~ test("uni1234.xx")
---~ test("uni12349876")
---~ test("index1234")
---~ test("Japan1.123")
-
local function tounicode16(unicode)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- else
- return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
+ if unicode<0x10000 then
+ return format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a into tounicode",unicode)
+ end
end
-
local function tounicode16sequence(unicodes)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- else
- t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
- end
- return concat(t)
-end
-
-local function fromunicode16(str)
- if #str == 4 then
- return tonumber(str,16)
+ local t={}
+ for l=1,#unicodes do
+ local unicode=unicodes[l]
+ if unicode<0x10000 then
+ t[l]=format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
else
- local l, r = match(str,"(....)(....)")
- return (tonumber(l,16)- 0xD800)*0x400 + tonumber(r,16) - 0xDC00
+ report_fonts ("can't convert %a into tounicode",unicode)
end
+ end
+ return concat(t)
end
-
---~ This is quite a bit faster but at the cost of some memory but if we
---~ do this we will also use it elsewhere so let's not follow this route
---~ now. I might use this method in the plain variant (no caching there)
---~ but then I need a flag that distinguishes between code branches.
---~
---~ local cache = { }
---~
---~ function mappings.tounicode16(unicode)
---~ local s = cache[unicode]
---~ if not s then
---~ if unicode < 0x10000 then
---~ s = format("%04X",unicode)
---~ else
---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
---~ end
---~ cache[unicode] = s
---~ end
---~ return s
---~ end
-
-mappings.loadlumtable = loadlumtable
-mappings.makenameparser = makenameparser
-mappings.tounicode16 = tounicode16
-mappings.tounicode16sequence = tounicode16sequence
-mappings.fromunicode16 = fromunicode16
-
-local separator = S("_.")
-local other = C((1 - separator)^1)
-local ligsplitter = Ct(other * (separator * other)^0)
-
---~ print(table.serialize(lpegmatch(ligsplitter,"this")))
---~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16)- 0xD800)*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local separator=S("_.")
+local other=C((1-separator)^1)
+local ligsplitter=Ct(other*(separator*other)^0)
function mappings.addtounicode(data,filename)
- local resources = data.resources
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes
- if not unicodes then
- return
- end
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
- local private = fonts.constructors.privateoffset
- local unknown = format("%04X",utfbyte("?"))
- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local tounicode = { }
- local originals = { }
- resources.tounicode = tounicode
- resources.originals = originals
- local lumunic, uparser, oparser
- local cidinfo, cidnames, cidcodes, usedmap
- if false then -- will become an option
- lumunic = loadlumtable(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- --
- cidinfo = properties.cidinfo
- usedmap = cidinfo and fonts.cid.getmap(cidinfo)
- --
- if usedmap then
- oparser = usedmap and makenameparser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = makenameparser()
- local ns, nl = 0, 0
- for unic, glyph in next, descriptions do
- local index = glyph.index
- local name = glyph.name
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = lumunic and lumunic[name] or unicodevector[name]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if (not unicode) and usedmap then
- local foundindex = lpegmatch(oparser,name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- else
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = lpegmatch(oparser,reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- end
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,reference)
- if foundcodes then
- originals[index] = foundcodes
- if multiple then
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode then
- local split = lpegmatch(ligsplitter,name)
- local nplit = split and #split or 0
- if nplit >= 2 then
- local t, n = { }, 0
- for l=1,nplit do
- local base = split[l]
- local u = unicodes[base] or unicodevector[base]
- if not u then
- break
- elseif type(u) == "table" then
- n = n + 1
- t[n] = u[1]
- else
- n = n + 1
- t[n] = u
- end
- end
- if n == 0 then -- done then
- -- nothing
- elseif n == 1 then
- originals[index] = t[1]
- tounicode[index] = tounicode16(t[1])
- else
- originals[index] = t
- tounicode[index] = tounicode16sequence(t)
- end
- nl = nl + 1
- unicode = true
- else
- -- skip: already checked and we don't want privates here
- end
- end
- -- last resort (we might need to catch private here as well)
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,name)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unknown=format("%04X",utfbyte("?"))
+ local unicodevector=fonts.encodings.agl.unicodes
+ local tounicode={}
+ local originals={}
+ resources.tounicode=tounicode
+ resources.originals=originals
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ if false then
+ lumunic=loadlumtable(filename)
+ lumunic=lumunic and lumunic.tounicode
+ end
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
if foundcodes then
- if multiple then
- originals[index] = foundcodes
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- originals[index] = foundcodes
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- -- if not unicode then
- -- originals[index] = 0xFFFD
- -- tounicode[index] = "FFFD"
- -- end
- end
- end
- if trace_mapping then
- for unic, glyph in table.sortedhash(descriptions) do
- local name = glyph.name
- local index = glyph.index
- local toun = tounicode[index]
- if toun then
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X, tounicode: %s",index,name,unic,toun)
+ originals[index]=foundcodes
+ if multiple then
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode then
+ local split=lpegmatch(ligsplitter,name)
+ local nplit=split and #split or 0
+ if nplit>=2 then
+ local t,n={},0
+ for l=1,nplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ n=n+1
+ t[n]=u[1]
else
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X",index,name,unic)
- end
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1])
+ else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
+ end
+ nl=nl+1
+ unicode=true
+ else
end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- report_fonts("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
- end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local toun=tounicode[index]
+ if toun then
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
end
end -- closure
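-- Illustrative sketch (not part of the patch), mirroring the test lines that
-- were dropped from the old code: the name parser maps glyph names of the
-- "uniXXXX" / "uXXXXXX" / "index<n>" kind to numbers, and the second return
-- value tells whether a sequence (table of code points) was found.
--~ local parser = fonts.mappings.makenameparser()
--~ print(lpeg.match(parser,"uni1234"))     -- 4660   false
--~ print(lpeg.match(parser,"uni12349876")) -- table  true   (two code points)
--~ print(lpeg.match(parser,"index123"))    -- 123    false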
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-syn'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
--- Generic font names support.
---
--- Watch out, the version number is the same as the one used in
--- the mtx-fonts.lua function scripts.fonts.names as we use a
--- simplified font database in the plain solution and by using
--- a different number we're less dependent on context.
---
--- mtxrun --script font --reload --simple
---
--- The format of the file is as follows:
---
--- return {
--- ["version"] = 1.001,
--- ["mappings"] = {
--- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 },
--- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 },
--- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" },
--- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" },
--- },
--- }
-
-local fonts = fonts
-fonts.names = fonts.names or { }
-
-fonts.names.version = 1.001 -- not the same as in context
-fonts.names.basename = "luatex-fonts-names.lua"
-fonts.names.new_to_old = { }
-fonts.names.old_to_new = { }
-
-local data, loaded = nil, false
-
-local fileformats = { "lua", "tex", "other text files" }
-
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names.lua"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
function fonts.names.resolve(name,sub)
- if not loaded then
- local basename = fonts.names.basename
- if basename and basename ~= "" then
- for i=1,#fileformats do
- local format = fileformats[i]
- local foundname = resolvers.findfile(basename,format) or ""
- if foundname ~= "" then
- data = dofile(foundname)
- texio.write("<font database loaded: ",foundname,">")
- break
- end
- end
- end
- loaded = true
- end
- if type(data) == "table" and data.version == fonts.names.version then
- local condensed = string.gsub(string.lower(name),"[^%a%d]","")
- local found = data.mappings and data.mappings[condensed]
- if found then
- local fontname, filename, subfont = found[1], found[2], found[3]
- if subfont then
- return filename, fontname
- else
- return filename, false
- end
- else
- return name, false -- fallback to filename
- end
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ else
+ return name,false
end
+ end
end
-
-fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
-
-function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
- return ""
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
end
end -- closure
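-- Illustrative sketch (not part of the patch), condensed from the comment that
-- was dropped above: luatex-fonts-names.lua (regenerated with
-- "mtxrun --script font --reload --simple") returns a table like the one below,
-- and fonts.names.resolve looks up the condensed lowercased name in mappings.
--~ return {
--~     ["version"]  = 1.001,
--~     ["mappings"] = {
--~         ["somettffont"]    = { "Some TTF Font", "SomeFontB.ttf" },
--~         ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 },
--~     },
--~ }
--~ -- with such an entry present, fonts.names.resolve("Some TTF Font")
--~ -- returns "SomeFontB.ttf", false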
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-tfm'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local tfm = { }
-fonts.handlers.tfm = tfm
-fonts.formats.tfm = "type1" -- we need to have at least a value here
-
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
function fonts.readers.tfm(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local foundname = resolvers.findbinfile(fullname, 'tfm') or ""
- if foundname == "" then
- foundname = resolvers.findbinfile(fullname, 'ofm') or ""
- end
- if foundname ~= "" then
- specification.filename = foundname
- specification.format = "ofm"
- return font.read_tfm(specification.filename,specification.size)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
end
end -- closure
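-- Illustrative sketch (not part of the patch): the reader above takes a
-- specification table with at least a name and a size in scaled points; the
-- font name here is just an example (10pt = 10*65536 scaled points).
--~ local tfmdata = fonts.readers.tfm {
--~     name = "cmr10",
--~     size = 10*65536,
--~ }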
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-oti'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local lower = string.lower
-
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local otf = { }
-fonts.handlers.otf = otf
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
registerotffeature {
- name = "features",
- description = "initialization of feature handler",
- default = true,
+ name="features",
+ description="initialization of feature handler",
+ default=true,
}
-
--- these are later hooked into node and base initializaters
-
-local otftables = otf.tables -- not always defined
-
local function setmode(tfmdata,value)
- if value then
- tfmdata.properties.mode = lower(value)
- end
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
end
-
local function setlanguage(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local languages = otftables and otftables.languages
- local properties = tfmdata.properties
- if not languages then
- properties.language = cleanvalue
- elseif languages[value] then
- properties.language = cleanvalue
- else
- properties.language = "dflt"
- end
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
end
+ end
end
-
local function setscript(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local scripts = otftables and otftables.scripts
- local properties = tfmdata.properties
- if not scripts then
- properties.script = cleanvalue
- elseif scripts[value] then
- properties.script = cleanvalue
- else
- properties.script = "dflt"
- end
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
end
+ end
end
-
registerotffeature {
- name = "mode",
- description = "mode",
- initializers = {
- base = setmode,
- node = setmode,
- }
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
}
-
registerotffeature {
- name = "language",
- description = "language",
- initializers = {
- base = setlanguage,
- node = setlanguage,
- }
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
}
-
registerotffeature {
- name = "script",
- description = "script",
- initializers = {
- base = setscript,
- node = setscript,
- }
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
}
-
end -- closure
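-- Illustrative sketch (not part of the patch): the three features registered
-- above react to entries in a user feature set; a set like the following makes
-- setmode, setscript and setlanguage store "node", "latn" and "dflt" in the
-- font's properties (unknown scripts or languages fall back to "dflt").
--~ local features = { mode = "node", script = "latn", language = "dflt" }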
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- langs -> languages enz
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime dus zinloos
--- to_table -> totable
--- ascent descent
-
--- more checking against low level calls of functions
-
-local utf = unicode.utf8
-
-local utfbyte = utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local getn = table.getn
-local lpegmatch = lpeg.match
-local reversed, concat, remove = table.reversed, table.concat, table.remove
-local ioflush = io.flush
-local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
-
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.737 -- beware: also sync font-mis.lua
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local fontdata = fonts.hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local enhancers = allocate()
-otf.enhancers = enhancers
-local patches = { }
-enhancers.patches = patches
-
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloaderfields = fontloader.fields
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local getn=table.getn
+local lpegmatch=lpeg.match
+local reversed,concat,remove=table.reversed,table.concat,table.remove
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.741
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local fontdata=fonts.hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local forceload=false
+local cleanup=0
+local usemetatables=false
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local wildcard="*"
+local default="dflt"
+local fontloaderfields=fontloader.fields
+local mainfields=nil
+local glyphfields=nil
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(raw, featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
end
+ fontloader.apply_featurefile(raw,featurefile)
+ end
end
-
local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %s has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
if trace_loading then
- report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ report_otf(" %s: % t",feature,tt)
end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
- end
- if trace_loading then
- report_otf(" %s: %s",feature,concat(tt," "))
- end
- else
- if trace_loading then
- report_otf(" %s: %s",feature,tostring(scripts))
- end
- end
- end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
end
+ end
end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %s has no sequences",filename)
+ end
end
-end
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-local valid_fields = table.tohash {
- -- "anchor_classes",
- "ascent",
- -- "cache_version",
- "cidinfo",
- "copyright",
- -- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- -- "glyphs",
- "hasvmetrics",
- -- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- -- "kerns",
- -- "lookups",
- "macstyle",
- -- "modificationtime",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- -- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- -- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- -- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- -- "xuid",
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
}
-
-local ordered_enhancers = {
- "prepare tables",
- "prepare glyphs",
- "prepare lookups",
-
- "analyze glyphs",
- "analyze math",
-
- "prepare tounicode", -- maybe merge with prepare
-
- "reorganize lookups",
- "reorganize mark classes",
- "reorganize anchor classes",
-
- "reorganize glyph kerns",
- "reorganize glyph lookups",
- "reorganize glyph anchors",
-
- "merge kern classes",
-
- "reorganize features",
- "reorganize subtables",
-
- "check glyphs",
- "check metadata",
- "check extra features", -- after metadata
-
- "add duplicates",
- "check encoding",
-
- "cleanup tables",
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "prepare tounicode",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "add duplicates",
+ "check encoding",
+ "cleanup tables",
}
-
---[[ldx--
-<p>Here we go.</p>
---ldx]]--
-
-local actions = allocate()
-local before = allocate()
-local after = allocate()
-
-patches.before = before
-patches.after = after
-
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
local function enhance(name,data,filename,raw)
- local enhancer = actions[name]
- if enhancer then
- if trace_loading then
- report_otf("enhance: %s (%s)",name,filename)
- ioflush()
- end
- enhancer(data,filename,raw)
- elseif trace_loading then
- -- report_otf("enhance: %s is undefined",name)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
end
+ enhancer(data,filename,raw)
+ else
+ end
end
-
function enhancers.apply(data,filename,raw)
- local basename = file.basename(lower(filename))
- if trace_loading then
- report_otf("start enhancing: %s",filename)
- end
- ioflush() -- we want instant messages
- for e=1,#ordered_enhancers do
- local enhancer = ordered_enhancers[e]
- local b = before[enhancer]
- if b then
- for pattern, action in next, b do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- enhance(enhancer,data,filename,raw)
- local a = after[enhancer]
- if a then
- for pattern, action in next, a do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- ioflush() -- we want instant messages
- end
- if trace_loading then
- report_otf("stop enhancing")
- end
- ioflush() -- we want instant messages
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
end
-
--- patches.register("before","migrate metadata","cambria",function() end)
-
function patches.register(what,where,pattern,action)
- local pw = patches[what]
- if pw then
- local ww = pw[where]
- if ww then
- ww[pattern] = action
- else
- pw[where] = { [pattern] = action}
- end
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
end
+ end
end
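
    As an illustration of the hook above (not part of the patch): patches.register files an action under patches[what][where][pattern], and enhancers.apply later runs it when the lowercased basename of the font matches the pattern, either before or after the named enhancer. A minimal sketch, assuming the table is reachable as fonts.handlers.otf.enhancers.patches and using the "check metadata" step from ordered_enhancers:

        local patches = fonts.handlers.otf.enhancers.patches  -- assumed public path
        -- run a fixup just before the "check metadata" enhancer for fonts whose
        -- basename matches "cambria" (a string.find pattern, as in enhancers.apply)
        patches.register("before","check metadata","cambria",function(data,filename,raw)
          patches.report("patching %s",filename)
        end)
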
-
function patches.report(fmt,...)
- if trace_loading then
- report_otf("patching: " ..fmt,...)
- end
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
end
-
-function enhancers.register(what,action) -- only already registered can be overloaded
- actions[what] = action
+function enhancers.register(what,action)
+ actions[what]=action
end
-
function otf.load(filename,format,sub,featurefile)
- local name = file.basename(file.removesuffix(filename))
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
end
- if sub == "" then
- sub = false
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
end
- local hash = name
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading: no featurefile '%s'",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("loading: forced reload due to hard coded flag")
- reload = true
+ fontdata,messages=fontloader.open(filename,sub)
+ else
+ fontdata,messages=fontloader.open(filename)
end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
- end
- end
- if reload then
- report_otf("loading: %s (hash: %s)",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = fontloader.open(filename,sub)
- else
- fontdata, messages = fontloader.open(filename)
- end
- if fontdata then
- mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %s",tostring(messages[m]))
- end
- end
- else
- report_otf("font loaded okay")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- format = format,
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = {
- tounicodelist = splitter,
- tounicodetable = lpeg.Ct(splitter),
- },
- }
- starttiming(data)
- report_otf("file size: %s", size)
- enhancers.apply(data,filename,fontdata)
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- enhance("pack",data,filename,nil)
- end
- report_otf("saving in cache: %s",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- stoptiming(data)
- if elapsedtime then -- not in generic
- report_otf("preprocessing and caching took %s seconds",elapsedtime(data))
- end
- fontloader.close(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- else
- data = nil
- report_otf("loading failed (file read error)")
- end
+ if fontdata then
+ mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
end
- if data then
- if trace_defining then
- report_otf("loading from cache: %s",hash)
- end
- enhance("unpack",data,filename,nil,false)
- enhance("add dimensions",data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
end
+ end
+ else
+ report_otf("loading done")
end
- return data
-end
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=format,
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
}
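
    The mt metatable above gives glyph descriptions lazy dimensions: height and depth are derived from the boundingbox on access (negative extents clamp to zero), width falls back to 0, and name falls back to ".notdef" only when forcenotdef is set. A small self-contained sketch of that behaviour:

        local d = { boundingbox = { 0, -200, 500, 700 } }
        setmetatable(d, mt)
        print(d.height, d.depth, d.width)  --> 700   200   0
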
-
-actions["prepare tables"] = function(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-actions["add dimensions"] = function(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- if usemetatables then
- for _, d in next, descriptions do
- local wd = d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- setmetatable(d,mt)
- end
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ if usemetatables then
+ for _,d in next,descriptions do
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
else
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- -- if forcenotdef and not d.name then
- -- d.name = ".notdef"
- -- end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- end
- end
+ new[k]=v
end
- end
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
+ new[k]=v
end
- return new
- else
- return { }
+ end
end
-end
-
--- not setting hasitalics and class (when nil) during
--- table construction can save some mem
-
-actions["prepare glyphs"] = function(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
-
- if rawsubfonts then
-
- metadata.subfonts = { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames, nofunicodes = 0, 0
- local cidunicodes, cidnames = cidmap.unicodes, cidmap.names
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- metadata.subfonts[cidindex] = somecopy(subfont)
- for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
- local glyph = cidglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if not unicode or unicode == -1 or unicode >= criterium then
- if not name then
- name = format("u%06X",private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- if not name then
- name = format("u%06X",unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
-
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = glyph.name or name or "unknown", -- uniXXXX
- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
-
- descriptions[unicode] = description
- else
- -- report_otf("potential problem: glyph 0x%04X is used but empty",index)
- end
- end
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %s",filename)
- end
- elseif trace_loading then
- report_otf("font %s has no glyphs",filename)
- end
-
- else
-
- for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash)
- local glyph = rawglyphs[index]
+ return new
+ else
+ return {}
+ end
+end
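
    somecopy above is the fast copier used for CID subfont metadata: it copies a table recursively but silently drops any glyphs field, and otherwise (when the source is not a table) it copies only the fields listed in mainfields. A tiny sketch of the common case:

        local subfont = { fontname = "Test-Regular", glyphcnt = 2, glyphs = { "...huge..." } }
        local copy = somecopy(subfont)
        -- copy.fontname == "Test-Regular", copy.glyphcnt == 2, copy.glyphs == nil
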
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts={}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- else
- unicodes[name] = unicode
- end
- indices[index] = unicode
+ local unicode=glyph.unicode
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=cidunicodes[index]
+ end
+ if not unicode or unicode==-1 or unicode>=criterium then
if not name then
- name = format("u%06X",unicode)
+ name=format("u%06X",private)
end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- local v = a.variant
- if v then
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- elseif d then
- d[#d+1] = u
- else
- d = { u }
- end
- end
- if d then
- duplicates[unicode] = d
- end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=glyph.name or name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
else
- report_otf("potential problem: glyph 0x%04X is used but empty",index)
end
+ end
end
-
- end
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
-actions["check encoding"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
if trace_loading then
- report_otf("checking embedded unicode map '%s'",encname)
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
end
- for unicode, index in next, unicodetoindex do -- altuni already covers this
- if unicode <= criterium and not descriptions[unicode] then
- local parent = indices[index] -- why nil?
- if parent then
- report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index)
- else
- report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index)
- end
- end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ unicodes[name]=unicode
end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?")
- else
- report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only
-
-actions["add duplicates"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- for unicode, d in next, duplicates do
- for i=1,#d do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- local n = 0
- for _, description in next, descriptions do
- if kerns then
- local kerns = description.kerns
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if trace_loading then
- report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n)
- end
- end
+ indices[index]=unicode
+ if not name then
+ name=format("u%06X",unicode)
end
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ local d
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
+ end
+ end
+ if d then
+ duplicates[unicode]=d
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
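
    In the loops above, a glyph whose unicode is missing, -1, or at or beyond constructors.privateoffset is parked in the next free private slot, and when it has no name one is synthesized from that slot. A hedged sketch of the numbering, assuming the customary ConTeXt private offset of 0xF0000 (the actual value comes from constructors.privateoffset):

        local private = 0xF0000                          -- assumed privateoffset
        local name    = string.format("u%06X", private)  -- "u0F0000"
        -- unicodes[name] = private ; indices[index] = private ; private = private + 1
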
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if parent then
+ report_otf("weird, unicode %U points to %U with index %H",unicode,parent,index)
else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
- end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ end
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
end
- end
- if most > 1000 then -- maybe 500
if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+end
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
-
-actions["reorganize mark classes"] = function(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-actions["reorganize features"] = function(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d= dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-actions["reorganize anchor classes"] = function(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
end
+ a[lookup]=true
+ end
end
+ end
end
-
-actions["prepare tounicode"] = function(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
}
-
--- Research by Khaled Hosny has demonstrated that the font loader merges
--- regular and AAT features and that these can interfere (especially because
--- we dropped checking for valid features elsewhere). So, we just check for
--- the special flag and drop the feature if such a tag is found.
-
local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
+ for i=1,#features do
+ if features[i].ismac then
+ return false
+ end
+ end
+ return true
+end
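
    supported above implements the check described in the removed comment: if any feature entry carries the ismac flag (an AAT feature merged in by the font loader), the whole lookup is dropped in "reorganize subtables". A minimal sketch, with the entry shape reduced to the field the check looks at:

        assert(supported { { tag = "liga" } }               == true)
        assert(supported { { tag = "liga", ismac = true } } == false)
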
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ if not features or supported(features) then
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
end
+ end
end
- return true
+ end
end
-
-actions["reorganize subtables"] = function(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups
- for _, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
--- if features and supported(features) then
- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if features then
- -- scripts, tag, ismac
- local f = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- end
- end
- end
- end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
end
-
--- test this:
---
--- for _, what in next, otf.glists do
--- raw[what] = nil
--- end
-
-actions["prepare lookups"] = function(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
-end
-
--- The reverse handler does a bit of redundant splitting but it's seldom
--- seen so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
-
local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
end
-
-local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- tih = { }
- for i=1,#ti do
- tih[ti[i]] = true
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
-end
-
local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- for i=1,#uncovered do
- uncovered[i] = { [uncovered[i]] = true }
- end
- cache[cover] = uncovered
- end
- return uncovered
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
end
+ return { uncovered }
+ end
end
-
-local s_hashed = t_hashed
-
-local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ tih={}
+ for i=1,#ti do
+ tih[ti[i]]=true
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
end
-
-actions["reorganize lookups"] = function(data,filename,raw)
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local splitter = data.helpers.tounicodetable
- local cache, h_cache = { }, { }
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- if lookups and not lookups[i] then
- lookups[i] = false -- e.g. we can have two lookups and one replacement
- end
- end
- rule.current = t_hashed(current,h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore then
- fore = s_uncover(splitter,cache,fore)
- rule.before = s_hashed(fore,h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,cache,back)
- rule.after = s_hashed(back,h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,cache,names)
- rule.current = s_hashed(names,h_cache)
- end
- rule.glyphs = nil
- end
- end
- end
- end
- end
- end
+local s_hashed=t_hashed
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.before=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.after=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ end
+ end
+ end
+ end
+ end
+ end
end
-
local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",g,unicode)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
end
- done[g] = true
+ end
end
+ end
+ math.kerns=mathkerns
end
- if n == 0 then
- variants = nil
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
end
- end
- local italic_correction = the_variants.italic_correction
- if italic_correction and italic_correction == 0 then
- italic_correction = nil
- end
- return variants, parts, italic_correction
-end
-
-actions["analyze math"] = function(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local horiz_variants = glyph.horiz_variants
- local vert_variants = glyph.vert_variants
- local top_accent = glyph.top_accent
- if mathkerns or horiz_variants or vert_variants or top_accent then
- local math = { }
- if top_accent then
- math.top_accent = top_accent
- end
- if mathkerns then
- for k, v in next, mathkerns do
- if not next(v) then
- mathkerns[k] = nil
- else
- for k, v in next, v do
- if v == 0 then
- k[v] = nil -- height / kern can be zero
- end
- end
- end
- end
- math.kerns = mathkerns
- end
- if horiz_variants then
- math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes)
- end
- if vert_variants then
- math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes)
- end
- local italic_correction = description.italic
- if italic_correction and italic_correction ~= 0 then
- math.italic_correction = italic_correction
- end
- description.math = math
- end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
end
+ description.math=math
+ end
end
+ end
end
-
-actions["reorganize glyph kerns"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %s of kern %s of glyph U+%05X",name,k,unicode)
- end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
end
- end
- description.kerns = newkerns
- end
- end
-end
-
-actions["merge kern classes"] = function(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts = kcl.firsts
- local seconds = kcl.seconds
- local offsets = kcl.offsets
- local lookups = kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- -- we can check the max in the loop
- -- local maxseconds = getn(seconds)
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- -- for sk=2,maxseconds do
- -- local sv = seconds[sk]
- for sk, sv in next, seconds do
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- elseif trace_loading then
- report_otf("no glyph data for U+%05X", first_unicode)
- end
- end
- end
- end
- end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
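-- A minimal standalone sketch of the reorganization above: per-glyph kern
-- records keyed by char/off/lookup become newkerns[lookup][unicode] = offset.
-- The names, unicodes and lookup tags below are invented for illustration.
local unicodes = { a = 0x61, v = 0x76 }
local kerns = {
  { char = "v", off = -40, lookup = "pp_l_0_s" },
  { char = "a", off = -20, lookup = { "pp_l_0_s", "pp_l_1_s" } },
}
local newkerns = { }
for k, kern in next, kerns do
  local unicode = unicodes[kern.char]
  local lookups = type(kern.lookup) == "table" and kern.lookup or { kern.lookup }
  for l=1,#lookups do
    local lookup = lookups[l]
    local lookupkerns = newkerns[lookup]
    if lookupkerns then
      lookupkerns[unicode] = kern.off
    else
      newkerns[lookup] = { [unicode] = kern.off }
    end
  end
end
-- newkerns["pp_l_0_s"] is now { [0x76] = -40, [0x61] = -20 }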
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ if kernclass then
+ local split={}
+ for k=1,#kernclass do
+ local kcl=kernclass[k]
+ local firsts=kcl.firsts
+ local seconds=kcl.seconds
+ local offsets=kcl.offsets
+ local lookups=kcl.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
end
- subtable.kernclass = { }
- end
- end
- end
- end
- end
-end
-
-actions["check glyphs"] = function(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-actions["check metadata"] = function(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
-end
-
-actions["cleanup tables"] = function(data,filename,raw)
- data.resources.indices = nil -- not needed
- data.helpers = nil
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes: first seems to be indexed, but
--- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
---- ligatures have an extra specification.char entry that we don't use
-
-actions["reorganize glyph lookups"] = function(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes: %s => %s and %s",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- end
- end
-
-end
-
-actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace in place we save entries
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- for i=1,#specification do
- local si = specification[i]
- specification[i] = { si.x or 0, si.y or 0 }
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
end
- end
- else
- for tag, specification in next, data do
- data[tag] = { specification.x or 0, specification.y or 0 }
- end
- end
- end
- description.anchors = anchors
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ end
+end
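-- A minimal sketch of the class kern flattening above: offsets is a flat
-- array and the entry for class pair (fk,sk) lives at (fk-1)*maxseconds + sk.
-- The class strings, counts and kern values below are invented, not taken
-- from a real font.
local firsts     = { "A E", "V W" }
local seconds    = { [2] = "A E", [3] = "o" } -- seconds often start at 2
local maxseconds = 3
local offsets    = { 0, -80, -40, 0, -100, -60 }
for fk=1,#firsts do
  for sk=2,maxseconds do
    local offset = offsets[(fk-1)*maxseconds + sk]
    print(firsts[fk], seconds[sk], offset)
  end
end
-- prints: A E / A E / -80, A E / o / -40, V W / A E / -100, V W / o / -60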
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ data.resources.indices=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ for i=1,#specification do
+ local si=specification[i]
+ specification[i]={ si.x or 0,si.y or 0 }
+ end
+ end
+ else
+ for tag,specification in next,data do
+ data[tag]={ specification.x or 0,specification.y or 0 }
+ end
end
+ end
+ description.anchors=anchors
end
+ end
end
-
--- modes: node, base, none
-
function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
end
-
--- the first version made a top/mid/bot extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
--- we could use a cache with a hash)
---
--- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
-
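-- A minimal sketch of the variant chain that the loops below build: each
-- character points to the next larger variant via .next and only the last
-- one in the chain carries the extensible parts. The slots and the part
-- list are placeholders, not data from a real font.
local characters = { [0x28] = { }, [0xE100] = { }, [0xE101] = { } }
local variants   = { 0xE100, 0xE101 }            -- growing variants of U+0028
local parts      = { "bottom", "extender", "top" } -- placeholder part list
local c = characters[0x28]
for i=1,#variants do
  local un = variants[i]
  c.next = un
  c = characters[un]
end
c.vert_variants = parts -- the last character in the chain gets the parts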
local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- local variants = m.horiz_variants
- local parts = m.horiz_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- end
- local variants = m.vert_variants
- local parts = m.vert_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- local italic_correction = m.vert_italic_correction
- if italic_correction then
- character.vert_italic_correction = italic_correction -- was c.
- end
- local top_accent = m.top_accent
- if top_accent then
- character.top_accent = top_accent
- end
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local italicangle = metadata.italicangle
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020 -- unicodes['space'], unicodes['emdash']
- local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or 500 -- brrr
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local units = metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000
- metadata.units_per_em = 1000
- end
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.round(math.tan(italicangle*math.pi/180))
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x78 -- unicodes['x']
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.ascender = abs(metadata.ascent or 0)
- parameters.descender = abs(metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or fonts.formats[filename] or "opentype"
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = fontname or fullname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- }
- end
+ if data then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local italicangle=metadata.italicangle
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x78
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fontname or fullname
+ properties.name=filename or fullname
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
end
-
local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,format,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local format=specification.format
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
end
-
local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.extname(specification.filename),specification)
- end
- return tfmdata
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
end
-
local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
end
-
registerotffeature {
- name = "mathsize",
- description = "apply mathsize as specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
+ name="mathsize",
+ description="apply mathsize as specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
}
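-- A minimal sketch of the registration pattern used above, assuming the
-- registerotffeature upvalue from this module; the feature name and the
-- property it sets are hypothetical and for illustration only.
local function checkdemo(tfmdata,value)
  tfmdata.properties.demovalue = value -- just record the requested value
end
registerotffeature {
  name         = "demofeature",
  description  = "illustrative feature registration",
  initializers = {
    base = checkdemo,
    node = checkdemo,
  }
}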
-
--- helpers
-
function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[s] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+ if not featuremap[s] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
end
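-- A minimal usage sketch for the helper above with a hand-made rawdata table;
-- a real one comes from otf.load and has many more fields, and the feature,
-- script, language and subtable name here are just example values.
local rawdata = {
  resources = {
    sequences = {
      {
        features  = { liga = { latn = { dflt = true } } },
        subtables = { "s_l_0_g_s" },
      },
    },
  },
}
local featuremap, featurelist = otf.collectlookups(rawdata,"liga","latn","dflt")
-- featurelist is now { "s_l_0_g_s" } and featuremap["s_l_0_g_s"] == true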
-
--- readers
-
local function check_otf(forced,specification,suffix,what)
- local name = specification.name
- if forced then
- name = file.addsuffix(name,suffix,true)
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" then
- specification.filename = fullname
- specification.format = what
- return read_from_otf(specification)
- end
+ local name=specification.name
+ if forced then
+ name=file.addsuffix(name,suffix,true)
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" then
+ specification.filename=fullname
+ specification.format=what
+ return read_from_otf(specification)
+ end
end
-
local function opentypereader(specification,suffix,what)
- local forced = specification.forced or ""
- if forced == "otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
- return check_otf(true,specification,forced,"truetype")
- else
- return check_otf(false,specification,suffix,what)
- end
-end
-
-readers.opentype = opentypereader
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
+ local forced=specification.forced or ""
+ if forced=="otf" then
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
+ return check_otf(true,specification,forced,"truetype")
+ else
+ return check_otf(false,specification,suffix,what)
+ end
+end
+readers.opentype=opentypereader
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
+function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
+function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
-
--- this will be overloaded
-
function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X")
- end
- elseif n then
- local num, nam = { }, { }
- for i=2,#n do -- first is likely a key
- local ni = n[i]
- num[i] = format("U+%05X",ni)
- nam[i] = descriptions[ni].name or "?"
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
else
- return "?"
- end
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=2,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
end
-
local function cref(feature,lookupname)
- if lookupname then
- return format("feature %s, lookup %s",feature,lookupname)
- else
- return format("feature %s",feature)
- end
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookupname)
+ else
+ return formatters["feature %a"](feature)
+ end
end
-
local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%s => %s)",cref(feature,lookupname),
- gref(descriptions,unicode),replacement and gref(descriptions,replacement) or "-",
- tostring(value),comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
end
-
local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %s",cref(feature,lookupname),
- gref(descriptions,unicode),gref(descriptions,substitution))
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
end
-
local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %s",cref(feature,lookupname),
- gref(descriptions,ligature),gref(descriptions,unicode))
-end
-
-local basemethods = { }
-local basemethod = "<unset>"
-
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
-local basehash, basehashes, applied = { }, 1, { }
-
+local basehash,basehashes,applied={},1,{}
local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.properties.fullname,hash)
- applied = { }
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
end
-
local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
+ applied[#applied+1]=feature.."="..tostring(value)
end
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extent the
--- current builder is more stupid but I don't worry that much about it
--- as ligatures are rather predictable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantees are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve, for instance,
--- pseudo names like hyphen_hyphen to endash, so in practice we end
--- up with a bit too many definitions but the overhead is negligible.
---
--- Todo: if changed[first] or changed[second] then ... end
-
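-- A hedged sketch of the chain the builder below aims for, using the
-- f + f + i case from the table above (f = 102, ff = 64256, i = 105,
-- ffi = 64259); when a font lacks an ff glyph, an intermediate private
-- slot is created instead of 64256.
local characters = {
  [102]   = { ligatures = { [102] = { char = 64256 } } }, -- f  + f -> ff
  [64256] = { ligatures = { [105] = { char = 64259 } } }, -- ff + i -> ffi
  [64259] = { },
}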
-local trace = false
-
+local trace=false
local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace then
- print("BUILDING",concat(lookupdata," "),unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace then
- print(" DEFINING",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not unicodes[secondname] then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = unicodes[secondname]
- if not target then
- break
- end
- end
- if trace then
- print("CODES",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace then
- for k, v in next, characters do
- if v.ligatures then table.print(v,k) end
- end
- end
- tfmdata.resources.private = private
- end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace then
+ trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace then
+ trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not unicodes[secondname] then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace then
+ trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace then
+ for k,v in next,characters do
+ if v.ligatures then table.print(v,k) end
+ end
+ end
+ tfmdata.resources.private=private
+ end
end
-
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local unicodes = resources.unicodes
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = lookupdata[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookupname,description,unicode)
- end
- end
- end
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local unicodes=resources.unicodes
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
- end
- end
- end
- end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
- end
-
- finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- else
- sharedkerns[rawkerns] = false
- end
- end
+ elseif defaultalt=="last" then
+ replacement=lookupdata[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ else
+ sharedkerns[rawkerns]=false
end
+ end
end
+ end
end
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
}
-
local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
end
-
local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
end
+ end
end
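-- A minimal usage sketch for make_1 with an invented ligature tree for an
-- initial f (102): keys are following unicodes, a "ligature" entry holds
-- the resulting glyph (ff = 64256, ffi = 64259).
local present = { }
local tree = {
  [102] = {
    ligature = 64256,                -- f + f     -> ff
    [105]    = { ligature = 64259 }, -- f + f + i -> ffi
  },
}
make_1(present,tree,"ctx_102")
-- present["ctx_102_102"]     == 64256
-- present["ctx_102_102_105"] == 64259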
-
local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %s: U+%05X (%s), preceding U+%05X (%s)",lookupname,v,utfchar(v),preceding,utfchar(preceding))
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ d[#d+1]=v
end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
end
+ end
end
-
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
- end
-
- end
-
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+ end
end
-
local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
- local traceindeed = trace_baseinit and trace_kerns
-
- -- check out this sharedkerns trickery
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local lookuphash=resources.lookuphash
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+      report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
end
-
local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
+ nodeinitializers.features(tfmdata)
end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
}
-
-basemethod = "independent"
-
+basemethod="independent"
local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script
- local language = properties.language
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- if basesubstitutions then
- for feature, data in next, basesubstitutions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositions then
- for feature, data in next, basepositions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.properties.fullname or "?")
- end
- end
+ if true then
+ local t=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions then
+ for feature,data in next,basesubstitutions do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+   if basepositionings then
+    for feature,data in next,basepositionings do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ end
+ end
end
-
registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
- base = featuresinitializer,
- }
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
}
-
--- independent : collect lookups independently (takes more runtime ... neglectable)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
end)
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['node-inj']={
+ version=1.001,
+ comment="companion to node-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available). Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help.
-
-local next = next
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local unset_attribute = node.unset_attribute
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local copy_node = node.copy
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local markbase = attributes.private('markbase')
-local markmark = attributes.private('markmark')
-local markdone = attributes.private('markdone')
-local cursbase = attributes.private('cursbase')
-local curscurs = attributes.private('curscurs')
-local cursdone = attributes.private('cursdone')
-local kernpair = attributes.private('kernpair')
-local ligacomp = attributes.private('ligacomp')
-local fontkern = attributes.private('fontkern')
-
-if context then
-
- local kern = nodes.pool.register(newkern())
-
- set_attribute(kern,fontkern,1) -- we can have several, attributes are shared
-
- newkern = function(k)
- local c = copy_node(kern)
- c.kern = k
- return c
- end
-
-end
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a somewhat unofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
+local next=next
+local utfchar=utf.char
+local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("nodes","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local nodepool=nodes.pool
+local newkern=nodepool.kern
+local traverse_id=node.traverse_id
+local insert_node_before=node.insert_before
+local insert_node_after=node.insert_after
+local a_kernpair=attributes.private('kernpair')
+local a_ligacomp=attributes.private('ligacomp')
+local a_markbase=attributes.private('markbase')
+local a_markmark=attributes.private('markmark')
+local a_markdone=attributes.private('markdone')
+local a_cursbase=attributes.private('cursbase')
+local a_curscurs=attributes.private('curscurs')
+local a_cursdone=attributes.private('cursdone')
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local cursives={}
+local marks={}
+local kerns={}
function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- set_attribute(start,cursbase,bound)
- set_attribute(nxt,curscurs,bound)
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
+ local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ local bound=#cursives+1
+ start[a_cursbase]=bound
+ nxt[a_curscurs]=bound
+ cursives[bound]={ rlmode,dx,dy,ws,wn }
+ return dx,dy,bound
end
-
function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = has_attribute(current,kernpair)
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local bound=current[a_kernpair]
+ if bound then
+ local kb=kerns[bound]
+ kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+ else
+ bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
end
- return x, y, w, h -- no bound
+ return x,y,w,h,bound
+ end
+ return x,y,w,h
end
-
function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, dx }
- return dx, bound
+ local dx=factor*x
+ if dx~=0 then
+ local bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,dx }
+ return dx,bound
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma,index)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
+ local index=1
+ if bound then
+ local mb=marks[bound]
+ if mb then
+ index=#mb+1
+ mb[index]={ dx,dy,rlmode }
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ return dx,dy,bound
else
- return 0, 0
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
end
+ end
+ index=index or 1
+ bound=#marks+1
+ base[a_markbase]=bound
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ marks[bound]={ [index]={ dx,dy,rlmode } }
+ return dx,dy,bound
end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = has_attribute(base,markbase) -- fails again we should pass it
-local index = 1
- if bound then
- local mb = marks[bound]
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local kp=n[a_kernpair]
+ local mb=n[a_markbase]
+ local mm=n[a_markmark]
+ local md=n[a_markdone]
+ local cb=n[a_cursbase]
+ local cc=n[a_curscurs]
+ local char=n.char
+   report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k=kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m=marks[mm]
if mb then
- -- if not index then index = #mb + 1 end
-index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- return dx, dy, bound
+ local m=m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
else
- report_injections("possible problem, U+%05X is base mark without data (id: %s)",base.char,bound)
+ m=m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c=cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- set_attribute(base,markbase,bound)
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- marks[bound] = { [index] = { dx, dy, rlmode } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+ end
+ report_injections("end run")
end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = has_attribute(n,kernpair)
- local mb = has_attribute(n,markbase)
- local mm = has_attribute(n,markmark)
- local md = has_attribute(n,markdone)
- local cb = has_attribute(n,cursbase)
- local cc = has_attribute(n,curscurs)
- report_injections("char U+%05X, font=%s",n.char,n.font)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+function injections.handler(head,where,keep)
+ local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
+ if has_kerns then
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
+ local dy=y-h
+ if dy~=0 then
+ ky[n]=dy
+ end
+ if w~=0 or x~=0 then
+ wx[n]=kk
+ end
+ rl[n]=kk[1]
+ end
+ end
+ end
+ end
+ else
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid>0 then
+ local cx={}
+ if has_kerns and next(ky) then
+ for n,k in next,ky do
+ n.yoffset=k
+ end
+ end
+ if has_cursives then
+ local p_cursbase,p=nil,nil
+ local t,d,maxt={},{},0
+ for i=1,nofvalid do
+ local n=valid[i]
+ if not mk[n] then
+ local n_cursbase=n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs=n[a_curscurs]
+ if p_cursbase==n_curscurs then
+ local c=cursives[n_curscurs]
+ if c then
+ local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
+ if rlmode>=0 then
+ dx=dx-ws
+ else
+ dx=dx+wn
+ end
+ if dx~=0 then
+ cx[n]=dx
+ rl[n]=rlmode
+ end
+ dy=-dy
+ maxt=maxt+1
+ t[maxt]=p
+ d[maxt]=dy
else
- report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
- end
- end
- if mb then
- report_injections(" markbase: bound=%s",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ maxt=0
+ end
+ end
+ elseif maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ti.yoffset+ny
+ end
+ maxt=0
+ end
+ if not n_cursbase and maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ p_cursbase,p=n_cursbase,n
+ end
+ end
+ if maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ if not keep then
+ cursives={}
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p=valid[i]
+ local p_markbase=p[a_markbase]
+ if p_markbase then
+ local mrks=marks[p_markbase]
+ local nofmarks=#mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark=n[a_markmark]
+ if p_markbase==n_markmark then
+ local index=n[a_markdone] or 1
+ local d=mrks[index]
+ if d then
+ local rlmode=d[3]
+ if rlmode and rlmode>=0 then
+ local k=wx[p]
+ if k then
+ n.xoffset=p.xoffset-p.width+d[1]-k[2]
else
- report_injections(" markmark: bound=%s, missing index",mm)
+ n.xoffset=p.xoffset-p.width+d[1]
end
- else
- m = m[1]
- report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m and m[1] or "?",m and m[2] or "?")
+ else
+ local k=wx[p]
+ if k then
+ n.xoffset=p.xoffset-d[1]-k[2]
+ else
+ n.xoffset=p.xoffset-d[1]
+ end
+ end
+ if mk[p] then
+ n.yoffset=p.yoffset+d[2]
+ else
+ n.yoffset=n.yoffset+p.yoffset+d[2]
+ end
+ if nofmarks==1 then
+ break
+ else
+ nofmarks=nofmarks-1
+ end
end
+ else
+ end
end
- if cb then
- report_injections(" cursbase: bound=%s",cb)
+ end
+ end
+ if not keep then
+ marks={}
+ end
+ end
+ if next(wx) then
+ for n,k in next,wx do
+ local x,w=k[2] or 0,k[4]
+ if w then
+ local rl=k[1]
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ elseif x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ if next(cx) then
+ for n,k in next,cx do
+ if k~=0 then
+ local rln=rl[n]
+ if rln and rln<0 then
+ insert_node_before(head,n,newkern(-k))
+ else
+ insert_node_before(head,n,newkern(k))
end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ elseif not keep then
+ kerns,cursives,marks={},{},{}
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
+ if y and y~=0 then
+ n.yoffset=y
+ end
+ if w then
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
end
+ end
end
+ end
end
- report_injections("end run")
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ else
+ end
+ return head,false
end
--- todo: reuse tables (i.e. no collection), but there will be extra fields anyway
--- todo: check for attribute
+end -- closure
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
+do -- begin closure to overcome local limits and interference
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
+if not modules then modules={} end modules ['font-ota']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local math_code=nodecodes.math
+local disc_code=nodecodes.disc
+local traverse_id=node.traverse_id
+local traverse_node_list=node.traverse
+local end_of_math=node.end_of_math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+}
+analyzers.states=states
+analyzers.features=features
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font then
+ done=true
+ local char=current.char
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ current[a_state]=s_mark
+ elseif n==0 then
+ first,last,n=current,current,1
+ current[a_state]=s_init
else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = has_attribute(n,cursbase)
- if p_cursbase then
- local n_curscurs = has_attribute(n,curscurs)
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = has_attribute(p,markbase)
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = has_attribute(n,markmark)
- if p_markbase == n_markmark then
- local index = has_attribute(n,markdone) or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- if rlmode and rlmode >= 0 then
- -- new per 2010-10-06, width adapted per 2010-02-03
- -- we used to negate the width of marks because in tfm
- -- that makes sense but we no longer do that so as a
- -- consequence the sign of p.width was changed (we need
- -- to keep an eye on it as we don't have that many fonts
- -- that enter this branch .. I'm still not sure if this
- -- one is right
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset + p.width + d[1] - k[2]
- else
- -- n.xoffset = p.xoffset + p.width + d[1]
- -- lucida U\char"032F (default+mark)
- n.xoffset = p.xoffset - p.width + d[1] -- 01-05-2011
- end
- else
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset - d[1] - k[2]
- else
- n.xoffset = p.xoffset - d[1]
- end
- end
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x, w = k[2] or 0, k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- else
- -- no tracing needed
+ last,n=current,n+1
+ current[a_state]=s_medi
+ end
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+   current[a_state]=s_medi
+ last=current
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ return head,done
+end
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
end
- return head, false
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of (for instance) character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,
+ [zwj]=true,
+}
+local arab_warned={}
+local function warning(current,what)
+ local char=current.char
+ if not arab_warned[char] then
+  logs.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
end
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ else
+ local lc=last.char
+ if medial[lc] or final[lc] then
+ last[a_state]=s_fina
+ else
+ warning(last,"fina")
+ last[a_state]=s_error
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ first=nil
+ end
+ return first,last
+end
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ done=true
+ local char=current.char
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ current[a_state]=s_mark
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ current[a_state]=s_isol
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ current[a_state]=s_init
+ first,last=first or current,current
+ elseif final[char] then
+ current[a_state]=s_isol
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ current[a_state]=s_medi
+ elseif final[char] then
+ if not last[a_state]==s_init then
+ last[a_state]=s_medi
+ end
+ current[a_state]=s_fina
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ current[a_state]=s_rest
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
}
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- specially made test fonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because there is so much possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite a few optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-them in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
-
--- action handler chainproc chainmore comment
---
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos and contextsub and class stuff
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local random = math.random
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+local concat,insert,remove=table.concat,table.insert,table.remove
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
registertracker("otf.actions","otf.replacements,otf.positions")
registertracker("otf.injections","nodes.injections")
-
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local flush_node_list = node.flush_list
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local glyphcodes = nodes.glyphcodes
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
-
-local dir_code = whatcodes.dir
-local localpar_code = whatcodes.localpar
-
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is bases in KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- only some.
-
-local state = privateattribute('state')
-local markbase = privateattribute('markbase')
-local markmark = privateattribute('markmark')
-local markdone = privateattribute('markdone') -- assigned at the injection end
-local cursbase = privateattribute('cursbase')
-local curscurs = privateattribute('curscurs')
-local cursdone = privateattribute('cursdone')
-local kernpair = privateattribute('kernpair')
-local ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and
--- less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- we cannot optimize with "start = first_glyph(head)" because then we don't
--- know which rlmode we're in, which messes up cursive handling later on
---
--- head is always a whatsit so we can safely assume that head is not changed
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
+local insert_node_after=node.insert_after
+local delete_node=nodes.delete
+local copy_node=node.copy
+local find_node_tail=node.tail or node.slide
+local flush_node_list=node.flush_list
+local end_of_math=node.end_of_math
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_markbase=privateattribute('markbase')
+local a_markmark=privateattribute('markmark')
+local a_markdone=privateattribute('markdone')
+local a_cursbase=privateattribute('cursbase')
+local a_curscurs=privateattribute('curscurs')
+local a_cursdone=privateattribute('cursdone')
+local a_kernpair=privateattribute('kernpair')
+local a_ligacomp=privateattribute('ligacomp')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local markonce=true
+local cursonce=true
+local kernonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
end
-
local function logwarning(...)
- report_direct(...)
-end
-
-local function gref(n)
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X",n)
- end
- elseif not n then
- return "<error in tracing>"
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
else
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = format("U+%05X",ni)
- nam[i] = di and di.name or "?"
- end
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- end
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
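-- A rough illustration of the two call forms of gref, which only builds strings
-- for the trace channels (the exact output depends on the %U and % t formatters,
-- so the right-hand sides are approximations):
--
--   gref(0x0066)             -- single codepoint -> "U+66 (f)"
--   gref { 0x0066, 0x0069 }  -- glyph sequence   -> "U+66 U+69 (f i)"
--
-- the glyph names come from the descriptions table of the current font.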
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index)
- if index then
- return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
- elseif chainlookupname then
- return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
- elseif chainname then
- return format("feature %s, chain %s",kind,chainname)
- else
- return format("feature %s",kind)
- end
-end
-
local function pref(kind,lookupname)
- return format("feature %s, lookup %s",kind,lookupname)
-end
-
--- we can assume that languages that use marks are not hyphenated
--- we can also assume that at most one discretionary is present
-
-local function markstoligature(kind,lookupname,start,stop,char)
- local n = copy_node(start)
- local keep = start
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, ligature_code, start
- return keep
-end
-
-local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
- if start == stop then
- start.char = char
- return start
- elseif discfound then
- -- print("start->stop",nodes.tosequence(start,stop))
- local components = start.components
- if components then
- flush_node_list(components)
- start.components = nil
- end
- local lignode = copy_node(start)
- lignode.font = start.font
- lignode.char = char
- lignode.subtype = ligature_code
- local next = stop.next
- local prev = start.prev
- stop.next = nil
- start.prev = nil
- lignode.components = start
- -- print("lignode",nodes.tosequence(lignode))
- -- print("components",nodes.tosequence(lignode.components))
- prev.next = lignode
- if next then
- next.prev = lignode
- end
- lignode.next = next
- lignode.prev = prev
- -- print("start->end",nodes.tosequence(start))
- return lignode
- else
- -- start is the ligature
- local deletemarks = markflag ~= "mark"
- local n = copy_node(start)
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev = nil
- stop.next = nil
- current.char = char
- current.subtype = ligature_code
- current.components = start
- local head = current
- -- this is messy ... we should get rid of the components eventually
- local i = 0 -- is index of base
- while start do
- if not marks[start.char] then
- i = i + 1
- elseif not deletemarks then -- quite fishy
- set_attribute(start,ligacomp,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- head, current = insert_node_after(head,current,copy_node(start))
- end
- start = start.next
- end
- start = current.next
- while start and start.id == glyph_code do
- if marks[start.char] then
- set_attribute(start,ligacomp,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- else
- break
- end
- start = start.next
- end
- --
- -- we do need components in funny kerning mode but maybe I can reconstruct them better then,
- -- as we do have the font components info available; removing components makes the
- -- previous code much simpler
- --
- -- flush_node_list(head.components)
- return head
- end
-end
-
-function handlers.gsub_single(start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return start, true
-end
-
-local function get_alternative_glyph(start,alternatives,value)
- -- needs checking: (global value, brrr)
- local choice = nil
- local n = #alternatives
- local char = start.char
- --
- if value == "random" then
- local r = random(1,n)
- value, choice = format("random, choice %s",r), alternatives[r]
- elseif value == "first" then
- value, choice = format("first, choice %s",1), alternatives[1]
- elseif value == "last" then
- value, choice = format("last, choice %s",n), alternatives[n]
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
- elseif defaultalt == "last" then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("no %s variants, ignoring",value), false
- end
- elseif value == 0 then
- value, choice = format("choice %s (no change)",value), char
- elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("choice %s",value), alternatives[value]
- end
- end
- return choice
-end
-
-local function multiple_glyphs(start,multiple) -- marks ?
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
- local n = copy_node(start)
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return start, false
- end
-end
-
-function handlers.gsub_alternate(start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice = get_alternative_glyph(start,alternative,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(char),gref(choice),choice)
- end
- start.char = choice
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+local function copy_glyph(g)
+ local components=g.components
+ if components then
+ g.components=nil
+ local n=copy_node(g)
+ g.components=components
+ return n
+ else
+ return copy_node(g)
+ end
+end
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and start.char==char then
+ return head,start
+ else
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ return head,base
+ end
+end
+local function getcomponentindex(start)
+ if start.id~=glyph_code then
+ return 0
+ elseif start.subtype==ligature_code then
+ local i=0
+ local components=start.components
+ while components do
+ i=i+getcomponentindex(components)
+ components=components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if start==stop and start.char==char then
+ start.char=char
+ return head,start
+ end
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=start.char
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ end
+ start=start.next
+ end
+ local start=components
+ while start and start.id==glyph_code do
+ local char=start.char
+ if marks[char] then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start=start.next
+ end
+ end
+ return head,base
+end
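-- An illustrative walk-through of the component bookkeeping in toligature
-- (assumed input, not taken from a real font): for a sequence
-- base1 mark1 base2 mark2 that becomes one ligature, getcomponentindex counts
-- the non-mark components, so the kept marks end up with
--
--   mark1[a_ligacomp] = 1   -- belongs to the first component
--   mark2[a_ligacomp] = 2   -- belongs to the second component
--
-- and gpos_mark2ligature later uses these indices to pick the right baselig anchor.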
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+end
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+ return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt=="last" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",pref(kind,lookupname),tostring(value),gref(char))
- end
- end
- return start, true
-end
-
-function handlers.gsub_multiple(start,kind,lookupname,multiple)
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
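-- An illustrative mapping of feature values to choices in get_alternative_glyph,
-- assuming three alternatives A1, A2, A3 for the current glyph:
--
--   value "first"  -> A1            value "last" -> A3
--   value "random" -> one of A1..A3
--   value 2        -> A2            value 0      -> keep the original character
--   value 7 (> 3)  -> fallback controlled by otf.defaultnodealternate
--
-- the second return value (the comment string) is only built when tracing is on.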
+local function multiple_glyphs(head,start,multiple)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ start.char=multiple[1]
+ if nofmultiples>1 then
+ local sn=start.next
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ n.char=multiple[k]
+ n.next=sn
+ n.prev=start
+ if sn then
+ sn.prev=n
+ end
+ start.next=n
+ start=n
+ end
+ end
+ return head,start,true
+ else
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- return multiple_glyphs(start,multiple)
-end
-
-function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 and s.font == currentfont then
- local lg = ligature[s.char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = markstoligature(kind,lookupname,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = markstoligature(kind,lookupname,start,stop,lig)
- end
- return start, true
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- discfound = true
- s = s.next
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- end
- return start, true
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head,start,false
+ end
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple)
+end
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop,discfound=start.next,nil,false
+ local startchar=start.char
+ if marks[startchar] then
+ while s do
+ local id=s.id
+ if id==glyph_code and s.font==currentfont and s.subtype<256 then
+ local lg=ligature[s.char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=s.id
+ if id==glyph_code and s.subtype<256 then
+ if s.font==currentfont then
+ local char=s.char
+ if skipmark and marks[char] then
+ s=s.next
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
else
- -- ok, goto next lookup
- end
- end
- end
- return start, false
-end
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure whether it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
-
-function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
+ break
end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=s.next
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head,start,true
+ else
+ end
+ end
+ end
+ return head,start,false
+end
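-- A sketch of the data gsub_ligature walks (codepoints are illustrative): the
-- lookup is a nested table keyed by follow-up characters, with the resulting
-- glyph in a .ligature field; starting from an initial "f" it could look like
--
--   local ligature = {
--     [0x0069] = { ligature = 0xFB01 },                 -- f i   -> fi
--     [0x006C] = { ligature = 0xFB02 },                 -- f l   -> fl
--     [0x0066] = { [0x0069] = { ligature = 0xFB03 } },  -- f f i -> ffi
--   }
--
-- the loop above descends through this table and only rewrites the node list
-- (via toligature or markstoligature) once a .ligature entry has been reached.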
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ return head,start,true
+ end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
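-- A sketch of the anchor data consulted in gpos_mark2base (field names follow
-- the code, the coordinates and the anchor class name are made up):
--
--   descriptions[basechar].anchors = {
--     basechar = { ["Anchor-1"] = { 520, 1430 } },   -- attachment point on the base
--   }
--   markanchors = { ["Anchor-1"] = { 60, 1410 } }    -- attachment point on the mark
--
-- setmark turns the difference between the two points, scaled by the font factor
-- and adapted to the current rlmode, into the displacement of the mark node.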
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
end
- local index = has_attribute(start,ligacomp)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = has_attribute(base,ligacomp)
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
end
- end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start,true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start,false
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done=false
+ local startchar=start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+end
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar=start.char
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
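-- A sketch of the kerns data the pair handler below expects (the shape follows
-- the krn[2]/krn[3] accesses, the concrete values and shapes are assumptions):
--
--   kerns = {
--     [0x0041] = -96,                                            -- plain kern, handed to setkern
--     [0x0056] = { false, <first value record>, <second value record> },
--   }
--
-- a plain number becomes a kern between the two glyphs, a table goes through
-- setpair so each glyph can get its own placement and advance correction.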
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ local snext=start.next
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ local krn=kerns[nextchar]
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
+local chainmores={}
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
end
-
-function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=start.char
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
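-- A note on reversesub above: it implements the reverse chaining single
-- substitution (as the original comment puts it, meant mostly for dealing with
-- Urdu). The replacements come with the contextual rule itself, so the data is
-- just a character-to-character map; an illustrative shape with made-up codepoints:
--
--   local replacements = { [0x0061] = 0x0062 }   -- replace "a" by "b" when the context matches
--
-- which is why, unlike the other chainprocs, no lookuphash subtable is consulted here.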
+local function delete_till_stop(start,stop,ignoremarks)
+ local n=1
+ if start==stop then
+ elseif ignoremarks then
+ repeat
+ local next=start.next
+ if not marks[next.char] then
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ end
+ n=n+1
+ until next==stop
+ else
+ repeat
+ local next=start.next
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ n=n+1
+ until next==stop
+ end
+ return n
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return start, done
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char=replacement
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_single=chainprocs.gsub_single
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ delete_till_stop(start,stop)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+ if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements)
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_multiple=chainprocs.gsub_multiple
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
end
- return start, false
- end
-end
-
-function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return start, false
-end
-
-function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return start, false
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_alternate=chainprocs.gsub_alternate
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
+ local s=start.next
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=s.id
+ if id==disc_code then
+ s=s.next
+ discfound=true
+ else
+ local schar=s.char
+ if skipmark and marks[schar] then
+ s=s.next
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=s.next
+ end
else
- local krn = kerns[nextchar]
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head,start,false,0
+end
+chainmores.gsub_ligature=chainprocs.gsub_ligature
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
break
- end
- end
- return start, done
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-them. It's not that complex to handle.</p>
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
-function chainmores.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
-function chainprocs.reversesub(start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return start, true
- else
- return start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one, but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we don't really do the replacement here yet unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as little as needed, but that would also make the code even messier.</p>
---ldx]]--
-
-local function delete_till_stop(start,stop,ignoremarks) -- keeps start
- local n = 1
- if start == stop then
- -- done
- elseif ignoremarks then
- repeat -- start x x m x x stop => start m
- local next = start.next
- if not marks[next.char] then
- delete_node(start,next)
- end
- n = n + 1
- until next == stop
- else -- start x x x stop => start
- repeat
- local next = start.next
- delete_node(start,next)
- n = n + 1
- until next == stop
- end
- return n
-end
-
---[[ldx--
-<p>Here we replace start by a single variant. First we delete the rest of the
-match.</p>
---ldx]]--
-
-function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
- end
- while current do
- if current.id == glyph_code then
- local currentchar = current.char
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
+ end
else
- replacement = replacement[currentchar]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
---ldx]]--
-
-function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop) -- we could pass ignoremarks as #3 ..
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements then
if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
- return multiple_glyphs(start,replacements)
+ end
end
- end
- return start, false
-end
-
--- function chainmores.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_multiple not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
---[[ldx--
-<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice = get_alternative_glyph(current,alternatives,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(char),gref(choice),choice)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),tostring(value),gref(char))
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
- end
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
--- function chainmores.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_alternate not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
-function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s = start.next
- local discfound = false
- local last = stop
- local nofreplacements = 0
- local skipmark = currentlookup.flags[1]
- while s do
- local id = s.id
- if id == disc_code then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if skipmark and marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
- return start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
+ end
end
- end
- return start, false, 0
-end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
-function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return start, false
- end
- end
- end
- -- todo: like marks a ligatures hash
- local index = has_attribute(start,ligacomp)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
---~ local alreadydone = markonce and has_attribute(start,markmark)
---~ if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while (base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp)) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = has_attribute(base,ligacomp)
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
---~ elseif trace_marks and trace_details then
---~ logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
---~ end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
--- ! ! ! untested ! ! !
-
-function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
+ done=true
break
+ end
end
+ end
end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
- return start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return start, false
+ break
+ end
end
- end
- return start, false
-end
-
-function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return start, false
-end
-
--- when machines become faster i will make a shared function
-
-function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
--- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return start, done
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=start.next
+ if snext then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
end
+ break
+ end
end
+ return head,start,done
+ end
end
- return start, false
+ end
+ return head,start,false
end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant then can be activated but with more tracing
-
local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
else
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
-local function normal_handle_contextchain(start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph_code and current.subtype<256 and current.font == currentfont and seq[1][current.char]
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local n = f + 1
- last = last.next
- while n <= l do
- if last then
- local id = last.id
- if id == glyph_code then
- if last.subtype<256 and last.font == currentfont then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- last = last.next
- else
- match = false
- break
- end
- else
- match = false
- break
- end
+ local n=f+1
+ last=last.next
+ while n<=l do
+ if last then
+ local id=last.id
+ if id==glyph_code then
+ if last.font==currentfont and last.subtype<256 then
+ local char=last.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=last.next
+ elseif seq[n][char] then
+ if n<l then
+ last=last.next
+ end
+ n=n+1
+ else
+ match=false
+ break
end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
end
- end
- -- before
- if match and f > 1 then
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.subtype<256 and prev.font == currentfont then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = prev.prev
-                        elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
- end
+ elseif id==disc_code then
+ last=last.next
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=start.prev
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=prev.id
+ if id==glyph_code then
+ if prev.font==currentfont and prev.subtype<256 then
+ local char=prev.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
end
- elseif f == 2 then
- match = seq[1][32]
+ else
+ match=false
+ break
+ end
else
- for n=f-1,1 do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=prev.prev
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
end
- -- after
- if match and s > l then
- local current = last and last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph_code then
- if current.subtype<256 and current.font == currentfont then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+ for n=f-1,1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and last.next
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=current.id
+ if id==glyph_code then
+ if current.font==currentfont and current.subtype<256 then
+ local char=current.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
end
- elseif s-l == 1 then
- match = seq[s][32]
+ else
+ match=false
+ break
+ end
else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=current.next
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ repeat
+ if skipped then
+ while true do
+ local char=start.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=start.next
+ else
+ break
+ end
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ local cp=chainlookup and chainmores[chainlookup.type]
+ if cp then
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ else
+ i=i+1
end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
- start, done = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- repeat
- if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
- end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname] -- can be false (n matches, <n replacement)
- local cp = chainlookup and chainmores[chainlookup.type]
- if cp then
- local ok, n
- start, ok, n = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- else
- -- is valid
- -- logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup and chainlookup.type or "?")
- i = i + 1
- end
- start = start.next
- until i > nofchainlookups
- end
+ if start then
+ start=start.next
else
- local replacements = ck[7]
- if replacements then
- start, done = chainprocs.reversesub(start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
end
+ until i>nofchainlookups
end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=true
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
end
- return start, done
+ end
+ return head,start,done
end
-
--- Because we want to keep this elsewhere (an because speed is less an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
}
-
function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler '%s'",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
end
-
otf.setcontextchain()
-
-local missing = { } -- we only report once
-
+local missing={}
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
end
-
-local logwarning = report_process
-
+local logwarning=report_process
local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
end)
-
--- fonts.hashes.lookups = lookuphashes
-
-local special_attributes = {
- init = 1,
- medi = 2,
- fina = 3,
- isol = 4
-}
-
+local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, special_attributes[kind] or false, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false
+ local features=sequence.features
+ if features then
+ for kind,scripts in next,features do
+ local valid=enabled[kind]
+ if valid then
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ end
+ return false
+end
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
end
-
-function otf.dataset(tfmdata,sequences,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = { }
- rs[language] = rl
- setmetatableindex(rl, function(t,k)
- local v = enabled and initialize(sequences[k],script,language,enabled)
- t[k] = v
- return v
- end)
- end
- return rl
+ end
+ return rl
end
-
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
-
local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
-
- currentfont = font
- rlmode = 0
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,sequences,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
-    -- much speed gain (experiments showed that it did not make much sense) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- for s=1,#sequences do
- local dataset = datasets[s]
- if dataset then
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- if featurevalue then
- local sequence = sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- start, success = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
-                                                -- sequence can be dropped
- local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
end
+ end
else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
+ report_missing_cache(typ,lookupname)
end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- end
- end
- return head, done
-end
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local action = {
-
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
+ end
else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
-
- -- we cannot free the entries in the descriptions as sometimes we access
-    -- them directly (for instance anchors) ... selectively freeing doesn't save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
end
- end
- end
-
- end
-
- end
-
-end
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
-
-local valid = {
- coverage = { chainsub = true, chainpos = true, contextsub = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %s",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file
- -- but then less sharing takes place so best not do that without a lot
- -- of profiling so let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for %s",lookupname)
- end
- end
- end
+ start=start.next
+ end
+ end
+ end
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ return head,done
end
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-starttime,tfmdata.properties.fullname or "?")
- end
- end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['luatex-fonts-chr'] = {
- version = 1.001,
- comment = "companion to luatex-fonts.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
end
-
-characters = characters or { }
-characters.categories = {
- [0x0300]="mn",
- [0x0301]="mn",
- [0x0302]="mn",
- [0x0303]="mn",
- [0x0304]="mn",
- [0x0305]="mn",
- [0x0306]="mn",
- [0x0307]="mn",
- [0x0308]="mn",
- [0x0309]="mn",
- [0x030A]="mn",
- [0x030B]="mn",
- [0x030C]="mn",
- [0x030D]="mn",
- [0x030E]="mn",
- [0x030F]="mn",
- [0x0310]="mn",
- [0x0311]="mn",
- [0x0312]="mn",
- [0x0313]="mn",
- [0x0314]="mn",
- [0x0315]="mn",
- [0x0316]="mn",
- [0x0317]="mn",
- [0x0318]="mn",
- [0x0319]="mn",
- [0x031A]="mn",
- [0x031B]="mn",
- [0x031C]="mn",
- [0x031D]="mn",
- [0x031E]="mn",
- [0x031F]="mn",
- [0x0320]="mn",
- [0x0321]="mn",
- [0x0322]="mn",
- [0x0323]="mn",
- [0x0324]="mn",
- [0x0325]="mn",
- [0x0326]="mn",
- [0x0327]="mn",
- [0x0328]="mn",
- [0x0329]="mn",
- [0x032A]="mn",
- [0x032B]="mn",
- [0x032C]="mn",
- [0x032D]="mn",
- [0x032E]="mn",
- [0x032F]="mn",
- [0x0330]="mn",
- [0x0331]="mn",
- [0x0332]="mn",
- [0x0333]="mn",
- [0x0334]="mn",
- [0x0335]="mn",
- [0x0336]="mn",
- [0x0337]="mn",
- [0x0338]="mn",
- [0x0339]="mn",
- [0x033A]="mn",
- [0x033B]="mn",
- [0x033C]="mn",
- [0x033D]="mn",
- [0x033E]="mn",
- [0x033F]="mn",
- [0x0340]="mn",
- [0x0341]="mn",
- [0x0342]="mn",
- [0x0343]="mn",
- [0x0344]="mn",
- [0x0345]="mn",
- [0x0346]="mn",
- [0x0347]="mn",
- [0x0348]="mn",
- [0x0349]="mn",
- [0x034A]="mn",
- [0x034B]="mn",
- [0x034C]="mn",
- [0x034D]="mn",
- [0x034E]="mn",
- [0x034F]="mn",
- [0x0350]="mn",
- [0x0351]="mn",
- [0x0352]="mn",
- [0x0353]="mn",
- [0x0354]="mn",
- [0x0355]="mn",
- [0x0356]="mn",
- [0x0357]="mn",
- [0x0358]="mn",
- [0x0359]="mn",
- [0x035A]="mn",
- [0x035B]="mn",
- [0x035C]="mn",
- [0x035D]="mn",
- [0x035E]="mn",
- [0x035F]="mn",
- [0x0360]="mn",
- [0x0361]="mn",
- [0x0362]="mn",
- [0x0363]="mn",
- [0x0364]="mn",
- [0x0365]="mn",
- [0x0366]="mn",
- [0x0367]="mn",
- [0x0368]="mn",
- [0x0369]="mn",
- [0x036A]="mn",
- [0x036B]="mn",
- [0x036C]="mn",
- [0x036D]="mn",
- [0x036E]="mn",
- [0x036F]="mn",
- [0x0483]="mn",
- [0x0484]="mn",
- [0x0485]="mn",
- [0x0486]="mn",
- [0x0591]="mn",
- [0x0592]="mn",
- [0x0593]="mn",
- [0x0594]="mn",
- [0x0595]="mn",
- [0x0596]="mn",
- [0x0597]="mn",
- [0x0598]="mn",
- [0x0599]="mn",
- [0x059A]="mn",
- [0x059B]="mn",
- [0x059C]="mn",
- [0x059D]="mn",
- [0x059E]="mn",
- [0x059F]="mn",
- [0x05A0]="mn",
- [0x05A1]="mn",
- [0x05A2]="mn",
- [0x05A3]="mn",
- [0x05A4]="mn",
- [0x05A5]="mn",
- [0x05A6]="mn",
- [0x05A7]="mn",
- [0x05A8]="mn",
- [0x05A9]="mn",
- [0x05AA]="mn",
- [0x05AB]="mn",
- [0x05AC]="mn",
- [0x05AD]="mn",
- [0x05AE]="mn",
- [0x05AF]="mn",
- [0x05B0]="mn",
- [0x05B1]="mn",
- [0x05B2]="mn",
- [0x05B3]="mn",
- [0x05B4]="mn",
- [0x05B5]="mn",
- [0x05B6]="mn",
- [0x05B7]="mn",
- [0x05B8]="mn",
- [0x05B9]="mn",
- [0x05BA]="mn",
- [0x05BB]="mn",
- [0x05BC]="mn",
- [0x05BD]="mn",
- [0x05BF]="mn",
- [0x05C1]="mn",
- [0x05C2]="mn",
- [0x05C4]="mn",
- [0x05C5]="mn",
- [0x05C7]="mn",
- [0x0610]="mn",
- [0x0611]="mn",
- [0x0612]="mn",
- [0x0613]="mn",
- [0x0614]="mn",
- [0x0615]="mn",
- [0x064B]="mn",
- [0x064C]="mn",
- [0x064D]="mn",
- [0x064E]="mn",
- [0x064F]="mn",
- [0x0650]="mn",
- [0x0651]="mn",
- [0x0652]="mn",
- [0x0653]="mn",
- [0x0654]="mn",
- [0x0655]="mn",
- [0x0656]="mn",
- [0x0657]="mn",
- [0x0658]="mn",
- [0x0659]="mn",
- [0x065A]="mn",
- [0x065B]="mn",
- [0x065C]="mn",
- [0x065D]="mn",
- [0x065E]="mn",
- [0x0670]="mn",
- [0x06D6]="mn",
- [0x06D7]="mn",
- [0x06D8]="mn",
- [0x06D9]="mn",
- [0x06DA]="mn",
- [0x06DB]="mn",
- [0x06DC]="mn",
- [0x06DF]="mn",
- [0x06E0]="mn",
- [0x06E1]="mn",
- [0x06E2]="mn",
- [0x06E3]="mn",
- [0x06E4]="mn",
- [0x06E7]="mn",
- [0x06E8]="mn",
- [0x06EA]="mn",
- [0x06EB]="mn",
- [0x06EC]="mn",
- [0x06ED]="mn",
- [0x0711]="mn",
- [0x0730]="mn",
- [0x0731]="mn",
- [0x0732]="mn",
- [0x0733]="mn",
- [0x0734]="mn",
- [0x0735]="mn",
- [0x0736]="mn",
- [0x0737]="mn",
- [0x0738]="mn",
- [0x0739]="mn",
- [0x073A]="mn",
- [0x073B]="mn",
- [0x073C]="mn",
- [0x073D]="mn",
- [0x073E]="mn",
- [0x073F]="mn",
- [0x0740]="mn",
- [0x0741]="mn",
- [0x0742]="mn",
- [0x0743]="mn",
- [0x0744]="mn",
- [0x0745]="mn",
- [0x0746]="mn",
- [0x0747]="mn",
- [0x0748]="mn",
- [0x0749]="mn",
- [0x074A]="mn",
- [0x07A6]="mn",
- [0x07A7]="mn",
- [0x07A8]="mn",
- [0x07A9]="mn",
- [0x07AA]="mn",
- [0x07AB]="mn",
- [0x07AC]="mn",
- [0x07AD]="mn",
- [0x07AE]="mn",
- [0x07AF]="mn",
- [0x07B0]="mn",
- [0x07EB]="mn",
- [0x07EC]="mn",
- [0x07ED]="mn",
- [0x07EE]="mn",
- [0x07EF]="mn",
- [0x07F0]="mn",
- [0x07F1]="mn",
- [0x07F2]="mn",
- [0x07F3]="mn",
- [0x0901]="mn",
- [0x0902]="mn",
- [0x093C]="mn",
- [0x0941]="mn",
- [0x0942]="mn",
- [0x0943]="mn",
- [0x0944]="mn",
- [0x0945]="mn",
- [0x0946]="mn",
- [0x0947]="mn",
- [0x0948]="mn",
- [0x094D]="mn",
- [0x0951]="mn",
- [0x0952]="mn",
- [0x0953]="mn",
- [0x0954]="mn",
- [0x0962]="mn",
- [0x0963]="mn",
- [0x0981]="mn",
- [0x09BC]="mn",
- [0x09C1]="mn",
- [0x09C2]="mn",
- [0x09C3]="mn",
- [0x09C4]="mn",
- [0x09CD]="mn",
- [0x09E2]="mn",
- [0x09E3]="mn",
- [0x0A01]="mn",
- [0x0A02]="mn",
- [0x0A3C]="mn",
- [0x0A41]="mn",
- [0x0A42]="mn",
- [0x0A47]="mn",
- [0x0A48]="mn",
- [0x0A4B]="mn",
- [0x0A4C]="mn",
- [0x0A4D]="mn",
- [0x0A70]="mn",
- [0x0A71]="mn",
- [0x0A81]="mn",
- [0x0A82]="mn",
- [0x0ABC]="mn",
- [0x0AC1]="mn",
- [0x0AC2]="mn",
- [0x0AC3]="mn",
- [0x0AC4]="mn",
- [0x0AC5]="mn",
- [0x0AC7]="mn",
- [0x0AC8]="mn",
- [0x0ACD]="mn",
- [0x0AE2]="mn",
- [0x0AE3]="mn",
- [0x0B01]="mn",
- [0x0B3C]="mn",
- [0x0B3F]="mn",
- [0x0B41]="mn",
- [0x0B42]="mn",
- [0x0B43]="mn",
- [0x0B4D]="mn",
- [0x0B56]="mn",
- [0x0B82]="mn",
- [0x0BC0]="mn",
- [0x0BCD]="mn",
- [0x0C3E]="mn",
- [0x0C3F]="mn",
- [0x0C40]="mn",
- [0x0C46]="mn",
- [0x0C47]="mn",
- [0x0C48]="mn",
- [0x0C4A]="mn",
- [0x0C4B]="mn",
- [0x0C4C]="mn",
- [0x0C4D]="mn",
- [0x0C55]="mn",
- [0x0C56]="mn",
- [0x0CBC]="mn",
- [0x0CBF]="mn",
- [0x0CC6]="mn",
- [0x0CCC]="mn",
- [0x0CCD]="mn",
- [0x0CE2]="mn",
- [0x0CE3]="mn",
- [0x0D41]="mn",
- [0x0D42]="mn",
- [0x0D43]="mn",
- [0x0D4D]="mn",
- [0x0DCA]="mn",
- [0x0DD2]="mn",
- [0x0DD3]="mn",
- [0x0DD4]="mn",
- [0x0DD6]="mn",
- [0x0E31]="mn",
- [0x0E34]="mn",
- [0x0E35]="mn",
- [0x0E36]="mn",
- [0x0E37]="mn",
- [0x0E38]="mn",
- [0x0E39]="mn",
- [0x0E3A]="mn",
- [0x0E47]="mn",
- [0x0E48]="mn",
- [0x0E49]="mn",
- [0x0E4A]="mn",
- [0x0E4B]="mn",
- [0x0E4C]="mn",
- [0x0E4D]="mn",
- [0x0E4E]="mn",
- [0x0EB1]="mn",
- [0x0EB4]="mn",
- [0x0EB5]="mn",
- [0x0EB6]="mn",
- [0x0EB7]="mn",
- [0x0EB8]="mn",
- [0x0EB9]="mn",
- [0x0EBB]="mn",
- [0x0EBC]="mn",
- [0x0EC8]="mn",
- [0x0EC9]="mn",
- [0x0ECA]="mn",
- [0x0ECB]="mn",
- [0x0ECC]="mn",
- [0x0ECD]="mn",
- [0x0F18]="mn",
- [0x0F19]="mn",
- [0x0F35]="mn",
- [0x0F37]="mn",
- [0x0F39]="mn",
- [0x0F71]="mn",
- [0x0F72]="mn",
- [0x0F73]="mn",
- [0x0F74]="mn",
- [0x0F75]="mn",
- [0x0F76]="mn",
- [0x0F77]="mn",
- [0x0F78]="mn",
- [0x0F79]="mn",
- [0x0F7A]="mn",
- [0x0F7B]="mn",
- [0x0F7C]="mn",
- [0x0F7D]="mn",
- [0x0F7E]="mn",
- [0x0F80]="mn",
- [0x0F81]="mn",
- [0x0F82]="mn",
- [0x0F83]="mn",
- [0x0F84]="mn",
- [0x0F86]="mn",
- [0x0F87]="mn",
- [0x0F90]="mn",
- [0x0F91]="mn",
- [0x0F92]="mn",
- [0x0F93]="mn",
- [0x0F94]="mn",
- [0x0F95]="mn",
- [0x0F96]="mn",
- [0x0F97]="mn",
- [0x0F99]="mn",
- [0x0F9A]="mn",
- [0x0F9B]="mn",
- [0x0F9C]="mn",
- [0x0F9D]="mn",
- [0x0F9E]="mn",
- [0x0F9F]="mn",
- [0x0FA0]="mn",
- [0x0FA1]="mn",
- [0x0FA2]="mn",
- [0x0FA3]="mn",
- [0x0FA4]="mn",
- [0x0FA5]="mn",
- [0x0FA6]="mn",
- [0x0FA7]="mn",
- [0x0FA8]="mn",
- [0x0FA9]="mn",
- [0x0FAA]="mn",
- [0x0FAB]="mn",
- [0x0FAC]="mn",
- [0x0FAD]="mn",
- [0x0FAE]="mn",
- [0x0FAF]="mn",
- [0x0FB0]="mn",
- [0x0FB1]="mn",
- [0x0FB2]="mn",
- [0x0FB3]="mn",
- [0x0FB4]="mn",
- [0x0FB5]="mn",
- [0x0FB6]="mn",
- [0x0FB7]="mn",
- [0x0FB8]="mn",
- [0x0FB9]="mn",
- [0x0FBA]="mn",
- [0x0FBB]="mn",
- [0x0FBC]="mn",
- [0x0FC6]="mn",
- [0x102D]="mn",
- [0x102E]="mn",
- [0x102F]="mn",
- [0x1030]="mn",
- [0x1032]="mn",
- [0x1036]="mn",
- [0x1037]="mn",
- [0x1039]="mn",
- [0x1058]="mn",
- [0x1059]="mn",
- [0x135F]="mn",
- [0x1712]="mn",
- [0x1713]="mn",
- [0x1714]="mn",
- [0x1732]="mn",
- [0x1733]="mn",
- [0x1734]="mn",
- [0x1752]="mn",
- [0x1753]="mn",
- [0x1772]="mn",
- [0x1773]="mn",
- [0x17B7]="mn",
- [0x17B8]="mn",
- [0x17B9]="mn",
- [0x17BA]="mn",
- [0x17BB]="mn",
- [0x17BC]="mn",
- [0x17BD]="mn",
- [0x17C6]="mn",
- [0x17C9]="mn",
- [0x17CA]="mn",
- [0x17CB]="mn",
- [0x17CC]="mn",
- [0x17CD]="mn",
- [0x17CE]="mn",
- [0x17CF]="mn",
- [0x17D0]="mn",
- [0x17D1]="mn",
- [0x17D2]="mn",
- [0x17D3]="mn",
- [0x17DD]="mn",
- [0x180B]="mn",
- [0x180C]="mn",
- [0x180D]="mn",
- [0x18A9]="mn",
- [0x1920]="mn",
- [0x1921]="mn",
- [0x1922]="mn",
- [0x1927]="mn",
- [0x1928]="mn",
- [0x1932]="mn",
- [0x1939]="mn",
- [0x193A]="mn",
- [0x193B]="mn",
- [0x1A17]="mn",
- [0x1A18]="mn",
- [0x1B00]="mn",
- [0x1B01]="mn",
- [0x1B02]="mn",
- [0x1B03]="mn",
- [0x1B34]="mn",
- [0x1B36]="mn",
- [0x1B37]="mn",
- [0x1B38]="mn",
- [0x1B39]="mn",
- [0x1B3A]="mn",
- [0x1B3C]="mn",
- [0x1B42]="mn",
- [0x1B6B]="mn",
- [0x1B6C]="mn",
- [0x1B6D]="mn",
- [0x1B6E]="mn",
- [0x1B6F]="mn",
- [0x1B70]="mn",
- [0x1B71]="mn",
- [0x1B72]="mn",
- [0x1B73]="mn",
- [0x1DC0]="mn",
- [0x1DC1]="mn",
- [0x1DC2]="mn",
- [0x1DC3]="mn",
- [0x1DC4]="mn",
- [0x1DC5]="mn",
- [0x1DC6]="mn",
- [0x1DC7]="mn",
- [0x1DC8]="mn",
- [0x1DC9]="mn",
- [0x1DCA]="mn",
- [0x1DFE]="mn",
- [0x1DFF]="mn",
- [0x20D0]="mn",
- [0x20D1]="mn",
- [0x20D2]="mn",
- [0x20D3]="mn",
- [0x20D4]="mn",
- [0x20D5]="mn",
- [0x20D6]="mn",
- [0x20D7]="mn",
- [0x20D8]="mn",
- [0x20D9]="mn",
- [0x20DA]="mn",
- [0x20DB]="mn",
- [0x20DC]="mn",
- [0x20E1]="mn",
- [0x20E5]="mn",
- [0x20E6]="mn",
- [0x20E7]="mn",
- [0x20E8]="mn",
- [0x20E9]="mn",
- [0x20EA]="mn",
- [0x20EB]="mn",
- [0x20EC]="mn",
- [0x20ED]="mn",
- [0x20EE]="mn",
- [0x20EF]="mn",
- [0x302A]="mn",
- [0x302B]="mn",
- [0x302C]="mn",
- [0x302D]="mn",
- [0x302E]="mn",
- [0x302F]="mn",
- [0x3099]="mn",
- [0x309A]="mn",
- [0xA806]="mn",
- [0xA80B]="mn",
- [0xA825]="mn",
- [0xA826]="mn",
- [0xFB1E]="mn",
- [0xFE00]="mn",
- [0xFE01]="mn",
- [0xFE02]="mn",
- [0xFE03]="mn",
- [0xFE04]="mn",
- [0xFE05]="mn",
- [0xFE06]="mn",
- [0xFE07]="mn",
- [0xFE08]="mn",
- [0xFE09]="mn",
- [0xFE0A]="mn",
- [0xFE0B]="mn",
- [0xFE0C]="mn",
- [0xFE0D]="mn",
- [0xFE0E]="mn",
- [0xFE0F]="mn",
- [0xFE20]="mn",
- [0xFE21]="mn",
- [0xFE22]="mn",
- [0xFE23]="mn",
-}
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['font-ota'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
}
-
--- this might become scrp-*.lua
-
-local type, tostring, match, format, concat = type, tostring, string.match, string.format, table.concat
-
-if not trackers then trackers = { register = function() end } end
-
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local fonts, nodes, node = fonts, nodes, node
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-
-local analyzers = fonts.analyzers
-local initializers = allocate()
-local methods = allocate()
-
-analyzers.initializers = initializers
-analyzers.methods = methods
-analyzers.useunicodemarks = false
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-
-local fontdata = fonts.hashes.identifiers
-local state = attributes.private('state')
-local categories = characters and characters.categories or { } -- sorry, only in context
-
-local tracers = nodes.tracers
-local colortracers = tracers and tracers.colors
-local setnodecolor = colortracers and colortracers.set or function() end
-local resetnodecolor = colortracers and colortracers.reset or function() end
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
--- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-
-local state = attributes.private('state')
-
-function analyzers.setstate(head,font)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- while current do
- local id = current.id
- if id == glyph_code and current.font == font then
- local char = current.char
- local d = descriptions[char]
- if d then
- if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
- done = true
- set_attribute(current,state,5) -- mark
- elseif n == 0 then
- first, last, n = current, current, 1
- set_attribute(current,state,1) -- init
- else
- last, n = current, n+1
- set_attribute(current,state,2) -- medi
- end
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc_code then
- -- always in the middle
- set_attribute(current,state,2) -- medi
- last = current
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- current = current.next
- end
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- return head, done
-end
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features, which are
--- somewhat slower; and we need a chain of them
-
-local function analyzeinitializer(tfmdata,value) -- attr
- local script, language = otf.scriptandlanguage(tfmdata) -- attr
- local action = initializers[script]
- if action then
- if type(action) == "function" then
- return action(tfmdata,value)
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
end
+ end
end
-
-local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if action then
- if type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- end
- return head, false
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
end
-
registerotffeature {
- name = "analyze",
- description = "analysis of (for instance) character classes",
- default = true,
- initializers = {
- node = analyzeinitializer,
- },
- processors = {
- position = 1,
- node = analyzeprocessor,
- }
-}
-
--- latin
-
-methods.latn = analyzers.setstate
-
--- this info eventually will go into char-def and we will have a state
--- table for generic then
-
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isol = {
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true, [zwnj] = true,
-}
-
-local isol_fina = {
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
- [0xFEF9] = true, [0xFEFB] = true,
-
- -- syriac
-
- [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
- [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
- [0x072C] = true, [0x071E] = true,
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
}
-
-local isol_fina_medi_init = {
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0640] = true, [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true,
-
- -- syriac
-
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
- [0x0729] = true, [0x072B] = true,
-
- -- also
-
- [zwj] = true,
-}
-
-local arab_warned = { }
-
-
--- todo: gref
-
-local function warning(current,what)
- local char = current.char
- if not arab_warned[char] then
- log.report("analyze","arab: character %s (U+%05X) has no %s class", char, char, what)
- arab_warned[char] = true
- end
-end
-
-function methods.nocolor(head,font,attr)
- for n in traverse_id(glyph_code,head) do
- if not font or n.font == font then
- resetnodecolor(n)
- end
- end
- return head, true
-end
-
-local function finish(first,last)
- if last then
- if first == last then
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
- end
- else
- local lc = last.char
- if isol_fina_medi_init[lc] or isol_fina[lc] then -- why isol here ?
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- set_attribute(last,state,3) -- fina
- if trace_analyzing then setnodecolor(last,"font:fina") end
- else
- warning(last,"fina")
- set_attribute(last,state,0) -- error
- if trace_analyzing then resetnodecolor(last) end
- end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are either both set or both nil, so we never come here
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
- end
- first = nil
- end
- return first, last
-end
-
-function methods.arab(head,font,attr) -- maybe make a special version with no trace
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local marks = tfmdata.resources.marks
- local first, last, current, done = nil, nil, head, false
- while current do
- if current.id == glyph_code and current.subtype<256 and current.font == font and not has_attribute(current,state) then
- done = true
- local char = current.char
- if marks[char] or (useunicodemarks and categories[char] == "mn") then
- set_attribute(current,state,5) -- mark
- if trace_analyzing then setnodecolor(current,"font:mark") end
- elseif isol[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
- first, last = nil, nil
- elseif not first then
- if isol_fina_medi_init[char] then
- set_attribute(current,state,1) -- init
- if trace_analyzing then setnodecolor(current,"font:init") end
- first, last = first or current, current
- elseif isol_fina[char] then
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
- first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
- end
- elseif isol_fina_medi_init[char] then
- first, last = first or current, current
- set_attribute(current,state,2) -- medi
- if trace_analyzing then setnodecolor(current,"font:medi") end
- elseif isol_fina[char] then
- if not has_attribute(last,state,1) then
- -- tricky, we need to check what last may be !
- set_attribute(last,state,2) -- medi
- if trace_analyzing then setnodecolor(last,"font:medi") end
- end
- set_attribute(current,state,3) -- fina
- if trace_analyzing then setnodecolor(current,"font:fina") end
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then
- if trace_analyzing then setnodecolor(current,"font:rest") end
- first, last = finish(first,last)
- else --no
- first, last = finish(first,last)
- end
- else
- first, last = finish(first,last)
- end
- current = current.next
- end
- first, last = finish(first,last)
- return head, done
-end
-
-methods.syrc = methods.arab
-
-directives.register("otf.analyze.useunicodemarks",function(v)
- analyzers.useunicodemarks = v
-end)
+otf.handlers=handlers
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-lua'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-fonts.formats.lua = "lua"
-
+local fonts=fonts
+fonts.formats.lua="lua"
function fonts.readers.lua(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local fullname = resolvers.findfile(fullname) or ""
- if fullname ~= "" then
- local loader = loadfile(fullname)
- loader = loader and loader()
- return loader and loader(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-def'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local concat = table.concat
-local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local allocate = utilities.storage.allocate
-
-local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
-local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
-
-trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
-
-local report_defining = logs.reporter("fonts","defining")
-
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
-
-local fonts = fonts
-local fontdata = fonts.hashes.identifiers
-local readers = fonts.readers
-local definers = fonts.definers
-local specifiers = fonts.specifiers
-local constructors = fonts.constructors
-
-readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
-
-local variants = allocate()
-specifiers.variants = variants
-
-definers.methods = definers.methods or { }
-
-local internalized = allocate() -- internal tex numbers (private)
-
-
-local loadedfonts = constructors.loadedfonts
-local designsizes = constructors.designsizes
-
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some references
-to other tables so we would end up with lots of catches.</p>
---ldx]]--
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function splits the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
-
--- beware, we discard additional specs
---
--- method:name method:name(sub) method:name(sub)*spec method:name*spec
--- name name(sub) name(sub)*spec name*spec
--- name@spec*oeps
-
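As an illustration of the specification syntax above, a minimal sketch of what the splitter defined below yields; the font name, the 10pt size and the availability of the * separator (registered elsewhere via definers.registersplit) are assumptions, not values taken from a real run:

local spec = definers.analyze("file:lmroman10-regular*default", 10*65536)
-- spec.lookup == "file"              -- forced lookup prefix
-- spec.name   == "lmroman10-regular" -- font or file name
-- spec.method == "*"                 -- separator introducing the feature part
-- spec.detail == "default"           -- the feature specification
-- spec.size   == 655360              -- 10pt in scaled points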
-local splitter, splitspecifiers = nil, ""
-
-local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
-
-local left = P("(")
-local right = P(")")
-local colon = P(":")
-local space = P(" ")
-
-definers.defaultlookup = "file"
-
-local prefixpattern = P(false)
-
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
local function addspecifier(symbol)
- splitspecifiers = splitspecifiers .. symbol
- local method = S(splitspecifiers)
- local lookup = C(prefixpattern) * colon
- local sub = left * C(P(1-left-right-method)^1) * right
- local specification = C(method) * C(P(1)^1)
- local name = C((1-sub-specification)^1)
- splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
end
-
local function addlookup(str,default)
- prefixpattern = prefixpattern + P(str)
+ prefixpattern=prefixpattern+P(str)
end
-
-definers.addlookup = addlookup
-
+definers.addlookup=addlookup
addlookup("file")
addlookup("name")
addlookup("spec")
-
local function getspecification(str)
- return lpegmatch(splitter,str)
+ return lpegmatch(splitter,str)
end
-
-definers.getspecification = getspecification
-
+definers.getspecification=getspecification
function definers.registersplit(symbol,action,verbosename)
- addspecifier(symbol)
- variants[symbol] = action
- if verbosename then
- variants[verbosename] = action
- end
-end
-
-function definers.makespecification(specification,lookup,name,sub,method,detail,size)
- size = size or 655360
- if trace_defining then
- report_defining("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
- specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
- (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
- end
- if not lookup or lookup == "" then
- lookup = definers.defaultlookup
- end
- local t = {
- lookup = lookup, -- forced type
- specification = specification, -- full specification
- size = size, -- size in scaled points or -1000*n
- name = name, -- font or filename
- sub = sub, -- subfont (eg in ttc)
- method = method, -- specification method
- detail = detail, -- specification
- resolved = "", -- resolved font name
- forced = "", -- forced loader
- features = { }, -- preprocessed features
- }
- return t
-end
-
-function definers.analyze(specification, size)
- -- can be optimized with locals
- local lookup, name, sub, method, detail = getspecification(specification or "")
- return definers.makespecification(specification, lookup, name, sub, method, detail, size)
-end
-
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
-
-definers.resolvers = definers.resolvers or { }
-local resolvers = definers.resolvers
-
--- todo: reporter
-
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
function resolvers.file(specification)
- local suffix = file.suffix(specification.name)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(specification.name)
- end
+ local name=resolvefile(specification.name)
+ local suffix=file.suffix(name)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(name)
+ else
+ specification.name=name
+ end
end
-
function resolvers.name(specification)
- local resolve = fonts.names.resolve
- if resolve then
- local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- local suffix = file.suffix(resolved)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(resolved)
- else
- specification.name = resolved
- end
- end
- else
- resolvers.file(specification)
- end
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
end
-
function resolvers.spec(specification)
- local resolvespec = fonts.names.resolvespec
- if resolvespec then
- local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- specification.forced = file.extname(resolved)
- specification.name = file.removesuffix(resolved)
- end
- else
- resolvers.name(specification)
- end
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=file.suffix(resolved)
+ specification.name=file.removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
end
-
function definers.resolve(specification)
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ else
+ specification.forced=specification.forced
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
end
-
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specification (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded font. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
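In code, the loading path described above boils down to something like this minimal sketch (the file name is an invented example; error handling and tracing are left out):

local spec = definers.resolve(definers.analyze("file:texgyrepagella-regular", 655360))
local tfmdata = definers.loadfont(spec) -- walks readers.sequence: otf, ttf, afm, tfm, lua
if tfmdata then
    -- the instance is now cached in constructors.loadedfonts under
    -- constructors.hashinstance(spec), so the next request with the same hash reuses it
end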
-
--- very experimental:
-
function definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
- end
- end
- end
- return tfmdata
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
end
-
--- function definers.applypostprocessors(tfmdata)
--- return tfmdata
--- end
-
local function checkembedding(tfmdata)
- local properties = tfmdata.properties
- local embedding
- if directive_embedall then
- embedding = "full"
- elseif properties and properties.filename and constructors.dontembed[properties.filename] then
- embedding = "no"
- else
- embedding = "subset"
- end
- if properties then
- properties.embedding = embedding
- else
- tfmdata.properties = { embedding = embedding }
- end
- tfmdata.embedding = embedding
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
end
-
function definers.loadfont(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = loadedfonts[hash] -- hashes by size !
- if not tfmdata then
- local forced = specification.forced or ""
- if forced ~= "" then
- local reader = readers[lower(forced)]
- tfmdata = reader and reader(specification)
- if not tfmdata then
- report_defining("forced type %s of %s not found",forced,specification.name)
- end
- else
- local sequence = readers.sequence -- can be overloaded so only a shortcut here
- for s=1,#sequence do
- local reader = sequence[s]
- if readers[reader] then -- we skip readers that are not loaded
- if trace_defining then
- report_defining("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
- end
- tfmdata = readers[reader](specification)
- if tfmdata then
- break
- else
- specification.filename = nil
- end
- end
- end
- end
- if tfmdata then
- tfmdata = definers.applypostprocessors(tfmdata)
- checkembedding(tfmdata) -- todo: general postprocessor
- loadedfonts[hash] = tfmdata
- designsizes[specification.hash] = tfmdata.parameters.designsize
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
end
+ end
end
- if not tfmdata then
- report_defining("font with asked name '%s' is not found using lookup '%s'",specification.name,specification.lookup)
- end
- return tfmdata
-end
-
---[[ldx--
-<p>For virtual fonts we need a slightly different approach:</p>
---ldx]]--
-
-function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
- local specification = definers.analyze(name,size)
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local id = definers.registered(hash)
- if not id then
- local tfmdata = definers.loadfont(specification)
- if tfmdata then
- tfmdata.properties.hash = hash
- id = font.define(tfmdata)
- definers.register(tfmdata,id)
- else
- id = 0 -- signal
- end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+local function checkvirtual(tfmdata)
+ local fonts=tfmdata.fonts
+ local selfid=font.nextid()
+ if fonts and #fonts>0 then
+ for i=1,#fonts do
+ if fonts[i][2]==0 then
+ fonts[i][2]=selfid
+ end
+ end
+ else
+ tfmdata.fonts={ "id",selfid }
+ end
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ checkvirtual(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
end
- return fontdata[id], id
+ end
+ return fontdata[id],id
end
-
---[[ldx--
-<p>So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to in the callback was
-introduced later in the development.</p>
---ldx]]--
-
-local lastdefined = nil -- we don't want this one to end up in s-tra-02
-local internalized = { }
-
-function definers.current() -- or maybe current
- return lastdefined
+local lastdefined=nil
+local internalized={}
+function definers.current()
+ return lastdefined
end
-
function definers.registered(hash)
- local id = internalized[hash]
- return id, id and fontdata[id]
+ local id=internalized[hash]
+ return id,id and fontdata[id]
end
-
function definers.register(tfmdata,id)
- if tfmdata and id then
- local hash = tfmdata.properties.hash
- if not internalized[hash] then
- internalized[hash] = id
- if trace_defining then
- report_defining("registering font, id: %s, hash: %s",id or "?",hash or "?")
- end
- fontdata[id] = tfmdata
- end
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
end
-end
-
-function definers.read(specification,size,id) -- id can be optional, name can already be table
- statistics.starttiming(fonts)
- if type(specification) == "string" then
- specification = definers.analyze(specification,size)
- end
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = definers.registered(hash) -- id
+ else
+ tfmdata=definers.loadfont(specification)
if tfmdata then
- if trace_defining then
- report_defining("already hashed: %s",hash)
- end
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
else
- tfmdata = definers.loadfont(specification) -- can be overloaded
- if tfmdata then
- if trace_defining then
- report_defining("loaded and hashed: %s",hash)
- end
- --~ constructors.checkvirtualid(tfmdata) -- interferes
- tfmdata.properties.hash = hash
- if id then
- definers.register(tfmdata,id)
- end
- else
- if trace_defining then
- report_defining("not loaded and hashed: %s",hash)
- end
- end
- end
- lastdefined = tfmdata or id -- todo ! ! ! ! !
- if not tfmdata then -- or id?
- report_defining( "unknown font %s, loading aborted",specification.name)
- elseif trace_defining and type(tfmdata) == "table" then
- local properties = tfmdata.properties or { }
- local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
- properties.format or "unknown",
- id or "?",
- properties.name or "?",
- parameters.size or "default",
- properties.encodingbytes or "?",
- properties.encodingname or "unicode",
- properties.fullname or "?",
- file.basename(properties.filename or "?"))
- end
- statistics.stoptiming(fonts)
- return tfmdata
-end
-
---[[ldx--
-<p>We overload the <l n='tfm'/> reader.</p>
---ldx]]--
-
-callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
end -- closure
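The definer above boils down to one pattern: hash the scaled specification, look the hash up, and only run a reader when it is new. A minimal standalone sketch of that pattern, with illustrative names that are not part of this patch:

local loadedfonts = { }

local function hashinstance(specification)        -- stand-in for constructors.hashinstance
    return specification.name .. " @ " .. tostring(specification.size)
end

local function loadfont(specification, reader)    -- reader: any function that returns tfmdata
    local hash    = hashinstance(specification)
    local tfmdata = loadedfonts[hash]
    if not tfmdata then
        tfmdata = reader(specification)
        if tfmdata then
            loadedfonts[hash] = tfmdata            -- cache the scaled instance
        end
    end
    return tfmdata
end

-- a second request for the same name and size returns the cached table
local a = loadfont({ name = "demo", size = 655360 }, function(s) return { loaded = s.name } end)
local b = loadfont({ name = "demo", size = 655360 }, function(s) return { loaded = s.name } end)
assert(a == b)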
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-font-def'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-
--- A bit of tuning for definitions.
-
-fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
-
--- tricky: we sort of bypass the parser and directly feed all into
--- the sub parser
-
+local fonts=fonts
+fonts.constructors.namemode="specification"
function fonts.definers.getspecification(str)
- return "", str, "", ":", str
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
end
-
--- the generic name parser (different from context!)
-
-local list = { }
-
-local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!)
-local function isfile () list.lookup = 'file' end
-local function isname () list.lookup = 'name' end
-local function thename(s) list.name = s end
-local function issub (v) list.sub = v end
-local function iscrap (s) list.crap = string.lower(s) end
-local function iskey (k,v) list[k] = v end
-local function istrue (s) list[s] = true end
-local function isfalse(s) list[s] = false end
-
-local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
-
-local spaces = P(" ")^0
-local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
-local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces
-local filename_1 = P("file:")/isfile * (namespec/thename)
-local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]")
-local fontname_1 = P("name:")/isname * (namespec/thename)
-local fontname_2 = P(true)/issome * (namespec/thename)
-local sometext = (R("az","AZ","09") + S("+-."))^1
-local truevalue = P("+") * spaces * (sometext/istrue)
-local falsevalue = P("-") * spaces * (sometext/isfalse)
-local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
-local somevalue = sometext/istrue
-local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
-local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
-local options = P(":") * spaces * (P(";")^0 * option)^0
-
-local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0
-
-local function colonized(specification) -- xetex mode
- list = { }
- lpeg.match(pattern,specification.specification)
- list.crap = nil -- style not supported, maybe some day
- if list.name then
- specification.name = list.name
- list.name = nil
- end
- if list.lookup then
- specification.lookup = list.lookup
- list.lookup = nil
- end
- if list.sub then
- specification.sub = list.sub
- list.sub = nil
- end
- specification.features.normal = fonts.handlers.otf.features.normalize(list)
- return specification
-end
-
fonts.definers.registersplit(":",colonized,"cryptic")
-fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
-
+fonts.definers.registersplit("",colonized,"more cryptic")
function fonts.definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = string.gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
- end
- end
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
end
- return tfmdata
+ end
+ return tfmdata
end
end -- closure
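A small standalone sketch (texlua suffices to run it; the names and the sample string are made up) of the kind of request strings the colonized parser above accepts: an optional name:/file: prefix, +feature to enable, -feature to disable, and key=value assignments separated by semicolons. The real pattern additionally handles the [file] and (subfont) forms and the /style suffix.

local lpeg = require("lpeg")
local P, S, C = lpeg.P, lpeg.S, lpeg.C

local result  = { }
local word    = (1 - S(":;="))^1
local enable  = P("+") * (C(word) / function(k) result[k] = true  end)
local disable = P("-") * (C(word) / function(k) result[k] = false end)
local keyval  = (C(word) * P("=") * C(word)) / function(k,v) result[k] = v end
local option  = enable + disable + keyval
local options = option * (P(";") * option)^0
local name    = P("name:") * (C((1 - P(":"))^0) / function(n) result.name = n end)
local pattern = name * (P(":") * options)^-1

lpeg.match(pattern, "name:dejavuserif:+kern;-liga;extend=1.1")
-- result is now { name = "dejavuserif", kern = true, liga = false, extend = "1.1" }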
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-ext'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local otffeatures = fonts.constructors.newfeatures("otf")
-
--- A few generic extensions.
-
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
local function initializeitlc(tfmdata,value)
- if value then
- -- the magic 40 and it formula come from Dohyun Kim
- local parameters = tfmdata.parameters
- local italicangle = parameters.italicangle
- if italicangle and italicangle ~= 0 then
- local uwidth = (parameters.uwidth or 40)/2
- for unicode, d in next, tfmdata.descriptions do
- local it = d.boundingbox[3] - d.width + uwidth
- if it ~= 0 then
- d.italic = it
- end
- end
- tfmdata.properties.hasitalics = true
- end
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
end
+ end
end
-
otffeatures.register {
- name = "itlc",
- description = "italic correction",
- initializers = {
- base = initializeitlc,
- node = initializeitlc,
- }
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
}
-
--- slant and extend
-
local function initializeslant(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 1 then
- value = 1
- elseif value < -1 then
- value = -1
- end
- tfmdata.parameters.slantfactor = value
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
end
-
otffeatures.register {
- name = "slant",
- description = "slant glyphs",
- initializers = {
- base = initializeslant,
- node = initializeslant,
- }
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
}
-
local function initializeextend(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 10 then
- value = 10
- elseif value < -10 then
- value = -10
- end
- tfmdata.parameters.extendfactor = value
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
end
-
otffeatures.register {
- name = "extend",
- description = "scale glyphs horizontally",
- initializers = {
- base = initializeextend,
- node = initializeextend,
- }
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
}
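The itlc, slant and extend features above all use the same plumbing: one initializer function registered for both base and node mode. A hedged sketch of a user defined feature going through that mechanism; the feature name and the parameter it sets are invented here and not interpreted by anything, only the registration call is real.

local function initializesquash(tfmdata,value)
    tfmdata.parameters.squashfactor = tonumber(value) or 0 -- hypothetical parameter, for illustration only
end
otffeatures.register {
    name         = "squash",
    description  = "example of a user defined feature",
    initializers = {
        base = initializesquash,
        node = initializesquash,
    }
}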
-
--- expansion and protrusion
-
-fonts.protrusions = fonts.protrusions or { }
-fonts.protrusions.setups = fonts.protrusions.setups or { }
-
-local setups = fonts.protrusions.setups
-
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
local function initializeprotrusion(tfmdata,value)
- if value then
- local setup = setups[value]
- if setup then
- local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
- local emwidth = tfmdata.parameters.quad
- tfmdata.parameters.protrusion = {
- auto = true,
- }
- for i, chr in next, tfmdata.characters do
- local v, pl, pr = setup[i], nil, nil
- if v then
- pl, pr = v[1], v[2]
- end
- if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
- if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
- end
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
end
+ end
end
-
otffeatures.register {
- name = "protrusion",
- description = "shift characters into the left and or right margin",
- initializers = {
- base = initializeprotrusion,
- node = initializeprotrusion,
- }
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
}
-
-fonts.expansions = fonts.expansions or { }
-fonts.expansions.setups = fonts.expansions.setups or { }
-
-local setups = fonts.expansions.setups
-
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
local function initializeexpansion(tfmdata,value)
- if value then
- local setup = setups[value]
- if setup then
- local factor = setup.factor or 1
- tfmdata.parameters.expansion = {
- stretch = 10 * (setup.stretch or 0),
- shrink = 10 * (setup.shrink or 0),
- step = 10 * (setup.step or 0),
- auto = true,
- }
- for i, chr in next, tfmdata.characters do
- local v = setup[i]
- if v and v ~= 0 then
- chr.expansion_factor = v*factor
- else -- can be option
- chr.expansion_factor = factor
- end
- end
- end
- end
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
end
-
otffeatures.register {
- name = "expansion",
- description = "apply hz optimization",
- initializers = {
- base = initializeexpansion,
- node = initializeexpansion,
- }
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
}
-
--- left over
-
function fonts.loggers.onetimemessage() end
-
--- example vectors
-
-local byte = string.byte
-
-fonts.expansions.setups['default'] = {
-
- stretch = 2, shrink = 2, step = .5, factor = 1,
-
- [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
- [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
- [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
- [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
- [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
- [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
- [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
- [byte('w')] = 0.7, [byte('z')] = 0.7,
- [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
}
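A user expansion vector follows the same shape as the default one above and is selected with expansion=<setupname> in a font request; the setup name below is made up.

fonts.expansions.setups['poetry'] = {
    stretch = 3, shrink = 3, step = .5, factor = 1, -- multiplied by 10 in initializeexpansion
    [string.byte('m')] = 0.7,                       -- optional per character scaling, as in the default vector
}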
-
-fonts.protrusions.setups['default'] = {
-
- factor = 1, left = 1, right = 1,
-
- [0x002C] = { 0, 1 }, -- comma
- [0x002E] = { 0, 1 }, -- period
- [0x003A] = { 0, 1 }, -- colon
- [0x003B] = { 0, 1 }, -- semicolon
- [0x002D] = { 0, 1 }, -- hyphen
- [0x2013] = { 0, 0.50 }, -- endash
- [0x2014] = { 0, 0.33 }, -- emdash
- [0x3001] = { 0, 1 }, -- ideographic comma 、
- [0x3002] = { 0, 1 }, -- ideographic full stop 。
- [0x060C] = { 0, 1 }, -- arabic comma ،
- [0x061B] = { 0, 1 }, -- arabic semicolon ؛
- [0x06D4] = { 0, 1 }, -- arabic full stop ۔
-
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
}
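Protrusion setups work the same way: per character a pair { left, right } of fractions, scaled by the factor fields and selected with protrusion=<setupname>. A sketch with an invented setup name:

fonts.protrusions.setups['quotes'] = {
    factor = 1, left = 1, right = 1,
    [0x2018] = { 1, 0 }, -- left single quote may hang into the left margin
    [0x2019] = { 0, 1 }, -- right single quote may hang into the right margin
    [0x201C] = { 1, 0 }, -- left double quote
    [0x201D] = { 0, 1 }, -- right double quote
}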
-
--- normalizer
-
-fonts.handlers.otf.features.normalize = function(t)
- if t.rand then
- t.rand = "random"
- end
- return t
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
end
-
--- bonus
-
function fonts.helpers.nametoslot(name)
- local t = type(name)
- if t == "string" then
- local tfmdata = fonts.hashes.identifiers[currentfont()]
- local shared = tfmdata and tfmdata.shared
- local fntdata = shared and shared.rawdata
- return fntdata and fntdata.resources.unicodes[name]
- elseif t == "number" then
- return n
- end
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[currentfont()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+    return name
+ end
end
-
--- \font\test=file:somefont:reencode=mymessup
---
--- fonts.encodings.reencodings.mymessup = {
--- [109] = 110, -- m
--- [110] = 109, -- n
--- }
-
-fonts.encodings = fonts.encodings or { }
-local reencodings = { }
-fonts.encodings.reencodings = reencodings
-
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
local function specialreencode(tfmdata,value)
- -- we forget about kerns as we assume symbols and we
- -- could issue a message if ther are kerns but it's
- -- a hack anyway so we odn't care too much here
- local encoding = value and reencodings[value]
- if encoding then
- local temp = { }
- local char = tfmdata.characters
- for k, v in next, encoding do
- temp[k] = char[v]
- end
- for k, v in next, temp do
- char[k] = temp[k]
- end
- -- if we use the font otherwise luatex gets confused so
- -- we return an additional hash component for fullname
- return string.format("reencoded:%s",value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
end
+ return string.format("reencoded:%s",value)
+ end
end
-
local function reencode(tfmdata,value)
- tfmdata.postprocessors = tfmdata.postprocessors or { }
- table.insert(tfmdata.postprocessors,
- function(tfmdata)
- return specialreencode(tfmdata,value)
- end
- )
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
end
-
otffeatures.register {
- name = "reencode",
- description = "reencode characters",
- manipulators = {
- base = reencode,
- node = reencode,
- }
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
}
end -- closure
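The reencode feature expects its value to name a table in fonts.encodings.reencodings that maps slots onto other slots; the sketch below restates an example that used to live in the comments of this file, with an arbitrary table name that a request such as file:somefont:reencode=mymessup would pick up.

fonts.encodings.reencodings.mymessup = {
    [109] = 110, -- m becomes n
    [110] = 109, -- n becomes m
}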
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local nodes = nodes
-
--- Fonts: (might move to node-gef.lua)
-
-local traverse_id = node.traverse_id
-local glyph_code = nodes.nodecodes.glyph
-
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
function nodes.handlers.characters(head)
- local fontdata = fonts.hashes.identifiers
- if fontdata then
- local usedfonts, done, prevfont = { }, false, nil
- for n in traverse_id(glyph_code,head) do
- local font = n.font
- if font ~= prevfont then
- prevfont = font
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font] --
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- done = true
- end
- end
- end
- end
- end
- end
- if done then
- for font, processors in next, usedfonts do
- for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
- end
- end
- end
- return head, true
- else
- return head, false
- end
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts,done,prevfont={},false,nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ done=true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ local h,d=processors[i](head,font,0)
+ head,done=h or head,done or d
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
end
-
function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
+ head=nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head=node.ligaturing(head)
+ head=node.kerning(head)
+ return head
end
end -- closure
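A sketch of how a plain format typically plugs these handlers into the engine; the shipped luatex-fonts.lua (the next file in this diff) does the equivalent through its generic_context abstraction, and only when no_callbacks_yet is not set.

callback.register("define_font",          fonts.definers.read)
callback.register("pre_linebreak_filter", nodes.simple_font_handler)
callback.register("hpack_filter",         nodes.simple_font_handler)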
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
index f5045a4e32a..fc7b7899d10 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
@@ -6,23 +6,18 @@ if not modules then modules = { } end modules ['luatex-fonts'] = {
license = "see context related readme files"
}
--- The following code isolates the generic ConTeXt code from already
--- defined or to be defined namespaces. This is the reference loader
--- for plain, but the generic code is also used in luaotfload (which
--- is is a file meant for latex) and that is maintained by Khaled
+-- The following code isolates the generic context code from already defined or to be defined
+-- namespaces. This is the reference loader for plain, but the generic code is also used in
+-- luaotfload (which is a file meant for latex) and that used to be maintained by Khaled
-- Hosny. We do our best to keep the interface as clean as possible.
--
--- The code base is rather stable now, especially if you stay away from
--- the non generic code. All relevant data is organized in tables within
--- the main table of a font instance. There are a few places where in
--- context other code is plugged in, but this does not affect the core
--- code. Users can (given that their macro package provides this option)
--- access the font data (characters, descriptions, properties, parameters,
--- etc) of this main table.
---
--- Todo: all global namespaces in called modules will get local shortcuts.
+-- The code base is rather stable now, especially if you stay away from the non generic code. All
+-- relevant data is organized in tables within the main table of a font instance. There are a few
+-- places where in context other code is plugged in, but this does not affect the core code. Users
+-- can (given that their macro package provides this option) access the font data (characters,
+-- descriptions, properties, parameters, etc) of this main table.
-utf = unicode.utf8
+utf = utf or unicode.utf8
if not generic_context then
@@ -67,20 +62,21 @@ end
local whatever = generic_context.push_namespaces()
--- We keep track of load time by storing the current time. That
--- way we cannot be accused of slowing down loading too much.
+-- We keep track of load time by storing the current time. That way we cannot be accused
+-- of slowing down loading too much. Anyhow, there is no reason for this library to perform
+-- slower in any other package than it does in context.
--
--- Please don't update to this version without proper testing. It
--- might be that this version lags behind stock context and the only
--- formal release takes place around tex live code freeze.
+-- Please don't update to this version without proper testing. It might be that this version
+-- lags behind stock context and the only formal release takes place around tex live code
+-- freeze.
local starttime = os.gettimeofday()
--- As we don't use the ConTeXt file searching, we need to
--- initialize the kpse library. As the progname can be anything
--- we will temporary switch to the ConTeXt namespace if needed.
--- Just adding the context paths to the path specification is
--- somewhat faster
+-- As we don't use the context file searching, we need to initialize the kpse library. As the
+-- progname can be anything we will temporarily switch to the context namespace if needed. Just
+-- adding the context paths to the path specification is somewhat faster.
+--
+-- Now, with lua 5.2 being used we might create a special ENV for this.
-- kpse.set_program_name("luatex")
@@ -128,42 +124,55 @@ if fonts then
else
- -- The following helpers are a bit overkill but I don't want to
- -- mess up ConTeXt code for the sake of general generality. Around
- -- version 1.0 there will be an official api defined.
-
- loadmodule('l-string.lua')
- loadmodule('l-table.lua')
- loadmodule('l-lpeg.lua')
- loadmodule('l-boolean.lua')
- loadmodule('l-math.lua')
- loadmodule('l-file.lua')
- loadmodule('l-io.lua')
-
- -- The following modules contain code that is either not used
- -- at all outside ConTeXt or will fail when enabled due to
- -- lack of other modules.
-
- -- First we load a few helper modules. This is about the miminum
- -- needed to let the font modules do their work. Don't depend on
- -- their functions as we might strip them in future versions of
- -- this generic variant.
+ -- The following helpers are a bit overkill but I don't want to mess up context code for the
+ -- sake of general generality. Around version 1.0 there will be an official api defined.
+ --
+ -- So, I will strip these libraries and see what is really needed so that we don't have this
+ -- overhead in the generic modules. The next section is only there for the packager, so stick
+ -- to using luatex-fonts with luatex-fonts-merged.lua and forget about the rest. The following
+ -- list might change without prior notice (for instance because we shuffled code around).
+
+ loadmodule("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ ----------("l-number.lua")
+ ----------("l-set.lua")
+ ----------("l-os.lua")
+ loadmodule("l-file.lua")
+ ----------("l-md5.lua")
+ ----------("l-url.lua")
+ ----------("l-dir.lua")
+ loadmodule("l-boolean.lua")
+ ----------("l-unicode.lua")
+ loadmodule("l-math.lua")
+ loadmodule("util-str.lua")
+
+
+ -- The following modules contain code that is either not used at all outside context or will fail
+ -- when enabled due to lack of other modules.
+
+  -- First we load a few helper modules. This is about the minimum needed to let the font modules do
+  -- their work. Don't depend on their functions as we might strip them in future versions of this
+ -- generic variant.
loadmodule('luatex-basics-gen.lua')
loadmodule('data-con.lua')
- -- We do need some basic node support. The code in there is not for
- -- general use as it might change.
+ -- We do need some basic node support. The code in there is not for general use as it might change.
loadmodule('luatex-basics-nod.lua')
- -- Now come the font modules that deal with traditional TeX fonts
- -- as well as open type fonts. We only support OpenType fonts here.
+ -- Now come the font modules that deal with traditional tex fonts as well as open type fonts. We only
+ -- support OpenType fonts here.
--
- -- The font database file (if used at all) must be put someplace
- -- visible for kpse and is not shared with ConTeXt. The mtx-fonts
- -- script can be used to genate this file (using the --names
- -- option).
+ -- The font database file (if used at all) must be put someplace visible for kpse and is not shared
+  -- with context. The mtx-fonts script can be used to generate this file (using the --names option).
+
+  -- In 2013/14 we will merge/move some generic files into luatex-fonts-* files (copies) so that
+  -- intermediate updates of context do not interfere.
loadmodule('font-ini.lua')
loadmodule('font-con.lua')
@@ -176,16 +185,16 @@ else
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
- loadmodule('font-otn.lua')
- -- loadmodule('luatex-fonts-chr.lua')
loadmodule('font-ota.lua')
+ loadmodule('font-otn.lua')
+ ----------('luatex-fonts-chr.lua')
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')
loadmodule('luatex-fonts-def.lua')
loadmodule('luatex-fonts-ext.lua') -- some extensions
- -- We need to plug into a callback and the following module implements
- -- the handlers. Actual plugging in happens later.
+ -- We need to plug into a callback and the following module implements the handlers. Actual plugging
+ -- in happens later.
loadmodule('luatex-fonts-cbk.lua')
@@ -193,9 +202,8 @@ end
resolvers.loadmodule = loadmodule
--- In order to deal with the fonts we need to initialize some
--- callbacks. One can overload them later on if needed. First
--- a bit of abstraction.
+-- In order to deal with the fonts we need to initialize some callbacks. One can overload them later on if
+-- needed. First a bit of abstraction.
generic_context.callback_ligaturing = false
generic_context.callback_kerning = false
@@ -203,9 +211,10 @@ generic_context.callback_pre_linebreak_filter = nodes.simple_font_handler
generic_context.callback_hpack_filter = nodes.simple_font_handler
generic_context.callback_define_font = fonts.definers.read
--- The next ones can be done at a different moment if needed. You can create
--- a generic_context namespace and set no_callbacks_yet to true, load this
--- module, and enable the callbacks later.
+-- The next ones can be done at a different moment if needed. You can create a generic_context namespace
+-- and set no_callbacks_yet to true, load this module, and enable the callbacks later. So, there is really
+-- *no* need to create an alternative for luatex-fonts.lua and luatex-fonts-merged.lua: just load this one
+-- and overload if needed.
if not generic_context.no_callbacks_yet then
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.lua
new file mode 100644
index 00000000000..1ea8c1fd12a
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.lua
@@ -0,0 +1,45 @@
+if not modules then modules = { } end modules ['luatex-languages'] = {
+ version = 1.001,
+ comment = "companion to luatex-languages.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We borrow from ConTeXt.
+
+languages = languages or { }
+
+local loaded = { }
+
+function languages.loadpatterns(tag)
+ if not loaded[tag] then
+ loaded[tag] = 0
+ local filename = kpse.find_file("lang-" .. tag .. ".lua")
+    if not filename or filename == "" then
+ print("<unknown language file for: " .. tag .. ">")
+ else
+ local whatever = loadfile(filename)
+ if type(whatever) == "function" then
+ whatever = whatever()
+ if type(whatever) == "table" then
+ local characters = whatever.patterns.characters or ""
+ local patterns = whatever.patterns.data or ""
+ local exceptions = whatever.exceptions.data or ""
+ local language = lang.new()
+ for b in string.utfvalues(characters) do
+ tex.setlccode(b,b)
+ end
+ lang.patterns(language, patterns)
+ lang.hyphenation(language, exceptions)
+ loaded[tag] = lang.id(language)
+ else
+ print("<invalid language table: " .. tag .. ">")
+ end
+ else
+ print("<invalid language file: " .. tag .. ">")
+ end
+ end
+ end
+ return loaded[tag]
+end
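The loader returns the lang id, or 0 when nothing usable was found, so a caller can assign the result to tex.language, which is what the companion TeX file below does. A minimal Lua side sketch, assuming a lang-ru.lua as shipped with context can be found by kpse:

local id = languages.loadpatterns("ru")
if id and id > 0 then
    tex.language = id
end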
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.tex
new file mode 100644
index 00000000000..9778da39a44
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-languages.tex
@@ -0,0 +1,17 @@
+%D \module
+%D [ file=luatex-fonts,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic \OPENTYPE\ Font Handler,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D Cf. discussion on \CONTEXT\ list:
+
+\directlua {
+ dofile(kpse.find_file("luatex-languages.lua","tex"))
+}
+
+\def\loadpatterns#1{\directlua{tex.language = languages.loadpatterns("#1")}}
+
+\endinput
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-mplib.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-mplib.tex
index ef6dfff958b..8af9f2d8a7b 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-mplib.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-mplib.tex
@@ -31,8 +31,15 @@
\def\setmplibformat#1{\def\mplibformat{#1}}
\def\setupmplibcatcodes
- {\catcode`\{=12 \catcode`\}=12 \catcode`\#=12 \catcode`\^=12 \catcode`\~=12
- \catcode`\_=12 \catcode`\%=12 \catcode`\&=12 \catcode`\$=12 }
+ {\catcode`\{=12 % could be optional .. not really needed
+ \catcode`\}=12 % could be optional .. not really needed
+ \catcode`\#=12
+ \catcode`\^=12
+ \catcode`\~=12
+ \catcode`\_=12
+ \catcode`\%=12
+ \catcode`\&=12
+ \catcode`\$=12 }
\def\mplibcode
{\bgroup
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-plain.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-plain.tex
index e47ad58ad9a..028d4ab0ec7 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-plain.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-plain.tex
@@ -15,9 +15,10 @@
\everyjob \expandafter {%
\the\everyjob
- \input luatex-basics\relax
- \input luatex-fonts\relax
- \input luatex-mplib\relax
+ \input {luatex-basics}%
+ \input {luatex-fonts}%
+ \input {luatex-languages}%
+ \input {luatex-mplib}%
}
\edef\fmtversion{\fmtversion+luatex}
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.lua
new file mode 100644
index 00000000000..db6a729098d
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.lua
@@ -0,0 +1,25 @@
+local gm = swiglib("gmwand.core")
+
+gm.InitializeMagick(".")
+
+local magick_wand = gm.NewMagickWand()
+local drawing_wand = gm.NewDrawingWand()
+
+gm.MagickSetSize(magick_wand,800,600)
+gm.MagickReadImage(magick_wand,"xc:red")
+
+gm.DrawPushGraphicContext(drawing_wand)
+
+gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand())
+
+-- gm.DrawSetFont(drawing_wand, kpse.findfile("DejaVuSerifBold.ttf"))
+-- gm.DrawSetFontSize(drawing_wand, 96)
+-- gm.DrawAnnotation(drawing_wand,300,200, "LuaTeX")
+
+gm.DrawPopGraphicContext(drawing_wand)
+gm.MagickDrawImage(magick_wand,drawing_wand)
+
+gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1)
+
+gm.DestroyDrawingWand(drawing_wand)
+gm.DestroyMagickWand(magick_wand)
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.tex
new file mode 100644
index 00000000000..d26bb6ffbb3
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib-test.tex
@@ -0,0 +1,11 @@
+% luatex --fmt=luatex=plain luatex-swiglib-test.tex
+
+\input luatex-swiglib.tex
+
+\directlua {
+ dofile("luatex-swiglib-test.lua")
+}
+
+\pdfximage {luatex-swiglib-test.jpg} \pdfrefximage\pdflastximage
+
+\end
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.lua
new file mode 100644
index 00000000000..7ffcdc37562
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.lua
@@ -0,0 +1,62 @@
+if not modules then modules = { } end modules ['luatex-swiglib'] = {
+ version = 1.001,
+ comment = "companion to luatex-swiglib.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local savedrequire = require
+
+local libsuffix = os.type == "windows" and ".dll" or ".so"
+
+function requireswiglib(required,version)
+ local library = package.loaded[required]
+ if library then
+ return library
+ else
+ local name = string.gsub(required,"%.","/") .. libsuffix
+ local list = kpse.show_path("clua")
+ for root in string.gmatch(list,"([^;]+)") do
+ local full = false
+ if type(version) == "string" and version ~= "" then
+ full = root .. "/" .. version .. "/" .. name
+ full = lfs.isfile(full) and full
+ end
+ if not full then
+ full = root .. "/" .. name
+ full = lfs.isfile(full) and full
+ end
+ if full then
+ local path, base = string.match(full,"^(.-)([^\\/]+)" .. libsuffix .."$")
+ local savedlibrary = package.loaded[base]
+ package.loaded[base] = nil
+ local savedpath = lfs.currentdir()
+ lfs.chdir(path)
+ library = package.loadlib(full,"luaopen_" .. base)
+ if type(library) == "function" then
+ library = library()
+ texio.write("<swiglib: '",required,"' is loaded>")
+ end
+ lfs.chdir(savedpath)
+ package.loaded[base] = savedlibrary
+ package.loaded[required] = library
+ return library
+ end
+ end
+        texio.write("<swiglib: '",name,"' is not found on '",list,"'>")
+ end
+ texio.write("<swiglib: '",required,"' is not found>")
+end
+
+function require(name)
+ if string.find(name,"^swiglib%.") then
+ return requireswiglib(name)
+ else
+ return savedrequire(name)
+ end
+end
+
+function swiglib(name,version)
+ return requireswiglib("swiglib." .. name,version)
+end
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.tex
new file mode 100644
index 00000000000..7c437751587
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-swiglib.tex
@@ -0,0 +1,20 @@
+%D \module
+%D [ file=luatex-swiglib,
+%D version=2013.03.30,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic \SWIGLIB\ Font Handler,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D This is an experimental setup. Usage:
+%D
+%D \starttyping
+%D local gm = swiglib("gmwand.core")
+%D local gm = require("swiglib.gmwand.core")
+%D local sq = swiglib("mysql.core")
+%D local sq = swiglib("mysql.core","5.6")
+%D \stoptyping
+
+\directlua {
+ dofile(kpse.find_file("luatex-swiglib.lua","tex"))
+}
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
index 3020b5594cc..f757445d5c9 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
@@ -10,6 +10,13 @@
%D See \type {luatex-plain.tex} (or on my machine \type {luatex.tex}
%D for how to make a format.
+% You can generate a font database with:
+%
+% mtxrun --script fonts --reload --save
+%
+% The file luatex-fonts-names.lua has to be moved to a place
+% where kpse can find it.
+
\pdfoutput=1
% \directlua{generic_context.caches.compilemethod = "both" } % none luac dump both
@@ -54,8 +61,17 @@
\mine \input tufte \par
-
% \font\mine=file:luatex-fonts-demo-vf-2.lua at 12pt \mine [abab] \par
% \font\mine=file:luatex-fonts-demo-vf-3.lua at 12pt \mine [abab] \par
+\font\test=dejavuserif:+kern at 10pt \test
+
+\hsize 1mm
+
+\noindent Циолковский
+
+\loadpatterns{ru}
+
+\noindent Циолковский
+
\end
diff --git a/Master/texmf-dist/tex/mptopdf/config/mptopdf.ini b/Master/texmf-dist/tex/mptopdf/config/mptopdf.ini
deleted file mode 100644
index 1c1275568ce..00000000000
--- a/Master/texmf-dist/tex/mptopdf/config/mptopdf.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-% Thomas Esser, 1998. public domain.
-% hyphenation tables are now set up in language.dat
-\input pdftexconfig.tex
-\input mptopdf
-\dump
-\endinput
diff --git a/Master/tlpkg/libexec/ctan2tds b/Master/tlpkg/libexec/ctan2tds
index 7bb103f23e4..9f456d64611 100755
--- a/Master/tlpkg/libexec/ctan2tds
+++ b/Master/tlpkg/libexec/ctan2tds
@@ -4929,7 +4929,9 @@ sub POSTcontext {
# remove mptopdf files, since they are in a separate package.
chomp (my @mptopdf_files = `tlpfiles mptopdf | sed 's,[^/]*/,,'`);
- &SYSTEM ("$RM -f @mptopdf_files");
+ &SYSTEM ("$RM -f @mptopdf_files ");
+ &SYSTEM ("$RM doc/context/scripts/mkii/mptopdf.man");
+ &SYSTEM ("$RM scripts/context/stubs/mswin/mptopdf.exe");
print "still POST$package - user-level executables\n";
# similar to biber, executables go in both the Master bin dir and the
diff --git a/Master/tlpkg/libexec/mptopdf-extract b/Master/tlpkg/libexec/mptopdf-extract
index f959721a68f..530f64ef6bc 100755
--- a/Master/tlpkg/libexec/mptopdf-extract
+++ b/Master/tlpkg/libexec/mptopdf-extract
@@ -12,9 +12,17 @@ if test -z "$files"; then
exit 1
fi
+# Add man page in cont-tmf location:
+files="$files doc/context/scripts/mkii/mptopdf.man"
+
+# Add w32 .exe stub too:
+files="$files scripts/context/stubs/mswin/mptopdf.exe"
+
# We will extract them from the current context release.
ctan=/home/ftp/mirror/rsync.tex.ac.uk/CTAN
context_zip=$ctan/macros/context/current/cont-tmf.zip
+# eventually ...
+context_zip=/home/ftp/mirror/www.pragma-ade.com/context/beta/cont-tmf.zip
if test ! -s $context_zip; then
echo "$0: no context zip:" >&2
ls -l $context_zip >&2